From a7813a04b431f0552d1260477175d15da564b071 Mon Sep 17 00:00:00 2001 From: Ximin Luo Date: Thu, 7 Jul 2016 21:35:28 +0200 Subject: [PATCH] Imported Upstream version 1.10.0+dfsg1 --- CONTRIBUTING.md | 2 +- Makefile.in | 32 +- README.md | 2 +- RELEASES.md | 322 +- configure | 78 +- mk/cfg/armv7-linux-androideabi.mk | 25 + mk/cfg/i586-pc-windows-msvc.mk | 14 +- mk/cfg/i686-pc-windows-msvc.mk | 14 +- mk/cfg/i686-unknown-linux-musl.mk | 3 +- mk/cfg/x86_64-pc-windows-msvc.mk | 14 +- mk/cfg/x86_64-unknown-linux-musl.mk | 3 +- mk/crates.mk | 26 +- mk/ctags.mk | 15 +- mk/debuggers.mk | 11 +- mk/dist.mk | 6 +- mk/main.mk | 7 +- mk/platform.mk | 8 +- mk/reconfig.mk | 2 +- mk/rustllvm.mk | 4 + mk/snap.mk | 28 - mk/stage0.mk | 9 +- mk/target.mk | 29 +- mk/tests.mk | 174 +- src/bootstrap/Cargo.lock | 4 +- src/bootstrap/Cargo.toml | 2 +- src/bootstrap/README.md | 142 +- src/bootstrap/bootstrap.py | 223 +- src/bootstrap/build/cc.rs | 23 + src/bootstrap/build/channel.rs | 29 +- src/bootstrap/build/check.rs | 281 +- src/bootstrap/build/clean.rs | 10 + src/bootstrap/build/compile.rs | 21 +- src/bootstrap/build/config.rs | 33 +- src/bootstrap/build/dist.rs | 75 +- src/bootstrap/build/doc.rs | 36 + src/bootstrap/build/flags.rs | 11 +- src/bootstrap/build/job.rs | 2 +- src/bootstrap/build/mod.rs | 326 +- src/bootstrap/build/native.rs | 47 +- src/bootstrap/build/sanity.rs | 32 + src/bootstrap/build/step.rs | 191 +- src/bootstrap/build/util.rs | 20 +- src/bootstrap/config.toml.example | 159 + src/bootstrap/lib.rs | 9 + src/bootstrap/main.rs | 10 + src/bootstrap/mk/Makefile.in | 2 + src/bootstrap/rustc.rs | 122 +- src/bootstrap/rustdoc.rs | 10 +- src/compiletest/header.rs | 455 -- src/compiletest/runtest.rs | 2034 ------- src/doc/README.md | 4 +- src/doc/book/advanced-linking.md | 4 +- src/doc/book/casting-between-types.md | 17 +- src/doc/book/closures.md | 53 +- src/doc/book/compiler-plugins.md | 10 +- src/doc/book/concurrency.md | 29 +- src/doc/book/const-and-static.md | 2 +- src/doc/book/crates-and-modules.md | 2 +- src/doc/book/documentation.md | 12 +- src/doc/book/error-handling.md | 124 +- src/doc/book/ffi.md | 12 +- src/doc/book/functions.md | 4 +- src/doc/book/getting-started.md | 70 +- src/doc/book/guessing-game.md | 7 +- src/doc/book/inline-assembly.md | 2 +- src/doc/book/lifetimes.md | 4 +- src/doc/book/loops.md | 2 +- src/doc/book/macros.md | 10 +- src/doc/book/mutability.md | 10 +- src/doc/book/operators-and-overloading.md | 2 +- src/doc/book/ownership.md | 14 +- src/doc/book/primitive-types.md | 7 +- src/doc/book/references-and-borrowing.md | 30 +- src/doc/book/strings.md | 2 +- src/doc/book/testing.md | 4 +- src/doc/book/the-stack-and-the-heap.md | 4 +- src/doc/book/trait-objects.md | 2 +- src/doc/book/traits.md | 2 +- src/doc/book/unsized-types.md | 9 +- src/doc/book/vectors.md | 4 +- src/doc/footer.inc | 2 +- src/doc/nomicon/subtyping.md | 2 +- src/doc/nomicon/vec-alloc.md | 2 +- src/doc/reference.md | 49 +- src/doc/style/errors/ergonomics.md | 4 +- .../features/functions-and-methods/README.md | 4 +- .../features/functions-and-methods/input.md | 26 +- .../features/functions-and-methods/output.md | 10 +- src/doc/style/features/let.md | 14 +- src/doc/style/features/match.md | 4 +- src/doc/style/features/modules.md | 8 +- src/doc/style/features/traits/common.md | 2 +- src/doc/style/features/traits/generics.md | 4 +- src/doc/style/features/traits/objects.md | 2 +- src/doc/style/features/types/README.md | 4 +- src/doc/style/features/types/newtype.md | 6 +- src/doc/style/ownership/builders.md | 8 +- 
src/doc/style/ownership/constructors.md | 10 +- src/doc/style/style/braces.md | 8 +- src/doc/style/style/comments.md | 30 +- src/doc/style/style/features.md | 4 +- src/doc/style/style/imports.md | 4 +- src/doc/style/style/naming/README.md | 4 +- src/doc/style/style/naming/containers.md | 10 +- src/doc/style/style/naming/iterators.md | 2 +- src/doc/style/style/whitespace.md | 16 +- src/etc/check-binaries.py | 20 - src/etc/errorck.py | 136 - src/etc/featureck.py | 251 - src/etc/gdb_rust_pretty_printing.py | 2 + src/etc/generate-keyword-tests.py | 8 +- src/etc/get-snapshot.py | 78 - src/etc/get-stage0.py | 51 + src/etc/htmldocck.py | 9 +- src/etc/latest-unix-snaps.py | 65 - src/etc/licenseck.py | 56 - src/etc/lldb_batchmode.py | 1 + src/etc/local_stage0.sh | 8 + src/etc/make-snapshot.py | 15 - src/etc/maketest.py | 96 - src/etc/mirror-all-snapshots.py | 45 - src/etc/snapshot.py | 304 - src/etc/tidy.py | 230 - src/etc/unicode.py | 138 +- src/liballoc/Cargo.toml | 1 - src/liballoc/arc.rs | 61 +- src/liballoc/boxed.rs | 12 +- src/liballoc/lib.rs | 1 - src/liballoc/raw_vec.rs | 1 + src/liballoc/rc.rs | 59 +- src/liballoc_jemalloc/build.rs | 16 + src/liballoc_jemalloc/lib.rs | 16 +- src/liballoc_system/lib.rs | 54 +- src/libbacktrace/ChangeLog | 22 +- src/libbacktrace/elf.c | 6 +- src/libbacktrace/mmap.c | 4 + src/libbacktrace/pecoff.c | 3 + src/libcollections/Cargo.toml | 5 +- src/libcollections/binary_heap.rs | 94 +- src/libcollections/btree/map.rs | 215 +- src/libcollections/btree/node.rs | 320 +- src/libcollections/btree/set.rs | 39 +- src/libcollections/fmt.rs | 50 +- src/libcollections/lib.rs | 7 + src/libcollections/linked_list.rs | 24 + src/libcollections/slice.rs | 50 +- src/libcollections/str.rs | 4 +- src/libcollections/string.rs | 14 +- src/libcollections/vec.rs | 13 + src/libcollections/vec_deque.rs | 190 + src/libcollectionstest/binary_heap.rs | 32 + src/libcollectionstest/btree/map.rs | 52 + src/libcollectionstest/btree/set.rs | 24 + src/libcollectionstest/lib.rs | 6 +- src/libcollectionstest/linked_list.rs | 29 + src/libcollectionstest/str.rs | 16 + src/libcollectionstest/string.rs | 4 +- src/libcollectionstest/vec.rs | 3 + src/libcollectionstest/vec_deque.rs | 13 + src/libcore/Cargo.toml | 4 + src/libcore/build.rs | 3 + src/libcore/cell.rs | 105 +- src/libcore/char.rs | 70 +- src/libcore/clone.rs | 16 + src/libcore/cmp.rs | 2 +- src/libcore/convert.rs | 57 +- src/libcore/fmt/builders.rs | 2 +- src/libcore/fmt/mod.rs | 28 +- src/libcore/intrinsics.rs | 32 +- src/libcore/iter.rs | 5007 ----------------- src/libcore/iter/iterator.rs | 2111 +++++++ src/libcore/iter/mod.rs | 1675 ++++++ src/libcore/iter/range.rs | 548 ++ src/libcore/iter/sources.rs | 270 + src/libcore/iter/traits.rs | 526 ++ src/libcore/lib.rs | 4 + src/libcore/macros.rs | 4 +- src/libcore/mem.rs | 4 + src/libcore/num/bignum.rs | 2 +- src/libcore/num/dec2flt/algorithm.rs | 89 +- src/libcore/num/int_macros.rs | 13 + src/libcore/num/mod.rs | 218 +- src/libcore/num/uint_macros.rs | 13 + src/libcore/num/wrapping.rs | 9 + src/libcore/ops.rs | 5 + src/libcore/option.rs | 1 - src/libcore/ptr.rs | 55 + src/libcore/result.rs | 47 +- src/libcore/slice.rs | 108 +- src/libcore/str/mod.rs | 5 +- src/libcore/sync/atomic.rs | 1149 ++-- src/libcoretest/char.rs | 33 + src/libcoretest/iter.rs | 22 +- src/libcoretest/lib.rs | 3 +- src/libcoretest/num/mod.rs | 446 +- src/libflate/lib.rs | 12 +- src/liblibc/.travis.yml | 4 +- src/liblibc/Cargo.toml | 2 +- src/liblibc/README.md | 15 +- src/liblibc/ci/run-travis.sh | 6 +- 
src/liblibc/libc-test/Cargo.lock | 30 +- src/liblibc/libc-test/build.rs | 11 +- src/liblibc/src/lib.rs | 1 + src/liblibc/src/unix/bsd/apple/mod.rs | 330 +- .../src/unix/bsd/freebsdlike/dragonfly/mod.rs | 133 +- .../src/unix/bsd/freebsdlike/freebsd/mod.rs | 145 +- src/liblibc/src/unix/bsd/freebsdlike/mod.rs | 113 +- src/liblibc/src/unix/bsd/mod.rs | 18 +- .../src/unix/bsd/openbsdlike/bitrig.rs | 161 +- src/liblibc/src/unix/bsd/openbsdlike/mod.rs | 70 +- .../src/unix/bsd/openbsdlike/netbsd.rs | 168 +- .../src/unix/bsd/openbsdlike/openbsd.rs | 118 +- src/liblibc/src/unix/mod.rs | 65 +- src/liblibc/src/unix/notbsd/android/b32.rs | 116 + src/liblibc/src/unix/notbsd/android/b64.rs | 126 + src/liblibc/src/unix/notbsd/android/mod.rs | 180 +- src/liblibc/src/unix/notbsd/linux/mips.rs | 27 +- src/liblibc/src/unix/notbsd/linux/mod.rs | 145 +- .../src/unix/notbsd/linux/musl/b32/x86.rs | 13 + .../src/unix/notbsd/linux/musl/b64/x86_64.rs | 15 + src/liblibc/src/unix/notbsd/linux/musl/mod.rs | 8 - .../src/unix/notbsd/linux/other/b32/x86.rs | 23 +- .../src/unix/notbsd/linux/other/b64/x86_64.rs | 29 +- .../src/unix/notbsd/linux/other/mod.rs | 27 +- src/liblibc/src/unix/notbsd/mod.rs | 59 +- src/liblibc/src/unix/solaris/mod.rs | 126 + src/liblog/lib.rs | 50 +- src/libpanic_abort/Cargo.toml | 12 + src/libpanic_abort/lib.rs | 135 + src/libpanic_unwind/Cargo.lock | 27 + src/libpanic_unwind/Cargo.toml | 14 + .../common => libpanic_unwind}/dwarf/eh.rs | 3 +- .../common => libpanic_unwind}/dwarf/mod.rs | 1 - .../common/unwind => libpanic_unwind}/gcc.rs | 113 +- src/libpanic_unwind/lib.rs | 109 + src/libpanic_unwind/seh.rs | 326 ++ .../unwind => libpanic_unwind}/seh64_gnu.rs | 23 +- src/libpanic_unwind/windows.rs | 98 + src/librand/Cargo.toml | 1 - src/librand/distributions/exponential.rs | 1 + src/librand/distributions/gamma.rs | 1 + src/librand/distributions/mod.rs | 2 + src/librand/distributions/normal.rs | 1 + src/librand/lib.rs | 2 +- src/librustc/Cargo.toml | 1 + src/librustc/cfg/construct.rs | 10 +- src/librustc/cfg/mod.rs | 6 +- src/librustc/dep_graph/debug.rs | 69 + src/librustc/dep_graph/dep_node.rs | 13 + src/librustc/dep_graph/mod.rs | 2 + src/librustc/dep_graph/query.rs | 21 +- src/librustc/dep_graph/thread.rs | 8 +- src/librustc/dep_graph/visit.rs | 11 +- src/librustc/diagnostics.rs | 17 +- src/librustc/hir/def.rs | 3 +- src/librustc/hir/fold.rs | 71 +- src/librustc/hir/intravisit.rs | 68 +- src/librustc/hir/lowering.rs | 3195 +++++------ src/librustc/hir/map/blocks.rs | 10 +- src/librustc/hir/map/collector.rs | 271 +- src/librustc/hir/map/def_collector.rs | 438 ++ src/librustc/hir/map/definitions.rs | 72 +- src/librustc/hir/map/mod.rs | 65 +- src/librustc/hir/mod.rs | 180 +- src/librustc/hir/pat_util.rs | 20 +- src/librustc/hir/print.rs | 155 +- src/librustc/hir/svh.rs | 72 +- src/librustc/infer/bivariate.rs | 28 +- src/librustc/infer/combine.rs | 188 +- src/librustc/infer/equate.rs | 29 +- src/librustc/infer/error_reporting.rs | 257 +- src/librustc/infer/freshen.rs | 13 +- src/librustc/infer/glb.rs | 29 +- src/librustc/infer/higher_ranked/mod.rs | 410 +- src/librustc/infer/lattice.rs | 17 +- src/librustc/infer/lub.rs | 29 +- src/librustc/infer/mod.rs | 713 ++- .../infer/region_inference/graphviz.rs | 28 +- src/librustc/infer/region_inference/mod.rs | 46 +- src/librustc/infer/resolve.rs | 36 +- src/librustc/infer/sub.rs | 30 +- src/librustc/infer/unify_key.rs | 12 +- src/librustc/lib.rs | 2 +- src/librustc/lint/builtin.rs | 24 +- src/librustc/lint/context.rs | 31 +- 
src/librustc/middle/astconv_util.rs | 97 +- src/librustc/middle/cstore.rs | 194 +- src/librustc/middle/dataflow.rs | 4 +- src/librustc/middle/dead.rs | 32 +- src/librustc/middle/dependency_format.rs | 145 +- src/librustc/middle/effect.rs | 4 +- src/librustc/middle/expr_use_visitor.rs | 89 +- src/librustc/middle/free_region.rs | 9 +- src/librustc/middle/intrinsicck.rs | 66 +- src/librustc/middle/lang_items.rs | 21 +- src/librustc/middle/liveness.rs | 47 +- src/librustc/middle/mem_categorization.rs | 68 +- src/librustc/middle/privacy.rs | 9 +- src/librustc/middle/reachable.rs | 19 +- src/librustc/middle/region.rs | 2 +- src/librustc/middle/resolve_lifetime.rs | 17 +- src/librustc/middle/stability.rs | 132 +- src/librustc/middle/weak_lang_items.rs | 21 +- src/librustc/mir/repr.rs | 40 +- src/librustc/mir/tcx.rs | 31 +- src/librustc/mir/transform.rs | 90 +- src/librustc/mir/visit.rs | 14 +- src/librustc/session/config.rs | 104 +- src/librustc/session/filesearch.rs | 1 - src/librustc/session/mod.rs | 103 +- src/librustc/traits/coherence.rs | 59 +- src/librustc/traits/error_reporting.rs | 1540 ++--- src/librustc/traits/fulfill.rs | 475 +- src/librustc/traits/mod.rs | 177 +- src/librustc/traits/object_safety.rs | 563 +- src/librustc/traits/project.rs | 667 ++- src/librustc/traits/select.rs | 939 ++-- src/librustc/traits/specialize/mod.rs | 104 +- .../traits/specialize/specialization_graph.rs | 140 +- src/librustc/traits/structural_impls.rs | 139 +- src/librustc/traits/util.rs | 384 +- src/librustc/ty/_match.rs | 24 +- src/librustc/ty/adjustment.rs | 27 +- src/librustc/ty/contents.rs | 41 +- src/librustc/ty/context.rs | 801 ++- src/librustc/ty/error.rs | 14 +- src/librustc/ty/fast_reject.rs | 8 +- src/librustc/ty/flags.rs | 15 +- src/librustc/ty/fold.rs | 195 +- src/librustc/ty/item_path.rs | 94 +- src/librustc/ty/layout.rs | 76 +- src/librustc/ty/mod.rs | 737 ++- src/librustc/ty/outlives.rs | 275 +- src/librustc/ty/relate.rs | 367 +- src/librustc/ty/structural_impls.rs | 449 +- src/librustc/ty/sty.rs | 90 +- src/librustc/ty/subst.rs | 39 +- src/librustc/ty/trait_def.rs | 34 +- src/librustc/ty/util.rs | 243 +- src/librustc/ty/wf.rs | 121 +- src/librustc/util/ppaux.rs | 57 +- src/librustc_back/dynamic_lib.rs | 10 +- src/librustc_back/lib.rs | 1 - src/librustc_back/sha2.rs | 47 +- src/librustc_back/target/aarch64_apple_ios.rs | 1 + .../target/aarch64_linux_android.rs | 7 +- .../target/aarch64_unknown_linux_gnu.rs | 3 +- src/librustc_back/target/apple_base.rs | 3 +- .../target/arm_linux_androideabi.rs | 5 +- .../target/arm_unknown_linux_gnueabi.rs | 5 +- .../target/arm_unknown_linux_gnueabihf.rs | 5 +- src/librustc_back/target/armv7_apple_ios.rs | 1 + .../target/armv7_linux_androideabi.rs | 29 + .../target/armv7_unknown_linux_gnueabihf.rs | 3 +- src/librustc_back/target/armv7s_apple_ios.rs | 1 + .../target/asmjs_unknown_emscripten.rs | 1 + src/librustc_back/target/bitrig_base.rs | 1 - src/librustc_back/target/dragonfly_base.rs | 1 - src/librustc_back/target/freebsd_base.rs | 1 - src/librustc_back/target/i386_apple_ios.rs | 7 +- src/librustc_back/target/i686_apple_darwin.rs | 1 + .../target/i686_linux_android.rs | 9 +- .../target/i686_pc_windows_gnu.rs | 1 + .../target/i686_pc_windows_msvc.rs | 1 + .../target/i686_unknown_dragonfly.rs | 1 + .../target/i686_unknown_freebsd.rs | 1 + .../target/i686_unknown_linux_gnu.rs | 1 + .../target/i686_unknown_linux_musl.rs | 20 +- src/librustc_back/target/le32_unknown_nacl.rs | 1 + src/librustc_back/target/linux_musl_base.rs | 71 + 
.../target/mips_unknown_linux_gnu.rs | 1 + .../target/mips_unknown_linux_musl.rs | 1 + .../target/mipsel_unknown_linux_gnu.rs | 1 + .../target/mipsel_unknown_linux_musl.rs | 1 + src/librustc_back/target/mod.rs | 23 +- src/librustc_back/target/netbsd_base.rs | 1 - src/librustc_back/target/openbsd_base.rs | 1 - .../target/powerpc64_unknown_linux_gnu.rs | 1 + .../target/powerpc64le_unknown_linux_gnu.rs | 1 + .../target/powerpc_unknown_linux_gnu.rs | 1 + src/librustc_back/target/solaris_base.rs | 1 - .../target/x86_64_apple_darwin.rs | 1 + src/librustc_back/target/x86_64_apple_ios.rs | 7 +- .../target/x86_64_pc_windows_gnu.rs | 1 + .../target/x86_64_pc_windows_msvc.rs | 1 + .../target/x86_64_rumprun_netbsd.rs | 1 + .../target/x86_64_sun_solaris.rs | 1 + .../target/x86_64_unknown_bitrig.rs | 1 + .../target/x86_64_unknown_dragonfly.rs | 1 + .../target/x86_64_unknown_freebsd.rs | 1 + .../target/x86_64_unknown_linux_gnu.rs | 1 + .../target/x86_64_unknown_linux_musl.rs | 57 +- .../target/x86_64_unknown_netbsd.rs | 1 + .../target/x86_64_unknown_openbsd.rs | 1 + src/librustc_bitflags/Cargo.toml | 2 +- src/librustc_borrowck/Cargo.toml | 1 + src/librustc_borrowck/borrowck/check_loans.rs | 248 +- src/librustc_borrowck/borrowck/fragments.rs | 57 +- .../borrowck/gather_loans/gather_moves.rs | 2 +- .../borrowck/gather_loans/mod.rs | 38 +- .../borrowck/gather_loans/move_error.rs | 50 +- .../borrowck/mir/dataflow.rs | 4 +- .../borrowck/mir/gather_moves.rs | 8 +- src/librustc_borrowck/borrowck/mir/mod.rs | 2 +- src/librustc_borrowck/borrowck/mod.rs | 247 +- src/librustc_borrowck/borrowck/move_data.rs | 21 +- src/librustc_borrowck/diagnostics.rs | 586 +- src/librustc_const_eval/check_match.rs | 84 +- src/librustc_const_eval/diagnostics.rs | 74 +- src/librustc_const_eval/eval.rs | 282 +- src/librustc_const_eval/lib.rs | 2 + src/librustc_const_math/int.rs | 2 +- src/librustc_const_math/lib.rs | 2 +- src/librustc_data_structures/bitvec.rs | 33 +- src/librustc_data_structures/graph/mod.rs | 14 +- src/librustc_data_structures/lib.rs | 2 + .../obligation_forest/mod.rs | 666 ++- .../obligation_forest/test.rs | 416 +- src/librustc_data_structures/snapshot_vec.rs | 8 + src/librustc_driver/driver.rs | 423 +- src/librustc_driver/lib.rs | 210 +- src/librustc_driver/pretty.rs | 487 +- src/librustc_driver/target_features.rs | 105 +- src/librustc_driver/test.rs | 131 +- src/librustc_incremental/assert_dep_graph.rs | 42 +- src/librustc_incremental/calculate_svh.rs | 42 +- src/librustc_incremental/persist/data.rs | 62 +- src/librustc_incremental/persist/directory.rs | 31 +- .../persist/dirty_clean.rs | 6 +- src/librustc_incremental/persist/hash.rs | 163 + src/librustc_incremental/persist/load.rs | 37 +- src/librustc_incremental/persist/mod.rs | 1 + src/librustc_incremental/persist/save.rs | 202 +- src/librustc_incremental/persist/serialize.rs | 0 src/librustc_incremental/persist/util.rs | 49 +- src/librustc_lint/Cargo.toml | 1 + src/librustc_lint/bad_style.rs | 4 +- src/librustc_lint/builtin.rs | 97 +- src/librustc_lint/lib.rs | 12 + src/librustc_lint/types.rs | 25 +- src/librustc_llvm/build.rs | 7 + src/librustc_llvm/lib.rs | 14 +- src/librustc_metadata/astencode.rs | 154 +- src/librustc_metadata/common.rs | 6 + src/librustc_metadata/creader.rs | 422 +- src/librustc_metadata/csearch.rs | 150 +- src/librustc_metadata/cstore.rs | 57 +- src/librustc_metadata/decoder.rs | 218 +- src/librustc_metadata/def_key.rs | 107 + src/librustc_metadata/diagnostics.rs | 22 +- src/librustc_metadata/encoder.rs | 258 +- 
src/librustc_metadata/index.rs | 2 +- src/librustc_metadata/lib.rs | 3 +- src/librustc_metadata/loader.rs | 212 +- src/librustc_metadata/tls_context.rs | 10 +- src/librustc_metadata/tydecode.rs | 24 +- src/librustc_metadata/tyencode.rs | 23 +- src/librustc_mir/Cargo.toml | 1 + src/librustc_mir/build/block.rs | 12 +- src/librustc_mir/build/expr/as_constant.rs | 2 +- src/librustc_mir/build/expr/as_lvalue.rs | 2 +- src/librustc_mir/build/expr/as_operand.rs | 2 +- src/librustc_mir/build/expr/as_rvalue.rs | 18 +- src/librustc_mir/build/expr/as_temp.rs | 19 +- src/librustc_mir/build/expr/into.rs | 90 +- src/librustc_mir/build/expr/mod.rs | 1 + src/librustc_mir/build/expr/stmt.rs | 135 + src/librustc_mir/build/into.rs | 32 +- src/librustc_mir/build/matches/mod.rs | 45 +- src/librustc_mir/build/matches/simplify.rs | 2 +- src/librustc_mir/build/matches/test.rs | 2 +- src/librustc_mir/build/matches/util.rs | 6 +- src/librustc_mir/build/misc.rs | 6 +- src/librustc_mir/build/mod.rs | 329 +- src/librustc_mir/build/scope.rs | 39 +- src/librustc_mir/diagnostics.rs | 387 ++ src/librustc_mir/graphviz.rs | 13 +- src/librustc_mir/hair/cx/block.rs | 14 +- src/librustc_mir/hair/cx/expr.rs | 884 +-- src/librustc_mir/hair/cx/mod.rs | 33 +- src/librustc_mir/hair/cx/pattern.rs | 27 +- src/librustc_mir/hair/mod.rs | 14 +- src/librustc_mir/lib.rs | 8 + src/librustc_mir/mir_map.rs | 315 +- src/librustc_mir/pretty.rs | 209 +- .../transform/break_cleanup_edges.rs | 111 + .../transform/break_critical_edges.rs | 117 - src/librustc_mir/transform/erase_regions.rs | 12 +- src/librustc_mir/transform/mod.rs | 4 +- src/librustc_mir/transform/no_landing_pads.rs | 6 +- src/librustc_mir/transform/promote_consts.rs | 412 ++ src/librustc_mir/transform/qualify_consts.rs | 1048 ++++ .../transform/remove_dead_blocks.rs | 9 +- src/librustc_mir/transform/simplify_cfg.rs | 185 +- src/librustc_mir/transform/type_check.rs | 95 +- src/librustc_mir/traversal.rs | 10 + src/librustc_passes/Cargo.toml | 1 + src/librustc_passes/const_fn.rs | 118 - src/librustc_passes/consts.rs | 251 +- src/librustc_passes/diagnostics.rs | 482 -- src/librustc_passes/lib.rs | 2 +- src/librustc_passes/loops.rs | 2 +- src/librustc_passes/rvalues.rs | 36 +- src/librustc_plugin/load.rs | 39 +- src/librustc_plugin/registry.rs | 7 +- src/librustc_privacy/lib.rs | 50 +- src/librustc_resolve/Cargo.toml | 2 +- src/librustc_resolve/build_reduced_graph.rs | 357 +- src/librustc_resolve/check_unused.rs | 47 +- src/librustc_resolve/diagnostics.rs | 211 +- src/librustc_resolve/lib.rs | 1314 +++-- src/librustc_resolve/resolve_imports.rs | 172 +- src/librustc_save_analysis/Cargo.toml | 1 + src/librustc_save_analysis/csv_dumper.rs | 286 +- src/librustc_save_analysis/data.rs | 133 +- src/librustc_save_analysis/dump.rs | 52 +- src/librustc_save_analysis/dump_visitor.rs | 311 +- src/librustc_save_analysis/external_data.rs | 632 +++ src/librustc_save_analysis/json_dumper.rs | 426 ++ src/librustc_save_analysis/lib.rs | 156 +- src/librustc_save_analysis/span_utils.rs | 25 +- src/librustc_trans/Cargo.toml | 1 + src/librustc_trans/_match.rs | 40 +- src/librustc_trans/abi.rs | 68 +- src/librustc_trans/adt.rs | 27 +- src/librustc_trans/back/archive.rs | 2 - src/librustc_trans/back/link.rs | 91 +- src/librustc_trans/back/linker.rs | 126 +- src/librustc_trans/back/lto.rs | 7 +- src/librustc_trans/back/symbol_names.rs | 6 +- src/librustc_trans/back/write.rs | 108 +- src/librustc_trans/base.rs | 353 +- src/librustc_trans/builder.rs | 55 +- src/librustc_trans/callee.rs | 108 +- 
src/librustc_trans/cleanup.rs | 4 +- src/librustc_trans/closure.rs | 65 +- src/librustc_trans/collector.rs | 779 ++- src/librustc_trans/common.rs | 200 +- src/librustc_trans/consts.rs | 101 +- src/librustc_trans/context.rs | 380 +- src/librustc_trans/controlflow.rs | 8 +- src/librustc_trans/datum.rs | 4 +- .../debuginfo/create_scope_map.rs | 96 +- src/librustc_trans/debuginfo/metadata.rs | 59 +- src/librustc_trans/debuginfo/mod.rs | 414 +- src/librustc_trans/debuginfo/namespace.rs | 160 +- src/librustc_trans/debuginfo/source_loc.rs | 104 +- src/librustc_trans/debuginfo/type_names.rs | 5 +- src/librustc_trans/debuginfo/utils.rs | 36 +- src/librustc_trans/declare.rs | 14 +- src/librustc_trans/diagnostics.rs | 15 - src/librustc_trans/expr.rs | 77 +- src/librustc_trans/glue.rs | 55 +- src/librustc_trans/inline.rs | 2 +- src/librustc_trans/intrinsic.rs | 173 +- src/librustc_trans/lib.rs | 4 +- src/librustc_trans/meth.rs | 45 +- src/librustc_trans/mir/analyze.rs | 27 +- src/librustc_trans/mir/block.rs | 171 +- src/librustc_trans/mir/constant.rs | 858 ++- src/librustc_trans/mir/lvalue.rs | 51 +- src/librustc_trans/mir/mod.rs | 188 +- src/librustc_trans/mir/operand.rs | 9 +- src/librustc_trans/mir/rvalue.rs | 102 +- src/librustc_trans/mir/statement.rs | 29 +- src/librustc_trans/monomorphize.rs | 28 +- src/librustc_trans/partitioning.rs | 401 ++ src/librustc_trans/symbol_names_test.rs | 2 +- src/librustc_trans/trans_item.rs | 384 ++ src/librustc_trans/type_of.rs | 20 +- src/librustc_typeck/Cargo.toml | 1 + src/librustc_typeck/astconv.rs | 3560 ++++++------ src/librustc_typeck/check/_match.rs | 1403 +++-- src/librustc_typeck/check/assoc.rs | 13 +- src/librustc_typeck/check/callee.rs | 539 +- src/librustc_typeck/check/cast.rs | 266 +- src/librustc_typeck/check/closure.rs | 420 +- src/librustc_typeck/check/coercion.rs | 375 +- src/librustc_typeck/check/compare_method.rs | 504 +- src/librustc_typeck/check/demand.rs | 79 +- src/librustc_typeck/check/dropck.rs | 131 +- src/librustc_typeck/check/intrinsic.rs | 68 +- src/librustc_typeck/check/method/confirm.rs | 244 +- src/librustc_typeck/check/method/mod.rs | 563 +- src/librustc_typeck/check/method/probe.rs | 443 +- src/librustc_typeck/check/method/suggest.rs | 649 ++- src/librustc_typeck/check/mod.rs | 4832 ++++++++-------- src/librustc_typeck/check/op.rs | 601 +- src/librustc_typeck/check/regionck.rs | 2510 ++++----- src/librustc_typeck/check/upvar.rs | 126 +- src/librustc_typeck/check/wfcheck.rs | 308 +- src/librustc_typeck/check/writeback.rs | 175 +- src/librustc_typeck/check_unused.rs | 64 + src/librustc_typeck/coherence/mod.rs | 319 +- src/librustc_typeck/coherence/orphan.rs | 4 +- src/librustc_typeck/coherence/overlap.rs | 47 +- src/librustc_typeck/coherence/unsafety.rs | 4 +- src/librustc_typeck/collect.rs | 491 +- .../constrained_type_params.rs | 11 +- src/librustc_typeck/diagnostics.rs | 255 +- src/librustc_typeck/lib.rs | 112 +- src/librustc_typeck/variance/constraints.rs | 4 +- src/librustc_typeck/variance/mod.rs | 2 +- src/librustc_typeck/variance/terms.rs | 6 +- src/librustc_unicode/tables.rs | 2379 ++++---- src/librustdoc/clean/inline.rs | 160 +- src/librustdoc/clean/mod.rs | 235 +- src/librustdoc/clean/simplify.rs | 6 +- src/librustdoc/core.rs | 118 +- src/librustdoc/doctree.rs | 1 + src/librustdoc/html/format.rs | 138 +- src/librustdoc/html/highlight.rs | 344 +- src/librustdoc/html/layout.rs | 3 +- src/librustdoc/html/render.rs | 481 +- src/librustdoc/html/static/main.js | 12 +- src/librustdoc/html/static/rustdoc.css | 55 +- 
src/librustdoc/html/static/styles/main.css | 3 + src/librustdoc/lib.rs | 22 +- src/librustdoc/passes.rs | 81 +- src/librustdoc/test.rs | 38 +- src/librustdoc/visit_ast.rs | 56 +- src/librustdoc/visit_lib.rs | 109 + src/libserialize/json.rs | 2 +- src/libstd/Cargo.toml | 4 +- src/libstd/build.rs | 37 +- src/libstd/collections/hash/bench.rs | 1 - src/libstd/collections/hash/map.rs | 63 +- src/libstd/collections/hash/set.rs | 4 +- src/libstd/collections/mod.rs | 10 +- src/libstd/env.rs | 20 +- src/libstd/error.rs | 7 + src/libstd/ffi/c_str.rs | 53 +- src/libstd/ffi/mod.rs | 2 + src/libstd/fs.rs | 21 +- src/libstd/io/buffered.rs | 2 +- src/libstd/io/error.rs | 1 - src/libstd/io/lazy.rs | 21 +- src/libstd/io/mod.rs | 21 +- src/libstd/lib.rs | 12 + src/libstd/macros.rs | 10 +- src/libstd/net/ip.rs | 12 +- src/libstd/net/tcp.rs | 2 + src/libstd/net/udp.rs | 2 +- src/libstd/num/f32.rs | 7 +- src/libstd/num/f64.rs | 9 +- src/libstd/num/mod.rs | 2 +- src/libstd/panic.rs | 25 +- src/libstd/panicking.rs | 241 +- src/libstd/path.rs | 4 +- src/libstd/process.rs | 246 +- src/libstd/rt.rs | 8 +- src/libstd/sync/barrier.rs | 2 +- src/libstd/sync/condvar.rs | 31 + src/libstd/sync/mod.rs | 6 +- src/libstd/sync/mpsc/mod.rs | 2 +- src/libstd/sync/mutex.rs | 44 +- src/libstd/sync/once.rs | 7 +- src/libstd/sync/rwlock.rs | 42 +- src/libstd/sys/common/args.rs | 16 +- src/libstd/sys/common/mod.rs | 3 - src/libstd/sys/common/unwind/mod.rs | 241 - src/libstd/sys/common/unwind/seh.rs | 153 - src/libstd/sys/common/util.rs | 26 +- src/libstd/sys/unix/android.rs | 119 + .../unix/backtrace/tracing/backtrace_fn.rs | 28 +- .../sys/unix/backtrace/tracing/gcc_s.rs | 164 +- src/libstd/sys/unix/ext/fs.rs | 4 +- src/libstd/sys/unix/ext/net.rs | 72 +- src/libstd/sys/unix/ext/process.rs | 9 + src/libstd/sys/unix/fs.rs | 47 +- src/libstd/sys/unix/mod.rs | 32 +- src/libstd/sys/unix/os.rs | 52 +- src/libstd/sys/unix/process.rs | 8 +- src/libstd/sys/unix/rand.rs | 17 +- src/libstd/sys/unix/stack_overflow.rs | 2 +- src/libstd/sys/unix/thread.rs | 30 +- src/libstd/sys/unix/time.rs | 318 +- src/libstd/sys/windows/c.rs | 53 - src/libstd/sys/windows/ext/fs.rs | 19 +- src/libstd/sys/windows/ext/process.rs | 15 + src/libstd/sys/windows/fs.rs | 1 - src/libstd/sys/windows/process.rs | 33 +- src/libstd/thread/local.rs | 4 +- src/libstd/thread/mod.rs | 50 +- src/libstd/time/duration.rs | 8 +- src/libstd/time/mod.rs | 2 +- src/libsyntax/ast.rs | 156 +- src/libsyntax/attr.rs | 6 +- src/libsyntax/codemap.rs | 353 +- src/libsyntax/diagnostics/macros.rs | 6 +- src/libsyntax/diagnostics/plugin.rs | 10 +- src/libsyntax/errors/emitter.rs | 986 ++-- src/libsyntax/errors/json.rs | 305 +- src/libsyntax/errors/mod.rs | 269 +- src/libsyntax/errors/snippet/mod.rs | 874 +++ src/libsyntax/errors/snippet/test.rs | 583 ++ src/libsyntax/ext/base.rs | 7 +- src/libsyntax/ext/build.rs | 47 +- src/libsyntax/ext/expand.rs | 129 +- src/libsyntax/ext/quote.rs | 36 +- src/libsyntax/ext/tt/macro_parser.rs | 16 +- src/libsyntax/ext/tt/macro_rules.rs | 23 +- src/libsyntax/ext/tt/transcribe.rs | 16 +- src/libsyntax/feature_gate.rs | 1011 ++-- src/libsyntax/fold.rs | 30 +- src/libsyntax/lib.rs | 3 +- src/libsyntax/parse/attr.rs | 10 +- src/libsyntax/parse/lexer/mod.rs | 227 +- src/libsyntax/parse/lexer/unicode_chars.rs | 59 +- src/libsyntax/parse/mod.rs | 47 +- src/libsyntax/parse/parser.rs | 845 ++- src/libsyntax/parse/token.rs | 380 +- src/libsyntax/print/pp.rs | 129 +- src/libsyntax/print/pprust.rs | 83 +- src/libsyntax/ptr.rs | 42 +- src/libsyntax/std_inject.rs | 8 +- 
src/libsyntax/test.rs | 12 +- src/libsyntax/util/interner.rs | 2 - src/libsyntax/visit.rs | 11 +- src/libsyntax_ext/concat_idents.rs | 2 +- src/libsyntax_ext/deriving/clone.rs | 126 +- src/libsyntax_ext/deriving/cmp/eq.rs | 1 + src/libsyntax_ext/deriving/cmp/ord.rs | 1 + src/libsyntax_ext/deriving/cmp/partial_eq.rs | 1 + src/libsyntax_ext/deriving/cmp/partial_ord.rs | 2 + src/libsyntax_ext/deriving/debug.rs | 1 + src/libsyntax_ext/deriving/decodable.rs | 3 +- src/libsyntax_ext/deriving/default.rs | 1 + src/libsyntax_ext/deriving/encodable.rs | 20 +- src/libsyntax_ext/deriving/generic/mod.rs | 55 +- src/libsyntax_ext/deriving/generic/ty.rs | 13 +- src/libsyntax_ext/deriving/hash.rs | 1 + src/libsyntax_ext/deriving/mod.rs | 94 +- src/libsyntax_ext/format.rs | 35 +- src/libterm/terminfo/parm.rs | 16 +- src/libtest/lib.rs | 4 +- src/libunwind/Cargo.toml | 14 + src/libunwind/build.rs | 39 + src/libunwind/lib.rs | 30 + .../sys/common => libunwind}/libunwind.rs | 117 +- src/rustc/Cargo.lock | 36 +- src/rustc/Cargo.toml | 2 + src/rustc/libc_shim/Cargo.toml | 1 + src/rustc/libc_shim/build.rs | 3 + src/rustc/std_shim/Cargo.lock | 52 +- src/rustc/std_shim/Cargo.toml | 2 + src/rustc/test_shim/Cargo.toml | 2 + src/rustc/test_shim/lib.rs | 4 + src/rustllvm/PassWrapper.cpp | 69 + src/rustllvm/llvm-auto-clean-trigger | 2 +- src/snapshots.txt | 2325 -------- src/stage0.txt | 17 + .../auxiliary/cgu_export_trait_method.rs | 0 .../auxiliary/cgu_extern_closures.rs | 0 .../auxiliary/cgu_generic_function.rs | 3 +- .../cross-crate-closures.rs | 2 +- .../cross-crate-generic-functions.rs | 0 .../cross-crate-trait-method.rs | 0 .../drop_in_place_intrinsic.rs | 41 + .../function-as-argument.rs | 0 .../generic-drop-glue.rs | 7 +- .../generic-functions.rs | 0 .../{ => item-collection}/generic-impl.rs | 0 .../impl-in-non-instantiated-generic.rs | 2 - .../instantiation-through-vtable.rs | 0 .../items-within-generic-items.rs | 0 .../non-generic-closures.rs | 2 - .../non-generic-drop-glue.rs | 2 + .../non-generic-functions.rs | 0 .../overloaded-operators.rs | 0 .../item-collection/static-init.rs | 23 + .../statics-and-consts.rs | 0 .../trait-implementations.rs | 0 .../trait-method-as-argument.rs | 0 .../trait-method-default-impl.rs | 0 .../transitive-drop-glue.rs | 6 +- .../{ => item-collection}/tuple-drop-glue.rs | 1 + .../{ => item-collection}/unsizing.rs | 0 .../unused-traits-and-generics.rs | 0 .../auxiliary/cgu_explicit_inlining.rs | 20 + .../auxiliary/cgu_extern_drop_glue.rs | 17 + .../auxiliary/cgu_generic_function.rs | 37 + .../partitioning/extern-drop-glue.rs | 46 + .../partitioning/extern-generic.rs | 64 + .../inlining-from-extern-crate.rs | 61 + .../partitioning/local-drop-glue.rs | 64 + .../partitioning/local-generic.rs | 60 + .../partitioning/local-inlining.rs | 54 + .../partitioning/local-transitive-inlining.rs | 54 + .../methods-are-with-self-type.rs | 80 + .../partitioning/regular-modules.rs | 84 + .../codegen-units/partitioning/statics.rs | 50 + src/test/codegen/lto-removes-invokes.rs | 31 + src/test/codegen/stores.rs | 16 +- .../auxiliary/attr_plugin_test.rs | 0 .../auxiliary/lint_for_crate.rs | 0 .../auxiliary/lint_group_plugin_test.rs | 0 .../auxiliary/lint_plugin_test.rs | 0 .../auxiliary/macro_crate_MacroRulesTT.rs | 0 .../auxiliary/macro_crate_test.rs | 0 .../auxiliary/macro_reexport_1.rs | 0 .../auxiliary/rlib_crate_test.rs | 0 .../auxiliary/use_from_trait_xc.rs | 0 src/test/compile-fail/E0001.rs | 18 + src/test/compile-fail/E0002.rs | 15 + src/test/compile-fail/E0004.rs | 22 + 
src/test/compile-fail/E0005.rs | 14 + src/test/compile-fail/E0007.rs | 18 + src/test/compile-fail/E0008.rs | 16 + src/test/compile-fail/E0009.rs | 18 + src/test/compile-fail/E0010.rs | 15 + src/test/compile-fail/E0017.rs | 22 + src/test/compile-fail/E0023.rs | 22 + src/test/compile-fail/E0024.rs | 22 + src/test/compile-fail/E0025.rs | 19 + src/test/compile-fail/E0026.rs | 21 + src/test/compile-fail/E0027.rs | 22 + src/test/compile-fail/E0029.rs | 18 + src/test/compile-fail/E0030.rs | 16 + src/test/compile-fail/E0033.rs | 19 + src/test/compile-fail/E0034.rs | 26 + src/test/compile-fail/E0035.rs | 20 + src/test/compile-fail/E0036.rs | 23 + src/test/compile-fail/E0038.rs | 20 + src/test/compile-fail/E0040.rs | 24 + src/test/compile-fail/E0044.rs | 14 + src/test/compile-fail/E0045.rs | 14 + src/test/compile-fail/E0046.rs | 20 + src/test/compile-fail/E0049.rs | 22 + src/test/compile-fail/E0050.rs | 22 + src/test/compile-fail/E0053.rs | 24 + src/test/compile-fail/E0054.rs | 14 + src/test/compile-fail/E0055.rs | 23 + src/test/compile-fail/E0057.rs | 16 + src/test/compile-fail/E0059.rs | 16 + src/test/compile-fail/E0060.rs | 17 + src/test/compile-fail/E0061.rs | 15 + src/test/compile-fail/array-not-vector.rs | 14 +- src/test/compile-fail/array_const_index-0.rs | 5 +- src/test/compile-fail/array_const_index-1.rs | 2 +- .../compile-fail/associated-types-eq-3.rs | 7 +- .../compile-fail/associated-types-eq-hr.rs | 49 +- .../compile-fail/associated-types-path-2.rs | 6 +- .../bound-lifetime-constrained.rs | 66 + .../bound-lifetime-in-binding-only.rs | 90 + .../bound-lifetime-in-return-only.rs | 64 + .../compile-fail/augmented-assignments.rs | 6 +- .../auxiliary/allocator-dylib.rs | 0 .../auxiliary/allocator-dylib2.rs | 0 .../auxiliary/allocator1.rs | 0 .../auxiliary/allocator2.rs | 0 .../auxiliary/allocator3.rs | 0 .../auxiliary/ambig_impl_2_lib.rs | 0 .../{ => compile-fail}/auxiliary/cci_class.rs | 0 .../auxiliary/cci_class_5.rs | 0 src/test/compile-fail/auxiliary/cdylib-dep.rs | 11 + .../auxiliary/changing-crates-a1.rs | 0 .../auxiliary/changing-crates-a2.rs | 0 .../auxiliary/changing-crates-b.rs | 0 .../auxiliary/coherence_copy_like_lib.rs | 0 .../auxiliary/coherence_inherent_cc_lib.rs | 0 .../auxiliary/coherence_lib.rs | 0 .../auxiliary/coherence_orphan_lib.rs | 0 .../auxiliary/const_fn_lib.rs | 0 .../{ => compile-fail}/auxiliary/crate_a1.rs | 0 .../{ => compile-fail}/auxiliary/crate_a2.rs | 0 .../auxiliary/crateresolve1-1.rs | 0 .../auxiliary/crateresolve1-2.rs | 0 .../auxiliary/crateresolve1-3.rs | 0 .../default_ty_param_cross_crate_crate.rs | 0 .../auxiliary/deprecation-lint.rs | 0 .../auxiliary/empty-struct.rs | 0 .../{ => compile-fail}/auxiliary/go_trait.rs | 0 .../auxiliary/inherited_stability.rs | 0 .../auxiliary/internal_unstable.rs | 0 .../auxiliary/issue-19163.rs | 0 .../auxiliary/issue-21146-inc.rs | 0 .../auxiliary/issue-21221-3.rs | 0 .../auxiliary/issue-21221-4.rs | 0 .../auxiliary/issue-29181.rs | 0 .../auxiliary/issue-30535.rs | 0 .../auxiliary/issue_11680.rs | 0 .../auxiliary/issue_12612_1.rs | 0 .../auxiliary/issue_16725.rs | 0 .../auxiliary/issue_17718_const_privacy.rs | 0 .../auxiliary/issue_21202.rs | 0 .../auxiliary/issue_30123_aux.rs | 0 .../auxiliary/issue_3907.rs | 0 .../auxiliary/issue_5844_aux.rs | 0 .../lifetime_bound_will_change_warning_lib.rs | 0 .../auxiliary/lint_output_format.rs | 0 .../auxiliary/lint_stability.rs | 0 .../auxiliary/lint_stability_fields.rs | 0 .../auxiliary/lint_unused_extern_crate.rs | 0 .../auxiliary/macro_crate_nonterminal.rs | 0 
.../auxiliary/macro_non_reexport_2.rs | 0 .../auxiliary/macro_reexport_1.rs | 15 + .../auxiliary/namespaced_enums.rs | 0 .../auxiliary/needs_allocator.rs | 0 .../auxiliary/no_method_suggested_traits.rs | 0 .../auxiliary/noexporttypelib.rs | 0 .../auxiliary/orphan_check_diagnostics.rs | 0 .../auxiliary/privacy_tuple_struct.rs | 0 .../auxiliary/private_trait_xc.rs | 0 .../auxiliary/pub_static_array.rs | 2 +- .../auxiliary/rbmtp_cross_crate_lib.rs} | 0 .../auxiliary/stability_attribute_issue.rs | 0 .../auxiliary/stability_cfg1.rs | 0 .../auxiliary/stability_cfg2.rs | 0 .../auxiliary/static_priv_by_default.rs | 0 .../auxiliary/struct_field_privacy.rs | 0 .../auxiliary/struct_variant_privacy.rs | 0 .../auxiliary/svh-a-base.rs | 0 .../auxiliary/svh-a-change-lit.rs | 0 .../auxiliary/svh-a-change-significant-cfg.rs | 0 .../auxiliary/svh-a-change-trait-bound.rs | 0 .../auxiliary/svh-a-change-type-arg.rs | 0 .../auxiliary/svh-a-change-type-ret.rs | 0 .../auxiliary/svh-a-change-type-static.rs | 0 .../{ => compile-fail}/auxiliary/svh-b.rs | 0 .../auxiliary/svh-uta-base.rs | 0 .../auxiliary/svh-uta-change-use-trait.rs | 0 .../{ => compile-fail}/auxiliary/svh-utb.rs | 0 .../auxiliary/tdticc_coherence_lib.rs} | 0 .../trait_bounds_on_structs_and_enums_xc.rs | 0 .../auxiliary/trait_impl_conflict.rs | 0 .../auxiliary/trait_safety_lib.rs | 0 .../auxiliary/trait_superkinds_in_metadata.rs | 0 .../auxiliary/two_macros.rs | 0 .../auxiliary/unreachable_variant.rs | 0 .../auxiliary/use_from_trait_xc.rs | 41 + .../auxiliary/variant-namespacing.rs | 0 .../auxiliary/weak-lang-items.rs | 0 .../auxiliary/xc_private_method_lib.rs | 0 .../auxiliary/xcrate_unit_struct.rs | 0 src/test/compile-fail/bad-const-type.rs | 7 +- .../bad-intrinsic-monomorphization.rs | 5 +- src/test/compile-fail/bad-main.rs | 2 +- src/test/compile-fail/bad-sized.rs | 1 - src/test/compile-fail/binop-move-semantics.rs | 2 + .../compile-fail/blind-item-block-middle.rs | 2 +- .../compile-fail/blind-item-item-shadow.rs | 4 +- .../block-must-not-have-result-do.rs | 4 - .../block-must-not-have-result-res.rs | 4 - .../block-must-not-have-result-while.rs | 7 +- .../borrowck-borrowed-uniq-rvalue-2.rs | 3 +- .../borrowck/borrowck-box-insensitivity.rs | 62 +- .../borrowck/borrowck-closures-mut-of-imm.rs | 2 +- .../borrowck/borrowck-closures-unique.rs | 2 +- .../borrowck/borrowck-lend-flow-loop.rs | 1 + .../borrowck-let-suggestion-suffixes.rs | 3 + .../borrowck/borrowck-let-suggestion.rs | 10 +- .../borrowck/borrowck-move-error-with-note.rs | 11 +- .../borrowck-move-out-of-struct-with-dtor.rs | 6 +- ...owck-move-out-of-tuple-struct-with-dtor.rs | 6 +- .../borrowck/borrowck-move-out-of-vec-tail.rs | 8 +- .../borrowck-mut-borrow-linear-errors.rs | 1 + .../borrowck-report-with-custom-diagnostic.rs | 15 +- .../borrowck-struct-update-with-dtor.rs | 6 +- .../borrowck/borrowck-vec-pattern-nesting.rs | 26 +- src/test/compile-fail/cast-as-bool.rs | 3 +- ...race_macros-gate2.rs => cast-rfc0401-2.rs} | 13 +- src/test/compile-fail/cast-rfc0401.rs | 18 +- .../cdylib-deps-must-be-static.rs | 17 + src/test/compile-fail/changing-crates.rs | 2 +- .../check-static-immutable-mut-slices.rs | 1 + .../check-static-values-constraints.rs | 27 +- src/test/compile-fail/closure-wrong-kind.rs | 22 + src/test/compile-fail/coerce-mut.rs | 4 +- src/test/compile-fail/coercion-slice.rs | 7 +- ...herence-conflicting-negative-trait-impl.rs | 2 +- .../consider-removing-last-semi.rs | 2 - .../const-block-non-item-statement.rs | 14 + src/test/compile-fail/const-err-early.rs | 2 +- 
src/test/compile-fail/const-err-multi.rs | 19 + src/test/compile-fail/const-err.rs | 15 +- src/test/compile-fail/const-err2.rs | 34 + .../compile-fail/const-eval-overflow-4b.rs | 18 +- .../const-fn-destructuring-arg.rs | 7 +- src/test/compile-fail/const-fn-error.rs | 9 +- .../const-fn-not-safe-for-const.rs | 8 +- .../const-fn-not-safe-for-const2.rs | 44 - .../compile-fail/const-pattern-irrefutable.rs | 22 +- src/test/compile-fail/const-slice-oob.rs | 2 +- src/test/compile-fail/cross-borrow-trait.rs | 7 +- .../compile-fail/default_ty_param_conflict.rs | 3 + .../default_ty_param_conflict_cross_crate.rs | 6 +- ...graph-trait-impl-two-traits-same-method.rs | 1 + .../{ => derived-errors}/issue-30580.rs | 0 .../derived-errors/issue-31997-1.rs | 66 + .../derived-errors/issue-31997.rs | 27 + src/test/compile-fail/deriving-copyclone.rs | 48 + .../compile-fail/destructure-trait-ref.rs | 21 +- ...deconstructing-destructing-struct-match.rs | 2 +- src/test/compile-fail/dst-bad-assign.rs | 7 +- src/test/compile-fail/dst-bad-coerce4.rs | 7 +- .../compile-fail/empty-struct-unit-pat.rs | 8 +- src/test/compile-fail/enum-in-scope.rs | 2 +- src/test/compile-fail/expanded-cfg.rs | 26 + .../explicit-self-lifetime-mismatch.rs | 8 +- src/test/compile-fail/extern-main-fn.rs | 2 +- src/test/compile-fail/fail-simple.rs | 4 +- ...te-allow-internal-unstable-nested-macro.rs | 4 +- ...ure-gate-allow-internal-unstable-struct.rs | 17 + .../feature-gate-negate-unsigned.rs | 4 +- src/test/compile-fail/fn-item-type.rs | 20 +- src/test/compile-fail/fn-trait-formatting.rs | 21 +- .../fully-qualified-type-name1.rs | 7 +- .../fully-qualified-type-name2.rs | 7 +- .../fully-qualified-type-name4.rs | 7 +- .../generic-type-params-name-repr.rs | 42 +- src/test/compile-fail/if-branch-types.rs | 3 +- src/test/compile-fail/if-let-arm-types.rs | 3 + .../compile-fail/if-without-else-result.rs | 7 +- .../compile-fail/impl-duplicate-methods.rs | 5 +- .../compile-fail/import-prefix-macro-1.rs | 26 + .../compile-fail/import-prefix-macro-2.rs | 26 + src/test/compile-fail/import-ty-params.rs | 25 + .../integer-literal-suffix-inference.rs | 108 +- .../integral-variable-unification-error.rs | 7 +- src/test/compile-fail/issue-10176.rs | 7 +- src/test/compile-fail/issue-10636-2.rs | 2 +- src/test/compile-fail/issue-11319.rs | 9 +- src/test/compile-fail/issue-11714.rs | 1 - src/test/compile-fail/issue-12997-2.rs | 7 +- src/test/compile-fail/issue-13058.rs | 1 - src/test/compile-fail/issue-13359.rs | 6 +- src/test/compile-fail/issue-13428.rs | 2 - src/test/compile-fail/issue-13466.rs | 14 +- src/test/compile-fail/issue-13482-2.rs | 7 +- src/test/compile-fail/issue-13482.rs | 6 +- src/test/compile-fail/issue-13624.rs | 14 +- src/test/compile-fail/issue-13853.rs | 2 +- src/test/compile-fail/issue-14091.rs | 4 - src/test/compile-fail/issue-14541.rs | 7 +- src/test/compile-fail/issue-15167.rs | 12 +- src/test/compile-fail/issue-15783.rs | 7 +- src/test/compile-fail/issue-15896.rs | 7 +- src/test/compile-fail/issue-16338.rs | 7 +- src/test/compile-fail/issue-16401.rs | 7 +- src/test/compile-fail/issue-16747.rs | 1 - src/test/compile-fail/issue-17033.rs | 7 +- src/test/compile-fail/issue-17263.rs | 14 +- src/test/compile-fail/issue-17283.rs | 21 +- src/test/compile-fail/issue-17431-2.rs | 1 + .../issue-17718-borrow-interior.rs | 3 +- .../issue-17718-const-bad-values.rs | 9 +- .../compile-fail/issue-17718-references.rs | 10 +- src/test/compile-fail/issue-17728.rs | 3 + src/test/compile-fail/issue-17740.rs | 8 +- src/test/compile-fail/issue-18118-2.rs | 2 +- 
src/test/compile-fail/issue-18118.rs | 1 + src/test/compile-fail/issue-19109.rs | 9 +- src/test/compile-fail/issue-19498.rs | 4 +- src/test/compile-fail/issue-19692.rs | 2 +- src/test/compile-fail/issue-19707.rs | 2 - src/test/compile-fail/issue-19991.rs | 7 +- src/test/compile-fail/issue-20261.rs | 1 - src/test/compile-fail/issue-20692.rs | 1 + src/test/compile-fail/issue-20862.rs | 6 +- src/test/compile-fail/issue-21146.rs | 2 +- src/test/compile-fail/issue-21221-1.rs | 6 +- src/test/compile-fail/issue-21221-2.rs | 1 - src/test/compile-fail/issue-21221-3.rs | 1 - src/test/compile-fail/issue-21221-4.rs | 1 - src/test/compile-fail/issue-21600.rs | 4 +- ...issue-21659-show-relevant-trait-impls-1.rs | 1 - ...issue-21659-show-relevant-trait-impls-2.rs | 1 - src/test/compile-fail/issue-23716.rs | 29 + src/test/compile-fail/issue-2392.rs | 6 +- src/test/compile-fail/issue-24036.rs | 8 +- src/test/compile-fail/issue-24081.rs | 5 + src/test/compile-fail/issue-24357.rs | 4 +- src/test/compile-fail/issue-24446.rs | 6 +- src/test/compile-fail/issue-25901.rs | 3 +- src/test/compile-fail/issue-26472.rs | 24 + src/test/compile-fail/issue-26480.rs | 8 +- src/test/compile-fail/issue-26548.rs | 3 +- src/test/compile-fail/issue-26638.rs | 7 +- src/test/compile-fail/issue-27008.rs | 7 +- src/test/compile-fail/issue-27033.rs | 4 +- src/test/compile-fail/issue-27842.rs | 24 + src/test/compile-fail/issue-27895.rs | 3 +- src/test/compile-fail/issue-28113.rs | 3 +- src/test/compile-fail/issue-29084.rs | 5 +- src/test/compile-fail/issue-29124.rs | 4 - src/test/compile-fail/issue-2951.rs | 7 +- src/test/compile-fail/issue-30123.rs | 2 +- src/test/compile-fail/issue-30255.rs | 35 + src/test/compile-fail/issue-30302.rs | 2 - src/test/compile-fail/issue-31221.rs | 49 + src/test/compile-fail/issue-31424.rs | 30 + src/test/compile-fail/issue-31804.rs | 2 +- src/test/compile-fail/issue-32655.rs | 33 + src/test/compile-fail/issue-32709.rs | 20 + src/test/compile-fail/issue-32782.rs | 23 + src/test/compile-fail/issue-32833.rs | 16 + src/test/compile-fail/issue-32922.rs | 41 + src/test/compile-fail/issue-32963.rs | 20 + src/test/compile-fail/issue-33464.rs | 23 + src/test/compile-fail/issue-3477.rs | 3 +- src/test/compile-fail/issue-3563.rs | 4 - src/test/compile-fail/issue-3680.rs | 7 +- src/test/compile-fail/issue-3907.rs | 4 +- src/test/compile-fail/issue-3973.rs | 2 +- src/test/compile-fail/issue-4201.rs | 7 +- src/test/compile-fail/issue-4517.rs | 7 +- src/test/compile-fail/issue-4968.rs | 7 +- src/test/compile-fail/issue-5035.rs | 5 +- src/test/compile-fail/issue-5100.rs | 40 +- src/test/compile-fail/issue-5358-1.rs | 7 +- src/test/compile-fail/issue-5500.rs | 7 +- src/test/compile-fail/issue-6702.rs | 1 - src/test/compile-fail/issue-7061.rs | 7 +- src/test/compile-fail/issue-7092.rs | 9 +- src/test/compile-fail/issue-7867.rs | 21 +- src/test/compile-fail/issue-9243.rs | 5 +- src/test/compile-fail/issue-9575.rs | 2 +- src/test/compile-fail/issue32829.rs | 88 + .../keyword-false-as-identifier.rs | 6 +- .../keyword-true-as-identifier.rs | 6 +- src/test/compile-fail/kindck-copy.rs | 8 +- .../compile-fail/kindck-impl-type-params.rs | 2 + ...-return-type-requires-explicit-lifetime.rs | 4 - src/test/compile-fail/lint-change-warnings.rs | 2 +- .../compile-fail/lint-exceeding-bitshifts.rs | 1 + src/test/compile-fail/lint-malformed.rs | 4 +- src/test/compile-fail/lint-removed-allow.rs | 2 +- src/test/compile-fail/lint-removed.rs | 2 +- src/test/compile-fail/lint-renamed-allow.rs | 2 +- src/test/compile-fail/lint-renamed.rs | 
2 +- src/test/compile-fail/lint-type-overflow2.rs | 2 + src/test/compile-fail/lint-unknown-lint.rs | 2 +- src/test/compile-fail/lint-unused-imports.rs | 3 + .../liveness-return-last-stmt-semi.rs | 6 +- .../macro-backtrace-invalid-internals.rs | 10 +- .../compile-fail/macro-backtrace-nested.rs | 11 +- .../compile-fail/macro-backtrace-println.rs | 8 +- src/test/compile-fail/macro-context.rs | 2 +- src/test/compile-fail/macro-follow.rs | 6 +- .../compile-fail/macro-incomplete-parse.rs | 2 +- src/test/compile-fail/main-wrong-type-2.rs | 2 +- src/test/compile-fail/main-wrong-type.rs | 2 +- src/test/compile-fail/match-range-fail.rs | 3 +- src/test/compile-fail/match-struct.rs | 7 +- src/test/compile-fail/match-vec-mismatch-2.rs | 7 +- src/test/compile-fail/method-self-arg-1.rs | 14 +- .../moves-based-on-type-block-bad.rs | 4 +- ...ased-on-type-distribute-copy-over-paren.rs | 8 +- .../moves-based-on-type-match-bindings.rs | 2 + .../compile-fail/mut-pattern-mismatched.rs | 8 +- src/test/compile-fail/mut-suggestion.rs | 14 +- src/test/compile-fail/name-clash-nullary.rs | 2 +- src/test/compile-fail/noexporttypeexe.rs | 7 +- .../non-constant-in-const-path.rs | 3 +- src/test/compile-fail/non-interger-atomic.rs | 18 +- src/test/compile-fail/not-panic-safe-2.rs | 5 +- src/test/compile-fail/not-panic-safe-3.rs | 4 +- src/test/compile-fail/not-panic-safe-4.rs | 4 +- src/test/compile-fail/not-panic-safe-6.rs | 5 +- src/test/compile-fail/occurs-check-2.rs | 4 +- src/test/compile-fail/occurs-check.rs | 4 +- .../bad-annotation.rs} | 0 .../on-unimplemented/multiple-impls.rs | 55 + .../compile-fail/on-unimplemented/on-impl.rs | 35 + .../on-trait.rs} | 0 .../on-unimplemented/slice-index.rs | 24 + .../abort-link-to-unwind-dylib.rs | 24 + .../auxiliary/needs-panic-runtime.rs | 16 + .../auxiliary/panic-runtime-abort.rs | 27 + .../auxiliary/panic-runtime-lang-items.rs} | 23 +- .../auxiliary/panic-runtime-unwind.rs | 27 + .../auxiliary/panic-runtime-unwind2.rs | 27 + .../runtime-depending-on-panic-runtime.rs | 18 + .../auxiliary/wants-panic-runtime-abort.rs | 17 + .../auxiliary/wants-panic-runtime-unwind.rs | 16 + .../panic-runtime/bad-panic-flag1.rs | 14 + .../panic-runtime/bad-panic-flag2.rs | 14 + .../panic-runtime/libtest-unwinds.rs | 20 + .../compile-fail/panic-runtime/needs-gate.rs | 14 + .../runtime-depend-on-needs-runtime.rs | 15 + .../panic-runtime/transitive-link-a-bunch.rs | 24 + .../panic-runtime/two-panic-runtimes.rs | 23 + .../panic-runtime/want-abort-got-unwind.rs | 17 + .../panic-runtime/want-abort-got-unwind2.rs | 18 + .../panic-runtime/want-unwind-got-abort.rs | 20 + .../panic-runtime/want-unwind-got-abort2.rs | 21 + src/test/compile-fail/paren-span.rs | 31 + .../pat-shadow-in-nested-binding.rs | 2 +- .../compile-fail/pattern-error-continue.rs | 14 +- src/test/compile-fail/pptypedef.rs | 6 +- .../restricted/auxiliary/pub_restricted.rs | 23 + .../privacy/restricted/feature-gate.rs | 25 + .../restricted/lookup-ignores-private.rs | 44 + .../privacy/restricted/private-in-public.rs | 22 + .../restricted/struct-literal-field.rs | 31 + .../compile-fail/privacy/restricted/test.rs | 62 + .../restricted/tuple-struct-fields/test.rs | 18 + .../restricted/tuple-struct-fields/test2.rs | 24 + .../restricted/tuple-struct-fields/test3.rs | 24 + .../privacy/restricted/ty-params.rs | 24 + src/test/compile-fail/ptr-coercion.rs | 12 +- .../compile-fail/qualified-path-params-2.rs | 31 + .../compile-fail/qualified-path-params.rs | 33 + src/test/compile-fail/range-1.rs | 5 +- src/test/compile-fail/ref-suggestion.rs | 9 - 
...region-invariant-static-error-reporting.rs | 36 + ...nded-method-type-parameters-cross-crate.rs | 4 +- src/test/compile-fail/regions-bounds.rs | 8 +- .../regions-early-bound-error-method.rs | 4 +- .../regions-fn-subtyping-return-static.rs | 7 +- .../compile-fail/regions-infer-not-param.rs | 8 +- .../regions-infer-paramd-indirect.rs | 4 +- .../compile-fail/regions-wf-trait-object.rs | 2 +- .../reject-specialized-drops-8142.rs | 4 +- src/test/compile-fail/repeat_count.rs | 41 +- src/test/compile-fail/reserved-become.rs | 2 +- .../resolve-conflict-item-vs-import.rs | 2 +- src/test/compile-fail/rfc1592-deprecated.rs | 32 + src/test/compile-fail/self-infer.rs | 18 + src/test/compile-fail/self_type_keyword-2.rs | 13 + src/test/compile-fail/self_type_keyword.rs | 5 +- .../compile-fail/shift-various-bad-types.rs | 3 +- .../simd-intrinsic-generic-arithmetic.rs | 3 +- .../simd-intrinsic-generic-cast.rs | 3 +- .../simd-intrinsic-generic-comparison.rs | 3 +- .../simd-intrinsic-generic-elements.rs | 1 - src/test/compile-fail/sized-cycle-note.rs | 7 +- src/test/compile-fail/slice-mut.rs | 4 +- .../slightly-nice-generic-literal-messages.rs | 7 +- .../specialization/specialization-polarity.rs | 0 .../compile-fail/static-array-across-crate.rs | 7 + .../compile-fail/static-mut-not-constant.rs | 2 +- .../compile-fail/struct-base-wrong-type-2.rs | 14 +- .../compile-fail/struct-base-wrong-type.rs | 14 +- .../structure-constructor-type-mismatch.rs | 18 +- src/test/compile-fail/substs-ppaux.rs | 32 +- .../suggest-path-instead-of-mod-dot-item.rs | 20 +- src/test/compile-fail/suppressed-error.rs | 7 +- src/test/compile-fail/svh-change-lit.rs | 2 +- .../svh-change-significant-cfg.rs | 2 +- .../compile-fail/svh-change-trait-bound.rs | 2 +- src/test/compile-fail/svh-change-type-arg.rs | 2 +- src/test/compile-fail/svh-change-type-ret.rs | 2 +- .../compile-fail/svh-change-type-static.rs | 2 +- src/test/compile-fail/svh-use-trait.rs | 2 +- .../tag-that-dare-not-speak-its-name.rs | 7 +- src/test/compile-fail/terr-in-field.rs | 7 +- src/test/compile-fail/terr-sorts.rs | 7 +- .../compile-fail/token-error-correct-3.rs | 3 + src/test/compile-fail/token-error-correct.rs | 4 +- src/test/compile-fail/trace_macros-gate.rs | 2 +- src/test/compile-fail/trace_macros-gate3.rs | 20 - .../compile-fail/trait-bounds-cant-coerce.rs | 7 +- .../trait-bounds-impl-comparison-1.rs | 2 +- ...ect-reference-without-parens-suggestion.rs | 2 - .../trait-suggest-where-clause.rs | 7 - src/test/compile-fail/trait-test-2.rs | 2 - .../traits-inductive-overflow-simultaneous.rs | 30 + .../transmute-from-fn-item-types-lint.rs | 10 +- src/test/compile-fail/tuple-arity-mismatch.rs | 14 +- .../tutorial-suffix-inference-test.rs | 9 +- .../compile-fail/type-mismatch-multiple.rs | 10 +- .../type-mismatch-same-crate-name.rs | 18 +- .../type-parameter-invalid-lint.rs | 6 + src/test/compile-fail/type-parameter-names.rs | 7 +- .../type-params-in-different-spaces-1.rs | 7 +- ...efault-trait-impl-cross-crate-coherence.rs | 4 +- .../typeck_type_placeholder_mismatch.rs | 14 +- .../compile-fail/ufcs-explicit-self-bad.rs | 8 +- src/test/compile-fail/unsized6.rs | 4 +- src/test/compile-fail/use-keyword.rs | 23 + src/test/compile-fail/use-mod-2.rs | 4 +- .../{parse-fail => compile-fail}/use-mod-4.rs | 6 +- src/test/compile-fail/variadic-ffi-3.rs | 14 +- .../variance-unused-type-param.rs | 3 - src/test/compile-fail/variant-used-as-type.rs | 30 + .../compile-fail/vec-macro-with-comma-only.rs | 2 +- .../cross_crate_debuginfo_type_uniquing.rs | 0 
.../auxiliary/cross_crate_spans.rs | 0 .../auxiliary/issue13213aux.rs | 0 src/test/debuginfo/c-style-enum.rs | 3 +- ...nction-prologue-stepping-no-stack-check.rs | 13 +- src/test/debuginfo/no-debug-attribute.rs | 7 +- src/test/debuginfo/struct-namespace.rs | 70 + .../callee_caller_cross_crate/auxiliary/a.rs} | 18 +- .../callee_caller_cross_crate/b.rs} | 27 +- .../type_alias_cross_crate/auxiliary/a.rs | 21 + .../incremental/type_alias_cross_crate/b.rs | 29 + src/test/parse-fail/bad-pointer-type.rs | 15 + src/test/parse-fail/column-offset-1-based.rs | 2 +- src/test/parse-fail/extern-no-fn.rs | 2 +- src/test/parse-fail/issue-10412.rs | 14 +- src/test/parse-fail/issue-14303-path.rs | 2 +- src/test/parse-fail/issue-30318.rs | 19 + src/test/parse-fail/issue-32214.rs | 17 + src/test/parse-fail/issue-32505.rs | 17 + ...etime-obsoleted-self.rs => issue-33262.rs} | 9 +- src/test/parse-fail/issue-33413.rs | 16 + src/test/parse-fail/keyword-abstract.rs | 2 +- .../parse-fail/keyword-as-as-identifier.rs | 4 +- ...tifier.rs => keyword-box-as-identifier.rs} | 4 +- .../parse-fail/keyword-break-as-identifier.rs | 4 +- .../parse-fail/keyword-const-as-identifier.rs | 17 + .../keyword-continue-as-identifier.rs | 17 + .../parse-fail/keyword-crate-as-identifier.rs | 17 + .../parse-fail/keyword-else-as-identifier.rs | 4 +- .../parse-fail/keyword-enum-as-identifier.rs | 4 +- .../keyword-extern-as-identifier.rs | 4 +- src/test/parse-fail/keyword-final.rs | 2 +- .../parse-fail/keyword-fn-as-identifier.rs | 4 +- .../parse-fail/keyword-for-as-identifier.rs | 4 +- .../parse-fail/keyword-if-as-identifier.rs | 4 +- .../parse-fail/keyword-impl-as-identifier.rs | 4 +- ...ntifier.rs => keyword-in-as-identifier.rs} | 6 +- .../parse-fail/keyword-let-as-identifier.rs | 4 +- .../parse-fail/keyword-loop-as-identifier.rs | 4 +- .../parse-fail/keyword-match-as-identifier.rs | 4 +- .../parse-fail/keyword-mod-as-identifier.rs | 4 +- .../parse-fail/keyword-move-as-identifier.rs | 17 + .../parse-fail/keyword-mut-as-identifier.rs | 6 +- src/test/parse-fail/keyword-override.rs | 2 +- .../parse-fail/keyword-pub-as-identifier.rs | 4 +- .../parse-fail/keyword-ref-as-identifier.rs | 6 +- .../keyword-return-as-identifier.rs | 4 +- .../parse-fail/keyword-self-as-identifier.rs | 6 +- .../keyword-static-as-identifier.rs | 4 +- .../keyword-struct-as-identifier.rs | 4 +- .../parse-fail/keyword-super-as-identifier.rs | 6 +- .../parse-fail/keyword-trait-as-identifier.rs | 4 +- .../parse-fail/keyword-type-as-identifier.rs | 4 +- src/test/parse-fail/keyword-typeof.rs | 2 +- .../keyword-unsafe-as-identifier.rs | 4 +- .../parse-fail/keyword-use-as-identifier.rs | 4 +- .../parse-fail/keyword-where-as-identifier.rs | 17 + .../parse-fail/keyword-while-as-identifier.rs | 4 +- .../keywords-followed-by-double-colon.rs | 2 +- src/test/parse-fail/lifetime-no-keyword.rs | 3 +- .../match-arrows-block-then-binop.rs | 2 +- src/test/parse-fail/no-unsafe-self.rs | 10 +- src/test/parse-fail/obsolete-proc.rs | 5 +- .../parse-fail/removed-syntax-extern-const.rs | 2 +- src/test/parse-fail/removed-syntax-mode.rs | 2 +- .../parse-fail/removed-syntax-mut-vec-expr.rs | 4 +- .../parse-fail/removed-syntax-mut-vec-ty.rs | 4 +- .../removed-syntax-uniq-mut-expr.rs | 4 +- .../parse-fail/removed-syntax-uniq-mut-ty.rs | 4 +- src/test/parse-fail/unsized2.rs | 5 +- src/test/parse-fail/use-ends-with-mod-sep.rs | 2 +- src/test/run-make/atomic-lock-free/Makefile | 30 + .../atomic-lock-free/atomic_lock_free.rs | 62 + src/test/run-make/cdylib/Makefile | 19 + 
src/test/run-make/cdylib/bar.rs | 15 + src/test/run-make/cdylib/foo.c | 20 + src/test/run-make/cdylib/foo.rs | 23 + .../run-make/compiler-lookup-paths/Makefile | 8 + src/test/run-make/compiler-lookup-paths/e2.rs | 14 + .../run-make/compiler-lookup-paths/native.c | 9 + src/test/run-make/debug-assertions/Makefile | 4 + .../run-make/dep-info-no-analysis/Makefile | 6 + .../run-make/dep-info-no-analysis/input.dd | 3 + .../run-make/dep-info-no-analysis/input.rs | 14 + src/test/run-make/emit/Makefile | 6 + src/test/run-make/execution-engine/test.rs | 26 +- src/test/run-make/extern-flag-fun/Makefile | 5 +- .../extern-flag-fun/bar-alt.rs} | 4 +- .../run-make/extern-multiple-copies2/Makefile | 10 + .../run-make/extern-multiple-copies2/bar.rs | 18 + .../foo1.rs} | 8 +- .../run-make/extern-multiple-copies2/foo2.rs | 18 + src/test/run-make/issue-11908/Makefile | 10 +- src/test/run-make/issue-19371/foo.rs | 6 +- src/test/run-make/issue-22131/Makefile | 2 +- src/test/run-make/json-errors/Makefile | 10 - .../many-crates-but-no-match/Makefile | 2 +- .../pretty-expanded-hygiene/input.pp.rs | 2 + .../run-make/pretty-expanded-hygiene/input.rs | 2 + src/test/run-make/print-cfg/Makefile | 1 + src/test/run-make/save-analysis/Makefile | 1 + src/test/run-make/static-unwinding/main.rs | 2 +- src/test/run-make/symlinked-rlib/Makefile | 14 + src/test/run-make/symlinked-rlib/bar.rs | 15 + src/test/run-make/symlinked-rlib/foo.rs | 11 + src/test/run-make/tools.mk | 2 +- .../auxiliary/custom_derive_plugin.rs | 1 + .../auxiliary/custom_derive_plugin_attr.rs | 1 + .../auxiliary/dummy_mir_pass.rs | 9 +- .../auxiliary/issue-13560-1.rs | 0 .../auxiliary/issue-13560-2.rs | 0 .../auxiliary/issue-13560-3.rs | 0 .../auxiliary/issue-16822.rs | 0 .../auxiliary/issue-18502.rs | 0 .../issue_16723_multiple_items_syntax_ext.rs | 0 .../auxiliary/linkage-visibility.rs | 0 .../auxiliary/lint_for_crate.rs | 47 + .../auxiliary/lint_group_plugin_test.rs | 51 + .../auxiliary/lint_plugin_test.rs | 48 + .../auxiliary/llvm_pass_plugin.rs | 0 .../auxiliary/logging_right_crate.rs | 0 .../auxiliary/lto-syntax-extension-lib.rs | 0 .../auxiliary/lto-syntax-extension-plugin.rs | 0 .../auxiliary/macro_crate_test.rs | 141 + .../auxiliary/plugin_args.rs | 2 +- .../plugin_crate_outlive_expansion_phase.rs | 0 .../auxiliary/plugin_with_plugin_lib.rs | 0 .../auxiliary/procedural_mbe_matching.rs | 0 .../auxiliary/roman_numerals.rs | 2 +- .../syntax_extension_with_dll_deps_1.rs | 0 .../syntax_extension_with_dll_deps_2.rs | 0 src/test/run-pass-fulldeps/compiler-calls.rs | 5 +- .../rustc_encodable_hygiene.rs | 32 + .../associated-const-outer-ty-refs.rs | 21 + .../associated-types-in-bound-type-arg.rs | 26 + .../auxiliary/allocator-dummy.rs | 0 .../anon-extern-mod-cross-crate-1.rs | 0 .../auxiliary/anon_trait_static_method_lib.rs | 0 .../auxiliary/associated-const-cc-lib.rs | 0 .../auxiliary/associated-types-cc-lib.rs | 0 .../auxiliary/augmented_assignments.rs | 0 .../blind-item-mixed-crate-use-item-foo.rs | 0 .../blind-item-mixed-crate-use-item-foo2.rs | 0 .../auxiliary/cci_borrow_lib.rs | 0 .../auxiliary/cci_capture_clause.rs | 0 src/test/run-pass/auxiliary/cci_class.rs | 24 + .../{ => run-pass}/auxiliary/cci_class_2.rs | 0 .../{ => run-pass}/auxiliary/cci_class_3.rs | 0 .../{ => run-pass}/auxiliary/cci_class_4.rs | 0 .../{ => run-pass}/auxiliary/cci_class_6.rs | 0 .../auxiliary/cci_class_cast.rs | 0 .../auxiliary/cci_class_trait.rs | 0 .../{ => run-pass}/auxiliary/cci_const.rs | 0 .../auxiliary/cci_const_block.rs | 0 .../{ => 
run-pass}/auxiliary/cci_impl_lib.rs | 0 .../{ => run-pass}/auxiliary/cci_intrinsic.rs | 0 .../{ => run-pass}/auxiliary/cci_iter_lib.rs | 0 .../auxiliary/cci_nested_lib.rs | 0 .../auxiliary/cci_no_inline_lib.rs | 0 .../auxiliary/cfg_inner_static.rs | 0 src/test/{ => run-pass}/auxiliary/cgu_test.rs | 0 .../{ => run-pass}/auxiliary/cgu_test_a.rs | 0 .../{ => run-pass}/auxiliary/cgu_test_b.rs | 0 .../check_static_recursion_foreign_helper.rs | 0 .../auxiliary/coherence_copy_like_lib.rs | 20 + src/test/run-pass/auxiliary/coherence_lib.rs | 25 + src/test/run-pass/auxiliary/const_fn_lib.rs | 16 + .../crate-attributes-using-cfg_attr.rs | 0 .../crate-method-reexport-grrrrrrr2.rs | 0 .../auxiliary/default_type_params_xc.rs | 0 .../{ => run-pass}/auxiliary/derive-no-std.rs | 0 src/test/run-pass/auxiliary/empty-struct.rs | 17 + .../auxiliary/explicit_self_xcrate.rs | 0 .../auxiliary/extern-crosscrate-source.rs | 0 .../auxiliary/extern-take-value.rs | 0 .../auxiliary/extern_calling_convention.rs | 0 .../auxiliary/extern_mod_ordering_lib.rs | 0 src/test/{ => run-pass}/auxiliary/fat_drop.rs | 0 src/test/{ => run-pass}/auxiliary/fn-abi.rs | 0 .../{ => run-pass}/auxiliary/foreign_lib.rs | 0 src/test/run-pass/auxiliary/go_trait.rs | 53 + src/test/{ => run-pass}/auxiliary/i8.rs | 0 .../auxiliary/impl_privacy_xc_1.rs | 0 .../auxiliary/impl_privacy_xc_2.rs | 0 .../{ => run-pass}/auxiliary/inline_dtor.rs | 0 .../{ => run-pass}/auxiliary/inner_static.rs | 0 src/test/{ => run-pass}/auxiliary/iss.rs | 0 .../{ => run-pass}/auxiliary/issue-10028.rs | 0 .../{ => run-pass}/auxiliary/issue-11224.rs | 0 .../{ => run-pass}/auxiliary/issue-11225-1.rs | 0 .../{ => run-pass}/auxiliary/issue-11225-2.rs | 0 .../{ => run-pass}/auxiliary/issue-11225-3.rs | 0 .../{ => run-pass}/auxiliary/issue-11508.rs | 0 .../{ => run-pass}/auxiliary/issue-11529.rs | 0 .../auxiliary/issue-12133-dylib.rs | 0 .../auxiliary/issue-12133-dylib2.rs | 0 .../auxiliary/issue-12133-rlib.rs | 0 .../auxiliary/issue-12660-aux.rs | 0 .../{ => run-pass}/auxiliary/issue-13620-1.rs | 0 .../{ => run-pass}/auxiliary/issue-13620-2.rs | 0 .../{ => run-pass}/auxiliary/issue-13872-1.rs | 0 .../{ => run-pass}/auxiliary/issue-13872-2.rs | 0 .../{ => run-pass}/auxiliary/issue-13872-3.rs | 0 .../{ => run-pass}/auxiliary/issue-14344-1.rs | 0 .../{ => run-pass}/auxiliary/issue-14344-2.rs | 0 .../{ => run-pass}/auxiliary/issue-14421.rs | 0 .../{ => run-pass}/auxiliary/issue-14422.rs | 0 .../{ => run-pass}/auxiliary/issue-15562.rs | 0 .../{ => run-pass}/auxiliary/issue-16643.rs | 0 .../{ => run-pass}/auxiliary/issue-17662.rs | 0 .../auxiliary/issue-17718-aux.rs | 0 .../{ => run-pass}/auxiliary/issue-18501.rs | 0 .../{ => run-pass}/auxiliary/issue-18514.rs | 0 .../{ => run-pass}/auxiliary/issue-18711.rs | 0 .../{ => run-pass}/auxiliary/issue-18913-1.rs | 0 .../{ => run-pass}/auxiliary/issue-18913-2.rs | 0 .../{ => run-pass}/auxiliary/issue-19340-1.rs | 0 .../{ => run-pass}/auxiliary/issue-2380.rs | 0 .../{ => run-pass}/auxiliary/issue-2414-a.rs | 0 .../{ => run-pass}/auxiliary/issue-2414-b.rs | 0 .../{ => run-pass}/auxiliary/issue-25185-1.rs | 0 .../{ => run-pass}/auxiliary/issue-25185-2.rs | 0 .../{ => run-pass}/auxiliary/issue-2526.rs | 0 .../{ => run-pass}/auxiliary/issue-25467.rs | 0 .../{ => run-pass}/auxiliary/issue-2631-a.rs | 0 .../{ => run-pass}/auxiliary/issue-29485.rs | 0 .../{ => run-pass}/auxiliary/issue-3012-1.rs | 0 .../{ => run-pass}/auxiliary/issue-31702-1.rs | 0 .../{ => run-pass}/auxiliary/issue-31702-2.rs | 0 .../{ => run-pass}/auxiliary/issue-4208-cc.rs | 
0 .../{ => run-pass}/auxiliary/issue-4545.rs | 0 .../{ => run-pass}/auxiliary/issue-5518.rs | 0 .../{ => run-pass}/auxiliary/issue-5521.rs | 0 .../{ => run-pass}/auxiliary/issue-7178.rs | 0 .../{ => run-pass}/auxiliary/issue-7899.rs | 0 .../{ => run-pass}/auxiliary/issue-8044.rs | 0 .../{ => run-pass}/auxiliary/issue-8259.rs | 0 .../{ => run-pass}/auxiliary/issue-9906.rs | 0 .../{ => run-pass}/auxiliary/issue-9968.rs | 0 .../{ => run-pass}/auxiliary/issue13507.rs | 0 .../{ => run-pass}/auxiliary/issue2170lib.rs | 0 .../auxiliary/issue_10031_aux.rs | 0 .../auxiliary/issue_12612_1.rs} | 4 +- .../{ => run-pass}/auxiliary/issue_12612_2.rs | 0 .../{ => run-pass}/auxiliary/issue_19293.rs | 0 .../{ => run-pass}/auxiliary/issue_20389.rs | 0 .../{ => run-pass}/auxiliary/issue_2316_a.rs | 0 .../{ => run-pass}/auxiliary/issue_2316_b.rs | 0 .../{ => run-pass}/auxiliary/issue_2472_b.rs | 0 .../{ => run-pass}/auxiliary/issue_2723_a.rs | 0 .../{ => run-pass}/auxiliary/issue_3136_a.rc | 0 .../{ => run-pass}/auxiliary/issue_3136_a.rs | 0 .../auxiliary/issue_3979_traits.rs | 0 .../{ => run-pass}/auxiliary/issue_8401.rs | 0 .../{ => run-pass}/auxiliary/issue_9123.rs | 0 .../{ => run-pass}/auxiliary/issue_9155.rs | 0 .../{ => run-pass}/auxiliary/issue_9188.rs | 0 .../auxiliary/kinds_in_metadata.rs | 0 src/test/{ => run-pass}/auxiliary/linkage1.rs | 0 .../auxiliary/macro-include-items-expr.rs | 0 .../auxiliary/macro-include-items-item.rs | 0 .../auxiliary/macro_crate_def_only.rs | 0 .../auxiliary/macro_crate_nonterminal.rs | 22 + .../auxiliary/macro_export_inner_module.rs | 0 .../run-pass/auxiliary/macro_reexport_1.rs | 15 + .../auxiliary/macro_reexport_2.rs | 0 .../auxiliary/macro_reexport_2_no_use.rs | 0 .../auxiliary/macro_with_super_1.rs | 0 .../auxiliary/method_self_arg1.rs | 0 .../auxiliary/method_self_arg2.rs | 0 .../auxiliary/mir_external_refs.rs | 0 .../auxiliary/moves_based_on_type_lib.rs | 0 .../auxiliary/msvc-data-only-lib.rs | 0 .../auxiliary/namespaced_enum_emulate_flat.rs | 0 .../run-pass/auxiliary/namespaced_enums.rs | 20 + .../{ => run-pass}/auxiliary/nested_item.rs | 0 .../auxiliary/newtype_struct_xc.rs | 0 .../auxiliary/overloaded_autoderef_xc.rs | 0 src/test/{ => run-pass}/auxiliary/packed.rs | 0 .../auxiliary/priv-impl-prim-ty.rs | 0 .../auxiliary/privacy_reexport.rs | 0 .../auxiliary/pub_use_mods_xcrate.rs | 0 .../auxiliary/pub_use_xcrate1.rs | 0 .../auxiliary/pub_use_xcrate2.rs | 0 .../auxiliary/reachable-unnameable-items.rs | 0 .../auxiliary/reexport-should-still-link.rs | 0 .../auxiliary/reexported_static_methods.rs | 0 .../auxiliary/sepcomp-extern-lib.rs | 0 .../auxiliary/sepcomp_cci_lib.rs | 0 .../{ => run-pass}/auxiliary/sepcomp_lib.rs | 0 .../auxiliary/static-function-pointer-aux.rs | 0 .../auxiliary/static-methods-crate.rs | 0 .../auxiliary/static_fn_inline_xc_aux.rs | 0 .../auxiliary/static_fn_trait_xc_aux.rs | 0 .../{ => run-pass}/auxiliary/static_mut_xc.rs | 0 .../struct_destructuring_cross_crate.rs | 0 .../auxiliary/struct_variant_xc_aux.rs | 0 .../auxiliary/svh-a-base.rs} | 0 .../{ => run-pass}/auxiliary/svh-a-comment.rs | 0 .../{ => run-pass}/auxiliary/svh-a-doc.rs | 0 .../{ => run-pass}/auxiliary/svh-a-macro.rs | 0 .../run-pass/auxiliary/svh-a-no-change.rs | 35 + .../auxiliary/svh-a-redundant-cfg.rs | 0 .../auxiliary/svh-a-whitespace.rs | 0 src/test/run-pass/auxiliary/svh-b.rs | 23 + .../auxiliary/thread-local-extern-static.rs | 0 .../auxiliary/trait_default_method_xc_aux.rs | 0 .../trait_default_method_xc_aux_2.rs | 0 .../trait_inheritance_auto_xc_2_aux.rs | 0 
.../trait_inheritance_auto_xc_aux.rs | 0 ...ait_inheritance_cross_trait_call_xc_aux.rs | 0 .../trait_inheritance_overloading_xc.rs | 0 .../run-pass/auxiliary/trait_safety_lib.rs | 19 + .../auxiliary/trait_superkinds_in_metadata.rs | 18 + .../{ => run-pass}/auxiliary/traitimpl.rs | 0 src/test/run-pass/auxiliary/two_macros.rs | 15 + .../auxiliary/typeid-intrinsic-aux1.rs | 0 .../auxiliary/typeid-intrinsic-aux2.rs | 0 .../auxiliary/unboxed-closures-cross-crate.rs | 0 .../run-pass/auxiliary/weak-lang-items.rs | 32 + .../auxiliary/where_clauses_xc.rs | 0 .../auxiliary/xcrate-trait-lifetime-param.rs | 0 .../auxiliary/xcrate_address_insignificant.rs | 0 .../xcrate_associated_type_defaults.rs | 0 .../auxiliary/xcrate_static_addresses.rs | 0 .../auxiliary/xcrate_struct_aliases.rs | 0 .../run-pass/auxiliary/xcrate_unit_struct.rs | 38 + src/test/run-pass/backtrace-debuginfo-aux.rs | 2 - src/test/run-pass/backtrace-debuginfo.rs | 6 - src/test/run-pass/backtrace.rs | 6 - src/test/run-pass/cast-to-infer-ty.rs | 17 + src/test/run-pass/check-static-mut-slices.rs | 1 + src/test/run-pass/coerce-expect-unsized.rs | 2 - src/test/run-pass/command-before-exec.rs | 2 +- src/test/run-pass/const-autoderef.rs | 8 +- src/test/run-pass/const-err.rs | 20 + src/test/run-pass/const-meth-pattern.rs | 27 + src/test/run-pass/const-str-ptr.rs | 5 - src/test/run-pass/copy-out-of-array-1.rs | 2 - src/test/run-pass/deriving-copyclone.rs | 48 + .../deriving-via-extension-hash-enum.rs | 2 - src/test/run-pass/foreign-dupe.rs | 1 + .../run-pass/foreign-truncated-arguments.rs | 29 + src/test/run-pass/hrtb-opt-in-copy.rs | 2 - .../auxiliary/crate_with_invalid_spans.rs | 0 .../crate_with_invalid_spans_macros.rs | 0 .../main.rs} | 0 src/test/run-pass/import-prefix-macro.rs | 35 + src/test/run-pass/issue-11577.rs | 2 +- src/test/run-pass/issue-13264.rs | 2 - .../issue-23338-ensure-param-drop-order.rs | 16 +- src/test/run-pass/issue-25343.rs | 15 + src/test/run-pass/issue-3121.rs | 2 - src/test/run-pass/issue-31299.rs | 43 + src/test/run-pass/issue-33096.rs | 27 + src/test/run-pass/issue-33202.rs | 18 + src/test/run-pass/issue-33387.rs | 44 + src/test/run-pass/issue-33537.rs | 24 + src/test/run-pass/issue-34503.rs | 20 + src/test/run-pass/issue-9382.rs | 2 +- .../auxiliary/issue24687_lib.rs | 0 .../auxiliary/issue24687_mbcs_in_comments.rs | 0 .../main.rs} | 0 src/test/run-pass/macro-follow.rs | 4 +- src/test/run-pass/macro-include-items.rs | 4 +- src/test/run-pass/mir_ascription_coercion.rs | 20 + src/test/run-pass/mir_coercion_casts.rs | 22 + src/test/run-pass/mir_coercions.rs | 11 + src/test/run-pass/mir_constval_adts.rs | 13 + src/test/run-pass/mir_raw_fat_ptr.rs | 1 - src/test/run-pass/mir_trans_calls.rs | 10 + src/test/run-pass/mir_trans_calls_variadic.rs | 6 +- src/test/run-pass/no-landing-pads.rs | 2 +- src/test/run-pass/out-of-stack.rs | 3 +- src/test/run-pass/panic-recover-propagate.rs | 4 +- .../abort-link-to-unwinding-crates.rs | 35 + src/test/run-pass/panic-runtime/abort.rs | 39 + .../auxiliary/exit-success-if-unwind.rs | 26 + .../run-pass/panic-runtime/link-to-abort.rs | 19 + .../run-pass/panic-runtime/link-to-unwind.rs | 18 + src/test/run-pass/panic-runtime/lto-abort.rs | 39 + src/test/run-pass/panic-runtime/lto-unwind.rs | 41 + src/test/run-pass/regions-lub-ref-ref-rc.rs | 2 +- src/test/run-pass/rfc1592-deprecated.rs | 29 + src/test/run-pass/sepcomp-unwind.rs | 2 +- .../simd-intrinsic-generic-elements.rs | 3 +- .../specialization/auxiliary/go_trait.rs | 53 + .../auxiliary/specialization_cross_crate.rs | 0 
.../specialization_cross_crate_defaults.rs | 0 src/test/run-pass/sse2.rs | 18 + src/test/run-pass/tcp-stress.rs | 24 +- src/test/run-pass/terminate-in-initializer.rs | 4 +- src/test/run-pass/trait-object-exclusion.rs | 28 + .../run-pass/unit-like-struct-drop-run.rs | 2 +- src/test/run-pass/use-keyword-2.rs | 30 + src/test/{ => rustdoc}/auxiliary/empty.rs | 0 .../auxiliary/inline-default-methods.rs | 0 .../{ => rustdoc}/auxiliary/issue-13698.rs | 0 .../{ => rustdoc}/auxiliary/issue-15318.rs | 0 .../{ => rustdoc}/auxiliary/issue-17476.rs | 0 .../{ => rustdoc}/auxiliary/issue-19190-3.rs | 0 .../{ => rustdoc}/auxiliary/issue-20646.rs | 0 .../{ => rustdoc}/auxiliary/issue-20727.rs | 0 .../{ => rustdoc}/auxiliary/issue-21092.rs | 0 .../{ => rustdoc}/auxiliary/issue-21801.rs | 0 .../{ => rustdoc}/auxiliary/issue-22025.rs | 0 .../{ => rustdoc}/auxiliary/issue-23207-1.rs | 0 .../{ => rustdoc}/auxiliary/issue-23207-2.rs | 0 .../auxiliary/issue-26606-macro.rs | 0 .../{ => rustdoc}/auxiliary/issue-27362.rs | 0 .../{ => rustdoc}/auxiliary/issue-28927-1.rs | 0 .../{ => rustdoc}/auxiliary/issue-28927-2.rs | 0 .../{ => rustdoc}/auxiliary/issue-29584.rs | 0 .../{ => rustdoc}/auxiliary/issue-30109-1.rs | 0 .../{ => rustdoc}/auxiliary/reexp_stripped.rs | 0 .../auxiliary/rustdoc-default-impl.rs | 0 .../rustdoc-extern-default-method.rs | 0 .../auxiliary/rustdoc-extern-method.rs | 0 .../{ => rustdoc}/auxiliary/rustdoc-ffi.rs | 0 .../rustdoc-impl-parts-crosscrate.rs | 0 .../{ => rustdoc}/auxiliary/variant-struct.rs | 0 src/test/rustdoc/duplicate_impls/impls.rs | 22 + .../rustdoc/duplicate_impls/issue-33054.rs | 21 + src/test/rustdoc/escape-rust-expr.rs | 15 + src/test/rustdoc/extern-impl.rs | 37 + .../inline_cross/auxiliary/issue-33113.rs | 17 + .../auxiliary/rustdoc-hidden-sig.rs | 22 + .../inline_cross/auxiliary/rustdoc-hidden.rs | 14 + .../auxiliary/rustdoc-nonreachable-impls.rs | 44 + .../auxiliary/rustdoc-trait-object-impl.rs | 24 + .../rustdoc/inline_cross/inline_hidden.rs | 22 + src/test/rustdoc/inline_cross/issue-28480.rs | 23 + .../rustdoc/inline_cross/issue-31948-1.rs | 37 + .../rustdoc/inline_cross/issue-31948-2.rs | 31 + src/test/rustdoc/inline_cross/issue-31948.rs | 39 + src/test/rustdoc/inline_cross/issue-32881.rs | 22 + src/test/rustdoc/inline_cross/issue-33113.rs | 20 + src/test/rustdoc/issue-12834.rs | 21 + src/test/rustdoc/issue-25944.rs | 21 + src/test/rustdoc/issue-29503.rs | 26 + src/test/rustdoc/issue-32374.rs | 25 + src/test/rustdoc/issue-32890.rs | 27 + src/test/rustdoc/issue-33069.rs | 20 + src/test/rustdoc/issue-33178-1.rs | 20 + src/test/rustdoc/issue-33178.rs | 23 + src/test/rustdoc/issue-33302.rs | 46 + src/test/rustdoc/issue-33592.rs | 23 + src/test/rustdoc/manual_impl.rs | 91 + src/test/rustdoc/no-run-still-checks-lints.rs | 19 + src/test/rustdoc/trait-self-link.rs | 16 + src/test/ui/README.md | 31 + src/test/ui/hello_world/main.rs | 15 + src/test/ui/mismatched_types/main.rs | 17 + src/test/ui/mismatched_types/main.stderr | 8 + src/test/ui/update-all-references.sh | 31 + src/test/ui/update-references.sh | 50 + src/tools/cargotest/Cargo.lock | 24 - src/tools/cargotest/Cargo.toml | 3 - src/tools/cargotest/main.rs | 108 +- src/tools/compiletest/Cargo.lock | 82 + src/tools/compiletest/Cargo.toml | 16 + src/tools/compiletest/build.rs | 13 + .../compiletest/src}/common.rs | 28 +- .../compiletest/src}/errors.rs | 50 +- src/tools/compiletest/src/header.rs | 482 ++ src/tools/compiletest/src/json.rs | 225 + .../compiletest/src/main.rs} | 142 +- .../compiletest/src}/procsrv.rs | 14 +- 
.../compiletest/src}/raise_fd_limit.rs | 0 src/tools/compiletest/src/runtest.rs | 2265 ++++++++ src/tools/compiletest/src/uidiff.rs | 76 + .../compiletest/src}/util.rs | 0 src/tools/linkchecker/main.rs | 2 +- src/tools/rustbook/main.rs | 2 - src/tools/tidy/Cargo.lock | 4 + src/tools/tidy/Cargo.toml | 6 + src/tools/tidy/src/bins.rs | 45 + src/tools/tidy/src/cargo.rs | 98 + src/tools/tidy/src/cargo_lock.rs | 43 + src/tools/tidy/src/errors.rs | 93 + src/tools/tidy/src/features.rs | 159 + src/tools/tidy/src/main.rs | 85 + src/tools/tidy/src/style.rs | 127 + 1761 files changed, 67188 insertions(+), 46541 deletions(-) create mode 100644 mk/cfg/armv7-linux-androideabi.mk delete mode 100644 mk/snap.mk create mode 100644 src/bootstrap/config.toml.example delete mode 100644 src/compiletest/header.rs delete mode 100644 src/compiletest/runtest.rs delete mode 100755 src/etc/check-binaries.py delete mode 100644 src/etc/errorck.py delete mode 100644 src/etc/featureck.py delete mode 100755 src/etc/get-snapshot.py create mode 100644 src/etc/get-stage0.py delete mode 100755 src/etc/latest-unix-snaps.py delete mode 100644 src/etc/licenseck.py delete mode 100755 src/etc/make-snapshot.py delete mode 100644 src/etc/maketest.py delete mode 100644 src/etc/mirror-all-snapshots.py delete mode 100644 src/etc/snapshot.py delete mode 100644 src/etc/tidy.py delete mode 100644 src/libcore/iter.rs create mode 100644 src/libcore/iter/iterator.rs create mode 100644 src/libcore/iter/mod.rs create mode 100644 src/libcore/iter/range.rs create mode 100644 src/libcore/iter/sources.rs create mode 100644 src/libcore/iter/traits.rs create mode 100644 src/libpanic_abort/Cargo.toml create mode 100644 src/libpanic_abort/lib.rs create mode 100644 src/libpanic_unwind/Cargo.lock create mode 100644 src/libpanic_unwind/Cargo.toml rename src/{libstd/sys/common => libpanic_unwind}/dwarf/eh.rs (99%) rename src/{libstd/sys/common => libpanic_unwind}/dwarf/mod.rs (99%) rename src/{libstd/sys/common/unwind => libpanic_unwind}/gcc.rs (68%) create mode 100644 src/libpanic_unwind/lib.rs create mode 100644 src/libpanic_unwind/seh.rs rename src/{libstd/sys/common/unwind => libpanic_unwind}/seh64_gnu.rs (92%) create mode 100644 src/libpanic_unwind/windows.rs create mode 100644 src/librustc/dep_graph/debug.rs create mode 100644 src/librustc/hir/map/def_collector.rs create mode 100644 src/librustc_back/target/armv7_linux_androideabi.rs create mode 100644 src/librustc_back/target/linux_musl_base.rs create mode 100644 src/librustc_incremental/persist/hash.rs delete mode 100644 src/librustc_incremental/persist/serialize.rs create mode 100644 src/librustc_metadata/def_key.rs create mode 100644 src/librustc_mir/build/expr/stmt.rs create mode 100644 src/librustc_mir/diagnostics.rs create mode 100644 src/librustc_mir/transform/break_cleanup_edges.rs delete mode 100644 src/librustc_mir/transform/break_critical_edges.rs create mode 100644 src/librustc_mir/transform/promote_consts.rs create mode 100644 src/librustc_mir/transform/qualify_consts.rs delete mode 100644 src/librustc_passes/const_fn.rs create mode 100644 src/librustc_save_analysis/external_data.rs create mode 100644 src/librustc_save_analysis/json_dumper.rs create mode 100644 src/librustc_trans/partitioning.rs create mode 100644 src/librustc_trans/trans_item.rs create mode 100644 src/librustc_typeck/check_unused.rs create mode 100644 src/librustdoc/visit_lib.rs delete mode 100644 src/libstd/sys/common/unwind/mod.rs delete mode 100644 src/libstd/sys/common/unwind/seh.rs create mode 100644 
src/libstd/sys/unix/android.rs create mode 100644 src/libsyntax/errors/snippet/mod.rs create mode 100644 src/libsyntax/errors/snippet/test.rs create mode 100644 src/libunwind/Cargo.toml create mode 100644 src/libunwind/build.rs create mode 100644 src/libunwind/lib.rs rename src/{libstd/sys/common => libunwind}/libunwind.rs (57%) delete mode 100644 src/snapshots.txt create mode 100644 src/stage0.txt rename src/test/{ => codegen-units/item-collection}/auxiliary/cgu_export_trait_method.rs (100%) rename src/test/{ => codegen-units/item-collection}/auxiliary/cgu_extern_closures.rs (100%) rename src/test/{ => codegen-units/item-collection}/auxiliary/cgu_generic_function.rs (96%) rename src/test/codegen-units/{ => item-collection}/cross-crate-closures.rs (98%) rename src/test/codegen-units/{ => item-collection}/cross-crate-generic-functions.rs (100%) rename src/test/codegen-units/{ => item-collection}/cross-crate-trait-method.rs (100%) create mode 100644 src/test/codegen-units/item-collection/drop_in_place_intrinsic.rs rename src/test/codegen-units/{ => item-collection}/function-as-argument.rs (100%) rename src/test/codegen-units/{ => item-collection}/generic-drop-glue.rs (84%) rename src/test/codegen-units/{ => item-collection}/generic-functions.rs (100%) rename src/test/codegen-units/{ => item-collection}/generic-impl.rs (100%) rename src/test/codegen-units/{ => item-collection}/impl-in-non-instantiated-generic.rs (97%) rename src/test/codegen-units/{ => item-collection}/instantiation-through-vtable.rs (100%) rename src/test/codegen-units/{ => item-collection}/items-within-generic-items.rs (100%) rename src/test/codegen-units/{ => item-collection}/non-generic-closures.rs (98%) rename src/test/codegen-units/{ => item-collection}/non-generic-drop-glue.rs (90%) rename src/test/codegen-units/{ => item-collection}/non-generic-functions.rs (100%) rename src/test/codegen-units/{ => item-collection}/overloaded-operators.rs (100%) create mode 100644 src/test/codegen-units/item-collection/static-init.rs rename src/test/codegen-units/{ => item-collection}/statics-and-consts.rs (100%) rename src/test/codegen-units/{ => item-collection}/trait-implementations.rs (100%) rename src/test/codegen-units/{ => item-collection}/trait-method-as-argument.rs (100%) rename src/test/codegen-units/{ => item-collection}/trait-method-default-impl.rs (100%) rename src/test/codegen-units/{ => item-collection}/transitive-drop-glue.rs (89%) rename src/test/codegen-units/{ => item-collection}/tuple-drop-glue.rs (94%) rename src/test/codegen-units/{ => item-collection}/unsizing.rs (100%) rename src/test/codegen-units/{ => item-collection}/unused-traits-and-generics.rs (100%) create mode 100644 src/test/codegen-units/partitioning/auxiliary/cgu_explicit_inlining.rs create mode 100644 src/test/codegen-units/partitioning/auxiliary/cgu_extern_drop_glue.rs create mode 100644 src/test/codegen-units/partitioning/auxiliary/cgu_generic_function.rs create mode 100644 src/test/codegen-units/partitioning/extern-drop-glue.rs create mode 100644 src/test/codegen-units/partitioning/extern-generic.rs create mode 100644 src/test/codegen-units/partitioning/inlining-from-extern-crate.rs create mode 100644 src/test/codegen-units/partitioning/local-drop-glue.rs create mode 100644 src/test/codegen-units/partitioning/local-generic.rs create mode 100644 src/test/codegen-units/partitioning/local-inlining.rs create mode 100644 src/test/codegen-units/partitioning/local-transitive-inlining.rs create mode 100644 
src/test/codegen-units/partitioning/methods-are-with-self-type.rs create mode 100644 src/test/codegen-units/partitioning/regular-modules.rs create mode 100644 src/test/codegen-units/partitioning/statics.rs create mode 100644 src/test/codegen/lto-removes-invokes.rs rename src/test/{ => compile-fail-fulldeps}/auxiliary/attr_plugin_test.rs (100%) rename src/test/{ => compile-fail-fulldeps}/auxiliary/lint_for_crate.rs (100%) rename src/test/{ => compile-fail-fulldeps}/auxiliary/lint_group_plugin_test.rs (100%) rename src/test/{ => compile-fail-fulldeps}/auxiliary/lint_plugin_test.rs (100%) rename src/test/{ => compile-fail-fulldeps}/auxiliary/macro_crate_MacroRulesTT.rs (100%) rename src/test/{ => compile-fail-fulldeps}/auxiliary/macro_crate_test.rs (100%) rename src/test/{ => compile-fail-fulldeps}/auxiliary/macro_reexport_1.rs (100%) rename src/test/{ => compile-fail-fulldeps}/auxiliary/rlib_crate_test.rs (100%) rename src/test/{ => compile-fail-fulldeps}/auxiliary/use_from_trait_xc.rs (100%) create mode 100644 src/test/compile-fail/E0001.rs create mode 100644 src/test/compile-fail/E0002.rs create mode 100644 src/test/compile-fail/E0004.rs create mode 100644 src/test/compile-fail/E0005.rs create mode 100644 src/test/compile-fail/E0007.rs create mode 100644 src/test/compile-fail/E0008.rs create mode 100644 src/test/compile-fail/E0009.rs create mode 100644 src/test/compile-fail/E0010.rs create mode 100644 src/test/compile-fail/E0017.rs create mode 100644 src/test/compile-fail/E0023.rs create mode 100644 src/test/compile-fail/E0024.rs create mode 100644 src/test/compile-fail/E0025.rs create mode 100644 src/test/compile-fail/E0026.rs create mode 100644 src/test/compile-fail/E0027.rs create mode 100644 src/test/compile-fail/E0029.rs create mode 100644 src/test/compile-fail/E0030.rs create mode 100644 src/test/compile-fail/E0033.rs create mode 100644 src/test/compile-fail/E0034.rs create mode 100644 src/test/compile-fail/E0035.rs create mode 100644 src/test/compile-fail/E0036.rs create mode 100644 src/test/compile-fail/E0038.rs create mode 100644 src/test/compile-fail/E0040.rs create mode 100644 src/test/compile-fail/E0044.rs create mode 100644 src/test/compile-fail/E0045.rs create mode 100644 src/test/compile-fail/E0046.rs create mode 100644 src/test/compile-fail/E0049.rs create mode 100644 src/test/compile-fail/E0050.rs create mode 100644 src/test/compile-fail/E0053.rs create mode 100644 src/test/compile-fail/E0054.rs create mode 100644 src/test/compile-fail/E0055.rs create mode 100644 src/test/compile-fail/E0057.rs create mode 100644 src/test/compile-fail/E0059.rs create mode 100644 src/test/compile-fail/E0060.rs create mode 100644 src/test/compile-fail/E0061.rs create mode 100644 src/test/compile-fail/associated-types/bound-lifetime-constrained.rs create mode 100644 src/test/compile-fail/associated-types/bound-lifetime-in-binding-only.rs create mode 100644 src/test/compile-fail/associated-types/bound-lifetime-in-return-only.rs rename src/test/{ => compile-fail}/auxiliary/allocator-dylib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/allocator-dylib2.rs (100%) rename src/test/{ => compile-fail}/auxiliary/allocator1.rs (100%) rename src/test/{ => compile-fail}/auxiliary/allocator2.rs (100%) rename src/test/{ => compile-fail}/auxiliary/allocator3.rs (100%) rename src/test/{ => compile-fail}/auxiliary/ambig_impl_2_lib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/cci_class.rs (100%) rename src/test/{ => compile-fail}/auxiliary/cci_class_5.rs (100%) create mode 100644 
src/test/compile-fail/auxiliary/cdylib-dep.rs rename src/test/{ => compile-fail}/auxiliary/changing-crates-a1.rs (100%) rename src/test/{ => compile-fail}/auxiliary/changing-crates-a2.rs (100%) rename src/test/{ => compile-fail}/auxiliary/changing-crates-b.rs (100%) rename src/test/{ => compile-fail}/auxiliary/coherence_copy_like_lib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/coherence_inherent_cc_lib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/coherence_lib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/coherence_orphan_lib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/const_fn_lib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/crate_a1.rs (100%) rename src/test/{ => compile-fail}/auxiliary/crate_a2.rs (100%) rename src/test/{ => compile-fail}/auxiliary/crateresolve1-1.rs (100%) rename src/test/{ => compile-fail}/auxiliary/crateresolve1-2.rs (100%) rename src/test/{ => compile-fail}/auxiliary/crateresolve1-3.rs (100%) rename src/test/{ => compile-fail}/auxiliary/default_ty_param_cross_crate_crate.rs (100%) rename src/test/{ => compile-fail}/auxiliary/deprecation-lint.rs (100%) rename src/test/{ => compile-fail}/auxiliary/empty-struct.rs (100%) rename src/test/{ => compile-fail}/auxiliary/go_trait.rs (100%) rename src/test/{ => compile-fail}/auxiliary/inherited_stability.rs (100%) rename src/test/{ => compile-fail}/auxiliary/internal_unstable.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue-19163.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue-21146-inc.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue-21221-3.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue-21221-4.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue-29181.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue-30535.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue_11680.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue_12612_1.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue_16725.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue_17718_const_privacy.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue_21202.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue_30123_aux.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue_3907.rs (100%) rename src/test/{ => compile-fail}/auxiliary/issue_5844_aux.rs (100%) rename src/test/{ => compile-fail}/auxiliary/lifetime_bound_will_change_warning_lib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/lint_output_format.rs (100%) rename src/test/{ => compile-fail}/auxiliary/lint_stability.rs (100%) rename src/test/{ => compile-fail}/auxiliary/lint_stability_fields.rs (100%) rename src/test/{ => compile-fail}/auxiliary/lint_unused_extern_crate.rs (100%) rename src/test/{ => compile-fail}/auxiliary/macro_crate_nonterminal.rs (100%) rename src/test/{ => compile-fail}/auxiliary/macro_non_reexport_2.rs (100%) create mode 100644 src/test/compile-fail/auxiliary/macro_reexport_1.rs rename src/test/{ => compile-fail}/auxiliary/namespaced_enums.rs (100%) rename src/test/{ => compile-fail}/auxiliary/needs_allocator.rs (100%) rename src/test/{ => compile-fail}/auxiliary/no_method_suggested_traits.rs (100%) rename src/test/{ => compile-fail}/auxiliary/noexporttypelib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/orphan_check_diagnostics.rs (100%) rename src/test/{ => compile-fail}/auxiliary/privacy_tuple_struct.rs (100%) rename src/test/{ => compile-fail}/auxiliary/private_trait_xc.rs (100%) rename src/test/{ 
=> compile-fail}/auxiliary/pub_static_array.rs (92%) rename src/test/{auxiliary/regions_bounded_method_type_parameters_cross_crate_lib.rs => compile-fail/auxiliary/rbmtp_cross_crate_lib.rs} (100%) rename src/test/{ => compile-fail}/auxiliary/stability_attribute_issue.rs (100%) rename src/test/{ => compile-fail}/auxiliary/stability_cfg1.rs (100%) rename src/test/{ => compile-fail}/auxiliary/stability_cfg2.rs (100%) rename src/test/{ => compile-fail}/auxiliary/static_priv_by_default.rs (100%) rename src/test/{ => compile-fail}/auxiliary/struct_field_privacy.rs (100%) rename src/test/{ => compile-fail}/auxiliary/struct_variant_privacy.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-a-base.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-a-change-lit.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-a-change-significant-cfg.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-a-change-trait-bound.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-a-change-type-arg.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-a-change-type-ret.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-a-change-type-static.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-b.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-uta-base.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-uta-change-use-trait.rs (100%) rename src/test/{ => compile-fail}/auxiliary/svh-utb.rs (100%) rename src/test/{auxiliary/typeck_default_trait_impl_cross_crate_coherence_lib.rs => compile-fail/auxiliary/tdticc_coherence_lib.rs} (100%) rename src/test/{ => compile-fail}/auxiliary/trait_bounds_on_structs_and_enums_xc.rs (100%) rename src/test/{ => compile-fail}/auxiliary/trait_impl_conflict.rs (100%) rename src/test/{ => compile-fail}/auxiliary/trait_safety_lib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/trait_superkinds_in_metadata.rs (100%) rename src/test/{ => compile-fail}/auxiliary/two_macros.rs (100%) rename src/test/{ => compile-fail}/auxiliary/unreachable_variant.rs (100%) create mode 100644 src/test/compile-fail/auxiliary/use_from_trait_xc.rs rename src/test/{ => compile-fail}/auxiliary/variant-namespacing.rs (100%) rename src/test/{ => compile-fail}/auxiliary/weak-lang-items.rs (100%) rename src/test/{ => compile-fail}/auxiliary/xc_private_method_lib.rs (100%) rename src/test/{ => compile-fail}/auxiliary/xcrate_unit_struct.rs (100%) rename src/test/compile-fail/{trace_macros-gate2.rs => cast-rfc0401-2.rs} (62%) create mode 100644 src/test/compile-fail/cdylib-deps-must-be-static.rs create mode 100644 src/test/compile-fail/closure-wrong-kind.rs create mode 100644 src/test/compile-fail/const-err-multi.rs create mode 100644 src/test/compile-fail/const-err2.rs delete mode 100644 src/test/compile-fail/const-fn-not-safe-for-const2.rs rename src/test/compile-fail/{ => derived-errors}/issue-30580.rs (100%) create mode 100644 src/test/compile-fail/derived-errors/issue-31997-1.rs create mode 100644 src/test/compile-fail/derived-errors/issue-31997.rs create mode 100644 src/test/compile-fail/deriving-copyclone.rs create mode 100644 src/test/compile-fail/expanded-cfg.rs create mode 100644 src/test/compile-fail/feature-gate-allow-internal-unstable-struct.rs create mode 100644 src/test/compile-fail/import-prefix-macro-1.rs create mode 100644 src/test/compile-fail/import-prefix-macro-2.rs create mode 100644 src/test/compile-fail/import-ty-params.rs create mode 100644 src/test/compile-fail/issue-23716.rs create mode 100644 
src/test/compile-fail/issue-26472.rs create mode 100644 src/test/compile-fail/issue-27842.rs create mode 100644 src/test/compile-fail/issue-30255.rs create mode 100644 src/test/compile-fail/issue-31221.rs create mode 100644 src/test/compile-fail/issue-31424.rs create mode 100644 src/test/compile-fail/issue-32655.rs create mode 100644 src/test/compile-fail/issue-32709.rs create mode 100644 src/test/compile-fail/issue-32782.rs create mode 100644 src/test/compile-fail/issue-32833.rs create mode 100644 src/test/compile-fail/issue-32922.rs create mode 100644 src/test/compile-fail/issue-32963.rs create mode 100644 src/test/compile-fail/issue-33464.rs create mode 100644 src/test/compile-fail/issue32829.rs rename src/test/compile-fail/{on-unimplemented-bad-anno.rs => on-unimplemented/bad-annotation.rs} (100%) create mode 100644 src/test/compile-fail/on-unimplemented/multiple-impls.rs create mode 100644 src/test/compile-fail/on-unimplemented/on-impl.rs rename src/test/compile-fail/{on-unimplemented.rs => on-unimplemented/on-trait.rs} (100%) create mode 100644 src/test/compile-fail/on-unimplemented/slice-index.rs create mode 100644 src/test/compile-fail/panic-runtime/abort-link-to-unwind-dylib.rs create mode 100644 src/test/compile-fail/panic-runtime/auxiliary/needs-panic-runtime.rs create mode 100644 src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-abort.rs rename src/test/{auxiliary/lang-item-public.rs => compile-fail/panic-runtime/auxiliary/panic-runtime-lang-items.rs} (62%) create mode 100644 src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-unwind.rs create mode 100644 src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-unwind2.rs create mode 100644 src/test/compile-fail/panic-runtime/auxiliary/runtime-depending-on-panic-runtime.rs create mode 100644 src/test/compile-fail/panic-runtime/auxiliary/wants-panic-runtime-abort.rs create mode 100644 src/test/compile-fail/panic-runtime/auxiliary/wants-panic-runtime-unwind.rs create mode 100644 src/test/compile-fail/panic-runtime/bad-panic-flag1.rs create mode 100644 src/test/compile-fail/panic-runtime/bad-panic-flag2.rs create mode 100644 src/test/compile-fail/panic-runtime/libtest-unwinds.rs create mode 100644 src/test/compile-fail/panic-runtime/needs-gate.rs create mode 100644 src/test/compile-fail/panic-runtime/runtime-depend-on-needs-runtime.rs create mode 100644 src/test/compile-fail/panic-runtime/transitive-link-a-bunch.rs create mode 100644 src/test/compile-fail/panic-runtime/two-panic-runtimes.rs create mode 100644 src/test/compile-fail/panic-runtime/want-abort-got-unwind.rs create mode 100644 src/test/compile-fail/panic-runtime/want-abort-got-unwind2.rs create mode 100644 src/test/compile-fail/panic-runtime/want-unwind-got-abort.rs create mode 100644 src/test/compile-fail/panic-runtime/want-unwind-got-abort2.rs create mode 100644 src/test/compile-fail/paren-span.rs create mode 100644 src/test/compile-fail/privacy/restricted/auxiliary/pub_restricted.rs create mode 100644 src/test/compile-fail/privacy/restricted/feature-gate.rs create mode 100644 src/test/compile-fail/privacy/restricted/lookup-ignores-private.rs create mode 100644 src/test/compile-fail/privacy/restricted/private-in-public.rs create mode 100644 src/test/compile-fail/privacy/restricted/struct-literal-field.rs create mode 100644 src/test/compile-fail/privacy/restricted/test.rs create mode 100644 src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs create mode 100644 src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs 
create mode 100644 src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs create mode 100644 src/test/compile-fail/privacy/restricted/ty-params.rs create mode 100644 src/test/compile-fail/qualified-path-params-2.rs create mode 100644 src/test/compile-fail/qualified-path-params.rs create mode 100644 src/test/compile-fail/region-invariant-static-error-reporting.rs create mode 100644 src/test/compile-fail/rfc1592-deprecated.rs create mode 100644 src/test/compile-fail/self-infer.rs create mode 100644 src/test/compile-fail/self_type_keyword-2.rs mode change 100755 => 100644 src/test/compile-fail/specialization/specialization-polarity.rs delete mode 100644 src/test/compile-fail/trace_macros-gate3.rs create mode 100644 src/test/compile-fail/traits-inductive-overflow-simultaneous.rs create mode 100644 src/test/compile-fail/use-keyword.rs rename src/test/{parse-fail => compile-fail}/use-mod-4.rs (80%) create mode 100644 src/test/compile-fail/variant-used-as-type.rs rename src/test/{ => debuginfo}/auxiliary/cross_crate_debuginfo_type_uniquing.rs (100%) rename src/test/{ => debuginfo}/auxiliary/cross_crate_spans.rs (100%) rename src/test/{ => debuginfo}/auxiliary/issue13213aux.rs (100%) create mode 100644 src/test/debuginfo/struct-namespace.rs rename src/{libsyntax/owned_slice.rs => test/incremental/callee_caller_cross_crate/auxiliary/a.rs} (70%) rename src/{librustc_data_structures/obligation_forest/tree_index.rs => test/incremental/callee_caller_cross_crate/b.rs} (59%) create mode 100644 src/test/incremental/type_alias_cross_crate/auxiliary/a.rs create mode 100644 src/test/incremental/type_alias_cross_crate/b.rs create mode 100644 src/test/parse-fail/bad-pointer-type.rs create mode 100644 src/test/parse-fail/issue-30318.rs create mode 100644 src/test/parse-fail/issue-32214.rs create mode 100644 src/test/parse-fail/issue-32505.rs rename src/test/parse-fail/{lifetime-obsoleted-self.rs => issue-33262.rs} (71%) create mode 100644 src/test/parse-fail/issue-33413.rs rename src/test/parse-fail/{keyword-do-as-identifier.rs => keyword-box-as-identifier.rs} (78%) create mode 100644 src/test/parse-fail/keyword-const-as-identifier.rs create mode 100644 src/test/parse-fail/keyword-continue-as-identifier.rs create mode 100644 src/test/parse-fail/keyword-crate-as-identifier.rs rename src/test/parse-fail/{keyword-priv-as-identifier.rs => keyword-in-as-identifier.rs} (78%) create mode 100644 src/test/parse-fail/keyword-move-as-identifier.rs create mode 100644 src/test/parse-fail/keyword-where-as-identifier.rs create mode 100644 src/test/run-make/atomic-lock-free/Makefile create mode 100644 src/test/run-make/atomic-lock-free/atomic_lock_free.rs create mode 100644 src/test/run-make/cdylib/Makefile create mode 100644 src/test/run-make/cdylib/bar.rs create mode 100644 src/test/run-make/cdylib/foo.c create mode 100644 src/test/run-make/cdylib/foo.rs create mode 100644 src/test/run-make/compiler-lookup-paths/e2.rs create mode 100644 src/test/run-make/dep-info-no-analysis/Makefile create mode 100644 src/test/run-make/dep-info-no-analysis/input.dd create mode 100644 src/test/run-make/dep-info-no-analysis/input.rs rename src/test/{auxiliary/no_std_crate.rs => run-make/extern-flag-fun/bar-alt.rs} (94%) create mode 100644 src/test/run-make/extern-multiple-copies2/Makefile create mode 100644 src/test/run-make/extern-multiple-copies2/bar.rs rename src/test/run-make/{json-errors/foo.rs => extern-multiple-copies2/foo1.rs} (86%) create mode 100644 src/test/run-make/extern-multiple-copies2/foo2.rs delete mode 100644 
src/test/run-make/json-errors/Makefile create mode 100644 src/test/run-make/symlinked-rlib/Makefile create mode 100644 src/test/run-make/symlinked-rlib/bar.rs create mode 100644 src/test/run-make/symlinked-rlib/foo.rs rename src/test/{ => run-pass-fulldeps}/auxiliary/custom_derive_plugin.rs (98%) rename src/test/{ => run-pass-fulldeps}/auxiliary/custom_derive_plugin_attr.rs (98%) rename src/test/{ => run-pass-fulldeps}/auxiliary/dummy_mir_pass.rs (87%) rename src/test/{ => run-pass-fulldeps}/auxiliary/issue-13560-1.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/issue-13560-2.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/issue-13560-3.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/issue-16822.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/issue-18502.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/issue_16723_multiple_items_syntax_ext.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/linkage-visibility.rs (100%) create mode 100644 src/test/run-pass-fulldeps/auxiliary/lint_for_crate.rs create mode 100644 src/test/run-pass-fulldeps/auxiliary/lint_group_plugin_test.rs create mode 100644 src/test/run-pass-fulldeps/auxiliary/lint_plugin_test.rs rename src/test/{ => run-pass-fulldeps}/auxiliary/llvm_pass_plugin.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/logging_right_crate.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/lto-syntax-extension-lib.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/lto-syntax-extension-plugin.rs (100%) create mode 100644 src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs rename src/test/{ => run-pass-fulldeps}/auxiliary/plugin_args.rs (97%) rename src/test/{ => run-pass-fulldeps}/auxiliary/plugin_crate_outlive_expansion_phase.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/plugin_with_plugin_lib.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/procedural_mbe_matching.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/roman_numerals.rs (97%) rename src/test/{ => run-pass-fulldeps}/auxiliary/syntax_extension_with_dll_deps_1.rs (100%) rename src/test/{ => run-pass-fulldeps}/auxiliary/syntax_extension_with_dll_deps_2.rs (100%) create mode 100644 src/test/run-pass-fulldeps/rustc_encodable_hygiene.rs create mode 100644 src/test/run-pass/associated-const-outer-ty-refs.rs create mode 100644 src/test/run-pass/associated-types-in-bound-type-arg.rs rename src/test/{ => run-pass}/auxiliary/allocator-dummy.rs (100%) rename src/test/{ => run-pass}/auxiliary/anon-extern-mod-cross-crate-1.rs (100%) rename src/test/{ => run-pass}/auxiliary/anon_trait_static_method_lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/associated-const-cc-lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/associated-types-cc-lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/augmented_assignments.rs (100%) rename src/test/{ => run-pass}/auxiliary/blind-item-mixed-crate-use-item-foo.rs (100%) rename src/test/{ => run-pass}/auxiliary/blind-item-mixed-crate-use-item-foo2.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_borrow_lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_capture_clause.rs (100%) create mode 100644 src/test/run-pass/auxiliary/cci_class.rs rename src/test/{ => run-pass}/auxiliary/cci_class_2.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_class_3.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_class_4.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_class_6.rs (100%) rename src/test/{ => 
run-pass}/auxiliary/cci_class_cast.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_class_trait.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_const.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_const_block.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_impl_lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_intrinsic.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_iter_lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_nested_lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/cci_no_inline_lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/cfg_inner_static.rs (100%) rename src/test/{ => run-pass}/auxiliary/cgu_test.rs (100%) rename src/test/{ => run-pass}/auxiliary/cgu_test_a.rs (100%) rename src/test/{ => run-pass}/auxiliary/cgu_test_b.rs (100%) rename src/test/{ => run-pass}/auxiliary/check_static_recursion_foreign_helper.rs (100%) create mode 100644 src/test/run-pass/auxiliary/coherence_copy_like_lib.rs create mode 100644 src/test/run-pass/auxiliary/coherence_lib.rs create mode 100644 src/test/run-pass/auxiliary/const_fn_lib.rs rename src/test/{ => run-pass}/auxiliary/crate-attributes-using-cfg_attr.rs (100%) rename src/test/{ => run-pass}/auxiliary/crate-method-reexport-grrrrrrr2.rs (100%) rename src/test/{ => run-pass}/auxiliary/default_type_params_xc.rs (100%) rename src/test/{ => run-pass}/auxiliary/derive-no-std.rs (100%) create mode 100644 src/test/run-pass/auxiliary/empty-struct.rs rename src/test/{ => run-pass}/auxiliary/explicit_self_xcrate.rs (100%) rename src/test/{ => run-pass}/auxiliary/extern-crosscrate-source.rs (100%) rename src/test/{ => run-pass}/auxiliary/extern-take-value.rs (100%) rename src/test/{ => run-pass}/auxiliary/extern_calling_convention.rs (100%) rename src/test/{ => run-pass}/auxiliary/extern_mod_ordering_lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/fat_drop.rs (100%) rename src/test/{ => run-pass}/auxiliary/fn-abi.rs (100%) rename src/test/{ => run-pass}/auxiliary/foreign_lib.rs (100%) create mode 100644 src/test/run-pass/auxiliary/go_trait.rs rename src/test/{ => run-pass}/auxiliary/i8.rs (100%) rename src/test/{ => run-pass}/auxiliary/impl_privacy_xc_1.rs (100%) rename src/test/{ => run-pass}/auxiliary/impl_privacy_xc_2.rs (100%) rename src/test/{ => run-pass}/auxiliary/inline_dtor.rs (100%) rename src/test/{ => run-pass}/auxiliary/inner_static.rs (100%) rename src/test/{ => run-pass}/auxiliary/iss.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-10028.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-11224.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-11225-1.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-11225-2.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-11225-3.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-11508.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-11529.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-12133-dylib.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-12133-dylib2.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-12133-rlib.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-12660-aux.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-13620-1.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-13620-2.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-13872-1.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-13872-2.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-13872-3.rs (100%) rename src/test/{ => 
run-pass}/auxiliary/issue-14344-1.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-14344-2.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-14421.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-14422.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-15562.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-16643.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-17662.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-17718-aux.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-18501.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-18514.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-18711.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-18913-1.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-18913-2.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-19340-1.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-2380.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-2414-a.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-2414-b.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-25185-1.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-25185-2.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-2526.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-25467.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-2631-a.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-29485.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-3012-1.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-31702-1.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-31702-2.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-4208-cc.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-4545.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-5518.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-5521.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-7178.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-7899.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-8044.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-8259.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-9906.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue-9968.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue13507.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue2170lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_10031_aux.rs (100%) rename src/test/{auxiliary/issue_3907_1.rs => run-pass/auxiliary/issue_12612_1.rs} (93%) rename src/test/{ => run-pass}/auxiliary/issue_12612_2.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_19293.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_20389.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_2316_a.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_2316_b.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_2472_b.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_2723_a.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_3136_a.rc (100%) rename src/test/{ => run-pass}/auxiliary/issue_3136_a.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_3979_traits.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_8401.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_9123.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_9155.rs (100%) rename src/test/{ => run-pass}/auxiliary/issue_9188.rs (100%) rename src/test/{ => run-pass}/auxiliary/kinds_in_metadata.rs (100%) rename src/test/{ => 
run-pass}/auxiliary/linkage1.rs (100%) rename src/test/{ => run-pass}/auxiliary/macro-include-items-expr.rs (100%) rename src/test/{ => run-pass}/auxiliary/macro-include-items-item.rs (100%) rename src/test/{ => run-pass}/auxiliary/macro_crate_def_only.rs (100%) create mode 100644 src/test/run-pass/auxiliary/macro_crate_nonterminal.rs rename src/test/{ => run-pass}/auxiliary/macro_export_inner_module.rs (100%) create mode 100644 src/test/run-pass/auxiliary/macro_reexport_1.rs rename src/test/{ => run-pass}/auxiliary/macro_reexport_2.rs (100%) rename src/test/{ => run-pass}/auxiliary/macro_reexport_2_no_use.rs (100%) rename src/test/{ => run-pass}/auxiliary/macro_with_super_1.rs (100%) rename src/test/{ => run-pass}/auxiliary/method_self_arg1.rs (100%) rename src/test/{ => run-pass}/auxiliary/method_self_arg2.rs (100%) rename src/test/{ => run-pass}/auxiliary/mir_external_refs.rs (100%) rename src/test/{ => run-pass}/auxiliary/moves_based_on_type_lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/msvc-data-only-lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/namespaced_enum_emulate_flat.rs (100%) create mode 100644 src/test/run-pass/auxiliary/namespaced_enums.rs rename src/test/{ => run-pass}/auxiliary/nested_item.rs (100%) rename src/test/{ => run-pass}/auxiliary/newtype_struct_xc.rs (100%) rename src/test/{ => run-pass}/auxiliary/overloaded_autoderef_xc.rs (100%) rename src/test/{ => run-pass}/auxiliary/packed.rs (100%) rename src/test/{ => run-pass}/auxiliary/priv-impl-prim-ty.rs (100%) rename src/test/{ => run-pass}/auxiliary/privacy_reexport.rs (100%) rename src/test/{ => run-pass}/auxiliary/pub_use_mods_xcrate.rs (100%) rename src/test/{ => run-pass}/auxiliary/pub_use_xcrate1.rs (100%) rename src/test/{ => run-pass}/auxiliary/pub_use_xcrate2.rs (100%) rename src/test/{ => run-pass}/auxiliary/reachable-unnameable-items.rs (100%) rename src/test/{ => run-pass}/auxiliary/reexport-should-still-link.rs (100%) rename src/test/{ => run-pass}/auxiliary/reexported_static_methods.rs (100%) rename src/test/{ => run-pass}/auxiliary/sepcomp-extern-lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/sepcomp_cci_lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/sepcomp_lib.rs (100%) rename src/test/{ => run-pass}/auxiliary/static-function-pointer-aux.rs (100%) rename src/test/{ => run-pass}/auxiliary/static-methods-crate.rs (100%) rename src/test/{ => run-pass}/auxiliary/static_fn_inline_xc_aux.rs (100%) rename src/test/{ => run-pass}/auxiliary/static_fn_trait_xc_aux.rs (100%) rename src/test/{ => run-pass}/auxiliary/static_mut_xc.rs (100%) rename src/test/{ => run-pass}/auxiliary/struct_destructuring_cross_crate.rs (100%) rename src/test/{ => run-pass}/auxiliary/struct_variant_xc_aux.rs (100%) rename src/test/{auxiliary/svh-a-no-change.rs => run-pass/auxiliary/svh-a-base.rs} (100%) rename src/test/{ => run-pass}/auxiliary/svh-a-comment.rs (100%) rename src/test/{ => run-pass}/auxiliary/svh-a-doc.rs (100%) rename src/test/{ => run-pass}/auxiliary/svh-a-macro.rs (100%) create mode 100644 src/test/run-pass/auxiliary/svh-a-no-change.rs rename src/test/{ => run-pass}/auxiliary/svh-a-redundant-cfg.rs (100%) rename src/test/{ => run-pass}/auxiliary/svh-a-whitespace.rs (100%) create mode 100644 src/test/run-pass/auxiliary/svh-b.rs rename src/test/{ => run-pass}/auxiliary/thread-local-extern-static.rs (100%) rename src/test/{ => run-pass}/auxiliary/trait_default_method_xc_aux.rs (100%) rename src/test/{ => run-pass}/auxiliary/trait_default_method_xc_aux_2.rs (100%) rename src/test/{ => 
run-pass}/auxiliary/trait_inheritance_auto_xc_2_aux.rs (100%) rename src/test/{ => run-pass}/auxiliary/trait_inheritance_auto_xc_aux.rs (100%) rename src/test/{ => run-pass}/auxiliary/trait_inheritance_cross_trait_call_xc_aux.rs (100%) rename src/test/{ => run-pass}/auxiliary/trait_inheritance_overloading_xc.rs (100%) create mode 100644 src/test/run-pass/auxiliary/trait_safety_lib.rs create mode 100644 src/test/run-pass/auxiliary/trait_superkinds_in_metadata.rs rename src/test/{ => run-pass}/auxiliary/traitimpl.rs (100%) create mode 100644 src/test/run-pass/auxiliary/two_macros.rs rename src/test/{ => run-pass}/auxiliary/typeid-intrinsic-aux1.rs (100%) rename src/test/{ => run-pass}/auxiliary/typeid-intrinsic-aux2.rs (100%) rename src/test/{ => run-pass}/auxiliary/unboxed-closures-cross-crate.rs (100%) create mode 100644 src/test/run-pass/auxiliary/weak-lang-items.rs rename src/test/{ => run-pass}/auxiliary/where_clauses_xc.rs (100%) rename src/test/{ => run-pass}/auxiliary/xcrate-trait-lifetime-param.rs (100%) rename src/test/{ => run-pass}/auxiliary/xcrate_address_insignificant.rs (100%) rename src/test/{ => run-pass}/auxiliary/xcrate_associated_type_defaults.rs (100%) rename src/test/{ => run-pass}/auxiliary/xcrate_static_addresses.rs (100%) rename src/test/{ => run-pass}/auxiliary/xcrate_struct_aliases.rs (100%) create mode 100644 src/test/run-pass/auxiliary/xcrate_unit_struct.rs create mode 100644 src/test/run-pass/cast-to-infer-ty.rs create mode 100644 src/test/run-pass/const-err.rs create mode 100644 src/test/run-pass/const-meth-pattern.rs create mode 100644 src/test/run-pass/deriving-copyclone.rs create mode 100644 src/test/run-pass/foreign-truncated-arguments.rs rename src/test/{ => run-pass/import-crate-with-invalid-spans}/auxiliary/crate_with_invalid_spans.rs (100%) rename src/test/{ => run-pass/import-crate-with-invalid-spans}/auxiliary/crate_with_invalid_spans_macros.rs (100%) rename src/test/run-pass/{import-crate-with-invalid-spans.rs => import-crate-with-invalid-spans/main.rs} (100%) create mode 100644 src/test/run-pass/import-prefix-macro.rs create mode 100644 src/test/run-pass/issue-31299.rs create mode 100644 src/test/run-pass/issue-33096.rs create mode 100644 src/test/run-pass/issue-33202.rs create mode 100644 src/test/run-pass/issue-33387.rs create mode 100644 src/test/run-pass/issue-33537.rs create mode 100644 src/test/run-pass/issue-34503.rs rename src/test/{ => run-pass/issue24687-embed-debuginfo}/auxiliary/issue24687_lib.rs (100%) rename src/test/{ => run-pass/issue24687-embed-debuginfo}/auxiliary/issue24687_mbcs_in_comments.rs (100%) rename src/test/run-pass/{issue24687-embed-debuginfo.rs => issue24687-embed-debuginfo/main.rs} (100%) create mode 100644 src/test/run-pass/mir_ascription_coercion.rs create mode 100644 src/test/run-pass/mir_coercion_casts.rs create mode 100644 src/test/run-pass/panic-runtime/abort-link-to-unwinding-crates.rs create mode 100644 src/test/run-pass/panic-runtime/abort.rs create mode 100644 src/test/run-pass/panic-runtime/auxiliary/exit-success-if-unwind.rs create mode 100644 src/test/run-pass/panic-runtime/link-to-abort.rs create mode 100644 src/test/run-pass/panic-runtime/link-to-unwind.rs create mode 100644 src/test/run-pass/panic-runtime/lto-abort.rs create mode 100644 src/test/run-pass/panic-runtime/lto-unwind.rs create mode 100644 src/test/run-pass/rfc1592-deprecated.rs create mode 100644 src/test/run-pass/specialization/auxiliary/go_trait.rs rename src/test/{ => run-pass/specialization}/auxiliary/specialization_cross_crate.rs (100%) 
rename src/test/{ => run-pass/specialization}/auxiliary/specialization_cross_crate_defaults.rs (100%) mode change 100755 => 100644 create mode 100644 src/test/run-pass/sse2.rs create mode 100644 src/test/run-pass/trait-object-exclusion.rs create mode 100644 src/test/run-pass/use-keyword-2.rs rename src/test/{ => rustdoc}/auxiliary/empty.rs (100%) rename src/test/{ => rustdoc}/auxiliary/inline-default-methods.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-13698.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-15318.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-17476.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-19190-3.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-20646.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-20727.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-21092.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-21801.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-22025.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-23207-1.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-23207-2.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-26606-macro.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-27362.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-28927-1.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-28927-2.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-29584.rs (100%) rename src/test/{ => rustdoc}/auxiliary/issue-30109-1.rs (100%) rename src/test/{ => rustdoc}/auxiliary/reexp_stripped.rs (100%) rename src/test/{ => rustdoc}/auxiliary/rustdoc-default-impl.rs (100%) rename src/test/{ => rustdoc}/auxiliary/rustdoc-extern-default-method.rs (100%) rename src/test/{ => rustdoc}/auxiliary/rustdoc-extern-method.rs (100%) rename src/test/{ => rustdoc}/auxiliary/rustdoc-ffi.rs (100%) rename src/test/{ => rustdoc}/auxiliary/rustdoc-impl-parts-crosscrate.rs (100%) rename src/test/{ => rustdoc}/auxiliary/variant-struct.rs (100%) create mode 100644 src/test/rustdoc/duplicate_impls/impls.rs create mode 100644 src/test/rustdoc/duplicate_impls/issue-33054.rs create mode 100644 src/test/rustdoc/escape-rust-expr.rs create mode 100644 src/test/rustdoc/extern-impl.rs create mode 100644 src/test/rustdoc/inline_cross/auxiliary/issue-33113.rs create mode 100644 src/test/rustdoc/inline_cross/auxiliary/rustdoc-hidden-sig.rs create mode 100644 src/test/rustdoc/inline_cross/auxiliary/rustdoc-hidden.rs create mode 100644 src/test/rustdoc/inline_cross/auxiliary/rustdoc-nonreachable-impls.rs create mode 100644 src/test/rustdoc/inline_cross/auxiliary/rustdoc-trait-object-impl.rs create mode 100644 src/test/rustdoc/inline_cross/inline_hidden.rs create mode 100644 src/test/rustdoc/inline_cross/issue-28480.rs create mode 100644 src/test/rustdoc/inline_cross/issue-31948-1.rs create mode 100644 src/test/rustdoc/inline_cross/issue-31948-2.rs create mode 100644 src/test/rustdoc/inline_cross/issue-31948.rs create mode 100644 src/test/rustdoc/inline_cross/issue-32881.rs create mode 100644 src/test/rustdoc/inline_cross/issue-33113.rs create mode 100644 src/test/rustdoc/issue-12834.rs create mode 100644 src/test/rustdoc/issue-25944.rs create mode 100644 src/test/rustdoc/issue-29503.rs create mode 100644 src/test/rustdoc/issue-32374.rs create mode 100644 src/test/rustdoc/issue-32890.rs create mode 100644 src/test/rustdoc/issue-33069.rs create mode 100644 src/test/rustdoc/issue-33178-1.rs create mode 100644 src/test/rustdoc/issue-33178.rs create mode 100644 src/test/rustdoc/issue-33302.rs create 
mode 100644 src/test/rustdoc/issue-33592.rs create mode 100644 src/test/rustdoc/manual_impl.rs create mode 100644 src/test/rustdoc/no-run-still-checks-lints.rs create mode 100644 src/test/rustdoc/trait-self-link.rs create mode 100644 src/test/ui/README.md create mode 100644 src/test/ui/hello_world/main.rs create mode 100644 src/test/ui/mismatched_types/main.rs create mode 100644 src/test/ui/mismatched_types/main.stderr create mode 100755 src/test/ui/update-all-references.sh create mode 100755 src/test/ui/update-references.sh create mode 100644 src/tools/compiletest/Cargo.lock create mode 100644 src/tools/compiletest/Cargo.toml create mode 100644 src/tools/compiletest/build.rs rename src/{compiletest => tools/compiletest/src}/common.rs (87%) rename src/{compiletest => tools/compiletest/src}/errors.rs (78%) create mode 100644 src/tools/compiletest/src/header.rs create mode 100644 src/tools/compiletest/src/json.rs rename src/{compiletest/compiletest.rs => tools/compiletest/src/main.rs} (80%) rename src/{compiletest => tools/compiletest/src}/procsrv.rs (96%) rename src/{compiletest => tools/compiletest/src}/raise_fd_limit.rs (100%) create mode 100644 src/tools/compiletest/src/runtest.rs create mode 100644 src/tools/compiletest/src/uidiff.rs rename src/{compiletest => tools/compiletest/src}/util.rs (100%) create mode 100644 src/tools/tidy/Cargo.lock create mode 100644 src/tools/tidy/Cargo.toml create mode 100644 src/tools/tidy/src/bins.rs create mode 100644 src/tools/tidy/src/cargo.rs create mode 100644 src/tools/tidy/src/cargo_lock.rs create mode 100644 src/tools/tidy/src/errors.rs create mode 100644 src/tools/tidy/src/features.rs create mode 100644 src/tools/tidy/src/main.rs create mode 100644 src/tools/tidy/src/style.rs diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 10598e78ec..9d61feef81 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -307,7 +307,7 @@ are: [gsearchdocs]: https://www.google.com/search?q=site:doc.rust-lang.org+your+query+here [rif]: http://internals.rust-lang.org [rr]: https://doc.rust-lang.org/book/README.html -[tlgba]: http://tomlee.co/2014/04/03/a-more-detailed-tour-of-the-rust-compiler/ +[tlgba]: http://tomlee.co/2014/04/a-more-detailed-tour-of-the-rust-compiler/ [ro]: http://www.rustaceans.org/ [rctd]: ./COMPILER_TESTS.md [cheatsheet]: http://buildbot.rust-lang.org/homu/ diff --git a/Makefile.in b/Makefile.in index 9bc4463922..7425e9bd73 100644 --- a/Makefile.in +++ b/Makefile.in @@ -59,9 +59,8 @@ # * check-stage$(stage)-$(crate) - Test a crate in a specific stage # * check-stage$(stage)-{rpass,rfail,cfail,rmake,...} - Run tests in src/test/ # * check-stage1-T-$(target)-H-$(host) - Run cross-compiled-tests -# * tidy-basic - show file / line stats -# * tidy-errors - show the highest rustc error code -# * tidy-features - show the status of language and lib features +# * tidy - Basic style check, show highest rustc error code and +# the status of language and lib features # * rustc-stage$(stage) - Only build up to a specific stage # # Then mix in some of these environment variables to harness the @@ -108,6 +107,20 @@ # # run `make nitty-gritty` # +# # Make command examples +# +# ## Docs linked commands +# +# * make check-stage1-rustdocck: Builds rustdoc. It has the advantage to compile +# quite quickly since we're only using stage1 +# executables. +# * make doc/error-index.md: Gets all doc blocks from doc comments and error +# explanations to put them in a markdown file. You +# can then test them by running +# "rustdoc --test error-index.md". 
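A short sketch of that error-index workflow as shell commands (run from the configured build directory; adjust the path to the generated file if your layout differs):

```
# Build the markdown error index, then have rustdoc test its code blocks.
make doc/error-index.md
rustdoc --test error-index.md
```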
+# +# And of course, the wonderfully useful 'make tidy'! Always run it before opening a pull request to rust! +# # # # @@ -214,13 +227,6 @@ include $(CFG_SRC_DIR)mk/debuggers.mk # Secondary makefiles, conditionalized for speed ###################################################################### -# Binary snapshots -ifneq ($(strip $(findstring snap,$(MAKECMDGOALS)) \ - $(findstring clean,$(MAKECMDGOALS))),) - CFG_INFO := $(info cfg: including snap rules) - include $(CFG_SRC_DIR)mk/snap.mk -endif - # The test suite ifneq ($(strip $(findstring check,$(MAKECMDGOALS)) \ $(findstring test,$(MAKECMDGOALS)) \ @@ -264,3 +270,9 @@ ifneq ($(strip $(findstring TAGS.emacs,$(MAKECMDGOALS)) \ CFG_INFO := $(info cfg: including ctags rules) include $(CFG_SRC_DIR)mk/ctags.mk endif + +.DEFAULT: + @echo "\n======================================================" + @echo "== If you need help, run 'make help' or 'make tips' ==" + @echo "======================================================\n" + exit 1 diff --git a/README.md b/README.md index 1acf5fd1f3..4e476b4f35 100644 --- a/README.md +++ b/README.md @@ -16,7 +16,7 @@ Read ["Installing Rust"] from [The Book]. 1. Make sure you have installed the dependencies: - * `g++` 4.7 or `clang++` 3.x + * `g++` 4.7 or later or `clang++` 3.x * `python` 2.7 (but not 3.x) * GNU `make` 3.81 or later * `curl` diff --git a/RELEASES.md b/RELEASES.md index b19f4b07a3..ffe8d64ff2 100644 --- a/RELEASES.md +++ b/RELEASES.md @@ -1,3 +1,315 @@ +Version 1.10.0 (2016-07-07) +=========================== + +Language +-------- + +* [Allow `concat_idents!` in type positions as well as in expression + positions] + (https://github.com/rust-lang/rust/pull/33735). +* [`Copy` types are required to have a trivial implementation of `Clone`] + (https://github.com/rust-lang/rust/pull/33420). + [RFC 1521](https://github.com/rust-lang/rfcs/blob/master/text/1521-copy-clone-semantics.md). +* [Single-variant enums support the `#[repr(..)]` attribute] + (https://github.com/rust-lang/rust/pull/33355). +* [Fix `#[derive(RustcEncodable)]` in the presence of other `encode` methods] + (https://github.com/rust-lang/rust/pull/32908). +* [`panic!` can be converted to a runtime abort with the + `-C panic=abort` flag] + (https://github.com/rust-lang/rust/pull/32900). + [RFC 1513](https://github.com/rust-lang/rfcs/blob/master/text/1513-less-unwinding.md). +* [Add a new crate type, 'cdylib'] + (https://github.com/rust-lang/rust/pull/33553). + cdylibs are dynamic libraries suitable for loading by non-Rust hosts. + [RFC 1510](https://github.com/rust-lang/rfcs/blob/master/text/1510-rdylib.md). + Note that Cargo does not yet directly support cdylibs. 
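As a rough illustration of the two compiler options introduced above, both can be passed straight to `rustc`; the source file names here are hypothetical:

```
# Build a dynamic library meant to be loaded by a non-Rust host
# (Cargo has no direct cdylib support yet, so invoke rustc directly).
rustc --crate-type cdylib mylib.rs

# Turn every panic! into a runtime abort instead of unwinding.
rustc -C panic=abort main.rs
```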
+ +Stabilized APIs +--------------- + +* `os::windows::fs::OpenOptionsExt::access_mode` +* `os::windows::fs::OpenOptionsExt::share_mode` +* `os::windows::fs::OpenOptionsExt::custom_flags` +* `os::windows::fs::OpenOptionsExt::attributes` +* `os::windows::fs::OpenOptionsExt::security_qos_flags` +* `os::unix::fs::OpenOptionsExt::custom_flags` +* [`sync::Weak::new`] + (http://doc.rust-lang.org/alloc/arc/struct.Weak.html#method.new) +* `Default for sync::Weak` +* [`panic::set_hook`] + (http://doc.rust-lang.org/std/panic/fn.set_hook.html) +* [`panic::take_hook`] + (http://doc.rust-lang.org/std/panic/fn.take_hook.html) +* [`panic::PanicInfo`] + (http://doc.rust-lang.org/std/panic/struct.PanicInfo.html) +* [`panic::PanicInfo::payload`] + (http://doc.rust-lang.org/std/panic/struct.PanicInfo.html#method.payload) +* [`panic::PanicInfo::location`] + (http://doc.rust-lang.org/std/panic/struct.PanicInfo.html#method.location) +* [`panic::Location`] + (http://doc.rust-lang.org/std/panic/struct.Location.html) +* [`panic::Location::file`] + (http://doc.rust-lang.org/std/panic/struct.Location.html#method.file) +* [`panic::Location::line`] + (http://doc.rust-lang.org/std/panic/struct.Location.html#method.line) +* [`ffi::CStr::from_bytes_with_nul`] + (http://doc.rust-lang.org/std/ffi/struct.CStr.html#method.from_bytes_with_nul) +* [`ffi::CStr::from_bytes_with_nul_unchecked`] + (http://doc.rust-lang.org/std/ffi/struct.CStr.html#method.from_bytes_with_nul_unchecked) +* [`ffi::FromBytesWithNulError`] + (http://doc.rust-lang.org/std/ffi/struct.FromBytesWithNulError.html) +* [`fs::Metadata::modified`] + (http://doc.rust-lang.org/std/fs/struct.Metadata.html#method.modified) +* [`fs::Metadata::accessed`] + (http://doc.rust-lang.org/std/fs/struct.Metadata.html#method.accessed) +* [`fs::Metadata::created`] + (http://doc.rust-lang.org/std/fs/struct.Metadata.html#method.created) +* `sync::atomic::Atomic{Usize,Isize,Bool,Ptr}::compare_exchange` +* `sync::atomic::Atomic{Usize,Isize,Bool,Ptr}::compare_exchange_weak` +* `collections::{btree,hash}_map::{Occupied,Vacant,}Entry::key` +* `os::unix::net::{UnixStream, UnixListener, UnixDatagram, SocketAddr}` +* [`SocketAddr::is_unnamed`] + (http://doc.rust-lang.org/std/os/unix/net/struct.SocketAddr.html#method.is_unnamed) +* [`SocketAddr::as_pathname`] + (http://doc.rust-lang.org/std/os/unix/net/struct.SocketAddr.html#method.as_pathname) +* [`UnixStream::connect`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.connect) +* [`UnixStream::pair`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.pair) +* [`UnixStream::try_clone`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.try_clone) +* [`UnixStream::local_addr`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.local_addr) +* [`UnixStream::peer_addr`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.peer_addr) +* [`UnixStream::set_read_timeout`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.read_timeout) +* [`UnixStream::set_write_timeout`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.write_timeout) +* [`UnixStream::read_timeout`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.read_timeout) +* [`UnixStream::write_timeout`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.write_timeout) +* [`UnixStream::set_nonblocking`] + 
(http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.set_nonblocking) +* [`UnixStream::take_error`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.take_error) +* [`UnixStream::shutdown`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixStream.html#method.shutdown) +* Read/Write/RawFd impls for `UnixStream` +* [`UnixListener::bind`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixListener.html#method.bind) +* [`UnixListener::accept`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixListener.html#method.accept) +* [`UnixListener::try_clone`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixListener.html#method.try_clone) +* [`UnixListener::local_addr`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixListener.html#method.local_addr) +* [`UnixListener::set_nonblocking`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixListener.html#method.set_nonblocking) +* [`UnixListener::take_error`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixListener.html#method.take_error) +* [`UnixListener::incoming`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixListener.html#method.incoming) +* RawFd impls for `UnixListener` +* [`UnixDatagram::bind`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.bind) +* [`UnixDatagram::unbound`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.unbound) +* [`UnixDatagram::pair`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.pair) +* [`UnixDatagram::connect`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.connect) +* [`UnixDatagram::try_clone`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.try_clone) +* [`UnixDatagram::local_addr`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.local_addr) +* [`UnixDatagram::peer_addr`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.peer_addr) +* [`UnixDatagram::recv_from`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.recv_from) +* [`UnixDatagram::recv`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.recv) +* [`UnixDatagram::send_to`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.send_to) +* [`UnixDatagram::send`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.send) +* [`UnixDatagram::set_read_timeout`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.set_read_timeout) +* [`UnixDatagram::set_write_timeout`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.set_write_timeout) +* [`UnixDatagram::read_timeout`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.read_timeout) +* [`UnixDatagram::write_timeout`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.write_timeout) +* [`UnixDatagram::set_nonblocking`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.set_nonblocking) +* [`UnixDatagram::take_error`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.take_error) +* [`UnixDatagram::shutdown`] + (http://doc.rust-lang.org/std/os/unix/net/struct.UnixDatagram.html#method.shutdown) +* RawFd impls for `UnixDatagram` +* `{BTree,Hash}Map::values_mut` +* [`<[_]>::binary_search_by_key`] + (http://doc.rust-lang.org/beta/std/primitive.slice.html#method.binary_search_by_key) + +Libraries 
+--------- + +* [The `abs_sub` method of floats is deprecated] + (https://github.com/rust-lang/rust/pull/33664). + The semantics of this minor method are subtle and probably not what + most people want. +* [Add implementation of Ord for Cell<T> and RefCell<T> where T: Ord] + (https://github.com/rust-lang/rust/pull/33306). +* [On Linux, if `HashMap`s can't be initialized with `getrandom` they + will fall back to `/dev/urandom` temporarily to avoid blocking + during early boot] + (https://github.com/rust-lang/rust/pull/33086). +* [Implemented negation for wrapping numerals] + (https://github.com/rust-lang/rust/pull/33067). +* [Implement `Clone` for `binary_heap::IntoIter`] + (https://github.com/rust-lang/rust/pull/33050). +* [Implement `Display` and `Hash` for `std::num::Wrapping`] + (https://github.com/rust-lang/rust/pull/33023). +* [Add `Default` implementation for `&CStr`, `CString`, `Path`] + (https://github.com/rust-lang/rust/pull/32990). +* [Implement `From<Vec<T>>` and `Into<Vec<T>>` for `VecDeque<T>`] + (https://github.com/rust-lang/rust/pull/32866). +* [Implement `Default` for `UnsafeCell`, `fmt::Error`, `Condvar`, + `Mutex`, `RwLock`] + (https://github.com/rust-lang/rust/pull/32785). + +Cargo +----- +* [Cargo.toml supports the `profile.*.panic` option] + (https://github.com/rust-lang/cargo/pull/2687). + This controls the runtime behavior of the `panic!` macro + and can be either "unwind" (the default), or "abort". + [RFC 1513](https://github.com/rust-lang/rfcs/blob/master/text/1513-less-unwinding.md). +* [Don't throw away errors with `-p` arguments] + (https://github.com/rust-lang/cargo/pull/2723). +* [Report status to stderr instead of stdout] + (https://github.com/rust-lang/cargo/pull/2693). +* [Build scripts are passed a `CARGO_MANIFEST_LINKS` environment + variable that corresponds to the `links` field of the manifest] + (https://github.com/rust-lang/cargo/pull/2710). +* [Ban keywords from crate names] + (https://github.com/rust-lang/cargo/pull/2707). +* [Canonicalize `CARGO_HOME` on Windows] + (https://github.com/rust-lang/cargo/pull/2604). +* [Retry network requests] + (https://github.com/rust-lang/cargo/pull/2396). + By default they are retried twice, which can be customized with the + `net.retry` value in `.cargo/config`. +* [Don't print extra error info for failing subcommands] + (https://github.com/rust-lang/cargo/pull/2674). +* [Add `--force` flag to `cargo install`] + (https://github.com/rust-lang/cargo/pull/2405). +* [Don't use `flock` on NFS mounts] + (https://github.com/rust-lang/cargo/pull/2623). +* [Prefer building `cargo install` artifacts in temporary directories] + (https://github.com/rust-lang/cargo/pull/2610). + Makes it possible to install multiple crates in parallel. +* [Add `cargo test --doc`] + (https://github.com/rust-lang/cargo/pull/2578). +* [Add `cargo --explain`] + (https://github.com/rust-lang/cargo/pull/2551). +* [Don't print warnings when `-q` is passed] + (https://github.com/rust-lang/cargo/pull/2576). +* [Add `cargo doc --lib` and `--bin`] + (https://github.com/rust-lang/cargo/pull/2577). +* [Don't require build script output to be UTF-8] + (https://github.com/rust-lang/cargo/pull/2560). +* [Correctly attempt multiple git usernames] + (https://github.com/rust-lang/cargo/pull/2584). + +Performance +----------- + +* [rustc memory usage was reduced by refactoring the context used for + type checking] + (https://github.com/rust-lang/rust/pull/33425).
+* [Speed up creation of `HashMap`s by caching the random keys used + to initialize the hash state] + (https://github.com/rust-lang/rust/pull/33318). +* [The `find` implementation for `Chain` iterators is 2x faster] + (https://github.com/rust-lang/rust/pull/33289). +* [Trait selection optimizations speed up type checking by 15%] + (https://github.com/rust-lang/rust/pull/33138). +* [Efficient trie lookup for boolean Unicode properties] + (https://github.com/rust-lang/rust/pull/33098). + 10x faster than the previous lookup tables. +* [Special case `#[derive(Copy, Clone)]` to avoid bloat] + (https://github.com/rust-lang/rust/pull/31414). + +Usability +--------- + +* Many incremental improvements to documentation and rustdoc. +* [rustdoc: List blanket trait impls] + (https://github.com/rust-lang/rust/pull/33514). +* [rustdoc: Clean up ABI rendering] + (https://github.com/rust-lang/rust/pull/33151). +* [Indexing with the wrong type produces a more informative error] + (https://github.com/rust-lang/rust/pull/33401). +* [Improve diagnostics for constants being used in irrefutable patterns] + (https://github.com/rust-lang/rust/pull/33406). +* [When many method candidates are in scope limit the suggestions to 10] + (https://github.com/rust-lang/rust/pull/33338). +* [Remove confusing suggestion when calling a `fn` type] + (https://github.com/rust-lang/rust/pull/33325). +* [Do not suggest changing `&mut self` to `&mut mut self`] + (https://github.com/rust-lang/rust/pull/33319). + +Misc +---- + +* [Update i686-linux-android features to match Android ABI] + (https://github.com/rust-lang/rust/pull/33651). +* [Update aarch64-linux-android features to match Android ABI] + (https://github.com/rust-lang/rust/pull/33500). +* [`std` no longer prints backtraces on platforms where the running + module must be loaded with `env::current_exe`, which can't be relied + on](https://github.com/rust-lang/rust/pull/33554). +* This release includes std binaries for the i586-unknown-linux-gnu, + i686-unknown-linux-musl, and armv7-linux-androideabi targets. The + i586 target is for old x86 hardware without SSE2, and the armv7 + target is for Android running on modern ARM architectures. +* [The `rust-gdb` and `rust-lldb` scripts are distributed on all + Unix platforms](https://github.com/rust-lang/rust/pull/32835). +* [On Unix the runtime aborts by calling `libc::abort` instead of + generating an illegal instruction] + (https://github.com/rust-lang/rust/pull/31457). +* [Rust is now bootstrapped from the previous release of Rust, + instead of a snapshot from an arbitrary commit] + (https://github.com/rust-lang/rust/pull/32942). + +Compatibility Notes +------------------- + +* [`AtomicBool` is now bool-sized, not word-sized] + (https://github.com/rust-lang/rust/pull/33579). +* [`target_env` for Linux ARM targets is just `gnu`, not + `gnueabihf`, `gnueabi`, etc] + (https://github.com/rust-lang/rust/pull/33403). +* [Consistently panic on overflow in `Duration::new`] + (https://github.com/rust-lang/rust/pull/33072). +* [Change `String::truncate` to panic less] + (https://github.com/rust-lang/rust/pull/32977). +* [Add `:block` to the follow set for `:ty` and `:path`] + (https://github.com/rust-lang/rust/pull/32945). + Affects how macros are parsed. +* [Fix macro hygiene bug] + (https://github.com/rust-lang/rust/pull/32923). +* [Feature-gated attributes on macro-generated macro invocations are + now rejected] + (https://github.com/rust-lang/rust/pull/32791). 
+* [Suppress fallback and ambiguity errors during type inference] + (https://github.com/rust-lang/rust/pull/32258). + This caused some minor changes to type inference. + + Version 1.9.0 (2016-05-26) ========================== @@ -140,9 +452,9 @@ Cargo Performance ----------- -* [During type unification, the complexity of comparing variables for - equivalance was reduced from `O(n!)` to `O(n)`][1.9tu]. This leads - to major compile-time improvements in some scenarios. +* [The time complexity of comparing variables for equivalence during type + unification is reduced from _O_(_n_!) to _O_(_n_)][1.9tu]. This leads + to major compilation time improvement in some scenarios. * [`ToString` is specialized for `str`, giving it the same performance as `to_owned`][1.9ts]. * [Spawning processes with `Command::output` no longer creates extra @@ -418,7 +730,7 @@ Compatibility Notes numbers [no longer return platform-specific types][1.8r], but instead return widened integers. [RFC 1415]. * [Modules sourced from the filesystem cannot appear within arbitrary - blocks, but only within other modules][1.8m]. + blocks, but only within other modules][1.8mf]. * [`--cfg` compiler flags are parsed strictly as identifiers][1.8c]. * On Unix, [stack overflow triggers a runtime abort instead of a SIGSEGV][1.8so]. @@ -448,7 +760,7 @@ Compatibility Notes [1.8h]: https://github.com/rust-lang/rust/pull/31460 [1.8l]: https://github.com/rust-lang/rust/pull/31668 [1.8m]: https://github.com/rust-lang/rust/pull/31020 -[1.8m]: https://github.com/rust-lang/rust/pull/31534 +[1.8mf]: https://github.com/rust-lang/rust/pull/31534 [1.8mp]: https://github.com/rust-lang/rust/pull/30894 [1.8mr]: https://users.rust-lang.org/t/multirust-0-8-with-cross-std-installation/4901 [1.8ms]: https://github.com/rust-lang/rust/pull/30448 diff --git a/configure b/configure index fdef550a64..d4178963f8 100755 --- a/configure +++ b/configure @@ -609,6 +609,7 @@ opt llvm-version-check 1 "check if the LLVM version is supported, build anyway" opt rustbuild 0 "use the rust and cargo based build system" opt orbit 0 "get MIR where it belongs - everywhere; most importantly, in orbit" opt codegen-tests 1 "run the src/test/codegen tests" +opt option-checking 1 "complain about unrecognized options in this configure script" # Optimization and debugging options. These may be overridden by the release channel, etc. opt_nosave optimize 1 "build optimized rust code" @@ -631,6 +632,7 @@ valopt build "${DEFAULT_BUILD}" "GNUs ./configure syntax LLVM build triple" valopt android-cross-path "" "Android NDK standalone path (deprecated)" valopt i686-linux-android-ndk "" "i686-linux-android NDK standalone path" valopt arm-linux-androideabi-ndk "" "arm-linux-androideabi NDK standalone path" +valopt armv7-linux-androideabi-ndk "" "armv7-linux-androideabi NDK standalone path" valopt aarch64-linux-android-ndk "" "aarch64-linux-android NDK standalone path" valopt nacl-cross-path "" "NaCl SDK path (Pepper Canary is recommended). Must be absolute!" valopt release-channel "dev" "the name of the release channel to build" @@ -674,8 +676,11 @@ then fi # Validate Options -step_msg "validating $CFG_SELF args" -validate_opt +if [ -z "$CFG_DISABLE_OPTION_CHECKING" ] +then + step_msg "validating $CFG_SELF args" + validate_opt +fi # Validate the release channel, and configure options case "$CFG_RELEASE_CHANNEL" in @@ -819,6 +824,19 @@ then fi fi +# LLDB tests on OSX require /usr/bin/python, not something like Homebrew's +# /usr/local/bin/python. 
We're loading a compiled module for LLDB tests which is +# only compatible with the system. +case $CFG_BUILD in + *-apple-darwin) + CFG_LLDB_PYTHON=/usr/bin/python + ;; + *) + CFG_LLDB_PYTHON=$CFG_PYTHON + ;; +esac +putvar CFG_LLDB_PYTHON + step_msg "looking for target specific programs" probe CFG_ADB adb @@ -1010,37 +1028,6 @@ if [ -n "$CFG_ENABLE_CLANG" ] then case "$CC" in (''|*clang) - CFG_CLANG_REPORTED_VERSION=$($CFG_CC --version | grep version) - - if echo $CFG_CLANG_REPORTED_VERSION | grep -q "(based on LLVM "; then - CFG_CLANG_VERSION=$(echo $CFG_CLANG_REPORTED_VERSION | sed 's/.*(based on LLVM \(.*\))/\1/') - elif echo $CFG_CLANG_REPORTED_VERSION | grep -q "Apple LLVM"; then - CFG_OSX_CLANG_VERSION=$(echo $CFG_CLANG_REPORTED_VERSION | sed 's/.*version \(.*\) .*/\1/') - else - CFG_CLANG_VERSION=$(echo $CFG_CLANG_REPORTED_VERSION | sed 's/.*version \(.*\) .*/\1/') - fi - - if [ -n "$CFG_OSX_CLANG_VERSION" ] - then - case $CFG_OSX_CLANG_VERSION in - (7.0* | 7.1* | 7.2* | 7.3*) - step_msg "found ok version of APPLE CLANG: $CFG_OSX_CLANG_VERSION" - ;; - (*) - err "bad APPLE CLANG version: $CFG_OSX_CLANG_VERSION, need >=7.0" - ;; - esac - else - case $CFG_CLANG_VERSION in - (3.2* | 3.3* | 3.4* | 3.5* | 3.6* | 3.7* | 3.8* | 3.9*) - step_msg "found ok version of CLANG: $CFG_CLANG_VERSION" - ;; - (*) - err "bad CLANG version: $CFG_CLANG_VERSION, need >=3.0svn" - ;; - esac - fi - if [ -z "$CC" ] then CFG_CC="clang" @@ -1127,6 +1114,15 @@ do case $i in *android*) + case $i in + armv7-linux-androideabi) + cmd_prefix="arm-linux-androideabi" + ;; + *) + cmd_prefix=$i + ;; + esac + upper_snake_target=$(echo "$i" | tr '[:lower:]' '[:upper:]' | tr '\-' '\_') eval ndk=\$"CFG_${upper_snake_target}_NDK" if [ -z "$ndk" ] @@ -1137,7 +1133,7 @@ do fi # Perform a basic sanity check of the NDK - for android_ndk_tool in "$ndk/bin/$i-gcc" "$ndk/bin/$i-g++" "$ndk/bin/$i-ar" + for android_ndk_tool in "$ndk/bin/$cmd_prefix-gcc" "$ndk/bin/$cmd_prefix-g++" "$ndk/bin/$cmd_prefix-ar" do if [ ! -f $android_ndk_tool ] then @@ -1436,6 +1432,19 @@ then cd ${CFG_BUILD_DIR} fi +# Do a sanity check that the submodule source exists. Because GitHub +# automatically publishes broken tarballs that can't be disabled, and +# people download them and try to use them. +if [ ! -e "${CFG_SRC_DIR}/src/liblibc" ]; then + err "some submodules are missing. Is this a broken tarball? 
+ +If you downloaded this tarball from the GitHub release pages at +https://github.com/rust-lang/rust/releases, +then please delete it and instead download the source from +https://www.rust-lang.org/downloads.html" + +fi + # Configure llvm, only if necessary step_msg "looking at LLVM" CFG_LLVM_SRC_DIR=${CFG_SRC_DIR}src/llvm/ @@ -1756,6 +1765,7 @@ putvar CFG_LIBDIR_RELATIVE putvar CFG_DISABLE_MANAGE_SUBMODULES putvar CFG_AARCH64_LINUX_ANDROID_NDK putvar CFG_ARM_LINUX_ANDROIDEABI_NDK +putvar CFG_ARMV7_LINUX_ANDROIDEABI_NDK putvar CFG_I686_LINUX_ANDROID_NDK putvar CFG_NACL_CROSS_PATH putvar CFG_MANDIR diff --git a/mk/cfg/armv7-linux-androideabi.mk b/mk/cfg/armv7-linux-androideabi.mk new file mode 100644 index 0000000000..e5bf2e4df7 --- /dev/null +++ b/mk/cfg/armv7-linux-androideabi.mk @@ -0,0 +1,25 @@ +# armv7-linux-androideabi configuration +CC_armv7-linux-androideabi=$(CFG_ARMV7_LINUX_ANDROIDEABI_NDK)/bin/arm-linux-androideabi-gcc +CXX_armv7-linux-androideabi=$(CFG_ARMV7_LINUX_ANDROIDEABI_NDK)/bin/arm-linux-androideabi-g++ +CPP_armv7-linux-androideabi=$(CFG_ARMV7_LINUX_ANDROIDEABI_NDK)/bin/arm-linux-androideabi-gcc -E +AR_armv7-linux-androideabi=$(CFG_ARMV7_LINUX_ANDROIDEABI_NDK)/bin/arm-linux-androideabi-ar +CFG_LIB_NAME_armv7-linux-androideabi=lib$(1).so +CFG_STATIC_LIB_NAME_armv7-linux-androideabi=lib$(1).a +CFG_LIB_GLOB_armv7-linux-androideabi=lib$(1)-*.so +CFG_LIB_DSYM_GLOB_armv7-linux-androideabi=lib$(1)-*.dylib.dSYM +CFG_JEMALLOC_CFLAGS_armv7-linux-androideabi := -D__arm__ -DANDROID -D__ANDROID__ $(CFLAGS) +CFG_GCCISH_CFLAGS_armv7-linux-androideabi := -Wall -g -fPIC -D__arm__ -mfloat-abi=softfp -march=armv7-a -mfpu=vfpv3-d16 -DANDROID -D__ANDROID__ $(CFLAGS) +CFG_GCCISH_CXXFLAGS_armv7-linux-androideabi := -fno-rtti $(CXXFLAGS) +CFG_GCCISH_LINK_FLAGS_armv7-linux-androideabi := -shared -fPIC -ldl -g -lm -lsupc++ +CFG_GCCISH_DEF_FLAG_armv7-linux-androideabi := -Wl,--export-dynamic,--dynamic-list= +CFG_LLC_FLAGS_armv7-linux-androideabi := +CFG_INSTALL_NAME_armv7-linux-androideabi = +CFG_EXE_SUFFIX_armv7-linux-androideabi := +CFG_WINDOWSY_armv7-linux-androideabi := +CFG_UNIXY_armv7-linux-androideabi := 1 +CFG_LDPATH_armv7-linux-androideabi := +CFG_RUN_armv7-linux-androideabi= +CFG_RUN_TARG_armv7-linux-androideabi= +RUSTC_FLAGS_armv7-linux-androideabi := +RUSTC_CROSS_FLAGS_armv7-linux-androideabi := +CFG_GNU_TRIPLE_armv7-linux-androideabi := arm-linux-androideabi diff --git a/mk/cfg/i586-pc-windows-msvc.mk b/mk/cfg/i586-pc-windows-msvc.mk index da2680f741..48f1ecec3a 100644 --- a/mk/cfg/i586-pc-windows-msvc.mk +++ b/mk/cfg/i586-pc-windows-msvc.mk @@ -1,16 +1,16 @@ # i586-pc-windows-msvc configuration -CC_i586-pc-windows-msvc="$(CFG_MSVC_CL_i386)" -nologo -LINK_i586-pc-windows-msvc="$(CFG_MSVC_LINK_i386)" -nologo -CXX_i586-pc-windows-msvc="$(CFG_MSVC_CL_i386)" -nologo -CPP_i586-pc-windows-msvc="$(CFG_MSVC_CL_i386)" -nologo -AR_i586-pc-windows-msvc="$(CFG_MSVC_LIB_i386)" -nologo +CC_i586-pc-windows-msvc=$(CFG_MSVC_CL_i386) +LINK_i586-pc-windows-msvc=$(CFG_MSVC_LINK_i386) +CXX_i586-pc-windows-msvc=$(CFG_MSVC_CL_i386) +CPP_i586-pc-windows-msvc=$(CFG_MSVC_CL_i386) +AR_i586-pc-windows-msvc=$(CFG_MSVC_LIB_i386) CFG_LIB_NAME_i586-pc-windows-msvc=$(1).dll CFG_STATIC_LIB_NAME_i586-pc-windows-msvc=$(1).lib CFG_LIB_GLOB_i586-pc-windows-msvc=$(1)-*.{dll,lib} CFG_LIB_DSYM_GLOB_i586-pc-windows-msvc=$(1)-*.dylib.dSYM CFG_JEMALLOC_CFLAGS_i586-pc-windows-msvc := -CFG_GCCISH_CFLAGS_i586-pc-windows-msvc := -MD -arch:IA32 -CFG_GCCISH_CXXFLAGS_i586-pc-windows-msvc := -MD -arch:IA32 
+CFG_GCCISH_CFLAGS_i586-pc-windows-msvc := -MD -arch:IA32 -nologo +CFG_GCCISH_CXXFLAGS_i586-pc-windows-msvc := -MD -arch:IA32 -nologo CFG_GCCISH_LINK_FLAGS_i586-pc-windows-msvc := CFG_GCCISH_DEF_FLAG_i586-pc-windows-msvc := CFG_LLC_FLAGS_i586-pc-windows-msvc := diff --git a/mk/cfg/i686-pc-windows-msvc.mk b/mk/cfg/i686-pc-windows-msvc.mk index 85b320a18a..b0289b9892 100644 --- a/mk/cfg/i686-pc-windows-msvc.mk +++ b/mk/cfg/i686-pc-windows-msvc.mk @@ -1,16 +1,16 @@ # i686-pc-windows-msvc configuration -CC_i686-pc-windows-msvc="$(CFG_MSVC_CL_i386)" -nologo -LINK_i686-pc-windows-msvc="$(CFG_MSVC_LINK_i386)" -nologo -CXX_i686-pc-windows-msvc="$(CFG_MSVC_CL_i386)" -nologo -CPP_i686-pc-windows-msvc="$(CFG_MSVC_CL_i386)" -nologo -AR_i686-pc-windows-msvc="$(CFG_MSVC_LIB_i386)" -nologo +CC_i686-pc-windows-msvc=$(CFG_MSVC_CL_i386) +LINK_i686-pc-windows-msvc=$(CFG_MSVC_LINK_i386) +CXX_i686-pc-windows-msvc=$(CFG_MSVC_CL_i386) +CPP_i686-pc-windows-msvc=$(CFG_MSVC_CL_i386) +AR_i686-pc-windows-msvc=$(CFG_MSVC_LIB_i386) CFG_LIB_NAME_i686-pc-windows-msvc=$(1).dll CFG_STATIC_LIB_NAME_i686-pc-windows-msvc=$(1).lib CFG_LIB_GLOB_i686-pc-windows-msvc=$(1)-*.{dll,lib} CFG_LIB_DSYM_GLOB_i686-pc-windows-msvc=$(1)-*.dylib.dSYM CFG_JEMALLOC_CFLAGS_i686-pc-windows-msvc := -CFG_GCCISH_CFLAGS_i686-pc-windows-msvc := -MD -CFG_GCCISH_CXXFLAGS_i686-pc-windows-msvc := -MD +CFG_GCCISH_CFLAGS_i686-pc-windows-msvc := -MD -nologo +CFG_GCCISH_CXXFLAGS_i686-pc-windows-msvc := -MD -nologo CFG_GCCISH_LINK_FLAGS_i686-pc-windows-msvc := CFG_GCCISH_DEF_FLAG_i686-pc-windows-msvc := CFG_LLC_FLAGS_i686-pc-windows-msvc := diff --git a/mk/cfg/i686-unknown-linux-musl.mk b/mk/cfg/i686-unknown-linux-musl.mk index ac05798c76..4c64402a73 100644 --- a/mk/cfg/i686-unknown-linux-musl.mk +++ b/mk/cfg/i686-unknown-linux-musl.mk @@ -25,4 +25,5 @@ CFG_THIRD_PARTY_OBJECTS_i686-unknown-linux-musl := crt1.o crti.o crtn.o CFG_INSTALLED_OBJECTS_i686-unknown-linux-musl := crt1.o crti.o crtn.o NATIVE_DEPS_libc_T_i686-unknown-linux-musl += libc.a -NATIVE_DEPS_std_T_i686-unknown-linux-musl += libunwind.a crt1.o crti.o crtn.o +NATIVE_DEPS_std_T_i686-unknown-linux-musl += crt1.o crti.o crtn.o +NATIVE_DEPS_unwind_T_i686-unknown-linux-musl += libunwind.a diff --git a/mk/cfg/x86_64-pc-windows-msvc.mk b/mk/cfg/x86_64-pc-windows-msvc.mk index a194dfd1a6..30e996a972 100644 --- a/mk/cfg/x86_64-pc-windows-msvc.mk +++ b/mk/cfg/x86_64-pc-windows-msvc.mk @@ -1,16 +1,16 @@ # x86_64-pc-windows-msvc configuration -CC_x86_64-pc-windows-msvc="$(CFG_MSVC_CL_x86_64)" -nologo -LINK_x86_64-pc-windows-msvc="$(CFG_MSVC_LINK_x86_64)" -nologo -CXX_x86_64-pc-windows-msvc="$(CFG_MSVC_CL_x86_64)" -nologo -CPP_x86_64-pc-windows-msvc="$(CFG_MSVC_CL_x86_64)" -nologo -AR_x86_64-pc-windows-msvc="$(CFG_MSVC_LIB_x86_64)" -nologo +CC_x86_64-pc-windows-msvc=$(CFG_MSVC_CL_x86_64) +LINK_x86_64-pc-windows-msvc=$(CFG_MSVC_LINK_x86_64) +CXX_x86_64-pc-windows-msvc=$(CFG_MSVC_CL_x86_64) +CPP_x86_64-pc-windows-msvc=$(CFG_MSVC_CL_x86_64) +AR_x86_64-pc-windows-msvc=$(CFG_MSVC_LIB_x86_64) CFG_LIB_NAME_x86_64-pc-windows-msvc=$(1).dll CFG_STATIC_LIB_NAME_x86_64-pc-windows-msvc=$(1).lib CFG_LIB_GLOB_x86_64-pc-windows-msvc=$(1)-*.{dll,lib} CFG_LIB_DSYM_GLOB_x86_64-pc-windows-msvc=$(1)-*.dylib.dSYM CFG_JEMALLOC_CFLAGS_x86_64-pc-windows-msvc := -CFG_GCCISH_CFLAGS_x86_64-pc-windows-msvc := -MD -CFG_GCCISH_CXXFLAGS_x86_64-pc-windows-msvc := -MD +CFG_GCCISH_CFLAGS_x86_64-pc-windows-msvc := -MD -nologo +CFG_GCCISH_CXXFLAGS_x86_64-pc-windows-msvc := -MD -nologo CFG_GCCISH_LINK_FLAGS_x86_64-pc-windows-msvc := 
CFG_GCCISH_DEF_FLAG_x86_64-pc-windows-msvc := CFG_LLC_FLAGS_x86_64-pc-windows-msvc := diff --git a/mk/cfg/x86_64-unknown-linux-musl.mk b/mk/cfg/x86_64-unknown-linux-musl.mk index 9d6dd5c73d..62a884874b 100644 --- a/mk/cfg/x86_64-unknown-linux-musl.mk +++ b/mk/cfg/x86_64-unknown-linux-musl.mk @@ -25,4 +25,5 @@ CFG_THIRD_PARTY_OBJECTS_x86_64-unknown-linux-musl := crt1.o crti.o crtn.o CFG_INSTALLED_OBJECTS_x86_64-unknown-linux-musl := crt1.o crti.o crtn.o NATIVE_DEPS_libc_T_x86_64-unknown-linux-musl += libc.a -NATIVE_DEPS_std_T_x86_64-unknown-linux-musl += libunwind.a crt1.o crti.o crtn.o +NATIVE_DEPS_std_T_x86_64-unknown-linux-musl += crt1.o crti.o crtn.o +NATIVE_DEPS_unwind_T_x86_64-unknown-linux-musl += libunwind.a diff --git a/mk/crates.mk b/mk/crates.mk index dafda75f5f..1583515014 100644 --- a/mk/crates.mk +++ b/mk/crates.mk @@ -53,7 +53,8 @@ TARGET_CRATES := libc std term \ getopts collections test rand \ core alloc \ rustc_unicode rustc_bitflags \ - alloc_system alloc_jemalloc + alloc_system alloc_jemalloc \ + panic_abort panic_unwind unwind RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_driver \ rustc_trans rustc_back rustc_llvm rustc_privacy rustc_lint \ rustc_data_structures rustc_platform_intrinsics \ @@ -72,10 +73,18 @@ DEPS_libc := core DEPS_rand := core DEPS_rustc_bitflags := core DEPS_rustc_unicode := core +DEPS_panic_abort := libc alloc +DEPS_panic_unwind := libc alloc unwind +DEPS_unwind := libc + +# FIXME(stage0): change this to just `RUSTFLAGS_panic_abort := ...` +RUSTFLAGS1_panic_abort := -C panic=abort +RUSTFLAGS2_panic_abort := -C panic=abort +RUSTFLAGS3_panic_abort := -C panic=abort DEPS_std := core libc rand alloc collections rustc_unicode \ native:backtrace \ - alloc_system + alloc_system panic_abort panic_unwind unwind DEPS_arena := std DEPS_glob := std DEPS_flate := std native:miniz @@ -97,7 +106,7 @@ DEPS_rustc_const_eval := rustc_const_math rustc syntax log serialize \ rustc_back graphviz DEPS_rustc := syntax fmt_macros flate arena serialize getopts rbml \ - log graphviz rustc_back rustc_data_structures\ + log graphviz rustc_llvm rustc_back rustc_data_structures\ rustc_const_math DEPS_rustc_back := std syntax flate log libc DEPS_rustc_borrowck := rustc rustc_mir log graphviz syntax @@ -111,7 +120,7 @@ DEPS_rustc_lint := rustc log syntax rustc_const_eval DEPS_rustc_llvm := native:rustllvm libc std rustc_bitflags DEPS_rustc_metadata := rustc syntax rbml rustc_const_math DEPS_rustc_passes := syntax rustc core rustc_const_eval -DEPS_rustc_mir := rustc syntax rustc_const_math rustc_const_eval +DEPS_rustc_mir := rustc syntax rustc_const_math rustc_const_eval rustc_bitflags DEPS_rustc_resolve := arena rustc log syntax DEPS_rustc_platform_intrinsics := std DEPS_rustc_plugin := rustc rustc_metadata syntax rustc_mir @@ -120,7 +129,7 @@ DEPS_rustc_trans := arena flate getopts graphviz libc rustc rustc_back rustc_mir log syntax serialize rustc_llvm rustc_platform_intrinsics \ rustc_const_math rustc_const_eval rustc_incremental DEPS_rustc_incremental := rbml rustc serialize rustc_data_structures -DEPS_rustc_save_analysis := rustc log syntax +DEPS_rustc_save_analysis := rustc log syntax serialize DEPS_rustc_typeck := rustc syntax rustc_platform_intrinsics rustc_const_math \ rustc_const_eval @@ -128,12 +137,12 @@ DEPS_rustdoc := rustc rustc_driver native:hoedown serialize getopts \ test rustc_lint rustc_const_eval -TOOL_DEPS_compiletest := test getopts log +TOOL_DEPS_compiletest := test getopts log serialize TOOL_DEPS_rustdoc := rustdoc 
TOOL_DEPS_rustc := rustc_driver TOOL_DEPS_rustbook := std rustdoc TOOL_DEPS_error_index_generator := rustdoc syntax serialize -TOOL_SOURCE_compiletest := $(S)src/compiletest/compiletest.rs +TOOL_SOURCE_compiletest := $(S)src/tools/compiletest/src/main.rs TOOL_SOURCE_rustdoc := $(S)src/driver/driver.rs TOOL_SOURCE_rustc := $(S)src/driver/driver.rs TOOL_SOURCE_rustbook := $(S)src/tools/rustbook/main.rs @@ -148,6 +157,9 @@ ONLY_RLIB_rustc_unicode := 1 ONLY_RLIB_rustc_bitflags := 1 ONLY_RLIB_alloc_system := 1 ONLY_RLIB_alloc_jemalloc := 1 +ONLY_RLIB_panic_unwind := 1 +ONLY_RLIB_panic_abort := 1 +ONLY_RLIB_unwind := 1 TARGET_SPECIFIC_alloc_jemalloc := 1 diff --git a/mk/ctags.mk b/mk/ctags.mk index a116f2aba6..1fcb0bb4de 100644 --- a/mk/ctags.mk +++ b/mk/ctags.mk @@ -15,14 +15,21 @@ .PHONY: TAGS.emacs TAGS.vi -CTAGS_LOCATIONS=$(wildcard ${CFG_SRC_DIR}src/lib*) +CTAGS_RUSTC_LOCATIONS=$(patsubst ${CFG_SRC_DIR}src/lib%test,, \ + $(wildcard ${CFG_SRC_DIR}src/lib*)) ${CFG_SRC_DIR}src/libtest CTAGS_LOCATIONS=$(patsubst ${CFG_SRC_DIR}src/librust%,, \ $(patsubst ${CFG_SRC_DIR}src/lib%test,, \ $(wildcard ${CFG_SRC_DIR}src/lib*))) ${CFG_SRC_DIR}src/libtest -CTAGS_OPTS=--options="${CFG_SRC_DIR}src/etc/ctags.rust" --languages=Rust --recurse ${CTAGS_LOCATIONS} +CTAGS_OPTS=--options="${CFG_SRC_DIR}src/etc/ctags.rust" --languages=Rust --recurse + +TAGS.rustc.emacs: + ctags -e -f $@ ${CTAGS_OPTS} ${CTAGS_RUSTC_LOCATIONS} TAGS.emacs: - ctags -e -f $@ ${CTAGS_OPTS} + ctags -e -f $@ ${CTAGS_OPTS} ${CTAGS_LOCATIONS} + +TAGS.rustc.vi: + ctags -f $@ ${CTAGS_OPTS} ${CTAGS_RUSTC_LOCATIONS} TAGS.vi: - ctags -f $@ ${CTAGS_OPTS} + ctags -f $@ ${CTAGS_OPTS} ${CTAGS_LOCATIONS} diff --git a/mk/debuggers.mk b/mk/debuggers.mk index aa7b62e13b..fbf32dc1a3 100644 --- a/mk/debuggers.mk +++ b/mk/debuggers.mk @@ -41,10 +41,13 @@ DEBUGGER_BIN_SCRIPTS_LLDB_ABS=\ ## ALL ## -DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL=$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB) \ - $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB) -DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL_ABS=$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_GDB_ABS) \ - $(DEBUGGER_RUSTLIB_ETC_SCRIPTS_LLDB_ABS) +DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL=gdb_load_rust_pretty_printers.py \ + gdb_rust_pretty_printing.py \ + lldb_rust_formatters.py \ + debugger_pretty_printers_common.py +DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL_ABS=\ + $(foreach script,$(DEBUGGER_RUSTLIB_ETC_SCRIPTS_ALL), \ + $(CFG_SRC_DIR)src/etc/$(script)) DEBUGGER_BIN_SCRIPTS_ALL=$(DEBUGGER_BIN_SCRIPTS_GDB) \ $(DEBUGGER_BIN_SCRIPTS_LLDB) DEBUGGER_BIN_SCRIPTS_ALL_ABS=$(DEBUGGER_BIN_SCRIPTS_GDB_ABS) \ diff --git a/mk/dist.mk b/mk/dist.mk index 1273900608..9491311ea7 100644 --- a/mk/dist.mk +++ b/mk/dist.mk @@ -50,7 +50,6 @@ PKG_FILES := \ $(addprefix $(S)src/, \ bootstrap \ build_helper \ - compiletest \ doc \ driver \ etc \ @@ -62,7 +61,7 @@ PKG_FILES := \ rtstartup \ rustllvm \ rustc \ - snapshots.txt \ + stage0.txt \ rust-installer \ tools \ test) \ @@ -78,6 +77,7 @@ $(PKG_TAR): $(PKG_FILES) $(Q)mkdir -p tmp/dist/$(PKG_NAME) $(Q)tar \ -C $(S) \ + -f - \ --exclude-vcs \ --exclude=*~ \ --exclude=*.pyc \ @@ -87,7 +87,7 @@ $(PKG_TAR): $(PKG_FILES) --exclude=*/llvm/test/*/*/*.ll \ --exclude=*/llvm/test/*/*/*.td \ --exclude=*/llvm/test/*/*/*.s \ - -c $(UNROOTED_PKG_FILES) | tar -x -C tmp/dist/$(PKG_NAME) + -c $(UNROOTED_PKG_FILES) | tar -x -f - -C tmp/dist/$(PKG_NAME) @$(call E, making $@) $(Q)tar -czf $(PKG_TAR) -C tmp/dist $(PKG_NAME) $(Q)rm -Rf tmp/dist/$(PKG_NAME) diff --git a/mk/main.mk b/mk/main.mk index fc537d3b7d..c47020c9f9 100644 --- a/mk/main.mk +++ b/mk/main.mk @@ -13,12 +13,12 @@ 
###################################################################### # The version number -CFG_RELEASE_NUM=1.9.0 +CFG_RELEASE_NUM=1.10.0 # An optional number to put after the label, e.g. '.2' -> '-beta.2' # NB Make sure it starts with a dot to conform to semver pre-release # versions (section 9) -CFG_PRERELEASE_VERSION=.3 +CFG_PRERELEASE_VERSION=.4 # Append a version-dependent hash to each library, so we can install different # versions in the same place @@ -34,6 +34,7 @@ CFG_FILENAME_EXTRA=$(shell printf '%s' $(CFG_RELEASE)$(CFG_EXTRA_FILENAME) | $(C # intentionally not "secure" by any definition, this is largely just a deterrent # from users enabling unstable features on the stable compiler. CFG_BOOTSTRAP_KEY=$(CFG_FILENAME_EXTRA) +CFG_BOOTSTRAP_KEY_STAGE0=$(shell grep 'rustc_key' $(S)src/stage0.txt | sed 's/rustc_key: '//) ifeq ($(CFG_RELEASE_CHANNEL),stable) # This is the normal semver version string, e.g. "0.12.0", "0.12.0-nightly" @@ -389,7 +390,7 @@ endif # This 'function' will determine which debugger scripts to copy based on a # target triple. See debuggers.mk for more information. TRIPLE_TO_DEBUGGER_SCRIPT_SETTING=\ - $(if $(findstring windows,$(1)),none,$(if $(findstring darwin,$(1)),lldb,gdb)) + $(if $(findstring windows-msvc,$(1)),none,all) STAGES = 0 1 2 3 diff --git a/mk/platform.mk b/mk/platform.mk index 83fd4509da..c2644621c5 100644 --- a/mk/platform.mk +++ b/mk/platform.mk @@ -148,7 +148,7 @@ define CC_MACROS CFG_CC_INCLUDE_$(1)=-I $$(1) ifeq ($$(findstring msvc,$(1)),msvc) CFG_CC_OUTPUT_$(1)=-Fo:$$(1) - CFG_CREATE_ARCHIVE_$(1)=$$(AR_$(1)) -OUT:$$(1) + CFG_CREATE_ARCHIVE_$(1)='$$(AR_$(1))' -OUT:$$(1) else CFG_CC_OUTPUT_$(1)=-o $$(1) CFG_CREATE_ARCHIVE_$(1)=$$(AR_$(1)) crus $$(1) @@ -169,7 +169,7 @@ ifdef CFG_CCACHE_BASEDIR export CCACHE_BASEDIR endif -FIND_COMPILER = $(word 1,$(1:ccache=)) +FIND_COMPILER = $(strip $(1:ccache=)) define CFG_MAKE_TOOLCHAIN # Prepend the tools with their prefix if cross compiling @@ -187,7 +187,7 @@ define CFG_MAKE_TOOLCHAIN endif endif - CFG_COMPILE_C_$(1) = $$(CC_$(1)) \ + CFG_COMPILE_C_$(1) = '$$(call FIND_COMPILER,$$(CC_$(1)))' \ $$(CFLAGS) \ $$(CFG_GCCISH_CFLAGS) \ $$(CFG_GCCISH_CFLAGS_$(1)) \ @@ -198,7 +198,7 @@ define CFG_MAKE_TOOLCHAIN $$(CFG_GCCISH_LINK_FLAGS_$(1)) \ $$(CFG_GCCISH_DEF_FLAG_$(1))$$(3) $$(2) \ $$(call CFG_INSTALL_NAME_$(1),$$(4)) - CFG_COMPILE_CXX_$(1) = $$(CXX_$(1)) \ + CFG_COMPILE_CXX_$(1) = '$$(call FIND_COMPILER,$$(CXX_$(1)))' \ $$(CXXFLAGS) \ $$(CFG_GCCISH_CFLAGS) \ $$(CFG_GCCISH_CXXFLAGS) \ diff --git a/mk/reconfig.mk b/mk/reconfig.mk index 1a3a177438..b8f5109786 100644 --- a/mk/reconfig.mk +++ b/mk/reconfig.mk @@ -38,6 +38,6 @@ else SREL_ROOT := $(SREL) endif -config.stamp: $(S)configure $(S)Makefile.in $(S)src/snapshots.txt +config.stamp: $(S)configure $(S)Makefile.in $(S)src/stage0.txt @$(call E, cfg: reconfiguring) $(SREL_ROOT)configure $(CFG_CONFIGURE_ARGS) diff --git a/mk/rustllvm.mk b/mk/rustllvm.mk index 6adffda7d1..834a11d37f 100644 --- a/mk/rustllvm.mk +++ b/mk/rustllvm.mk @@ -43,6 +43,9 @@ $$(RT_OUTPUT_DIR_$(1))/$$(call CFG_STATIC_LIB_NAME_$(1),rustllvm): \ @$$(call E, link: $$@) $$(Q)$$(call CFG_CREATE_ARCHIVE_$(1),$$@) $$^ +RUSTLLVM_COMPONENTS_$(1) = $$(shell echo $$(LLVM_ALL_COMPONENTS_$(1)) |\ + tr 'a-z-' 'A-Z_'| sed -e 's/^ //;s/\([^ ]*\)/\-DLLVM_COMPONENT_\1/g') + # On MSVC we need to double-escape arguments that llvm-config printed which # start with a '/'. 
The shell we're running in will auto-translate the argument # `/foo` to `C:/msys64/foo` but we really want it to be passed through as `/foo` @@ -51,6 +54,7 @@ $(1)/rustllvm/%.o: $(S)src/rustllvm/%.cpp $$(MKFILE_DEPS) $$(LLVM_CONFIG_$(1)) @$$(call E, compile: $$@) $$(Q)$$(call CFG_COMPILE_CXX_$(1), $$@,) \ $$(subst /,//,$$(LLVM_CXXFLAGS_$(1))) \ + $$(RUSTLLVM_COMPONENTS_$(1)) \ $$(EXTRA_RUSTLLVM_CXXFLAGS_$(1)) \ $$(RUSTLLVM_INCS_$(1)) \ $$< diff --git a/mk/snap.mk b/mk/snap.mk deleted file mode 100644 index 0b34f52b7e..0000000000 --- a/mk/snap.mk +++ /dev/null @@ -1,28 +0,0 @@ -# Copyright 2012 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -define DEF_SNAP_FOR_STAGE_H -# $(1) stage -# $(2) triple - -snap-stage$(1)-H-$(2): $$(HSREQ$(1)_H_$(2)) - $(CFG_PYTHON) $(S)src/etc/make-snapshot.py stage$(1) $(2) - -endef - -$(foreach host,$(CFG_HOST), \ - $(foreach stage,1 2 3, \ - $(eval $(call DEF_SNAP_FOR_STAGE_H,$(stage),$(host))))) - -snap-stage1: snap-stage1-H-$(CFG_BUILD) - -snap-stage2: snap-stage2-H-$(CFG_BUILD) - -snap-stage3: snap-stage3-H-$(CFG_BUILD) diff --git a/mk/stage0.mk b/mk/stage0.mk index 460a4a7f44..d0191874cb 100644 --- a/mk/stage0.mk +++ b/mk/stage0.mk @@ -10,17 +10,14 @@ $(HLIB0_H_$(CFG_BUILD))/: endif $(SNAPSHOT_RUSTC_POST_CLEANUP): \ - $(S)src/snapshots.txt \ - $(S)src/etc/get-snapshot.py $(MKFILE_DEPS) \ + $(S)src/stage0.txt \ + $(S)src/etc/get-stage0.py $(MKFILE_DEPS) \ | $(HBIN0_H_$(CFG_BUILD))/ - @$(call E, fetch: $@) -# Note: the variable "SNAPSHOT_FILE" is generally not set, and so -# we generally only pass one argument to this script. ifdef CFG_ENABLE_LOCAL_RUST $(Q)$(S)src/etc/local_stage0.sh $(CFG_BUILD) $(CFG_LOCAL_RUST_ROOT) rustlib else - $(Q)$(CFG_PYTHON) $(S)src/etc/get-snapshot.py $(CFG_BUILD) $(SNAPSHOT_FILE) + $(Q)$(CFG_PYTHON) $(S)src/etc/get-stage0.py $(CFG_BUILD) endif $(Q)if [ -e "$@" ]; then touch "$@"; else echo "ERROR: snapshot $@ not found"; exit 1; fi diff --git a/mk/target.mk b/mk/target.mk index 261b9e79aa..2a08b7b046 100644 --- a/mk/target.mk +++ b/mk/target.mk @@ -42,6 +42,23 @@ $(foreach host,$(CFG_HOST), \ $(foreach crate,$(CRATES), \ $(eval $(call RUST_CRATE_FULLDEPS,$(stage),$(target),$(host),$(crate))))))) +# $(1) stage +# $(2) target +# $(3) host +define DEFINE_BOOTSTRAP_KEY +BOOTSTRAP_KEY$(1)_T_$(2)_H_$(3) := $$(CFG_BOOTSTRAP_KEY) +ifeq ($(1),0) +ifeq ($(3),$$(CFG_BUILD)) +BOOTSTRAP_KEY$(1)_T_$(2)_H_$(3) := $$(CFG_BOOTSTRAP_KEY_STAGE0) +endif +endif +endef + +$(foreach host,$(CFG_TARGET), \ + $(foreach target,$(CFG_TARGET), \ + $(foreach stage,$(STAGES), \ + $(eval $(call DEFINE_BOOTSTRAP_KEY,$(stage),$(target),$(host)))))) + # RUST_TARGET_STAGE_N template: This defines how target artifacts are built # for all stage/target architecture combinations. 
This is one giant rule which # works as follows: @@ -66,6 +83,8 @@ $(foreach host,$(CFG_HOST), \ define RUST_TARGET_STAGE_N $$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$(4): CFG_COMPILER_HOST_TRIPLE = $(2) +$$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$(4): \ + export RUSTC_BOOTSTRAP_KEY := $$(BOOTSTRAP_KEY$(1)_T_$(2)_H_$(3)) $$(TLIB$(1)_T_$(2)_H_$(3))/stamp.$(4): \ $$(CRATEFILE_$(4)) \ $$(CRATE_FULLDEPS_$(1)_T_$(2)_H_$(3)_$(4)) \ @@ -113,6 +132,8 @@ endef # $(4) - name of the tool being built define TARGET_TOOL +$$(TBIN$(1)_T_$(2)_H_$(3))/$(4)$$(X_$(2)): \ + export RUSTC_BOOTSTRAP_KEY := $$(BOOTSTRAP_KEY$(1)_T_$(2)_H_$(3)) $$(TBIN$(1)_T_$(2)_H_$(3))/$(4)$$(X_$(2)): \ $$(TOOL_SOURCE_$(4)) \ $$(TOOL_INPUTS_$(4)) \ @@ -167,11 +188,15 @@ SNAPSHOT_RUSTC_POST_CLEANUP=$(HBIN0_H_$(CFG_BUILD))/rustc$(X_$(CFG_BUILD)) define TARGET_HOST_RULES -$$(TLIB$(1)_T_$(2)_H_$(3))/: +$$(TLIB$(1)_T_$(2)_H_$(3))/: $$(SNAPSHOT_RUSTC_POST_CLEANUP) + mkdir -p $$@ + +$$(TBIN$(1)_T_$(2)_H_$(3))/: $$(SNAPSHOT_RUSTC_POST_CLEANUP) mkdir -p $$@ $$(TLIB$(1)_T_$(2)_H_$(3))/%: $$(RT_OUTPUT_DIR_$(2))/% \ - | $$(TLIB$(1)_T_$(2)_H_$(3))/ $$(SNAPSHOT_RUSTC_POST_CLEANUP) + $$(SNAPSHOT_RUSTC_POST_CLEANUP) \ + | $$(TLIB$(1)_T_$(2)_H_$(3))/ @$$(call E, cp: $$@) $$(Q)cp $$< $$@ endef diff --git a/mk/tests.mk b/mk/tests.mk index 20736165b7..f9ab84e3f8 100644 --- a/mk/tests.mk +++ b/mk/tests.mk @@ -23,7 +23,8 @@ DEPS_collectionstest := $(eval $(call RUST_CRATE,collectionstest)) TEST_TARGET_CRATES = $(filter-out core rustc_unicode alloc_system libc \ - alloc_jemalloc,$(TARGET_CRATES)) \ + alloc_jemalloc panic_unwind \ + panic_abort,$(TARGET_CRATES)) \ collectionstest coretest TEST_DOC_CRATES = $(DOC_CRATES) arena flate fmt_macros getopts graphviz \ log rand rbml serialize syntax term test @@ -240,52 +241,19 @@ cleantestlibs: # Tidy ###################################################################### -ifdef CFG_NOTIDY .PHONY: tidy -tidy: -else - -# Run the tidy script in multiple parts to avoid huge 'echo' commands -.PHONY: tidy -tidy: tidy-basic tidy-binaries tidy-errors tidy-features - -endif - -.PHONY: tidy-basic -tidy-basic: - @$(call E, check: formatting) - $(Q) $(CFG_PYTHON) $(S)src/etc/tidy.py $(S)src/ - -.PHONY: tidy-binaries -tidy-binaries: - @$(call E, check: binaries) - $(Q)find $(S)src -type f \ - \( -perm -u+x -or -perm -g+x -or -perm -o+x \) \ - -not -name '*.rs' -and -not -name '*.py' \ - -and -not -name '*.sh' -and -not -name '*.pp' \ - | grep '^$(S)src/jemalloc' -v \ - | grep '^$(S)src/libuv' -v \ - | grep '^$(S)src/llvm' -v \ - | grep '^$(S)src/rt/hoedown' -v \ - | grep '^$(S)src/gyp' -v \ - | grep '^$(S)src/etc' -v \ - | grep '^$(S)src/doc' -v \ - | grep '^$(S)src/compiler-rt' -v \ - | grep '^$(S)src/libbacktrace' -v \ - | grep '^$(S)src/rust-installer' -v \ - | grep '^$(S)src/liblibc' -v \ - | xargs $(CFG_PYTHON) $(S)src/etc/check-binaries.py - -.PHONY: tidy-errors -tidy-errors: - @$(call E, check: extended errors) - $(Q) $(CFG_PYTHON) $(S)src/etc/errorck.py $(S)src/ - -.PHONY: tidy-features -tidy-features: - @$(call E, check: feature sanity) - $(Q) $(CFG_PYTHON) $(S)src/etc/featureck.py $(S)src/ - +tidy: $(HBIN0_H_$(CFG_BUILD))/tidy$(X_$(CFG_BUILD)) \ + $(SNAPSHOT_RUSTC_POST_CLEANUP) + $(TARGET_RPATH_VAR0_T_$(CFG_BUILD)_H_$(CFG_BUILD)) $< $(S)src + +$(HBIN0_H_$(CFG_BUILD))/tidy$(X_$(CFG_BUILD)): \ + $(TSREQ0_T_$(CFG_BUILD)_H_$(CFG_BUILD)) \ + $(TLIB0_T_$(CFG_BUILD)_H_$(CFG_BUILD))/stamp.std \ + $(call rwildcard,$(S)src/tools/tidy/src,*.rs) \ + $(SNAPSHOT_RUSTC_POST_CLEANUP) | \ + $(TLIB0_T_$(CFG_BUILD)_H_$(CFG_BUILD)) + 
$(STAGE0_T_$(CFG_BUILD)_H_$(CFG_BUILD)) $(S)src/tools/tidy/src/main.rs \ + --out-dir $(@D) --crate-name tidy ###################################################################### # Sets of tests @@ -306,6 +274,7 @@ check-stage$(1)-T-$(2)-H-$(3)-exec: \ check-stage$(1)-T-$(2)-H-$(3)-debuginfo-gdb-exec \ check-stage$(1)-T-$(2)-H-$(3)-debuginfo-lldb-exec \ check-stage$(1)-T-$(2)-H-$(3)-incremental-exec \ + check-stage$(1)-T-$(2)-H-$(3)-ui-exec \ check-stage$(1)-T-$(2)-H-$(3)-doc-exec \ check-stage$(1)-T-$(2)-H-$(3)-pretty-exec @@ -483,6 +452,10 @@ CODEGEN_RS := $(call rwildcard,$(S)src/test/codegen/,*.rs) CODEGEN_CC := $(call rwildcard,$(S)src/test/codegen/,*.cc) CODEGEN_UNITS_RS := $(call rwildcard,$(S)src/test/codegen-units/,*.rs) INCREMENTAL_RS := $(call rwildcard,$(S)src/test/incremental/,*.rs) +RMAKE_RS := $(wildcard $(S)src/test/run-make/*/Makefile) +UI_RS := $(call rwildcard,$(S)src/test/ui/,*.rs) \ + $(call rwildcard,$(S)src/test/ui/,*.stdout) \ + $(call rwildcard,$(S)src/test/ui/,*.stderr) RUSTDOCCK_RS := $(call rwildcard,$(S)src/test/rustdoc/,*.rs) RPASS_TESTS := $(RPASS_RS) @@ -499,6 +472,8 @@ DEBUGINFO_LLDB_TESTS := $(DEBUGINFO_LLDB_RS) CODEGEN_TESTS := $(CODEGEN_RS) $(CODEGEN_CC) CODEGEN_UNITS_TESTS := $(CODEGEN_UNITS_RS) INCREMENTAL_TESTS := $(INCREMENTAL_RS) +RMAKE_TESTS := $(RMAKE_RS) +UI_TESTS := $(UI_RS) RUSTDOCCK_TESTS := $(RUSTDOCCK_RS) CTEST_SRC_BASE_rpass = run-pass @@ -566,6 +541,16 @@ CTEST_BUILD_BASE_incremental = incremental CTEST_MODE_incremental = incremental CTEST_RUNTOOL_incremental = $(CTEST_RUNTOOL) +CTEST_SRC_BASE_rmake = run-make +CTEST_BUILD_BASE_rmake = run-make +CTEST_MODE_rmake = run-make +CTEST_RUNTOOL_rmake = $(CTEST_RUNTOOL) + +CTEST_SRC_BASE_ui = ui +CTEST_BUILD_BASE_ui = ui +CTEST_MODE_ui = ui +CTEST_RUNTOOL_ui = $(CTEST_RUNTOOL) + CTEST_SRC_BASE_rustdocck = rustdoc CTEST_BUILD_BASE_rustdocck = rustdoc CTEST_MODE_rustdocck = rustdoc @@ -642,25 +627,30 @@ ifdef CFG_ENABLE_DEBUGINFO_TESTS CTEST_RUSTC_FLAGS += -g endif -CTEST_COMMON_ARGS$(1)-T-$(2)-H-$(3) := \ +CTEST_COMMON_ARGS$(1)-T-$(2)-H-$(3) = \ --compile-lib-path $$(HLIB$(1)_H_$(3)) \ --run-lib-path $$(TLIB$(1)_T_$(2)_H_$(3)) \ --rustc-path $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3)) \ --rustdoc-path $$(HBIN$(1)_H_$(3))/rustdoc$$(X_$(3)) \ - --llvm-bin-path $(CFG_LLVM_INST_DIR_$(CFG_BUILD))/bin \ - --aux-base $$(S)src/test/auxiliary/ \ + --llvm-filecheck $(CFG_LLVM_INST_DIR_$(CFG_BUILD))/bin/FileCheck \ --stage-id stage$(1)-$(2) \ --target $(2) \ --host $(3) \ - --python $$(CFG_PYTHON) \ + --docck-python $$(CFG_PYTHON) \ + --lldb-python $$(CFG_LLDB_PYTHON) \ --gdb-version="$(CFG_GDB_VERSION)" \ --lldb-version="$(CFG_LLDB_VERSION)" \ - --android-cross-path=$(CFG_ANDROID_CROSS_PATH) \ + --android-cross-path=$(CFG_ARM_LINUX_ANDROIDEABI_NDK) \ --adb-path=$(CFG_ADB) \ --adb-test-dir=$(CFG_ADB_TEST_DIR) \ --host-rustcflags "$(RUSTC_FLAGS_$(3)) $$(CTEST_RUSTC_FLAGS) -L $$(RT_OUTPUT_DIR_$(3))" \ --lldb-python-dir=$(CFG_LLDB_PYTHON_DIR) \ --target-rustcflags "$(RUSTC_FLAGS_$(2)) $$(CTEST_RUSTC_FLAGS) -L $$(RT_OUTPUT_DIR_$(2))" \ + --cc '$$(call FIND_COMPILER,$$(CC_$(2)))' \ + --cxx '$$(call FIND_COMPILER,$$(CXX_$(2)))' \ + --cflags "$$(CFG_GCCISH_CFLAGS_$(2))" \ + --llvm-components "$$(LLVM_ALL_COMPONENTS_$(2))" \ + --llvm-cxxflags "$$(LLVM_CXXFLAGS_$(2))" \ $$(CTEST_TESTARGS) ifdef CFG_VALGRIND_RPASS @@ -690,6 +680,9 @@ CTEST_DEPS_debuginfo-lldb_$(1)-T-$(2)-H-$(3) = $$(DEBUGINFO_LLDB_TESTS) \ CTEST_DEPS_codegen_$(1)-T-$(2)-H-$(3) = $$(CODEGEN_TESTS) CTEST_DEPS_codegen-units_$(1)-T-$(2)-H-$(3) = 
$$(CODEGEN_UNITS_TESTS) CTEST_DEPS_incremental_$(1)-T-$(2)-H-$(3) = $$(INCREMENTAL_TESTS) +CTEST_DEPS_rmake_$(1)-T-$(2)-H-$(3) = $$(RMAKE_TESTS) \ + $$(CSREQ$(1)_T_$(3)_H_$(3)) $$(SREQ$(1)_T_$(2)_H_$(3)) +CTEST_DEPS_ui_$(1)-T-$(2)-H-$(3) = $$(UI_TESTS) CTEST_DEPS_rustdocck_$(1)-T-$(2)-H-$(3) = $$(RUSTDOCCK_TESTS) \ $$(HBIN$(1)_H_$(3))/rustdoc$$(X_$(3)) \ $(S)src/etc/htmldocck.py @@ -703,7 +696,7 @@ $(foreach host,$(CFG_HOST), \ define DEF_RUN_COMPILETEST -CTEST_ARGS$(1)-T-$(2)-H-$(3)-$(4) := \ +CTEST_ARGS$(1)-T-$(2)-H-$(3)-$(4) = \ $$(CTEST_COMMON_ARGS$(1)-T-$(2)-H-$(3)) \ --src-base $$(S)src/test/$$(CTEST_SRC_BASE_$(4))/ \ --build-base $(3)/test/$$(CTEST_BUILD_BASE_$(4))/ \ @@ -734,6 +727,10 @@ endif endif ifeq ($$(CTEST_DONT_RUN_$(1)-T-$(2)-H-$(3)-$(4)),) +$$(call TEST_OK_FILE,$(1),$(2),$(3),$(4)): \ + export INCLUDE := $$(CFG_MSVC_INCLUDE_PATH_$$(HOST_$(3))) +$$(call TEST_OK_FILE,$(1),$(2),$(3),$(4)): \ + export LIB := $$(CFG_MSVC_LIB_PATH_$$(HOST_$(3))) $$(call TEST_OK_FILE,$(1),$(2),$(3),$(4)): \ $$(TEST_SREQ$(1)_T_$(2)_H_$(3)) \ $$(CTEST_DEPS_$(4)_$(1)-T-$(2)-H-$(3)) @@ -756,7 +753,8 @@ endif endef CTEST_NAMES = rpass rpass-valgrind rpass-full rfail-full cfail-full rfail cfail pfail \ - debuginfo-gdb debuginfo-lldb codegen codegen-units rustdocck incremental + debuginfo-gdb debuginfo-lldb codegen codegen-units rustdocck incremental \ + rmake ui $(foreach host,$(CFG_HOST), \ $(eval $(foreach target,$(CFG_TARGET), \ @@ -791,7 +789,7 @@ $(foreach host,$(CFG_HOST), \ define DEF_RUN_PRETTY_TEST -PRETTY_ARGS$(1)-T-$(2)-H-$(3)-$(4) := \ +PRETTY_ARGS$(1)-T-$(2)-H-$(3)-$(4) = \ $$(CTEST_COMMON_ARGS$(1)-T-$(2)-H-$(3)) \ --src-base $$(S)src/test/$$(PRETTY_DIRNAME_$(4))/ \ --build-base $(3)/test/$$(PRETTY_DIRNAME_$(4))/ \ @@ -955,6 +953,7 @@ TEST_GROUPS = \ codegen \ codegen-units \ incremental \ + ui \ doc \ $(foreach docname,$(DOC_NAMES),doc-$(docname)) \ pretty \ @@ -1041,70 +1040,3 @@ endef $(foreach crate,$(TEST_CRATES), \ $(eval $(call DEF_CHECK_CRATE,$(crate)))) - -###################################################################### -# RMAKE rules -###################################################################### - -RMAKE_TESTS := $(shell ls -d $(S)src/test/run-make/*/) -RMAKE_TESTS := $(RMAKE_TESTS:$(S)src/test/run-make/%/=%) - -define DEF_RMAKE_FOR_T_H -# $(1) the stage -# $(2) target triple -# $(3) host triple - - -ifeq ($(2)$(3),$$(CFG_BUILD)$$(CFG_BUILD)) -check-stage$(1)-T-$(2)-H-$(3)-rmake-exec: \ - $$(call TEST_OK_FILE,$(1),$(2),$(3),rmake) - -$$(call TEST_OK_FILE,$(1),$(2),$(3),rmake): \ - $$(RMAKE_TESTS:%=$(3)/test/run-make/%-$(1)-T-$(2)-H-$(3).ok) - @touch $$@ - -$(3)/test/run-make/%-$(1)-T-$(2)-H-$(3).ok: \ - export INCLUDE := $$(CFG_MSVC_INCLUDE_PATH_$$(HOST_$(3))) -$(3)/test/run-make/%-$(1)-T-$(2)-H-$(3).ok: \ - export LIB := $$(CFG_MSVC_LIB_PATH_$$(HOST_$(3))) -$(3)/test/run-make/%-$(1)-T-$(2)-H-$(3).ok: \ - export MSVC_LIB := "$$(CFG_MSVC_LIB_$$(HOST_$(3)))" -$(3)/test/run-make/%-$(1)-T-$(2)-H-$(3).ok: \ - $(S)src/test/run-make/%/Makefile \ - $$(CSREQ$(1)_T_$(2)_H_$(3)) - @rm -rf $(3)/test/run-make/$$* - @mkdir -p $(3)/test/run-make/$$* - $$(Q)touch $$@.start_time - $$(Q)$$(CFG_PYTHON) $(S)src/etc/maketest.py $$(dir $$<) \ - $$(MAKE) \ - $$(HBIN$(1)_H_$(3))/rustc$$(X_$(3)) \ - $(3)/test/run-make/$$* \ - '$$(CC_$(3))' \ - "$$(CFG_GCCISH_CFLAGS_$(3))" \ - $$(HBIN$(1)_H_$(3))/rustdoc$$(X_$(3)) \ - "$$(TESTNAME)" \ - $$(LD_LIBRARY_PATH_ENV_NAME$(1)_T_$(2)_H_$(3)) \ - "$$(LD_LIBRARY_PATH_ENV_HOSTDIR$(1)_T_$(2)_H_$(3))" \ - 
"$$(LD_LIBRARY_PATH_ENV_TARGETDIR$(1)_T_$(2)_H_$(3))" \ - $(1) \ - $$(S) \ - $(3) \ - "$$(LLVM_LIBDIR_RUSTFLAGS_$(3))" \ - "$$(LLVM_ALL_COMPONENTS_$(3))" \ - "$$(LLVM_CXXFLAGS_$(3))" \ - '$$(CXX_$(3))' - @touch -r $$@.start_time $$@ && rm $$@.start_time -else -# FIXME #11094 - The above rule doesn't work right for multiple targets -check-stage$(1)-T-$(2)-H-$(3)-rmake-exec: - @true - -endif - - -endef - -$(foreach stage,$(STAGES), \ - $(foreach target,$(CFG_TARGET), \ - $(foreach host,$(CFG_HOST), \ - $(eval $(call DEF_RMAKE_FOR_T_H,$(stage),$(target),$(host)))))) diff --git a/src/bootstrap/Cargo.lock b/src/bootstrap/Cargo.lock index 722feab212..39c7a37501 100644 --- a/src/bootstrap/Cargo.lock +++ b/src/bootstrap/Cargo.lock @@ -3,7 +3,7 @@ name = "bootstrap" version = "0.0.0" dependencies = [ "build_helper 0.1.0", - "cmake 0.1.16 (registry+https://github.com/rust-lang/crates.io-index)", + "cmake 0.1.17 (registry+https://github.com/rust-lang/crates.io-index)", "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)", "gcc 0.3.26 (registry+https://github.com/rust-lang/crates.io-index)", "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", @@ -22,7 +22,7 @@ version = "0.1.0" [[package]] name = "cmake" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "gcc 0.3.26 (registry+https://github.com/rust-lang/crates.io-index)", diff --git a/src/bootstrap/Cargo.toml b/src/bootstrap/Cargo.toml index 88dd19a769..f9a64567ff 100644 --- a/src/bootstrap/Cargo.toml +++ b/src/bootstrap/Cargo.toml @@ -21,7 +21,7 @@ path = "rustdoc.rs" [dependencies] build_helper = { path = "../build_helper" } -cmake = "0.1.10" +cmake = "0.1.17" filetime = "0.1" num_cpus = "0.2" toml = "0.1" diff --git a/src/bootstrap/README.md b/src/bootstrap/README.md index 9e97ec4da0..942f070c82 100644 --- a/src/bootstrap/README.md +++ b/src/bootstrap/README.md @@ -1,4 +1,4 @@ -# Bootstrapping Rust +# rustbuild - Bootstrapping Rust This is an in-progress README which is targeted at helping to explain how Rust is bootstrapped and in general some of the technical details of the build @@ -8,20 +8,64 @@ system. > intended to be the primarily used one just yet. The makefiles are currently > the ones that are still "guaranteed to work" as much as possible at least. -## Using the new build system +## Using rustbuild When configuring Rust via `./configure`, pass the following to enable building via this build system: ``` ./configure --enable-rustbuild +make ``` -## ... +Afterwards the `Makefile` which is generated will have a few commands like +`make check`, `make tidy`, etc. For finer-grained control, the +`bootstrap.py` entry point can be used: + +``` +python src/bootstrap/bootstrap.py +``` + +This accepts a number of options like `--stage` and `--step` which can configure +what's actually being done. + +## Configuring rustbuild + +There are currently two primary methods for configuring the rustbuild build +system. First, the `./configure` options serialized in `config.mk` will be +parsed and read. That is, if any `./configure` options are passed, they'll be +handled naturally. + +Next, rustbuild offers a TOML-based configuration system with a `config.toml` +file in the same location as `config.mk`. An example of this configuration can +be found at `src/bootstrap/config.toml.example`, and the configuration file +can also be passed as `--config path/to/config.toml` if the build system is +being invoked manually (via the python script). 
+ +## Build stages + +The rustbuild build system goes through a few phases to actually build the +compiler. What actually happens when you invoke rustbuild is: + +1. The entry point script, `src/bootstrap/bootstrap.py` is run. This script is + responsible for downloading the stage0 compiler/Cargo binaries, and it then + compiles the build system itself (this folder). Finally, it then invokes the + actual `boostrap` binary build system. +2. In Rust, `bootstrap` will slurp up all configuration, perform a number of + sanity checks (compilers exist for example), and then start building the + stage0 artifacts. +3. The stage0 `cargo` downloaded earlier is used to build the standard library + and the compiler, and then these binaries are then copied to the `stage1` + directory. That compiler is then used to generate the stage1 artifacts which + are then copied to the stage2 directory, and then finally the stage2 + artifacts are generated using that compiler. + +The goal of each stage is to (a) leverage Cargo as much as possible and failing +that (b) leverage Rust as much as possible! ## Directory Layout -This build system houses all output under the `target` directory, which looks +This build system houses all output under the `build` directory, which looks like this: ``` @@ -42,6 +86,12 @@ build/ debug/ release/ + # Output of the dist-related steps like dist-std, dist-rustc, and dist-docs + dist/ + + # Temporary directory used for various input/output as part of various stages + tmp/ + # Each remaining directory is scoped by the "host" triple of compilation at # hand. x86_64-unknown-linux-gnu/ @@ -50,7 +100,8 @@ build/ # folder is under. The exact layout here will likely depend on the platform, # and this is also built with CMake so the build system is also likely # different. - compiler-rt/build/ + compiler-rt/ + build/ # Output folder for LLVM if it is compiled for this target llvm/ @@ -67,6 +118,17 @@ build/ share/ ... + # Output folder for all documentation of this target. This is what's filled + # in whenever the `doc` step is run. + doc/ + + # Output for all compiletest-based test suites + test/ + run-pass/ + compile-fail/ + debuginfo/ + ... + # Location where the stage0 Cargo and Rust compiler are unpacked. This # directory is purely an extracted and overlaid tarball of these two (done # by the bootstrapy python script). In theory the build system does not @@ -82,7 +144,9 @@ build/ # invocation. The build system instruments calling Cargo in the right order # with the right variables to ensure these are filled in correctly. stageN-std/ + stageN-test/ stageN-rustc/ + stageN-tools/ # This is a special case of the above directories, **not** filled in via # Cargo but rather the build system itself. The stage0 compiler already has @@ -96,7 +160,7 @@ build/ # Basically this directory is just a temporary artifact use to configure the # stage0 compiler to ensure that the libstd we just built is used to # compile the stage1 compiler. - stage0-rustc/lib/ + stage0-sysroot/lib/ # These output directories are intended to be standalone working # implementations of the compiler (corresponding to each stage). 
The build @@ -108,3 +172,69 @@ build/ stage2/ stage3/ ``` + +## Cargo projects + +The current build is unfortunately not quite as simple as `cargo build` in a +directory, but rather the compiler is split into three different Cargo projects: + +* `src/rustc/std_shim` - a project which builds and compiles libstd +* `src/rustc/test_shim` - a project which builds and compiles libtest +* `src/rustc` - the actual compiler itself + +Each "project" has a corresponding Cargo.lock file with all dependencies, and +this means that building the compiler involves running Cargo three times. The +structure here serves two goals: + +1. Facilitating dependencies coming from crates.io. These dependencies don't + depend on `std`, so libstd is a separate project compiled ahead of time + before the actual compiler builds. +2. Splitting "host artifacts" from "target artifacts". That is, when building + code for an arbitrary target you don't need the entire compiler, but you'll + end up needing libraries like libtest that depend on std but also want to use + crates.io dependencies. Hence, libtest is split out as its own project that + is sequenced after `std` but before `rustc`. This project is built for all + targets. + +There is some loss in build parallelism here because libtest can be compiled in +parallel with a number of rustc artifacts, but in theory the loss isn't too bad! + +## Build tools + +We've actually got quite a few tools that we use in the compiler's build system +and for testing. To organize these, each tool is a project in `src/tools` with a +corresponding `Cargo.toml`. All tools are compiled with Cargo (currently having +independent `Cargo.lock` files) and do not currently explicitly depend on the +compiler or standard library. Compiling each tool is sequenced after the +appropriate libstd/libtest/librustc compile above. + +## Extending rustbuild + +So you'd like to add a feature to the rustbuild build system or just fix a bug. +Great! One of the major motivational factors for moving away from `make` is that +Rust is in theory much easier to read, modify, and write. If you find anything +excessively confusing, please open an issue on this and we'll try to get it +documented or simplified pronto. + +First up, you'll probably want to read over the documentation above as that'll +give you a high level overview of what rustbuild is doing. You also probably +want to play around a bit yourself by just getting it up and running before you +dive too much into the actual build system itself. + +After that, each module in rustbuild should have enough documentation to keep +you up and running. Some general areas that you may be interested in modifying +are: + +* Adding a new build tool? Take a look at `build/step.rs` for examples of other + tools, as well as `build/mod.rs`. +* Adding a new compiler crate? Look no further! Adding crates can be done by + adding a new directory with `Cargo.toml` followed by configuring all + `Cargo.toml` files accordingly. +* Adding a new dependency from crates.io? We're still working on that, so hold + off on that for now. +* Adding a new configuration option? Take a look at `build/config.rs` or perhaps + `build/flags.rs` and then modify the build elsewhere to read that option. +* Adding a sanity check? Take a look at `build/sanity.rs`. + +If you have any questions feel free to reach out on `#rust-internals` on IRC or +open an issue in the bug tracker! 
diff --git a/src/bootstrap/bootstrap.py b/src/bootstrap/bootstrap.py index 5de7e6957c..0ab5253ee7 100644 --- a/src/bootstrap/bootstrap.py +++ b/src/bootstrap/bootstrap.py @@ -10,22 +10,66 @@ import argparse import contextlib +import hashlib import os import shutil import subprocess import sys import tarfile +import tempfile + def get(url, path, verbose=False): - print("downloading " + url) + sha_url = url + ".sha256" + with tempfile.NamedTemporaryFile(delete=False) as temp_file: + temp_path = temp_file.name + with tempfile.NamedTemporaryFile(suffix=".sha256", delete=False) as sha_file: + sha_path = sha_file.name + + try: + download(sha_path, sha_url, verbose) + download(temp_path, url, verbose) + verify(temp_path, sha_path, verbose) + print("moving " + temp_path + " to " + path) + shutil.move(temp_path, path) + finally: + delete_if_present(sha_path) + delete_if_present(temp_path) + + +def delete_if_present(path): + if os.path.isfile(path): + print("removing " + path) + os.unlink(path) + + +def download(path, url, verbose): + print("downloading " + url + " to " + path) # see http://serverfault.com/questions/301128/how-to-download if sys.platform == 'win32': run(["PowerShell.exe", "/nologo", "-Command", - "(New-Object System.Net.WebClient).DownloadFile('" + url + - "', '" + path + "')"], verbose=verbose) + "(New-Object System.Net.WebClient)" + ".DownloadFile('{}', '{}')".format(url, path)], + verbose=verbose) else: run(["curl", "-o", path, url], verbose=verbose) + +def verify(path, sha_path, verbose): + print("verifying " + path) + with open(path, "rb") as f: + found = hashlib.sha256(f.read()).hexdigest() + with open(sha_path, "r") as f: + expected, _ = f.readline().split() + if found != expected: + err = ("invalid checksum:\n" + " found: {}\n" + " expected: {}".format(found, expected)) + if verbose: + raise RuntimeError(err) + sys.exit(err) + + def unpack(tarball, dst, verbose=False, match=None): print("extracting " + tarball) fname = os.path.basename(tarball).replace(".tar.gz", "") @@ -57,15 +101,27 @@ def run(args, verbose=False): ret = subprocess.Popen(args) code = ret.wait() if code != 0: - if not verbose: - print("failed to run: " + ' '.join(args)) - raise RuntimeError("failed to run command") + err = "failed to run: " + ' '.join(args) + if verbose: + raise RuntimeError(err) + sys.exit(err) + +def stage0_data(rust_root): + nightlies = os.path.join(rust_root, "src/stage0.txt") + with open(nightlies, 'r') as nightlies: + data = {} + for line in nightlies.read().split("\n"): + if line.startswith("#") or line == '': + continue + a, b = line.split(": ", 1) + data[a] = b + return data class RustBuild: - def download_rust_nightly(self): + def download_stage0(self): cache_dst = os.path.join(self.build_dir, "cache") - rustc_cache = os.path.join(cache_dst, self.snap_rustc_date()) - cargo_cache = os.path.join(cache_dst, self.snap_cargo_date()) + rustc_cache = os.path.join(cache_dst, self.stage0_rustc_date()) + cargo_cache = os.path.join(cache_dst, self.stage0_cargo_date()) if not os.path.exists(rustc_cache): os.makedirs(rustc_cache) if not os.path.exists(cargo_cache): @@ -75,8 +131,9 @@ class RustBuild: (not os.path.exists(self.rustc()) or self.rustc_out_of_date()): if os.path.exists(self.bin_root()): shutil.rmtree(self.bin_root()) - filename = "rust-std-nightly-" + self.build + ".tar.gz" - url = "https://static.rust-lang.org/dist/" + self.snap_rustc_date() + channel = self.stage0_rustc_channel() + filename = "rust-std-" + channel + "-" + self.build + ".tar.gz" + url = 
"https://static.rust-lang.org/dist/" + self.stage0_rustc_date() tarball = os.path.join(rustc_cache, filename) if not os.path.exists(tarball): get(url + "/" + filename, tarball, verbose=self.verbose) @@ -84,32 +141,39 @@ class RustBuild: match="rust-std-" + self.build, verbose=self.verbose) - filename = "rustc-nightly-" + self.build + ".tar.gz" - url = "https://static.rust-lang.org/dist/" + self.snap_rustc_date() + filename = "rustc-" + channel + "-" + self.build + ".tar.gz" + url = "https://static.rust-lang.org/dist/" + self.stage0_rustc_date() tarball = os.path.join(rustc_cache, filename) if not os.path.exists(tarball): get(url + "/" + filename, tarball, verbose=self.verbose) unpack(tarball, self.bin_root(), match="rustc", verbose=self.verbose) with open(self.rustc_stamp(), 'w') as f: - f.write(self.snap_rustc_date()) + f.write(self.stage0_rustc_date()) if self.cargo().startswith(self.bin_root()) and \ (not os.path.exists(self.cargo()) or self.cargo_out_of_date()): - filename = "cargo-nightly-" + self.build + ".tar.gz" - url = "https://static.rust-lang.org/cargo-dist/" + self.snap_cargo_date() + channel = self.stage0_cargo_channel() + filename = "cargo-" + channel + "-" + self.build + ".tar.gz" + url = "https://static.rust-lang.org/cargo-dist/" + self.stage0_cargo_date() tarball = os.path.join(cargo_cache, filename) if not os.path.exists(tarball): get(url + "/" + filename, tarball, verbose=self.verbose) unpack(tarball, self.bin_root(), match="cargo", verbose=self.verbose) with open(self.cargo_stamp(), 'w') as f: - f.write(self.snap_cargo_date()) + f.write(self.stage0_cargo_date()) - def snap_cargo_date(self): + def stage0_cargo_date(self): return self._cargo_date - def snap_rustc_date(self): + def stage0_cargo_channel(self): + return self._cargo_channel + + def stage0_rustc_date(self): return self._rustc_date + def stage0_rustc_channel(self): + return self._rustc_channel + def rustc_stamp(self): return os.path.join(self.bin_root(), '.rustc-stamp') @@ -120,13 +184,13 @@ class RustBuild: if not os.path.exists(self.rustc_stamp()): return True with open(self.rustc_stamp(), 'r') as f: - return self.snap_rustc_date() != f.read() + return self.stage0_rustc_date() != f.read() def cargo_out_of_date(self): if not os.path.exists(self.cargo_stamp()): return True with open(self.cargo_stamp(), 'r') as f: - return self.snap_cargo_date() != f.read() + return self.stage0_cargo_date() != f.read() def bin_root(self): return os.path.join(self.build_dir, self.build, "stage0") @@ -169,15 +233,6 @@ class RustBuild: else: return '' - def parse_nightly_dates(self): - nightlies = os.path.join(self.rust_root, "src/nightlies.txt") - with open(nightlies, 'r') as nightlies: - rustc, cargo = nightlies.read().split("\n")[:2] - assert rustc.startswith("rustc: ") - assert cargo.startswith("cargo: ") - self._rustc_date = rustc[len("rustc: "):] - self._cargo_date = cargo[len("cargo: "):] - def build_bootstrap(self): env = os.environ.copy() env["CARGO_TARGET_DIR"] = os.path.join(self.build_dir, "bootstrap") @@ -210,7 +265,10 @@ class RustBuild: if sys.platform == 'win32': return 'x86_64-pc-windows-msvc' else: - raise + err = "uname not found" + if self.verbose: + raise Exception(err) + sys.exit(err) # Darwin's `uname -s` lies and always returns i386. We have to use # sysctl instead. 
@@ -253,7 +311,10 @@ class RustBuild: cputype = 'x86_64' ostype = 'pc-windows-gnu' else: - raise ValueError("unknown OS type: " + ostype) + err = "unknown OS type: " + ostype + if self.verbose: + raise ValueError(err) + sys.exit(err) if cputype in {'i386', 'i486', 'i686', 'i786', 'x86'}: cputype = 'i686' @@ -269,50 +330,60 @@ class RustBuild: elif cputype in {'amd64', 'x86_64', 'x86-64', 'x64'}: cputype = 'x86_64' else: - raise ValueError("unknown cpu type: " + cputype) + err = "unknown cpu type: " + cputype + if self.verbose: + raise ValueError(err) + sys.exit(err) return cputype + '-' + ostype -parser = argparse.ArgumentParser(description='Build rust') -parser.add_argument('--config') -parser.add_argument('-v', '--verbose', action='store_true') - -args = [a for a in sys.argv if a != '-h'] -args, _ = parser.parse_known_args(args) - -# Configure initial bootstrap -rb = RustBuild() -rb.config_toml = '' -rb.config_mk = '' -rb.rust_root = os.path.abspath(os.path.join(__file__, '../../..')) -rb.build_dir = os.path.join(os.getcwd(), "build") -rb.verbose = args.verbose - -try: - with open(args.config or 'config.toml') as config: - rb.config_toml = config.read() -except: - pass -try: - rb.config_mk = open('config.mk').read() -except: - pass - -# Fetch/build the bootstrap -rb.build = rb.build_triple() -rb.parse_nightly_dates() -rb.download_rust_nightly() -sys.stdout.flush() -rb.build_bootstrap() -sys.stdout.flush() - -# Run the bootstrap -args = [os.path.join(rb.build_dir, "bootstrap/debug/bootstrap")] -args.extend(sys.argv[1:]) -args.append('--src') -args.append(rb.rust_root) -args.append('--build') -args.append(rb.build) -env = os.environ.copy() -env["BOOTSTRAP_PARENT_ID"] = str(os.getpid()) -rb.run(args, env) +def main(): + parser = argparse.ArgumentParser(description='Build rust') + parser.add_argument('--config') + parser.add_argument('-v', '--verbose', action='store_true') + + args = [a for a in sys.argv if a != '-h'] + args, _ = parser.parse_known_args(args) + + # Configure initial bootstrap + rb = RustBuild() + rb.config_toml = '' + rb.config_mk = '' + rb.rust_root = os.path.abspath(os.path.join(__file__, '../../..')) + rb.build_dir = os.path.join(os.getcwd(), "build") + rb.verbose = args.verbose + + try: + with open(args.config or 'config.toml') as config: + rb.config_toml = config.read() + except: + pass + try: + rb.config_mk = open('config.mk').read() + except: + pass + + data = stage0_data(rb.rust_root) + rb._rustc_channel, rb._rustc_date = data['rustc'].split('-', 1) + rb._cargo_channel, rb._cargo_date = data['cargo'].split('-', 1) + + # Fetch/build the bootstrap + rb.build = rb.build_triple() + rb.download_stage0() + sys.stdout.flush() + rb.build_bootstrap() + sys.stdout.flush() + + # Run the bootstrap + args = [os.path.join(rb.build_dir, "bootstrap/debug/bootstrap")] + args.append('--src') + args.append(rb.rust_root) + args.append('--build') + args.append(rb.build) + args.extend(sys.argv[1:]) + env = os.environ.copy() + env["BOOTSTRAP_PARENT_ID"] = str(os.getpid()) + rb.run(args, env) + +if __name__ == '__main__': + main() diff --git a/src/bootstrap/build/cc.rs b/src/bootstrap/build/cc.rs index 9f962e9d9e..d0b0f1007c 100644 --- a/src/bootstrap/build/cc.rs +++ b/src/bootstrap/build/cc.rs @@ -8,6 +8,29 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! C-compiler probing and detection. +//! +//! This module will fill out the `cc` and `cxx` maps of `Build` by looking for +//! C and C++ compilers for each target configured. 
A compiler is found through +//! a number of vectors (in order of precedence) +//! +//! 1. Configuration via `target.$target.cc` in `config.toml`. +//! 2. Configuration via `target.$target.android-ndk` in `config.toml`, if +//! applicable +//! 3. Special logic to probe on OpenBSD +//! 4. The `CC_$target` environment variable. +//! 5. The `CC` environment variable. +//! 6. "cc" +//! +//! Some of this logic is implemented here, but much of it is farmed out to the +//! `gcc` crate itself, so we end up having the same fallbacks as there. +//! Similar logic is then used to find a C++ compiler, just some s/cc/c++/ is +//! used. +//! +//! It is intended that after this module has run no C/C++ compiler will +//! ever be probed for. Instead the compilers found here will be used for +//! everything. + use std::process::Command; use build_helper::{cc2ar, output}; diff --git a/src/bootstrap/build/channel.rs b/src/bootstrap/build/channel.rs index 611e347561..76d061eb43 100644 --- a/src/bootstrap/build/channel.rs +++ b/src/bootstrap/build/channel.rs @@ -8,7 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use std::env; +//! Build configuration for Rust's release channels. +//! +//! Implements the stable/beta/nightly channel distinctions by setting various +//! flags like the `unstable_features`, calculating variables like `release` and +//! `package_vers`, and otherwise indicating to the compiler what it should +//! print out as part of its version information. + use std::fs::{self, File}; use std::io::prelude::*; use std::process::Command; @@ -19,6 +25,9 @@ use md5; use build::Build; pub fn collect(build: &mut Build) { + // Currently the canonical source for the release number (e.g. 1.10.0) and + // the prerelease version (e.g. `.1`) is in `mk/main.mk`. We "parse" that + // here to learn about those numbers. let mut main_mk = String::new(); t!(t!(File::open(build.src.join("mk/main.mk"))).read_to_string(&mut main_mk)); let mut release_num = ""; @@ -32,7 +41,8 @@ pub fn collect(build: &mut Build) { } } - // FIXME: this is duplicating makefile logic + // Depending on the channel, passed in `./configure --release-channel`, + // determine various properties of the build. match &build.config.channel[..] { "stable" => { build.release = release_num.to_string(); @@ -58,6 +68,8 @@ pub fn collect(build: &mut Build) { } build.version = build.release.clone(); + // If we have a git directory, add in some various SHA information of what + // commit this compiler was compiled from. if fs::metadata(build.src.join(".git")).is_ok() { let ver_date = output(Command::new("git").current_dir(&build.src) .arg("log").arg("-1") @@ -80,8 +92,19 @@ pub fn collect(build: &mut Build) { build.short_ver_hash = Some(short_ver_hash); } + // Calculate this compiler's bootstrap key, which is currently defined as + // the first 8 characters of the md5 of the release string. let key = md5::compute(build.release.as_bytes()); build.bootstrap_key = format!("{:02x}{:02x}{:02x}{:02x}", key[0], key[1], key[2], key[3]); - env::set_var("RUSTC_BOOTSTRAP_KEY", &build.bootstrap_key); + + // Slurp up the stage0 bootstrap key as we're bootstrapping from an + // otherwise stable compiler. 
+ let mut s = String::new(); + t!(t!(File::open(build.src.join("src/stage0.txt"))).read_to_string(&mut s)); + if let Some(line) = s.lines().find(|l| l.starts_with("rustc_key")) { + if let Some(key) = line.split(": ").nth(1) { + build.bootstrap_key_stage0 = key.to_string(); + } + } } diff --git a/src/bootstrap/build/check.rs b/src/bootstrap/build/check.rs index a2445ae498..154d9556fd 100644 --- a/src/bootstrap/build/check.rs +++ b/src/bootstrap/build/check.rs @@ -8,8 +8,26 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use build::{Build, Compiler}; +//! Implementation of the various `check-*` targets of the build system. +//! +//! This file implements the various regression test suites that we execute on +//! our CI. +use std::env; +use std::fs::{self, File}; +use std::io::prelude::*; +use std::path::{PathBuf, Path}; +use std::process::Command; + +use build_helper::output; +use bootstrap::{dylib_path, dylib_path_var}; + +use build::{Build, Compiler, Mode}; + +/// Runs the `linkchecker` tool as compiled in `stage` by the `host` compiler. +/// +/// This tool in `src/tools` will verify the validity of all our links in the +/// documentation to ensure we don't have a bunch of dead ones. pub fn linkcheck(build: &Build, stage: u32, host: &str) { println!("Linkcheck stage{} ({})", stage, host); let compiler = Compiler::new(stage, host); @@ -17,8 +35,11 @@ pub fn linkcheck(build: &Build, stage: u32, host: &str) { .arg(build.out.join(host).join("doc"))); } +/// Runs the `cargotest` tool as compiled in `stage` by the `host` compiler. +/// +/// This tool in `src/tools` will check out a few Rust projects and run `cargo +/// test` to ensure that we don't regress the test suites there. pub fn cargotest(build: &Build, stage: u32, host: &str) { - let ref compiler = Compiler::new(stage, host); // Configure PATH to find the right rustc. NB. we have to use PATH @@ -29,7 +50,259 @@ pub fn cargotest(build: &Build, stage: u32, host: &str) { let sep = if cfg!(windows) { ";" } else {":" }; let ref newpath = format!("{}{}{}", path.display(), sep, old_path); + // Note that this is a short, cryptic, and not scoped directory name. This + // is currently to minimize the length of path on Windows where we otherwise + // quickly run into path name limit constraints. + let out_dir = build.out.join("ct"); + t!(fs::create_dir_all(&out_dir)); + build.run(build.tool_cmd(compiler, "cargotest") - .env("PATH", newpath) - .arg(&build.cargo)); + .env("PATH", newpath) + .arg(&build.cargo) + .arg(&out_dir)); +} + +/// Runs the `tidy` tool as compiled in `stage` by the `host` compiler. +/// +/// This tool in `src/tools` checks up on various bits and pieces of style and +/// otherwise just implements a few lint-like checks that are specific to the +/// compiler itself. +pub fn tidy(build: &Build, stage: u32, host: &str) { + println!("tidy check stage{} ({})", stage, host); + let compiler = Compiler::new(stage, host); + build.run(build.tool_cmd(&compiler, "tidy") + .arg(build.src.join("src"))); +} + +fn testdir(build: &Build, host: &str) -> PathBuf { + build.out.join(host).join("test") +} + +/// Executes the `compiletest` tool to run a suite of tests. +/// +/// Compiles all tests with `compiler` for `target` with the specified +/// compiletest `mode` and `suite` arguments. For example `mode` can be +/// "run-pass" or `suite` can be something like `debuginfo`. 
+pub fn compiletest(build: &Build, + compiler: &Compiler, + target: &str, + mode: &str, + suite: &str) { + let mut cmd = build.tool_cmd(compiler, "compiletest"); + + // compiletest currently has... a lot of arguments, so let's just pass all + // of them! + + cmd.arg("--compile-lib-path").arg(build.rustc_libdir(compiler)); + cmd.arg("--run-lib-path").arg(build.sysroot_libdir(compiler, target)); + cmd.arg("--rustc-path").arg(build.compiler_path(compiler)); + cmd.arg("--rustdoc-path").arg(build.rustdoc(compiler)); + cmd.arg("--src-base").arg(build.src.join("src/test").join(suite)); + cmd.arg("--build-base").arg(testdir(build, compiler.host).join(suite)); + cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target)); + cmd.arg("--mode").arg(mode); + cmd.arg("--target").arg(target); + cmd.arg("--host").arg(compiler.host); + cmd.arg("--llvm-filecheck").arg(build.llvm_filecheck(&build.config.build)); + + let mut flags = format!("-Crpath"); + if build.config.rust_optimize_tests { + flags.push_str(" -O"); + } + if build.config.rust_debuginfo_tests { + flags.push_str(" -g"); + } + + cmd.arg("--host-rustcflags").arg(&flags); + + let linkflag = format!("-Lnative={}", build.test_helpers_out(target).display()); + cmd.arg("--target-rustcflags").arg(format!("{} {}", flags, linkflag)); + + // FIXME: needs android support + cmd.arg("--android-cross-path").arg(""); + + // FIXME: CFG_PYTHON should probably be detected more robustly elsewhere + let python_default = "python"; + cmd.arg("--docck-python").arg(python_default); + + if build.config.build.ends_with("apple-darwin") { + // Force /usr/bin/python on OSX for LLDB tests because we're loading the + // LLDB plugin's compiled module which only works with the system python + // (namely not Homebrew-installed python) + cmd.arg("--lldb-python").arg("/usr/bin/python"); + } else { + cmd.arg("--lldb-python").arg(python_default); + } + + if let Some(ref vers) = build.gdb_version { + cmd.arg("--gdb-version").arg(vers); + } + if let Some(ref vers) = build.lldb_version { + cmd.arg("--lldb-version").arg(vers); + } + if let Some(ref dir) = build.lldb_python_dir { + cmd.arg("--lldb-python-dir").arg(dir); + } + + cmd.args(&build.flags.args); + + if build.config.verbose || build.flags.verbose { + cmd.arg("--verbose"); + } + + // Only pass correct values for these flags for the `run-make` suite as it + // requires that a C++ compiler was configured which isn't always the case. + if suite == "run-make" { + let llvm_config = build.llvm_config(target); + let llvm_components = output(Command::new(&llvm_config).arg("--components")); + let llvm_cxxflags = output(Command::new(&llvm_config).arg("--cxxflags")); + cmd.arg("--cc").arg(build.cc(target)) + .arg("--cxx").arg(build.cxx(target)) + .arg("--cflags").arg(build.cflags(target).join(" ")) + .arg("--llvm-components").arg(llvm_components.trim()) + .arg("--llvm-cxxflags").arg(llvm_cxxflags.trim()); + } else { + cmd.arg("--cc").arg("") + .arg("--cxx").arg("") + .arg("--cflags").arg("") + .arg("--llvm-components").arg("") + .arg("--llvm-cxxflags").arg(""); + } + + // Running a C compiler on MSVC requires a few env vars to be set, to be + // sure to set them here. + if target.contains("msvc") { + for &(ref k, ref v) in build.cc[target].0.env() { + if k != "PATH" { + cmd.env(k, v); + } + } + } + build.add_bootstrap_key(compiler, &mut cmd); + + build.run(&mut cmd); +} + +/// Run `rustdoc --test` for all documentation in `src/doc`. +/// +/// This will run all tests in our markdown documentation (e.g. 
the book) +/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to +/// `compiler`. +pub fn docs(build: &Build, compiler: &Compiler) { + // Do a breadth-first traversal of the `src/doc` directory and just run + // tests for all files that end in `*.md` + let mut stack = vec![build.src.join("src/doc")]; + + while let Some(p) = stack.pop() { + if p.is_dir() { + stack.extend(t!(p.read_dir()).map(|p| t!(p).path())); + continue + } + + if p.extension().and_then(|s| s.to_str()) != Some("md") { + continue + } + + println!("doc tests for: {}", p.display()); + markdown_test(build, compiler, &p); + } +} + +/// Run the error index generator tool to execute the tests located in the error +/// index. +/// +/// The `error_index_generator` tool lives in `src/tools` and is used to +/// generate a markdown file from the error indexes of the code base which is +/// then passed to `rustdoc --test`. +pub fn error_index(build: &Build, compiler: &Compiler) { + println!("Testing error-index stage{}", compiler.stage); + + let output = testdir(build, compiler.host).join("error-index.md"); + build.run(build.tool_cmd(compiler, "error_index_generator") + .arg("markdown") + .arg(&output) + .env("CFG_BUILD", &build.config.build)); + + markdown_test(build, compiler, &output); +} + +fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) { + let mut cmd = Command::new(build.rustdoc(compiler)); + build.add_rustc_lib_path(compiler, &mut cmd); + cmd.arg("--test"); + cmd.arg(markdown); + cmd.arg("--test-args").arg(build.flags.args.join(" ")); + build.run(&mut cmd); +} + +/// Run all unit tests plus documentation tests for an entire crate DAG defined +/// by a `Cargo.toml` +/// +/// This is what runs tests for crates like the standard library, compiler, etc. +/// It essentially is the driver for running `cargo test`. +/// +/// Currently this runs all tests for a DAG by passing a bunch of `-p foo` +/// arguments, and those arguments are discovered from `Cargo.lock`. +pub fn krate(build: &Build, + compiler: &Compiler, + target: &str, + mode: Mode) { + let (name, path, features) = match mode { + Mode::Libstd => ("libstd", "src/rustc/std_shim", build.std_features()), + Mode::Libtest => ("libtest", "src/rustc/test_shim", String::new()), + Mode::Librustc => ("librustc", "src/rustc", build.rustc_features()), + _ => panic!("can only test libraries"), + }; + println!("Testing {} stage{} ({} -> {})", name, compiler.stage, + compiler.host, target); + + // Build up the base `cargo test` command. + let mut cargo = build.cargo(compiler, mode, target, "test"); + cargo.arg("--manifest-path") + .arg(build.src.join(path).join("Cargo.toml")) + .arg("--features").arg(features); + + // Generate a list of `-p` arguments to pass to the `cargo test` invocation + // by crawling the corresponding Cargo.lock file. + let lockfile = build.src.join(path).join("Cargo.lock"); + let mut contents = String::new(); + t!(t!(File::open(&lockfile)).read_to_string(&mut contents)); + let mut lines = contents.lines(); + while let Some(line) = lines.next() { + let prefix = "name = \""; + if !line.starts_with(prefix) { + continue + } + lines.next(); // skip `version = ...` + + // skip crates.io or otherwise non-path crates + if let Some(line) = lines.next() { + if line.starts_with("source") { + continue + } + } + + let crate_name = &line[prefix.len()..line.len() - 1]; + + // Right now jemalloc is our only target-specific crate in the sense + // that it's not present on all platforms. 
Custom skip it here for now, + // but if we add more this probably wants to get more generalized. + if crate_name.contains("jemalloc") { + continue + } + + cargo.arg("-p").arg(crate_name); + } + + // The tests are going to run with the *target* libraries, so we need to + // ensure that those libraries show up in the LD_LIBRARY_PATH equivalent. + // + // Note that to run the compiler we need to run with the *host* libraries, + // but our wrapper scripts arrange for that to be the case anyway. + let mut dylib_path = dylib_path(); + dylib_path.insert(0, build.sysroot_libdir(compiler, target)); + cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap()); + cargo.args(&build.flags.args); + + build.run(&mut cargo); } diff --git a/src/bootstrap/build/clean.rs b/src/bootstrap/build/clean.rs index 8f78fed001..1d407c9413 100644 --- a/src/bootstrap/build/clean.rs +++ b/src/bootstrap/build/clean.rs @@ -8,6 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Implementation of `make clean` in rustbuild. +//! +//! Responsible for cleaning out a build directory of all old and stale +//! artifacts to prepare for a fresh build. Currently doesn't remove the +//! `build/cache` directory (download cache) or the `build/$target/llvm` +//! directory as we want that cached between builds. + use std::fs; use std::path::Path; @@ -19,11 +26,14 @@ pub fn clean(build: &Build) { let out = build.out.join(host); rm_rf(build, &out.join("compiler-rt")); + rm_rf(build, &out.join("doc")); for stage in 0..4 { rm_rf(build, &out.join(format!("stage{}", stage))); rm_rf(build, &out.join(format!("stage{}-std", stage))); rm_rf(build, &out.join(format!("stage{}-rustc", stage))); + rm_rf(build, &out.join(format!("stage{}-tools", stage))); + rm_rf(build, &out.join(format!("stage{}-test", stage))); } } } diff --git a/src/bootstrap/build/compile.rs b/src/bootstrap/build/compile.rs index a67f1ba48b..5ed9c1c18c 100644 --- a/src/bootstrap/build/compile.rs +++ b/src/bootstrap/build/compile.rs @@ -8,6 +8,14 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Implementation of compiling various phases of the compiler and standard +//! library. +//! +//! This module contains some of the real meat in the rustbuild build system +//! which is where Cargo is used to compiler the standard library, libtest, and +//! compiler. This module is also responsible for assembling the sysroot as it +//! goes along from the output of the previous stage. + use std::collections::HashMap; use std::fs; use std::path::{Path, PathBuf}; @@ -35,6 +43,8 @@ pub fn std<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { copy(&build.compiler_rt_built.borrow()[target], &libdir.join(staticlib("compiler-rt", target))); + // Some platforms have startup objects that may be required to produce the + // libstd dynamic library, for example. build_startup_objects(build, target, &libdir); let out_dir = build.cargo_out(compiler, Mode::Libstd, target); @@ -154,7 +164,6 @@ pub fn test_link(build: &Build, add_to_sysroot(&out_dir, &libdir); } - /// Build the compiler. 
/// /// This will build the compiler for a particular stage of the build using @@ -179,7 +188,6 @@ pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { .env("CFG_VERSION", &build.version) .env("CFG_BOOTSTRAP_KEY", &build.bootstrap_key) .env("CFG_PREFIX", build.config.prefix.clone().unwrap_or(String::new())) - .env("RUSTC_BOOTSTRAP_KEY", &build.bootstrap_key) .env("CFG_LIBDIR_RELATIVE", "lib"); if let Some(ref ver_date) = build.ver_date { @@ -191,14 +199,7 @@ pub fn rustc<'a>(build: &'a Build, target: &str, compiler: &Compiler<'a>) { if !build.unstable_features { cargo.env("CFG_DISABLE_UNSTABLE_FEATURES", "1"); } - let target_config = build.config.target_config.get(target); - if let Some(ref s) = target_config.and_then(|c| c.llvm_config.as_ref()) { - cargo.env("LLVM_CONFIG", s); - } else { - let llvm_config = build.llvm_out(&build.config.build).join("bin") - .join(exe("llvm-config", target)); - cargo.env("LLVM_CONFIG", llvm_config); - } + cargo.env("LLVM_CONFIG", build.llvm_config(target)); if build.config.llvm_static_stdcpp { cargo.env("LLVM_STATIC_STDCPP", compiler_file(build.cxx(target), "libstdc++.a")); diff --git a/src/bootstrap/build/config.rs b/src/bootstrap/build/config.rs index 1e67c4a9a3..3c35b9a951 100644 --- a/src/bootstrap/build/config.rs +++ b/src/bootstrap/build/config.rs @@ -8,6 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Serialized configuration of a build. +//! +//! This module implements parsing `config.mk` and `config.toml` configuration +//! files to tweak how the build runs. + use std::collections::HashMap; use std::env; use std::fs::File; @@ -27,10 +32,13 @@ use toml::{Parser, Decoder, Value}; /// is generated from `./configure`. /// /// Note that this structure is not decoded directly into, but rather it is -/// filled out from the decoded forms of the structs below. +/// filled out from the decoded forms of the structs below. For documentation +/// each field, see the corresponding fields in +/// `src/bootstrap/config.toml.example`. #[derive(Default)] pub struct Config { pub ccache: bool, + pub ninja: bool, pub verbose: bool, pub submodules: bool, pub compiler_docs: bool, @@ -51,6 +59,8 @@ pub struct Config { pub rust_rpath: bool, pub rustc_default_linker: Option, pub rustc_default_ar: Option, + pub rust_optimize_tests: bool, + pub rust_debuginfo_tests: bool, pub build: String, pub host: Vec, @@ -107,6 +117,7 @@ struct Build { #[derive(RustcDecodable, Default)] struct Llvm { ccache: Option, + ninja: Option, assertions: Option, optimize: Option, version_check: Option, @@ -127,6 +138,8 @@ struct Rust { channel: Option, musl_root: Option, rpath: Option, + optimize_tests: Option, + debuginfo_tests: Option, } /// TOML representation of how each build target is configured. 
@@ -145,6 +158,7 @@ impl Config { config.llvm_optimize = true; config.use_jemalloc = true; config.rust_optimize = true; + config.rust_optimize_tests = true; config.submodules = true; config.docs = true; config.rust_rpath = true; @@ -200,9 +214,9 @@ impl Config { if let Some(ref llvm) = toml.llvm { set(&mut config.ccache, llvm.ccache); + set(&mut config.ninja, llvm.ninja); set(&mut config.llvm_assertions, llvm.assertions); set(&mut config.llvm_optimize, llvm.optimize); - set(&mut config.llvm_optimize, llvm.optimize); set(&mut config.llvm_version_check, llvm.version_check); set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp); } @@ -210,6 +224,8 @@ impl Config { set(&mut config.rust_debug_assertions, rust.debug_assertions); set(&mut config.rust_debuginfo, rust.debuginfo); set(&mut config.rust_optimize, rust.optimize); + set(&mut config.rust_optimize_tests, rust.optimize_tests); + set(&mut config.rust_debuginfo_tests, rust.debuginfo_tests); set(&mut config.rust_rpath, rust.rpath); set(&mut config.debug_jemalloc, rust.debug_jemalloc); set(&mut config.use_jemalloc, rust.use_jemalloc); @@ -248,6 +264,11 @@ impl Config { return config } + /// "Temporary" routine to parse `config.mk` into this configuration. + /// + /// While we still have `./configure` this implements the ability to decode + /// that configuration into this. This isn't exactly a full-blown makefile + /// parser, but hey it gets the job done! pub fn update_with_config_mk(&mut self) { let mut config = String::new(); File::open("config.mk").unwrap().read_to_string(&mut config).unwrap(); @@ -292,6 +313,8 @@ impl Config { ("JEMALLOC", self.use_jemalloc), ("DEBUG_JEMALLOC", self.debug_jemalloc), ("RPATH", self.rust_rpath), + ("OPTIMIZE_TESTS", self.rust_optimize_tests), + ("DEBUGINFO_TESTS", self.rust_debuginfo_tests), } match key { @@ -336,6 +359,12 @@ impl Config { .or_insert(Target::default()); target.ndk = Some(PathBuf::from(value)); } + "CFG_ARMV7_LINUX_ANDROIDEABI_NDK" if value.len() > 0 => { + let target = "armv7-linux-androideabi".to_string(); + let target = self.target_config.entry(target) + .or_insert(Target::default()); + target.ndk = Some(PathBuf::from(value)); + } "CFG_I686_LINUX_ANDROID_NDK" if value.len() > 0 => { let target = "i686-linux-androideabi".to_string(); let target = self.target_config.entry(target) diff --git a/src/bootstrap/build/dist.rs b/src/bootstrap/build/dist.rs index 6ae652bd66..088e89b658 100644 --- a/src/bootstrap/build/dist.rs +++ b/src/bootstrap/build/dist.rs @@ -8,6 +8,16 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Implementation of the various distribution aspects of the compiler. +//! +//! This module is responsible for creating tarballs of the standard library, +//! compiler, and documentation. This ends up being what we distribute to +//! everyone as well. +//! +//! No tarball is actually created literally in this file, but rather we shell +//! out to `rust-installer` still. This may one day be replaced with bits and +//! pieces of `rustup.rs`! + use std::fs::{self, File}; use std::io::Write; use std::path::{PathBuf, Path}; @@ -33,6 +43,9 @@ fn tmpdir(build: &Build) -> PathBuf { build.out.join("tmp/dist") } +/// Builds the `rust-docs` installer component. +/// +/// Slurps up documentation from the `stage`'s `host`. 
pub fn docs(build: &Build, stage: u32, host: &str) { println!("Dist docs stage{} ({})", stage, host); let name = format!("rust-docs-{}", package_vers(build)); @@ -68,6 +81,12 @@ pub fn docs(build: &Build, stage: u32, host: &str) { } } +/// Build the `rust-mingw` installer component. +/// +/// This contains all the bits and pieces to run the MinGW Windows targets +/// without any extra installed software (e.g. we bundle gcc, libraries, etc). +/// Currently just shells out to a python script, but that should be rewritten +/// in Rust. pub fn mingw(build: &Build, host: &str) { println!("Dist mingw ({})", host); let name = format!("rust-mingw-{}", package_vers(build)); @@ -102,6 +121,7 @@ pub fn mingw(build: &Build, host: &str) { t!(fs::remove_dir_all(&image)); } +/// Creates the `rustc` installer component. pub fn rustc(build: &Build, stage: u32, host: &str) { println!("Dist rustc stage{} ({})", stage, host); let name = format!("rustc-{}", package_vers(build)); @@ -195,29 +215,7 @@ pub fn rustc(build: &Build, stage: u32, host: &str) { cp_r(&build.src.join("man"), &image.join("share/man/man1")); // Debugger scripts - let cp_debugger_script = |file: &str| { - let dst = image.join("lib/rustlib/etc"); - t!(fs::create_dir_all(&dst)); - install(&build.src.join("src/etc/").join(file), &dst, 0o644); - }; - if host.contains("windows") { - // no debugger scripts - } else if host.contains("darwin") { - // lldb debugger scripts - install(&build.src.join("src/etc/rust-lldb"), &image.join("bin"), - 0o755); - - cp_debugger_script("lldb_rust_formatters.py"); - cp_debugger_script("debugger_pretty_printers_common.py"); - } else { - // gdb debugger scripts - install(&build.src.join("src/etc/rust-gdb"), &image.join("bin"), - 0o755); - - cp_debugger_script("gdb_load_rust_pretty_printers.py"); - cp_debugger_script("gdb_rust_pretty_printing.py"); - cp_debugger_script("debugger_pretty_printers_common.py"); - } + debugger_scripts(build, &image, host); // Misc license info let cp = |file: &str| { @@ -231,6 +229,37 @@ pub fn rustc(build: &Build, stage: u32, host: &str) { } } +/// Copies debugger scripts for `host` into the `sysroot` specified. +pub fn debugger_scripts(build: &Build, + sysroot: &Path, + host: &str) { + let cp_debugger_script = |file: &str| { + let dst = sysroot.join("lib/rustlib/etc"); + t!(fs::create_dir_all(&dst)); + install(&build.src.join("src/etc/").join(file), &dst, 0o644); + }; + if host.contains("windows-msvc") { + // no debugger scripts + } else { + cp_debugger_script("debugger_pretty_printers_common.py"); + + // gdb debugger scripts + install(&build.src.join("src/etc/rust-gdb"), &sysroot.join("bin"), + 0o755); + + cp_debugger_script("gdb_load_rust_pretty_printers.py"); + cp_debugger_script("gdb_rust_pretty_printing.py"); + + // lldb debugger scripts + install(&build.src.join("src/etc/rust-lldb"), &sysroot.join("bin"), + 0o755); + + cp_debugger_script("lldb_rust_formatters.py"); + } +} + +/// Creates the `rust-std` installer component as compiled by `compiler` for the +/// target `target`. pub fn std(build: &Build, compiler: &Compiler, target: &str) { println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host, target); diff --git a/src/bootstrap/build/doc.rs b/src/bootstrap/build/doc.rs index 5782dd5ec2..f7cc742277 100644 --- a/src/bootstrap/build/doc.rs +++ b/src/bootstrap/build/doc.rs @@ -8,6 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Documentation generation for rustbuild. +//! +//! 
This module implements generation for all bits and pieces of documentation +//! for the Rust project. This notably includes suites like the rust book, the +//! nomicon, standalone documentation, etc. +//! +//! Everything here is basically just a shim around calling either `rustbook` or +//! `rustdoc`. + use std::fs::{self, File}; use std::io::prelude::*; use std::path::Path; @@ -16,6 +25,11 @@ use std::process::Command; use build::{Build, Compiler, Mode}; use build::util::{up_to_date, cp_r}; +/// Invoke `rustbook` as compiled in `stage` for `target` for the doc book +/// `name` into the `out` path. +/// +/// This will not actually generate any documentation if the documentation has +/// already been generated. pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) { t!(fs::create_dir_all(out)); @@ -35,6 +49,14 @@ pub fn rustbook(build: &Build, stage: u32, target: &str, name: &str, out: &Path) .arg(out)); } +/// Generates all standalone documentation as compiled by the rustdoc in `stage` +/// for the `target` into `out`. +/// +/// This will list all of `src/doc` looking for markdown files and appropriately +/// perform transformations like substituting `VERSION`, `SHORT_HASH`, and +/// `STAMP` alongw ith providing the various header/footer HTML we've cutomized. +/// +/// In the end, this is just a glorified wrapper around rustdoc! pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) { println!("Documenting stage{} standalone ({})", stage, target); t!(fs::create_dir_all(out)); @@ -105,6 +127,10 @@ pub fn standalone(build: &Build, stage: u32, target: &str, out: &Path) { } } +/// Compile all standard library documentation. +/// +/// This will generate all documentation for the standard library and its +/// dependencies. This is largely just a wrapper around `cargo doc`. pub fn std(build: &Build, stage: u32, target: &str, out: &Path) { println!("Documenting stage{} std ({})", stage, target); t!(fs::create_dir_all(out)); @@ -123,6 +149,10 @@ pub fn std(build: &Build, stage: u32, target: &str, out: &Path) { cp_r(&out_dir, out) } +/// Compile all libtest documentation. +/// +/// This will generate all documentation for libtest and its dependencies. This +/// is largely just a wrapper around `cargo doc`. pub fn test(build: &Build, stage: u32, target: &str, out: &Path) { println!("Documenting stage{} test ({})", stage, target); let compiler = Compiler::new(stage, &build.config.build); @@ -139,6 +169,10 @@ pub fn test(build: &Build, stage: u32, target: &str, out: &Path) { cp_r(&out_dir, out) } +/// Generate all compiler documentation. +/// +/// This will generate all documentation for the compiler libraries and their +/// dependencies. This is largely just a wrapper around `cargo doc`. pub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) { println!("Documenting stage{} compiler ({})", stage, target); let compiler = Compiler::new(stage, &build.config.build); @@ -156,6 +190,8 @@ pub fn rustc(build: &Build, stage: u32, target: &str, out: &Path) { cp_r(&out_dir, out) } +/// Generates the HTML rendered error-index by running the +/// `error_index_generator` tool. pub fn error_index(build: &Build, stage: u32, target: &str, out: &Path) { println!("Documenting stage{} error index ({})", stage, target); t!(fs::create_dir_all(out)); diff --git a/src/bootstrap/build/flags.rs b/src/bootstrap/build/flags.rs index d91dfe0903..d925997f36 100644 --- a/src/bootstrap/build/flags.rs +++ b/src/bootstrap/build/flags.rs @@ -8,6 +8,11 @@ // option. 
This file may not be copied, modified, or distributed // except according to those terms. +//! Command-line interface of the rustbuild build system. +//! +//! This module implements the command-line parsing of the build system which +//! has various flags to configure how it's run. + use std::fs; use std::path::PathBuf; use std::process; @@ -15,6 +20,7 @@ use std::slice; use getopts::Options; +/// Deserialized version of all flags for this compile. pub struct Flags { pub verbose: bool, pub stage: Option, @@ -62,11 +68,6 @@ impl Flags { usage(0); } - if m.free.len() > 0 { - println!("free arguments are not currently accepted"); - usage(1); - } - let cfg_file = m.opt_str("config").map(PathBuf::from).or_else(|| { if fs::metadata("config.toml").is_ok() { Some(PathBuf::from("config.toml")) diff --git a/src/bootstrap/build/job.rs b/src/bootstrap/build/job.rs index a4e53bc45f..4558e6f049 100644 --- a/src/bootstrap/build/job.rs +++ b/src/bootstrap/build/job.rs @@ -54,7 +54,7 @@ pub unsafe fn setup() { // Indicate that when all handles to the job object are gone that all // process in the object should be killed. Note that this includes our - // entire process tree by default because we've added ourselves and and our + // entire process tree by default because we've added ourselves and our // children will reside in the job by default. let mut info = mem::zeroed::(); info.BasicLimitInformation.LimitFlags = JOB_OBJECT_LIMIT_KILL_ON_JOB_CLOSE; diff --git a/src/bootstrap/build/mod.rs b/src/bootstrap/build/mod.rs index 248bf6cb4e..ebc05c5f61 100644 --- a/src/bootstrap/build/mod.rs +++ b/src/bootstrap/build/mod.rs @@ -8,6 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Implementation of rustbuild, the Rust build system. +//! +//! This module, and its descendants, are the implementation of the Rust build +//! system. Most of this build system is backed by Cargo but the outer layer +//! here serves as the ability to orchestrate calling Cargo, sequencing Cargo +//! builds, building artifacts like LLVM, etc. +//! +//! More documentation can be found in each respective module below. + use std::cell::RefCell; use std::collections::HashMap; use std::env; @@ -21,6 +30,14 @@ use num_cpus; use build::util::{exe, mtime, libdir, add_lib_path}; +/// A helper macro to `unwrap` a result except also print out details like: +/// +/// * The file/line of the panic +/// * The expression that failed +/// * The error itself +/// +/// This is currently used judiciously throughout the build system rather than +/// using a `Result` with `try!`, but this may change on day... macro_rules! t { ($e:expr) => (match $e { Ok(e) => e, @@ -53,12 +70,27 @@ mod job { pub use build::config::Config; pub use build::flags::Flags; +/// A structure representing a Rust compiler. +/// +/// Each compiler has a `stage` that it is associated with and a `host` that +/// corresponds to the platform the compiler runs on. This structure is used as +/// a parameter to many methods below. #[derive(Eq, PartialEq, Clone, Copy, Hash, Debug)] pub struct Compiler<'a> { stage: u32, host: &'a str, } +/// Global configuration for the build system. +/// +/// This structure transitively contains all configuration for the build system. +/// All filesystem-encoded configuration is in `config`, all flags are in +/// `flags`, and then parsed or probed information is listed in the keys below. 
+/// +/// This structure is a parameter of almost all methods in the build system, +/// although most functions are implemented as free functions rather than +/// methods specifically on this structure itself (to make it easier to +/// organize). pub struct Build { // User-specified configuration via config.toml config: Config, @@ -79,6 +111,12 @@ pub struct Build { version: String, package_vers: String, bootstrap_key: String, + bootstrap_key_stage0: String, + + // Probed tools at runtime + gdb_version: Option, + lldb_version: Option, + lldb_python_dir: Option, // Runtime state filled in later on cc: HashMap, @@ -86,14 +124,33 @@ pub struct Build { compiler_rt_built: RefCell>, } +/// The various "modes" of invoking Cargo. +/// +/// These entries currently correspond to the various output directories of the +/// build system, with each mod generating output in a different directory. pub enum Mode { + /// This cargo is going to build the standard library, placing output in the + /// "stageN-std" directory. Libstd, + + /// This cargo is going to build libtest, placing output in the + /// "stageN-test" directory. Libtest, + + /// This cargo is going to build librustc and compiler libraries, placing + /// output in the "stageN-rustc" directory. Librustc, + + /// This cargo is going to some build tool, placing output in the + /// "stageN-tools" directory. Tool, } impl Build { + /// Creates a new set of build configuration from the `flags` on the command + /// line and the filesystem `config`. + /// + /// By default all build output will be placed in the current directory. pub fn new(flags: Flags, config: Config) -> Build { let cwd = t!(env::current_dir()); let src = flags.src.clone().unwrap_or(cwd.clone()); @@ -124,13 +181,18 @@ impl Build { ver_date: None, version: String::new(), bootstrap_key: String::new(), + bootstrap_key_stage0: String::new(), package_vers: String::new(), cc: HashMap::new(), cxx: HashMap::new(), compiler_rt_built: RefCell::new(HashMap::new()), + gdb_version: None, + lldb_version: None, + lldb_python_dir: None, } } + /// Executes the entire build, as configured by the flags and configuration. pub fn build(&mut self) { use build::step::Source::*; @@ -151,6 +213,16 @@ impl Build { self.verbose("updating submodules"); self.update_submodules(); + // The main loop of the build system. + // + // The `step::all` function returns a topographically sorted list of all + // steps that need to be executed as part of this build. Each step has a + // corresponding entry in `step.rs` and indicates some unit of work that + // needs to be done as part of the build. + // + // Almost all of these are simple one-liners that shell out to the + // corresponding functionality in the extra modules, where more + // documentation can be found. 
for target in step::all(self) { let doc_out = self.out.join(&target.target).join("doc"); match target.src { @@ -160,6 +232,9 @@ impl Build { CompilerRt { _dummy } => { native::compiler_rt(self, target.target); } + TestHelpers { _dummy } => { + native::test_helpers(self, target.target); + } Libstd { compiler } => { compile::std(self, target.target, &compiler); } @@ -197,6 +272,12 @@ impl Build { ToolCargoTest { stage } => { compile::tool(self, stage, target.target, "cargotest"); } + ToolTidy { stage } => { + compile::tool(self, stage, target.target, "tidy"); + } + ToolCompiletest { stage } => { + compile::tool(self, stage, target.target, "compiletest"); + } DocBook { stage } => { doc::rustbook(self, stage, target.target, "book", &doc_out); } @@ -230,12 +311,129 @@ impl Build { CheckCargoTest { stage } => { check::cargotest(self, stage, target.target); } + CheckTidy { stage } => { + check::tidy(self, stage, target.target); + } + CheckRPass { compiler } => { + check::compiletest(self, &compiler, target.target, + "run-pass", "run-pass"); + } + CheckRPassFull { compiler } => { + check::compiletest(self, &compiler, target.target, + "run-pass", "run-pass-fulldeps"); + } + CheckCFail { compiler } => { + check::compiletest(self, &compiler, target.target, + "compile-fail", "compile-fail"); + } + CheckCFailFull { compiler } => { + check::compiletest(self, &compiler, target.target, + "compile-fail", "compile-fail-fulldeps") + } + CheckPFail { compiler } => { + check::compiletest(self, &compiler, target.target, + "parse-fail", "parse-fail"); + } + CheckRFail { compiler } => { + check::compiletest(self, &compiler, target.target, + "run-fail", "run-fail"); + } + CheckRFailFull { compiler } => { + check::compiletest(self, &compiler, target.target, + "run-fail", "run-fail-fulldeps"); + } + CheckPretty { compiler } => { + check::compiletest(self, &compiler, target.target, + "pretty", "pretty"); + } + CheckPrettyRPass { compiler } => { + check::compiletest(self, &compiler, target.target, + "pretty", "run-pass"); + } + CheckPrettyRPassFull { compiler } => { + check::compiletest(self, &compiler, target.target, + "pretty", "run-pass-fulldeps"); + } + CheckPrettyRFail { compiler } => { + check::compiletest(self, &compiler, target.target, + "pretty", "run-fail"); + } + CheckPrettyRFailFull { compiler } => { + check::compiletest(self, &compiler, target.target, + "pretty", "run-fail-fulldeps"); + } + CheckPrettyRPassValgrind { compiler } => { + check::compiletest(self, &compiler, target.target, + "pretty", "run-pass-valgrind"); + } + CheckCodegen { compiler } => { + check::compiletest(self, &compiler, target.target, + "codegen", "codegen"); + } + CheckCodegenUnits { compiler } => { + check::compiletest(self, &compiler, target.target, + "codegen-units", "codegen-units"); + } + CheckIncremental { compiler } => { + check::compiletest(self, &compiler, target.target, + "incremental", "incremental"); + } + CheckUi { compiler } => { + check::compiletest(self, &compiler, target.target, + "ui", "ui"); + } + CheckDebuginfo { compiler } => { + if target.target.contains("msvc") || + target.target.contains("android") { + // nothing to do + } else if target.target.contains("apple") { + check::compiletest(self, &compiler, target.target, + "debuginfo-lldb", "debuginfo"); + } else { + check::compiletest(self, &compiler, target.target, + "debuginfo-gdb", "debuginfo"); + } + } + CheckRustdoc { compiler } => { + check::compiletest(self, &compiler, target.target, + "rustdoc", "rustdoc"); + } + CheckRPassValgrind { compiler } => { + 
check::compiletest(self, &compiler, target.target, + "run-pass-valgrind", "run-pass-valgrind"); + } + CheckDocs { compiler } => { + check::docs(self, &compiler); + } + CheckErrorIndex { compiler } => { + check::error_index(self, &compiler); + } + CheckRMake { compiler } => { + check::compiletest(self, &compiler, target.target, + "run-make", "run-make") + } + CheckCrateStd { compiler } => { + check::krate(self, &compiler, target.target, Mode::Libstd) + } + CheckCrateTest { compiler } => { + check::krate(self, &compiler, target.target, Mode::Libtest) + } + CheckCrateRustc { compiler } => { + check::krate(self, &compiler, target.target, Mode::Librustc) + } DistDocs { stage } => dist::docs(self, stage, target.target), DistMingw { _dummy } => dist::mingw(self, target.target), DistRustc { stage } => dist::rustc(self, stage, target.target), DistStd { compiler } => dist::std(self, &compiler, target.target), + DebuggerScripts { stage } => { + let compiler = Compiler::new(stage, target.target); + dist::debugger_scripts(self, + &self.sysroot(&compiler), + target.target); + } + Dist { .. } | Doc { .. } | // pseudo-steps Check { .. } => {} @@ -243,6 +441,10 @@ impl Build { } } + /// Updates all git submodules that we have. + /// + /// This will detect if any submodules are out of date an run the necessary + /// commands to sync them all with upstream. fn update_submodules(&self) { if !self.config.submodules { return @@ -255,6 +457,11 @@ impl Build { cmd.current_dir(&self.src).arg("submodule"); return cmd }; + + // FIXME: this takes a seriously long time to execute on Windows and a + // nontrivial amount of time on Unix, we should have a better way + // of detecting whether we need to run all the submodule commands + // below. let out = output(git_submodule().arg("status")); if !out.lines().any(|l| l.starts_with("+") || l.starts_with("-")) { return @@ -271,8 +478,9 @@ impl Build { .arg("git").arg("checkout").arg(".")); } - /// Clear out `dir` if our build has been flagged as dirty, and also set - /// ourselves as dirty if `file` changes when `f` is executed. + /// Clear out `dir` if `input` is newer. + /// + /// After this executes, it will also ensure that `dir` exists. fn clear_if_dirty(&self, dir: &Path, input: &Path) { let stamp = dir.join(".stamp"); if mtime(&stamp) < mtime(input) { @@ -286,8 +494,10 @@ impl Build { /// Prepares an invocation of `cargo` to be run. /// /// This will create a `Command` that represents a pending execution of - /// Cargo for the specified stage, whether or not the standard library is - /// being built, and using the specified compiler targeting `target`. + /// Cargo. This cargo will be configured to use `compiler` as the actual + /// rustc compiler, its output will be scoped by `mode`'s output directory, + /// it will pass the `--target` flag for the specified `target`, and will be + /// executing the Cargo command `cmd`. fn cargo(&self, compiler: &Compiler, mode: Mode, @@ -303,6 +513,9 @@ impl Build { // Customize the compiler we're running. Specify the compiler to cargo // as our shim and then pass it some various options used to configure // how the actual compiler itself is called. 
+ // + // These variables are primarily all read by + // src/bootstrap/{rustc,rustdoc.rs} cargo.env("RUSTC", self.out.join("bootstrap/debug/rustc")) .env("RUSTC_REAL", self.compiler_path(compiler)) .env("RUSTC_STAGE", compiler.stage.to_string()) @@ -313,12 +526,15 @@ impl Build { self.config.rust_debug_assertions.to_string()) .env("RUSTC_SNAPSHOT", &self.rustc) .env("RUSTC_SYSROOT", self.sysroot(compiler)) + .env("RUSTC_LIBDIR", self.rustc_libdir(compiler)) .env("RUSTC_SNAPSHOT_LIBDIR", self.rustc_snapshot_libdir()) .env("RUSTC_RPATH", self.config.rust_rpath.to_string()) .env("RUSTDOC", self.out.join("bootstrap/debug/rustdoc")) .env("RUSTDOC_REAL", self.rustdoc(compiler)) .env("RUSTC_FLAGS", self.rustc_flags(target).join(" ")); + self.add_bootstrap_key(compiler, &mut cargo); + // Specify some various options for build scripts used throughout // the build. // @@ -337,7 +553,7 @@ impl Build { // Environment variables *required* needed throughout the build // - // FIXME: should update code to not require this env vars + // FIXME: should update code to not require this env var cargo.env("CFG_COMPILER_HOST_TRIPLE", target); if self.config.verbose || self.flags.verbose { @@ -346,7 +562,6 @@ impl Build { if self.config.rust_optimize { cargo.arg("--release"); } - self.add_rustc_lib_path(compiler, &mut cargo); return cargo } @@ -382,6 +597,7 @@ impl Build { self.cargo_out(compiler, Mode::Libstd, host).join("deps"), self.cargo_out(compiler, Mode::Libtest, host).join("deps"), self.cargo_out(compiler, Mode::Librustc, host).join("deps"), + self.cargo_out(compiler, Mode::Tool, host).join("deps"), ]; add_lib_path(paths, &mut cmd); return cmd @@ -415,6 +631,12 @@ impl Build { if self.config.rust_optimize {"release"} else {"debug"} } + /// Returns the sysroot for the `compiler` specified that *this build system + /// generates*. + /// + /// That is, the sysroot for the stage0 compiler is not what the compiler + /// thinks it is by default, but it's the same as the default for stages + /// 1-3. fn sysroot(&self, compiler: &Compiler) -> PathBuf { if compiler.stage == 0 { self.out.join(compiler.host).join("stage0-sysroot") @@ -423,6 +645,8 @@ impl Build { } } + /// Returns the libdir where the standard library and other artifacts are + /// found for a compiler's sysroot. fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf { self.sysroot(compiler).join("lib").join("rustlib") .join(target).join("lib") @@ -436,7 +660,8 @@ impl Build { let suffix = match mode { Mode::Libstd => "-std", Mode::Libtest => "-test", - Mode::Tool | Mode::Librustc => "-rustc", + Mode::Tool => "-tools", + Mode::Librustc => "-rustc", }; self.out.join(compiler.host) .join(format!("stage{}{}", compiler.stage, suffix)) @@ -453,24 +678,85 @@ impl Build { } /// Root output directory for LLVM compiled for `target` + /// + /// Note that if LLVM is configured externally then the directory returned + /// will likely be empty. fn llvm_out(&self, target: &str) -> PathBuf { self.out.join(target).join("llvm") } + /// Returns the path to `llvm-config` for the specified target. + /// + /// If a custom `llvm-config` was specified for target then that's returned + /// instead. 
+ fn llvm_config(&self, target: &str) -> PathBuf { + let target_config = self.config.target_config.get(target); + if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { + s.clone() + } else { + self.llvm_out(&self.config.build).join("bin") + .join(exe("llvm-config", target)) + } + } + + /// Returns the path to `FileCheck` binary for the specified target + fn llvm_filecheck(&self, target: &str) -> PathBuf { + let target_config = self.config.target_config.get(target); + if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) { + s.parent().unwrap().join(exe("FileCheck", target)) + } else { + let base = self.llvm_out(&self.config.build).join("build"); + let exe = exe("FileCheck", target); + if self.config.build.contains("msvc") { + base.join("Release/bin").join(exe) + } else { + base.join("bin").join(exe) + } + } + } + /// Root output directory for compiler-rt compiled for `target` fn compiler_rt_out(&self, target: &str) -> PathBuf { self.out.join(target).join("compiler-rt") } + /// Root output directory for rust_test_helpers library compiled for + /// `target` + fn test_helpers_out(&self, target: &str) -> PathBuf { + self.out.join(target).join("rust-test-helpers") + } + + /// Adds the compiler's directory of dynamic libraries to `cmd`'s dynamic + /// library lookup path. fn add_rustc_lib_path(&self, compiler: &Compiler, cmd: &mut Command) { // Windows doesn't need dylib path munging because the dlls for the // compiler live next to the compiler and the system will find them // automatically. - if cfg!(windows) { return } + if cfg!(windows) { + return + } add_lib_path(vec![self.rustc_libdir(compiler)], cmd); } + /// Adds the compiler's bootstrap key to the environment of `cmd`. + fn add_bootstrap_key(&self, compiler: &Compiler, cmd: &mut Command) { + // In stage0 we're using a previously released stable compiler, so we + // use the stage0 bootstrap key. Otherwise we use our own build's + // bootstrap key. + let bootstrap_key = if compiler.is_snapshot(self) { + &self.bootstrap_key_stage0 + } else { + &self.bootstrap_key + }; + cmd.env("RUSTC_BOOTSTRAP_KEY", bootstrap_key); + } + + /// Returns the compiler's libdir where it stores the dynamic libraries that + /// it itself links against. + /// + /// For example this returns `/lib` on Unix and `/bin` on + /// Windows. fn rustc_libdir(&self, compiler: &Compiler) -> PathBuf { if compiler.is_snapshot(self) { self.rustc_snapshot_libdir() @@ -479,33 +765,44 @@ impl Build { } } + /// Returns the libdir of the snapshot compiler. fn rustc_snapshot_libdir(&self) -> PathBuf { self.rustc.parent().unwrap().parent().unwrap() .join(libdir(&self.config.build)) } + /// Runs a command, printing out nice contextual information if it fails. fn run(&self, cmd: &mut Command) { self.verbose(&format!("running: {:?}", cmd)); run_silent(cmd) } + /// Prints a message if this build is configured in verbose mode. fn verbose(&self, msg: &str) { if self.flags.verbose || self.config.verbose { println!("{}", msg); } } + /// Returns the number of parallel jobs that have been configured for this + /// build. fn jobs(&self) -> u32 { self.flags.jobs.unwrap_or(num_cpus::get() as u32) } + /// Returns the path to the C compiler for the target specified. fn cc(&self, target: &str) -> &Path { self.cc[target].0.path() } + /// Returns a list of flags to pass to the C compiler for the target + /// specified. 
fn cflags(&self, target: &str) -> Vec<String> { + // Filter out -O and /O (the optimization flags) that we picked up from + // gcc-rs because the build scripts will determine that for themselves. let mut base = self.cc[target].0.args().iter() .map(|s| s.to_string_lossy().into_owned()) + .filter(|s| !s.starts_with("-O") && !s.starts_with("/O")) .collect::<Vec<_>>(); // If we're compiling on OSX then we add a few unconditional flags @@ -519,15 +816,26 @@ impl Build { return base } + /// Returns the path to the `ar` archive utility for the target specified. fn ar(&self, target: &str) -> &Path { &self.cc[target].1 } + /// Returns the path to the C++ compiler for the target specified, may panic + /// if no C++ compiler was configured for the target. fn cxx(&self, target: &str) -> &Path { self.cxx[target].path() } + /// Returns flags to pass to the compiler to generate code for `target`. fn rustc_flags(&self, target: &str) -> Vec<String> { + // New flags should be added here with great caution! + // + // It's quite unfortunate to **require** flags to generate code for a + // target, so it should only be passed here if absolutely necessary! + // Most default configuration should be done through target specs rather + // than an entry here. + let mut base = Vec::new(); if target != self.config.build && !target.contains("msvc") { base.push(format!("-Clinker={}", self.cc(target).display())); @@ -537,10 +845,12 @@ impl Build { } impl<'a> Compiler<'a> { + /// Creates a new compiler for the specified stage/host fn new(stage: u32, host: &'a str) -> Compiler<'a> { Compiler { stage: stage, host: host } } + /// Returns whether this is a snapshot compiler for `build`'s configuration fn is_snapshot(&self, build: &Build) -> bool { self.stage == 0 && self.host == build.config.build } diff --git a/src/bootstrap/build/native.rs b/src/bootstrap/build/native.rs index bf0494bcd8..5691b2da6a 100644 --- a/src/bootstrap/build/native.rs +++ b/src/bootstrap/build/native.rs @@ -8,16 +8,28 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Compilation of native dependencies like LLVM. +//! +//! Native projects like LLVM unfortunately aren't suited just yet for +//! compilation in build scripts that Cargo has. This is because the +//! compilation takes a *very* long time but also because we don't want to +//! compile LLVM 3 times as part of a normal bootstrap (we want it cached). +//! +//! LLVM and compiler-rt are essentially just wired up to everything else to +//! ensure that they're always in place if needed. + use std::path::Path; use std::process::Command; use std::fs; use build_helper::output; use cmake; +use gcc; use build::Build; -use build::util::{exe, staticlib}; +use build::util::{exe, staticlib, up_to_date}; +/// Compile LLVM for `target`. pub fn llvm(build: &Build, target: &str) { // If we're using a custom LLVM bail out here, but we can only use a // custom LLVM for the build triple. @@ -43,6 +55,9 @@ pub fn llvm(build: &Build, target: &str) { // http://llvm.org/docs/CMake.html let mut cfg = cmake::Config::new(build.src.join("src/llvm")); + if build.config.ninja { + cfg.generator("Ninja"); + } cfg.target(target) .host(&build.config.build) .out_dir(&dst) @@ -112,6 +127,10 @@ fn check_llvm_version(build: &Build, llvm_config: &Path) { panic!("\n\nbad LLVM version: {}, need >=3.5\n\n", version) } +/// Compiles the `compiler-rt` library, or at least the builtins part of it.
+/// +/// This uses the CMake build system and an existing LLVM build directory to +/// compile the project. pub fn compiler_rt(build: &Build, target: &str) { let dst = build.compiler_rt_out(target); let arch = target.split('-').next().unwrap(); @@ -149,9 +168,7 @@ pub fn compiler_rt(build: &Build, target: &str) { } let _ = fs::remove_dir_all(&dst); t!(fs::create_dir_all(&dst)); - let build_llvm_config = build.llvm_out(&build.config.build) - .join("bin") - .join(exe("llvm-config", &build.config.build)); + let build_llvm_config = build.llvm_config(&build.config.build); let mut cfg = cmake::Config::new(build.src.join("src/compiler-rt")); cfg.target(target) .host(&build.config.build) @@ -168,3 +185,25 @@ pub fn compiler_rt(build: &Build, target: &str) { .build_target(&build_target); cfg.build(); } + +/// Compiles the `rust_test_helpers.c` library which we used in various +/// `run-pass` test suites for ABI testing. +pub fn test_helpers(build: &Build, target: &str) { + let dst = build.test_helpers_out(target); + let src = build.src.join("src/rt/rust_test_helpers.c"); + if up_to_date(&src, &dst.join("librust_test_helpers.a")) { + return + } + + println!("Building test helpers"); + t!(fs::create_dir_all(&dst)); + let mut cfg = gcc::Config::new(); + cfg.cargo_metadata(false) + .out_dir(&dst) + .target(target) + .host(&build.config.build) + .opt_level(0) + .debug(false) + .file(build.src.join("src/rt/rust_test_helpers.c")) + .compile("librust_test_helpers.a"); +} diff --git a/src/bootstrap/build/sanity.rs b/src/bootstrap/build/sanity.rs index 6ce2749638..a290527742 100644 --- a/src/bootstrap/build/sanity.rs +++ b/src/bootstrap/build/sanity.rs @@ -8,6 +8,16 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Sanity checking performed by rustbuild before actually executing anything. +//! +//! This module contains the implementation of ensuring that the build +//! environment looks reasonable before progressing. This will verify that +//! various programs like git and python exist, along with ensuring that all C +//! compilers for cross-compiling are found. +//! +//! In theory if we get past this phase it's a bug if a build fails, but in +//! practice that's likely not true! 
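To make the kind of probe described above concrete, here is a small, self-contained sketch of a "does this program exist?" check. It is illustrative only and is not the `need_cmd` helper this file actually defines (which, judging from the call sites below, takes OS-string arguments); it simply resolves a program name against `PATH` and fails loudly if the tool is missing:

    use std::env;
    use std::ffi::OsString;
    use std::path::PathBuf;

    // Look for `cmd` in every directory listed in PATH.
    fn find_cmd(cmd: &str) -> Option<PathBuf> {
        let path = env::var_os("PATH").unwrap_or(OsString::new());
        env::split_paths(&path)
            .map(|dir| dir.join(cmd))
            .find(|candidate| candidate.is_file())
    }

    // Abort the build early with a readable message instead of failing much
    // later with a confusing error from a missing tool.
    fn need_cmd(cmd: &str) {
        if find_cmd(cmd).is_none() {
            panic!("\n\ncouldn't find required command: {:?}\n\n", cmd);
        }
    }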
+ use std::collections::HashSet; use std::env; use std::ffi::{OsStr, OsString}; @@ -48,6 +58,9 @@ pub fn check(build: &mut Build) { } } need_cmd("cmake".as_ref()); + if build.config.ninja { + need_cmd("ninja".as_ref()) + } break } @@ -63,6 +76,12 @@ pub fn check(build: &mut Build) { need_cmd(build.cxx(host).as_ref()); } + // Externally configured LLVM requires FileCheck to exist + let filecheck = build.llvm_filecheck(&build.config.build); + if !filecheck.starts_with(&build.out) && !filecheck.exists() { + panic!("filecheck executable {:?} does not exist", filecheck); + } + for target in build.config.target.iter() { // Either can't build or don't want to run jemalloc on these targets if target.contains("rumprun") || @@ -131,4 +150,17 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake target); } } + + let run = |cmd: &mut Command| { + cmd.output().map(|output| { + String::from_utf8_lossy(&output.stdout) + .lines().next().unwrap() + .to_string() + }) + }; + build.gdb_version = run(Command::new("gdb").arg("--version")).ok(); + build.lldb_version = run(Command::new("lldb").arg("--version")).ok(); + if build.lldb_version.is_some() { + build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok(); + } } diff --git a/src/bootstrap/build/step.rs b/src/bootstrap/build/step.rs index 80fcc32e53..742fd8575b 100644 --- a/src/bootstrap/build/step.rs +++ b/src/bootstrap/build/step.rs @@ -8,6 +8,18 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Major workhorse of rustbuild, definition and dependencies between stages of +//! the copmile. +//! +//! The primary purpose of this module is to define the various `Step`s of +//! execution of the build. Each `Step` has a corresponding `Source` indicating +//! what it's actually doing along with a number of dependencies which must be +//! executed first. +//! +//! This module will take the CLI as input and calculate the steps required for +//! the build requested, ensuring that all intermediate pieces are in place. +//! Essentially this module is a `make`-replacement, but not as good. + use std::collections::HashSet; use build::{Build, Compiler}; @@ -18,6 +30,15 @@ pub struct Step<'a> { pub target: &'a str, } +/// Macro used to iterate over all targets that are recognized by the build +/// system. +/// +/// Whenever a new step is added it will involve adding an entry here, updating +/// the dependencies section below, and then adding an implementation of the +/// step in `build/mod.rs`. +/// +/// This macro takes another macro as an argument and then calls that macro with +/// all steps that the build system knows about. macro_rules! targets { ($m:ident) => { $m! { @@ -51,6 +72,8 @@ macro_rules! targets { (tool_rustbook, ToolRustbook { stage: u32 }), (tool_error_index, ToolErrorIndex { stage: u32 }), (tool_cargotest, ToolCargoTest { stage: u32 }), + (tool_tidy, ToolTidy { stage: u32 }), + (tool_compiletest, ToolCompiletest { stage: u32 }), // Steps for long-running native builds. Ideally these wouldn't // actually exist and would be part of build scripts, but for now @@ -60,6 +83,8 @@ macro_rules! targets { // with braces are unstable so we just pick something that works. 
(llvm, Llvm { _dummy: () }), (compiler_rt, CompilerRt { _dummy: () }), + (test_helpers, TestHelpers { _dummy: () }), + (debugger_scripts, DebuggerScripts { stage: u32 }), // Steps for various pieces of documentation that we can generate, // the 'doc' step is just a pseudo target to depend on a bunch of @@ -79,6 +104,33 @@ macro_rules! targets { (check, Check { stage: u32, compiler: Compiler<'a> }), (check_linkcheck, CheckLinkcheck { stage: u32 }), (check_cargotest, CheckCargoTest { stage: u32 }), + (check_tidy, CheckTidy { stage: u32 }), + (check_rpass, CheckRPass { compiler: Compiler<'a> }), + (check_rpass_full, CheckRPassFull { compiler: Compiler<'a> }), + (check_rpass_valgrind, CheckRPassValgrind { compiler: Compiler<'a> }), + (check_rfail, CheckRFail { compiler: Compiler<'a> }), + (check_rfail_full, CheckRFailFull { compiler: Compiler<'a> }), + (check_cfail, CheckCFail { compiler: Compiler<'a> }), + (check_cfail_full, CheckCFailFull { compiler: Compiler<'a> }), + (check_pfail, CheckPFail { compiler: Compiler<'a> }), + (check_pretty, CheckPretty { compiler: Compiler<'a> }), + (check_pretty_rpass, CheckPrettyRPass { compiler: Compiler<'a> }), + (check_pretty_rpass_full, CheckPrettyRPassFull { compiler: Compiler<'a> }), + (check_pretty_rfail, CheckPrettyRFail { compiler: Compiler<'a> }), + (check_pretty_rfail_full, CheckPrettyRFailFull { compiler: Compiler<'a> }), + (check_pretty_rpass_valgrind, CheckPrettyRPassValgrind { compiler: Compiler<'a> }), + (check_codegen, CheckCodegen { compiler: Compiler<'a> }), + (check_codegen_units, CheckCodegenUnits { compiler: Compiler<'a> }), + (check_incremental, CheckIncremental { compiler: Compiler<'a> }), + (check_ui, CheckUi { compiler: Compiler<'a> }), + (check_debuginfo, CheckDebuginfo { compiler: Compiler<'a> }), + (check_rustdoc, CheckRustdoc { compiler: Compiler<'a> }), + (check_docs, CheckDocs { compiler: Compiler<'a> }), + (check_error_index, CheckErrorIndex { compiler: Compiler<'a> }), + (check_rmake, CheckRMake { compiler: Compiler<'a> }), + (check_crate_std, CheckCrateStd { compiler: Compiler<'a> }), + (check_crate_test, CheckCrateTest { compiler: Compiler<'a> }), + (check_crate_rustc, CheckCrateRustc { compiler: Compiler<'a> }), // Distribution targets, creating tarballs (dist, Dist { stage: u32 }), @@ -90,6 +142,9 @@ macro_rules! targets { } } +// Define the `Source` enum by iterating over all the steps and peeling out just +// the types that we want to define. + macro_rules! item { ($a:item) => ($a) } macro_rules! define_source { @@ -105,6 +160,12 @@ macro_rules! define_source { targets!(define_source); +/// Calculate a list of all steps described by `build`. +/// +/// This will inspect the flags passed in on the command line and use that to +/// build up a list of steps to execute. These steps will then be transformed +/// into a topologically sorted list which when executed left-to-right will +/// correctly sequence the entire build. pub fn all(build: &Build) -> Vec { let mut ret = Vec::new(); let mut all = HashSet::new(); @@ -126,6 +187,8 @@ pub fn all(build: &Build) -> Vec { } } +/// Determines what top-level targets are requested as part of this build, +/// returning them as a list. fn top_level(build: &Build) -> Vec { let mut targets = Vec::new(); let stage = build.flags.stage.unwrap_or(2); @@ -141,14 +204,18 @@ fn top_level(build: &Build) -> Vec { .unwrap_or(host.target) }; + // First, try to find steps on the command line. add_steps(build, stage, &host, &target, &mut targets); + // If none are specified, then build everything. 
if targets.len() == 0 { let t = Step { src: Source::Llvm { _dummy: () }, target: &build.config.build, }; - targets.push(t.doc(stage)); + if build.config.docs { + targets.push(t.doc(stage)); + } for host in build.config.host.iter() { if !build.flags.host.contains(host) { continue @@ -238,8 +305,14 @@ impl<'a> Step<'a> { Step { target: target, src: self.src.clone() } } + // Define ergonomic constructors for each step defined above so they can be + // easily constructed. targets!(constructors); + /// Mapping of all dependencies for rustbuild. + /// + /// This function receives a step, the build that we're building for, and + /// then returns a list of all the dependencies of that step. pub fn deps(&self, build: &'a Build) -> Vec> { match self.src { Source::Rustc { stage: 0 } => { @@ -274,6 +347,8 @@ impl<'a> Step<'a> { vec![self.llvm(()).target(&build.config.build)] } Source::Llvm { _dummy } => Vec::new(), + Source::TestHelpers { _dummy } => Vec::new(), + Source::DebuggerScripts { stage: _ } => Vec::new(), // Note that all doc targets depend on artifacts from the build // architecture, not the target (which is where we're generating @@ -306,18 +381,104 @@ impl<'a> Step<'a> { self.doc_std(stage), self.doc_error_index(stage)] } - Source::Check { stage, compiler: _ } => { - vec![self.check_linkcheck(stage), - self.dist(stage)] + Source::Check { stage, compiler } => { + vec![ + self.check_rpass(compiler), + self.check_rpass_full(compiler), + self.check_rfail(compiler), + self.check_rfail_full(compiler), + self.check_cfail(compiler), + self.check_cfail_full(compiler), + self.check_pfail(compiler), + self.check_incremental(compiler), + self.check_ui(compiler), + self.check_crate_std(compiler), + self.check_crate_test(compiler), + self.check_crate_rustc(compiler), + self.check_codegen(compiler), + self.check_codegen_units(compiler), + self.check_debuginfo(compiler), + self.check_rustdoc(compiler), + self.check_pretty(compiler), + self.check_pretty_rpass(compiler), + self.check_pretty_rpass_full(compiler), + self.check_pretty_rfail(compiler), + self.check_pretty_rfail_full(compiler), + self.check_pretty_rpass_valgrind(compiler), + self.check_rpass_valgrind(compiler), + self.check_error_index(compiler), + self.check_docs(compiler), + self.check_rmake(compiler), + self.check_linkcheck(stage), + self.check_tidy(stage), + self.dist(stage), + ] } Source::CheckLinkcheck { stage } => { vec![self.tool_linkchecker(stage), self.doc(stage)] } Source::CheckCargoTest { stage } => { - vec![self.tool_cargotest(stage)] + vec![self.tool_cargotest(stage), + self.librustc(self.compiler(stage))] + } + Source::CheckTidy { stage } => { + vec![self.tool_tidy(stage)] + } + Source::CheckPrettyRPass { compiler } | + Source::CheckPrettyRFail { compiler } | + Source::CheckRFail { compiler } | + Source::CheckPFail { compiler } | + Source::CheckCodegen { compiler } | + Source::CheckCodegenUnits { compiler } | + Source::CheckIncremental { compiler } | + Source::CheckUi { compiler } | + Source::CheckRustdoc { compiler } | + Source::CheckPretty { compiler } | + Source::CheckCFail { compiler } | + Source::CheckRPassValgrind { compiler } | + Source::CheckRPass { compiler } => { + vec![ + self.libtest(compiler), + self.tool_compiletest(compiler.stage), + self.test_helpers(()), + ] + } + Source::CheckDebuginfo { compiler } => { + vec![ + self.libtest(compiler), + self.tool_compiletest(compiler.stage), + self.test_helpers(()), + self.debugger_scripts(compiler.stage), + ] + } + Source::CheckRPassFull { compiler } | + 
Source::CheckRFailFull { compiler } | + Source::CheckCFailFull { compiler } | + Source::CheckPrettyRPassFull { compiler } | + Source::CheckPrettyRFailFull { compiler } | + Source::CheckPrettyRPassValgrind { compiler } | + Source::CheckRMake { compiler } => { + vec![self.librustc(compiler), + self.tool_compiletest(compiler.stage)] + } + Source::CheckDocs { compiler } => { + vec![self.libstd(compiler)] + } + Source::CheckErrorIndex { compiler } => { + vec![self.libstd(compiler), self.tool_error_index(compiler.stage)] + } + Source::CheckCrateStd { compiler } => { + vec![self.libtest(compiler)] + } + Source::CheckCrateTest { compiler } => { + vec![self.libtest(compiler)] + } + Source::CheckCrateRustc { compiler } => { + vec![self.libtest(compiler)] } - Source::ToolLinkchecker { stage } => { + Source::ToolLinkchecker { stage } | + Source::ToolTidy { stage } => { vec![self.libstd(self.compiler(stage))] } Source::ToolErrorIndex { stage } | @@ -325,7 +486,10 @@ impl<'a> Step<'a> { vec![self.librustc(self.compiler(stage))] } Source::ToolCargoTest { stage } => { - vec![self.librustc(self.compiler(stage))] + vec![self.libstd(self.compiler(stage))] + } + Source::ToolCompiletest { stage } => { + vec![self.libtest(self.compiler(stage))] } Source::DistDocs { stage } => vec![self.doc(stage)], @@ -334,7 +498,14 @@ impl<'a> Step<'a> { vec![self.rustc(stage)] } Source::DistStd { compiler } => { - vec![self.libtest(compiler)] + // We want to package up as many target libraries as possible + // for the `rust-std` package, so if this is a host target we + // depend on librustc and otherwise we just depend on libtest. + if build.config.host.iter().any(|t| t == self.target) { + vec![self.librustc(compiler)] + } else { + vec![self.libtest(compiler)] + } } Source::Dist { stage } => { @@ -350,7 +521,9 @@ impl<'a> Step<'a> { let compiler = self.compiler(stage); for target in build.config.target.iter() { let target = self.target(target); - base.push(target.dist_docs(stage)); + if build.config.docs { + base.push(target.dist_docs(stage)); + } base.push(target.dist_std(compiler)); } } diff --git a/src/bootstrap/build/util.rs b/src/bootstrap/build/util.rs index 41cf924d44..36ce064914 100644 --- a/src/bootstrap/build/util.rs +++ b/src/bootstrap/build/util.rs @@ -8,6 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Various utility functions used throughout rustbuild. +//! +//! Simple things like testing the various filesystem operations here and there, +//! not a lot of interesting happenings here unfortunately. + use std::env; use std::path::{Path, PathBuf}; use std::fs; @@ -16,6 +21,7 @@ use std::process::Command; use bootstrap::{dylib_path, dylib_path_var}; use filetime::FileTime; +/// Returns the `name` as the filename of a static library for `target`. pub fn staticlib(name: &str, target: &str) -> String { if target.contains("windows-msvc") { format!("{}.lib", name) @@ -24,12 +30,15 @@ pub fn staticlib(name: &str, target: &str) -> String { } } +/// Returns the last-modified time for `path`, or zero if it doesn't exist. pub fn mtime(path: &Path) -> FileTime { fs::metadata(path).map(|f| { FileTime::from_last_modification_time(&f) }).unwrap_or(FileTime::zero()) } +/// Copies a file from `src` to `dst`, attempting to use hard links and then +/// falling back to an actually filesystem copy if necessary. 
pub fn copy(src: &Path, dst: &Path) { let res = fs::hard_link(src, dst); let res = res.or_else(|_| fs::copy(src, dst).map(|_| ())); @@ -39,6 +48,8 @@ pub fn copy(src: &Path, dst: &Path) { } } +/// Copies the `src` directory recursively to `dst`. Both are assumed to exist +/// when this function is called. pub fn cp_r(src: &Path, dst: &Path) { for f in t!(fs::read_dir(src)) { let f = t!(f); @@ -66,14 +77,18 @@ pub fn exe(name: &str, target: &str) -> String { } } +/// Returns whether the file name given looks like a dynamic library. pub fn is_dylib(name: &str) -> bool { name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll") } +/// Returns the corresponding relative library directory that the compiler's +/// dylibs will be found in. pub fn libdir(target: &str) -> &'static str { if target.contains("windows") {"bin"} else {"lib"} } +/// Adds a list of lookup paths to `cmd`'s dynamic library lookup path. pub fn add_lib_path(path: Vec, cmd: &mut Command) { let mut list = dylib_path(); for path in path { @@ -82,7 +97,10 @@ pub fn add_lib_path(path: Vec, cmd: &mut Command) { cmd.env(dylib_path_var(), t!(env::join_paths(list))); } -#[allow(dead_code)] // this will be used soon +/// Returns whether `dst` is up to date given that the file or files in `src` +/// are used to generate it. +/// +/// Uses last-modified time checks to verify this. pub fn up_to_date(src: &Path, dst: &Path) -> bool { let threshold = mtime(dst); let meta = t!(fs::metadata(src)); diff --git a/src/bootstrap/config.toml.example b/src/bootstrap/config.toml.example new file mode 100644 index 0000000000..6f06584232 --- /dev/null +++ b/src/bootstrap/config.toml.example @@ -0,0 +1,159 @@ +# Sample TOML configuration file for building Rust. +# +# All options are commented out by default in this file, and they're commented +# out with their default values. The build system by default looks for +# `config.toml` in the current directory of a build for build configuration, but +# a custom configuration file can also be specified with `--config` to the build +# system. + +# ============================================================================= +# Tweaking how LLVM is compiled +# ============================================================================= +[llvm] + +# Indicates whether the LLVM build is a Release or Debug build +#optimize = true + +# Indicates whether the LLVM assertions are enabled or not +#assertions = false + +# Indicates whether ccache is used when building LLVM +#ccache = false + +# If an external LLVM root is specified, we automatically check the version by +# default to make sure it's within the range that we're expecting, but setting +# this flag will indicate that this version check should not be done. +#version-check = false + +# Link libstdc++ statically into the librustc_llvm instead of relying on a +# dynamic version to be available. +#static-libstdcpp = false + +# Tell the LLVM build system to use Ninja instead of the platform default for +# the generated build system. This can sometimes be faster than make, for +# example. +#ninja = false + +# ============================================================================= +# General build configuration options +# ============================================================================= +[build] + +# Build triple for the original snapshot compiler. This must be a compiler that +# nightlies are already produced for. 
The current platform must be able to run +# binaries of this build triple and the nightly will be used to bootstrap the +# first compiler. +#build = "x86_64-unknown-linux-gnu" # defaults to your host platform + +# In addition to the build triple, other triples to produce full compiler +# toolchains for. Each of these triples will be bootstrapped from the build +# triple and then will continue to bootstrap themselves. This platform must +# currently be able to run all of the triples provided here. +#host = ["x86_64-unknown-linux-gnu"] # defaults to just the build triple + +# In addition to all host triples, other triples to produce the standard library +# for. Each host triple will be used to produce a copy of the standard library +# for each target triple. +#target = ["x86_64-unknown-linux-gnu"] # defaults to just the build triple + +# Instead of downloading the src/nightlies.txt version of Cargo specified, use +# this Cargo binary instead to build all Rust code +#cargo = "/path/to/bin/cargo" + +# Instead of downloading the src/nightlies.txt version of the compiler +# specified, use this rustc binary instead as the stage0 snapshot compiler. +#rustc = "/path/to/bin/rustc" + +# Flag to specify whether any documentation is built. If false, rustdoc and +# friends will still be compiled but they will not be used to generate any +# documentation. +#docs = true + +# Indicate whether the compiler should be documented in addition to the standard +# library and facade crates. +#compiler-docs = false + +# ============================================================================= +# Options for compiling Rust code itself +# ============================================================================= +[rust] + +# Whether or not to optimize the compiler and standard library +#optimize = true + +# Number of codegen units to use for each compiler invocation. A value of 0 +# means "the number of cores on this machine", and 1+ is passed through to the +# compiler. +#codegen-units = 1 + +# Whether or not debug assertions are enabled for the compiler and standard +# library +#debug-assertions = false + +# Whether or not debuginfo is emitted +#debuginfo = false + +# Whether or not jemalloc is built and enabled +#use-jemalloc = true + +# Whether or not jemalloc is built with its debug option set +#debug-jemalloc = false + +# The default linker that will be used by the generated compiler. Note that this +# is not the linker used to link said compiler. +#default-linker = "cc" + +# The default ar utility that will be used by the generated compiler if LLVM +# cannot be used. Note that this is not used to assemble said compiler. +#default-ar = "ar" + +# The "channel" for the Rust build to produce. The stable/beta channels only +# allow using stable features, whereas the nightly and dev channels allow using +# nightly features +#channel = "dev" + +# The root location of the MUSL installation directory. The library directory +# will also need to contain libunwind.a for an unwinding implementation. +#musl-root = "..." + +# By default the `rustc` executable is built with `-Wl,-rpath` flags on Unix +# platforms to ensure that the compiler is usable by default from the build +# directory (as it links to a number of dynamic libraries). This may not be +# desired in distributions, for example. 
+#rpath = true + +# Flag indicating whether tests are compiled with optimizations (the -O flag) or +# with debuginfo (the -g flag) +#optimize-tests = true +#debuginfo-tests = true + +# ============================================================================= +# Options for specific targets +# +# Each of the following options is scoped to the specific target triple in +# question and is used for determining how to compile each target. +# ============================================================================= +[target.x86_64-unknown-linux-gnu] + +# C compiler to be used to compiler C code and link Rust code. Note that the +# default value is platform specific, and if not specified it may also depend on +# what platform is crossing to what platform. +#cc = "cc" + +# C++ compiler to be used to compiler C++ code (e.g. LLVM and our LLVM shims). +# This is only used for host targets. +#cxx = "c++" + +# Path to the `llvm-config` binary of the installation of a custom LLVM to link +# against. Note that if this is specifed we don't compile LLVM at all for this +# target. +#llvm-config = "../path/to/llvm/root/bin/llvm-config" + +# Path to the custom jemalloc static library to link into the standard library +# by default. This is only used if jemalloc is still enabled above +#jemalloc = "/path/to/jemalloc/libjemalloc_pic.a" + +# If this target is for Android, this option will be required to specify where +# the NDK for the target lives. This is used to find the C compiler to link and +# build native code. +#android-ndk = "/path/to/ndk" diff --git a/src/bootstrap/lib.rs b/src/bootstrap/lib.rs index 3158a3ab05..ef6184d6ca 100644 --- a/src/bootstrap/lib.rs +++ b/src/bootstrap/lib.rs @@ -8,10 +8,17 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! A small helper library shared between the build system's executables +//! +//! Currently this just has some simple utilities for modifying the dynamic +//! library lookup path. + use std::env; use std::ffi::OsString; use std::path::PathBuf; +/// Returns the environment variable which the dynamic library lookup path +/// resides in for this platform. pub fn dylib_path_var() -> &'static str { if cfg!(target_os = "windows") { "PATH" @@ -22,6 +29,8 @@ pub fn dylib_path_var() -> &'static str { } } +/// Parses the `dylib_path_var()` environment variable, returning a list of +/// paths that are members of this lookup path. pub fn dylib_path() -> Vec { env::split_paths(&env::var_os(dylib_path_var()).unwrap_or(OsString::new())) .collect() diff --git a/src/bootstrap/main.rs b/src/bootstrap/main.rs index bf29ac107f..18d03b5d59 100644 --- a/src/bootstrap/main.rs +++ b/src/bootstrap/main.rs @@ -8,6 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! rustbuild, the Rust build system +//! +//! This is the entry point for the build system used to compile the `rustc` +//! compiler. Lots of documentation can be found in the `README.md` file next to +//! this file, and otherwise documentation can be found throughout the `build` +//! directory in each respective module. 
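For orientation, the command-line interface this entry point exposes can be seen in use elsewhere in this patch: the generated Makefile (`src/bootstrap/mk/Makefile.in`) drives it with arguments such as `--step check-tidy --stage 0`, and `config.toml.example` notes that a configuration file other than the default `config.toml` can be selected with `--config`. All of these are handled by `Flags::parse` and `Config::parse` in `main` below.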
+ #![deny(warnings)] extern crate bootstrap; @@ -32,8 +39,11 @@ fn main() { let args = env::args().skip(1).collect::>(); let flags = Flags::parse(&args); let mut config = Config::parse(&flags.build, flags.config.clone()); + + // compat with `./configure` while we're still using that if std::fs::metadata("config.mk").is_ok() { config.update_with_config_mk(); } + Build::new(flags, config).build(); } diff --git a/src/bootstrap/mk/Makefile.in b/src/bootstrap/mk/Makefile.in index 9199c755f6..1f3ea8f19b 100644 --- a/src/bootstrap/mk/Makefile.in +++ b/src/bootstrap/mk/Makefile.in @@ -42,5 +42,7 @@ check-cargotest: $(Q)$(BOOTSTRAP) --step check-cargotest dist: $(Q)$(BOOTSTRAP) --step dist +tidy: + $(Q)$(BOOTSTRAP) --step check-tidy --stage 0 .PHONY: dist diff --git a/src/bootstrap/rustc.rs b/src/bootstrap/rustc.rs index d403d76bb1..97decedf91 100644 --- a/src/bootstrap/rustc.rs +++ b/src/bootstrap/rustc.rs @@ -36,35 +36,34 @@ fn main() { let args = env::args_os().skip(1).collect::>(); // Detect whether or not we're a build script depending on whether --target // is passed (a bit janky...) - let is_build_script = args.iter() - .position(|i| i.to_str() == Some("--target")) - .is_none(); + let target = args.windows(2).find(|w| &*w[0] == "--target") + .and_then(|w| w[1].to_str()); // Build scripts always use the snapshot compiler which is guaranteed to be // able to produce an executable, whereas intermediate compilers may not // have the standard library built yet and may not be able to produce an // executable. Otherwise we just use the standard compiler we're // bootstrapping with. - let rustc = if is_build_script { - env::var_os("RUSTC_SNAPSHOT").unwrap() + let (rustc, libdir) = if target.is_none() { + ("RUSTC_SNAPSHOT", "RUSTC_SNAPSHOT_LIBDIR") } else { - env::var_os("RUSTC_REAL").unwrap() + ("RUSTC_REAL", "RUSTC_LIBDIR") }; + let stage = env::var("RUSTC_STAGE").unwrap(); + + let rustc = env::var_os(rustc).unwrap(); + let libdir = env::var_os(libdir).unwrap(); + let mut dylib_path = bootstrap::dylib_path(); + dylib_path.insert(0, PathBuf::from(libdir)); let mut cmd = Command::new(rustc); cmd.args(&args) - .arg("--cfg").arg(format!("stage{}", env::var("RUSTC_STAGE").unwrap())); + .arg("--cfg").arg(format!("stage{}", stage)) + .env(bootstrap::dylib_path_var(), env::join_paths(&dylib_path).unwrap()); - if is_build_script { - // Build scripts are always built with the snapshot compiler, so we need - // to be sure to set up the right path information for the OS dynamic - // linker to find the libraries in question. - if let Some(p) = env::var_os("RUSTC_SNAPSHOT_LIBDIR") { - let mut path = bootstrap::dylib_path(); - path.insert(0, PathBuf::from(p)); - cmd.env(bootstrap::dylib_path_var(), env::join_paths(path).unwrap()); - } - } else { + if let Some(target) = target { + // The stage0 compiler has a special sysroot distinct from what we + // actually downloaded, so we just always pass the `--sysroot` option. cmd.arg("--sysroot").arg(env::var_os("RUSTC_SYSROOT").unwrap()); // When we build Rust dylibs they're all intended for intermediate @@ -72,32 +71,89 @@ fn main() { // linking all deps statically into the dylib. cmd.arg("-Cprefer-dynamic"); + // Help the libc crate compile by assisting it in finding the MUSL + // native libraries. if let Some(s) = env::var_os("MUSL_ROOT") { let mut root = OsString::from("native="); root.push(&s); root.push("/lib"); cmd.arg("-L").arg(&root); } + + // Pass down extra flags, commonly used to configure `-Clinker` when + // cross compiling. 
if let Ok(s) = env::var("RUSTC_FLAGS") { cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::>()); } - } - // Set various options from config.toml to configure how we're building - // code. - if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) { - cmd.arg("-g"); - } - if env::var("RUSTC_RPATH") == Ok("true".to_string()) { - cmd.arg("-Crpath"); - } - let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") { - Ok(s) => if s == "true" {"y"} else {"n"}, - Err(..) => "n", - }; - cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions)); - if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") { - cmd.arg("-C").arg(format!("codegen-units={}", s)); + // If we're compiling specifically the `panic_abort` crate then we pass + // the `-C panic=abort` option. Note that we do not do this for any + // other crate intentionally as this is the only crate for now that we + // ship with panic=abort. + // + // This... is a bit of a hack how we detect this. Ideally this + // information should be encoded in the crate I guess? Would likely + // require an RFC amendment to RFC 1513, however. + let is_panic_abort = args.windows(2).any(|a| { + &*a[0] == "--crate-name" && &*a[1] == "panic_abort" + }); + // FIXME(stage0): remove this `stage != "0"` condition + if is_panic_abort && stage != "0" { + cmd.arg("-C").arg("panic=abort"); + } + + // Set various options from config.toml to configure how we're building + // code. + if env::var("RUSTC_DEBUGINFO") == Ok("true".to_string()) { + cmd.arg("-g"); + } + let debug_assertions = match env::var("RUSTC_DEBUG_ASSERTIONS") { + Ok(s) => if s == "true" {"y"} else {"n"}, + Err(..) => "n", + }; + cmd.arg("-C").arg(format!("debug-assertions={}", debug_assertions)); + if let Ok(s) = env::var("RUSTC_CODEGEN_UNITS") { + cmd.arg("-C").arg(format!("codegen-units={}", s)); + } + + // Dealing with rpath here is a little special, so let's go into some + // detail. First off, `-rpath` is a linker option on Unix platforms + // which adds to the runtime dynamic loader path when looking for + // dynamic libraries. We use this by default on Unix platforms to ensure + // that our nightlies behave the same on Windows, that is they work out + // of the box. This can be disabled, of course, but basically that's why + // we're gated on RUSTC_RPATH here. + // + // Ok, so the astute might be wondering "why isn't `-C rpath` used + // here?" and that is indeed a good question to task. This codegen + // option is the compiler's current interface to generating an rpath. + // Unfortunately it doesn't quite suffice for us. The flag currently + // takes no value as an argument, so the compiler calculates what it + // should pass to the linker as `-rpath`. This unfortunately is based on + // the **compile time** directory structure which when building with + // Cargo will be very different than the runtime directory structure. + // + // All that's a really long winded way of saying that if we use + // `-Crpath` then the executables generated have the wrong rpath of + // something like `$ORIGIN/deps` when in fact the way we distribute + // rustc requires the rpath to be `$ORIGIN/../lib`. + // + // So, all in all, to set up the correct rpath we pass the linker + // argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it + // fun to pass a flag to a tool to pass a flag to pass a flag to a tool + // to change a flag in a binary? 
+ if env::var("RUSTC_RPATH") == Ok("true".to_string()) { + let rpath = if target.contains("apple") { + Some("-Wl,-rpath,@loader_path/../lib") + } else if !target.contains("windows") { + Some("-Wl,-rpath,$ORIGIN/../lib") + } else { + None + }; + if let Some(rpath) = rpath { + cmd.arg("-C").arg(format!("link-args={}", rpath)); + } + } } // Actually run the compiler! diff --git a/src/bootstrap/rustdoc.rs b/src/bootstrap/rustdoc.rs index 8c61819611..88ac26d32f 100644 --- a/src/bootstrap/rustdoc.rs +++ b/src/bootstrap/rustdoc.rs @@ -12,17 +12,25 @@ //! //! See comments in `src/bootstrap/rustc.rs` for more information. +extern crate bootstrap; + use std::env; use std::process::Command; +use std::path::PathBuf; fn main() { let args = env::args_os().skip(1).collect::>(); let rustdoc = env::var_os("RUSTDOC_REAL").unwrap(); + let libdir = env::var_os("RUSTC_LIBDIR").unwrap(); + + let mut dylib_path = bootstrap::dylib_path(); + dylib_path.insert(0, PathBuf::from(libdir)); let mut cmd = Command::new(rustdoc); cmd.args(&args) .arg("--cfg").arg(format!("stage{}", env::var("RUSTC_STAGE").unwrap())) - .arg("--cfg").arg("dox"); + .arg("--cfg").arg("dox") + .env(bootstrap::dylib_path_var(), env::join_paths(&dylib_path).unwrap()); std::process::exit(match cmd.status() { Ok(s) => s.code().unwrap_or(1), Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e), diff --git a/src/compiletest/header.rs b/src/compiletest/header.rs deleted file mode 100644 index ef93fcfa01..0000000000 --- a/src/compiletest/header.rs +++ /dev/null @@ -1,455 +0,0 @@ -// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use std::env; -use std::fs::File; -use std::io::BufReader; -use std::io::prelude::*; -use std::path::{Path, PathBuf}; - -use common::Config; -use common; -use util; - -#[derive(Clone, Debug)] -pub struct TestProps { - // Lines that should be expected, in order, on standard out - pub error_patterns: Vec , - // Extra flags to pass to the compiler - pub compile_flags: Vec, - // Extra flags to pass when the compiled code is run (such as --bench) - pub run_flags: Option, - // If present, the name of a file that this test should match when - // pretty-printed - pub pp_exact: Option, - // Modules from aux directory that should be compiled - pub aux_builds: Vec , - // Environment settings to use for compiling - pub rustc_env: Vec<(String,String)> , - // Environment settings to use during execution - pub exec_env: Vec<(String,String)> , - // Lines to check if they appear in the expected debugger output - pub check_lines: Vec , - // Build documentation for all specified aux-builds as well - pub build_aux_docs: bool, - // Flag to force a crate to be built with the host architecture - pub force_host: bool, - // Check stdout for error-pattern output as well as stderr - pub check_stdout: bool, - // Don't force a --crate-type=dylib flag on the command line - pub no_prefer_dynamic: bool, - // Run --pretty expanded when running pretty printing tests - pub pretty_expanded: bool, - // Which pretty mode are we testing with, default to 'normal' - pub pretty_mode: String, - // Only compare pretty output and don't try compiling - pub pretty_compare_only: bool, - // Patterns which must not appear in the output of a cfail test. 
- pub forbid_output: Vec, - // Revisions to test for incremental compilation. - pub revisions: Vec, -} - -// Load any test directives embedded in the file -pub fn load_props(testfile: &Path) -> TestProps { - let error_patterns = Vec::new(); - let aux_builds = Vec::new(); - let exec_env = Vec::new(); - let run_flags = None; - let pp_exact = None; - let check_lines = Vec::new(); - let build_aux_docs = false; - let force_host = false; - let check_stdout = false; - let no_prefer_dynamic = false; - let pretty_expanded = false; - let pretty_compare_only = false; - let forbid_output = Vec::new(); - let mut props = TestProps { - error_patterns: error_patterns, - compile_flags: vec![], - run_flags: run_flags, - pp_exact: pp_exact, - aux_builds: aux_builds, - revisions: vec![], - rustc_env: vec![], - exec_env: exec_env, - check_lines: check_lines, - build_aux_docs: build_aux_docs, - force_host: force_host, - check_stdout: check_stdout, - no_prefer_dynamic: no_prefer_dynamic, - pretty_expanded: pretty_expanded, - pretty_mode: format!("normal"), - pretty_compare_only: pretty_compare_only, - forbid_output: forbid_output, - }; - load_props_into(&mut props, testfile, None); - props -} - -/// Load properties from `testfile` into `props`. If a property is -/// tied to a particular revision `foo` (indicated by writing -/// `//[foo]`), then the property is ignored unless `cfg` is -/// `Some("foo")`. -pub fn load_props_into(props: &mut TestProps, testfile: &Path, cfg: Option<&str>) { - iter_header(testfile, cfg, &mut |ln| { - if let Some(ep) = parse_error_pattern(ln) { - props.error_patterns.push(ep); - } - - if let Some(flags) = parse_compile_flags(ln) { - props.compile_flags.extend( - flags - .split_whitespace() - .map(|s| s.to_owned())); - } - - if let Some(r) = parse_revisions(ln) { - props.revisions.extend(r); - } - - if props.run_flags.is_none() { - props.run_flags = parse_run_flags(ln); - } - - if props.pp_exact.is_none() { - props.pp_exact = parse_pp_exact(ln, testfile); - } - - if !props.build_aux_docs { - props.build_aux_docs = parse_build_aux_docs(ln); - } - - if !props.force_host { - props.force_host = parse_force_host(ln); - } - - if !props.check_stdout { - props.check_stdout = parse_check_stdout(ln); - } - - if !props.no_prefer_dynamic { - props.no_prefer_dynamic = parse_no_prefer_dynamic(ln); - } - - if !props.pretty_expanded { - props.pretty_expanded = parse_pretty_expanded(ln); - } - - if let Some(m) = parse_pretty_mode(ln) { - props.pretty_mode = m; - } - - if !props.pretty_compare_only { - props.pretty_compare_only = parse_pretty_compare_only(ln); - } - - if let Some(ab) = parse_aux_build(ln) { - props.aux_builds.push(ab); - } - - if let Some(ee) = parse_env(ln, "exec-env") { - props.exec_env.push(ee); - } - - if let Some(ee) = parse_env(ln, "rustc-env") { - props.rustc_env.push(ee); - } - - if let Some(cl) = parse_check_line(ln) { - props.check_lines.push(cl); - } - - if let Some(of) = parse_forbid_output(ln) { - props.forbid_output.push(of); - } - }); - - for key in vec!["RUST_TEST_NOCAPTURE", "RUST_TEST_THREADS"] { - match env::var(key) { - Ok(val) => - if props.exec_env.iter().find(|&&(ref x, _)| *x == key).is_none() { - props.exec_env.push((key.to_owned(), val)) - }, - Err(..) 
=> {} - } - } -} - -pub struct EarlyProps { - pub ignore: bool, - pub should_fail: bool, -} - -// scan the file to detect whether the test should be ignored and -// whether it should panic; these are two things the test runner needs -// to know early, before actually running the test -pub fn early_props(config: &Config, testfile: &Path) -> EarlyProps { - let mut props = EarlyProps { - ignore: false, - should_fail: false, - }; - - iter_header(testfile, None, &mut |ln| { - props.ignore = - props.ignore || - parse_name_directive(ln, "ignore-test") || - parse_name_directive(ln, &ignore_target(config)) || - parse_name_directive(ln, &ignore_architecture(config)) || - parse_name_directive(ln, &ignore_stage(config)) || - parse_name_directive(ln, &ignore_env(config)) || - (config.mode == common::Pretty && - parse_name_directive(ln, "ignore-pretty")) || - (config.target != config.host && - parse_name_directive(ln, "ignore-cross-compile")) || - ignore_gdb(config, ln) || - ignore_lldb(config, ln); - - props.should_fail = - props.should_fail || - parse_name_directive(ln, "should-fail"); - }); - - return props; - - fn ignore_target(config: &Config) -> String { - format!("ignore-{}", util::get_os(&config.target)) - } - fn ignore_architecture(config: &Config) -> String { - format!("ignore-{}", util::get_arch(&config.target)) - } - fn ignore_stage(config: &Config) -> String { - format!("ignore-{}", - config.stage_id.split('-').next().unwrap()) - } - fn ignore_env(config: &Config) -> String { - format!("ignore-{}", util::get_env(&config.target).unwrap_or("")) - } - fn ignore_gdb(config: &Config, line: &str) -> bool { - if config.mode != common::DebugInfoGdb { - return false; - } - - if parse_name_directive(line, "ignore-gdb") { - return true; - } - - if let Some(ref actual_version) = config.gdb_version { - if line.contains("min-gdb-version") { - let min_version = line.trim() - .split(' ') - .last() - .expect("Malformed GDB version directive"); - // Ignore if actual version is smaller the minimum required - // version - gdb_version_to_int(actual_version) < - gdb_version_to_int(min_version) - } else { - false - } - } else { - false - } - } - - fn ignore_lldb(config: &Config, line: &str) -> bool { - if config.mode != common::DebugInfoLldb { - return false; - } - - if parse_name_directive(line, "ignore-lldb") { - return true; - } - - if let Some(ref actual_version) = config.lldb_version { - if line.contains("min-lldb-version") { - let min_version = line.trim() - .split(' ') - .last() - .expect("Malformed lldb version directive"); - // Ignore if actual version is smaller the minimum required - // version - lldb_version_to_int(actual_version) < - lldb_version_to_int(min_version) - } else { - false - } - } else { - false - } - } -} - -fn iter_header(testfile: &Path, - cfg: Option<&str>, - it: &mut FnMut(&str)) { - let rdr = BufReader::new(File::open(testfile).unwrap()); - for ln in rdr.lines() { - // Assume that any directives will be found before the first - // module or function. This doesn't seem to be an optimization - // with a warm page cache. Maybe with a cold one. 
- let ln = ln.unwrap(); - let ln = ln.trim(); - if ln.starts_with("fn") || ln.starts_with("mod") { - return; - } else if ln.starts_with("//[") { - // A comment like `//[foo]` is specific to revision `foo` - if let Some(close_brace) = ln.find("]") { - let lncfg = &ln[3..close_brace]; - let matches = match cfg { - Some(s) => s == &lncfg[..], - None => false, - }; - if matches { - it(&ln[close_brace+1..]); - } - } else { - panic!("malformed condition directive: expected `//[foo]`, found `{}`", - ln) - } - } else if ln.starts_with("//") { - it(&ln[2..]); - } - } - return; -} - -fn parse_error_pattern(line: &str) -> Option { - parse_name_value_directive(line, "error-pattern") -} - -fn parse_forbid_output(line: &str) -> Option { - parse_name_value_directive(line, "forbid-output") -} - -fn parse_aux_build(line: &str) -> Option { - parse_name_value_directive(line, "aux-build") -} - -fn parse_compile_flags(line: &str) -> Option { - parse_name_value_directive(line, "compile-flags") -} - -fn parse_revisions(line: &str) -> Option> { - parse_name_value_directive(line, "revisions") - .map(|r| r.split_whitespace().map(|t| t.to_string()).collect()) -} - -fn parse_run_flags(line: &str) -> Option { - parse_name_value_directive(line, "run-flags") -} - -fn parse_check_line(line: &str) -> Option { - parse_name_value_directive(line, "check") -} - -fn parse_force_host(line: &str) -> bool { - parse_name_directive(line, "force-host") -} - -fn parse_build_aux_docs(line: &str) -> bool { - parse_name_directive(line, "build-aux-docs") -} - -fn parse_check_stdout(line: &str) -> bool { - parse_name_directive(line, "check-stdout") -} - -fn parse_no_prefer_dynamic(line: &str) -> bool { - parse_name_directive(line, "no-prefer-dynamic") -} - -fn parse_pretty_expanded(line: &str) -> bool { - parse_name_directive(line, "pretty-expanded") -} - -fn parse_pretty_mode(line: &str) -> Option { - parse_name_value_directive(line, "pretty-mode") -} - -fn parse_pretty_compare_only(line: &str) -> bool { - parse_name_directive(line, "pretty-compare-only") -} - -fn parse_env(line: &str, name: &str) -> Option<(String, String)> { - parse_name_value_directive(line, name).map(|nv| { - // nv is either FOO or FOO=BAR - let mut strs: Vec = nv - .splitn(2, '=') - .map(str::to_owned) - .collect(); - - match strs.len() { - 1 => (strs.pop().unwrap(), "".to_owned()), - 2 => { - let end = strs.pop().unwrap(); - (strs.pop().unwrap(), end) - } - n => panic!("Expected 1 or 2 strings, not {}", n) - } - }) -} - -fn parse_pp_exact(line: &str, testfile: &Path) -> Option { - if let Some(s) = parse_name_value_directive(line, "pp-exact") { - Some(PathBuf::from(&s)) - } else { - if parse_name_directive(line, "pp-exact") { - testfile.file_name().map(PathBuf::from) - } else { - None - } - } -} - -fn parse_name_directive(line: &str, directive: &str) -> bool { - // This 'no-' rule is a quick hack to allow pretty-expanded and no-pretty-expanded to coexist - line.contains(directive) && !line.contains(&("no-".to_owned() + directive)) -} - -pub fn parse_name_value_directive(line: &str, directive: &str) - -> Option { - let keycolon = format!("{}:", directive); - if let Some(colon) = line.find(&keycolon) { - let value = line[(colon + keycolon.len()) .. 
line.len()].to_owned(); - debug!("{}: {}", directive, value); - Some(value) - } else { - None - } -} - -pub fn gdb_version_to_int(version_string: &str) -> isize { - let error_string = format!( - "Encountered GDB version string with unexpected format: {}", - version_string); - let error_string = error_string; - - let components: Vec<&str> = version_string.trim().split('.').collect(); - - if components.len() != 2 { - panic!("{}", error_string); - } - - let major: isize = components[0].parse().ok().expect(&error_string); - let minor: isize = components[1].parse().ok().expect(&error_string); - - return major * 1000 + minor; -} - -pub fn lldb_version_to_int(version_string: &str) -> isize { - let error_string = format!( - "Encountered LLDB version string with unexpected format: {}", - version_string); - let error_string = error_string; - let major: isize = version_string.parse().ok().expect(&error_string); - return major; -} diff --git a/src/compiletest/runtest.rs b/src/compiletest/runtest.rs deleted file mode 100644 index 6773c34c7d..0000000000 --- a/src/compiletest/runtest.rs +++ /dev/null @@ -1,2034 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -use common::Config; -use common::{CompileFail, ParseFail, Pretty, RunFail, RunPass, RunPassValgrind}; -use common::{Codegen, DebugInfoLldb, DebugInfoGdb, Rustdoc, CodegenUnits}; -use common::{Incremental}; -use errors::{self, ErrorKind}; -use header::TestProps; -use header; -use procsrv; -use test::TestPaths; -use util::logv; - -use std::env; -use std::collections::HashSet; -use std::fmt; -use std::fs::{self, File}; -use std::io::BufReader; -use std::io::prelude::*; -use std::net::TcpStream; -use std::path::{Path, PathBuf, Component}; -use std::process::{Command, Output, ExitStatus}; - -pub fn run(config: Config, testpaths: &TestPaths) { - match &*config.target { - - "arm-linux-androideabi" | "aarch64-linux-android" => { - if !config.adb_device_status { - panic!("android device not available"); - } - } - - _=> { } - } - - if config.verbose { - // We're going to be dumping a lot of info. Start on a new line. 
- print!("\n\n"); - } - debug!("running {:?}", testpaths.file.display()); - let props = header::load_props(&testpaths.file); - debug!("loaded props"); - match config.mode { - CompileFail => run_cfail_test(&config, &props, &testpaths), - ParseFail => run_cfail_test(&config, &props, &testpaths), - RunFail => run_rfail_test(&config, &props, &testpaths), - RunPass => run_rpass_test(&config, &props, &testpaths), - RunPassValgrind => run_valgrind_test(&config, &props, &testpaths), - Pretty => run_pretty_test(&config, &props, &testpaths), - DebugInfoGdb => run_debuginfo_gdb_test(&config, &props, &testpaths), - DebugInfoLldb => run_debuginfo_lldb_test(&config, &props, &testpaths), - Codegen => run_codegen_test(&config, &props, &testpaths), - Rustdoc => run_rustdoc_test(&config, &props, &testpaths), - CodegenUnits => run_codegen_units_test(&config, &props, &testpaths), - Incremental => run_incremental_test(&config, &props, &testpaths), - } -} - -fn get_output(props: &TestProps, proc_res: &ProcRes) -> String { - if props.check_stdout { - format!("{}{}", proc_res.stdout, proc_res.stderr) - } else { - proc_res.stderr.clone() - } -} - - -fn for_each_revision(config: &Config, props: &TestProps, testpaths: &TestPaths, - mut op: OP) - where OP: FnMut(&Config, &TestProps, &TestPaths, Option<&str>) -{ - if props.revisions.is_empty() { - op(config, props, testpaths, None) - } else { - for revision in &props.revisions { - let mut revision_props = props.clone(); - header::load_props_into(&mut revision_props, - &testpaths.file, - Some(&revision)); - revision_props.compile_flags.extend(vec![ - format!("--cfg"), - format!("{}", revision), - ]); - op(config, &revision_props, testpaths, Some(revision)); - } - } -} - -fn run_cfail_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - for_each_revision(config, props, testpaths, run_cfail_test_revision); -} - -fn run_cfail_test_revision(config: &Config, - props: &TestProps, - testpaths: &TestPaths, - revision: Option<&str>) { - let proc_res = compile_test(config, props, testpaths); - - if proc_res.status.success() { - fatal_proc_rec( - revision, - &format!("{} test compiled successfully!", config.mode)[..], - &proc_res); - } - - check_correct_failure_status(revision, &proc_res); - - if proc_res.status.success() { - fatal(revision, "process did not return an error status"); - } - - let output_to_check = get_output(props, &proc_res); - let expected_errors = errors::load_errors(&testpaths.file, revision); - if !expected_errors.is_empty() { - if !props.error_patterns.is_empty() { - fatal(revision, "both error pattern and expected errors specified"); - } - check_expected_errors(revision, expected_errors, testpaths, &proc_res); - } else { - check_error_patterns(revision, props, testpaths, &output_to_check, &proc_res); - } - check_no_compiler_crash(revision, &proc_res); - check_forbid_output(revision, props, &output_to_check, &proc_res); -} - -fn run_rfail_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - for_each_revision(config, props, testpaths, run_rfail_test_revision); -} - -fn run_rfail_test_revision(config: &Config, - props: &TestProps, - testpaths: &TestPaths, - revision: Option<&str>) { - let proc_res = compile_test(config, props, testpaths); - - if !proc_res.status.success() { - fatal_proc_rec(revision, "compilation failed!", &proc_res); - } - - let proc_res = exec_compiled_test(config, props, testpaths); - - // The value our Makefile configures valgrind to return on failure - const VALGRIND_ERR: i32 = 100; - if 
proc_res.status.code() == Some(VALGRIND_ERR) { - fatal_proc_rec(revision, "run-fail test isn't valgrind-clean!", &proc_res); - } - - let output_to_check = get_output(props, &proc_res); - check_correct_failure_status(revision, &proc_res); - check_error_patterns(revision, props, testpaths, &output_to_check, &proc_res); -} - -fn check_correct_failure_status(revision: Option<&str>, proc_res: &ProcRes) { - // The value the rust runtime returns on failure - const RUST_ERR: i32 = 101; - if proc_res.status.code() != Some(RUST_ERR) { - fatal_proc_rec( - revision, - &format!("failure produced the wrong error: {}", - proc_res.status), - proc_res); - } -} - -fn run_rpass_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - for_each_revision(config, props, testpaths, run_rpass_test_revision); -} - -fn run_rpass_test_revision(config: &Config, - props: &TestProps, - testpaths: &TestPaths, - revision: Option<&str>) { - let proc_res = compile_test(config, props, testpaths); - - if !proc_res.status.success() { - fatal_proc_rec(revision, "compilation failed!", &proc_res); - } - - let proc_res = exec_compiled_test(config, props, testpaths); - - if !proc_res.status.success() { - fatal_proc_rec(revision, "test run failed!", &proc_res); - } -} - -fn run_valgrind_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - assert!(props.revisions.is_empty(), "revisions not relevant here"); - - if config.valgrind_path.is_none() { - assert!(!config.force_valgrind); - return run_rpass_test(config, props, testpaths); - } - - let mut proc_res = compile_test(config, props, testpaths); - - if !proc_res.status.success() { - fatal_proc_rec(None, "compilation failed!", &proc_res); - } - - let mut new_config = config.clone(); - new_config.runtool = new_config.valgrind_path.clone(); - proc_res = exec_compiled_test(&new_config, props, testpaths); - - if !proc_res.status.success() { - fatal_proc_rec(None, "test run failed!", &proc_res); - } -} - -fn run_pretty_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - for_each_revision(config, props, testpaths, run_pretty_test_revision); -} - -fn run_pretty_test_revision(config: &Config, - props: &TestProps, - testpaths: &TestPaths, - revision: Option<&str>) { - if props.pp_exact.is_some() { - logv(config, "testing for exact pretty-printing".to_owned()); - } else { - logv(config, "testing for converging pretty-printing".to_owned()); - } - - let rounds = - match props.pp_exact { Some(_) => 1, None => 2 }; - - let mut src = String::new(); - File::open(&testpaths.file).unwrap().read_to_string(&mut src).unwrap(); - let mut srcs = vec!(src); - - let mut round = 0; - while round < rounds { - logv(config, format!("pretty-printing round {} revision {:?}", - round, revision)); - let proc_res = print_source(config, - props, - testpaths, - srcs[round].to_owned(), - &props.pretty_mode); - - if !proc_res.status.success() { - fatal_proc_rec(revision, - &format!("pretty-printing failed in round {} revision {:?}", - round, revision), - &proc_res); - } - - let ProcRes{ stdout, .. 
} = proc_res; - srcs.push(stdout); - round += 1; - } - - let mut expected = match props.pp_exact { - Some(ref file) => { - let filepath = testpaths.file.parent().unwrap().join(file); - let mut s = String::new(); - File::open(&filepath).unwrap().read_to_string(&mut s).unwrap(); - s - } - None => { srcs[srcs.len() - 2].clone() } - }; - let mut actual = srcs[srcs.len() - 1].clone(); - - if props.pp_exact.is_some() { - // Now we have to care about line endings - let cr = "\r".to_owned(); - actual = actual.replace(&cr, "").to_owned(); - expected = expected.replace(&cr, "").to_owned(); - } - - compare_source(revision, &expected, &actual); - - // If we're only making sure that the output matches then just stop here - if props.pretty_compare_only { return; } - - // Finally, let's make sure it actually appears to remain valid code - let proc_res = typecheck_source(config, props, testpaths, actual); - if !proc_res.status.success() { - fatal_proc_rec(revision, "pretty-printed source does not typecheck", &proc_res); - } - - if !props.pretty_expanded { return } - - // additionally, run `--pretty expanded` and try to build it. - let proc_res = print_source(config, props, testpaths, srcs[round].clone(), "expanded"); - if !proc_res.status.success() { - fatal_proc_rec(revision, "pretty-printing (expanded) failed", &proc_res); - } - - let ProcRes{ stdout: expanded_src, .. } = proc_res; - let proc_res = typecheck_source(config, props, testpaths, expanded_src); - if !proc_res.status.success() { - fatal_proc_rec( - revision, - "pretty-printed source (expanded) does not typecheck", - &proc_res); - } - - return; - - fn print_source(config: &Config, - props: &TestProps, - testpaths: &TestPaths, - src: String, - pretty_type: &str) -> ProcRes { - let aux_dir = aux_output_dir_name(config, testpaths); - compose_and_run(config, - testpaths, - make_pp_args(config, - props, - testpaths, - pretty_type.to_owned()), - props.exec_env.clone(), - config.compile_lib_path.to_str().unwrap(), - Some(aux_dir.to_str().unwrap()), - Some(src)) - } - - fn make_pp_args(config: &Config, - props: &TestProps, - testpaths: &TestPaths, - pretty_type: String) -> ProcArgs { - let aux_dir = aux_output_dir_name(config, testpaths); - // FIXME (#9639): This needs to handle non-utf8 paths - let mut args = vec!("-".to_owned(), - "-Zunstable-options".to_owned(), - "--unpretty".to_owned(), - pretty_type, - format!("--target={}", config.target), - "-L".to_owned(), - aux_dir.to_str().unwrap().to_owned()); - args.extend(split_maybe_args(&config.target_rustcflags)); - args.extend(props.compile_flags.iter().cloned()); - return ProcArgs { - prog: config.rustc_path.to_str().unwrap().to_owned(), - args: args, - }; - } - - fn compare_source(revision: Option<&str>, expected: &str, actual: &str) { - if expected != actual { - error(revision, "pretty-printed source does not match expected source"); - println!("\n\ -expected:\n\ -------------------------------------------\n\ -{}\n\ -------------------------------------------\n\ -actual:\n\ -------------------------------------------\n\ -{}\n\ -------------------------------------------\n\ -\n", - expected, actual); - panic!(); - } - } - - fn typecheck_source(config: &Config, props: &TestProps, - testpaths: &TestPaths, src: String) -> ProcRes { - let args = make_typecheck_args(config, props, testpaths); - compose_and_run_compiler(config, props, testpaths, args, Some(src)) - } - - fn make_typecheck_args(config: &Config, props: &TestProps, testpaths: &TestPaths) -> ProcArgs { - let aux_dir = 
aux_output_dir_name(config, testpaths); - let target = if props.force_host { - &*config.host - } else { - &*config.target - }; - // FIXME (#9639): This needs to handle non-utf8 paths - let mut args = vec!("-".to_owned(), - "-Zno-trans".to_owned(), - format!("--target={}", target), - "-L".to_owned(), - config.build_base.to_str().unwrap().to_owned(), - "-L".to_owned(), - aux_dir.to_str().unwrap().to_owned()); - args.extend(split_maybe_args(&config.target_rustcflags)); - args.extend(props.compile_flags.iter().cloned()); - // FIXME (#9639): This needs to handle non-utf8 paths - return ProcArgs { - prog: config.rustc_path.to_str().unwrap().to_owned(), - args: args, - }; - } -} - -fn run_debuginfo_gdb_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - assert!(props.revisions.is_empty(), "revisions not relevant here"); - - let mut config = Config { - target_rustcflags: cleanup_debug_info_options(&config.target_rustcflags), - host_rustcflags: cleanup_debug_info_options(&config.host_rustcflags), - .. config.clone() - }; - - let config = &mut config; - let DebuggerCommands { - commands, - check_lines, - breakpoint_lines - } = parse_debugger_commands(testpaths, "gdb"); - let mut cmds = commands.join("\n"); - - // compile test file (it should have 'compile-flags:-g' in the header) - let compiler_run_result = compile_test(config, props, testpaths); - if !compiler_run_result.status.success() { - fatal_proc_rec(None, "compilation failed!", &compiler_run_result); - } - - let exe_file = make_exe_name(config, testpaths); - - let debugger_run_result; - match &*config.target { - "arm-linux-androideabi" | "aarch64-linux-android" => { - - cmds = cmds.replace("run", "continue"); - - // write debugger script - let mut script_str = String::with_capacity(2048); - script_str.push_str(&format!("set charset {}\n", charset())); - script_str.push_str(&format!("file {}\n", exe_file.to_str().unwrap())); - script_str.push_str("target remote :5039\n"); - script_str.push_str(&format!("set solib-search-path \ - ./{}/stage2/lib/rustlib/{}/lib/\n", - config.host, config.target)); - for line in &breakpoint_lines { - script_str.push_str(&format!("break {:?}:{}\n", - testpaths.file - .file_name() - .unwrap() - .to_string_lossy(), - *line)[..]); - } - script_str.push_str(&cmds); - script_str.push_str("\nquit\n"); - - debug!("script_str = {}", script_str); - dump_output_file(config, - testpaths, - &script_str, - "debugger.script"); - - - procsrv::run("", - &config.adb_path, - None, - &[ - "push".to_owned(), - exe_file.to_str().unwrap().to_owned(), - config.adb_test_dir.clone() - ], - vec!(("".to_owned(), "".to_owned())), - Some("".to_owned())) - .expect(&format!("failed to exec `{:?}`", config.adb_path)); - - procsrv::run("", - &config.adb_path, - None, - &[ - "forward".to_owned(), - "tcp:5039".to_owned(), - "tcp:5039".to_owned() - ], - vec!(("".to_owned(), "".to_owned())), - Some("".to_owned())) - .expect(&format!("failed to exec `{:?}`", config.adb_path)); - - let adb_arg = format!("export LD_LIBRARY_PATH={}; \ - gdbserver{} :5039 {}/{}", - config.adb_test_dir.clone(), - if config.target.contains("aarch64") - {"64"} else {""}, - config.adb_test_dir.clone(), - exe_file.file_name().unwrap().to_str() - .unwrap()); - - let mut process = procsrv::run_background("", - &config.adb_path - , - None, - &[ - "shell".to_owned(), - adb_arg.clone() - ], - vec!(("".to_owned(), - "".to_owned())), - Some("".to_owned())) - .expect(&format!("failed to exec `{:?}`", config.adb_path)); - loop { - //waiting 1 second for gdbserver start 
- ::std::thread::sleep(::std::time::Duration::new(1,0)); - if TcpStream::connect("127.0.0.1:5039").is_ok() { - break - } - } - - let tool_path = match config.android_cross_path.to_str() { - Some(x) => x.to_owned(), - None => fatal(None, "cannot find android cross path") - }; - - let debugger_script = make_out_name(config, testpaths, "debugger.script"); - // FIXME (#9639): This needs to handle non-utf8 paths - let debugger_opts = - vec!("-quiet".to_owned(), - "-batch".to_owned(), - "-nx".to_owned(), - format!("-command={}", debugger_script.to_str().unwrap())); - - let mut gdb_path = tool_path; - gdb_path.push_str(&format!("/bin/{}-gdb", config.target)); - let procsrv::Result { - out, - err, - status - } = procsrv::run("", - &gdb_path, - None, - &debugger_opts, - vec!(("".to_owned(), "".to_owned())), - None) - .expect(&format!("failed to exec `{:?}`", gdb_path)); - let cmdline = { - let cmdline = make_cmdline("", - &format!("{}-gdb", config.target), - &debugger_opts); - logv(config, format!("executing {}", cmdline)); - cmdline - }; - - debugger_run_result = ProcRes { - status: Status::Normal(status), - stdout: out, - stderr: err, - cmdline: cmdline - }; - if process.kill().is_err() { - println!("Adb process is already finished."); - } - } - - _=> { - let rust_src_root = find_rust_src_root(config) - .expect("Could not find Rust source root"); - let rust_pp_module_rel_path = Path::new("./src/etc"); - let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path) - .to_str() - .unwrap() - .to_owned(); - // write debugger script - let mut script_str = String::with_capacity(2048); - script_str.push_str(&format!("set charset {}\n", charset())); - script_str.push_str("show version\n"); - - match config.gdb_version { - Some(ref version) => { - println!("NOTE: compiletest thinks it is using GDB version {}", - version); - - if header::gdb_version_to_int(version) > - header::gdb_version_to_int("7.4") { - // Add the directory containing the pretty printers to - // GDB's script auto loading safe path - script_str.push_str( - &format!("add-auto-load-safe-path {}\n", - rust_pp_module_abs_path.replace(r"\", r"\\")) - ); - } - } - _ => { - println!("NOTE: compiletest does not know which version of \ - GDB it is using"); - } - } - - // The following line actually doesn't have to do anything with - // pretty printing, it just tells GDB to print values on one line: - script_str.push_str("set print pretty off\n"); - - // Add the pretty printer directory to GDB's source-file search path - script_str.push_str(&format!("directory {}\n", - rust_pp_module_abs_path)); - - // Load the target executable - script_str.push_str(&format!("file {}\n", - exe_file.to_str().unwrap() - .replace(r"\", r"\\"))); - - // Add line breakpoints - for line in &breakpoint_lines { - script_str.push_str(&format!("break '{}':{}\n", - testpaths.file.file_name().unwrap() - .to_string_lossy(), - *line)); - } - - script_str.push_str(&cmds); - script_str.push_str("\nquit\n"); - - debug!("script_str = {}", script_str); - dump_output_file(config, - testpaths, - &script_str, - "debugger.script"); - - // run debugger script with gdb - fn debugger() -> &'static str { - if cfg!(windows) {"gdb.exe"} else {"gdb"} - } - - let debugger_script = make_out_name(config, testpaths, "debugger.script"); - - // FIXME (#9639): This needs to handle non-utf8 paths - let debugger_opts = - vec!("-quiet".to_owned(), - "-batch".to_owned(), - "-nx".to_owned(), - format!("-command={}", debugger_script.to_str().unwrap())); - - let proc_args = ProcArgs { - prog: 
debugger().to_owned(), - args: debugger_opts, - }; - - let environment = vec![("PYTHONPATH".to_owned(), rust_pp_module_abs_path)]; - - debugger_run_result = compose_and_run(config, - testpaths, - proc_args, - environment, - config.run_lib_path.to_str().unwrap(), - None, - None); - } - } - - if !debugger_run_result.status.success() { - fatal(None, "gdb failed to execute"); - } - - check_debugger_output(&debugger_run_result, &check_lines); -} - -fn find_rust_src_root(config: &Config) -> Option { - let mut path = config.src_base.clone(); - let path_postfix = Path::new("src/etc/lldb_batchmode.py"); - - while path.pop() { - if path.join(&path_postfix).is_file() { - return Some(path); - } - } - - return None; -} - -fn run_debuginfo_lldb_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - assert!(props.revisions.is_empty(), "revisions not relevant here"); - - if config.lldb_python_dir.is_none() { - fatal(None, "Can't run LLDB test because LLDB's python path is not set."); - } - - let mut config = Config { - target_rustcflags: cleanup_debug_info_options(&config.target_rustcflags), - host_rustcflags: cleanup_debug_info_options(&config.host_rustcflags), - .. config.clone() - }; - - let config = &mut config; - - // compile test file (it should have 'compile-flags:-g' in the header) - let compile_result = compile_test(config, props, testpaths); - if !compile_result.status.success() { - fatal_proc_rec(None, "compilation failed!", &compile_result); - } - - let exe_file = make_exe_name(config, testpaths); - - match config.lldb_version { - Some(ref version) => { - println!("NOTE: compiletest thinks it is using LLDB version {}", - version); - } - _ => { - println!("NOTE: compiletest does not know which version of \ - LLDB it is using"); - } - } - - // Parse debugger commands etc from test files - let DebuggerCommands { - commands, - check_lines, - breakpoint_lines, - .. 
- } = parse_debugger_commands(testpaths, "lldb"); - - // Write debugger script: - // We don't want to hang when calling `quit` while the process is still running - let mut script_str = String::from("settings set auto-confirm true\n"); - - // Make LLDB emit its version, so we have it documented in the test output - script_str.push_str("version\n"); - - // Switch LLDB into "Rust mode" - let rust_src_root = find_rust_src_root(config) - .expect("Could not find Rust source root"); - let rust_pp_module_rel_path = Path::new("./src/etc/lldb_rust_formatters.py"); - let rust_pp_module_abs_path = rust_src_root.join(rust_pp_module_rel_path) - .to_str() - .unwrap() - .to_owned(); - - script_str.push_str(&format!("command script import {}\n", - &rust_pp_module_abs_path[..])[..]); - script_str.push_str("type summary add --no-value "); - script_str.push_str("--python-function lldb_rust_formatters.print_val "); - script_str.push_str("-x \".*\" --category Rust\n"); - script_str.push_str("type category enable Rust\n"); - - // Set breakpoints on every line that contains the string "#break" - let source_file_name = testpaths.file.file_name().unwrap().to_string_lossy(); - for line in &breakpoint_lines { - script_str.push_str(&format!("breakpoint set --file '{}' --line {}\n", - source_file_name, - line)); - } - - // Append the other commands - for line in &commands { - script_str.push_str(line); - script_str.push_str("\n"); - } - - // Finally, quit the debugger - script_str.push_str("\nquit\n"); - - // Write the script into a file - debug!("script_str = {}", script_str); - dump_output_file(config, - testpaths, - &script_str, - "debugger.script"); - let debugger_script = make_out_name(config, testpaths, "debugger.script"); - - // Let LLDB execute the script via lldb_batchmode.py - let debugger_run_result = run_lldb(config, - testpaths, - &exe_file, - &debugger_script, - &rust_src_root); - - if !debugger_run_result.status.success() { - fatal_proc_rec(None, "Error while running LLDB", &debugger_run_result); - } - - check_debugger_output(&debugger_run_result, &check_lines); - - fn run_lldb(config: &Config, - testpaths: &TestPaths, - test_executable: &Path, - debugger_script: &Path, - rust_src_root: &Path) - -> ProcRes { - // Prepare the lldb_batchmode which executes the debugger script - let lldb_script_path = rust_src_root.join("src/etc/lldb_batchmode.py"); - cmd2procres(config, - testpaths, - Command::new(&config.python) - .arg(&lldb_script_path) - .arg(test_executable) - .arg(debugger_script) - .env("PYTHONPATH", - config.lldb_python_dir.as_ref().unwrap())) - } -} - -fn cmd2procres(config: &Config, testpaths: &TestPaths, cmd: &mut Command) - -> ProcRes { - let (status, out, err) = match cmd.output() { - Ok(Output { status, stdout, stderr }) => { - (status, - String::from_utf8(stdout).unwrap(), - String::from_utf8(stderr).unwrap()) - }, - Err(e) => { - fatal(None, &format!("Failed to setup Python process for \ - LLDB script: {}", e)) - } - }; - - dump_output(config, testpaths, &out, &err); - ProcRes { - status: Status::Normal(status), - stdout: out, - stderr: err, - cmdline: format!("{:?}", cmd) - } -} - -struct DebuggerCommands { - commands: Vec, - check_lines: Vec, - breakpoint_lines: Vec, -} - -fn parse_debugger_commands(testpaths: &TestPaths, debugger_prefix: &str) - -> DebuggerCommands { - let command_directive = format!("{}-command", debugger_prefix); - let check_directive = format!("{}-check", debugger_prefix); - - let mut breakpoint_lines = vec!(); - let mut commands = vec!(); - let mut check_lines = 
vec!(); - let mut counter = 1; - let reader = BufReader::new(File::open(&testpaths.file).unwrap()); - for line in reader.lines() { - match line { - Ok(line) => { - if line.contains("#break") { - breakpoint_lines.push(counter); - } - - header::parse_name_value_directive( - &line, - &command_directive).map(|cmd| { - commands.push(cmd) - }); - - header::parse_name_value_directive( - &line, - &check_directive).map(|cmd| { - check_lines.push(cmd) - }); - } - Err(e) => { - fatal(None, &format!("Error while parsing debugger commands: {}", e)) - } - } - counter += 1; - } - - DebuggerCommands { - commands: commands, - check_lines: check_lines, - breakpoint_lines: breakpoint_lines, - } -} - -fn cleanup_debug_info_options(options: &Option) -> Option { - if options.is_none() { - return None; - } - - // Remove options that are either unwanted (-O) or may lead to duplicates due to RUSTFLAGS. - let options_to_remove = [ - "-O".to_owned(), - "-g".to_owned(), - "--debuginfo".to_owned() - ]; - let mut new_options = - split_maybe_args(options).into_iter() - .filter(|x| !options_to_remove.contains(x)) - .collect::>(); - - let mut i = 0; - while i + 1 < new_options.len() { - if new_options[i] == "-Z" { - // FIXME #31005 MIR missing debuginfo currently. - if new_options[i + 1] == "orbit" { - // Remove "-Z" and "orbit". - new_options.remove(i); - new_options.remove(i); - continue; - } - // Always skip over -Z's argument. - i += 1; - } - i += 1; - } - - Some(new_options.join(" ")) -} - -fn check_debugger_output(debugger_run_result: &ProcRes, check_lines: &[String]) { - let num_check_lines = check_lines.len(); - if num_check_lines > 0 { - // Allow check lines to leave parts unspecified (e.g., uninitialized - // bits in the wrong case of an enum) with the notation "[...]". - let check_fragments: Vec> = - check_lines.iter().map(|s| { - s - .trim() - .split("[...]") - .map(str::to_owned) - .collect() - }).collect(); - // check if each line in props.check_lines appears in the - // output (in order) - let mut i = 0; - for line in debugger_run_result.stdout.lines() { - let mut rest = line.trim(); - let mut first = true; - let mut failed = false; - for frag in &check_fragments[i] { - let found = if first { - if rest.starts_with(frag) { - Some(0) - } else { - None - } - } else { - rest.find(frag) - }; - match found { - None => { - failed = true; - break; - } - Some(i) => { - rest = &rest[(i + frag.len())..]; - } - } - first = false; - } - if !failed && rest.is_empty() { - i += 1; - } - if i == num_check_lines { - // all lines checked - break; - } - } - if i != num_check_lines { - fatal_proc_rec(None, &format!("line not found in debugger output: {}", - check_lines.get(i).unwrap()), - debugger_run_result); - } - } -} - -fn check_error_patterns(revision: Option<&str>, - props: &TestProps, - testpaths: &TestPaths, - output_to_check: &str, - proc_res: &ProcRes) { - if props.error_patterns.is_empty() { - fatal(revision, - &format!("no error pattern specified in {:?}", - testpaths.file.display())); - } - let mut next_err_idx = 0; - let mut next_err_pat = &props.error_patterns[next_err_idx]; - let mut done = false; - for line in output_to_check.lines() { - if line.contains(next_err_pat) { - debug!("found error pattern {}", next_err_pat); - next_err_idx += 1; - if next_err_idx == props.error_patterns.len() { - debug!("found all error patterns"); - done = true; - break; - } - next_err_pat = &props.error_patterns[next_err_idx]; - } - } - if done { return; } - - let missing_patterns = &props.error_patterns[next_err_idx..]; - if 
missing_patterns.len() == 1 { - fatal_proc_rec( - revision, - &format!("error pattern '{}' not found!", missing_patterns[0]), - proc_res); - } else { - for pattern in missing_patterns { - error(revision, &format!("error pattern '{}' not found!", *pattern)); - } - fatal_proc_rec(revision, "multiple error patterns not found", proc_res); - } -} - -fn check_no_compiler_crash(revision: Option<&str>, proc_res: &ProcRes) { - for line in proc_res.stderr.lines() { - if line.starts_with("error: internal compiler error:") { - fatal_proc_rec(revision, - "compiler encountered internal error", - proc_res); - } - } -} - -fn check_forbid_output(revision: Option<&str>, - props: &TestProps, - output_to_check: &str, - proc_res: &ProcRes) { - for pat in &props.forbid_output { - if output_to_check.contains(pat) { - fatal_proc_rec(revision, - "forbidden pattern found in compiler output", - proc_res); - } - } -} - -fn check_expected_errors(revision: Option<&str>, - expected_errors: Vec, - testpaths: &TestPaths, - proc_res: &ProcRes) { - // true if we found the error in question - let mut found_flags = vec![false; expected_errors.len()]; - - if proc_res.status.success() { - fatal_proc_rec(revision, "process did not return an error status", proc_res); - } - - let prefixes = expected_errors.iter().map(|ee| { - let expected = format!("{}:{}:", testpaths.file.display(), ee.line_num); - // On windows just translate all '\' path separators to '/' - expected.replace(r"\", "/") - }).collect::>(); - - // If the testcase being checked contains at least one expected "help" - // message, then we'll ensure that all "help" messages are expected. - // Otherwise, all "help" messages reported by the compiler will be ignored. - // This logic also applies to "note" messages. - let (expect_help, expect_note) = - expected_errors.iter() - .fold((false, false), - |(acc_help, acc_note), ee| - (acc_help || ee.kind == Some(ErrorKind::Help), - acc_note || ee.kind == Some(ErrorKind::Note))); - - // Scan and extract our error/warning messages, - // which look like: - // filename:line1:col1: line2:col2: *error:* msg - // filename:line1:col1: line2:col2: *warning:* msg - // where line1:col1: is the starting point, line2:col2: - // is the ending point, and * represents ANSI color codes. - // - // This pattern is ambiguous on windows, because filename may contain - // a colon, so any path prefix must be detected and removed first. 
- let mut unexpected = 0; - let mut not_found = 0; - for line in proc_res.stderr.lines() { - let mut was_expected = false; - let mut prev = 0; - for (i, ee) in expected_errors.iter().enumerate() { - if !found_flags[i] { - debug!("prefix={} ee.kind={:?} ee.msg={} line={}", - prefixes[i], - ee.kind, - ee.msg, - line); - // Suggestions have no line number in their output, so take on the line number of - // the previous expected error - if ee.kind == Some(ErrorKind::Suggestion) { - assert!(expected_errors[prev].kind == Some(ErrorKind::Help), - "SUGGESTIONs must be preceded by a HELP"); - if line.contains(&ee.msg) { - found_flags[i] = true; - was_expected = true; - break; - } - } - if - (prefix_matches(line, &prefixes[i]) || continuation(line)) && - (ee.kind.is_none() || line.contains(&ee.kind.as_ref().unwrap().to_string())) && - line.contains(&ee.msg) - { - found_flags[i] = true; - was_expected = true; - break; - } - } - prev = i; - } - - // ignore this msg which gets printed at the end - if line.contains("aborting due to") { - was_expected = true; - } - - if !was_expected && is_unexpected_compiler_message(line, expect_help, expect_note) { - error(revision, &format!("unexpected compiler message: '{}'", line)); - unexpected += 1; - } - } - - for (i, &flag) in found_flags.iter().enumerate() { - if !flag { - let ee = &expected_errors[i]; - error(revision, &format!("expected {} on line {} not found: {}", - ee.kind.as_ref() - .map_or("message".into(), - |k| k.to_string()), - ee.line_num, ee.msg)); - not_found += 1; - } - } - - if unexpected > 0 || not_found > 0 { - fatal_proc_rec( - revision, - &format!("{} unexpected errors found, {} expected errors not found", - unexpected, not_found), - proc_res); - } - - fn prefix_matches(line: &str, prefix: &str) -> bool { - use std::ascii::AsciiExt; - // On windows just translate all '\' path separators to '/' - let line = line.replace(r"\", "/"); - if cfg!(windows) { - line.to_ascii_lowercase().starts_with(&prefix.to_ascii_lowercase()) - } else { - line.starts_with(prefix) - } - } - - // A multi-line error will have followup lines which start with a space - // or open paren. 
- fn continuation( line: &str) -> bool { - line.starts_with(" ") || line.starts_with("(") - } -} - -fn is_unexpected_compiler_message(line: &str, expect_help: bool, expect_note: bool) -> bool { - let mut c = Path::new(line).components(); - let line = match c.next() { - Some(Component::Prefix(_)) => c.as_path().to_str().unwrap(), - _ => line, - }; - - let mut i = 0; - return scan_until_char(line, ':', &mut i) && - scan_char(line, ':', &mut i) && - scan_integer(line, &mut i) && - scan_char(line, ':', &mut i) && - scan_integer(line, &mut i) && - scan_char(line, ':', &mut i) && - scan_char(line, ' ', &mut i) && - scan_integer(line, &mut i) && - scan_char(line, ':', &mut i) && - scan_integer(line, &mut i) && - scan_char(line, ' ', &mut i) && - (scan_string(line, "error", &mut i) || - scan_string(line, "warning", &mut i) || - (expect_help && scan_string(line, "help", &mut i)) || - (expect_note && scan_string(line, "note", &mut i)) - ); -} - -fn scan_until_char(haystack: &str, needle: char, idx: &mut usize) -> bool { - if *idx >= haystack.len() { - return false; - } - let opt = haystack[(*idx)..].find(needle); - if opt.is_none() { - return false; - } - *idx = opt.unwrap(); - return true; -} - -fn scan_char(haystack: &str, needle: char, idx: &mut usize) -> bool { - if *idx >= haystack.len() { - return false; - } - let ch = haystack[*idx..].chars().next().unwrap(); - if ch != needle { - return false; - } - *idx += ch.len_utf8(); - return true; -} - -fn scan_integer(haystack: &str, idx: &mut usize) -> bool { - let mut i = *idx; - while i < haystack.len() { - let ch = haystack[i..].chars().next().unwrap(); - if ch < '0' || '9' < ch { - break; - } - i += ch.len_utf8(); - } - if i == *idx { - return false; - } - *idx = i; - return true; -} - -fn scan_string(haystack: &str, needle: &str, idx: &mut usize) -> bool { - let mut haystack_i = *idx; - let mut needle_i = 0; - while needle_i < needle.len() { - if haystack_i >= haystack.len() { - return false; - } - let ch = haystack[haystack_i..].chars().next().unwrap(); - haystack_i += ch.len_utf8(); - if !scan_char(needle, ch, &mut needle_i) { - return false; - } - } - *idx = haystack_i; - return true; -} - -struct ProcArgs { - prog: String, - args: Vec, -} - -struct ProcRes { - status: Status, - stdout: String, - stderr: String, - cmdline: String, -} - -enum Status { - Parsed(i32), - Normal(ExitStatus), -} - -impl Status { - fn code(&self) -> Option { - match *self { - Status::Parsed(i) => Some(i), - Status::Normal(ref e) => e.code(), - } - } - - fn success(&self) -> bool { - match *self { - Status::Parsed(i) => i == 0, - Status::Normal(ref e) => e.success(), - } - } -} - -impl fmt::Display for Status { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - match *self { - Status::Parsed(i) => write!(f, "exit code: {}", i), - Status::Normal(ref e) => e.fmt(f), - } - } -} - -fn compile_test(config: &Config, props: &TestProps, - testpaths: &TestPaths) -> ProcRes { - let aux_dir = aux_output_dir_name(config, testpaths); - // FIXME (#9639): This needs to handle non-utf8 paths - let link_args = vec!("-L".to_owned(), - aux_dir.to_str().unwrap().to_owned()); - let args = make_compile_args(config, - props, - link_args, - |a, b| TargetLocation::ThisFile(make_exe_name(a, b)), testpaths); - compose_and_run_compiler(config, props, testpaths, args, None) -} - -fn document(config: &Config, - props: &TestProps, - testpaths: &TestPaths, - out_dir: &Path) - -> ProcRes { - if props.build_aux_docs { - for rel_ab in &props.aux_builds { - let aux_testpaths = 
compute_aux_test_paths(config, testpaths, rel_ab); - let aux_props = header::load_props(&aux_testpaths.file); - let auxres = document(config, &aux_props, &aux_testpaths, out_dir); - if !auxres.status.success() { - return auxres; - } - } - } - - let aux_dir = aux_output_dir_name(config, testpaths); - let mut args = vec!["-L".to_owned(), - aux_dir.to_str().unwrap().to_owned(), - "-o".to_owned(), - out_dir.to_str().unwrap().to_owned(), - testpaths.file.to_str().unwrap().to_owned()]; - args.extend(props.compile_flags.iter().cloned()); - let args = ProcArgs { - prog: config.rustdoc_path.to_str().unwrap().to_owned(), - args: args, - }; - compose_and_run_compiler(config, props, testpaths, args, None) -} - -fn exec_compiled_test(config: &Config, props: &TestProps, - testpaths: &TestPaths) -> ProcRes { - - let env = props.exec_env.clone(); - - match &*config.target { - - "arm-linux-androideabi" | "aarch64-linux-android" => { - _arm_exec_compiled_test(config, props, testpaths, env) - } - - _=> { - let aux_dir = aux_output_dir_name(config, testpaths); - compose_and_run(config, - testpaths, - make_run_args(config, props, testpaths), - env, - config.run_lib_path.to_str().unwrap(), - Some(aux_dir.to_str().unwrap()), - None) - } - } -} - -fn compute_aux_test_paths(config: &Config, - testpaths: &TestPaths, - rel_ab: &str) - -> TestPaths -{ - let abs_ab = config.aux_base.join(rel_ab); - TestPaths { - file: abs_ab, - base: testpaths.base.clone(), - relative_dir: Path::new(rel_ab).parent() - .map(|p| p.to_path_buf()) - .unwrap_or_else(|| PathBuf::new()) - } -} - -fn compose_and_run_compiler(config: &Config, props: &TestProps, - testpaths: &TestPaths, args: ProcArgs, - input: Option) -> ProcRes { - if !props.aux_builds.is_empty() { - ensure_dir(&aux_output_dir_name(config, testpaths)); - } - - let aux_dir = aux_output_dir_name(config, testpaths); - // FIXME (#9639): This needs to handle non-utf8 paths - let extra_link_args = vec!["-L".to_owned(), - aux_dir.to_str().unwrap().to_owned()]; - - for rel_ab in &props.aux_builds { - let aux_testpaths = compute_aux_test_paths(config, testpaths, rel_ab); - let aux_props = header::load_props(&aux_testpaths.file); - let mut crate_type = if aux_props.no_prefer_dynamic { - Vec::new() - } else { - // We primarily compile all auxiliary libraries as dynamic libraries - // to avoid code size bloat and large binaries as much as possible - // for the test suite (otherwise including libstd statically in all - // executables takes up quite a bit of space). - // - // For targets like MUSL or Emscripten, however, there is no support for - // dynamic libraries so we just go back to building a normal library. Note, - // however, that for MUSL if the library is built with `force_host` then - // it's ok to be a dylib as the host should always support dylibs. 
- if (config.target.contains("musl") && !aux_props.force_host) || - config.target.contains("emscripten") - { - vec!("--crate-type=lib".to_owned()) - } else { - vec!("--crate-type=dylib".to_owned()) - } - }; - crate_type.extend(extra_link_args.clone()); - let aux_args = - make_compile_args(config, - &aux_props, - crate_type, - |a,b| { - let f = make_lib_name(a, &b.file, testpaths); - let parent = f.parent().unwrap(); - TargetLocation::ThisDirectory(parent.to_path_buf()) - }, - &aux_testpaths); - let auxres = compose_and_run(config, - &aux_testpaths, - aux_args, - Vec::new(), - config.compile_lib_path.to_str().unwrap(), - Some(aux_dir.to_str().unwrap()), - None); - if !auxres.status.success() { - fatal_proc_rec( - None, - &format!("auxiliary build of {:?} failed to compile: ", - aux_testpaths.file.display()), - &auxres); - } - - match &*config.target { - "arm-linux-androideabi" | "aarch64-linux-android" => { - _arm_push_aux_shared_library(config, testpaths); - } - _ => {} - } - } - - compose_and_run(config, - testpaths, - args, - props.rustc_env.clone(), - config.compile_lib_path.to_str().unwrap(), - Some(aux_dir.to_str().unwrap()), - input) -} - -fn ensure_dir(path: &Path) { - if path.is_dir() { return; } - fs::create_dir_all(path).unwrap(); -} - -fn compose_and_run(config: &Config, - testpaths: &TestPaths, - ProcArgs{ args, prog }: ProcArgs, - procenv: Vec<(String, String)> , - lib_path: &str, - aux_path: Option<&str>, - input: Option) -> ProcRes { - return program_output(config, testpaths, lib_path, - prog, aux_path, args, procenv, input); -} - -enum TargetLocation { - ThisFile(PathBuf), - ThisDirectory(PathBuf), -} - -fn make_compile_args(config: &Config, - props: &TestProps, - extras: Vec , - xform: F, - testpaths: &TestPaths) - -> ProcArgs where - F: FnOnce(&Config, &TestPaths) -> TargetLocation, -{ - let xform_file = xform(config, testpaths); - let target = if props.force_host { - &*config.host - } else { - &*config.target - }; - // FIXME (#9639): This needs to handle non-utf8 paths - let mut args = vec!(testpaths.file.to_str().unwrap().to_owned(), - "-L".to_owned(), - config.build_base.to_str().unwrap().to_owned(), - format!("--target={}", target)); - args.extend_from_slice(&extras); - if !props.no_prefer_dynamic { - args.push("-C".to_owned()); - args.push("prefer-dynamic".to_owned()); - } - let path = match xform_file { - TargetLocation::ThisFile(path) => { - args.push("-o".to_owned()); - path - } - TargetLocation::ThisDirectory(path) => { - args.push("--out-dir".to_owned()); - path - } - }; - args.push(path.to_str().unwrap().to_owned()); - if props.force_host { - args.extend(split_maybe_args(&config.host_rustcflags)); - } else { - args.extend(split_maybe_args(&config.target_rustcflags)); - } - args.extend(props.compile_flags.iter().cloned()); - return ProcArgs { - prog: config.rustc_path.to_str().unwrap().to_owned(), - args: args, - }; -} - -fn make_lib_name(config: &Config, auxfile: &Path, testpaths: &TestPaths) -> PathBuf { - // what we return here is not particularly important, as it - // happens; rustc ignores everything except for the directory. - let auxname = output_testname(auxfile); - aux_output_dir_name(config, testpaths).join(&auxname) -} - -fn make_exe_name(config: &Config, testpaths: &TestPaths) -> PathBuf { - let mut f = output_base_name(config, testpaths); - // FIXME: This is using the host architecture exe suffix, not target! 
- if config.target == "asmjs-unknown-emscripten" { - let mut fname = f.file_name().unwrap().to_os_string(); - fname.push(".js"); - f.set_file_name(&fname); - } else if !env::consts::EXE_SUFFIX.is_empty() { - let mut fname = f.file_name().unwrap().to_os_string(); - fname.push(env::consts::EXE_SUFFIX); - f.set_file_name(&fname); - } - f -} - -fn make_run_args(config: &Config, props: &TestProps, testpaths: &TestPaths) - -> ProcArgs { - // If we've got another tool to run under (valgrind), - // then split apart its command - let mut args = split_maybe_args(&config.runtool); - - // If this is emscripten, then run tests under nodejs - if config.target == "asmjs-unknown-emscripten" { - args.push("nodejs".to_owned()); - } - - let exe_file = make_exe_name(config, testpaths); - - // FIXME (#9639): This needs to handle non-utf8 paths - args.push(exe_file.to_str().unwrap().to_owned()); - - // Add the arguments in the run_flags directive - args.extend(split_maybe_args(&props.run_flags)); - - let prog = args.remove(0); - return ProcArgs { - prog: prog, - args: args, - }; -} - -fn split_maybe_args(argstr: &Option) -> Vec { - match *argstr { - Some(ref s) => { - s - .split(' ') - .filter_map(|s| { - if s.chars().all(|c| c.is_whitespace()) { - None - } else { - Some(s.to_owned()) - } - }).collect() - } - None => Vec::new() - } -} - -fn program_output(config: &Config, testpaths: &TestPaths, lib_path: &str, prog: String, - aux_path: Option<&str>, args: Vec, - env: Vec<(String, String)>, - input: Option) -> ProcRes { - let cmdline = - { - let cmdline = make_cmdline(lib_path, - &prog, - &args); - logv(config, format!("executing {}", cmdline)); - cmdline - }; - let procsrv::Result { - out, - err, - status - } = procsrv::run(lib_path, - &prog, - aux_path, - &args, - env, - input).expect(&format!("failed to exec `{}`", prog)); - dump_output(config, testpaths, &out, &err); - return ProcRes { - status: Status::Normal(status), - stdout: out, - stderr: err, - cmdline: cmdline, - }; -} - -fn make_cmdline(libpath: &str, prog: &str, args: &[String]) -> String { - use util; - - // Linux and mac don't require adjusting the library search path - if cfg!(unix) { - format!("{} {}", prog, args.join(" ")) - } else { - // Build the LD_LIBRARY_PATH variable as it would be seen on the command line - // for diagnostic purposes - fn lib_path_cmd_prefix(path: &str) -> String { - format!("{}=\"{}\"", util::lib_path_env_var(), util::make_new_path(path)) - } - - format!("{} {} {}", lib_path_cmd_prefix(libpath), prog, args.join(" ")) - } -} - -fn dump_output(config: &Config, testpaths: &TestPaths, out: &str, err: &str) { - dump_output_file(config, testpaths, out, "out"); - dump_output_file(config, testpaths, err, "err"); - maybe_dump_to_stdout(config, out, err); -} - -fn dump_output_file(config: &Config, - testpaths: &TestPaths, - out: &str, - extension: &str) { - let outfile = make_out_name(config, testpaths, extension); - File::create(&outfile).unwrap().write_all(out.as_bytes()).unwrap(); -} - -fn make_out_name(config: &Config, testpaths: &TestPaths, extension: &str) -> PathBuf { - output_base_name(config, testpaths).with_extension(extension) -} - -fn aux_output_dir_name(config: &Config, testpaths: &TestPaths) -> PathBuf { - let f = output_base_name(config, testpaths); - let mut fname = f.file_name().unwrap().to_os_string(); - fname.push(&format!(".{}.libaux", config.mode)); - f.with_file_name(&fname) -} - -fn output_testname(filepath: &Path) -> PathBuf { - PathBuf::from(filepath.file_stem().unwrap()) -} - -fn output_base_name(config: 
&Config, testpaths: &TestPaths) -> PathBuf { - let dir = config.build_base.join(&testpaths.relative_dir); - - // Note: The directory `dir` is created during `collect_tests_from_dir` - dir - .join(&output_testname(&testpaths.file)) - .with_extension(&config.stage_id) -} - -fn maybe_dump_to_stdout(config: &Config, out: &str, err: &str) { - if config.verbose { - println!("------{}------------------------------", "stdout"); - println!("{}", out); - println!("------{}------------------------------", "stderr"); - println!("{}", err); - println!("------------------------------------------"); - } -} - -fn error(revision: Option<&str>, err: &str) { - match revision { - Some(rev) => println!("\nerror in revision `{}`: {}", rev, err), - None => println!("\nerror: {}", err) - } -} - -fn fatal(revision: Option<&str>, err: &str) -> ! { - error(revision, err); panic!(); -} - -fn fatal_proc_rec(revision: Option<&str>, err: &str, proc_res: &ProcRes) -> ! { - error(revision, err); - print!("\ -status: {}\n\ -command: {}\n\ -stdout:\n\ -------------------------------------------\n\ -{}\n\ -------------------------------------------\n\ -stderr:\n\ -------------------------------------------\n\ -{}\n\ -------------------------------------------\n\ -\n", - proc_res.status, proc_res.cmdline, proc_res.stdout, - proc_res.stderr); - panic!(); -} - -fn _arm_exec_compiled_test(config: &Config, - props: &TestProps, - testpaths: &TestPaths, - env: Vec<(String, String)>) - -> ProcRes { - let args = make_run_args(config, props, testpaths); - let cmdline = make_cmdline("", - &args.prog, - &args.args); - - // get bare program string - let mut tvec: Vec = args.prog - .split('/') - .map(str::to_owned) - .collect(); - let prog_short = tvec.pop().unwrap(); - - // copy to target - let copy_result = procsrv::run("", - &config.adb_path, - None, - &[ - "push".to_owned(), - args.prog.clone(), - config.adb_test_dir.clone() - ], - vec!(("".to_owned(), "".to_owned())), - Some("".to_owned())) - .expect(&format!("failed to exec `{}`", config.adb_path)); - - if config.verbose { - println!("push ({}) {} {} {}", - config.target, - args.prog, - copy_result.out, - copy_result.err); - } - - logv(config, format!("executing ({}) {}", config.target, cmdline)); - - let mut runargs = Vec::new(); - - // run test via adb_run_wrapper - runargs.push("shell".to_owned()); - for (key, val) in env { - runargs.push(format!("{}={}", key, val)); - } - runargs.push(format!("{}/../adb_run_wrapper.sh", config.adb_test_dir)); - runargs.push(format!("{}", config.adb_test_dir)); - runargs.push(format!("{}", prog_short)); - - for tv in &args.args { - runargs.push(tv.to_owned()); - } - procsrv::run("", - &config.adb_path, - None, - &runargs, - vec!(("".to_owned(), "".to_owned())), Some("".to_owned())) - .expect(&format!("failed to exec `{}`", config.adb_path)); - - // get exitcode of result - runargs = Vec::new(); - runargs.push("shell".to_owned()); - runargs.push("cat".to_owned()); - runargs.push(format!("{}/{}.exitcode", config.adb_test_dir, prog_short)); - - let procsrv::Result{ out: exitcode_out, err: _, status: _ } = - procsrv::run("", - &config.adb_path, - None, - &runargs, - vec!(("".to_owned(), "".to_owned())), - Some("".to_owned())) - .expect(&format!("failed to exec `{}`", config.adb_path)); - - let mut exitcode: i32 = 0; - for c in exitcode_out.chars() { - if !c.is_numeric() { break; } - exitcode = exitcode * 10 + match c { - '0' ... 
'9' => c as i32 - ('0' as i32), - _ => 101, - } - } - - // get stdout of result - runargs = Vec::new(); - runargs.push("shell".to_owned()); - runargs.push("cat".to_owned()); - runargs.push(format!("{}/{}.stdout", config.adb_test_dir, prog_short)); - - let procsrv::Result{ out: stdout_out, err: _, status: _ } = - procsrv::run("", - &config.adb_path, - None, - &runargs, - vec!(("".to_owned(), "".to_owned())), - Some("".to_owned())) - .expect(&format!("failed to exec `{}`", config.adb_path)); - - // get stderr of result - runargs = Vec::new(); - runargs.push("shell".to_owned()); - runargs.push("cat".to_owned()); - runargs.push(format!("{}/{}.stderr", config.adb_test_dir, prog_short)); - - let procsrv::Result{ out: stderr_out, err: _, status: _ } = - procsrv::run("", - &config.adb_path, - None, - &runargs, - vec!(("".to_owned(), "".to_owned())), - Some("".to_owned())) - .expect(&format!("failed to exec `{}`", config.adb_path)); - - dump_output(config, - testpaths, - &stdout_out, - &stderr_out); - - ProcRes { - status: Status::Parsed(exitcode), - stdout: stdout_out, - stderr: stderr_out, - cmdline: cmdline - } -} - -fn _arm_push_aux_shared_library(config: &Config, testpaths: &TestPaths) { - let tdir = aux_output_dir_name(config, testpaths); - - let dirs = fs::read_dir(&tdir).unwrap(); - for file in dirs { - let file = file.unwrap().path(); - if file.extension().and_then(|s| s.to_str()) == Some("so") { - // FIXME (#9639): This needs to handle non-utf8 paths - let copy_result = procsrv::run("", - &config.adb_path, - None, - &[ - "push".to_owned(), - file.to_str() - .unwrap() - .to_owned(), - config.adb_test_dir.to_owned(), - ], - vec!(("".to_owned(), - "".to_owned())), - Some("".to_owned())) - .expect(&format!("failed to exec `{}`", config.adb_path)); - - if config.verbose { - println!("push ({}) {:?} {} {}", - config.target, file.display(), - copy_result.out, copy_result.err); - } - } - } -} - -// codegen tests (using FileCheck) - -fn compile_test_and_save_ir(config: &Config, props: &TestProps, - testpaths: &TestPaths) -> ProcRes { - let aux_dir = aux_output_dir_name(config, testpaths); - // FIXME (#9639): This needs to handle non-utf8 paths - let mut link_args = vec!("-L".to_owned(), - aux_dir.to_str().unwrap().to_owned()); - let llvm_args = vec!("--emit=llvm-ir".to_owned(),); - link_args.extend(llvm_args); - let args = make_compile_args(config, - props, - link_args, - |a, b| TargetLocation::ThisDirectory( - output_base_name(a, b).parent() - .unwrap().to_path_buf()), - testpaths); - compose_and_run_compiler(config, props, testpaths, args, None) -} - -fn check_ir_with_filecheck(config: &Config, testpaths: &TestPaths) -> ProcRes { - let irfile = output_base_name(config, testpaths).with_extension("ll"); - let prog = config.llvm_bin_path.as_ref().unwrap().join("FileCheck"); - let proc_args = ProcArgs { - // FIXME (#9639): This needs to handle non-utf8 paths - prog: prog.to_str().unwrap().to_owned(), - args: vec!(format!("-input-file={}", irfile.to_str().unwrap()), - testpaths.file.to_str().unwrap().to_owned()) - }; - compose_and_run(config, testpaths, proc_args, Vec::new(), "", None, None) -} - -fn run_codegen_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - assert!(props.revisions.is_empty(), "revisions not relevant here"); - - if config.llvm_bin_path.is_none() { - fatal(None, "missing --llvm-bin-path"); - } - - let mut proc_res = compile_test_and_save_ir(config, props, testpaths); - if !proc_res.status.success() { - fatal_proc_rec(None, "compilation failed!", &proc_res); - } - 
- proc_res = check_ir_with_filecheck(config, testpaths); - if !proc_res.status.success() { - fatal_proc_rec(None, - "verification with 'FileCheck' failed", - &proc_res); - } -} - -fn charset() -> &'static str { - // FreeBSD 10.1 defaults to GDB 6.1.1 which doesn't support "auto" charset - if cfg!(target_os = "bitrig") { - "auto" - } else if cfg!(target_os = "freebsd") { - "ISO-8859-1" - } else { - "UTF-8" - } -} - -fn run_rustdoc_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - assert!(props.revisions.is_empty(), "revisions not relevant here"); - - let out_dir = output_base_name(config, testpaths); - let _ = fs::remove_dir_all(&out_dir); - ensure_dir(&out_dir); - - let proc_res = document(config, props, testpaths, &out_dir); - if !proc_res.status.success() { - fatal_proc_rec(None, "rustdoc failed!", &proc_res); - } - let root = find_rust_src_root(config).unwrap(); - - let res = cmd2procres(config, - testpaths, - Command::new(&config.python) - .arg(root.join("src/etc/htmldocck.py")) - .arg(out_dir) - .arg(&testpaths.file)); - if !res.status.success() { - fatal_proc_rec(None, "htmldocck failed!", &res); - } -} - -fn run_codegen_units_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - assert!(props.revisions.is_empty(), "revisions not relevant here"); - - let proc_res = compile_test(config, props, testpaths); - - if !proc_res.status.success() { - fatal_proc_rec(None, "compilation failed!", &proc_res); - } - - check_no_compiler_crash(None, &proc_res); - - let prefix = "TRANS_ITEM "; - - let actual: HashSet = proc_res - .stdout - .lines() - .filter(|line| line.starts_with(prefix)) - .map(|s| (&s[prefix.len()..]).to_string()) - .collect(); - - let expected: HashSet = errors::load_errors(&testpaths.file, None) - .iter() - .map(|e| e.msg.trim().to_string()) - .collect(); - - if actual != expected { - let mut missing: Vec<_> = expected.difference(&actual).collect(); - missing.sort(); - - let mut too_much: Vec<_> = actual.difference(&expected).collect(); - too_much.sort(); - - println!("Expected and actual sets of codegen-items differ.\n\ - These items should have been contained but were not:\n\n\ - {}\n\n\ - These items were contained but should not have been:\n\n\ - {}\n\n", - missing.iter().fold("".to_string(), |s1, s2| s1 + "\n" + s2), - too_much.iter().fold("".to_string(), |s1, s2| s1 + "\n" + s2)); - panic!(); - } -} - -fn run_incremental_test(config: &Config, props: &TestProps, testpaths: &TestPaths) { - // Basic plan for a test incremental/foo/bar.rs: - // - load list of revisions pass1, fail2, pass3 - // - each should begin with `rpass`, `rfail`, or `cfail` - // - if `rpass`, expect compile and execution to succeed - // - if `cfail`, expect compilation to fail - // - if `rfail`, expect execution to fail - // - create a directory build/foo/bar.incremental - // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C pass1 - // - because name of revision starts with "pass", expect success - // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C fail2 - // - because name of revision starts with "fail", expect an error - // - load expected errors as usual, but filter for those that end in `[fail2]` - // - compile foo/bar.rs with -Z incremental=.../foo/bar.incremental and -C pass3 - // - because name of revision starts with "pass", expect success - // - execute build/foo/bar.exe and save output - // - // FIXME -- use non-incremental mode as an oracle? 
That doesn't apply - // to #[rustc_dirty] and clean tests I guess - - assert!(!props.revisions.is_empty(), "incremental tests require a list of revisions"); - - let output_base_name = output_base_name(config, testpaths); - - // Create the incremental workproduct directory. - let incremental_dir = output_base_name.with_extension("incremental"); - if incremental_dir.exists() { - fs::remove_dir_all(&incremental_dir).unwrap(); - } - fs::create_dir_all(&incremental_dir).unwrap(); - - if config.verbose { - print!("incremental_dir={}", incremental_dir.display()); - } - - for revision in &props.revisions { - let mut revision_props = props.clone(); - header::load_props_into(&mut revision_props, &testpaths.file, Some(&revision)); - - revision_props.compile_flags.extend(vec![ - format!("-Z"), - format!("incremental={}", incremental_dir.display()), - format!("--cfg"), - format!("{}", revision), - ]); - - if config.verbose { - print!("revision={:?} revision_props={:#?}", revision, revision_props); - } - - if revision.starts_with("rpass") { - run_rpass_test_revision(config, &revision_props, testpaths, Some(&revision)); - } else if revision.starts_with("rfail") { - run_rfail_test_revision(config, &revision_props, testpaths, Some(&revision)); - } else if revision.starts_with("cfail") { - run_cfail_test_revision(config, &revision_props, testpaths, Some(&revision)); - } else { - fatal( - Some(revision), - "revision name must begin with rpass, rfail, or cfail"); - } - } -} diff --git a/src/doc/README.md b/src/doc/README.md index b5972f7ddb..e1d95732b4 100644 --- a/src/doc/README.md +++ b/src/doc/README.md @@ -9,7 +9,7 @@ libraries. To generate HTML documentation from one source file/crate, do something like: -~~~~ +~~~~text rustdoc --output html-doc/ --output-format html ../src/libstd/path.rs ~~~~ @@ -20,7 +20,7 @@ rustdoc --output html-doc/ --output-format html ../src/libstd/path.rs To generate an HTML version of a doc from Markdown manually, you can do something like: -~~~~ +~~~~text rustdoc reference.md ~~~~ diff --git a/src/doc/book/advanced-linking.md b/src/doc/book/advanced-linking.md index 9ef6d5c2bf..c8a9082947 100644 --- a/src/doc/book/advanced-linking.md +++ b/src/doc/book/advanced-linking.md @@ -12,7 +12,7 @@ the `link_args` attribute. This attribute is applied to `extern` blocks and specifies raw flags which need to get passed to the linker when producing an artifact. An example usage would be: -``` no_run +```rust,no_run #![feature(link_args)] #[link_args = "-foo -bar -baz"] @@ -52,7 +52,7 @@ By default, all Rust programs on Linux will link to the system `libc` along with a number of other libraries. 
Let's look at an example on a 64-bit Linux machine with GCC and `glibc` (by far the most common `libc` on Linux):
-``` text
+```text
$ cat example.rs
fn main() {}
$ rustc example.rs
diff --git a/src/doc/book/casting-between-types.md b/src/doc/book/casting-between-types.md
index 7d03d2991a..a101f397c3 100644
--- a/src/doc/book/casting-between-types.md
+++ b/src/doc/book/casting-between-types.md
@@ -135,14 +135,14 @@ cast four bytes into a `u32`:
```rust,ignore
let a = [0u8, 0u8, 0u8, 0u8];
-let b = a as u32; // four eights makes 32
+let b = a as u32; // four u8s makes a u32
```
This errors with:
```text
error: non-scalar cast: `[u8; 4]` as `u32`
-let b = a as u32; // four eights makes 32
+let b = a as u32; // four u8s makes a u32
^~~~~~~~
```
@@ -165,10 +165,15 @@ Rust lets us:
```rust
use std::mem;
-unsafe {
- let a = [0u8, 0u8, 0u8, 0u8];
-
- let b = mem::transmute::<[u8; 4], u32>(a);
+fn main() {
+ unsafe {
+ let a = [0u8, 1u8, 0u8, 0u8];
+ let b = mem::transmute::<[u8; 4], u32>(a);
+ println!("{}", b); // 256
+ // or, more concisely:
+ let c: u32 = mem::transmute(a);
+ println!("{}", c); // 256
+ }
}
```
diff --git a/src/doc/book/closures.md b/src/doc/book/closures.md
index a8135ad384..dedf9d5c28 100644
--- a/src/doc/book/closures.md
+++ b/src/doc/book/closures.md
@@ -319,6 +319,53 @@ assert_eq!(3, answer);
Now we take a trait object, a `&Fn`. And we have to make a reference to our closure when we pass it to `call_with_one`, so we use `&||`.
+A quick note about closures that use explicit lifetimes. Sometimes you might have a closure
+that takes a reference like so:
+
+```
+fn call_with_ref<F>(some_closure:F) -> i32
+ where F: Fn(&i32) -> i32 {
+
+ let mut value = 0;
+ some_closure(&value)
+}
+```
+
+Normally you can specify the lifetime of the parameter to our closure. We
+could annotate it on the function declaration:
+
+```ignore
+fn call_with_ref<'a, F>(some_closure:F) -> i32
+ where F: Fn(&'a i32) -> i32 {
+```
+
+However, this presents a problem in our case. When you specify the explicit
+lifetime on a function it binds that lifetime to the *entire* scope of the function
+instead of just the invocation scope of our closure. This means that the borrow checker
+will see a mutable reference in the same lifetime as our immutable reference and fail
+to compile.
+
+In order to say that we only need the lifetime to be valid for the invocation scope
+of the closure we can use Higher-Ranked Trait Bounds with the `for<...>` syntax:
+
+```ignore
+fn call_with_ref<F>(some_closure:F) -> i32
+ where F: for<'a> Fn(&'a i32) -> i32 {
+```
+
+This lets the Rust compiler find the minimum lifetime to invoke our closure and
+satisfy the borrow checker's rules. Our function then compiles and executes as we
+expect.
+
+```
+fn call_with_ref<F>(some_closure:F) -> i32
+ where F: for<'a> Fn(&'a i32) -> i32 {
+
+ let mut value = 0;
+ some_closure(&value)
+}
+```
+
# Function pointers and closures
A function pointer is kind of like a closure that has no environment.
As such, @@ -344,7 +391,7 @@ assert_eq!(2, answer); In this example, we don’t strictly need the intermediate variable `f`, the name of the function works just fine too: -```ignore +```rust,ignore let answer = call_with_one(&add_one); ``` @@ -492,12 +539,12 @@ fn factory() -> Box i32> { Box::new(move |x| x + num) } -# fn main() { +fn main() { let f = factory(); let answer = f(1); assert_eq!(6, answer); -# } +} ``` By making the inner closure a `move Fn`, we create a new stack frame for our diff --git a/src/doc/book/compiler-plugins.md b/src/doc/book/compiler-plugins.md index 1af05bfea1..2d0cc61fb1 100644 --- a/src/doc/book/compiler-plugins.md +++ b/src/doc/book/compiler-plugins.md @@ -37,7 +37,7 @@ Let's write a plugin [`roman_numerals.rs`](https://github.com/rust-lang/rust/tree/master/src/test/auxiliary/roman_numerals.rs) that implements Roman numeral integer literals. -```ignore +```rust,ignore #![crate_type="dylib"] #![feature(plugin_registrar, rustc_private)] @@ -102,7 +102,7 @@ pub fn plugin_registrar(reg: &mut Registry) { Then we can use `rn!()` like any other macro: -```ignore +```rust,ignore #![feature(plugin)] #![plugin(roman_numerals)] @@ -132,7 +132,7 @@ Some of the [macro debugging tips](macros.html#debugging-macro-code) are applica You can use `syntax::parse` to turn token trees into higher-level syntax elements like expressions: -```ignore +```rust,ignore fn expand_foo(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) -> Box { @@ -169,7 +169,7 @@ infrastructure](../reference.html#lint-check-attributes) with additional checks code style, safety, etc. Now let's write a plugin [`lint_plugin_test.rs`](https://github.com/rust-lang/rust/blob/master/src/test/auxiliary/lint_plugin_test.rs) that warns about any item named `lintme`. -```ignore +```rust,ignore #![feature(plugin_registrar)] #![feature(box_syntax, rustc_private)] @@ -211,7 +211,7 @@ pub fn plugin_registrar(reg: &mut Registry) { Then code like -```ignore +```rust,ignore #![plugin(lint_plugin_test)] fn lintme() { } diff --git a/src/doc/book/concurrency.md b/src/doc/book/concurrency.md index ba4496b93f..a783650f8e 100644 --- a/src/doc/book/concurrency.md +++ b/src/doc/book/concurrency.md @@ -162,10 +162,10 @@ The same [ownership system](ownership.html) that helps prevent using pointers incorrectly also helps rule out data races, one of the worst kinds of concurrency bugs. -As an example, here is a Rust program that could have a data race in many +As an example, here is a Rust program that would have a data race in many languages. It will not compile: -```ignore +```rust,ignore use std::thread; use std::time::Duration; @@ -174,7 +174,7 @@ fn main() { for i in 0..3 { thread::spawn(move || { - data[i] += 1; + data[0] += i; }); } @@ -186,7 +186,7 @@ This gives us an error: ```text 8:17 error: capture of moved value: `data` - data[i] += 1; + data[0] += i; ^~~~ ``` @@ -195,11 +195,6 @@ thread, and the thread takes ownership of the reference, we'd have three owners! `data` gets moved out of `main` in the first call to `spawn()`, so subsequent calls in the loop cannot use this variable. -Note that this specific example will not cause a data race since different array -indices are being accessed. But this can't be determined at compile time, and in -a similar situation where `i` is a constant or is random, you would have a data -race. - So, we need some type that lets us have more than one owning reference to a value. Usually, we'd use `Rc` for this, which is a reference counted type that provides shared ownership. 
It has some runtime bookkeeping that keeps track @@ -209,7 +204,7 @@ Calling `clone()` on an `Rc` will return a new owned reference and bump the internal reference count. We create one of these for each thread: -```ignore +```rust,ignore use std::thread; use std::time::Duration; use std::rc::Rc; @@ -223,7 +218,7 @@ fn main() { // use it in a thread thread::spawn(move || { - data_ref[i] += 1; + data_ref[0] += i; }); } @@ -255,7 +250,7 @@ In essence, `Arc` is a type that lets us share ownership of data _across threads_. -```ignore +```rust,ignore use std::thread; use std::sync::Arc; use std::time::Duration; @@ -266,7 +261,7 @@ fn main() { for i in 0..3 { let data = data.clone(); thread::spawn(move || { - data[i] += 1; + data[0] += i; }); } @@ -281,7 +276,7 @@ And... still gives us an error. ```text :11:24 error: cannot borrow immutable borrowed content as mutable -:11 data[i] += 1; +:11 data[0] += i; ^~~~ ``` @@ -317,7 +312,7 @@ fn main() { let data = data.clone(); thread::spawn(move || { let mut data = data.lock().unwrap(); - data[i] += 1; + data[0] += i; }); } @@ -341,7 +336,7 @@ The lock "release" here is implicit; when the result of the lock (in this case, Note that [`lock`](../std/sync/struct.Mutex.html#method.lock) method of [`Mutex`](../std/sync/struct.Mutex.html) has this signature: -```ignore +```rust,ignore fn lock(&self) -> LockResult> ``` @@ -360,7 +355,7 @@ Let's examine the body of the thread more closely: # let data = data.clone(); thread::spawn(move || { let mut data = data.lock().unwrap(); - data[i] += 1; + data[0] += i; }); # } # thread::sleep(Duration::from_millis(50)); diff --git a/src/doc/book/const-and-static.md b/src/doc/book/const-and-static.md index 08ff3894c9..11aa25ac81 100644 --- a/src/doc/book/const-and-static.md +++ b/src/doc/book/const-and-static.md @@ -79,5 +79,5 @@ the result of a function call or anything similarly complex or at runtime. Almost always, if you can choose between the two, choose `const`. It’s pretty rare that you actually want a memory location associated with your constant, -and using a const allows for optimizations like constant propagation not only +and using a `const` allows for optimizations like constant propagation not only in your crate but downstream crates. diff --git a/src/doc/book/crates-and-modules.md b/src/doc/book/crates-and-modules.md index b3ccefe0a6..43ac30c35c 100644 --- a/src/doc/book/crates-and-modules.md +++ b/src/doc/book/crates-and-modules.md @@ -115,7 +115,7 @@ $ ls target/debug build deps examples libphrases-a7448e02a0468eaa.rlib native ``` -`libphrases-hash.rlib` is the compiled crate. Before we see how to use this +`libphrases-.rlib` is the compiled crate. Before we see how to use this crate from another crate, let’s break it up into multiple files. # Multiple File Crates diff --git a/src/doc/book/documentation.md b/src/doc/book/documentation.md index 8d1e58ac17..4a41bb7b7f 100644 --- a/src/doc/book/documentation.md +++ b/src/doc/book/documentation.md @@ -362,7 +362,7 @@ Here’s an example of documenting a macro: /// # } /// ``` /// -/// ```should_panic +/// ```rust,should_panic /// # #[macro_use] extern crate foo; /// # fn main() { /// panic_unless!(true == false, “I’m broken.”); @@ -429,7 +429,7 @@ There are a few more annotations that are useful to help `rustdoc` do the right thing when testing your code: ```rust -/// ```ignore +/// ```rust,ignore /// fn foo() { /// ``` # fn foo() {} @@ -441,7 +441,7 @@ with `text` if it's not code, or using `#`s to get a working example that only shows the part you care about. 
```rust -/// ```should_panic +/// ```rust,should_panic /// assert!(false); /// ``` # fn foo() {} @@ -451,7 +451,7 @@ only shows the part you care about. not actually pass as a test. ```rust -/// ```no_run +/// ```rust,no_run /// loop { /// println!("Hello, world"); /// } @@ -563,7 +563,7 @@ can be useful when changing some options, or when writing a macro. `rustdoc` will show the documentation for a public re-export in both places: -```ignore +```rust,ignore extern crate foo; pub use foo::bar; @@ -575,7 +575,7 @@ documentation in both places. This behavior can be suppressed with `no_inline`: -```ignore +```rust,ignore extern crate foo; #[doc(no_inline)] diff --git a/src/doc/book/error-handling.md b/src/doc/book/error-handling.md index 12cb71973a..c914c33a5a 100644 --- a/src/doc/book/error-handling.md +++ b/src/doc/book/error-handling.md @@ -225,7 +225,7 @@ sense to put it into a function: ```rust # fn find(_: &str, _: char) -> Option { None } // Returns the extension of the given file name, where the extension is defined -// as all characters proceeding the first `.`. +// as all characters following the first `.`. // If `file_name` has no `.`, then `None` is returned. fn extension_explicit(file_name: &str) -> Option<&str> { match find(file_name, '.') { @@ -274,7 +274,7 @@ to get rid of the case analysis: ```rust # fn find(_: &str, _: char) -> Option { None } // Returns the extension of the given file name, where the extension is defined -// as all characters proceeding the first `.`. +// as all characters following the first `.`. // If `file_name` has no `.`, then `None` is returned. fn extension(file_name: &str) -> Option<&str> { find(file_name, '.').map(|i| &file_name[i+1..]) @@ -1573,8 +1573,9 @@ detail on Getopts, but there is [some good documentation][15] describing it. The short story is that Getopts generates an argument parser and a help message from a vector of options (The fact that it is a vector is hidden behind a struct and a set of methods). Once the -parsing is done, we can decode the program arguments into a Rust -struct. From there, we can get information about the flags, for +parsing is done, the parser returns a struct that records matches +for defined options, and remaining "free" arguments. +From there, we can get information about the flags, for instance, whether they were passed in, and what arguments they had. Here's our program with the appropriate `extern crate` statements, and the basic argument setup for Getopts: @@ -1605,8 +1606,8 @@ fn main() { print_usage(&program, opts); return; } - let data_path = &args[1]; - let city = &args[2]; + let data_path = &matches.free[0]; + let city: &str = &matches.free[1]; // Do stuff with information } @@ -1680,8 +1681,8 @@ fn main() { return; } - let data_path = &args[1]; - let city: &str = &args[2]; + let data_path = &matches.free[0]; + let city: &str = &matches.free[1]; let file = File::open(data_path).unwrap(); let mut rdr = csv::Reader::from_reader(file); @@ -1792,13 +1793,15 @@ fn main() { Ok(m) => { m } Err(e) => { panic!(e.to_string()) } }; + if matches.opt_present("h") { print_usage(&program, opts); return; } - let data_path = &args[1]; - let city = &args[2]; + let data_path = &matches.free[0]; + let city: &str = &matches.free[1]; + for pop in search(data_path, city) { println!("{}, {}: {:?}", pop.city, pop.country, pop.count); } @@ -1876,14 +1879,14 @@ when calling `search`: ```rust,ignore ... 
-match search(&data_file, &city) { - Ok(pops) => { - for pop in pops { - println!("{}, {}: {:?}", pop.city, pop.country, pop.count); + match search(data_path, city) { + Ok(pops) => { + for pop in pops { + println!("{}, {}: {:?}", pop.city, pop.country, pop.count); + } } + Err(err) => println!("{}", err) } - Err(err) => println!("{}", err) -} ... ``` @@ -1914,43 +1917,37 @@ fn print_usage(program: &str, opts: Options) { println!("{}", opts.usage(&format!("Usage: {} [options] ", program))); } ``` -The next part is going to be only a little harder: +Of course we need to adapt the argument handling code: ```rust,ignore ... -let mut opts = Options::new(); -opts.optopt("f", "file", "Choose an input file, instead of using STDIN.", "NAME"); -opts.optflag("h", "help", "Show this usage message."); -... -let file = matches.opt_str("f"); -let data_file = &file.as_ref().map(Path::new); - -let city = if !matches.free.is_empty() { - &matches.free[0] -} else { - print_usage(&program, opts); - return; -}; - -match search(data_file, city) { - Ok(pops) => { - for pop in pops { - println!("{}, {}: {:?}", pop.city, pop.country, pop.count); + let mut opts = Options::new(); + opts.optopt("f", "file", "Choose an input file, instead of using STDIN.", "NAME"); + opts.optflag("h", "help", "Show this usage message."); + ... + let data_path = matches.opt_str("f"); + + let city = if !matches.free.is_empty() { + &matches.free[0] + } else { + print_usage(&program, opts); + return; + }; + + match search(&data_path, city) { + Ok(pops) => { + for pop in pops { + println!("{}, {}: {:?}", pop.city, pop.country, pop.count); + } } + Err(err) => println!("{}", err) } - Err(err) => println!("{}", err) -} ... ``` -In this piece of code, we take `file` (which has the type -`Option`), and convert it to a type that `search` can use, in -this case, `&Option>`. To do this, we take a reference of -file, and map `Path::new` onto it. In this case, `as_ref()` converts -the `Option` into an `Option<&str>`, and from there, we can -execute `Path::new` to the content of the optional, and return the -optional of the new value. Once we have that, it is a simple matter of -getting the `city` argument and executing `search`. +We've made the user experience a bit nicer by showing the usage message, +instead of a panic from an out-of-bounds index, when `city`, the +remaining free argument, is not present. Modifying `search` is slightly trickier. The `csv` crate can build a parser out of @@ -2000,6 +1997,8 @@ enum CliError { And now for impls on `Display` and `Error`: ```rust,ignore +use std::fmt; + impl fmt::Display for CliError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { @@ -2020,13 +2019,13 @@ impl Error for CliError { } } - fn cause(&self) -> Option<&error::Error> { - match *self { + fn cause(&self) -> Option<&Error> { + match *self { CliError::Io(ref err) => Some(err), - CliError::Parse(ref err) => Some(err), - // Our custom error doesn't have an underlying cause, but we could - // modify it so that it does. - CliError::NotFound() => None, + CliError::Csv(ref err) => Some(err), + // Our custom error doesn't have an underlying cause, + // but we could modify it so that it does. + CliError::NotFound => None, } } } @@ -2122,10 +2121,10 @@ string and add a flag to the Option variable. Once we've done that, Getopts does ```rust,ignore ... 
-let mut opts = Options::new(); -opts.optopt("f", "file", "Choose an input file, instead of using STDIN.", "NAME"); -opts.optflag("h", "help", "Show this usage message."); -opts.optflag("q", "quiet", "Silences errors and warnings."); + let mut opts = Options::new(); + opts.optopt("f", "file", "Choose an input file, instead of using STDIN.", "NAME"); + opts.optflag("h", "help", "Show this usage message."); + opts.optflag("q", "quiet", "Silences errors and warnings."); ... ``` @@ -2133,13 +2132,16 @@ Now we only need to implement our “quiet” functionality. This requires us to tweak the case analysis in `main`: ```rust,ignore -match search(&args.arg_data_path, &args.arg_city) { - Err(CliError::NotFound) if args.flag_quiet => process::exit(1), - Err(err) => panic!("{}", err), - Ok(pops) => for pop in pops { - println!("{}, {}: {:?}", pop.city, pop.country, pop.count); +use std::process; +... + match search(&data_path, city) { + Err(CliError::NotFound) if matches.opt_present("q") => process::exit(1), + Err(err) => panic!("{}", err), + Ok(pops) => for pop in pops { + println!("{}, {}: {:?}", pop.city, pop.country, pop.count); + } } -} +... ``` Certainly, we don't want to be quiet if there was an IO error or if the data diff --git a/src/doc/book/ffi.md b/src/doc/book/ffi.md index 6aec8d2a04..f48e87c422 100644 --- a/src/doc/book/ffi.md +++ b/src/doc/book/ffi.md @@ -28,7 +28,7 @@ and add `extern crate libc;` to your crate root. The following is a minimal example of calling a foreign function which will compile if snappy is installed: -```no_run +```rust,no_run # #![feature(libc)] extern crate libc; use libc::size_t; @@ -62,7 +62,7 @@ keeping the binding correct at runtime. The `extern` block can be extended to cover the entire snappy API: -```no_run +```rust,no_run # #![feature(libc)] extern crate libc; use libc::{c_int, size_t}; @@ -209,7 +209,7 @@ A basic example is: Rust code: -```no_run +```rust,no_run extern fn callback(a: i32) { println!("I'm called from C with value {0}", a); } @@ -262,7 +262,7 @@ referenced Rust object. Rust code: -```no_run +```rust,no_run #[repr(C)] struct RustObject { a: i32, @@ -406,7 +406,7 @@ Foreign APIs often export a global variable which could do something like track global state. In order to access these variables, you declare them in `extern` blocks with the `static` keyword: -```no_run +```rust,no_run # #![feature(libc)] extern crate libc; @@ -425,7 +425,7 @@ Alternatively, you may need to alter global state provided by a foreign interface. To do this, statics can be declared with `mut` so we can mutate them. -```no_run +```rust,no_run # #![feature(libc)] extern crate libc; diff --git a/src/doc/book/functions.md b/src/doc/book/functions.md index 8a2444323f..3a10d2aecc 100644 --- a/src/doc/book/functions.md +++ b/src/doc/book/functions.md @@ -134,7 +134,7 @@ x = y = 5 In Rust, however, using `let` to introduce a binding is _not_ an expression. The following will produce a compile-time error: -```ignore +```rust,ignore let x = (let y = 5); // expected identifier, found keyword `let` ``` @@ -283,7 +283,7 @@ stack backtrace: A diverging function can be used as any type: -```should_panic +```rust,should_panic # fn diverges() -> ! { # panic!("This function never returns!"); # } diff --git a/src/doc/book/getting-started.md b/src/doc/book/getting-started.md index 16141d936e..e7d05a8d93 100644 --- a/src/doc/book/getting-started.md +++ b/src/doc/book/getting-started.md @@ -8,7 +8,7 @@ we’ll talk about Cargo, Rust’s build system and package manager. 
The first step to using Rust is to install it. Generally speaking, you’ll need an Internet connection to run the commands in this section, as we’ll be -downloading Rust from the internet. +downloading Rust from the Internet. We’ll be showing off a number of commands using a terminal, and those lines all start with `$`. We don't need to type in the `$`s, they are there to indicate @@ -39,13 +39,13 @@ Specifically they will each satisfy the following requirements: | Target | std |rustc|cargo| notes | |-------------------------------|-----|-----|-----|----------------------------| -| `i686-pc-windows-msvc` | ✓ | ✓ | ✓ | 32-bit MSVC (Windows 7+) | -| `x86_64-pc-windows-msvc` | ✓ | ✓ | ✓ | 64-bit MSVC (Windows 7+) | -| `i686-pc-windows-gnu` | ✓ | ✓ | ✓ | 32-bit MinGW (Windows 7+) | -| `x86_64-pc-windows-gnu` | ✓ | ✓ | ✓ | 64-bit MinGW (Windows 7+) | | `i686-apple-darwin` | ✓ | ✓ | ✓ | 32-bit OSX (10.7+, Lion+) | -| `x86_64-apple-darwin` | ✓ | ✓ | ✓ | 64-bit OSX (10.7+, Lion+) | +| `i686-pc-windows-gnu` | ✓ | ✓ | ✓ | 32-bit MinGW (Windows 7+) | +| `i686-pc-windows-msvc` | ✓ | ✓ | ✓ | 32-bit MSVC (Windows 7+) | | `i686-unknown-linux-gnu` | ✓ | ✓ | ✓ | 32-bit Linux (2.6.18+) | +| `x86_64-apple-darwin` | ✓ | ✓ | ✓ | 64-bit OSX (10.7+, Lion+) | +| `x86_64-pc-windows-gnu` | ✓ | ✓ | ✓ | 64-bit MinGW (Windows 7+) | +| `x86_64-pc-windows-msvc` | ✓ | ✓ | ✓ | 64-bit MSVC (Windows 7+) | | `x86_64-unknown-linux-gnu` | ✓ | ✓ | ✓ | 64-bit Linux (2.6.18+) | ### Tier 2 @@ -63,13 +63,28 @@ these platforms are required to have each of the following: | Target | std |rustc|cargo| notes | |-------------------------------|-----|-----|-----|----------------------------| -| `x86_64-unknown-linux-musl` | ✓ | | | 64-bit Linux with MUSL | +| `aarch64-apple-ios` | ✓ | | | ARM64 iOS | +| `aarch64-unknown-linux-gnu` | ✓ | ✓ | ✓ | ARM64 Linux (2.6.18+) | | `arm-linux-androideabi` | ✓ | | | ARM Android | -| `arm-unknown-linux-gnueabi` | ✓ | ✓ | | ARM Linux (2.6.18+) | -| `arm-unknown-linux-gnueabihf` | ✓ | ✓ | | ARM Linux (2.6.18+) | -| `aarch64-unknown-linux-gnu` | ✓ | | | ARM64 Linux (2.6.18+) | +| `arm-unknown-linux-gnueabi` | ✓ | ✓ | ✓ | ARM Linux (2.6.18+) | +| `arm-unknown-linux-gnueabihf` | ✓ | ✓ | ✓ | ARM Linux (2.6.18+) | +| `armv7-apple-ios` | ✓ | | | ARM iOS | +|`armv7-unknown-linux-gnueabihf`| ✓ | ✓ | ✓ | ARMv7 Linux (2.6.18+) | +| `armv7s-apple-ios` | ✓ | | | ARM iOS | +| `i386-apple-ios` | ✓ | | | 32-bit x86 iOS | +| `i586-pc-windows-msvc` | ✓ | | | 32-bit Windows w/o SSE | | `mips-unknown-linux-gnu` | ✓ | | | MIPS Linux (2.6.18+) | +| `mips-unknown-linux-musl` | ✓ | | | MIPS Linux with MUSL | | `mipsel-unknown-linux-gnu` | ✓ | | | MIPS (LE) Linux (2.6.18+) | +| `mipsel-unknown-linux-musl` | ✓ | | | MIPS (LE) Linux with MUSL | +| `powerpc-unknown-linux-gnu` | ✓ | | | PowerPC Linux (2.6.18+) | +| `powerpc64-unknown-linux-gnu` | ✓ | | | PPC64 Linux (2.6.18+) | +|`powerpc64le-unknown-linux-gnu`| ✓ | | | PPC64LE Linux (2.6.18+) | +| `x86_64-apple-ios` | ✓ | | | 64-bit x86 iOS | +| `x86_64-rumprun-netbsd` | ✓ | | | 64-bit NetBSD Rump Kernel | +| `x86_64-unknown-freebsd` | ✓ | ✓ | ✓ | 64-bit FreeBSD | +| `x86_64-unknown-linux-musl` | ✓ | | | 64-bit Linux with MUSL | +| `x86_64-unknown-netbsd` | ✓ | ✓ | ✓ | 64-bit NetBSD | ### Tier 3 @@ -82,27 +97,16 @@ unofficial locations. 
| Target | std |rustc|cargo| notes | |-------------------------------|-----|-----|-----|----------------------------| -| `i686-linux-android` | ✓ | | | 32-bit x86 Android | | `aarch64-linux-android` | ✓ | | | ARM64 Android | -| `powerpc-unknown-linux-gnu` | ✓ | | | PowerPC Linux (2.6.18+) | -| `powerpc64-unknown-linux-gnu` | ✓ | | | PPC64 Linux (2.6.18+) | -|`powerpc64le-unknown-linux-gnu`| ✓ | | | PPC64LE Linux (2.6.18+) | -|`armv7-unknown-linux-gnueabihf`| ✓ | | | ARMv7 Linux (2.6.18+) | -| `i386-apple-ios` | ✓ | | | 32-bit x86 iOS | -| `x86_64-apple-ios` | ✓ | | | 64-bit x86 iOS | -| `armv7-apple-ios` | ✓ | | | ARM iOS | -| `armv7s-apple-ios` | ✓ | | | ARM iOS | -| `aarch64-apple-ios` | ✓ | | | ARM64 iOS | +| `armv7-linux-androideabi` | ✓ | | | ARM-v7a Android | +| `i686-linux-android` | ✓ | | | 32-bit x86 Android | +| `i686-pc-windows-msvc` (XP) | ✓ | | | Windows XP support | | `i686-unknown-freebsd` | ✓ | ✓ | ✓ | 32-bit FreeBSD | -| `x86_64-unknown-freebsd` | ✓ | ✓ | ✓ | 64-bit FreeBSD | -| `x86_64-unknown-openbsd` | ✓ | ✓ | | 64-bit OpenBSD | -| `x86_64-unknown-netbsd` | ✓ | ✓ | | 64-bit NetBSD | +| `x86_64-pc-windows-msvc` (XP) | ✓ | | | Windows XP support | +| `x86_64-sun-solaris` | ✓ | ✓ | | 64-bit Solaris/SunOS | | `x86_64-unknown-bitrig` | ✓ | ✓ | | 64-bit Bitrig | | `x86_64-unknown-dragonfly` | ✓ | ✓ | | 64-bit DragonFlyBSD | -| `x86_64-rumprun-netbsd` | ✓ | | | 64-bit NetBSD Rump Kernel | -| `x86_64-sun-solaris` | ✓ | ✓ | | 64-bit Solaris/SunOS | -| `i686-pc-windows-msvc` (XP) | ✓ | | | Windows XP support | -| `x86_64-pc-windows-msvc` (XP) | ✓ | | | Windows XP support | +| `x86_64-unknown-openbsd` | ✓ | ✓ | | 64-bit OpenBSD | Note that this table can be expanded over time, this isn't the exhaustive set of tier 3 platforms that will ever be! @@ -396,20 +400,20 @@ Let’s convert the Hello World program to Cargo. To Cargo-fy a project, you nee to do three things: 1. Put your source file in the right directory. -2. Get rid of the old executable (`main.exe` on Windows, `main` everywhere else) - and make a new one. +2. Get rid of the old executable (`main.exe` on Windows, `main` everywhere + else). 3. Make a Cargo configuration file. Let's get started! -### Creating a new Executable and Source Directory +### Creating a Source Directory and Removing the Old Executable First, go back to your terminal, move to your *hello_world* directory, and enter the following commands: ```bash $ mkdir src -$ mv main.rs src/main.rs +$ mv main.rs src/main.rs # or 'move main.rs src/main.rs' on Windows $ rm main # or 'del main.exe' on Windows ``` @@ -575,8 +579,12 @@ look something like this: name = "hello_world" version = "0.1.0" authors = ["Your Name "] + +[dependencies] ``` +Do not worry about the `[dependencies]` line, we will come back to it later. + Cargo has populated *Cargo.toml* with reasonable defaults based on the arguments you gave it and your `git` global configuration. You may notice that Cargo has also initialized the `hello_world` directory as a `git` repository. diff --git a/src/doc/book/guessing-game.md b/src/doc/book/guessing-game.md index 590c7e8481..a2067e33a6 100644 --- a/src/doc/book/guessing-game.md +++ b/src/doc/book/guessing-game.md @@ -988,8 +988,7 @@ fn main() { # Complete! -At this point, you have successfully built the Guessing Game! Congratulations! +This project showed you a lot: `let`, `match`, methods, associated +functions, using external crates, and more. 
-This first project showed you a lot: `let`, `match`, methods, associated -functions, using external crates, and more. Our next project will show off -even more. +At this point, you have successfully built the Guessing Game! Congratulations! diff --git a/src/doc/book/inline-assembly.md b/src/doc/book/inline-assembly.md index a5a2d7ce74..2c2d89a1fb 100644 --- a/src/doc/book/inline-assembly.md +++ b/src/doc/book/inline-assembly.md @@ -4,7 +4,7 @@ For extremely low-level manipulations and performance reasons, one might wish to control the CPU directly. Rust supports using inline assembly to do this via the `asm!` macro. -```ignore +```rust,ignore asm!(assembly template : output operands : input operands diff --git a/src/doc/book/lifetimes.md b/src/doc/book/lifetimes.md index 695b1614fb..cb07573189 100644 --- a/src/doc/book/lifetimes.md +++ b/src/doc/book/lifetimes.md @@ -1,7 +1,7 @@ % Lifetimes -This guide is three of three presenting Rust’s ownership system. This is one of -Rust’s most unique and compelling features, with which Rust developers should +This is the last of three sections presenting Rust’s ownership system. This is one of +Rust’s most distinct and compelling features, with which Rust developers should become quite acquainted. Ownership is how Rust achieves its largest goal, memory safety. There are a few distinct concepts, each with its own chapter: diff --git a/src/doc/book/loops.md b/src/doc/book/loops.md index b5dde9be17..97ca2e3e70 100644 --- a/src/doc/book/loops.md +++ b/src/doc/book/loops.md @@ -74,7 +74,7 @@ for x in 0..10 { In slightly more abstract terms, -```ignore +```rust,ignore for var in expression { code } diff --git a/src/doc/book/macros.md b/src/doc/book/macros.md index c16e2ea453..f535fb96af 100644 --- a/src/doc/book/macros.md +++ b/src/doc/book/macros.md @@ -78,7 +78,7 @@ macro_rules! vec { Whoa, that’s a lot of new syntax! Let’s break it down. -```ignore +```rust,ignore macro_rules! vec { ... } ``` @@ -92,7 +92,7 @@ syntax and serves to distinguish a macro from an ordinary function. The macro is defined through a series of rules, which are pattern-matching cases. Above, we had -```ignore +```rust,ignore ( $( $x:expr ),* ) => { ... }; ``` @@ -112,7 +112,7 @@ separated by commas. Aside from the special matcher syntax, any Rust tokens that appear in a matcher must match exactly. For example, -```rust +```rust,ignore macro_rules! foo { (x => $e:expr) => (println!("mode X: {}", $e)); (y => $e:expr) => (println!("mode Y: {}", $e)); @@ -147,7 +147,7 @@ The right-hand side of a macro rule is ordinary Rust syntax, for the most part. But we can splice in bits of syntax captured by the matcher. From the original example: -```ignore +```rust,ignore $( temp_vec.push($x); )* @@ -165,7 +165,7 @@ within the repeated block. Another detail: the `vec!` macro has *two* pairs of braces on the right-hand side. They are often combined like so: -```ignore +```rust,ignore macro_rules! foo { () => {{ ... diff --git a/src/doc/book/mutability.md b/src/doc/book/mutability.md index 71acb551e6..e462715114 100644 --- a/src/doc/book/mutability.md +++ b/src/doc/book/mutability.md @@ -24,18 +24,16 @@ changed from one `i32` to another. 
[vb]: variable-bindings.html -If you want to change what the binding points to, you’ll need a [mutable reference][mr]: +You can also create a [reference][ref] to it, using `&x`, but if you want to use the reference to change it, you will need a mutable reference: ```rust let mut x = 5; let y = &mut x; ``` -[mr]: references-and-borrowing.html +[ref]: references-and-borrowing.html -`y` is an immutable binding to a mutable reference, which means that you can’t -bind `y` to something else (`y = &mut z`), but you can mutate the thing that’s -bound to `y` (`*y = 5`). A subtle distinction. +`y` is an immutable binding to a mutable reference, which means that you can’t bind 'y' to something else (`y = &mut z`), but `y` can be used to bind `x` to something else (`*y = 5`). A subtle distinction. Of course, if you need both: @@ -57,6 +55,8 @@ fn foo(mut x: i32) { # } ``` +Note that here, the `x` is mutable, but not the `y`. + [pattern]: patterns.html # Interior vs. Exterior Mutability diff --git a/src/doc/book/operators-and-overloading.md b/src/doc/book/operators-and-overloading.md index fcce831c2d..424e2cda61 100644 --- a/src/doc/book/operators-and-overloading.md +++ b/src/doc/book/operators-and-overloading.md @@ -123,7 +123,7 @@ fn main() { For `HasArea` and `Square`, we declare a type parameter `T` and replace `f64` with it. The `impl` needs more involved modifications: -```ignore +```rust,ignore impl HasArea for Square where T: Mul + Copy { ... } ``` diff --git a/src/doc/book/ownership.md b/src/doc/book/ownership.md index f8938be30e..f445bed015 100644 --- a/src/doc/book/ownership.md +++ b/src/doc/book/ownership.md @@ -1,7 +1,7 @@ % Ownership -This guide is one of three presenting Rust’s ownership system. This is one of -Rust’s most unique and compelling features, with which Rust developers should +This is the first of three sections presenting Rust’s ownership system. This is one of +Rust’s most distinct and compelling features, with which Rust developers should become quite acquainted. Ownership is how Rust achieves its largest goal, memory safety. There are a few distinct concepts, each with its own chapter: @@ -155,7 +155,7 @@ vector object and its data live in separate memory regions instead of being a single contiguous memory allocation (due to reasons we will not go into at this point of time). These two parts of the vector (the one on the stack and one on the heap) must agree with each other at all times with regards to -things like the length, capacity etc. +things like the length, capacity, etc. When we move `v` to `v2`, Rust actually does a bitwise copy of the vector object `v` into the stack allocation represented by `v2`. This shallow copy @@ -173,11 +173,11 @@ For example if we truncated the vector to just two elements through `v2`: v2.truncate(2); ``` -and `v1` were still accessible we'd end up with an invalid vector since `v1` +and `v` were still accessible we'd end up with an invalid vector since `v` would not know that the heap data has been truncated. Now, the part of the -vector `v1` on the stack does not agree with the corresponding part on the -heap. `v1` still thinks there are three elements in the vector and will -happily let us access the non existent element `v1[2]` but as you might +vector `v` on the stack does not agree with the corresponding part on the +heap. `v` still thinks there are three elements in the vector and will +happily let us access the non existent element `v[2]` but as you might already know this is a recipe for disaster. 
Especially because it might lead to a segmentation fault or worse allow an unauthorized user to read from memory to which they don't have access.
diff --git a/src/doc/book/primitive-types.md b/src/doc/book/primitive-types.md
index 69040931de..2a4b7ba37f 100644
--- a/src/doc/book/primitive-types.md
+++ b/src/doc/book/primitive-types.md
@@ -97,9 +97,10 @@ and `i64` is a signed, 64-bit integer.
## Variable-size types
-Rust also provides types whose size depends on the size of a pointer of the
-underlying machine. These types have ‘size’ as the category, and come in signed
-and unsigned varieties. This makes for two types: `isize` and `usize`.
+Rust also provides types whose particular size depends on the underlying machine
+architecture. Their range is sufficient to express the size of any collection, so
+these types have ‘size’ as the category. They come in signed and unsigned varieties
+which account for two types: `isize` and `usize`.
## Floating-point types
diff --git a/src/doc/book/references-and-borrowing.md b/src/doc/book/references-and-borrowing.md
index a08d53f958..a28f450c94 100644
--- a/src/doc/book/references-and-borrowing.md
+++ b/src/doc/book/references-and-borrowing.md
@@ -1,7 +1,7 @@
% References and Borrowing
-This guide is two of three presenting Rust’s ownership system. This is one of
-Rust’s most unique and compelling features, with which Rust developers should
+This is the second of three sections presenting Rust’s ownership system. This is one of
+Rust’s most distinct and compelling features, with which Rust developers should
become quite acquainted. Ownership is how Rust achieves its largest goal,
memory safety. There are a few distinct concepts, each with its own chapter:
@@ -77,6 +77,32 @@ let answer = foo(&v1, &v2);
// we can use v1 and v2 here!
```
+A more concrete example:
+
+```rust
+fn main() {
+ // Don't worry if you don't understand how `fold` works; the point here is that an immutable reference is borrowed.
+ fn sum_vec(v: &Vec<i32>) -> i32 {
+ return v.iter().fold(0, |a, &b| a + b);
+ }
+ // Borrow two vectors and sum them.
+ // This kind of borrowing does not allow mutation of the borrowed values.
+ fn foo(v1: &Vec<i32>, v2: &Vec<i32>) -> i32 {
+ // do stuff with v1 and v2
+ let s1 = sum_vec(v1);
+ let s2 = sum_vec(v2);
+ // return the answer
+ s1 + s2
+ }
+
+ let v1 = vec![1, 2, 3];
+ let v2 = vec![4, 5, 6];
+
+ let answer = foo(&v1, &v2);
+ println!("{}", answer);
+}
+```
+
Instead of taking `Vec<i32>`s as our arguments, we take a reference:
`&Vec<i32>`. And instead of passing `v1` and `v2` directly, we pass `&v1` and
`&v2`.
We call the `&T` type a ‘reference’, and rather than owning the resource,
diff --git a/src/doc/book/strings.md b/src/doc/book/strings.md
index f5ebceedd3..008644ec9a 100644
--- a/src/doc/book/strings.md
+++ b/src/doc/book/strings.md
@@ -32,7 +32,7 @@ include the newline and the leading spaces:
let s = "foo bar";
-assert_eq!("foo\n bar", s);
+assert_eq!("foo\n bar", s);
```
The second, with a `\`, trims the spaces and the newline:
diff --git a/src/doc/book/testing.md b/src/doc/book/testing.md
index 59d07e4f81..4ea114c4be 100644
--- a/src/doc/book/testing.md
+++ b/src/doc/book/testing.md
@@ -84,8 +84,8 @@ fn it_works() {
```
`assert!` is a macro provided by Rust which takes one argument: if the argument
-is `true`, nothing happens. If the argument is `false`, it `panic!`s. Let's run
-our tests again:
+is `true`, nothing happens. If the argument is `false`, it will `panic!`.
Let's +run our tests again: ```bash $ cargo test diff --git a/src/doc/book/the-stack-and-the-heap.md b/src/doc/book/the-stack-and-the-heap.md index a7b6faccd8..a1f6a065a2 100644 --- a/src/doc/book/the-stack-and-the-heap.md +++ b/src/doc/book/the-stack-and-the-heap.md @@ -175,6 +175,7 @@ And then `bold()` calls `italic()`: | **2** | **b**|**100**| | **1** | **a**| **5** | | 0 | x | 42 | + Whew! Our stack is growing tall. After `italic()` is over, its frame is deallocated, leaving only `bold()` and @@ -260,8 +261,7 @@ layout of a program which has been running for a while now: | (230) - 3 | | | | (230) - 4 | | 42 | | ... | ... | ... | -| 3 | y | → (230) - 4 | -| 2 | y | 42 | +| 2 | z | → (230) - 4 | | 1 | y | 42 | | 0 | x | → (230) - 1 | diff --git a/src/doc/book/trait-objects.md b/src/doc/book/trait-objects.md index 1d63435ed5..b31a34a042 100644 --- a/src/doc/book/trait-objects.md +++ b/src/doc/book/trait-objects.md @@ -306,7 +306,7 @@ let y = TraitObject { Not every trait can be used to make a trait object. For example, vectors implement `Clone`, but if we try to make a trait object: -```ignore +```rust,ignore let v = vec![1, 2, 3]; let o = &v as &Clone; ``` diff --git a/src/doc/book/traits.md b/src/doc/book/traits.md index b3b4197924..107ef2b44d 100644 --- a/src/doc/book/traits.md +++ b/src/doc/book/traits.md @@ -195,7 +195,7 @@ fn main() { `is_square()` needs to check that the sides are equal, so the sides must be of a type that implements the [`core::cmp::PartialEq`][PartialEq] trait: -```ignore +```rust,ignore impl Rectangle { ... } ``` diff --git a/src/doc/book/unsized-types.md b/src/doc/book/unsized-types.md index 73b90355e4..a23470d39f 100644 --- a/src/doc/book/unsized-types.md +++ b/src/doc/book/unsized-types.md @@ -47,7 +47,7 @@ pointers, can use this `impl`. # ?Sized If you want to write a function that accepts a dynamically sized type, you -can use the special bound, `?Sized`: +can use the special bound syntax, `?Sized`: ```rust struct Foo { @@ -55,6 +55,7 @@ struct Foo { } ``` -This `?`, read as “T may be `Sized`”, means that this bound is special: it -lets us match more kinds, not less. It’s almost like every `T` implicitly has -`T: Sized`, and the `?` undoes this default. +This `?Sized`, read as “T may or may not be `Sized`”, which allows us to match +both sized and unsized types. All generic type parameters implicitly +have the `Sized` bound, so the `?Sized` can be used to opt-out of the implicit +bound. diff --git a/src/doc/book/vectors.md b/src/doc/book/vectors.md index 75e961e4c4..1c44af2f21 100644 --- a/src/doc/book/vectors.md +++ b/src/doc/book/vectors.md @@ -40,7 +40,7 @@ The indices count from `0`, so the third element is `v[2]`. It’s also important to note that you must index with the `usize` type: -```ignore +```rust,ignore let v = vec![1, 2, 3, 4, 5]; let i: usize = 0; @@ -71,7 +71,7 @@ you cannot index with an `i32`. If you try to access an index that doesn’t exist: -```ignore +```rust,ignore let v = vec![1, 2, 3]; println!("Item 7 is {}", v[7]); ``` diff --git a/src/doc/footer.inc b/src/doc/footer.inc index b5eb589eb5..952846dd5e 100644 --- a/src/doc/footer.inc +++ b/src/doc/footer.inc @@ -1,7 +1,7 @@

Copyright © 2011-2015 The Rust Project Developers. Licensed under the Apache License, Version 2.0 -or the MIT license, at your option. +or the MIT license, at your option.

This file may not be copied, modified, or distributed except according to those terms.

diff --git a/src/doc/nomicon/subtyping.md b/src/doc/nomicon/subtyping.md index 5def5c3903..eb940e811a 100644 --- a/src/doc/nomicon/subtyping.md +++ b/src/doc/nomicon/subtyping.md @@ -53,7 +53,7 @@ inferred variance, so `Fn(T)` is invariant in `T`). Some important variances: * `&'a T` is variant over `'a` and `T` (as is `*const T` by metaphor) -* `&'a mut T` is variant with over `'a` but invariant over `T` +* `&'a mut T` is variant over `'a` but invariant over `T` * `Fn(T) -> U` is invariant over `T`, but variant over `U` * `Box`, `Vec`, and all other collections are variant over the types of their contents diff --git a/src/doc/nomicon/vec-alloc.md b/src/doc/nomicon/vec-alloc.md index c2ae1a4eb6..bc60a577bd 100644 --- a/src/doc/nomicon/vec-alloc.md +++ b/src/doc/nomicon/vec-alloc.md @@ -150,7 +150,7 @@ LLVM needs to work with different languages' semantics and custom allocators, it can't really intimately understand allocation. Instead, the main idea behind allocation is "doesn't overlap with other stuff". That is, heap allocations, stack allocations, and globals don't randomly overlap. Yep, it's about alias -analysis. As such, Rust can technically play a bit fast an loose with the notion of +analysis. As such, Rust can technically play a bit fast and loose with the notion of an allocation as long as it's *consistent*. Getting back to the empty allocation case, there are a couple of places where diff --git a/src/doc/reference.md b/src/doc/reference.md index fcf9aefaba..ebb111a2e2 100644 --- a/src/doc/reference.md +++ b/src/doc/reference.md @@ -2063,33 +2063,43 @@ arbitrarily complex configurations through nesting. The following configurations must be defined by the implementation: -* `debug_assertions` - Enabled by default when compiling without optimizations. - This can be used to enable extra debugging code in development but not in - production. For example, it controls the behavior of the standard library's - `debug_assert!` macro. -* `target_arch = "..."` - Target CPU architecture, such as `"x86"`, `"x86_64"` - `"mips"`, `"powerpc"`, `"powerpc64"`, `"arm"`, or `"aarch64"`. -* `target_endian = "..."` - Endianness of the target CPU, either `"little"` or - `"big"`. -* `target_env = ".."` - An option provided by the compiler by default - describing the runtime environment of the target platform. Some examples of - this are `musl` for builds targeting the MUSL libc implementation, `msvc` for - Windows builds targeting MSVC, and `gnu` frequently the rest of the time. This - option may also be blank on some platforms. +* `target_arch = "..."` - Target CPU architecture, such as `"x86"`, + `"x86_64"` `"mips"`, `"powerpc"`, `"powerpc64"`, `"arm"`, or + `"aarch64"`. This value is closely related to the first element of + the platform target triple, though it is not identical. +* `target_os = "..."` - Operating system of the target, examples + include `"windows"`, `"macos"`, `"ios"`, `"linux"`, `"android"`, + `"freebsd"`, `"dragonfly"`, `"bitrig"` , `"openbsd"` or + `"netbsd"`. This value is closely related to the second and third + element of the platform target triple, though it is not identical. * `target_family = "..."` - Operating system family of the target, e. g. `"unix"` or `"windows"`. The value of this configuration option is defined as a configuration itself, like `unix` or `windows`. -* `target_os = "..."` - Operating system of the target, examples include - `"windows"`, `"macos"`, `"ios"`, `"linux"`, `"android"`, `"freebsd"`, `"dragonfly"`, - `"bitrig"` , `"openbsd"` or `"netbsd"`. 
+* `unix` - See `target_family`. +* `windows` - See `target_family`. +* `target_env = ".."` - Further disambiguates the target platform with + information about the ABI/libc. Presently this value is either + `"gnu"`, `"msvc"`, `"musl"`, or the empty string. For historical + reasons this value has only been defined as non-empty when needed + for disambiguation. Thus on many GNU platforms this value will be + empty. This value is closely related to the fourth element of the + platform target triple, though it is not identical. For example, + embedded ABIs such as `gnueabihf` will simply define `target_env` as + `"gnu"`. +* `target_endian = "..."` - Endianness of the target CPU, either `"little"` or + `"big"`. * `target_pointer_width = "..."` - Target pointer width in bits. This is set to `"32"` for targets with 32-bit pointers, and likewise set to `"64"` for 64-bit pointers. +* `target_has_atomic = "..."` - Set of integer sizes on which the target can perform + atomic operations. Values are `"8"`, `"16"`, `"32"`, `"64"` and `"ptr"`. * `target_vendor = "..."` - Vendor of the target, for example `apple`, `pc`, or simply `"unknown"`. * `test` - Enabled when compiling the test harness (using the `--test` flag). -* `unix` - See `target_family`. -* `windows` - See `target_family`. +* `debug_assertions` - Enabled by default when compiling without optimizations. + This can be used to enable extra debugging code in development but not in + production. For example, it controls the behavior of the standard library's + `debug_assert!` macro. You can also set another attribute based on a `cfg` variable with `cfg_attr`: @@ -2287,6 +2297,9 @@ The currently implemented features of the reference compiler are: * `cfg_target_vendor` - Allows conditional compilation using the `target_vendor` matcher which is subject to change. +* `cfg_target_has_atomic` - Allows conditional compilation using the `target_has_atomic` + matcher which is subject to change. + * `concat_idents` - Allows use of the `concat_idents` macro, which is in many ways insufficient for concatenating identifiers, and may be removed entirely for something more wholesome. diff --git a/src/doc/style/errors/ergonomics.md b/src/doc/style/errors/ergonomics.md index a404d25bf3..269f2a2894 100644 --- a/src/doc/style/errors/ergonomics.md +++ b/src/doc/style/errors/ergonomics.md @@ -9,7 +9,7 @@ pattern. Prefer -```rust +```rust,ignore use std::io::{File, Open, Write, IoError}; struct Info { @@ -31,7 +31,7 @@ fn write_info(info: &Info) -> Result<(), IoError> { over -```rust +```rust,ignore use std::io::{File, Open, Write, IoError}; struct Info { diff --git a/src/doc/style/features/functions-and-methods/README.md b/src/doc/style/features/functions-and-methods/README.md index 611cd564cc..a3559ca3e7 100644 --- a/src/doc/style/features/functions-and-methods/README.md +++ b/src/doc/style/features/functions-and-methods/README.md @@ -4,7 +4,7 @@ Prefer -```rust +```rust,ignore impl Foo { pub fn frob(&self, w: widget) { ... } } @@ -12,7 +12,7 @@ impl Foo { over -```rust +```rust,ignore pub fn frob(foo: &Foo, w: widget) { ... 
} ``` diff --git a/src/doc/style/features/functions-and-methods/input.md b/src/doc/style/features/functions-and-methods/input.md index 9b243bc72e..5b63a45144 100644 --- a/src/doc/style/features/functions-and-methods/input.md +++ b/src/doc/style/features/functions-and-methods/input.md @@ -6,7 +6,7 @@ Prefer -```rust +```rust,ignore fn foo(b: Bar) { // use b as owned, directly } @@ -14,7 +14,7 @@ fn foo(b: Bar) { over -```rust +```rust,ignore fn foo(b: &Bar) { let b = b.clone(); // use b as owned after cloning @@ -33,13 +33,13 @@ needed, not as a way of signaling that copies should be cheap to make. Prefer -```rust +```rust,ignore fn foo(b: Bar) -> Bar { ... } ``` over -```rust +```rust,ignore fn foo(b: Box) -> Box { ... } ``` @@ -56,13 +56,13 @@ it becomes. Prefer -```rust +```rust,ignore fn foo>(c: T) { ... } ``` over any of -```rust +```rust,ignore fn foo(c: &[i32]) { ... } fn foo(c: &Vec) { ... } fn foo(c: &SomeOtherCollection) { ... } @@ -83,14 +83,14 @@ concrete nor overly abstract. See the discussion on Prefer either of -```rust +```rust,ignore fn foo(b: &Bar) { ... } fn foo(b: &mut Bar) { ... } ``` over -```rust +```rust,ignore fn foo(b: Bar) { ... } ``` @@ -101,13 +101,13 @@ ownership is actually needed. Prefer -```rust +```rust,ignore fn foo() -> (Bar, Bar) ``` over -```rust +```rust,ignore fn foo(output: &mut Bar) -> Bar ``` @@ -120,7 +120,7 @@ multiple values, it should do so via one of these types. The primary exception: sometimes a function is meant to modify data that the caller already owns, for example to re-use a buffer: -```rust +```rust,ignore fn read(&mut self, buf: &mut [u8]) -> std::io::Result ``` @@ -146,7 +146,7 @@ Choose an argument type that rules out bad inputs. For example, prefer -```rust +```rust,ignore enum FooMode { Mode1, Mode2, @@ -157,7 +157,7 @@ fn foo(mode: FooMode) { ... } over -```rust +```rust,ignore fn foo(mode2: bool, mode3: bool) { assert!(!mode2 || !mode3); ... diff --git a/src/doc/style/features/functions-and-methods/output.md b/src/doc/style/features/functions-and-methods/output.md index 3e43d1e416..e26eee5336 100644 --- a/src/doc/style/features/functions-and-methods/output.md +++ b/src/doc/style/features/functions-and-methods/output.md @@ -16,7 +16,7 @@ API. Prefer -```rust +```rust,ignore struct SearchResult { found: bool, // item in container? expected_index: usize // what would the item's index be? @@ -26,13 +26,13 @@ fn binary_search(&self, k: Key) -> SearchResult ``` or -```rust +```rust,ignore fn binary_search(&self, k: Key) -> (bool, usize) ``` over -```rust +```rust,ignore fn binary_search(&self, k: Key) -> bool ``` @@ -40,13 +40,13 @@ fn binary_search(&self, k: Key) -> bool Prefer -```rust +```rust,ignore fn from_utf8_owned(vv: Vec) -> Result> ``` over -```rust +```rust,ignore fn from_utf8_owned(vv: Vec) -> Option ``` diff --git a/src/doc/style/features/let.md b/src/doc/style/features/let.md index 01dff3dcce..ba9787b45f 100644 --- a/src/doc/style/features/let.md +++ b/src/doc/style/features/let.md @@ -4,7 +4,7 @@ Prefer -```rust +```rust,ignore fn use_mutex(m: sync::mutex::Mutex) { let guard = m.lock(); do_work(guard); @@ -15,7 +15,7 @@ fn use_mutex(m: sync::mutex::Mutex) { over -```rust +```rust,ignore fn use_mutex(m: sync::mutex::Mutex) { do_work(m.lock()); // do other work @@ -32,7 +32,7 @@ explicitly `let`-bound to make the lifetime clear. 
Consider using an explicit Prefer -```rust +```rust,ignore let foo = match bar { Baz => 0, Quux => 1 @@ -41,7 +41,7 @@ let foo = match bar { over -```rust +```rust,ignore let foo; match bar { Baz => { @@ -60,14 +60,14 @@ conditional expression. Prefer -```rust +```rust,ignore let v = s.iter().map(|x| x * 2) .collect::>(); ``` over -```rust +```rust,ignore let v: Vec<_> = s.iter().map(|x| x * 2) .collect(); ``` @@ -87,7 +87,7 @@ the type by explicit generics instantiation, which is usually more clear. Use `mut` bindings to signal the span during which a value is mutated: -```rust +```rust,ignore let mut v = Vec::new(); // push things onto v let v = v; diff --git a/src/doc/style/features/match.md b/src/doc/style/features/match.md index 131e0fad79..0d5a1184a0 100644 --- a/src/doc/style/features/match.md +++ b/src/doc/style/features/match.md @@ -4,7 +4,7 @@ Prefer -~~~~ +~~~~ignore match *foo { X(...) => ... Y(...) => ... @@ -13,7 +13,7 @@ match *foo { over -~~~~ +~~~~ignore match foo { box X(...) => ... box Y(...) => ... diff --git a/src/doc/style/features/modules.md b/src/doc/style/features/modules.md index c55b38b915..995c5fda8a 100644 --- a/src/doc/style/features/modules.md +++ b/src/doc/style/features/modules.md @@ -35,7 +35,7 @@ module hierarchy match, instead. For all except very short modules (<100 lines) and [tests](../testing/README.md), place the module `foo` in a separate file, as in: -```rust +```rust,ignore pub mod foo; // in foo.rs or foo/mod.rs @@ -45,7 +45,7 @@ pub fn bar() { println!("..."); } rather than declaring it inline: -```rust +```rust,ignore pub mod foo { pub fn bar() { println!("..."); } /* ... */ @@ -67,7 +67,7 @@ On the other hand, [`io::net`](https://doc.rust-lang.org/std/io/net/) contains submodules, so it lives in a separate directory: -``` +```text io/mod.rs io/extensions.rs io/fs.rs @@ -120,7 +120,7 @@ and [`BufWriter`](https://doc.rust-lang.org/std/io/struct.BufWriter.html), but these are re-exported in `io/mod.rs` at the top level of the module: -```rust +```rust,ignore // libstd/io/mod.rs pub use self::mem::{MemReader, BufReader, MemWriter, BufWriter}; diff --git a/src/doc/style/features/traits/common.md b/src/doc/style/features/traits/common.md index 18346c0925..e8699c7522 100644 --- a/src/doc/style/features/traits/common.md +++ b/src/doc/style/features/traits/common.md @@ -19,7 +19,7 @@ workaround; see [newtype for views](../types/newtype.md)) The most important common traits to implement from `std` are: -```rust +```text Clone, Debug, Hash, Eq ``` diff --git a/src/doc/style/features/traits/generics.md b/src/doc/style/features/traits/generics.md index a09640c305..f9dac1272c 100644 --- a/src/doc/style/features/traits/generics.md +++ b/src/doc/style/features/traits/generics.md @@ -4,7 +4,7 @@ The most widespread use of traits is for writing generic functions or types. For example, the following signature describes a function for consuming any iterator yielding items of type `A` to produce a collection of `A`: -```rust +```rust,ignore fn from_iter>(iterator: T) -> SomeCollection ``` @@ -32,7 +32,7 @@ explicitly implement to be used by this generic function. implementing a trait, it is possible to be precise about places where that exact type is required or produced. 
For example, a function - ```rust + ```rust,ignore fn binary(x: T, y: T) -> T ``` diff --git a/src/doc/style/features/traits/objects.md b/src/doc/style/features/traits/objects.md index 38494a9b9b..34712ed1ae 100644 --- a/src/doc/style/features/traits/objects.md +++ b/src/doc/style/features/traits/objects.md @@ -6,7 +6,7 @@ Trait objects are useful primarily when _heterogeneous_ collections of objects need to be treated uniformly; it is the closest that Rust comes to object-oriented programming. -```rust +```rust,ignore struct Frame { ... } struct Button { ... } struct Label { ... } diff --git a/src/doc/style/features/types/README.md b/src/doc/style/features/types/README.md index c675eb581c..d3b95d8a6e 100644 --- a/src/doc/style/features/types/README.md +++ b/src/doc/style/features/types/README.md @@ -4,13 +4,13 @@ Prefer -```rust +```rust,ignore let w = Widget::new(Small, Round) ``` over -```rust +```rust,ignore let w = Widget::new(true, false) ``` diff --git a/src/doc/style/features/types/newtype.md b/src/doc/style/features/types/newtype.md index e69aa3b83b..9646e3e82a 100644 --- a/src/doc/style/features/types/newtype.md +++ b/src/doc/style/features/types/newtype.md @@ -13,7 +13,7 @@ underlying type. For example, a `f64` value might be used to represent a quantity in miles or in kilometers. Using newtypes, we can keep track of the intended interpretation: -```rust +```rust,ignore struct Miles(pub f64); struct Kilometers(pub f64); @@ -28,7 +28,7 @@ impl Kilometers { Once we have separated these two types, we can statically ensure that we do not confuse them. For example, the function -```rust +```rust,ignore fn are_we_there_yet(distance_travelled: Miles) -> bool { ... } ``` @@ -46,7 +46,7 @@ type `Enumerate>>`. We wish to hide this type from the client, so that the client's view of the return type is roughly `Iterator<(usize, T)>`. We can do so using the newtype pattern: -```rust +```rust,ignore struct MyTransformResult(Enumerate>>); impl Iterator<(usize, T)> for MyTransformResult { ... } diff --git a/src/doc/style/ownership/builders.md b/src/doc/style/ownership/builders.md index 9fc640890f..3422591233 100644 --- a/src/doc/style/ownership/builders.md +++ b/src/doc/style/ownership/builders.md @@ -35,7 +35,7 @@ be consumed. The follow variant on [`std::process::Command`](https://doc.rust-lang.org/stable/std/process/struct.Command.html) is one example: -```rust +```rust,ignore // NOTE: the actual Command API does not use owned Strings; // this is a simplified version. @@ -94,7 +94,7 @@ methods take and return a mutable borrow of `self`. By using borrows throughout, `Command` can be used conveniently for both one-liner and more complex constructions: -```rust +```rust,ignore // One-liners Command::new("/bin/cat").arg("file.txt").spawn(); @@ -114,7 +114,7 @@ cmd.spawn(); Sometimes builders must transfer ownership when constructing the final type `T`, meaning that the terminal methods must take `self` rather than `&self`: -```rust +```rust,ignore // A simplified excerpt from std::thread::Builder impl ThreadBuilder { @@ -156,7 +156,7 @@ Under the rubric of making easy things easy and hard things possible, _all_ builder methods for a consuming builder should take and returned an owned `self`. Then client code works as follows: -```rust +```rust,ignore // One-liners ThreadBuilder::new().named("my_thread").spawn(proc() { ... 
}); diff --git a/src/doc/style/ownership/constructors.md b/src/doc/style/ownership/constructors.md index b4a1147315..51fc74ac11 100644 --- a/src/doc/style/ownership/constructors.md +++ b/src/doc/style/ownership/constructors.md @@ -4,7 +4,7 @@ In Rust, "constructors" are just a convention: -```rust +```rust,ignore impl Vec { pub fn new() -> Vec { ... } } @@ -15,7 +15,7 @@ construct. Combined with the practice of [fully importing type names](../style/imports.md), this convention leads to informative but concise construction: -```rust +```rust,ignore use vec::Vec; // construct a new vector @@ -29,7 +29,7 @@ than `new`). Given the `struct` -```rust +```rust,ignore pub struct Config { pub color: Color, pub size: Size, @@ -39,7 +39,7 @@ pub struct Config { provide a constructor if there are sensible defaults: -```rust +```rust,ignore impl Config { pub fn new() -> Config { Config { @@ -53,7 +53,7 @@ impl Config { which then allows clients to concisely override using `struct` update syntax: -```rust +```rust,ignore Config { color: Red, .. Config::new() }; ``` diff --git a/src/doc/style/style/braces.md b/src/doc/style/style/braces.md index 0f61bac9fd..80323dba1d 100644 --- a/src/doc/style/style/braces.md +++ b/src/doc/style/style/braces.md @@ -2,7 +2,7 @@ ### Opening braces always go on the same line. -``` rust +```rust,ignore fn foo() { ... } @@ -30,7 +30,7 @@ frob(|x| { ### `match` arms get braces, except for single-line expressions. -``` rust +```rust,ignore match foo { bar => baz, quux => { @@ -42,7 +42,7 @@ match foo { ### `return` statements get semicolons. -``` rust +```rust,ignore fn foo() { do_something(); @@ -62,7 +62,7 @@ fn foo() { > One possible rule: a trailing comma should be included whenever the > closing delimiter appears on a separate line: -```rust +```rust,ignore Foo { bar: 0, baz: 1 } Foo { diff --git a/src/doc/style/style/comments.md b/src/doc/style/style/comments.md index 3851187b52..af02d87cc8 100644 --- a/src/doc/style/style/comments.md +++ b/src/doc/style/style/comments.md @@ -1,10 +1,10 @@ -% Comments [FIXME: needs RFC] +% Comments [RFC #505] ### Avoid block comments. Use line comments: -``` rust +```rust // Wait for the main thread to return, and set the process error code // appropriately. ``` @@ -51,7 +51,7 @@ Basically, this means write "Returns" instead of "Return". For example: -``` rust +```rust,ignore /// Sets up a default runtime configuration, given compiler-supplied arguments. /// /// This function will block until the entire pool of M:N schedulers has @@ -74,13 +74,31 @@ For example: ### Code snippets -> **[FIXME]** +Only use inner doc comments `//!` to write crate and module-level documentation, +nothing else. When using `mod` blocks, prefer `///` outside of the block: + +```rust +/// This module contains tests +mod test { + // ... +} +``` + +over + +```rust +mod test { + //! This module contains tests + + // ... +} +``` ### Avoid inner doc comments. Use inner doc comments _only_ to document crates and file-level modules: -``` rust +```rust,ignore //! The core library. //! //! The core library is a something something... @@ -92,7 +110,7 @@ Rust doesn't have special constructors, only functions that return new instances. These aren't visible in the automatically generated documentation for a type, so you should specifically link to them: -``` rust +```rust,ignore /// An iterator that yields `None` forever after the underlying iterator /// yields `None` once. 
/// diff --git a/src/doc/style/style/features.md b/src/doc/style/style/features.md index b5d0b484cc..13cc37fc23 100644 --- a/src/doc/style/style/features.md +++ b/src/doc/style/style/features.md @@ -1,8 +1,8 @@ -## `return` [FIXME: needs RFC] +## `return` [RFC #968] Terminate `return` statements with semicolons: -``` rust +``` rust,ignore fn foo(bar: i32) -> Option { if some_condition() { return None; diff --git a/src/doc/style/style/imports.md b/src/doc/style/style/imports.md index cf3fd4163a..c958875ddb 100644 --- a/src/doc/style/style/imports.md +++ b/src/doc/style/style/imports.md @@ -10,7 +10,7 @@ sections, in order, with a blank space between each: For example: -```rust +```rust,ignore // Crates. extern crate getopts; extern crate mylib; @@ -40,7 +40,7 @@ as a convenience. For example: -```rust +```rust,ignore use option::Option; use mem; diff --git a/src/doc/style/style/naming/README.md b/src/doc/style/style/naming/README.md index 2106f32faf..6d88a838f5 100644 --- a/src/doc/style/style/naming/README.md +++ b/src/doc/style/style/naming/README.md @@ -69,7 +69,7 @@ Names of items within a module should not be prefixed with that module's name: Prefer -``` rust +```rust,ignore mod foo { pub struct Error { ... } } @@ -77,7 +77,7 @@ mod foo { over -``` rust +```rust,ignore mod foo { pub struct FooError { ... } } diff --git a/src/doc/style/style/naming/containers.md b/src/doc/style/style/naming/containers.md index dfed4f9f75..c352a5b1bf 100644 --- a/src/doc/style/style/naming/containers.md +++ b/src/doc/style/style/naming/containers.md @@ -13,7 +13,7 @@ appropriate. This name is chosen rather than names like `find` or For a container with keys/indexes of type `K` and elements of type `V`: -```rust +```rust,ignore // Look up element without failing fn get(&self, key: K) -> Option<&V> fn get_mut(&mut self, key: K) -> Option<&mut V> @@ -31,7 +31,7 @@ impl IndexMut for Container { ... } Prefer specific conversion functions like `as_bytes` or `into_vec` whenever possible. Otherwise, use: -```rust +```rust,ignore // Extract contents without failing fn get(&self) -> &V fn get_mut(&mut self) -> &mut V @@ -40,7 +40,7 @@ fn unwrap(self) -> V #### Wrappers/Cells around `Copy` data -```rust +```rust,ignore // Extract contents without failing fn get(&self) -> V ``` @@ -52,7 +52,7 @@ play a special role for failure. 
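As an aside on the container-naming conventions above, a minimal compiling sketch of a single-slot container following the `get`/`get_mut` pattern (the `Slot` type is purely illustrative):

```rust
// A one-element "container" whose accessors follow the get/get_mut convention:
// look up without failing, returning Option<&V> / Option<&mut V>.
struct Slot<V> {
    value: Option<V>,
}

impl<V> Slot<V> {
    fn get(&self) -> Option<&V> {
        self.value.as_ref()
    }

    fn get_mut(&mut self) -> Option<&mut V> {
        self.value.as_mut()
    }
}

fn main() {
    let mut slot = Slot { value: Some(1) };
    if let Some(v) = slot.get_mut() {
        *v += 1;
    }
    assert_eq!(slot.get(), Some(&2));
}
```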
For `Option`: -```rust +```rust,ignore // Extract contents or fail if not available fn assert(self) -> V fn expect(self, &str) -> V @@ -60,7 +60,7 @@ fn expect(self, &str) -> V For `Result`: -```rust +```rust,ignore // Extract the contents of Ok variant; fail if Err fn assert(self) -> V diff --git a/src/doc/style/style/naming/iterators.md b/src/doc/style/style/naming/iterators.md index 38138b5e39..945cbe4800 100644 --- a/src/doc/style/style/naming/iterators.md +++ b/src/doc/style/style/naming/iterators.md @@ -6,7 +6,7 @@ For a container with elements of type `U`, iterator methods should be named: -```rust +```rust,ignore fn iter(&self) -> T // where T implements Iterator<&U> fn iter_mut(&mut self) -> T // where T implements Iterator<&mut U> fn into_iter(self) -> T // where T implements Iterator diff --git a/src/doc/style/style/whitespace.md b/src/doc/style/style/whitespace.md index c28a723209..c33c17c8e4 100644 --- a/src/doc/style/style/whitespace.md +++ b/src/doc/style/style/whitespace.md @@ -8,7 +8,7 @@ * Use spaces around binary operators, including the equals sign in attributes: -``` rust +```rust,ignore #[deprecated = "Use `bar` instead."] fn foo(a: usize, b: usize) -> usize { a + b @@ -17,7 +17,7 @@ fn foo(a: usize, b: usize) -> usize { * Use a space after colons and commas: -``` rust +```rust,ignore fn foo(a: Bar); MyStruct { foo: 3, bar: 4 } @@ -28,7 +28,7 @@ foo(bar, baz); * Use a space after the opening and before the closing brace for single line blocks or `struct` expressions: -``` rust +```rust,ignore spawn(proc() { do_something(); }) Point { x: 0.1, y: 0.3 } @@ -39,7 +39,7 @@ Point { x: 0.1, y: 0.3 } * For multiline function signatures, each new line should align with the first parameter. Multiple parameters per line are permitted: -``` rust +```rust,ignore fn frobnicate(a: Bar, b: Bar, c: Bar, d: Bar) -> Bar { @@ -59,7 +59,7 @@ fn foo Bar { ... @@ -77,7 +77,7 @@ foo_bar(x, y, |z| { > **[FIXME]** Do we also want to allow the following? > -> ```rust +> ```rust,ignore > frobnicate( > arg1, > arg2, @@ -92,7 +92,7 @@ foo_bar(x, y, |z| { > * **[Deprecated]** If you have multiple patterns in a single `match` > arm, write each pattern on a separate line: > -> ``` rust +> ```rust,ignore > match foo { > bar(_) > | baz => quux, @@ -110,7 +110,7 @@ Idiomatic code should not use extra whitespace in the middle of a line to provide alignment. -``` rust +```rust,ignore // Good struct Foo { short: f64, diff --git a/src/etc/check-binaries.py b/src/etc/check-binaries.py deleted file mode 100755 index 91c01b1780..0000000000 --- a/src/etc/check-binaries.py +++ /dev/null @@ -1,20 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -import sys - -offenders = sys.argv[1:] -if len(offenders) > 0: - print("Binaries checked into src:") - for offender in offenders: - print(offender) - sys.exit(1) diff --git a/src/etc/errorck.py b/src/etc/errorck.py deleted file mode 100644 index 1f5f3784ac..0000000000 --- a/src/etc/errorck.py +++ /dev/null @@ -1,136 +0,0 @@ -# Copyright 2015 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. 
-# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -# Digs error codes out of files named 'diagnostics.rs' across -# the tree, and ensures thare are no duplicates. - -import sys -import os -import re - -if len(sys.argv) < 2: - print("usage: errorck.py ") - sys.exit(1) - -src_dir = sys.argv[1] -errcode_map = {} -errcode_checked = [] -errcode_not_found = [] -error_re = re.compile("(E\d\d\d\d)") - -def check_unused_error_codes(error_codes, check_error_codes, filenames, dirnames, dirpath): - for filename in filenames: - if filename == "diagnostics.rs" or not filename.endswith(".rs"): - continue - path = os.path.join(dirpath, filename) - - with open(path, 'r') as f: - for line in f: - match = error_re.search(line) - if match: - errcode = match.group(1) - if errcode in error_codes: - error_codes.remove(errcode) - if errcode not in check_error_codes: - check_error_codes.append(errcode) - for dirname in dirnames: - path = os.path.join(dirpath, dirname) - for (dirpath, dnames, fnames) in os.walk(path): - check_unused_error_codes(error_codes, check_error_codes, fnames, dnames, dirpath) - - -# In the register_long_diagnostics! macro, entries look like this: -# -# EXXXX: r##" -# -# "##, -# -# These two variables are for detecting the beginning and end of diagnostic -# messages so that duplicate error codes are not reported when a code occurs -# inside a diagnostic message -long_diag_begin = "r##\"" -long_diag_end = "\"##" - -errors = False -all_errors = [] - -for (dirpath, dirnames, filenames) in os.walk(src_dir): - if "src/test" in dirpath or "src/llvm" in dirpath: - # Short circuit for fast - continue - - errcode_to_check = [] - for filename in filenames: - if filename != "diagnostics.rs": - continue - path = os.path.join(dirpath, filename) - - with open(path, 'r') as f: - inside_long_diag = False - errcode_to_check = [] - for line_num, line in enumerate(f, start=1): - if inside_long_diag: - # Skip duplicate error code checking for this line - if long_diag_end in line: - inside_long_diag = False - continue - - match = error_re.search(line) - if match: - errcode = match.group(1) - new_record = [(errcode, path, line_num, line)] - existing = errcode_map.get(errcode) - if existing is not None: - # This is a dupe - errcode_map[errcode] = existing + new_record - else: - errcode_map[errcode] = new_record - # we don't check if this is a long error explanation - if (long_diag_begin not in line and not line.strip().startswith("//") - and errcode not in errcode_to_check and errcode not in errcode_checked - and errcode not in errcode_not_found): - errcode_to_check.append(errcode) - - if long_diag_begin in line: - inside_long_diag = True - break - check_unused_error_codes(errcode_to_check, errcode_checked, filenames, dirnames, dirpath) - if len(errcode_to_check) > 0: - for errcode in errcode_to_check: - if errcode in errcode_checked: - continue - errcode_not_found.append(errcode) - -if len(errcode_not_found) > 0: - errcode_not_found.sort() - for errcode in errcode_not_found: - if errcode in errcode_checked: - continue - all_errors.append(errcode) - print("error: unused error code: {0} ({1}:{2})".format(*errcode_map[errcode][0])) - errors = True - - -for errcode, entries in errcode_map.items(): - all_errors.append(entries[0][0]) - if len(entries) > 1: - entries.sort() - print("error: duplicate error code " + errcode) - for entry in entries: - print("{1}: 
{2}\n{3}".format(*entry)) - errors = True - -print -print("* {0} error codes".format(len(errcode_map))) -print("* highest error code: " + max(all_errors)) -print - -if errors: - sys.exit(1) diff --git a/src/etc/featureck.py b/src/etc/featureck.py deleted file mode 100644 index d6cc25177e..0000000000 --- a/src/etc/featureck.py +++ /dev/null @@ -1,251 +0,0 @@ -# Copyright 2015 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -# This script does a tree-wide sanity checks against stability -# attributes, currently: -# * For all feature_name/level pairs the 'since' field is the same -# * That no features are both stable and unstable. -# * That lib features don't have the same name as lang features -# unless they are on the 'joint_features' whitelist -# * That features that exist in both lang and lib and are stable -# since the same version -# * Prints information about features - -import sys -import os -import re -import codecs - -if len(sys.argv) < 2: - print("usage: featureck.py ") - sys.exit(1) - -src_dir = sys.argv[1] - -# Features that are allowed to exist in both the language and the library -joint_features = [ ] - -# Grab the list of language features from the compiler -language_gate_statuses = [ "Active", "Deprecated", "Removed", "Accepted" ] -feature_gate_source = os.path.join(src_dir, "libsyntax", "feature_gate.rs") -language_features = [] -language_feature_names = [] -with open(feature_gate_source, 'r') as f: - for line in f: - original_line = line - line = line.strip() - is_feature_line = False - for status in language_gate_statuses: - if status in line and line.startswith("("): - is_feature_line = True - - if is_feature_line: - # turn ` ("foo", "1.0.0", Some(10), Active)` into - # `"foo", "1.0.0", Some(10), Active` - line = line.strip(' ,()') - parts = line.split(",") - if len(parts) != 4: - print("error: unexpected number of components in line: " + original_line) - sys.exit(1) - feature_name = parts[0].strip().replace('"', "") - since = parts[1].strip().replace('"', "") - issue = parts[2].strip() - status = parts[3].strip() - assert len(feature_name) > 0 - assert len(since) > 0 - assert len(issue) > 0 - assert len(status) > 0 - - language_feature_names += [feature_name] - language_features += [(feature_name, since, issue, status)] - -assert len(language_features) > 0 - -errors = False - -lib_features = { } -lib_features_and_level = { } -for (dirpath, dirnames, filenames) in os.walk(src_dir): - # Don't look for feature names in tests - if "src/test" in dirpath: - continue - - # Takes a long time to traverse LLVM - if "src/llvm" in dirpath: - continue - - for filename in filenames: - if not filename.endswith(".rs"): - continue - - path = os.path.join(dirpath, filename) - with codecs.open(filename=path, mode='r', encoding="utf-8") as f: - line_num = 0 - for line in f: - line_num += 1 - level = None - if "[unstable(" in line: - level = "unstable" - elif "[stable(" in line: - level = "stable" - else: - continue - - # This is a stability attribute. For the purposes of this - # script we expect both the 'feature' and 'since' attributes on - # the same line, e.g. 
- # `#[unstable(feature = "foo", since = "1.0.0")]` - - p = re.compile('(unstable|stable).*feature *= *"(\w*)"') - m = p.search(line) - if not m is None: - feature_name = m.group(2) - since = None - if re.compile("\[ *stable").search(line) is not None: - pp = re.compile('since *= *"([\w\.]*)"') - mm = pp.search(line) - if not mm is None: - since = mm.group(1) - else: - print("error: misformed stability attribute") - print("line %d of %:" % (line_num, path)) - print(line) - errors = True - - lib_features[feature_name] = feature_name - if lib_features_and_level.get((feature_name, level)) is None: - # Add it to the observed features - lib_features_and_level[(feature_name, level)] = \ - (since, path, line_num, line) - else: - # Verify that for this combination of feature_name and level the 'since' - # attribute matches. - (expected_since, source_path, source_line_num, source_line) = \ - lib_features_and_level.get((feature_name, level)) - if since != expected_since: - print("error: mismatch in %s feature '%s'" % (level, feature_name)) - print("line %d of %s:" % (source_line_num, source_path)) - print(source_line) - print("line %d of %s:" % (line_num, path)) - print(line) - errors = True - - # Verify that this lib feature doesn't duplicate a lang feature - if feature_name in language_feature_names: - print("error: lib feature '%s' duplicates a lang feature" % (feature_name)) - print("line %d of %s:" % (line_num, path)) - print(line) - errors = True - - else: - print("error: misformed stability attribute") - print("line %d of %s:" % (line_num, path)) - print(line) - errors = True - -# Merge data about both lists -# name, lang, lib, status, stable since - -language_feature_stats = {} - -for f in language_features: - name = f[0] - lang = True - lib = False - status = "unstable" - stable_since = None - - if f[3] == "Accepted": - status = "stable" - if status == "stable": - stable_since = f[1] - - language_feature_stats[name] = (name, lang, lib, status, stable_since) - -lib_feature_stats = {} - -for f in lib_features: - name = f - lang = False - lib = True - status = "unstable" - stable_since = None - - is_stable = lib_features_and_level.get((name, "stable")) is not None - is_unstable = lib_features_and_level.get((name, "unstable")) is not None - - if is_stable and is_unstable: - print("error: feature '%s' is both stable and unstable" % (name)) - errors = True - - if is_stable: - status = "stable" - stable_since = lib_features_and_level[(name, "stable")][0] - elif is_unstable: - status = "unstable" - - lib_feature_stats[name] = (name, lang, lib, status, stable_since) - -# Check for overlap in two sets -merged_stats = { } - -for name in lib_feature_stats: - if language_feature_stats.get(name) is not None: - if not name in joint_features: - print("error: feature '%s' is both a lang and lib feature but not whitelisted" % (name)) - errors = True - lang_status = language_feature_stats[name][3] - lib_status = lib_feature_stats[name][3] - lang_stable_since = language_feature_stats[name][4] - lib_stable_since = lib_feature_stats[name][4] - - if lang_status != lib_status and lib_status != "rustc_deprecated": - print("error: feature '%s' has lang status %s " + - "but lib status %s" % (name, lang_status, lib_status)) - errors = True - - if lang_stable_since != lib_stable_since: - print("error: feature '%s' has lang stable since %s " + - "but lib stable since %s" % (name, lang_stable_since, lib_stable_since)) - errors = True - - merged_stats[name] = (name, True, True, lang_status, lang_stable_since) - - del 
language_feature_stats[name] - del lib_feature_stats[name] - -if errors: - sys.exit(1) - -# Finally, display the stats -stats = {} -stats.update(language_feature_stats) -stats.update(lib_feature_stats) -stats.update(merged_stats) -lines = [] -for s in stats: - s = stats[s] - type_ = "lang" - if s[1] and s[2]: - type_ = "lang/lib" - elif s[2]: - type_ = "lib" - line = "{: <32}".format(s[0]) + \ - "{: <8}".format(type_) + \ - "{: <12}".format(s[3]) + \ - "{: <8}".format(str(s[4])) - lines += [line] - -lines.sort() - -print -for line in lines: - print("* " + line) -print diff --git a/src/etc/gdb_rust_pretty_printing.py b/src/etc/gdb_rust_pretty_printing.py index f93f349021..38c9fbf982 100755 --- a/src/etc/gdb_rust_pretty_printing.py +++ b/src/etc/gdb_rust_pretty_printing.py @@ -70,6 +70,8 @@ class GdbValue(rustpp.Value): return child def as_integer(self): + if self.gdb_val.type.code == gdb.TYPE_CODE_PTR: + return int(str(self.gdb_val), 0) return int(self.gdb_val) def get_wrapped_value(self): diff --git a/src/etc/generate-keyword-tests.py b/src/etc/generate-keyword-tests.py index 937c231a47..e53d6c718c 100755 --- a/src/etc/generate-keyword-tests.py +++ b/src/etc/generate-keyword-tests.py @@ -34,15 +34,17 @@ template = """// Copyright %d The Rust Project Developers. See the COPYRIGHT // option. This file may not be copied, modified, or distributed // except according to those terms. +// compile-flags: -Z parse-only + // This file was auto-generated using 'src/etc/generate-keyword-tests.py %s' fn main() { - let %s = "foo"; //~ error: ident + let %s = "foo"; //~ error: expected pattern, found keyword `%s` } """ test_dir = os.path.abspath( - os.path.join(os.path.dirname(__file__), '../test/compile-fail') + os.path.join(os.path.dirname(__file__), '../test/parse-fail') ) for kw in sys.argv[1:]: @@ -53,7 +55,7 @@ for kw in sys.argv[1:]: os.chmod(test_file, stat.S_IWUSR) with open(test_file, 'wt') as f: - f.write(template % (datetime.datetime.now().year, kw, kw)) + f.write(template % (datetime.datetime.now().year, kw, kw, kw)) # mark file read-only os.chmod(test_file, stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH) diff --git a/src/etc/get-snapshot.py b/src/etc/get-snapshot.py deleted file mode 100755 index 26246bd2c3..0000000000 --- a/src/etc/get-snapshot.py +++ /dev/null @@ -1,78 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2014 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -import os -import tarfile -import shutil -import sys -from snapshot import * - - -def unpack_snapshot(triple, dl_path): - print("opening snapshot " + dl_path) - tar = tarfile.open(dl_path) - kernel = get_kernel(triple) - - stagep = os.path.join(triple, "stage0") - - # Remove files from prior unpackings, since snapshot rustc may not - # be able to disambiguate between multiple candidate libraries. - # (Leave dirs in place since extracting step still needs them.) 
- for root, _, files in os.walk(stagep): - for f in files: - print("removing " + os.path.join(root, f)) - os.unlink(os.path.join(root, f)) - - for p in tar.getnames(): - name = p.replace("rust-stage0/", "", 1) - - fp = os.path.join(stagep, name) - print("extracting " + p) - tar.extract(p, download_unpack_base) - tp = os.path.join(download_unpack_base, p) - if os.path.isdir(tp) and os.path.exists(fp): - continue - shutil.move(tp, fp) - tar.close() - shutil.rmtree(download_unpack_base) - - -# Main - -# this gets called with one or two arguments: -# The first is the O/S triple. -# The second is an optional path to the snapshot to use. - -def main(argv): - triple = argv[1] - if len(argv) == 3: - dl_path = argv[2] - else: - snap = determine_curr_snapshot(triple) - dl = os.path.join(download_dir_base, snap) - url = download_url_base + "/" + snap - print("determined most recent snapshot: " + snap) - - if (not os.path.exists(dl)): - get_url_to_file(url, dl) - - if (snap_filename_hash_part(snap) == hash_file(dl)): - print("got download with ok hash") - else: - raise Exception("bad hash on download") - - dl_path = os.path.join(download_dir_base, snap) - - unpack_snapshot(triple, dl_path) - -if __name__ == '__main__': - main(sys.argv) diff --git a/src/etc/get-stage0.py b/src/etc/get-stage0.py new file mode 100644 index 0000000000..3a609957fa --- /dev/null +++ b/src/etc/get-stage0.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python +# +# Copyright 2016 The Rust Project Developers. See the COPYRIGHT +# file at the top-level directory of this distribution and at +# http://rust-lang.org/COPYRIGHT. +# +# Licensed under the Apache License, Version 2.0 or the MIT license +# , at your +# option. This file may not be copied, modified, or distributed +# except according to those terms. 
+ +import os +import shutil +import sys +import tarfile + +path = os.path.abspath(os.path.join(os.path.dirname(__file__), "../bootstrap")) +sys.path.append(path) + +import bootstrap + +def main(argv): + src_root = os.path.abspath(os.path.join(os.path.dirname(__file__), "../..")) + triple = argv[1] + data = bootstrap.stage0_data(src_root) + + channel, date = data['rustc'].split('-', 1) + + dl_dir = 'dl' + if not os.path.exists(dl_dir): + os.makedirs(dl_dir) + + filename_base = 'rustc-' + channel + '-' + triple + filename = filename_base + '.tar.gz' + url = 'https://static.rust-lang.org/dist/' + date + '/' + filename + dst = dl_dir + '/' + filename + if not os.path.exists(dst): + bootstrap.get(url, dst) + + stage0_dst = triple + '/stage0' + if os.path.exists(stage0_dst): + for root, _, files in os.walk(stage0_dst): + for f in files: + os.unlink(os.path.join(root, f)) + else: + os.makedirs(stage0_dst) + bootstrap.unpack(dst, stage0_dst, match='rustc', verbose=True) + +if __name__ == '__main__': + main(sys.argv) diff --git a/src/etc/htmldocck.py b/src/etc/htmldocck.py index 8362c239b6..a930a0d083 100644 --- a/src/etc/htmldocck.py +++ b/src/etc/htmldocck.py @@ -342,9 +342,9 @@ def check_tree_text(tree, path, pat, regexp): return ret -def check_tree_count(tree, path, count): +def get_tree_count(tree, path): path = normalize_xpath(path) - return len(tree.findall(path)) == count + return len(tree.findall(path)) def stderr(*args): print(*args, file=sys.stderr) @@ -393,7 +393,10 @@ def check_command(c, cache): elif c.cmd == 'count': # count test if len(c.args) == 3: # @count = count test - ret = check_tree_count(cache.get_tree(c.args[0]), c.args[1], int(c.args[2])) + expected = int(c.args[2]) + found = get_tree_count(cache.get_tree(c.args[0]), c.args[1]) + cerr = "Expected {} occurrences but found {}".format(expected, found) + ret = expected == found else: raise InvalidCheck('Invalid number of @{} arguments'.format(c.cmd)) elif c.cmd == 'valid-html': diff --git a/src/etc/latest-unix-snaps.py b/src/etc/latest-unix-snaps.py deleted file mode 100755 index 6c93bf23f9..0000000000 --- a/src/etc/latest-unix-snaps.py +++ /dev/null @@ -1,65 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. 
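As an aside on the download layout that `get-stage0.py` relies on above, a small Rust restatement of the URL construction (the channel, date, and triple values are illustrative only):

```rust
// Mirrors the URL built by get-stage0.py:
// https://static.rust-lang.org/dist/<date>/rustc-<channel>-<triple>.tar.gz
fn stage0_url(channel: &str, date: &str, triple: &str) -> String {
    format!("https://static.rust-lang.org/dist/{}/rustc-{}-{}.tar.gz",
            date, channel, triple)
}

fn main() {
    let url = stage0_url("beta", "2016-03-18", "x86_64-unknown-linux-gnu");
    assert_eq!(url, "https://static.rust-lang.org/dist/2016-03-18/rustc-beta-x86_64-unknown-linux-gnu.tar.gz");
}
```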
- -import os -import re -from snapshot import * - -f = open(snapshotfile) -date = None -rev = None -platform = None -snap = None -i = 0 - -newestSet = {} - - -for line in f.readlines(): - i += 1 - parsed = parse_line(i, line) - if not parsed: - continue - - if parsed["type"] == "snapshot": - if (len(newestSet) == 0 or parsed["date"] > newestSet["date"]): - newestSet["date"] = parsed["date"] - newestSet["rev"] = parsed["rev"] - newestSet["files"] = [] - addingMode = True - else: - addingMode = False - - elif addingMode is True and parsed["type"] == "file": - tux = re.compile("linux", re.IGNORECASE) - if (tux.match(parsed["platform"]) is not None): - ff = {} - ff["platform"] = parsed["platform"] - ff["hash"] = parsed["hash"] - newestSet["files"] += [ff] - - -def download_new_file(date, rev, platform, hsh): - snap = full_snapshot_name(date, rev, platform, hsh) - dl = os.path.join(download_dir_base, snap) - url = download_url_base + "/" + snap - if (not os.path.exists(dl)): - print("downloading " + url) - get_url_to_file(url, dl) - if (snap_filename_hash_part(snap) == hash_file(dl)): - print("got download with ok hash") - else: - raise Exception("bad hash on download") - -for ff in newestSet["files"]: - download_new_file(newestSet["date"], newestSet["rev"], - ff["platform"], ff["hash"]) diff --git a/src/etc/licenseck.py b/src/etc/licenseck.py deleted file mode 100644 index aa2a00beae..0000000000 --- a/src/etc/licenseck.py +++ /dev/null @@ -1,56 +0,0 @@ -# Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -import re -import os - -license_re = re.compile( -u"""(#|//) Copyright .* The Rust Project Developers. See the COPYRIGHT -\\1 file at the top-level directory of this distribution and at -\\1 http://rust-lang.org/COPYRIGHT. -\\1 -\\1 Licensed under the Apache License, Version 2.0 or the MIT license -\\1 , at your -\\1 option. 
This file may not be copied, modified, or distributed -\\1 except according to those terms.""") - -exceptions = [ - "libstd/sync/mpsc/mpsc_queue.rs", # BSD - "libstd/sync/mpsc/spsc_queue.rs", # BSD - "test/bench/shootout-binarytrees.rs", # BSD - "test/bench/shootout-chameneos-redux.rs", # BSD - "test/bench/shootout-fannkuch-redux.rs", # BSD - "test/bench/shootout-fasta.rs", # BSD - "test/bench/shootout-fasta-redux.rs", # BSD - "test/bench/shootout-k-nucleotide.rs", # BSD - "test/bench/shootout-mandelbrot.rs", # BSD - "test/bench/shootout-meteor.rs", # BSD - "test/bench/shootout-nbody.rs", # BSD - "test/bench/shootout-regex-dna.rs", # BSD - "test/bench/shootout-reverse-complement.rs", # BSD - "test/bench/shootout-spectralnorm.rs", # BSD - "test/bench/shootout-threadring.rs", # BSD -] - -def check_license(name, contents): - name = os.path.normpath(name) - # Whitelist check - if any(name.endswith(os.path.normpath(e)) for e in exceptions): - return True - - # Xfail check - firstlineish = contents[:100] - if "ignore-license" in firstlineish: - return True - - # License check - boilerplate = contents[:500] - return bool(license_re.search(boilerplate)) diff --git a/src/etc/lldb_batchmode.py b/src/etc/lldb_batchmode.py index b1506285b3..7bbb3577f8 100644 --- a/src/etc/lldb_batchmode.py +++ b/src/etc/lldb_batchmode.py @@ -216,4 +216,5 @@ except IOError as e: print("Aborting.", file=sys.stderr) sys.exit(1) finally: + debugger.Terminate() script_file.close() diff --git a/src/etc/local_stage0.sh b/src/etc/local_stage0.sh index aee69a5c8b..fb45544191 100755 --- a/src/etc/local_stage0.sh +++ b/src/etc/local_stage0.sh @@ -55,6 +55,14 @@ cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}extra*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_D cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}rust*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}std*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}syntax*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ +cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}flate*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ +cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}fmt_macros*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ +cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}getopts*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ +cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}graphviz*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ +cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}log*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ +cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}rbml*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ +cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}serialize*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ +cp ${PREFIX}/${LIB_DIR}/${LIB_PREFIX}term*${LIB_SUF} ${TARG_DIR}/stage0/${LIB_DIR}/ # do not fail if one of the above fails, as all we need is a working rustc! exit 0 diff --git a/src/etc/make-snapshot.py b/src/etc/make-snapshot.py deleted file mode 100755 index 31a69a581e..0000000000 --- a/src/etc/make-snapshot.py +++ /dev/null @@ -1,15 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2014 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. 
- -import snapshot, sys - -print(snapshot.make_snapshot(sys.argv[1], sys.argv[2])) diff --git a/src/etc/maketest.py b/src/etc/maketest.py deleted file mode 100644 index c7d17b23bf..0000000000 --- a/src/etc/maketest.py +++ /dev/null @@ -1,96 +0,0 @@ -# Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -import subprocess -import os -import sys - -target_triple = sys.argv[14] - -def normalize_path(v): - """msys1/msys2 automatically converts `/abs/path1:/abs/path2` into - `c:\real\abs\path1;c:\real\abs\path2` (semicolons) if shell thinks - the value is list of paths. - (if there is only one path, it becomes `c:/real/abs/path`.) - this causes great confusion and error: shell and Makefile doesn't like - windows paths so it is really error-prone. revert it for peace.""" - v = v.replace('\\', '/') - # c:/path -> /c/path - # "c:/path" -> "/c/path" - start = v.find(':/') - while start != -1: - v = v[:start - 1] + '/' + v[start - 1:start] + v[start + 1:] - start = v.find(':/') - return v - - -def putenv(name, value): - if os.name == 'nt': - value = normalize_path(value) - os.putenv(name, value) - - -def convert_path_spec(name, value): - if os.name == 'nt' and name != 'PATH': - value = ":".join(normalize_path(v) for v in value.split(";")) - return value - -make = sys.argv[2] -putenv('RUSTC', os.path.abspath(sys.argv[3])) -putenv('TMPDIR', os.path.abspath(sys.argv[4])) -putenv('CC', sys.argv[5] + ' ' + sys.argv[6]) -putenv('CFLAGS', sys.argv[6]) -putenv('RUSTDOC', os.path.abspath(sys.argv[7])) -filt = sys.argv[8] -putenv('LD_LIB_PATH_ENVVAR', sys.argv[9]) -putenv('HOST_RPATH_DIR', os.path.abspath(sys.argv[10])) -putenv('TARGET_RPATH_DIR', os.path.abspath(sys.argv[11])) -putenv('RUST_BUILD_STAGE', sys.argv[12]) -putenv('S', os.path.abspath(sys.argv[13])) -putenv('RUSTFLAGS', sys.argv[15]) -putenv('LLVM_COMPONENTS', sys.argv[16]) -putenv('LLVM_CXXFLAGS', sys.argv[17]) -putenv('CXX', sys.argv[18]) -putenv('PYTHON', sys.executable) -os.putenv('TARGET', target_triple) - -if 'msvc' in target_triple: - os.putenv('IS_MSVC', '1') - -if filt not in sys.argv[1]: - sys.exit(0) -print('maketest: ' + os.path.basename(os.path.dirname(sys.argv[1]))) - -path = sys.argv[1] -if path[-1] == '/': - # msys1 has a bug that `make` fails to include `../tools.mk` (parent dir) - # if `-C path` option is given and `path` is absolute directory with - # trailing slash (`c:/path/to/test/`). - # the easist workaround is to remove the slash (`c:/path/to/test`). - # msys2 seems to fix this problem. - path = path[:-1] - -proc = subprocess.Popen([make, '-C', path], - stdout=subprocess.PIPE, - stderr=subprocess.PIPE) -out, err = proc.communicate() -i = proc.wait() - -if i != 0: - print """\ ------ %s -------------------- ------- stdout --------------------------------------------- -%s ------- stderr --------------------------------------------- -%s ------- --------------------------------------------- -""" % (sys.argv[1], out, err) - - sys.exit(i) diff --git a/src/etc/mirror-all-snapshots.py b/src/etc/mirror-all-snapshots.py deleted file mode 100644 index cd77f88214..0000000000 --- a/src/etc/mirror-all-snapshots.py +++ /dev/null @@ -1,45 +0,0 @@ -#!/usr/bin/env python -# -# Copyright 2011-2013 The Rust Project Developers. 
See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -import os -from snapshot import * - -f = open(snapshotfile) -date = None -rev = None -platform = None -snap = None -i = 0 - -for line in f.readlines(): - i += 1 - parsed = parse_line(i, line) - if not parsed: - continue - - if parsed["type"] == "snapshot": - date = parsed["date"] - rev = parsed["rev"] - - elif rev is not None and parsed["type"] == "file": - platform = parsed["platform"] - hsh = parsed["hash"] - snap = full_snapshot_name(date, rev, platform, hsh) - dl = os.path.join(download_dir_base, snap) - url = download_url_base + "/" + snap - if (not os.path.exists(dl)): - print("downloading " + url) - get_url_to_file(url, dl) - if (snap_filename_hash_part(snap) == hash_file(dl)): - print("got download with ok hash") - else: - raise Exception("bad hash on download") diff --git a/src/etc/snapshot.py b/src/etc/snapshot.py deleted file mode 100644 index 81babf924c..0000000000 --- a/src/etc/snapshot.py +++ /dev/null @@ -1,304 +0,0 @@ -# Copyright 2011-2015 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. - -import re -import os -import sys -import glob -import tarfile -import shutil -import subprocess -import distutils.spawn - -try: - import hashlib - sha_func = hashlib.sha1 -except ImportError: - import sha - sha_func = sha.new - - -def scrub(b): - if sys.version_info >= (3,) and type(b) == bytes: - return b.decode('ascii') - else: - return b - -src_dir = scrub(os.getenv("CFG_SRC_DIR")) -if not src_dir: - raise Exception("missing env var CFG_SRC_DIR") - -snapshotfile = os.path.join(src_dir, "src", "snapshots.txt") -download_url_base = "https://static.rust-lang.org/stage0-snapshots" -download_dir_base = "dl" -download_unpack_base = os.path.join(download_dir_base, "unpack") - -snapshot_files = { - "bitrig": ["bin/rustc"], - "dragonfly": ["bin/rustc"], - "freebsd": ["bin/rustc"], - "linux": ["bin/rustc"], - "macos": ["bin/rustc"], - "netbsd": ["bin/rustc"], - "openbsd": ["bin/rustc"], - "solaris": ["bin/rustc"], - "winnt": ["bin/rustc.exe"], - } - -winnt_runtime_deps_32 = ["libgcc_s_dw2-1.dll", "libstdc++-6.dll"] -winnt_runtime_deps_64 = ["libgcc_s_seh-1.dll", "libstdc++-6.dll"] - -def parse_line(n, line): - global snapshotfile - - if re.match(r"\s*$", line): - return None - - if re.match(r"^T\s*$", line): - return None - - match = re.match(r"\s+([\w_-]+) ([a-fA-F\d]{40})\s*$", line) - if match: - return {"type": "file", - "platform": match.group(1), - "hash": match.group(2).lower()} - - match = re.match(r"([ST]) (\d{4}-\d{2}-\d{2}) ([a-fA-F\d]+)\s*$", line) - if not match: - raise Exception("%s:%d:E syntax error: " % (snapshotfile, n)) - return {"type": "snapshot", - "date": match.group(2), - "rev": match.group(3)} - - -def partial_snapshot_name(date, rev, platform): - return ("rust-stage0-%s-%s-%s.tar.bz2" % - (date, rev, platform)) - - -def full_snapshot_name(date, rev, platform, hsh): - return ("rust-stage0-%s-%s-%s-%s.tar.bz2" % - (date, rev, platform, hsh)) - - -def get_kernel(triple): - t = triple.split('-') 
- if len(t) == 2: - os_name = t[1] - else: - os_name = t[2] - - if os_name == "windows": - return "winnt" - if os_name == "darwin": - return "macos" - if os_name == "freebsd": - return "freebsd" - if os_name == "dragonfly": - return "dragonfly" - if os_name == "bitrig": - return "bitrig" - if os_name == "netbsd": - return "netbsd" - if os_name == "openbsd": - return "openbsd" - return "linux" - - -def get_cpu(triple): - arch = triple.split('-')[0] - if arch == "i686": - return "i386" - return arch - - -def get_platform(triple): - return "%s-%s" % (get_kernel(triple), get_cpu(triple)) - - -def cmd_out(cmdline): - p = subprocess.Popen(cmdline, stdout=subprocess.PIPE) - return scrub(p.communicate()[0].strip()) - - -def local_rev_info(field): - return cmd_out(["git", "--git-dir=" + os.path.join(src_dir, ".git"), - "log", "-n", "1", - "--format=%%%s" % field, "HEAD"]) - - -def local_rev_full_sha(): - return local_rev_info("H").split()[0] - - -def local_rev_short_sha(): - return local_rev_info("h").split()[0] - - -def local_rev_committer_date(): - return local_rev_info("ci") - - -def get_url_to_file(u, f): - # no security issue, just to stop partial download leaving a stale file - tmpf = f + '.tmp' - - returncode = -1 - if distutils.spawn.find_executable("curl"): - returncode = subprocess.call(["curl", "-o", tmpf, u]) - elif distutils.spawn.find_executable("wget"): - returncode = subprocess.call(["wget", "-O", tmpf, u]) - - if returncode != 0: - try: - os.unlink(tmpf) - except OSError: - pass - raise Exception("failed to fetch url") - os.rename(tmpf, f) - - -def snap_filename_hash_part(snap): - match = re.match(r".*([a-fA-F\d]{40}).tar.bz2$", snap) - if not match: - raise Exception("unable to find hash in filename: " + snap) - return match.group(1) - - -def hash_file(x): - h = sha_func() - h.update(open(x, "rb").read()) - return scrub(h.hexdigest()) - - -def get_winnt_runtime_deps(platform): - """Returns a list of paths of Rust's system runtime dependencies""" - if platform == "winnt-x86_64": - deps = winnt_runtime_deps_64 - else: - deps = winnt_runtime_deps_32 - runtime_deps = [] - path_dirs = os.environ["PATH"].split(os.pathsep) - for name in deps: - for dir in path_dirs: - filepath = os.path.join(dir, name) - if os.path.isfile(filepath): - runtime_deps.append(filepath) - break - else: - raise Exception("Could not find runtime dependency: %s" % name) - return runtime_deps - - -def make_snapshot(stage, triple): - kernel = get_kernel(triple) - platform = get_platform(triple) - rev = local_rev_short_sha() - date = local_rev_committer_date().split()[0] - - file0 = partial_snapshot_name(date, rev, platform) - - def in_tar_name(fn): - cs = re.split(r"[\\/]", fn) - if len(cs) >= 2: - return os.sep.join(cs[-2:]) - - tar = tarfile.open(file0, "w:bz2") - - for name in snapshot_files[kernel]: - dir = stage - if stage == "stage1" and re.match(r"^lib/(lib)?std.*", name): - dir = "stage0" - fn_glob = os.path.join(triple, dir, name) - matches = glob.glob(fn_glob) - if not matches: - raise Exception("Not found file with name like " + fn_glob) - if len(matches) == 1: - tar.add(matches[0], "rust-stage0/" + in_tar_name(matches[0])) - else: - raise Exception("Found stale files: \n %s\n" - "Please make a clean build." 
% "\n ".join(matches)) - - if kernel == "winnt": - for path in get_winnt_runtime_deps(platform): - tar.add(path, "rust-stage0/bin/" + os.path.basename(path)) - tar.add(os.path.join(os.path.dirname(__file__), "third-party"), - "rust-stage0/bin/third-party") - - tar.close() - - h = hash_file(file0) - file1 = full_snapshot_name(date, rev, platform, h) - - shutil.move(file0, file1) - - return file1 - - -def curr_snapshot_rev(): - i = 0 - found_snap = False - date = None - rev = None - - f = open(snapshotfile) - for line in f.readlines(): - i += 1 - parsed = parse_line(i, line) - if not parsed: - continue - - if parsed["type"] == "snapshot": - date = parsed["date"] - rev = parsed["rev"] - found_snap = True - break - - if not found_snap: - raise Exception("no snapshot entries in file") - - return (date, rev) - - -def determine_curr_snapshot(triple): - i = 0 - platform = get_platform(triple) - - found_file = False - found_snap = False - hsh = None - date = None - rev = None - - f = open(snapshotfile) - for line in f.readlines(): - i += 1 - parsed = parse_line(i, line) - if not parsed: - continue - - if found_snap and parsed["type"] == "file": - if parsed["platform"] == platform: - hsh = parsed["hash"] - found_file = True - break - elif parsed["type"] == "snapshot": - date = parsed["date"] - rev = parsed["rev"] - found_snap = True - - if not found_snap: - raise Exception("no snapshot entries in file") - - if not found_file: - raise Exception("no snapshot file found for platform %s, rev %s" % - (platform, rev)) - - return full_snapshot_name(date, rev, platform, hsh) diff --git a/src/etc/tidy.py b/src/etc/tidy.py deleted file mode 100644 index 9264646673..0000000000 --- a/src/etc/tidy.py +++ /dev/null @@ -1,230 +0,0 @@ -# Copyright 2010-2014 The Rust Project Developers. See the COPYRIGHT -# file at the top-level directory of this distribution and at -# http://rust-lang.org/COPYRIGHT. -# -# Licensed under the Apache License, Version 2.0 or the MIT license -# , at your -# option. This file may not be copied, modified, or distributed -# except according to those terms. 
- -import sys -import fileinput -import subprocess -import re -import os -from licenseck import check_license -import snapshot - -err = 0 -cols = 100 -cr_flag = "ignore-tidy-cr" -tab_flag = "ignore-tidy-tab" -linelength_flag = "ignore-tidy-linelength" - -interesting_files = ['.rs', '.py', '.js', '.sh', '.c', '.h'] -uninteresting_files = ['miniz.c', 'jquery', 'rust_android_dummy'] -stable_whitelist = { - 'src/bootstrap', - 'src/build_helper', - 'src/libcollectionstest', - 'src/libcore', - 'src/libstd', - 'src/rustc/std_shim', - 'src/rustc/test_shim', - 'src/test' -} - - -def report_error_name_no(name, no, s): - global err - print("%s:%d: %s" % (name, no, s)) - err = 1 - - -def report_err(s): - report_error_name_no(fileinput.filename(), fileinput.filelineno(), s) - - -def report_warn(s): - print("%s:%d: %s" % (fileinput.filename(), - fileinput.filelineno(), - s)) - - -def do_license_check(name, contents): - if not check_license(name, contents): - report_error_name_no(name, 1, "incorrect license") - - -def update_counts(current_name): - global file_counts - global count_other_linted_files - - _, ext = os.path.splitext(current_name) - - if ext in interesting_files: - file_counts[ext] += 1 - else: - count_other_linted_files += 1 - - -def interesting_file(f): - if any(x in f for x in uninteresting_files): - return False - - return any(os.path.splitext(f)[1] == ext for ext in interesting_files) - - -# Be careful to support Python 2.4, 2.6, and 3.x here! -config_proc = subprocess.Popen(["git", "config", "core.autocrlf"], - stdout=subprocess.PIPE) -result = config_proc.communicate()[0] - -true = "true".encode('utf8') -autocrlf = result.strip() == true if result is not None else False - -current_name = "" -current_contents = "" -check_tab = True -check_cr = True -check_linelength = True - -if len(sys.argv) < 2: - print("usage: tidy.py ") - sys.exit(1) - -src_dir = sys.argv[1] - -count_lines = 0 -count_non_blank_lines = 0 -count_other_linted_files = 0 - -file_counts = {ext: 0 for ext in interesting_files} - -all_paths = set() -needs_unstable_attr = set() - -try: - for (dirpath, dirnames, filenames) in os.walk(src_dir): - # Skip some third-party directories - skippable_dirs = { - 'src/jemalloc', - 'src/llvm', - 'src/gyp', - 'src/libbacktrace', - 'src/libuv', - 'src/compiler-rt', - 'src/rt/hoedown', - 'src/rustllvm', - 'src/rt/valgrind', - 'src/rt/msvc', - 'src/rust-installer', - 'src/liblibc', - } - - dirpath = os.path.normpath(dirpath) - if any(os.path.normpath(d) in dirpath for d in skippable_dirs): - continue - - file_names = [os.path.join(dirpath, f) for f in filenames - if interesting_file(f) - and not f.endswith("_gen.rs") - and not ".#" is f] - - if not file_names: - continue - - for line in fileinput.input(file_names, - openhook=fileinput.hook_encoded("utf-8")): - - filename = fileinput.filename() - - if "tidy.py" not in filename: - if "TODO" in line: - report_err("TODO is deprecated; use FIXME") - match = re.match(r'^.*/(\*|/!?)\s*XXX', line) - if match: - report_err("XXX is no longer necessary, use FIXME") - match = re.match(r'^.*//\s*(NOTE.*)$', line) - if match and "TRAVIS" not in os.environ: - m = match.group(1) - if "snap" in m.lower(): - report_warn(match.group(1)) - match = re.match(r'^.*//\s*SNAP\s+(\w+)', line) - if match: - hsh = match.group(1) - date, rev = snapshot.curr_snapshot_rev() - if not hsh.startswith(rev): - report_err("snapshot out of date (" + date - + "): " + line) - else: - if "SNAP " in line: - report_warn("unmatched SNAP line: " + line) - search = 
re.search(r'^#!\[unstable', line) - if search: - needs_unstable_attr.discard(filename) - - if cr_flag in line: - check_cr = False - if tab_flag in line: - check_tab = False - if linelength_flag in line: - check_linelength = False - - if check_tab and ('\t' in line and - "Makefile" not in filename): - report_err("tab character") - if check_cr and not autocrlf and '\r' in line: - report_err("CR character") - if line.endswith(" \n") or line.endswith("\t\n"): - report_err("trailing whitespace") - line_len = len(line)-2 if autocrlf else len(line)-1 - - if check_linelength and line_len > cols: - report_err("line longer than %d chars" % cols) - - if fileinput.isfirstline(): - # This happens at the end of each file except the last. - if current_name != "": - update_counts(current_name) - assert len(current_contents) > 0 - do_license_check(current_name, current_contents) - - current_name = filename - current_contents = "" - check_cr = True - check_tab = True - check_linelength = True - if all(f not in filename for f in stable_whitelist) and \ - re.search(r'src/.*/lib\.rs', filename): - needs_unstable_attr.add(filename) - - # Put a reasonable limit on the amount of header data we use for - # the licenseck - if len(current_contents) < 1000: - current_contents += line - - count_lines += 1 - if line.strip(): - count_non_blank_lines += 1 - - if current_name != "": - update_counts(current_name) - assert len(current_contents) > 0 - do_license_check(current_name, current_contents) - for f in needs_unstable_attr: - report_error_name_no(f, 1, "requires unstable attribute") - -except UnicodeDecodeError as e: - report_err("UTF-8 decoding error " + str(e)) - -print -for ext in sorted(file_counts, key=file_counts.get, reverse=True): - print("* linted {} {} files".format(file_counts[ext], ext)) -print("* linted {} other files".format(count_other_linted_files)) -print("* total lines of code: {}".format(count_lines)) -print("* total non-blank lines of code: {}".format(count_non_blank_lines)) -print() - -sys.exit(err) diff --git a/src/etc/unicode.py b/src/etc/unicode.py index 5a7632868e..a99770f226 100755 --- a/src/etc/unicode.py +++ b/src/etc/unicode.py @@ -25,6 +25,9 @@ import fileinput, re, os, sys, operator +bytes_old = 0 +bytes_new = 0 + preamble = '''// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. @@ -307,12 +310,137 @@ def emit_table(f, name, t_data, t_type = "&'static [(char, char)]", is_pub=True, format_table_content(f, data, 8) f.write("\n ];\n\n") +def emit_trie_lookup_range_table(f): + f.write(""" + +// BoolTrie is a trie for representing a set of Unicode codepoints. It is +// implemented with postfix compression (sharing of identical child nodes), +// which gives both compact size and fast lookup. +// +// The space of Unicode codepoints is divided into 3 subareas, each +// represented by a trie with different depth. In the first (0..0x800), there +// is no trie structure at all; each u64 entry corresponds to a bitvector +// effectively holding 64 bool values. +// +// In the second (0x800..0x10000), each child of the root node represents a +// 64-wide subrange, but instead of storing the full 64-bit value of the leaf, +// the trie stores an 8-bit index into a shared table of leaf values. This +// exploits the fact that in reasonable sets, many such leaves can be shared. 
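As an aside on the BoolTrie layout described above, a self-contained sketch of the first-area lookup (codepoints below 0x800 index a flat bitmap of 32 `u64` chunks directly); the function and values are illustrative simplifications of the generated code:

```rust
// Flat-bitmap membership test for codepoints below 0x800: `c >> 6` selects
// one of 32 u64 chunks and `c & 63` selects the bit within that chunk.
fn bitmap_contains(r1: &[u64; 32], c: u32) -> bool {
    assert!(c < 0x800);
    (r1[(c >> 6) as usize] >> (c & 63)) & 1 != 0
}

fn main() {
    let mut r1 = [0u64; 32];
    let a = 'A' as u32;                    // U+0041: chunk 1, bit 1
    r1[(a >> 6) as usize] |= 1 << (a & 63);
    assert!(bitmap_contains(&r1, a));
    assert!(!bitmap_contains(&r1, 'B' as u32));
}
```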
+// +// In the third (0x10000..0x110000), each child of the root node represents a +// 4096-wide subrange, and the trie stores an 8-bit index into a 64-byte slice +// of a child tree. Each of these 64 bytes represents an index into the table +// of shared 64-bit leaf values. This exploits the sparse structure in the +// non-BMP range of most Unicode sets. +pub struct BoolTrie { + // 0..0x800 (corresponding to 1 and 2 byte utf-8 sequences) + r1: [u64; 32], // leaves + + // 0x800..0x10000 (corresponding to 3 byte utf-8 sequences) + r2: [u8; 992], // first level + r3: &'static [u64], // leaves + + // 0x10000..0x110000 (corresponding to 4 byte utf-8 sequences) + r4: [u8; 256], // first level + r5: &'static [u8], // second level + r6: &'static [u64], // leaves +} + +fn trie_range_leaf(c: usize, bitmap_chunk: u64) -> bool { + ((bitmap_chunk >> (c & 63)) & 1) != 0 +} + +fn trie_lookup_range_table(c: char, r: &'static BoolTrie) -> bool { + let c = c as usize; + if c < 0x800 { + trie_range_leaf(c, r.r1[c >> 6]) + } else if c < 0x10000 { + let child = r.r2[(c >> 6) - 0x20]; + trie_range_leaf(c, r.r3[child as usize]) + } else { + let child = r.r4[(c >> 12) - 0x10]; + let leaf = r.r5[((child as usize) << 6) + ((c >> 6) & 0x3f)]; + trie_range_leaf(c, r.r6[leaf as usize]) + } +}\n +""") + +def compute_trie(rawdata, chunksize): + root = [] + childmap = {} + child_data = [] + for i in range(len(rawdata) / chunksize): + data = rawdata[i * chunksize: (i + 1) * chunksize] + child = '|'.join(map(str, data)) + if child not in childmap: + childmap[child] = len(childmap) + child_data.extend(data) + root.append(childmap[child]) + return (root, child_data) + +def emit_bool_trie(f, name, t_data, is_pub=True): + global bytes_old, bytes_new + bytes_old += 8 * len(t_data) + CHUNK = 64 + rawdata = [False] * 0x110000; + for (lo, hi) in t_data: + for cp in range(lo, hi + 1): + rawdata[cp] = True + + # convert to bitmap chunks of 64 bits each + chunks = [] + for i in range(0x110000 / CHUNK): + chunk = 0 + for j in range(64): + if rawdata[i * 64 + j]: + chunk |= 1 << j + chunks.append(chunk) + + pub_string = "" + if is_pub: + pub_string = "pub " + f.write(" %sconst %s: &'static super::BoolTrie = &super::BoolTrie {\n" % (pub_string, name)) + f.write(" r1: [\n") + data = ','.join('0x%016x' % chunk for chunk in chunks[0:0x800 / CHUNK]) + format_table_content(f, data, 12) + f.write("\n ],\n") + + # 0x800..0x10000 trie + (r2, r3) = compute_trie(chunks[0x800 / CHUNK : 0x10000 / CHUNK], 64 / CHUNK) + f.write(" r2: [\n") + data = ','.join(str(node) for node in r2) + format_table_content(f, data, 12) + f.write("\n ],\n") + f.write(" r3: &[\n") + data = ','.join('0x%016x' % chunk for chunk in r3) + format_table_content(f, data, 12) + f.write("\n ],\n") + + # 0x10000..0x110000 trie + (mid, r6) = compute_trie(chunks[0x10000 / CHUNK : 0x110000 / CHUNK], 64 / CHUNK) + (r4, r5) = compute_trie(mid, 64) + f.write(" r4: [\n") + data = ','.join(str(node) for node in r4) + format_table_content(f, data, 12) + f.write("\n ],\n") + f.write(" r5: &[\n") + data = ','.join(str(node) for node in r5) + format_table_content(f, data, 12) + f.write("\n ],\n") + f.write(" r6: &[\n") + data = ','.join('0x%016x' % chunk for chunk in r6) + format_table_content(f, data, 12) + f.write("\n ],\n") + + f.write(" };\n\n") + bytes_new += 256 + 992 + 256 + 8 * len(r3) + len(r5) + 8 * len(r6) + def emit_property_module(f, mod, tbl, emit): f.write("pub mod %s {\n" % mod) for cat in sorted(emit): - emit_table(f, "%s_table" % cat, tbl[cat]) + emit_bool_trie(f, 
"%s_table" % cat, tbl[cat]) f.write(" pub fn %s(c: char) -> bool {\n" % cat) - f.write(" super::bsearch_range_table(c, %s_table)\n" % cat) + f.write(" super::trie_lookup_range_table(c, %s_table)\n" % cat) f.write(" }\n\n") f.write("}\n\n") @@ -402,8 +530,9 @@ pub const UNICODE_VERSION: (u64, u64, u64) = (%s, %s, %s); norm_props = load_properties("DerivedNormalizationProps.txt", ["Full_Composition_Exclusion"]) - # bsearch_range_table is used in all the property modules below - emit_bsearch_range_table(rf) + # trie_lookup_table is used in all the property modules below + emit_trie_lookup_range_table(rf) + # emit_bsearch_range_table(rf) # category tables for (name, cat, pfuns) in ("general_category", gencats, ["N", "Cc"]), \ @@ -414,3 +543,4 @@ pub const UNICODE_VERSION: (u64, u64, u64) = (%s, %s, %s); # normalizations and conversions module emit_norm_module(rf, canon_decomp, compat_decomp, combines, norm_props) emit_conversions_module(rf, to_upper, to_lower, to_title) + #print 'bytes before = %d, bytes after = %d' % (bytes_old, bytes_new) diff --git a/src/liballoc/Cargo.toml b/src/liballoc/Cargo.toml index 5da0f1a10b..0889ca9fc8 100644 --- a/src/liballoc/Cargo.toml +++ b/src/liballoc/Cargo.toml @@ -6,7 +6,6 @@ version = "0.0.0" [lib] name = "alloc" path = "lib.rs" -test = false [dependencies] core = { path = "../libcore" } diff --git a/src/liballoc/arc.rs b/src/liballoc/arc.rs index 4aba567fa1..d0a51e320c 100644 --- a/src/liballoc/arc.rs +++ b/src/liballoc/arc.rs @@ -592,6 +592,31 @@ impl Drop for Arc { } } +impl Weak { + /// Constructs a new `Weak` without an accompanying instance of T. + /// + /// This allocates memory for T, but does not initialize it. Calling + /// Weak::upgrade() on the return value always gives None. + /// + /// # Examples + /// + /// ``` + /// use std::sync::Weak; + /// + /// let empty: Weak = Weak::new(); + /// ``` + #[stable(feature = "downgraded_weak", since = "1.10.0")] + pub fn new() -> Weak { + unsafe { + Weak { ptr: Shared::new(Box::into_raw(box ArcInner { + strong: atomic::AtomicUsize::new(0), + weak: atomic::AtomicUsize::new(1), + data: uninitialized(), + }))} + } + } +} + impl Weak { /// Upgrades a weak reference to a strong reference. /// @@ -682,6 +707,13 @@ impl Clone for Weak { } } +#[stable(feature = "downgraded_weak", since = "1.10.0")] +impl Default for Weak { + fn default() -> Weak { + Weak::new() + } +} + #[stable(feature = "arc_weak", since = "1.4.0")] impl Drop for Weak { /// Drops the `Weak`. @@ -907,35 +939,6 @@ impl From for Arc { } } -impl Weak { - /// Constructs a new `Weak` without an accompanying instance of T. - /// - /// This allocates memory for T, but does not initialize it. Calling - /// Weak::upgrade() on the return value always gives None. 
- /// - /// # Examples - /// - /// ``` - /// #![feature(downgraded_weak)] - /// - /// use std::sync::Weak; - /// - /// let empty: Weak = Weak::new(); - /// ``` - #[unstable(feature = "downgraded_weak", - reason = "recently added", - issue = "30425")] - pub fn new() -> Weak { - unsafe { - Weak { ptr: Shared::new(Box::into_raw(box ArcInner { - strong: atomic::AtomicUsize::new(0), - weak: atomic::AtomicUsize::new(1), - data: uninitialized(), - }))} - } - } -} - #[cfg(test)] mod tests { use std::clone::Clone; diff --git a/src/liballoc/boxed.rs b/src/liballoc/boxed.rs index 7bdf9eaccc..10e4ea1c3f 100644 --- a/src/liballoc/boxed.rs +++ b/src/liballoc/boxed.rs @@ -525,14 +525,16 @@ impl ExactSizeIterator for Box {} /// } /// ``` #[rustc_paren_sugar] -#[unstable(feature = "fnbox", reason = "Newly introduced", issue = "28796")] +#[unstable(feature = "fnbox", + reason = "will be deprecated if and when Box becomes usable", issue = "28796")] pub trait FnBox { type Output; fn call_box(self: Box, args: A) -> Self::Output; } -#[unstable(feature = "fnbox", reason = "Newly introduced", issue = "28796")] +#[unstable(feature = "fnbox", + reason = "will be deprecated if and when Box becomes usable", issue = "28796")] impl FnBox for F where F: FnOnce { type Output = F::Output; @@ -542,7 +544,8 @@ impl FnBox for F where F: FnOnce } } -#[unstable(feature = "fnbox", reason = "Newly introduced", issue = "28796")] +#[unstable(feature = "fnbox", + reason = "will be deprecated if and when Box becomes usable", issue = "28796")] impl<'a, A, R> FnOnce for Box + 'a> { type Output = R; @@ -551,7 +554,8 @@ impl<'a, A, R> FnOnce for Box + 'a> { } } -#[unstable(feature = "fnbox", reason = "Newly introduced", issue = "28796")] +#[unstable(feature = "fnbox", + reason = "will be deprecated if and when Box becomes usable", issue = "28796")] impl<'a, A, R> FnOnce for Box + Send + 'a> { type Output = R; diff --git a/src/liballoc/lib.rs b/src/liballoc/lib.rs index c2dad9a1ae..0293d5402c 100644 --- a/src/liballoc/lib.rs +++ b/src/liballoc/lib.rs @@ -90,7 +90,6 @@ #![feature(unique)] #![feature(unsafe_no_drop_flag, filling_drop)] #![feature(unsize)] -#![feature(extended_compare_and_swap)] #![cfg_attr(not(test), feature(raw, fn_traits, placement_new_protocol))] #![cfg_attr(test, feature(test, box_heap))] diff --git a/src/liballoc/raw_vec.rs b/src/liballoc/raw_vec.rs index c407cef25e..8b3168b29a 100644 --- a/src/liballoc/raw_vec.rs +++ b/src/liballoc/raw_vec.rs @@ -147,6 +147,7 @@ impl RawVec { /// Gets the capacity of the allocation. /// /// This will always be `usize::MAX` if `T` is zero-sized. + #[inline(always)] pub fn cap(&self) -> usize { if mem::size_of::() == 0 { !0 diff --git a/src/liballoc/rc.rs b/src/liballoc/rc.rs index c2f0a96132..b92f5af05e 100644 --- a/src/liballoc/rc.rs +++ b/src/liballoc/rc.rs @@ -720,6 +720,33 @@ impl !marker::Sync for Weak {} #[unstable(feature = "coerce_unsized", issue = "27732")] impl, U: ?Sized> CoerceUnsized> for Weak {} +impl Weak { + /// Constructs a new `Weak` without an accompanying instance of T. + /// + /// This allocates memory for T, but does not initialize it. Calling + /// Weak::upgrade() on the return value always gives None. 
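The sentence just above is the behavioural crux of the newly stabilized `rc::Weak::new`: the allocation starts with a strong count of zero, so upgrading can never succeed. A minimal usage sketch of the stable API (my own example, not part of the patch):

```
use std::rc::{Rc, Weak};

fn main() {
    // A free-standing Weak has no strong count, so it can never upgrade...
    let empty: Weak<i32> = Weak::new();
    assert!(empty.upgrade().is_none());

    // ...unlike a Weak obtained by downgrading a live Rc.
    let five = Rc::new(5);
    let weak_five = Rc::downgrade(&five);
    assert!(weak_five.upgrade().is_some());
}
```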
+ /// + /// # Examples + /// + /// ``` + /// use std::rc::Weak; + /// + /// let empty: Weak = Weak::new(); + /// ``` + #[stable(feature = "downgraded_weak", since = "1.10.0")] + pub fn new() -> Weak { + unsafe { + Weak { + ptr: Shared::new(Box::into_raw(box RcBox { + strong: Cell::new(0), + weak: Cell::new(1), + value: uninitialized(), + })), + } + } + } +} + impl Weak { /// Upgrades a weak reference to a strong reference. /// @@ -823,34 +850,10 @@ impl fmt::Debug for Weak { } } -impl Weak { - /// Constructs a new `Weak` without an accompanying instance of T. - /// - /// This allocates memory for T, but does not initialize it. Calling - /// Weak::upgrade() on the return value always gives None. - /// - /// # Examples - /// - /// ``` - /// #![feature(downgraded_weak)] - /// - /// use std::rc::Weak; - /// - /// let empty: Weak = Weak::new(); - /// ``` - #[unstable(feature = "downgraded_weak", - reason = "recently added", - issue="30425")] - pub fn new() -> Weak { - unsafe { - Weak { - ptr: Shared::new(Box::into_raw(box RcBox { - strong: Cell::new(0), - weak: Cell::new(1), - value: uninitialized(), - })), - } - } +#[stable(feature = "downgraded_weak", since = "1.10.0")] +impl Default for Weak { + fn default() -> Weak { + Weak::new() } } diff --git a/src/liballoc_jemalloc/build.rs b/src/liballoc_jemalloc/build.rs index 5d521913b4..33a675331a 100644 --- a/src/liballoc_jemalloc/build.rs +++ b/src/liballoc_jemalloc/build.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![deny(warnings)] + extern crate build_helper; extern crate gcc; @@ -18,6 +20,7 @@ use build_helper::run; fn main() { println!("cargo:rustc-cfg=cargobuild"); + println!("cargo:rerun-if-changed=build.rs"); let target = env::var("TARGET").unwrap(); let host = env::var("HOST").unwrap(); @@ -40,6 +43,19 @@ fn main() { let cflags = compiler.args().iter().map(|s| s.to_str().unwrap()) .collect::>().join(" "); + let mut stack = src_dir.join("../jemalloc") + .read_dir().unwrap() + .map(|e| e.unwrap()) + .collect::>(); + while let Some(entry) = stack.pop() { + let path = entry.path(); + if entry.file_type().unwrap().is_dir() { + stack.extend(path.read_dir().unwrap().map(|e| e.unwrap())); + } else { + println!("cargo:rerun-if-changed={}", path.display()); + } + } + let mut cmd = Command::new("sh"); cmd.arg(src_dir.join("../jemalloc/configure").to_str().unwrap() .replace("C:\\", "/c/") diff --git a/src/liballoc_jemalloc/lib.rs b/src/liballoc_jemalloc/lib.rs index 3a30bebec5..7651d91c06 100644 --- a/src/liballoc_jemalloc/lib.rs +++ b/src/liballoc_jemalloc/lib.rs @@ -41,28 +41,28 @@ use libc::{c_int, c_void, size_t}; #[cfg(not(cargobuild))] extern {} -// Note that the symbols here are prefixed by default on OSX (we don't -// explicitly request it), and on Android and DragonFly we explicitly request -// it as unprefixing cause segfaults (mismatches in allocators). +// Note that the symbols here are prefixed by default on OSX and Windows (we +// don't explicitly request it), and on Android and DragonFly we explicitly +// request it as unprefixing cause segfaults (mismatches in allocators). 
extern { #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", - target_os = "dragonfly"), + target_os = "dragonfly", target_os = "windows"), link_name = "je_mallocx")] fn mallocx(size: size_t, flags: c_int) -> *mut c_void; #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", - target_os = "dragonfly"), + target_os = "dragonfly", target_os = "windows"), link_name = "je_rallocx")] fn rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void; #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", - target_os = "dragonfly"), + target_os = "dragonfly", target_os = "windows"), link_name = "je_xallocx")] fn xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t; #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", - target_os = "dragonfly"), + target_os = "dragonfly", target_os = "windows"), link_name = "je_sdallocx")] fn sdallocx(ptr: *mut c_void, size: size_t, flags: c_int); #[cfg_attr(any(target_os = "macos", target_os = "android", target_os = "ios", - target_os = "dragonfly"), + target_os = "dragonfly", target_os = "windows"), link_name = "je_nallocx")] fn nallocx(size: size_t, flags: c_int) -> size_t; } diff --git a/src/liballoc_system/lib.rs b/src/liballoc_system/lib.rs index 6a62e00d31..9eade937bf 100644 --- a/src/liballoc_system/lib.rs +++ b/src/liballoc_system/lib.rs @@ -18,10 +18,8 @@ form or name", issue = "27783")] #![feature(allocator)] -#![feature(libc)] #![feature(staged_api)] - -extern crate libc; +#![cfg_attr(unix, feature(libc))] // The minimum alignment guaranteed by the architecture. This value is used to // add fast paths for low alignment values. In practice, the alignment is a @@ -72,22 +70,50 @@ pub extern "C" fn __rust_usable_size(size: usize, align: usize) -> usize { #[cfg(unix)] mod imp { + extern crate libc; + use core::cmp; use core::ptr; - use libc; use MIN_ALIGN; pub unsafe fn allocate(size: usize, align: usize) -> *mut u8 { if align <= MIN_ALIGN { libc::malloc(size as libc::size_t) as *mut u8 } else { - let mut out = ptr::null_mut(); - let ret = libc::posix_memalign(&mut out, align as libc::size_t, size as libc::size_t); - if ret != 0 { - ptr::null_mut() - } else { - out as *mut u8 - } + aligned_malloc(size, align) + } + } + + #[cfg(target_os = "android")] + unsafe fn aligned_malloc(size: usize, align: usize) -> *mut u8 { + // On android we currently target API level 9 which unfortunately + // doesn't have the `posix_memalign` API used below. Instead we use + // `memalign`, but this unfortunately has the property on some systems + // where the memory returned cannot be deallocated by `free`! + // + // Upon closer inspection, however, this appears to work just fine with + // Android, so for this platform we should be fine to call `memalign` + // (which is present in API level 9). Some helpful references could + // possibly be chromium using memalign [1], attempts at documenting that + // memalign + free is ok [2] [3], or the current source of chromium + // which still uses memalign on android [4]. 
+ // + // [1]: https://codereview.chromium.org/10796020/ + // [2]: https://code.google.com/p/android/issues/detail?id=35391 + // [3]: https://bugs.chromium.org/p/chromium/issues/detail?id=138579 + // [4]: https://chromium.googlesource.com/chromium/src/base/+/master/ + // /memory/aligned_memory.cc + libc::memalign(align as libc::size_t, size as libc::size_t) as *mut u8 + } + + #[cfg(not(target_os = "android"))] + unsafe fn aligned_malloc(size: usize, align: usize) -> *mut u8 { + let mut out = ptr::null_mut(); + let ret = libc::posix_memalign(&mut out, align as libc::size_t, size as libc::size_t); + if ret != 0 { + ptr::null_mut() + } else { + out as *mut u8 } } @@ -96,8 +122,10 @@ mod imp { libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8 } else { let new_ptr = allocate(size, align); - ptr::copy(ptr, new_ptr, cmp::min(size, old_size)); - deallocate(ptr, old_size, align); + if !new_ptr.is_null() { + ptr::copy(ptr, new_ptr, cmp::min(size, old_size)); + deallocate(ptr, old_size, align); + } new_ptr } } diff --git a/src/libbacktrace/ChangeLog b/src/libbacktrace/ChangeLog index 2afa470553..acc07047f6 100644 --- a/src/libbacktrace/ChangeLog +++ b/src/libbacktrace/ChangeLog @@ -1,15 +1,33 @@ +2016-05-18 Uros Bizjak + + PR target/71161 + * elf.c (phdr_callback) [__i386__]: Add + __attribute__((__force_align_arg_pointer__)). + +2016-03-02 Maxim Ostapenko + + * elf.c (backtrace_initialize): Properly initialize elf_fileline_fn to + avoid possible crash. + (elf_add): Don't set *fileline_fn to elf_nodebug value in case of + missing debug info anymore. + +2016-02-06 John David Anglin + + * mmap.c (MAP_FAILED): Define if not defined. + 2016-01-04 Jakub Jelinek Update copyright years. 2015-12-18 Andris Pavenis - * configure.ac: Specify that DJGPP do not have mmap even when sys/mman.h exists + * configure.ac: Specify that DJGPP do not have mmap + even when sys/mman.h exists. * configure: Regenerate 2015-12-09 John David Anglin - PR 68115/libfortran + PR libgfortran/68115 * configure.ac: Set libbacktrace_cv_sys_sync to no on hppa*-*-hpux*. * configure: Regenerate. * elf.c (backtrace_initialize): Cast __sync_bool_compare_and_swap call diff --git a/src/libbacktrace/elf.c b/src/libbacktrace/elf.c index 05cc5c0473..81ba3440ab 100644 --- a/src/libbacktrace/elf.c +++ b/src/libbacktrace/elf.c @@ -791,7 +791,6 @@ elf_add (struct backtrace_state *state, int descriptor, uintptr_t base_address, { if (!backtrace_close (descriptor, error_callback, data)) goto fail; - *fileline_fn = elf_nodebug; return 1; } @@ -867,6 +866,9 @@ struct phdr_data libraries. */ static int +#ifdef __i386__ +__attribute__ ((__force_align_arg_pointer__)) +#endif phdr_callback (struct dl_phdr_info *info, size_t size ATTRIBUTE_UNUSED, void *pdata) { @@ -925,7 +927,7 @@ backtrace_initialize (struct backtrace_state *state, int descriptor, int ret; int found_sym; int found_dwarf; - fileline elf_fileline_fn; + fileline elf_fileline_fn = elf_nodebug; struct phdr_data pd; ret = elf_add (state, descriptor, 0, error_callback, data, &elf_fileline_fn, diff --git a/src/libbacktrace/mmap.c b/src/libbacktrace/mmap.c index 0ed4802d02..138ef70711 100644 --- a/src/libbacktrace/mmap.c +++ b/src/libbacktrace/mmap.c @@ -50,6 +50,10 @@ POSSIBILITY OF SUCH DAMAGE. */ #define MAP_ANONYMOUS MAP_ANON #endif +#ifndef MAP_FAILED +#define MAP_FAILED ((void *)-1) +#endif + /* A list of free memory blocks. 
*/ struct backtrace_freelist_struct diff --git a/src/libbacktrace/pecoff.c b/src/libbacktrace/pecoff.c index 31126cf474..04e0bafb14 100644 --- a/src/libbacktrace/pecoff.c +++ b/src/libbacktrace/pecoff.c @@ -602,6 +602,9 @@ coff_add (struct backtrace_state *state, int descriptor, const b_coff_section_header *sects; struct backtrace_view str_view; int str_view_valid; + // NOTE: upstream this is a `size_t` but this was fixed in Rust commit + // 55e2b7e1b, see #33729 for more info. If you see this in a diff + // against the upstream libbacktrace, that's what's going on. uint32_t str_size; off_t str_off; struct backtrace_view syms_view; diff --git a/src/libcollections/Cargo.toml b/src/libcollections/Cargo.toml index 18e322ff74..65d456e750 100644 --- a/src/libcollections/Cargo.toml +++ b/src/libcollections/Cargo.toml @@ -6,9 +6,12 @@ version = "0.0.0" [lib] name = "collections" path = "lib.rs" -test = false [dependencies] alloc = { path = "../liballoc" } core = { path = "../libcore" } rustc_unicode = { path = "../librustc_unicode" } + +[[test]] +name = "collectionstest" +path = "../libcollectionstest/lib.rs" diff --git a/src/libcollections/binary_heap.rs b/src/libcollections/binary_heap.rs index c9dd1efb37..43c6e6e812 100644 --- a/src/libcollections/binary_heap.rs +++ b/src/libcollections/binary_heap.rs @@ -153,12 +153,15 @@ use core::iter::FromIterator; use core::mem::swap; +use core::mem::size_of; use core::ptr; use core::fmt; use slice; use vec::{self, Vec}; +use super::SpecExtend; + /// A priority queue implemented with a binary heap. /// /// This will be a max-heap. @@ -738,6 +741,71 @@ impl BinaryHeap { pub fn clear(&mut self) { self.drain(); } + + fn rebuild(&mut self) { + let mut n = self.len() / 2; + while n > 0 { + n -= 1; + self.sift_down(n); + } + } + + /// Moves all the elements of `other` into `self`, leaving `other` empty. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(binary_heap_append)] + /// + /// use std::collections::BinaryHeap; + /// + /// let v = vec![-10, 1, 2, 3, 3]; + /// let mut a = BinaryHeap::from(v); + /// + /// let v = vec![-20, 5, 43]; + /// let mut b = BinaryHeap::from(v); + /// + /// a.append(&mut b); + /// + /// assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]); + /// assert!(b.is_empty()); + /// ``` + #[unstable(feature = "binary_heap_append", + reason = "needs to be audited", + issue = "32526")] + pub fn append(&mut self, other: &mut Self) { + if self.len() < other.len() { + swap(self, other); + } + + if other.is_empty() { + return; + } + + #[inline(always)] + fn log2_fast(x: usize) -> usize { + 8 * size_of::() - (x.leading_zeros() as usize) - 1 + } + + // `rebuild` takes O(len1 + len2) operations + // and about 2 * (len1 + len2) comparisons in the worst case + // while `extend` takes O(len2 * log_2(len1)) operations + // and about 1 * len2 * log_2(len1) comparisons in the worst case, + // assuming len1 >= len2. + #[inline] + fn better_to_rebuild(len1: usize, len2: usize) -> bool { + 2 * (len1 + len2) < len2 * log2_fast(len1) + } + + if better_to_rebuild(self.len(), other.len()) { + self.data.append(&mut other.data); + self.rebuild(); + } else { + self.extend(other.drain()); + } + } } /// Hole represents a hole in a slice i.e. an index without valid value @@ -851,6 +919,7 @@ impl<'a, T> ExactSizeIterator for Iter<'a, T> {} /// An iterator that moves out of a `BinaryHeap`. 
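The cost comparison spelled out in the comments of `BinaryHeap::append` above is easy to sanity-check numerically. The two helpers below are restated from the hunk; the example sizes are my own and only illustrate which side of the cutoff they fall on:

```
fn log2_fast(x: usize) -> usize {
    8 * std::mem::size_of::<usize>() - (x.leading_zeros() as usize) - 1
}

// Rebuilding costs roughly 2 * (len1 + len2) comparisons; extending costs
// roughly len2 * log2(len1), assuming len1 >= len2 (the larger heap is `self`).
fn better_to_rebuild(len1: usize, len2: usize) -> bool {
    2 * (len1 + len2) < len2 * log2_fast(len1)
}

fn main() {
    // Merging two similarly sized heaps: a full rebuild is cheaper.
    assert!(better_to_rebuild(1024, 1024)); // 4096 < 10240
    // Appending a handful of elements to a big heap: plain extend wins.
    assert!(!better_to_rebuild(1024, 16));  // 2080 >= 160
}
```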
#[stable(feature = "rust1", since = "1.0.0")] +#[derive(Clone)] pub struct IntoIter { iter: vec::IntoIter, } @@ -917,11 +986,7 @@ impl<'a, T: 'a> ExactSizeIterator for Drain<'a, T> {} impl From> for BinaryHeap { fn from(vec: Vec) -> BinaryHeap { let mut heap = BinaryHeap { data: vec }; - let mut n = heap.len() / 2; - while n > 0 { - n -= 1; - heap.sift_down(n); - } + heap.rebuild(); heap } } @@ -980,7 +1045,26 @@ impl<'a, T> IntoIterator for &'a BinaryHeap where T: Ord { #[stable(feature = "rust1", since = "1.0.0")] impl Extend for BinaryHeap { + #[inline] fn extend>(&mut self, iter: I) { + >::spec_extend(self, iter); + } +} + +impl> SpecExtend for BinaryHeap { + default fn spec_extend(&mut self, iter: I) { + self.extend_desugared(iter.into_iter()); + } +} + +impl SpecExtend> for BinaryHeap { + fn spec_extend(&mut self, ref mut other: BinaryHeap) { + self.append(other); + } +} + +impl BinaryHeap { + fn extend_desugared>(&mut self, iter: I) { let iterator = iter.into_iter(); let (lower, _) = iterator.size_hint(); diff --git a/src/libcollections/btree/map.rs b/src/libcollections/btree/map.rs index de40568fd6..ec2f4a9f7f 100644 --- a/src/libcollections/btree/map.rs +++ b/src/libcollections/btree/map.rs @@ -11,7 +11,7 @@ use core::cmp::Ordering; use core::fmt::Debug; use core::hash::{Hash, Hasher}; -use core::iter::FromIterator; +use core::iter::{FromIterator, Peekable}; use core::marker::PhantomData; use core::ops::Index; use core::{fmt, intrinsics, mem, ptr}; @@ -286,7 +286,7 @@ pub struct Values<'a, K: 'a, V: 'a> { } /// A mutable iterator over a BTreeMap's values. -#[unstable(feature = "map_values_mut", reason = "recently added", issue = "32551")] +#[stable(feature = "map_values_mut", since = "1.10.0")] pub struct ValuesMut<'a, K: 'a, V: 'a> { inner: IterMut<'a, K, V>, } @@ -348,6 +348,12 @@ pub struct OccupiedEntry<'a, K: 'a, V: 'a> { _marker: PhantomData<&'a mut (K, V)>, } +// An iterator for merging two sorted sequences into one +struct MergeIter> { + left: Peekable, + right: Peekable, +} + impl BTreeMap { /// Makes a new empty BTreeMap with a reasonable choice for B. /// @@ -535,6 +541,62 @@ impl BTreeMap { } } + /// Moves all elements from `other` into `Self`, leaving `other` empty. + /// + /// # Examples + /// + /// ``` + /// #![feature(btree_append)] + /// use std::collections::BTreeMap; + /// + /// let mut a = BTreeMap::new(); + /// a.insert(1, "a"); + /// a.insert(2, "b"); + /// a.insert(3, "c"); + /// + /// let mut b = BTreeMap::new(); + /// b.insert(3, "d"); + /// b.insert(4, "e"); + /// b.insert(5, "f"); + /// + /// a.append(&mut b); + /// + /// assert_eq!(a.len(), 5); + /// assert_eq!(b.len(), 0); + /// + /// assert_eq!(a[&1], "a"); + /// assert_eq!(a[&2], "b"); + /// assert_eq!(a[&3], "d"); + /// assert_eq!(a[&4], "e"); + /// assert_eq!(a[&5], "f"); + /// ``` + #[unstable(feature = "btree_append", reason = "recently added as part of collections reform 2", + issue = "19986")] + pub fn append(&mut self, other: &mut Self) { + // Do we have to append anything at all? + if other.len() == 0 { + return; + } + + // We can just swap `self` and `other` if `self` is empty. + if self.len() == 0 { + mem::swap(self, other); + return; + } + + // First, we merge `self` and `other` into a sorted sequence in linear time. 
+ let self_iter = mem::replace(self, BTreeMap::new()).into_iter(); + let other_iter = mem::replace(other, BTreeMap::new()).into_iter(); + let iter = MergeIter { + left: self_iter.peekable(), + right: other_iter.peekable(), + }; + + // Second, we build a tree from the sorted sequence in linear time. + self.from_sorted_iter(iter); + self.fix_right_edge(); + } + /// Constructs a double-ended iterator over a sub-range of elements in the map, starting /// at min, and ending at max. If min is `Unbounded`, then it will be treated as "negative /// infinity", and if max is `Unbounded`, then it will be treated as "positive infinity". @@ -724,6 +786,76 @@ impl BTreeMap { }) } } + + fn from_sorted_iter>(&mut self, iter: I) { + let mut cur_node = last_leaf_edge(self.root.as_mut()).into_node(); + // Iterate through all key-value pairs, pushing them into nodes at the right level. + for (key, value) in iter { + // Try to push key-value pair into the current leaf node. + if cur_node.len() < node::CAPACITY { + cur_node.push(key, value); + } else { + // No space left, go up and push there. + let mut open_node; + let mut test_node = cur_node.forget_type(); + loop { + match test_node.ascend() { + Ok(parent) => { + let parent = parent.into_node(); + if parent.len() < node::CAPACITY { + // Found a node with space left, push here. + open_node = parent; + break; + } else { + // Go up again. + test_node = parent.forget_type(); + } + }, + Err(node) => { + // We are at the top, create a new root node and push there. + open_node = node.into_root_mut().push_level(); + break; + }, + } + } + + // Push key-value pair and new right subtree. + let tree_height = open_node.height() - 1; + let mut right_tree = node::Root::new_leaf(); + for _ in 0..tree_height { + right_tree.push_level(); + } + open_node.push(key, value, right_tree); + + // Go down to the right-most leaf again. + cur_node = last_leaf_edge(open_node.forget_type()).into_node(); + } + + self.length += 1; + } + } + + fn fix_right_edge(&mut self) { + // Handle underfull nodes, start from the top. + let mut cur_node = self.root.as_mut(); + while let Internal(internal) = cur_node.force() { + // Check if right-most child is underfull. + let mut last_edge = internal.last_edge(); + let right_child_len = last_edge.reborrow().descend().len(); + if right_child_len < node::CAPACITY / 2 { + // We need to steal. + let mut last_kv = match last_edge.left_kv() { + Ok(left) => left, + Err(_) => unreachable!(), + }; + last_kv.bulk_steal_left(node::CAPACITY/2 - right_child_len); + last_edge = last_kv.right_edge(); + } + + // Go further down. 
+ cur_node = last_edge.descend(); + } + } } impl<'a, K: 'a, V: 'a> IntoIterator for &'a BTreeMap { @@ -1012,7 +1144,7 @@ impl<'a, K, V> Iterator for Range<'a, K, V> { } } -#[unstable(feature = "map_values_mut", reason = "recently added", issue = "32551")] +#[stable(feature = "map_values_mut", since = "1.10.0")] impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { type Item = &'a mut V; @@ -1025,14 +1157,14 @@ impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { } } -#[unstable(feature = "map_values_mut", reason = "recently added", issue = "32551")] +#[stable(feature = "map_values_mut", since = "1.10.0")] impl<'a, K, V> DoubleEndedIterator for ValuesMut<'a, K, V> { fn next_back(&mut self) -> Option<&'a mut V> { self.inner.next_back().map(|(_, v)| v) } } -#[unstable(feature = "map_values_mut", reason = "recently added", issue = "32551")] +#[stable(feature = "map_values_mut", since = "1.10.0")] impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> { fn len(&self) -> usize { self.inner.len() @@ -1443,7 +1575,6 @@ impl BTreeMap { /// Basic usage: /// /// ``` - /// # #![feature(map_values_mut)] /// use std::collections::BTreeMap; /// /// let mut a = BTreeMap::new(); @@ -1458,8 +1589,8 @@ impl BTreeMap { /// assert_eq!(values, [String::from("hello!"), /// String::from("goodbye!")]); /// ``` - #[unstable(feature = "map_values_mut", reason = "recently added", issue = "32551")] - pub fn values_mut<'a>(&'a mut self) -> ValuesMut<'a, K, V> { + #[stable(feature = "map_values_mut", since = "1.10.0")] + pub fn values_mut(&mut self) -> ValuesMut { ValuesMut { inner: self.iter_mut() } } @@ -1522,12 +1653,21 @@ impl<'a, K: Ord, V> Entry<'a, K, V> { Vacant(entry) => entry.insert(default()), } } + + /// Returns a reference to this entry's key. + #[stable(feature = "map_entry_keys", since = "1.10.0")] + pub fn key(&self) -> &K { + match *self { + Occupied(ref entry) => entry.key(), + Vacant(ref entry) => entry.key(), + } + } } impl<'a, K: Ord, V> VacantEntry<'a, K, V> { /// Gets a reference to the key that would be used when inserting a value /// through the VacantEntry. - #[unstable(feature = "map_entry_keys", issue = "32281")] + #[stable(feature = "map_entry_keys", since = "1.10.0")] pub fn key(&self) -> &K { &self.key } @@ -1577,7 +1717,7 @@ impl<'a, K: Ord, V> VacantEntry<'a, K, V> { impl<'a, K: Ord, V> OccupiedEntry<'a, K, V> { /// Gets a reference to the key in the entry. - #[unstable(feature = "map_entry_keys", issue = "32281")] + #[stable(feature = "map_entry_keys", since = "1.10.0")] pub fn key(&self) -> &K { self.handle.reborrow().into_kv().0 } @@ -1690,32 +1830,41 @@ fn handle_underfull_node<'a, K, V>(node: NodeRef, }; if handle.can_merge() { - return Merged(handle.merge().into_node()); + Merged(handle.merge().into_node()) } else { - unsafe { - let (k, v, edge) = if is_left { - handle.reborrow_mut().left_edge().descend().pop() - } else { - handle.reborrow_mut().right_edge().descend().pop_front() - }; + if is_left { + handle.steal_left(); + } else { + handle.steal_right(); + } + Stole(handle.into_node()) + } +} - let k = mem::replace(handle.reborrow_mut().into_kv_mut().0, k); - let v = mem::replace(handle.reborrow_mut().into_kv_mut().1, v); +impl> Iterator for MergeIter { + type Item = (K, V); - // FIXME: reuse cur_node? 
- if is_left { - match handle.reborrow_mut().right_edge().descend().force() { - Leaf(mut leaf) => leaf.push_front(k, v), - Internal(mut internal) => internal.push_front(k, v, edge.unwrap()) - } - } else { - match handle.reborrow_mut().left_edge().descend().force() { - Leaf(mut leaf) => leaf.push(k, v), - Internal(mut internal) => internal.push(k, v, edge.unwrap()) - } - } - } + fn next(&mut self) -> Option<(K, V)> { + let res = match (self.left.peek(), self.right.peek()) { + (Some(&(ref left_key, _)), Some(&(ref right_key, _))) => left_key.cmp(right_key), + (Some(_), None) => Ordering::Less, + (None, Some(_)) => Ordering::Greater, + (None, None) => return None, + }; - return Stole(handle.into_node()); + // Check which elements comes first and only advance the corresponding iterator. + // If two keys are equal, take the value from `right`. + match res { + Ordering::Less => { + self.left.next() + }, + Ordering::Greater => { + self.right.next() + }, + Ordering::Equal => { + self.left.next(); + self.right.next() + }, + } } } diff --git a/src/libcollections/btree/node.rs b/src/libcollections/btree/node.rs index 8ae23a646e..ca1cf6bcc5 100644 --- a/src/libcollections/btree/node.rs +++ b/src/libcollections/btree/node.rs @@ -31,6 +31,16 @@ // Since Rust doesn't actually have dependent types and polymorphic recursion, // we make do with lots of unsafety. +// A major goal of this module is to avoid complexity by treating the tree as a generic (if +// weirdly shaped) container and avoiding dealing with most of the B-Tree invariants. As such, +// this module doesn't care whether the entries are sorted, which nodes can be underfull, or +// even what underfull means. However, we do rely on a few invariants: +// +// - Trees must have uniform depth/height. This means that every path down to a leaf from a +// given node has exactly the same length. +// - A node of length `n` has `n` keys, `n` values, and (in an internal node) `n + 1` edges. +// This implies that even an empty internal node has at least one edge. + use alloc::heap; use core::marker::PhantomData; use core::mem; @@ -43,17 +53,43 @@ use boxed::Box; const B: usize = 6; pub const CAPACITY: usize = 2 * B - 1; +/// The underlying representation of leaf nodes. Note that it is often unsafe to actually store +/// these, since only the first `len` keys and values are assumed to be initialized. As such, +/// these should always be put behind pointers, and specifically behind `BoxedNode` in the owned +/// case. +/// +/// See also rust-lang/rfcs#197, which would make this structure significantly more safe by +/// avoiding accidentally dropping unused and uninitialized keys and values. struct LeafNode { + /// The arrays storing the actual data of the node. Only the first `len` elements of each + /// array are initialized and valid. keys: [K; CAPACITY], vals: [V; CAPACITY], + + /// We use `*const` as opposed to `*mut` so as to be covariant in `K` and `V`. + /// This either points to an actual node or is null. parent: *const InternalNode, + + /// This node's index into the parent node's `edges` array. + /// `*node.parent.edges[node.parent_idx]` should be the same thing as `node`. + /// This is only guaranteed to be initialized when `parent` is nonnull. parent_idx: u16, + + /// The number of keys and values this node stores. + /// + /// This is at the end of the node's representation and next to `parent_idx` to encourage + /// the compiler to join `len` and `parent_idx` into the same 32-bit word, reducing space + /// overhead. 
len: u16, } impl LeafNode { + /// Creates a new `LeafNode`. Unsafe because all nodes should really be hidden behind + /// `BoxedNode`, preventing accidental dropping of uninitialized keys and values. unsafe fn new() -> Self { LeafNode { + // As a general policy, we leave fields uninitialized if they can be, as this should + // be both slightly faster and easier to track in Valgrind. keys: mem::uninitialized(), vals: mem::uninitialized(), parent: ptr::null(), @@ -63,15 +99,28 @@ impl LeafNode { } } -// We use repr(C) so that a pointer to an internal node can be -// directly used as a pointer to a leaf node +/// The underlying representation of internal nodes. As with `LeafNode`s, these should be hidden +/// behind `BoxedNode`s to prevent dropping uninitialized keys and values. Any pointer to an +/// `InternalNode` can be directly casted to a pointer to the underlying `LeafNode` portion of the +/// node, allowing code to act on leaf and internal nodes generically without having to even check +/// which of the two a pointer is pointing at. This property is enabled by the use of `repr(C)`. #[repr(C)] struct InternalNode { data: LeafNode, + + /// The pointers to the children of this node. `len + 1` of these are considered + /// initialized and valid. edges: [BoxedNode; 2 * B], } impl InternalNode { + /// Creates a new `InternalNode`. + /// + /// This is unsafe for two reasons. First, it returns an `InternalNode` by value, risking + /// dropping of uninitialized fields. Second, an invariant of internal nodes is that `len + 1` + /// edges are initialized and valid, meaning that even when the node is empty (having a + /// `len` of 0), there must be one initialized and valid edge. This function does not set up + /// such an edge. unsafe fn new() -> Self { InternalNode { data: LeafNode::new(), @@ -80,8 +129,12 @@ impl InternalNode { } } +/// An owned pointer to a node. This basically is either `Box>` or +/// `Box>`. However, it contains no information as to which of the two types +/// of nodes is acutally behind the box, and, partially due to this lack of information, has no +/// destructor. struct BoxedNode { - ptr: Unique> // we don't know if this points to a leaf node or an internal node + ptr: Unique> } impl BoxedNode { @@ -156,7 +209,7 @@ impl Root { } } - /// Add a new internal node with a single edge, pointing to the previous root, and make that + /// Adds a new internal node with a single edge, pointing to the previous root, and make that /// new node the root. This increases the height by 1 and is the opposite of `pop_level`. pub fn push_level(&mut self) -> NodeRef { @@ -180,7 +233,7 @@ impl Root { ret } - /// Remove the root node, using its first child as the new root. This cannot be called when + /// Removes the root node, using its first child as the new root. This cannot be called when /// the tree consists only of a leaf node. As it is intended only to be called when the root /// has only one edge, no cleanup is done on any of the other children are elements of the root. /// This decreases the height by 1 and is the opposite of `push_level`. @@ -229,6 +282,7 @@ impl Root { pub struct NodeRef { height: usize, node: NonZero<*const LeafNode>, + // This is null unless the borrow type is `Mut` root: *const Root, _marker: PhantomData<(BorrowType, Type)> } @@ -268,10 +322,20 @@ impl<'a, K, V> NodeRef, K, V, marker::Internal> { impl NodeRef { + /// Finds the length of the node. This is the number of keys or values. In an + /// internal node, the number of edges is `len() + 1`. 
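The doc comment above states the central sizing rule of this module: with `B = 6`, a node holds at most `2 * B - 1 = 11` key/value pairs, and an internal node of length `n` always owns `n + 1` edges, which is why the `edges` array has `2 * B` slots. A trivial standalone restatement of that arithmetic (mine, not patch code):

```
const B: usize = 6;
const CAPACITY: usize = 2 * B - 1;

fn edges_for_len(len: usize) -> usize {
    len + 1
}

fn main() {
    assert_eq!(CAPACITY, 11);
    assert_eq!(edges_for_len(0), 1);            // even an empty internal node keeps one edge
    assert_eq!(edges_for_len(CAPACITY), 2 * B); // a full node uses every edge slot
}
```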
pub fn len(&self) -> usize { self.as_leaf().len as usize } + /// Returns the height of this node in the whole tree. Zero height denotes the + /// leaf level. + pub fn height(&self) -> usize { + self.height + } + + /// Removes any static information about whether this node is a `Leaf` or an + /// `Internal` node. pub fn forget_type(self) -> NodeRef { NodeRef { height: self.height, @@ -281,6 +345,7 @@ impl NodeRef { } } + /// Temporarily takes out another, immutable reference to the same node. fn reborrow<'a>(&'a self) -> NodeRef, K, V, Type> { NodeRef { height: self.height, @@ -304,6 +369,13 @@ impl NodeRef { self.reborrow().into_slices().1 } + /// Finds the parent of the current node. Returns `Ok(handle)` if the current + /// node actually has a parent, where `handle` points to the edge of the parent + /// that points to the current node. Returns `Err(self)` if the current node has + /// no parent, giving back the original `NodeRef`. + /// + /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should + /// both, upon success, do nothing. pub fn ascend(self) -> Result< Handle< NodeRef< @@ -344,6 +416,9 @@ impl NodeRef { } impl NodeRef { + /// Similar to `ascend`, gets a reference to a node's parent node, but also + /// deallocate the current node in the process. This is unsafe because the + /// current node will still be accessible despite being deallocated. pub unsafe fn deallocate_and_ascend(self) -> Option< Handle< NodeRef< @@ -362,6 +437,9 @@ impl NodeRef { } impl NodeRef { + /// Similar to `ascend`, gets a reference to a node's parent node, but also + /// deallocate the current node in the process. This is unsafe because the + /// current node will still be accessible despite being deallocated. pub unsafe fn deallocate_and_ascend(self) -> Option< Handle< NodeRef< @@ -384,6 +462,8 @@ impl NodeRef { } impl<'a, K, V, Type> NodeRef, K, V, Type> { + /// Unsafely asserts to the compiler some static information about whether this + /// node is a `Leaf`. unsafe fn cast_unchecked(&mut self) -> NodeRef { @@ -395,6 +475,16 @@ impl<'a, K, V, Type> NodeRef, K, V, Type> { } } + /// Temporarily takes out another, mutable reference to the same node. Beware, as + /// this method is very dangerous, doubly so since it may not immediately appear + /// dangerous. + /// + /// Because mutable pointers can roam anywhere around the tree and can even (through + /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut` + /// can easily be used to make the original mutable pointer dangling, or, in the case + /// of a reborrowed handle, out of bounds. + // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts + // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety. unsafe fn reborrow_mut(&mut self) -> NodeRef { NodeRef { height: self.height, @@ -437,6 +527,8 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { } impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { + /// Gets a mutable reference to the root itself. This is useful primarily when the + /// height of the tree needs to be adjusted. Never call this on a reborrowed pointer. pub fn into_root_mut(self) -> &'a mut Root { unsafe { &mut *(self.root as *mut Root) @@ -460,6 +552,7 @@ impl<'a, K: 'a, V: 'a, Type> NodeRef, K, V, Type> { } impl<'a, K, V> NodeRef, K, V, marker::Leaf> { + /// Adds a key/value pair the end of the node. 
pub fn push(&mut self, key: K, val: V) { // Necessary for correctness, but this is an internal module debug_assert!(self.len() < CAPACITY); @@ -474,6 +567,7 @@ impl<'a, K, V> NodeRef, K, V, marker::Leaf> { self.as_leaf_mut().len += 1; } + /// Adds a key/value pair to the beginning of the node. pub fn push_front(&mut self, key: K, val: V) { // Necessary for correctness, but this is an internal module debug_assert!(self.len() < CAPACITY); @@ -488,6 +582,8 @@ impl<'a, K, V> NodeRef, K, V, marker::Leaf> { } impl<'a, K, V> NodeRef, K, V, marker::Internal> { + /// Adds a key/value pair and an edge to go to the right of that pair to + /// the end of the node. pub fn push(&mut self, key: K, val: V, edge: Root) { // Necessary for correctness, but this is an internal module debug_assert!(edge.height == self.height - 1); @@ -506,6 +602,8 @@ impl<'a, K, V> NodeRef, K, V, marker::Internal> { } } + /// Adds a key/value pair and an edge to go to the left of that pair to + /// the beginning of the node. pub fn push_front(&mut self, key: K, val: V, edge: Root) { // Necessary for correctness, but this is an internal module debug_assert!(edge.height == self.height - 1); @@ -534,6 +632,8 @@ impl<'a, K, V> NodeRef, K, V, marker::Internal> { } impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { + /// Removes a key/value pair from the end of this node. If this is an internal node, + /// also removes the edge that was to the right of that pair. pub fn pop(&mut self) -> (K, V, Option>) { // Necessary for correctness, but this is an internal module debug_assert!(self.len() > 0); @@ -558,6 +658,8 @@ impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { } } + /// Removes a key/value pair from the beginning of this node. If this is an internal node, + /// also removes the edge that was to the left of that pair. pub fn pop_front(&mut self) -> (K, V, Option>) { // Necessary for correctness, but this is an internal module debug_assert!(self.len() > 0); @@ -597,6 +699,7 @@ impl<'a, K, V> NodeRef, K, V, marker::LeafOrInternal> { } impl NodeRef { + /// Checks whether a node is an `Internal` node or a `Leaf` node. pub fn force(self) -> ForceResult< NodeRef, NodeRef @@ -619,6 +722,14 @@ impl NodeRef { } } +/// A reference to a specific key/value pair or edge within a node. The `Node` parameter +/// must be a `NodeRef`, while the `Type` can either be `KV` (signifying a handle on a key/value +/// pair) or `Edge` (signifying a handle on an edge). +/// +/// Note that even `Leaf` nodes can have `Edge` handles. Instead of representing a pointer to +/// a child node, these represent the spaces where child pointers would go between the key/value +/// pairs. For example, in a node with length 2, there would be 3 possible edge locations - one +/// to the left of the node, one between the two pairs, and one at the right of the node. pub struct Handle { node: Node, idx: usize, @@ -626,6 +737,8 @@ pub struct Handle { } impl Copy for Handle { } +// We don't need the full generality of `#[derive(Clone)]`, as the only time `Node` will be +// `Clone`able is when it is an immutable reference and therefore `Copy`. impl Clone for Handle { fn clone(&self) -> Self { *self @@ -633,12 +746,14 @@ impl Clone for Handle { } impl Handle { + /// Retrieves the node that contains the edge of key/value pair this handle pointes to. pub fn into_node(self) -> Node { self.node } } impl Handle, marker::KV> { + /// Creates a new handle to a key/value pair in `node`. `idx` must be less than `node.len()`. 
pub fn new_kv(node: NodeRef, idx: usize) -> Self { // Necessary for correctness, but in a private module debug_assert!(idx < node.len()); @@ -670,6 +785,7 @@ impl PartialEq impl Handle, HandleType> { + /// Temporarily takes out another, immutable handle on the same location. pub fn reborrow(&self) -> Handle, HandleType> { @@ -685,6 +801,16 @@ impl impl<'a, K, V, NodeType, HandleType> Handle, K, V, NodeType>, HandleType> { + /// Temporarily takes out another, mutable handle on the same location. Beware, as + /// this method is very dangerous, doubly so since it may not immediately appear + /// dangerous. + /// + /// Because mutable pointers can roam anywhere around the tree and can even (through + /// `into_root_mut`) mess with the root of the tree, the result of `reborrow_mut` + /// can easily be used to make the original mutable pointer dangling, or, in the case + /// of a reborrowed handle, out of bounds. + // FIXME(@gereeter) consider adding yet another type parameter to `NodeRef` that restricts + // the use of `ascend` and `into_root_mut` on reborrowed pointers, preventing this unsafety. pub unsafe fn reborrow_mut(&mut self) -> Handle, HandleType> { @@ -700,6 +826,8 @@ impl<'a, K, V, NodeType, HandleType> impl Handle, marker::Edge> { + /// Creates a new handle to an edge in `node`. `idx` must be less than or equal to + /// `node.len()`. pub fn new_edge(node: NodeRef, idx: usize) -> Self { // Necessary for correctness, but in a private module debug_assert!(idx <= node.len()); @@ -733,6 +861,11 @@ impl } impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Edge> { + /// Inserts a new key/value pair between the key/value pairs to the right and left of + /// this edge. This method assumes that there is enough space in the node for the new + /// pair to fit. + /// + /// The returned pointer points to the inserted value. fn insert_fit(&mut self, key: K, val: V) -> *mut V { // Necessary for correctness, but in a private module debug_assert!(self.node.len() < CAPACITY); @@ -747,6 +880,10 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Edge } } + /// Inserts a new key/value pair between the key/value pairs to the right and left of + /// this edge. This method splits the node if there isn't enough room. + /// + /// The returned pointer points to the inserted value. pub fn insert(mut self, key: K, val: V) -> (InsertResult<'a, K, V, marker::Leaf>, *mut V) { @@ -774,6 +911,8 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::Edge } impl<'a, K, V> Handle, K, V, marker::Internal>, marker::Edge> { + /// Fixes the parent pointer and index in the child node below this edge. This is useful + /// when the ordering of edges has been changed, such as in the various `insert` methods. fn correct_parent_link(mut self) { let idx = self.idx as u16; let ptr = self.node.as_internal_mut() as *mut _; @@ -782,18 +921,24 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: child.as_leaf_mut().parent_idx = idx; } + /// Unsafely asserts to the compiler some static information about whether the underlying + /// node of this handle is a `Leaf`. unsafe fn cast_unchecked(&mut self) -> Handle, marker::Edge> { Handle::new_edge(self.node.cast_unchecked(), self.idx) } + /// Inserts a new key/value pair and an edge that will go to the right of that new pair + /// between this edge and the key/value pair to the right of this edge. This method assumes + /// that there is enough space in the node for the new pair to fit. 
fn insert_fit(&mut self, key: K, val: V, edge: Root) { // Necessary for correctness, but in an internal module debug_assert!(self.node.len() < CAPACITY); debug_assert!(edge.height == self.node.height - 1); unsafe { + // This cast is a lie, but it allows us to reuse the key/value insertion logic. self.cast_unchecked::().insert_fit(key, val); slice_insert( @@ -811,6 +956,9 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: } } + /// Inserts a new key/value pair and an edge that will go to the right of that new pair + /// between this edge and the key/value pair to the right of this edge. This method splits + /// the node if there isn't enough room. pub fn insert(mut self, key: K, val: V, edge: Root) -> InsertResult<'a, K, V, marker::Internal> { @@ -843,6 +991,10 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: impl Handle, marker::Edge> { + /// Finds the node pointed to by this edge. + /// + /// `edge.descend().ascend().unwrap()` and `node.ascend().unwrap().descend()` should + /// both, upon success, do nothing. pub fn descend(self) -> NodeRef { NodeRef { height: self.node.height - 1, @@ -885,6 +1037,13 @@ impl<'a, K, V, NodeType> Handle, K, V, NodeType>, marker } impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::KV> { + /// Splits the underlying node into three parts: + /// + /// - The node is truncated to only contain the key/value pairs to the right of + /// this handle. + /// - The key and value pointed to by this handle and extracted. + /// - All the key/value pairs to the right of this handle are put into a newly + /// allocated node. pub fn split(mut self) -> (NodeRef, K, V, marker::Leaf>, K, V, Root) { unsafe { @@ -920,6 +1079,8 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::KV> } } + /// Removes the key/value pair pointed to by this handle, returning the edge between the + /// now adjacent key/value pairs to the left and right of this handle. pub fn remove(mut self) -> (Handle, K, V, marker::Leaf>, marker::Edge>, K, V) { unsafe { @@ -932,6 +1093,13 @@ impl<'a, K, V> Handle, K, V, marker::Leaf>, marker::KV> } impl<'a, K, V> Handle, K, V, marker::Internal>, marker::KV> { + /// Splits the underlying node into three parts: + /// + /// - The node is truncated to only contain the edges and key/value pairs to the + /// right of this handle. + /// - The key and value pointed to by this handle and extracted. + /// - All the edges and key/value pairs to the right of this handle are put into + /// a newly allocated node. pub fn split(mut self) -> (NodeRef, K, V, marker::Internal>, K, V, Root) { unsafe { @@ -979,6 +1147,9 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: } } + /// Returns whether it is valid to call `.merge()`, i.e., whether there is enough room in + /// a node to hold the combination of the nodes to the left and right of this handle along + /// with the key/value pair at this handle. pub fn can_merge(&self) -> bool { ( self.reborrow() @@ -993,6 +1164,11 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: ) <= CAPACITY } + /// Combines the node immediately to the left of this handle, the key/value pair pointed + /// to by this handle, and the node immediately to the right of this handle into one new + /// child of the underlying node, returning an edge referencing that new child. + /// + /// Assumes that this edge `.can_merge()`. 
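The `.can_merge()` check documented just above reduces to one comparison: the left child's length, plus one for the separating key/value pair at this handle, plus the right child's length must fit in a single node. A small numeric restatement with example lengths of my own choosing:

```
const CAPACITY: usize = 11; // 2 * B - 1 with B = 6, as defined in this module

fn can_merge(left_len: usize, right_len: usize) -> bool {
    left_len + 1 + right_len <= CAPACITY
}

fn main() {
    assert!(can_merge(5, 5));   // 5 + 1 + 5 = 11 fits exactly
    assert!(!can_merge(6, 5));  // 12 would overflow the merged node
}
```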
pub fn merge(mut self) -> Handle, K, V, marker::Internal>, marker::Edge> { let self1 = unsafe { ptr::read(&self) }; @@ -1063,11 +1239,145 @@ impl<'a, K, V> Handle, K, V, marker::Internal>, marker:: Handle::new_edge(self.node, self.idx) } } + + /// This removes a key/value pair from the left child and replaces it with the key/value pair + /// pointed to by this handle while pushing the old key/value pair of this handle into the right + /// child. + pub fn steal_left(&mut self) { + unsafe { + let (k, v, edge) = self.reborrow_mut().left_edge().descend().pop(); + + let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k); + let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v); + + match self.reborrow_mut().right_edge().descend().force() { + ForceResult::Leaf(mut leaf) => leaf.push_front(k, v), + ForceResult::Internal(mut internal) => internal.push_front(k, v, edge.unwrap()) + } + } + } + + /// This removes a key/value pair from the right child and replaces it with the key/value pair + /// pointed to by this handle while pushing the old key/value pair of this handle into the left + /// child. + pub fn steal_right(&mut self) { + unsafe { + let (k, v, edge) = self.reborrow_mut().right_edge().descend().pop_front(); + + let k = mem::replace(self.reborrow_mut().into_kv_mut().0, k); + let v = mem::replace(self.reborrow_mut().into_kv_mut().1, v); + + match self.reborrow_mut().left_edge().descend().force() { + ForceResult::Leaf(mut leaf) => leaf.push(k, v), + ForceResult::Internal(mut internal) => internal.push(k, v, edge.unwrap()) + } + } + } + + /// This does stealing similar to `steal_left` but steals multiple elements at once. + pub fn bulk_steal_left(&mut self, n: usize) { + unsafe { + // Get raw pointers to left child's keys, values and edges. + let (left_len, left_k, left_v, left_e) = { + let mut left = self.reborrow_mut().left_edge().descend(); + + (left.len(), + left.keys_mut().as_mut_ptr(), + left.vals_mut().as_mut_ptr(), + match left.force() { + ForceResult::Leaf(_) => None, + ForceResult::Internal(mut i) => Some(i.as_internal_mut().edges.as_mut_ptr()), + }) + }; + + // Get raw pointers to right child's keys, values and edges. + let (right_len, right_k, right_v, right_e) = { + let mut right = self.reborrow_mut().right_edge().descend(); + + (right.len(), + right.keys_mut().as_mut_ptr(), + right.vals_mut().as_mut_ptr(), + match right.force() { + ForceResult::Leaf(_) => None, + ForceResult::Internal(mut i) => Some(i.as_internal_mut().edges.as_mut_ptr()), + }) + }; + + // Get raw pointers to parent's key and value. + let (parent_k, parent_v) = { + let kv = self.reborrow_mut().into_kv_mut(); + (kv.0 as *mut K, kv.1 as *mut V) + }; + + // Make sure that we may steal safely. + debug_assert!(right_len + n <= CAPACITY); + debug_assert!(left_len >= n); + + // Make room for stolen elements in right child. + ptr::copy(right_k, + right_k.offset(n as isize), + right_len); + ptr::copy(right_v, + right_v.offset(n as isize), + right_len); + if let Some(edges) = right_e { + ptr::copy(edges, + edges.offset(n as isize), + right_len+1); + } + + // Move elements from the left child to the right one. 
+ let left_ind = (left_len - n) as isize; + ptr::copy_nonoverlapping(left_k.offset(left_ind + 1), + right_k, + n - 1); + ptr::copy_nonoverlapping(left_v.offset(left_ind + 1), + right_v, + n - 1); + match (left_e, right_e) { + (Some(left), Some(right)) => { + ptr::copy_nonoverlapping(left.offset(left_ind + 1), + right, + n); + }, + (Some(_), None) => unreachable!(), + (None, Some(_)) => unreachable!(), + (None, None) => {}, + } + + // Copy parent key/value pair to right child. + ptr::copy_nonoverlapping(parent_k, + right_k.offset(n as isize - 1), + 1); + ptr::copy_nonoverlapping(parent_v, + right_v.offset(n as isize - 1), + 1); + // Copy left-most stolen pair to parent. + ptr::copy_nonoverlapping(left_k.offset(left_ind), + parent_k, + 1); + ptr::copy_nonoverlapping(left_v.offset(left_ind), + parent_v, + 1); + + // Fix lengths of left and right child and parent pointers in children of the right + // child. + self.reborrow_mut().left_edge().descend().as_leaf_mut().len -= n as u16; + let mut right = self.reborrow_mut().right_edge().descend(); + right.as_leaf_mut().len += n as u16; + if let ForceResult::Internal(mut node) = right.force() { + for i in 0..(right_len+n+1) { + Handle::new_edge(node.reborrow_mut(), i as usize).correct_parent_link(); + } + } + } + } } impl Handle, HandleType> { + /// Check whether the underlying node is an `Internal` node or a `Leaf` node. pub fn force(self) -> ForceResult< Handle, HandleType>, Handle, HandleType> diff --git a/src/libcollections/btree/set.rs b/src/libcollections/btree/set.rs index e679381f22..3ee42499a3 100644 --- a/src/libcollections/btree/set.rs +++ b/src/libcollections/btree/set.rs @@ -477,9 +477,9 @@ impl BTreeSet { /// Adds a value to the set. /// - /// If the set did not have a value present, `true` is returned. + /// If the set did not have this value present, `true` is returned. /// - /// If the set did have this key present, `false` is returned, and the + /// If the set did have this value present, `false` is returned, and the /// entry is not updated. See the [module-level documentation] for more. /// /// [module-level documentation]: index.html#insert-and-complex-keys @@ -545,6 +545,41 @@ impl BTreeSet { { Recover::take(&mut self.map, value) } + + /// Moves all elements from `other` into `Self`, leaving `other` empty. + /// + /// # Examples + /// + /// ``` + /// #![feature(btree_append)] + /// use std::collections::BTreeSet; + /// + /// let mut a = BTreeSet::new(); + /// a.insert(1); + /// a.insert(2); + /// a.insert(3); + /// + /// let mut b = BTreeSet::new(); + /// b.insert(3); + /// b.insert(4); + /// b.insert(5); + /// + /// a.append(&mut b); + /// + /// assert_eq!(a.len(), 5); + /// assert_eq!(b.len(), 0); + /// + /// assert!(a.contains(&1)); + /// assert!(a.contains(&2)); + /// assert!(a.contains(&3)); + /// assert!(a.contains(&4)); + /// assert!(a.contains(&5)); + /// ``` + #[unstable(feature = "btree_append", reason = "recently added as part of collections reform 2", + issue = "19986")] + pub fn append(&mut self, other: &mut Self) { + self.map.append(&mut other.map); + } } #[stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libcollections/fmt.rs b/src/libcollections/fmt.rs index e30e0b213a..0ebb89b8a2 100644 --- a/src/libcollections/fmt.rs +++ b/src/libcollections/fmt.rs @@ -81,7 +81,7 @@ //! //! ``` //! format!("{argument}", argument = "test"); // => "test" -//! format!("{name} {}", 1, name = 2); // => "2 1" +//! format!("{name} {}", 1, name = 2); // => "2 1" //! 
format!("{a} {c} {b}", a="a", b='b', c=3); // => "a 3 b" //! ``` //! @@ -104,8 +104,8 @@ //! octal. //! //! There are various parameters which do require a particular type, however. -//! Namely, the `{:.*}` syntax, which sets the number of numbers after the -//! decimal in floating-point types: +//! An example is the `{:.*}` syntax, which sets the number of decimal places +//! in floating-point types: //! //! ``` //! let formatted_number = format!("{:.*}", 2, 1.234567); @@ -292,15 +292,13 @@ //! use std::fmt; //! use std::io::{self, Write}; //! -//! fmt::format(format_args!("this returns {}", "String")); -//! //! let mut some_writer = io::stdout(); //! write!(&mut some_writer, "{}", format_args!("print with a {}", "macro")); //! //! fn my_fmt_fn(args: fmt::Arguments) { //! write!(&mut io::stdout(), "{}", args); //! } -//! my_fmt_fn(format_args!("or a {} too", "function")); +//! my_fmt_fn(format_args!(", or a {} too", "function")); //! ``` //! //! The result of the `format_args!` macro is a value of type `fmt::Arguments`. @@ -316,7 +314,7 @@ //! # Syntax //! //! The syntax for the formatting language used is drawn from other languages, -//! so it should not be too alien. Arguments are formatted with python-like +//! so it should not be too alien. Arguments are formatted with Python-like //! syntax, meaning that arguments are surrounded by `{}` instead of the C-like //! `%`. The actual grammar for the formatting syntax is: //! @@ -333,7 +331,7 @@ //! precision := count | '*' //! type := identifier | '' //! count := parameter | integer -//! parameter := integer '$' +//! parameter := argument '$' //! ``` //! //! # Formatting Parameters @@ -395,8 +393,20 @@ //! `0`. //! //! The value for the width can also be provided as a `usize` in the list of -//! parameters by using the `2$` syntax indicating that the second argument is a -//! `usize` specifying the width. +//! parameters by using the dollar syntax indicating that the second argument is +//! a `usize` specifying the width, for example: +//! +//! ``` +//! // All of these print "Hello x !" +//! println!("Hello {:5}!", "x"); +//! println!("Hello {:1$}!", "x", 5); +//! println!("Hello {1:0$}!", 5, "x"); +//! println!("Hello {:width$}!", "x", width = 5); +//! ``` +//! +//! Referring to an argument with the dollar syntax does not affect the "next +//! argument" counter, so it's usually a good idea to refer to arguments by +//! position, or use named arguments. //! //! ## Precision //! @@ -415,7 +425,7 @@ //! //! the integer `N` itself is the precision. //! -//! 2. An integer followed by dollar sign `.N$`: +//! 2. An integer or name followed by dollar sign `.N$`: //! //! use format *argument* `N` (which must be a `usize`) as the precision. //! @@ -445,6 +455,10 @@ //! // Hello {next arg (x)} is {arg 2 (0.01) with precision //! // specified in its predecessor (5)} //! println!("Hello {} is {2:.*}", "x", 5, 0.01); +//! +//! // Hello {next arg (x)} is {arg "number" (0.01) with precision specified +//! // in arg "prec" (5)} +//! println!("Hello {} is {number:.prec$}", "x", prec = 5, number = 0.01); //! ``` //! //! All print the same thing: @@ -505,12 +519,24 @@ use string; /// /// # Examples /// +/// Basic usage: +/// /// ``` /// use std::fmt; /// /// let s = fmt::format(format_args!("Hello, {}!", "world")); -/// assert_eq!(s, "Hello, world!".to_string()); +/// assert_eq!(s, "Hello, world!"); +/// ``` +/// +/// Please note that using [`format!`][format!] might be preferrable. 
+/// Example: +/// +/// ``` +/// let s = format!("Hello, {}!", "world"); +/// assert_eq!(s, "Hello, world!"); /// ``` +/// +/// [format!]: ../macro.format!.html #[stable(feature = "rust1", since = "1.0.0")] pub fn format(args: Arguments) -> string::String { let mut output = string::String::new(); diff --git a/src/libcollections/lib.rs b/src/libcollections/lib.rs index 7540c51e23..6ab66fc217 100644 --- a/src/libcollections/lib.rs +++ b/src/libcollections/lib.rs @@ -131,3 +131,10 @@ pub enum Bound { /// An infinite endpoint. Indicates that there is no bound in this direction. Unbounded, } + +/// An intermediate trait for specialization of `Extend`. +#[doc(hidden)] +trait SpecExtend { + /// Extends `self` with the contents of the given iterator. + fn spec_extend(&mut self, iter: I); +} diff --git a/src/libcollections/linked_list.rs b/src/libcollections/linked_list.rs index 85a4fa82e2..406b979a37 100644 --- a/src/libcollections/linked_list.rs +++ b/src/libcollections/linked_list.rs @@ -30,6 +30,8 @@ use core::mem; use core::ops::{BoxPlace, InPlace, Place, Placer}; use core::ptr::{self, Shared}; +use super::SpecExtend; + /// A doubly-linked list. #[stable(feature = "rust1", since = "1.0.0")] pub struct LinkedList { @@ -401,6 +403,16 @@ impl LinkedList { *self = LinkedList::new() } + /// Returns `true` if the `LinkedList` contains an element equal to the + /// given value. + #[unstable(feature = "linked_list_contains", reason = "recently added", + issue = "32630")] + pub fn contains(&self, x: &T) -> bool + where T: PartialEq + { + self.iter().any(|e| e == x) + } + /// Provides a reference to the front element, or `None` if the list is /// empty. /// @@ -969,12 +981,24 @@ impl<'a, T> IntoIterator for &'a mut LinkedList { #[stable(feature = "rust1", since = "1.0.0")] impl Extend for LinkedList { fn extend>(&mut self, iter: T) { + >::spec_extend(self, iter); + } +} + +impl SpecExtend for LinkedList { + default fn spec_extend(&mut self, iter: I) { for elt in iter { self.push_back(elt); } } } +impl SpecExtend> for LinkedList { + fn spec_extend(&mut self, ref mut other: LinkedList) { + self.append(other); + } +} + #[stable(feature = "extend_ref", since = "1.2.0")] impl<'a, T: 'a + Copy> Extend<&'a T> for LinkedList { fn extend>(&mut self, iter: I) { diff --git a/src/libcollections/slice.rs b/src/libcollections/slice.rs index db91d911c7..cef8a33703 100644 --- a/src/libcollections/slice.rs +++ b/src/libcollections/slice.rs @@ -419,8 +419,8 @@ impl [T] { /// /// ```rust /// let v = &[1, 2, 3, 4, 5]; - /// for win in v.chunks(2) { - /// println!("{:?}", win); + /// for chunk in v.chunks(2) { + /// println!("{:?}", chunk); /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] @@ -741,11 +741,47 @@ impl [T] { core_slice::SliceExt::binary_search_by(self, f) } - /// Sorts the slice, in place. + /// Binary search a sorted slice with a key extraction function. + /// + /// Assumes that the slice is sorted by the key, for instance with + /// `sort_by_key` using the same key extraction function. /// + /// If a matching value is found then returns `Ok`, containing the + /// index for the matched element; if no match is found then `Err` + /// is returned, containing the index where a matching element could + /// be inserted while maintaining sorted order. + /// + /// # Examples + /// + /// Looks up a series of four elements in a slice of pairs sorted by + /// their second elements. 
The first is found, with a uniquely + /// determined position; the second and third are not found; the + /// fourth could match any position in `[1,4]`. + /// + /// ```rust + /// let s = [(0, 0), (2, 1), (4, 1), (5, 1), (3, 1), + /// (1, 2), (2, 3), (4, 5), (5, 8), (3, 13), + /// (1, 21), (2, 34), (4, 55)]; + /// + /// assert_eq!(s.binary_search_by_key(&13, |&(a,b)| b), Ok(9)); + /// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7)); + /// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13)); + /// let r = s.binary_search_by_key(&1, |&(a,b)| b); + /// assert!(match r { Ok(1...4) => true, _ => false, }); + /// ``` + #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")] + #[inline] + pub fn binary_search_by_key(&self, b: &B, f: F) -> Result + where F: FnMut(&T) -> B, + B: Ord + { + core_slice::SliceExt::binary_search_by_key(self, b, f) + } + /// This is equivalent to `self.sort_by(|a, b| a.cmp(b))`. /// - /// This is a stable sort. + /// This sort is stable and `O(n log n)` worst-case but allocates + /// approximately `2 * n` where `n` is the length of `self`. /// /// # Examples /// @@ -766,11 +802,9 @@ impl [T] { /// Sorts the slice, in place, using `key` to extract a key by which to /// order the sort by. /// - /// This sort is `O(n log n)` worst-case and stable, but allocates + /// This sort is stable and `O(n log n)` worst-case but allocates /// approximately `2 * n`, where `n` is the length of `self`. /// - /// This is a stable sort. - /// /// # Examples /// /// ```rust @@ -790,7 +824,7 @@ impl [T] { /// Sorts the slice, in place, using `compare` to compare /// elements. /// - /// This sort is `O(n log n)` worst-case and stable, but allocates + /// This sort is stable and `O(n log n)` worst-case but allocates /// approximately `2 * n`, where `n` is the length of `self`. /// /// # Examples diff --git a/src/libcollections/str.rs b/src/libcollections/str.rs index b2b1e019a1..2059943bfd 100644 --- a/src/libcollections/str.rs +++ b/src/libcollections/str.rs @@ -634,9 +634,9 @@ impl str { /// Basic usage: /// /// ``` - /// let s = "Per Martin-Löf"; + /// let mut s = "Per Martin-Löf".to_string(); /// - /// let (first, last) = s.split_at(3); + /// let (first, last) = s.split_at_mut(3); /// /// assert_eq!("Per", first); /// assert_eq!(" Martin-Löf", last); diff --git a/src/libcollections/string.rs b/src/libcollections/string.rs index 2226116585..eedf4c2c11 100644 --- a/src/libcollections/string.rs +++ b/src/libcollections/string.rs @@ -184,7 +184,7 @@ use boxed::Box; /// let len = story.len(); /// let capacity = story.capacity(); /// -/// // story has thirteen bytes +/// // story has nineteen bytes /// assert_eq!(19, len); /// /// // Now that we have our parts, we throw the story away. @@ -992,10 +992,12 @@ impl String { /// Shortens this `String` to the specified length. /// + /// If `new_len` is greater than the string's current length, this has no + /// effect. + /// /// # Panics /// - /// Panics if `new_len` > current length, or if `new_len` does not lie on a - /// [`char`] boundary. + /// Panics if `new_len` does not lie on a [`char`] boundary. 
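Concretely, only the `char`-boundary check can panic now; an over-long `new_len` is simply ignored, matching the updated `test_str_truncate_invalid_len` further down. A short sketch:

```
fn main() {
    let mut s = String::from("12345");

    // new_len greater than the current length: now a no-op rather than a panic
    s.truncate(6);
    assert_eq!(s, "12345");

    // shortening still works as before
    s.truncate(3);
    assert_eq!(s, "123");
}
```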
/// /// [`char`]: ../../std/primitive.char.html /// @@ -1013,8 +1015,10 @@ impl String { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn truncate(&mut self, new_len: usize) { - assert!(self.is_char_boundary(new_len)); - self.vec.truncate(new_len) + if new_len <= self.len() { + assert!(self.is_char_boundary(new_len)); + self.vec.truncate(new_len) + } } /// Removes the last character from the string buffer and returns it. diff --git a/src/libcollections/vec.rs b/src/libcollections/vec.rs index dde5cbb508..58d4a4ed4e 100644 --- a/src/libcollections/vec.rs +++ b/src/libcollections/vec.rs @@ -75,6 +75,7 @@ use core::ops; use core::ptr; use core::slice; +use super::SpecExtend; use super::range::RangeArgument; /// A contiguous growable array type, written `Vec` but pronounced 'vector.' @@ -1390,10 +1391,22 @@ impl<'a, T> IntoIterator for &'a mut Vec { impl Extend for Vec { #[inline] fn extend>(&mut self, iter: I) { + >::spec_extend(self, iter); + } +} + +impl SpecExtend for Vec { + default fn spec_extend(&mut self, iter: I) { self.extend_desugared(iter.into_iter()) } } +impl SpecExtend> for Vec { + fn spec_extend(&mut self, ref mut other: Vec) { + self.append(other); + } +} + impl Vec { fn extend_desugared>(&mut self, mut iterator: I) { // This function should be the moral equivalent of: diff --git a/src/libcollections/vec_deque.rs b/src/libcollections/vec_deque.rs index 9e2b25d178..84a0bbbd24 100644 --- a/src/libcollections/vec_deque.rs +++ b/src/libcollections/vec_deque.rs @@ -32,6 +32,7 @@ use core::cmp; use alloc::raw_vec::RawVec; use super::range::RangeArgument; +use super::vec::Vec; const INITIAL_CAPACITY: usize = 7; // 2^3 - 1 const MINIMUM_CAPACITY: usize = 1; // 2 - 1 @@ -872,6 +873,17 @@ impl VecDeque { self.drain(..); } + /// Returns `true` if the `VecDeque` contains an element equal to the + /// given value. + #[unstable(feature = "vec_deque_contains", reason = "recently added", + issue = "32630")] + pub fn contains(&self, x: &T) -> bool + where T: PartialEq + { + let (a, b) = self.as_slices(); + a.contains(x) || b.contains(x) + } + /// Provides a reference to the front element, or `None` if the sequence is /// empty. /// @@ -2121,6 +2133,106 @@ impl fmt::Debug for VecDeque { } } +#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] +impl From> for VecDeque { + fn from(mut other: Vec) -> Self { + unsafe { + let other_buf = other.as_mut_ptr(); + let mut buf = RawVec::from_raw_parts(other_buf, other.capacity()); + let len = other.len(); + mem::forget(other); + + // We need to extend the buf if it's not a power of two, too small + // or doesn't have at least one free space + if !buf.cap().is_power_of_two() + || (buf.cap() < (MINIMUM_CAPACITY + 1)) + || (buf.cap() == len) + { + let cap = cmp::max(buf.cap() + 1, MINIMUM_CAPACITY + 1).next_power_of_two(); + buf.reserve_exact(len, cap - len); + } + + VecDeque { + tail: 0, + head: len, + buf: buf + } + } + } +} + +#[stable(feature = "vecdeque_vec_conversions", since = "1.10.0")] +impl From> for Vec { + fn from(other: VecDeque) -> Self { + unsafe { + let buf = other.buf.ptr(); + let len = other.len(); + let tail = other.tail; + let head = other.head; + let cap = other.cap(); + + // Need to move the ring to the front of the buffer, as vec will expect this. + if other.is_contiguous() { + ptr::copy(buf.offset(tail as isize), buf, len); + } else { + if (tail - head) >= cmp::min((cap - tail), head) { + // There is enough free space in the centre for the shortest block so we can + // do this in at most three copy moves. 
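+ // (`tail - head` is the number of free slots between the two blocks, and
+ // `cmp::min(cap - tail, head)` is the length of the shorter block, so the
+ // check above says: the centre gap can hold the shorter of the two blocks.)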
+ if (cap - tail) > head { + // right hand block is the long one; move that enough for the left + ptr::copy( + buf.offset(tail as isize), + buf.offset((tail - head) as isize), + cap - tail); + // copy left in the end + ptr::copy(buf, buf.offset((cap - head) as isize), head); + // shift the new thing to the start + ptr::copy(buf.offset((tail-head) as isize), buf, len); + } else { + // left hand block is the long one, we can do it in two! + ptr::copy(buf, buf.offset((cap-tail) as isize), head); + ptr::copy(buf.offset(tail as isize), buf, cap-tail); + } + } else { + // Need to use N swaps to move the ring + // We can use the space at the end of the ring as a temp store + + let mut left_edge: usize = 0; + let mut right_edge: usize = tail; + + // The general problem looks like this + // GHIJKLM...ABCDEF - before any swaps + // ABCDEFM...GHIJKL - after 1 pass of swaps + // ABCDEFGHIJM...KL - swap until the left edge reaches the temp store + // - then restart the algorithm with a new (smaller) store + // Sometimes the temp store is reached when the right edge is at the end + // of the buffer - this means we've hit the right order with fewer swaps! + // E.g + // EF..ABCD + // ABCDEF.. - after four only swaps we've finished + + while left_edge < len && right_edge != cap { + let mut right_offset = 0; + for i in left_edge..right_edge { + right_offset = (i - left_edge) % (cap - right_edge); + let src: isize = (right_edge + right_offset) as isize; + ptr::swap(buf.offset(i as isize), buf.offset(src)); + } + let n_ops = right_edge - left_edge; + left_edge += n_ops; + right_edge += right_offset + 1; + + } + } + + } + let out = Vec::from_raw_parts(buf, len, cap); + mem::forget(other); + out + } + } +} + #[cfg(test)] mod tests { use core::iter::Iterator; @@ -2401,4 +2513,82 @@ mod tests { } } } + + #[test] + fn test_from_vec() { + use super::super::vec::Vec; + for cap in 0..35 { + for len in 0..cap + 1 { + let mut vec = Vec::with_capacity(cap); + vec.extend(0..len); + + let vd = VecDeque::from(vec.clone()); + assert!(vd.cap().is_power_of_two()); + assert_eq!(vd.len(), vec.len()); + assert!(vd.into_iter().eq(vec)); + } + } + } + + #[test] + fn test_vec_from_vecdeque() { + use super::super::vec::Vec; + + fn create_vec_and_test_convert(cap: usize, offset: usize, len: usize) { + let mut vd = VecDeque::with_capacity(cap); + for _ in 0..offset { + vd.push_back(0); + vd.pop_front(); + } + vd.extend(0..len); + + let vec: Vec<_> = Vec::from(vd.clone()); + assert_eq!(vec.len(), vd.len()); + assert!(vec.into_iter().eq(vd)); + } + + for cap_pwr in 0..7 { + // Make capacity as a (2^x)-1, so that the ring size is 2^x + let cap = (2i32.pow(cap_pwr) - 1) as usize; + + // In these cases there is enough free space to solve it with copies + for len in 0..((cap+1)/2) { + // Test contiguous cases + for offset in 0..(cap-len) { + create_vec_and_test_convert(cap, offset, len) + } + + // Test cases where block at end of buffer is bigger than block at start + for offset in (cap-len)..(cap-(len/2)) { + create_vec_and_test_convert(cap, offset, len) + } + + // Test cases where block at start of buffer is bigger than block at end + for offset in (cap-(len/2))..cap { + create_vec_and_test_convert(cap, offset, len) + } + } + + // Now there's not (necessarily) space to straighten the ring with simple copies, + // the ring will use swapping when: + // (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len)) + // right block size > free space && left block size > free space + for len in ((cap+1)/2)..cap { + // 
Test contiguous cases + for offset in 0..(cap-len) { + create_vec_and_test_convert(cap, offset, len) + } + + // Test cases where block at end of buffer is bigger than block at start + for offset in (cap-len)..(cap-(len/2)) { + create_vec_and_test_convert(cap, offset, len) + } + + // Test cases where block at start of buffer is bigger than block at end + for offset in (cap-(len/2))..cap { + create_vec_and_test_convert(cap, offset, len) + } + } + } + } } diff --git a/src/libcollectionstest/binary_heap.rs b/src/libcollectionstest/binary_heap.rs index cc4366e8ae..58194fe75f 100644 --- a/src/libcollectionstest/binary_heap.rs +++ b/src/libcollectionstest/binary_heap.rs @@ -242,3 +242,35 @@ fn test_extend_ref() { assert_eq!(a.len(), 5); assert_eq!(a.into_sorted_vec(), [1, 2, 3, 4, 5]); } + +#[test] +fn test_append() { + let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]); + let mut b = BinaryHeap::from(vec![-20, 5, 43]); + + a.append(&mut b); + + assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]); + assert!(b.is_empty()); +} + +#[test] +fn test_append_to_empty() { + let mut a = BinaryHeap::new(); + let mut b = BinaryHeap::from(vec![-20, 5, 43]); + + a.append(&mut b); + + assert_eq!(a.into_sorted_vec(), [-20, 5, 43]); + assert!(b.is_empty()); +} + +#[test] +fn test_extend_specialization() { + let mut a = BinaryHeap::from(vec![-10, 1, 2, 3, 3]); + let b = BinaryHeap::from(vec![-20, 5, 43]); + + a.extend(b); + + assert_eq!(a.into_sorted_vec(), [-20, -10, 1, 2, 3, 3, 5, 43]); +} diff --git a/src/libcollectionstest/btree/map.rs b/src/libcollectionstest/btree/map.rs index 619bc189e6..1858791776 100644 --- a/src/libcollectionstest/btree/map.rs +++ b/src/libcollectionstest/btree/map.rs @@ -446,6 +446,58 @@ fn test_vacant_entry_key() { assert_eq!(a[key], value); } +macro_rules! create_append_test { + ($name:ident, $len:expr) => { + #[test] + fn $name() { + let mut a = BTreeMap::new(); + for i in 0..8 { + a.insert(i, i); + } + + let mut b = BTreeMap::new(); + for i in 5..$len { + b.insert(i, 2*i); + } + + a.append(&mut b); + + assert_eq!(a.len(), $len); + assert_eq!(b.len(), 0); + + for i in 0..$len { + if i < 5 { + assert_eq!(a[&i], i); + } else { + assert_eq!(a[&i], 2*i); + } + } + + assert_eq!(a.remove(&($len-1)), Some(2*($len-1))); + assert_eq!(a.insert($len-1, 20), None); + } + }; +} + +// These are mostly for testing the algorithm that "fixes" the right edge after insertion. +// Single node. +create_append_test!(test_append_9, 9); +// Two leafs that don't need fixing. +create_append_test!(test_append_17, 17); +// Two leafs where the second one ends up underfull and needs stealing at the end. +create_append_test!(test_append_14, 14); +// Two leafs where the second one ends up empty because the insertion finished at the root. +create_append_test!(test_append_12, 12); +// Three levels; insertion finished at the root. +create_append_test!(test_append_144, 144); +// Three levels; insertion finished at leaf while there is an empty node on the second level. +create_append_test!(test_append_145, 145); +// Tests for several randomly chosen sizes. 
+create_append_test!(test_append_170, 170); +create_append_test!(test_append_181, 181); +create_append_test!(test_append_239, 239); +create_append_test!(test_append_1700, 1700); + mod bench { use std::collections::BTreeMap; use std::__rand::{Rng, thread_rng}; diff --git a/src/libcollectionstest/btree/set.rs b/src/libcollectionstest/btree/set.rs index 3928804a8e..53ccfd5b4e 100644 --- a/src/libcollectionstest/btree/set.rs +++ b/src/libcollectionstest/btree/set.rs @@ -265,3 +265,27 @@ fn test_variance() { fn into_iter<'new>(v: IntoIter<&'static str>) -> IntoIter<&'new str> { v } fn range<'a, 'new>(v: Range<'a, &'static str>) -> Range<'a, &'new str> { v } } + +#[test] +fn test_append() { + let mut a = BTreeSet::new(); + a.insert(1); + a.insert(2); + a.insert(3); + + let mut b = BTreeSet::new(); + b.insert(3); + b.insert(4); + b.insert(5); + + a.append(&mut b); + + assert_eq!(a.len(), 5); + assert_eq!(b.len(), 0); + + assert_eq!(a.contains(&1), true); + assert_eq!(a.contains(&2), true); + assert_eq!(a.contains(&3), true); + assert_eq!(a.contains(&4), true); + assert_eq!(a.contains(&5), true); +} diff --git a/src/libcollectionstest/lib.rs b/src/libcollectionstest/lib.rs index 211942f229..bae21f1bd9 100644 --- a/src/libcollectionstest/lib.rs +++ b/src/libcollectionstest/lib.rs @@ -11,7 +11,9 @@ #![deny(warnings)] #![feature(binary_heap_extras)] +#![feature(binary_heap_append)] #![feature(box_syntax)] +#![feature(btree_append)] #![feature(btree_range)] #![feature(collections)] #![feature(collections_bound)] @@ -19,8 +21,7 @@ #![feature(fn_traits)] #![feature(enumset)] #![feature(iter_arith)] -#![feature(map_entry_keys)] -#![feature(map_values_mut)] +#![feature(linked_list_contains)] #![feature(pattern)] #![feature(rand)] #![feature(step_by)] @@ -29,6 +30,7 @@ #![feature(test)] #![feature(unboxed_closures)] #![feature(unicode)] +#![feature(vec_deque_contains)] extern crate collections; extern crate test; diff --git a/src/libcollectionstest/linked_list.rs b/src/libcollectionstest/linked_list.rs index 7dac967d80..7265d53be4 100644 --- a/src/libcollectionstest/linked_list.rs +++ b/src/libcollectionstest/linked_list.rs @@ -339,6 +339,22 @@ fn test_extend_ref() { assert_eq!(a, list_from(&[1, 2, 3, 4, 5, 6])); } +#[test] +fn test_extend() { + let mut a = LinkedList::new(); + a.push_back(1); + a.extend(vec![2, 3, 4]); // uses iterator + + assert_eq!(a.len(), 4); + assert!(a.iter().eq(&[1, 2, 3, 4])); + + let b: LinkedList<_> = vec![5, 6, 7].into_iter().collect(); + a.extend(b); // specializes to `append` + + assert_eq!(a.len(), 7); + assert!(a.iter().eq(&[1, 2, 3, 4, 5, 6, 7])); +} + #[bench] fn bench_collect_into(b: &mut test::Bencher) { let v = &[0; 64]; @@ -413,3 +429,16 @@ fn bench_iter_mut_rev(b: &mut test::Bencher) { assert!(m.iter_mut().rev().count() == 128); }) } + +#[test] +fn test_contains() { + let mut l = LinkedList::new(); + l.extend(&[2, 3, 4]); + + assert!(l.contains(&3)); + assert!(!l.contains(&1)); + + l.clear(); + + assert!(!l.contains(&3)); +} diff --git a/src/libcollectionstest/str.rs b/src/libcollectionstest/str.rs index 929ac7a52a..a1820a1cb9 100644 --- a/src/libcollectionstest/str.rs +++ b/src/libcollectionstest/str.rs @@ -346,6 +346,22 @@ fn test_slice_fail() { &"中华Việt Nam"[0..2]; } + +#[test] +fn test_is_char_boundary() { + let s = "ศไทย中华Việt Nam β-release 🐱123"; + assert!(s.is_char_boundary(0)); + assert!(s.is_char_boundary(s.len())); + assert!(!s.is_char_boundary(s.len() + 1)); + for (i, ch) in s.char_indices() { + // ensure character locations are boundaries and continuation 
bytes are not + assert!(s.is_char_boundary(i), "{} is a char boundary in {:?}", i, s); + for j in 1..ch.len_utf8() { + assert!(!s.is_char_boundary(i + j), + "{} should not be a char boundary in {:?}", i + j, s); + } + } +} const LOREM_PARAGRAPH: &'static str = "\ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \ ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \ diff --git a/src/libcollectionstest/string.rs b/src/libcollectionstest/string.rs index d8e01f3800..c2eafa1b90 100644 --- a/src/libcollectionstest/string.rs +++ b/src/libcollectionstest/string.rs @@ -52,7 +52,7 @@ fn test_from_utf8() { String::from("ศไทย中华Việt Nam")); let xs = b"hello\xFF".to_vec(); - let err = String::from_utf8(xs).err().unwrap(); + let err = String::from_utf8(xs).unwrap_err(); assert_eq!(err.into_bytes(), b"hello\xff".to_vec()); } @@ -248,10 +248,10 @@ fn test_str_truncate() { } #[test] -#[should_panic] fn test_str_truncate_invalid_len() { let mut s = String::from("12345"); s.truncate(6); + assert_eq!(s, "12345"); } #[test] diff --git a/src/libcollectionstest/vec.rs b/src/libcollectionstest/vec.rs index ccdbf1092f..0fb00543dd 100644 --- a/src/libcollectionstest/vec.rs +++ b/src/libcollectionstest/vec.rs @@ -93,6 +93,9 @@ fn test_extend() { for i in 3..10 { w.push(i) } assert_eq!(v, w); + + v.extend(w.clone()); // specializes to `append` + assert!(v.iter().eq(w.iter().chain(w.iter()))); } #[test] diff --git a/src/libcollectionstest/vec_deque.rs b/src/libcollectionstest/vec_deque.rs index 95675a2423..05af9bd704 100644 --- a/src/libcollectionstest/vec_deque.rs +++ b/src/libcollectionstest/vec_deque.rs @@ -959,3 +959,16 @@ fn test_extend_ref() { assert_eq!(v[4], 5); assert_eq!(v[5], 6); } + +#[test] +fn test_contains() { + let mut v = VecDeque::new(); + v.extend(&[2, 3, 4]); + + assert!(v.contains(&3)); + assert!(!v.contains(&1)); + + v.clear(); + + assert!(!v.contains(&3)); +} diff --git a/src/libcore/Cargo.toml b/src/libcore/Cargo.toml index 98f941f005..02fe574b81 100644 --- a/src/libcore/Cargo.toml +++ b/src/libcore/Cargo.toml @@ -8,3 +8,7 @@ build = "build.rs" name = "core" path = "lib.rs" test = false + +[[test]] +name = "coretest" +path = "../libcoretest/lib.rs" diff --git a/src/libcore/build.rs b/src/libcore/build.rs index a991ac0de1..255a367e58 100644 --- a/src/libcore/build.rs +++ b/src/libcore/build.rs @@ -8,7 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#![deny(warnings)] + fn main() { // Remove this whenever snapshots and rustbuild nightlies are synced. println!("cargo:rustc-cfg=cargobuild"); + println!("cargo:rerun-if-changed=build.rs") } diff --git a/src/libcore/cell.rs b/src/libcore/cell.rs index a1c7a293af..4929088201 100644 --- a/src/libcore/cell.rs +++ b/src/libcore/cell.rs @@ -145,7 +145,7 @@ #![stable(feature = "rust1", since = "1.0.0")] use clone::Clone; -use cmp::{PartialEq, Eq}; +use cmp::{PartialEq, Eq, PartialOrd, Ord, Ordering}; use default::Default; use marker::{Copy, Send, Sync, Sized, Unsize}; use ops::{Deref, DerefMut, Drop, FnOnce, CoerceUnsized}; @@ -232,6 +232,18 @@ impl Cell { pub fn as_unsafe_cell(&self) -> &UnsafeCell { &self.value } + + /// Returns a mutable reference to the underlying data. + /// + /// This call borrows `Cell` mutably (at compile-time) which guarantees + /// that we possess the only reference. 
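A short sketch of how the `get_mut` accessors added in this change can be used; it assumes a nightly compiler, since the methods are gated behind the `cell_get_mut` feature (issue 33444):

```
#![feature(cell_get_mut)] // unstable at this point

use std::cell::{Cell, RefCell};

fn main() {
    let mut cell = Cell::new(5);
    *cell.get_mut() += 1;              // no get()/set() round trip needed
    assert_eq!(cell.get(), 6);

    let mut rc = RefCell::new(vec![1, 2]);
    rc.get_mut().push(3);              // no runtime borrow flag is consulted
    assert_eq!(*rc.borrow(), vec![1, 2, 3]);
}
```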
+ #[inline] + #[unstable(feature = "cell_get_mut", issue = "33444")] + pub fn get_mut(&mut self) -> &mut T { + unsafe { + &mut *self.value.get() + } + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -267,6 +279,42 @@ impl PartialEq for Cell { #[stable(feature = "cell_eq", since = "1.2.0")] impl Eq for Cell {} +#[stable(feature = "cell_ord", since = "1.10.0")] +impl PartialOrd for Cell { + #[inline] + fn partial_cmp(&self, other: &Cell) -> Option { + self.get().partial_cmp(&other.get()) + } + + #[inline] + fn lt(&self, other: &Cell) -> bool { + self.get() < other.get() + } + + #[inline] + fn le(&self, other: &Cell) -> bool { + self.get() <= other.get() + } + + #[inline] + fn gt(&self, other: &Cell) -> bool { + self.get() > other.get() + } + + #[inline] + fn ge(&self, other: &Cell) -> bool { + self.get() >= other.get() + } +} + +#[stable(feature = "cell_ord", since = "1.10.0")] +impl Ord for Cell { + #[inline] + fn cmp(&self, other: &Cell) -> Ordering { + self.get().cmp(&other.get()) + } +} + /// A mutable memory location with dynamically checked borrow rules /// /// See the [module-level documentation](index.html) for more. @@ -455,6 +503,18 @@ impl RefCell { pub unsafe fn as_unsafe_cell(&self) -> &UnsafeCell { &self.value } + + /// Returns a mutable reference to the underlying data. + /// + /// This call borrows `RefCell` mutably (at compile-time) so there is no + /// need for dynamic checks. + #[inline] + #[unstable(feature = "cell_get_mut", issue="33444")] + pub fn get_mut(&mut self) -> &mut T { + unsafe { + &mut *self.value.get() + } + } } #[stable(feature = "rust1", since = "1.0.0")] @@ -490,6 +550,42 @@ impl PartialEq for RefCell { #[stable(feature = "cell_eq", since = "1.2.0")] impl Eq for RefCell {} +#[stable(feature = "cell_ord", since = "1.10.0")] +impl PartialOrd for RefCell { + #[inline] + fn partial_cmp(&self, other: &RefCell) -> Option { + self.borrow().partial_cmp(&*other.borrow()) + } + + #[inline] + fn lt(&self, other: &RefCell) -> bool { + *self.borrow() < *other.borrow() + } + + #[inline] + fn le(&self, other: &RefCell) -> bool { + *self.borrow() <= *other.borrow() + } + + #[inline] + fn gt(&self, other: &RefCell) -> bool { + *self.borrow() > *other.borrow() + } + + #[inline] + fn ge(&self, other: &RefCell) -> bool { + *self.borrow() >= *other.borrow() + } +} + +#[stable(feature = "cell_ord", since = "1.10.0")] +impl Ord for RefCell { + #[inline] + fn cmp(&self, other: &RefCell) -> Ordering { + self.borrow().cmp(&*other.borrow()) + } +} + struct BorrowRef<'b> { borrow: &'b Cell, } @@ -859,3 +955,10 @@ impl UnsafeCell { &self.value as *const T as *mut T } } + +#[stable(feature = "unsafe_cell_default", since = "1.9.0")] +impl Default for UnsafeCell { + fn default() -> UnsafeCell { + UnsafeCell::new(Default::default()) + } +} diff --git a/src/libcore/char.rs b/src/libcore/char.rs index b2b1dc5178..6a2331dddc 100644 --- a/src/libcore/char.rs +++ b/src/libcore/char.rs @@ -15,11 +15,9 @@ #![allow(non_snake_case)] #![stable(feature = "core_char", since = "1.2.0")] -use iter::Iterator; +use prelude::v1::*; + use mem::transmute; -use option::Option::{None, Some}; -use option::Option; -use slice::SliceExt; // UTF-8 ranges and tags for encoding characters const TAG_CONT: u8 = 0b1000_0000; @@ -299,7 +297,20 @@ impl CharExt for char { #[inline] fn escape_unicode(self) -> EscapeUnicode { - EscapeUnicode { c: self, state: EscapeUnicodeState::Backslash } + let c = self as u32; + + // or-ing 1 ensures that for c==0 the code computes that one + // digit should be printed and (which is 
the same) avoids the + // (31 - 32) underflow + let msb = 31 - (c | 1).leading_zeros(); + + // the index of the most significant hex digit + let ms_hex_digit = msb / 4; + EscapeUnicode { + c: self, + state: EscapeUnicodeState::Backslash, + hex_digit_idx: ms_hex_digit as usize, + } } #[inline] @@ -392,7 +403,12 @@ impl CharExt for char { #[stable(feature = "rust1", since = "1.0.0")] pub struct EscapeUnicode { c: char, - state: EscapeUnicodeState + state: EscapeUnicodeState, + + // The index of the next hex digit to be printed (0 if none), + // i.e. the number of remaining hex digits to be printed; + // increasing from the least significant digit: 0x543210 + hex_digit_idx: usize, } #[derive(Clone, Debug)] @@ -400,7 +416,7 @@ enum EscapeUnicodeState { Backslash, Type, LeftBrace, - Value(usize), + Value, RightBrace, Done, } @@ -420,19 +436,16 @@ impl Iterator for EscapeUnicode { Some('u') } EscapeUnicodeState::LeftBrace => { - let mut n = 0; - while (self.c as u32) >> (4 * (n + 1)) != 0 { - n += 1; - } - self.state = EscapeUnicodeState::Value(n); + self.state = EscapeUnicodeState::Value; Some('{') } - EscapeUnicodeState::Value(offset) => { - let c = from_digit(((self.c as u32) >> (offset * 4)) & 0xf, 16).unwrap(); - if offset == 0 { + EscapeUnicodeState::Value => { + let hex_digit = ((self.c as u32) >> (self.hex_digit_idx * 4)) & 0xf; + let c = from_digit(hex_digit, 16).unwrap(); + if self.hex_digit_idx == 0 { self.state = EscapeUnicodeState::RightBrace; } else { - self.state = EscapeUnicodeState::Value(offset - 1); + self.hex_digit_idx -= 1; } Some(c) } @@ -445,20 +458,29 @@ impl Iterator for EscapeUnicode { } fn size_hint(&self) -> (usize, Option) { - let mut n = 0; - while (self.c as usize) >> (4 * (n + 1)) != 0 { - n += 1; - } let n = match self.state { - EscapeUnicodeState::Backslash => n + 5, - EscapeUnicodeState::Type => n + 4, - EscapeUnicodeState::LeftBrace => n + 3, - EscapeUnicodeState::Value(offset) => offset + 2, + EscapeUnicodeState::Backslash => 5, + EscapeUnicodeState::Type => 4, + EscapeUnicodeState::LeftBrace => 3, + EscapeUnicodeState::Value => 2, EscapeUnicodeState::RightBrace => 1, EscapeUnicodeState::Done => 0, }; + let n = n + self.hex_digit_idx; (n, Some(n)) } + + fn last(self) -> Option { + match self.state { + EscapeUnicodeState::Done => None, + + EscapeUnicodeState::RightBrace | + EscapeUnicodeState::Value | + EscapeUnicodeState::LeftBrace | + EscapeUnicodeState::Type | + EscapeUnicodeState::Backslash => Some('}'), + } + } } /// An iterator that yields the literal escape code of a `char`. diff --git a/src/libcore/clone.rs b/src/libcore/clone.rs index a793502e58..e8ea993c69 100644 --- a/src/libcore/clone.rs +++ b/src/libcore/clone.rs @@ -49,6 +49,11 @@ use marker::Sized; /// A common trait for cloning an object. /// /// This trait can be used with `#[derive]`. +/// +/// Types that are `Copy` should have a trivial implementation of `Clone`. More formally: +/// if `T: Copy`, `x: T`, and `y: &T`, then `let x = y.clone();` is equivalent to `let x = *y;`. +/// Manual implementations should be careful to uphold this invariant; however, unsafe code +/// must not rely on it to ensure memory safety. #[stable(feature = "rust1", since = "1.0.0")] pub trait Clone : Sized { /// Returns a copy of the value. @@ -75,6 +80,17 @@ pub trait Clone : Sized { } } +// FIXME(aburka): this method is used solely by #[derive] to +// assert that every component of a type implements Clone. +// +// This should never be called by user code. 
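To make the `Copy`/`Clone` equivalence stated above concrete, a tiny sketch with an illustrative `Point` type:

```
#[derive(Copy, Clone, PartialEq, Debug)]
struct Point {
    x: i32,
    y: i32,
}

fn main() {
    let y = &Point { x: 1, y: 2 };
    // For a Copy type, cloning through a reference and plain dereferencing
    // must yield the same value.
    assert_eq!(y.clone(), *y);
}
```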
+#[doc(hidden)] +#[inline(always)] +#[unstable(feature = "derive_clone_copy", + reason = "deriving hack, should not be public", + issue = "0")] +pub fn assert_receiver_is_clone(_: &T) {} + #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T: ?Sized> Clone for &'a T { /// Returns a shallow copy of the reference. diff --git a/src/libcore/cmp.rs b/src/libcore/cmp.rs index 49aa0238a9..d3481ba3f0 100644 --- a/src/libcore/cmp.rs +++ b/src/libcore/cmp.rs @@ -128,7 +128,7 @@ pub trait Eq: PartialEq { /// let result = 2.cmp(&1); /// assert_eq!(Ordering::Greater, result); /// ``` -#[derive(Clone, Copy, PartialEq, Debug)] +#[derive(Clone, Copy, PartialEq, Debug, Hash)] #[stable(feature = "rust1", since = "1.0.0")] pub enum Ordering { /// An ordering where a compared value is less [than another]. diff --git a/src/libcore/convert.rs b/src/libcore/convert.rs index 2d999868f7..48421abc7b 100644 --- a/src/libcore/convert.rs +++ b/src/libcore/convert.rs @@ -20,18 +20,19 @@ //! - Impl the `As*` traits for reference-to-reference conversions //! - Impl the `Into` trait when you want to consume the value in the conversion //! - The `From` trait is the most flexible, useful for value _and_ reference conversions +//! - The `TryFrom` and `TryInto` traits behave like `From` and `Into`, but allow for the +//! conversion to fail //! -//! As a library author, you should prefer implementing `From` rather than -//! `Into`, as `From` provides greater flexibility and offers an equivalent `Into` -//! implementation for free, thanks to a blanket implementation in the standard library. -//! -//! **Note: these traits must not fail**. If the conversion can fail, you must use a dedicated -//! method which returns an `Option` or a `Result`. +//! As a library author, you should prefer implementing `From` or `TryFrom` rather than +//! `Into` or `TryInto`, as `From` and `TryFrom` provide greater flexibility and offer +//! equivalent `Into` or `TryInto` implementations for free, thanks to a blanket implementation +//! in the standard library. //! //! # Generic impl //! //! - `AsRef` and `AsMut` auto-dereference if the inner type is a reference //! - `From for T` implies `Into for U` +//! - `TryFrom for T` implies `TryInto for U` //! - `From` and `Into` are reflexive, which means that all types can `into()` //! themselves and `from()` themselves //! @@ -40,6 +41,7 @@ #![stable(feature = "rust1", since = "1.0.0")] use marker::Sized; +use result::Result; /// A cheap, reference-to-reference conversion. /// @@ -98,8 +100,8 @@ pub trait AsMut { /// A conversion that consumes `self`, which may or may not be expensive. /// -/// **Note: this trait must not fail**. If the conversion can fail, use a dedicated method which -/// returns an `Option` or a `Result`. +/// **Note: this trait must not fail**. If the conversion can fail, use `TryInto` or a dedicated +/// method which returns an `Option` or a `Result`. /// /// Library authors should not directly implement this trait, but should prefer implementing /// the `From` trait, which offers greater flexibility and provides an equivalent `Into` @@ -133,8 +135,8 @@ pub trait Into: Sized { /// Construct `Self` via a conversion. /// -/// **Note: this trait must not fail**. If the conversion can fail, use a dedicated method which -/// returns an `Option` or a `Result`. +/// **Note: this trait must not fail**. If the conversion can fail, use `TryFrom` or a dedicated +/// method which returns an `Option` or a `Result`. 
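The `TryFrom`/`TryInto` pair defined further down is the fallible counterpart of `From`/`Into`. A minimal sketch of implementing it, assuming a nightly compiler with the `try_from` feature gate and an illustrative `EvenNumber` type:

```
#![feature(try_from)] // unstable, tracking issue 33417

use std::convert::TryFrom;

struct EvenNumber(i32);

impl TryFrom<i32> for EvenNumber {
    // Error type handed back when the conversion is rejected.
    type Err = i32;

    fn try_from(n: i32) -> Result<EvenNumber, i32> {
        if n % 2 == 0 {
            Ok(EvenNumber(n))
        } else {
            Err(n)
        }
    }
}

fn main() {
    assert!(EvenNumber::try_from(4).is_ok());
    assert_eq!(EvenNumber::try_from(5).err(), Some(5));
}
```

With `use std::convert::TryInto;`, the blanket implementation shown below also makes the corresponding `try_into` call available on `i32`.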
/// /// # Examples /// /// ``` /// @@ -158,6 +160,30 @@ pub trait From: Sized { fn from(T) -> Self; } +/// An attempted conversion that consumes `self`, which may or may not be expensive. +/// +/// Library authors should not directly implement this trait, but should prefer implementing +/// the `TryFrom` trait, which offers greater flexibility and provides an equivalent `TryInto` +/// implementation for free, thanks to a blanket implementation in the standard library. +#[unstable(feature = "try_from", issue = "33417")] +pub trait TryInto: Sized { + /// The type returned in the event of a conversion error. + type Err; + + /// Performs the conversion. + fn try_into(self) -> Result; +} + +/// Attempt to construct `Self` via a conversion. +#[unstable(feature = "try_from", issue = "33417")] +pub trait TryFrom: Sized { + /// The type returned in the event of a conversion error. + type Err; + + /// Performs the conversion. + fn try_from(T) -> Result; +} + //////////////////////////////////////////////////////////////////////////////// // GENERIC IMPLS //////////////////////////////////////////////////////////////////////////////// @@ -216,6 +242,17 @@ impl From for T { fn from(t: T) -> T { t } } + +// TryFrom implies TryInto +#[unstable(feature = "try_from", issue = "33417")] +impl TryInto for T where U: TryFrom { + type Err = U::Err; + + fn try_into(self) -> Result { + U::try_from(self) + } +} + //////////////////////////////////////////////////////////////////////////////// // CONCRETE IMPLS //////////////////////////////////////////////////////////////////////////////// diff --git a/src/libcore/fmt/builders.rs b/src/libcore/fmt/builders.rs index d33746389a..6cac80ab62 100644 --- a/src/libcore/fmt/builders.rs +++ b/src/libcore/fmt/builders.rs @@ -9,7 +9,7 @@ // except according to those terms. use prelude::v1::*; -use fmt::{self, Write, FlagV1}; +use fmt::{self, FlagV1}; struct PadAdapter<'a, 'b: 'a> { fmt: &'a mut fmt::Formatter<'b>, diff --git a/src/libcore/fmt/mod.rs b/src/libcore/fmt/mod.rs index 2f02f5c21f..dde4d03dad 100644 --- a/src/libcore/fmt/mod.rs +++ b/src/libcore/fmt/mod.rs @@ -60,7 +60,7 @@ pub type Result = result::Result<(), Error>; /// occurred. Any extra information must be arranged to be transmitted through /// some other means. #[stable(feature = "rust1", since = "1.0.0")] -#[derive(Copy, Clone, Debug)] +#[derive(Copy, Clone, Debug, Default, Eq, Hash, Ord, PartialEq, PartialOrd)] pub struct Error; /// A collection of methods that are required to format a message into a stream. @@ -776,6 +776,32 @@ pub trait UpperExp { /// /// * output - the buffer to write output to /// * args - the precompiled arguments generated by `format_args!` +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// use std::fmt; +/// +/// let mut output = String::new(); +/// fmt::write(&mut output, format_args!("Hello {}!", "world")) +/// .expect("Error occurred while trying to write in String"); +/// assert_eq!(output, "Hello world!"); +/// ``` +/// +/// Please note that using [`write!`][write_macro] might be preferable.
Example: +/// +/// ``` +/// use std::fmt::Write; +/// +/// let mut output = String::new(); +/// write!(&mut output, "Hello {}!", "world") +/// .expect("Error occurred while trying to write in String"); +/// assert_eq!(output, "Hello world!"); +/// ``` +/// +/// [write_macro]: ../../std/macro.write!.html #[stable(feature = "rust1", since = "1.0.0")] pub fn write(output: &mut Write, args: Arguments) -> Result { let mut formatter = Formatter { diff --git a/src/libcore/intrinsics.rs b/src/libcore/intrinsics.rs index 03bcf9caee..8a9f662bf8 100644 --- a/src/libcore/intrinsics.rs +++ b/src/libcore/intrinsics.rs @@ -53,34 +53,14 @@ extern "rust-intrinsic" { // NB: These intrinsics take raw pointers because they mutate aliased // memory, which is not valid for either `&` or `&mut`. - #[cfg(all(stage0, not(cargobuild)))] - pub fn atomic_cxchg(dst: *mut T, old: T, src: T) -> T; - #[cfg(all(stage0, not(cargobuild)))] - pub fn atomic_cxchg_acq(dst: *mut T, old: T, src: T) -> T; - #[cfg(all(stage0, not(cargobuild)))] - pub fn atomic_cxchg_rel(dst: *mut T, old: T, src: T) -> T; - #[cfg(all(stage0, not(cargobuild)))] - pub fn atomic_cxchg_acqrel(dst: *mut T, old: T, src: T) -> T; - #[cfg(all(stage0, not(cargobuild)))] - pub fn atomic_cxchg_relaxed(dst: *mut T, old: T, src: T) -> T; - - #[cfg(any(not(stage0), cargobuild))] pub fn atomic_cxchg(dst: *mut T, old: T, src: T) -> (T, bool); - #[cfg(any(not(stage0), cargobuild))] pub fn atomic_cxchg_acq(dst: *mut T, old: T, src: T) -> (T, bool); - #[cfg(any(not(stage0), cargobuild))] pub fn atomic_cxchg_rel(dst: *mut T, old: T, src: T) -> (T, bool); - #[cfg(any(not(stage0), cargobuild))] pub fn atomic_cxchg_acqrel(dst: *mut T, old: T, src: T) -> (T, bool); - #[cfg(any(not(stage0), cargobuild))] pub fn atomic_cxchg_relaxed(dst: *mut T, old: T, src: T) -> (T, bool); - #[cfg(any(not(stage0), cargobuild))] pub fn atomic_cxchg_failrelaxed(dst: *mut T, old: T, src: T) -> (T, bool); - #[cfg(any(not(stage0), cargobuild))] pub fn atomic_cxchg_failacq(dst: *mut T, old: T, src: T) -> (T, bool); - #[cfg(any(not(stage0), cargobuild))] pub fn atomic_cxchg_acq_failrelaxed(dst: *mut T, old: T, src: T) -> (T, bool); - #[cfg(any(not(stage0), cargobuild))] pub fn atomic_cxchg_acqrel_failrelaxed(dst: *mut T, old: T, src: T) -> (T, bool); pub fn atomic_cxchgweak(dst: *mut T, old: T, src: T) -> (T, bool); @@ -212,11 +192,8 @@ extern "rust-intrinsic" { /// The size of a type in bytes. /// - /// This is the exact number of bytes in memory taken up by a - /// value of the given type. In other words, a memset of this size - /// would *exactly* overwrite a value. When laid out in vectors - /// and structures there may be additional padding between - /// elements. + /// More specifically, this is the offset in bytes between successive + /// items of the same type, including alignment padding. pub fn size_of() -> usize; /// Moves a value to an uninitialized memory location. @@ -548,27 +525,22 @@ extern "rust-intrinsic" { /// Float addition that allows optimizations based on algebraic rules. /// May assume inputs are finite. - #[cfg(not(stage0))] pub fn fadd_fast(a: T, b: T) -> T; /// Float subtraction that allows optimizations based on algebraic rules. /// May assume inputs are finite. - #[cfg(not(stage0))] pub fn fsub_fast(a: T, b: T) -> T; /// Float multiplication that allows optimizations based on algebraic rules. /// May assume inputs are finite. - #[cfg(not(stage0))] pub fn fmul_fast(a: T, b: T) -> T; /// Float division that allows optimizations based on algebraic rules. 
/// May assume inputs are finite. - #[cfg(not(stage0))] pub fn fdiv_fast(a: T, b: T) -> T; /// Float remainder that allows optimizations based on algebraic rules. /// May assume inputs are finite. - #[cfg(not(stage0))] pub fn frem_fast(a: T, b: T) -> T; diff --git a/src/libcore/iter.rs b/src/libcore/iter.rs deleted file mode 100644 index b4378a5fec..0000000000 --- a/src/libcore/iter.rs +++ /dev/null @@ -1,5007 +0,0 @@ -// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Composable external iteration. -//! -//! If you've found yourself with a collection of some kind, and needed to -//! perform an operation on the elements of said collection, you'll quickly run -//! into 'iterators'. Iterators are heavily used in idiomatic Rust code, so -//! it's worth becoming familiar with them. -//! -//! Before explaining more, let's talk about how this module is structured: -//! -//! # Organization -//! -//! This module is largely organized by type: -//! -//! * [Traits] are the core portion: these traits define what kind of iterators -//! exist and what you can do with them. The methods of these traits are worth -//! putting some extra study time into. -//! * [Functions] provide some helpful ways to create some basic iterators. -//! * [Structs] are often the return types of the various methods on this -//! module's traits. You'll usually want to look at the method that creates -//! the `struct`, rather than the `struct` itself. For more detail about why, -//! see '[Implementing Iterator](#implementing-iterator)'. -//! -//! [Traits]: #traits -//! [Functions]: #functions -//! [Structs]: #structs -//! -//! That's it! Let's dig into iterators. -//! -//! # Iterator -//! -//! The heart and soul of this module is the [`Iterator`] trait. The core of -//! [`Iterator`] looks like this: -//! -//! ``` -//! trait Iterator { -//! type Item; -//! fn next(&mut self) -> Option; -//! } -//! ``` -//! -//! An iterator has a method, [`next()`], which when called, returns an -//! [`Option`]``. [`next()`] will return `Some(Item)` as long as there -//! are elements, and once they've all been exhausted, will return `None` to -//! indicate that iteration is finished. Individual iterators may choose to -//! resume iteration, and so calling [`next()`] again may or may not eventually -//! start returning `Some(Item)` again at some point. -//! -//! [`Iterator`]'s full definition includes a number of other methods as well, -//! but they are default methods, built on top of [`next()`], and so you get -//! them for free. -//! -//! Iterators are also composable, and it's common to chain them together to do -//! more complex forms of processing. See the [Adapters](#adapters) section -//! below for more details. -//! -//! [`Iterator`]: trait.Iterator.html -//! [`next()`]: trait.Iterator.html#tymethod.next -//! [`Option`]: ../../std/option/enum.Option.html -//! -//! # The three forms of iteration -//! -//! There are three common methods which can create iterators from a collection: -//! -//! * `iter()`, which iterates over `&T`. -//! * `iter_mut()`, which iterates over `&mut T`. -//! * `into_iter()`, which iterates over `T`. -//! -//! Various things in the standard library may implement one or more of the -//! 
three, where appropriate. -//! -//! # Implementing Iterator -//! -//! Creating an iterator of your own involves two steps: creating a `struct` to -//! hold the iterator's state, and then `impl`ementing [`Iterator`] for that -//! `struct`. This is why there are so many `struct`s in this module: there is -//! one for each iterator and iterator adapter. -//! -//! Let's make an iterator named `Counter` which counts from `1` to `5`: -//! -//! ``` -//! // First, the struct: -//! -//! /// An iterator which counts from one to five -//! struct Counter { -//! count: usize, -//! } -//! -//! // we want our count to start at one, so let's add a new() method to help. -//! // This isn't strictly necessary, but is convenient. Note that we start -//! // `count` at zero, we'll see why in `next()`'s implementation below. -//! impl Counter { -//! fn new() -> Counter { -//! Counter { count: 0 } -//! } -//! } -//! -//! // Then, we implement `Iterator` for our `Counter`: -//! -//! impl Iterator for Counter { -//! // we will be counting with usize -//! type Item = usize; -//! -//! // next() is the only required method -//! fn next(&mut self) -> Option { -//! // increment our count. This is why we started at zero. -//! self.count += 1; -//! -//! // check to see if we've finished counting or not. -//! if self.count < 6 { -//! Some(self.count) -//! } else { -//! None -//! } -//! } -//! } -//! -//! // And now we can use it! -//! -//! let mut counter = Counter::new(); -//! -//! let x = counter.next().unwrap(); -//! println!("{}", x); -//! -//! let x = counter.next().unwrap(); -//! println!("{}", x); -//! -//! let x = counter.next().unwrap(); -//! println!("{}", x); -//! -//! let x = counter.next().unwrap(); -//! println!("{}", x); -//! -//! let x = counter.next().unwrap(); -//! println!("{}", x); -//! ``` -//! -//! This will print `1` through `5`, each on their own line. -//! -//! Calling `next()` this way gets repetitive. Rust has a construct which can -//! call `next()` on your iterator, until it reaches `None`. Let's go over that -//! next. -//! -//! # for Loops and IntoIterator -//! -//! Rust's `for` loop syntax is actually sugar for iterators. Here's a basic -//! example of `for`: -//! -//! ``` -//! let values = vec![1, 2, 3, 4, 5]; -//! -//! for x in values { -//! println!("{}", x); -//! } -//! ``` -//! -//! This will print the numbers one through five, each on their own line. But -//! you'll notice something here: we never called anything on our vector to -//! produce an iterator. What gives? -//! -//! There's a trait in the standard library for converting something into an -//! iterator: [`IntoIterator`]. This trait has one method, [`into_iter()`], -//! which converts the thing implementing [`IntoIterator`] into an iterator. -//! Let's take a look at that `for` loop again, and what the compiler converts -//! it into: -//! -//! [`IntoIterator`]: trait.IntoIterator.html -//! [`into_iter()`]: trait.IntoIterator.html#tymethod.into_iter -//! -//! ``` -//! let values = vec![1, 2, 3, 4, 5]; -//! -//! for x in values { -//! println!("{}", x); -//! } -//! ``` -//! -//! Rust de-sugars this into: -//! -//! ``` -//! let values = vec![1, 2, 3, 4, 5]; -//! { -//! let result = match IntoIterator::into_iter(values) { -//! mut iter => loop { -//! match iter.next() { -//! Some(x) => { println!("{}", x); }, -//! None => break, -//! } -//! }, -//! }; -//! result -//! } -//! ``` -//! -//! First, we call `into_iter()` on the value. Then, we match on the iterator -//! 
that returns, calling [`next()`] over and over until we see a `None`. At -//! that point, we `break` out of the loop, and we're done iterating. -//! -//! There's one more subtle bit here: the standard library contains an -//! interesting implementation of [`IntoIterator`]: -//! -//! ```ignore -//! impl IntoIterator for I -//! ``` -//! -//! In other words, all [`Iterator`]s implement [`IntoIterator`], by just -//! returning themselves. This means two things: -//! -//! 1. If you're writing an [`Iterator`], you can use it with a `for` loop. -//! 2. If you're creating a collection, implementing [`IntoIterator`] for it -//! will allow your collection to be used with the `for` loop. -//! -//! # Adapters -//! -//! Functions which take an [`Iterator`] and return another [`Iterator`] are -//! often called 'iterator adapters', as they're a form of the 'adapter -//! pattern'. -//! -//! Common iterator adapters include [`map()`], [`take()`], and [`collect()`]. -//! For more, see their documentation. -//! -//! [`map()`]: trait.Iterator.html#method.map -//! [`take()`]: trait.Iterator.html#method.take -//! [`collect()`]: trait.Iterator.html#method.collect -//! -//! # Laziness -//! -//! Iterators (and iterator [adapters](#adapters)) are *lazy*. This means that -//! just creating an iterator doesn't _do_ a whole lot. Nothing really happens -//! until you call [`next()`]. This is sometimes a source of confusion when -//! creating an iterator solely for its side effects. For example, the [`map()`] -//! method calls a closure on each element it iterates over: -//! -//! ``` -//! # #![allow(unused_must_use)] -//! let v = vec![1, 2, 3, 4, 5]; -//! v.iter().map(|x| println!("{}", x)); -//! ``` -//! -//! This will not print any values, as we only created an iterator, rather than -//! using it. The compiler will warn us about this kind of behavior: -//! -//! ```text -//! warning: unused result which must be used: iterator adaptors are lazy and -//! do nothing unless consumed -//! ``` -//! -//! The idiomatic way to write a [`map()`] for its side effects is to use a -//! `for` loop instead: -//! -//! ``` -//! let v = vec![1, 2, 3, 4, 5]; -//! -//! for x in &v { -//! println!("{}", x); -//! } -//! ``` -//! -//! [`map()`]: trait.Iterator.html#method.map -//! -//! The two most common ways to evaluate an iterator are to use a `for` loop -//! like this, or using the [`collect()`] adapter to produce a new collection. -//! -//! [`collect()`]: trait.Iterator.html#method.collect -//! -//! # Infinity -//! -//! Iterators do not have to be finite. As an example, an open-ended range is -//! an infinite iterator: -//! -//! ``` -//! let numbers = 0..; -//! ``` -//! -//! It is common to use the [`take()`] iterator adapter to turn an infinite -//! iterator into a finite one: -//! -//! ``` -//! let numbers = 0..; -//! let five_numbers = numbers.take(5); -//! -//! for number in five_numbers { -//! println!("{}", number); -//! } -//! ``` -//! -//! This will print the numbers `0` through `4`, each on their own line. -//! -//! [`take()`]: trait.Iterator.html#method.take - -#![stable(feature = "rust1", since = "1.0.0")] - -use clone::Clone; -use cmp; -use cmp::{Ord, PartialOrd, PartialEq, Ordering}; -use default::Default; -use fmt; -use marker; -use mem; -use num::{Zero, One}; -use ops::{self, Add, Sub, FnMut, Mul}; -use option::Option::{self, Some, None}; -use marker::Sized; -use usize; - -fn _assert_is_object_safe(_: &Iterator) {} - -/// An interface for dealing with iterators. -/// -/// This is the main iterator trait. 
For more about the concept of iterators -/// generally, please see the [module-level documentation]. In particular, you -/// may want to know how to [implement `Iterator`][impl]. -/// -/// [module-level documentation]: index.html -/// [impl]: index.html#implementing-iterator -#[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented = "`{Self}` is not an iterator; maybe try calling \ - `.iter()` or a similar method"] -pub trait Iterator { - /// The type of the elements being iterated over. - #[stable(feature = "rust1", since = "1.0.0")] - type Item; - - /// Advances the iterator and returns the next value. - /// - /// Returns `None` when iteration is finished. Individual iterator - /// implementations may choose to resume iteration, and so calling `next()` - /// again may or may not eventually start returning `Some(Item)` again at some - /// point. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter(); - /// - /// // A call to next() returns the next value... - /// assert_eq!(Some(&1), iter.next()); - /// assert_eq!(Some(&2), iter.next()); - /// assert_eq!(Some(&3), iter.next()); - /// - /// // ... and then None once it's over. - /// assert_eq!(None, iter.next()); - /// - /// // More calls may or may not return None. Here, they always will. - /// assert_eq!(None, iter.next()); - /// assert_eq!(None, iter.next()); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - fn next(&mut self) -> Option; - - /// Returns the bounds on the remaining length of the iterator. - /// - /// Specifically, `size_hint()` returns a tuple where the first element - /// is the lower bound, and the second element is the upper bound. - /// - /// The second half of the tuple that is returned is an `Option`. A - /// `None` here means that either there is no known upper bound, or the - /// upper bound is larger than `usize`. - /// - /// # Implementation notes - /// - /// It is not enforced that an iterator implementation yields the declared - /// number of elements. A buggy iterator may yield less than the lower bound - /// or more than the upper bound of elements. - /// - /// `size_hint()` is primarily intended to be used for optimizations such as - /// reserving space for the elements of the iterator, but must not be - /// trusted to e.g. omit bounds checks in unsafe code. An incorrect - /// implementation of `size_hint()` should not lead to memory safety - /// violations. - /// - /// That said, the implementation should provide a correct estimation, - /// because otherwise it would be a violation of the trait's protocol. - /// - /// The default implementation returns `(0, None)` which is correct for any - /// iterator. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// let iter = a.iter(); - /// - /// assert_eq!((3, Some(3)), iter.size_hint()); - /// ``` - /// - /// A more complex example: - /// - /// ``` - /// // The even numbers from zero to ten. - /// let iter = (0..10).filter(|x| x % 2 == 0); - /// - /// // We might iterate from zero to ten times. Knowing that it's five - /// // exactly wouldn't be possible without executing filter(). 
- /// assert_eq!((0, Some(10)), iter.size_hint()); - /// - /// // Let's add one five more numbers with chain() - /// let iter = (0..10).filter(|x| x % 2 == 0).chain(15..20); - /// - /// // now both bounds are increased by five - /// assert_eq!((5, Some(15)), iter.size_hint()); - /// ``` - /// - /// Returning `None` for an upper bound: - /// - /// ``` - /// // an infinite iterator has no upper bound - /// let iter = 0..; - /// - /// assert_eq!((0, None), iter.size_hint()); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn size_hint(&self) -> (usize, Option) { (0, None) } - - /// Consumes the iterator, counting the number of iterations and returning it. - /// - /// This method will evaluate the iterator until its [`next()`] returns - /// `None`. Once `None` is encountered, `count()` returns the number of - /// times it called [`next()`]. - /// - /// [`next()`]: #tymethod.next - /// - /// # Overflow Behavior - /// - /// The method does no guarding against overflows, so counting elements of - /// an iterator with more than `usize::MAX` elements either produces the - /// wrong result or panics. If debug assertions are enabled, a panic is - /// guaranteed. - /// - /// # Panics - /// - /// This function might panic if the iterator has more than `usize::MAX` - /// elements. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// assert_eq!(a.iter().count(), 3); - /// - /// let a = [1, 2, 3, 4, 5]; - /// assert_eq!(a.iter().count(), 5); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn count(self) -> usize where Self: Sized { - // Might overflow. - self.fold(0, |cnt, _| cnt + 1) - } - - /// Consumes the iterator, returning the last element. - /// - /// This method will evaluate the iterator until it returns `None`. While - /// doing so, it keeps track of the current element. After `None` is - /// returned, `last()` will then return the last element it saw. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// assert_eq!(a.iter().last(), Some(&3)); - /// - /// let a = [1, 2, 3, 4, 5]; - /// assert_eq!(a.iter().last(), Some(&5)); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn last(self) -> Option where Self: Sized { - let mut last = None; - for x in self { last = Some(x); } - last - } - - /// Consumes the `n` first elements of the iterator, then returns the - /// `next()` one. - /// - /// This method will evaluate the iterator `n` times, discarding those elements. - /// After it does so, it will call [`next()`] and return its value. - /// - /// [`next()`]: #tymethod.next - /// - /// Like most indexing operations, the count starts from zero, so `nth(0)` - /// returns the first value, `nth(1)` the second, and so on. - /// - /// `nth()` will return `None` if `n` is larger than the length of the - /// iterator. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// assert_eq!(a.iter().nth(1), Some(&2)); - /// ``` - /// - /// Calling `nth()` multiple times doesn't rewind the iterator: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter(); - /// - /// assert_eq!(iter.nth(1), Some(&2)); - /// assert_eq!(iter.nth(1), None); - /// ``` - /// - /// Returning `None` if there are less than `n` elements: - /// - /// ``` - /// let a = [1, 2, 3]; - /// assert_eq!(a.iter().nth(10), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn nth(&mut self, mut n: usize) -> Option where Self: Sized { - for x in self { - if n == 0 { return Some(x) } - n -= 1; - } - None - } - - /// Takes two iterators and creates a new iterator over both in sequence. - /// - /// `chain()` will return a new iterator which will first iterate over - /// values from the first iterator and then over values from the second - /// iterator. - /// - /// In other words, it links two iterators together, in a chain. 🔗 - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a1 = [1, 2, 3]; - /// let a2 = [4, 5, 6]; - /// - /// let mut iter = a1.iter().chain(a2.iter()); - /// - /// assert_eq!(iter.next(), Some(&1)); - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), Some(&3)); - /// assert_eq!(iter.next(), Some(&4)); - /// assert_eq!(iter.next(), Some(&5)); - /// assert_eq!(iter.next(), Some(&6)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// Since the argument to `chain()` uses [`IntoIterator`], we can pass - /// anything that can be converted into an [`Iterator`], not just an - /// [`Iterator`] itself. For example, slices (`&[T]`) implement - /// [`IntoIterator`], and so can be passed to `chain()` directly: - /// - /// [`IntoIterator`]: trait.IntoIterator.html - /// [`Iterator`]: trait.Iterator.html - /// - /// ``` - /// let s1 = &[1, 2, 3]; - /// let s2 = &[4, 5, 6]; - /// - /// let mut iter = s1.iter().chain(s2); - /// - /// assert_eq!(iter.next(), Some(&1)); - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), Some(&3)); - /// assert_eq!(iter.next(), Some(&4)); - /// assert_eq!(iter.next(), Some(&5)); - /// assert_eq!(iter.next(), Some(&6)); - /// assert_eq!(iter.next(), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn chain(self, other: U) -> Chain where - Self: Sized, U: IntoIterator, - { - Chain{a: self, b: other.into_iter(), state: ChainState::Both} - } - - /// 'Zips up' two iterators into a single iterator of pairs. - /// - /// `zip()` returns a new iterator that will iterate over two other - /// iterators, returning a tuple where the first element comes from the - /// first iterator, and the second element comes from the second iterator. - /// - /// In other words, it zips two iterators together, into a single one. - /// - /// When either iterator returns `None`, all further calls to `next()` - /// will return `None`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a1 = [1, 2, 3]; - /// let a2 = [4, 5, 6]; - /// - /// let mut iter = a1.iter().zip(a2.iter()); - /// - /// assert_eq!(iter.next(), Some((&1, &4))); - /// assert_eq!(iter.next(), Some((&2, &5))); - /// assert_eq!(iter.next(), Some((&3, &6))); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// Since the argument to `zip()` uses [`IntoIterator`], we can pass - /// anything that can be converted into an [`Iterator`], not just an - /// [`Iterator`] itself. 
For example, slices (`&[T]`) implement - /// [`IntoIterator`], and so can be passed to `zip()` directly: - /// - /// [`IntoIterator`]: trait.IntoIterator.html - /// [`Iterator`]: trait.Iterator.html - /// - /// ``` - /// let s1 = &[1, 2, 3]; - /// let s2 = &[4, 5, 6]; - /// - /// let mut iter = s1.iter().zip(s2); - /// - /// assert_eq!(iter.next(), Some((&1, &4))); - /// assert_eq!(iter.next(), Some((&2, &5))); - /// assert_eq!(iter.next(), Some((&3, &6))); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// `zip()` is often used to zip an infinite iterator to a finite one. - /// This works because the finite iterator will eventually return `None`, - /// ending the zipper. Zipping with `(0..)` can look a lot like [`enumerate()`]: - /// - /// ``` - /// let enumerate: Vec<_> = "foo".chars().enumerate().collect(); - /// - /// let zipper: Vec<_> = (0..).zip("foo".chars()).collect(); - /// - /// assert_eq!((0, 'f'), enumerate[0]); - /// assert_eq!((0, 'f'), zipper[0]); - /// - /// assert_eq!((1, 'o'), enumerate[1]); - /// assert_eq!((1, 'o'), zipper[1]); - /// - /// assert_eq!((2, 'o'), enumerate[2]); - /// assert_eq!((2, 'o'), zipper[2]); - /// ``` - /// - /// [`enumerate()`]: trait.Iterator.html#method.enumerate - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn zip(self, other: U) -> Zip where - Self: Sized, U: IntoIterator - { - Zip{a: self, b: other.into_iter()} - } - - /// Takes a closure and creates an iterator which calls that closure on each - /// element. - /// - /// `map()` transforms one iterator into another, by means of its argument: - /// something that implements `FnMut`. It produces a new iterator which - /// calls this closure on each element of the original iterator. - /// - /// If you are good at thinking in types, you can think of `map()` like this: - /// If you have an iterator that gives you elements of some type `A`, and - /// you want an iterator of some other type `B`, you can use `map()`, - /// passing a closure that takes an `A` and returns a `B`. - /// - /// `map()` is conceptually similar to a [`for`] loop. However, as `map()` is - /// lazy, it is best used when you're already working with other iterators. - /// If you're doing some sort of looping for a side effect, it's considered - /// more idiomatic to use [`for`] than `map()`. - /// - /// [`for`]: ../../book/loops.html#for - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.into_iter().map(|x| 2 * x); - /// - /// assert_eq!(iter.next(), Some(2)); - /// assert_eq!(iter.next(), Some(4)); - /// assert_eq!(iter.next(), Some(6)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// If you're doing some sort of side effect, prefer [`for`] to `map()`: - /// - /// ``` - /// # #![allow(unused_must_use)] - /// // don't do this: - /// (0..5).map(|x| println!("{}", x)); - /// - /// // it won't even execute, as it is lazy. Rust will warn you about this. - /// - /// // Instead, use for: - /// for x in 0..5 { - /// println!("{}", x); - /// } - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn map(self, f: F) -> Map where - Self: Sized, F: FnMut(Self::Item) -> B, - { - Map{iter: self, f: f} - } - - /// Creates an iterator which uses a closure to determine if an element - /// should be yielded. - /// - /// The closure must return `true` or `false`. `filter()` creates an - /// iterator which calls this closure on each element. If the closure - /// returns `true`, then the element is returned. 
If the closure returns - /// `false`, it will try again, and call the closure on the next element, - /// seeing if it passes the test. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [0i32, 1, 2]; - /// - /// let mut iter = a.into_iter().filter(|x| x.is_positive()); - /// - /// assert_eq!(iter.next(), Some(&1)); - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// Because the closure passed to `filter()` takes a reference, and many - /// iterators iterate over references, this leads to a possibly confusing - /// situation, where the type of the closure is a double reference: - /// - /// ``` - /// let a = [0, 1, 2]; - /// - /// let mut iter = a.into_iter().filter(|x| **x > 1); // need two *s! - /// - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// It's common to instead use destructuring on the argument to strip away - /// one: - /// - /// ``` - /// let a = [0, 1, 2]; - /// - /// let mut iter = a.into_iter().filter(|&x| *x > 1); // both & and * - /// - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// or both: - /// - /// ``` - /// let a = [0, 1, 2]; - /// - /// let mut iter = a.into_iter().filter(|&&x| x > 1); // two &s - /// - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// of these layers. - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn filter
<P>
(self, predicate: P) -> Filter where - Self: Sized, P: FnMut(&Self::Item) -> bool, - { - Filter{iter: self, predicate: predicate} - } - - /// Creates an iterator that both filters and maps. - /// - /// The closure must return an [`Option`]. `filter_map()` creates an - /// iterator which calls this closure on each element. If the closure - /// returns `Some(element)`, then that element is returned. If the - /// closure returns `None`, it will try again, and call the closure on the - /// next element, seeing if it will return `Some`. - /// - /// [`Option`]: ../../std/option/enum.Option.html - /// - /// Why `filter_map()` and not just [`filter()`].[`map()`]? The key is in this - /// part: - /// - /// [`filter()`]: #method.filter - /// [`map()`]: #method.map - /// - /// > If the closure returns `Some(element)`, then that element is returned. - /// - /// In other words, it removes the [`Option`] layer automatically. If your - /// mapping is already returning an [`Option`] and you want to skip over - /// `None`s, then `filter_map()` is much, much nicer to use. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = ["1", "2", "lol"]; - /// - /// let mut iter = a.iter().filter_map(|s| s.parse().ok()); - /// - /// assert_eq!(iter.next(), Some(1)); - /// assert_eq!(iter.next(), Some(2)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// Here's the same example, but with [`filter()`] and [`map()`]: - /// - /// ``` - /// let a = ["1", "2", "lol"]; - /// - /// let mut iter = a.iter() - /// .map(|s| s.parse().ok()) - /// .filter(|s| s.is_some()); - /// - /// assert_eq!(iter.next(), Some(Some(1))); - /// assert_eq!(iter.next(), Some(Some(2))); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// There's an extra layer of `Some` in there. - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn filter_map(self, f: F) -> FilterMap where - Self: Sized, F: FnMut(Self::Item) -> Option, - { - FilterMap { iter: self, f: f } - } - - /// Creates an iterator which gives the current iteration count as well as - /// the next value. - /// - /// The iterator returned yields pairs `(i, val)`, where `i` is the - /// current index of iteration and `val` is the value returned by the - /// iterator. - /// - /// `enumerate()` keeps its count as a [`usize`]. If you want to count by a - /// different sized integer, the [`zip()`] function provides similar - /// functionality. - /// - /// [`usize`]: ../../std/primitive.usize.html - /// [`zip()`]: #method.zip - /// - /// # Overflow Behavior - /// - /// The method does no guarding against overflows, so enumerating more than - /// [`usize::MAX`] elements either produces the wrong result or panics. If - /// debug assertions are enabled, a panic is guaranteed. - /// - /// [`usize::MAX`]: ../../std/usize/constant.MAX.html - /// - /// # Panics - /// - /// The returned iterator might panic if the to-be-returned index would - /// overflow a `usize`. - /// - /// # Examples - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter().enumerate(); - /// - /// assert_eq!(iter.next(), Some((0, &1))); - /// assert_eq!(iter.next(), Some((1, &2))); - /// assert_eq!(iter.next(), Some((2, &3))); - /// assert_eq!(iter.next(), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn enumerate(self) -> Enumerate where Self: Sized { - Enumerate { iter: self, count: 0 } - } - - /// Creates an iterator which can look at the `next()` element without - /// consuming it. 
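As the `enumerate()` notes above say, `zip()` with a range gives the same shape of output while letting you choose the index type. A small sketch using `u64` indices (the `letters` array is made up for illustration):

```rust
fn main() {
    let letters = ['a', 'b', 'c'];

    // like enumerate(), but counting with a u64 instead of a usize
    let mut iter = (0u64..).zip(letters.iter());

    assert_eq!(iter.next(), Some((0, &'a')));
    assert_eq!(iter.next(), Some((1, &'b')));
    assert_eq!(iter.next(), Some((2, &'c')));
    assert_eq!(iter.next(), None);
}
```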
- /// - /// Adds a [`peek()`] method to an iterator. See its documentation for - /// more information. - /// - /// [`peek()`]: struct.Peekable.html#method.peek - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let xs = [1, 2, 3]; - /// - /// let mut iter = xs.iter().peekable(); - /// - /// // peek() lets us see into the future - /// assert_eq!(iter.peek(), Some(&&1)); - /// assert_eq!(iter.next(), Some(&1)); - /// - /// assert_eq!(iter.next(), Some(&2)); - /// - /// // we can peek() multiple times, the iterator won't advance - /// assert_eq!(iter.peek(), Some(&&3)); - /// assert_eq!(iter.peek(), Some(&&3)); - /// - /// assert_eq!(iter.next(), Some(&3)); - /// - /// // after the iterator is finished, so is peek() - /// assert_eq!(iter.peek(), None); - /// assert_eq!(iter.next(), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn peekable(self) -> Peekable where Self: Sized { - Peekable{iter: self, peeked: None} - } - - /// Creates an iterator that [`skip()`]s elements based on a predicate. - /// - /// [`skip()`]: #method.skip - /// - /// `skip_while()` takes a closure as an argument. It will call this - /// closure on each element of the iterator, and ignore elements - /// until it returns `false`. - /// - /// After `false` is returned, `skip_while()`'s job is over, and the - /// rest of the elements are yielded. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [-1i32, 0, 1]; - /// - /// let mut iter = a.into_iter().skip_while(|x| x.is_negative()); - /// - /// assert_eq!(iter.next(), Some(&0)); - /// assert_eq!(iter.next(), Some(&1)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// Because the closure passed to `skip_while()` takes a reference, and many - /// iterators iterate over references, this leads to a possibly confusing - /// situation, where the type of the closure is a double reference: - /// - /// ``` - /// let a = [-1, 0, 1]; - /// - /// let mut iter = a.into_iter().skip_while(|x| **x < 0); // need two *s! - /// - /// assert_eq!(iter.next(), Some(&0)); - /// assert_eq!(iter.next(), Some(&1)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// Stopping after an initial `false`: - /// - /// ``` - /// let a = [-1, 0, 1, -2]; - /// - /// let mut iter = a.into_iter().skip_while(|x| **x < 0); - /// - /// assert_eq!(iter.next(), Some(&0)); - /// assert_eq!(iter.next(), Some(&1)); - /// - /// // while this would have been false, since we already got a false, - /// // skip_while() isn't used any more - /// assert_eq!(iter.next(), Some(&-2)); - /// - /// assert_eq!(iter.next(), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn skip_while
<P>
(self, predicate: P) -> SkipWhile where - Self: Sized, P: FnMut(&Self::Item) -> bool, - { - SkipWhile{iter: self, flag: false, predicate: predicate} - } - - /// Creates an iterator that yields elements based on a predicate. - /// - /// `take_while()` takes a closure as an argument. It will call this - /// closure on each element of the iterator, and yield elements - /// while it returns `true`. - /// - /// After `false` is returned, `take_while()`'s job is over, and the - /// rest of the elements are ignored. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [-1i32, 0, 1]; - /// - /// let mut iter = a.into_iter().take_while(|x| x.is_negative()); - /// - /// assert_eq!(iter.next(), Some(&-1)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// Because the closure passed to `take_while()` takes a reference, and many - /// iterators iterate over references, this leads to a possibly confusing - /// situation, where the type of the closure is a double reference: - /// - /// ``` - /// let a = [-1, 0, 1]; - /// - /// let mut iter = a.into_iter().take_while(|x| **x < 0); // need two *s! - /// - /// assert_eq!(iter.next(), Some(&-1)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// Stopping after an initial `false`: - /// - /// ``` - /// let a = [-1, 0, 1, -2]; - /// - /// let mut iter = a.into_iter().take_while(|x| **x < 0); - /// - /// assert_eq!(iter.next(), Some(&-1)); - /// - /// // We have more elements that are less than zero, but since we already - /// // got a false, take_while() isn't used any more - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// Because `take_while()` needs to look at the value in order to see if it - /// should be included or not, consuming iterators will see that it is - /// removed: - /// - /// ``` - /// let a = [1, 2, 3, 4]; - /// let mut iter = a.into_iter(); - /// - /// let result: Vec = iter.by_ref() - /// .take_while(|n| **n != 3) - /// .cloned() - /// .collect(); - /// - /// assert_eq!(result, &[1, 2]); - /// - /// let result: Vec = iter.cloned().collect(); - /// - /// assert_eq!(result, &[4]); - /// ``` - /// - /// The `3` is no longer there, because it was consumed in order to see if - /// the iteration should stop, but wasn't placed back into the iterator or - /// some similar thing. - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn take_while
<P>
(self, predicate: P) -> TakeWhile where - Self: Sized, P: FnMut(&Self::Item) -> bool, - { - TakeWhile{iter: self, flag: false, predicate: predicate} - } - - /// Creates an iterator that skips the first `n` elements. - /// - /// After they have been consumed, the rest of the elements are yielded. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter().skip(2); - /// - /// assert_eq!(iter.next(), Some(&3)); - /// assert_eq!(iter.next(), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn skip(self, n: usize) -> Skip where Self: Sized { - Skip{iter: self, n: n} - } - - /// Creates an iterator that yields its first `n` elements. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter().take(2); - /// - /// assert_eq!(iter.next(), Some(&1)); - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), None); - /// ``` - /// - /// `take()` is often used with an infinite iterator, to make it finite: - /// - /// ``` - /// let mut iter = (0..).take(3); - /// - /// assert_eq!(iter.next(), Some(0)); - /// assert_eq!(iter.next(), Some(1)); - /// assert_eq!(iter.next(), Some(2)); - /// assert_eq!(iter.next(), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn take(self, n: usize) -> Take where Self: Sized, { - Take{iter: self, n: n} - } - - /// An iterator adaptor similar to [`fold()`] that holds internal state and - /// produces a new iterator. - /// - /// [`fold()`]: #method.fold - /// - /// `scan()` takes two arguments: an initial value which seeds the internal - /// state, and a closure with two arguments, the first being a mutable - /// reference to the internal state and the second an iterator element. - /// The closure can assign to the internal state to share state between - /// iterations. - /// - /// On iteration, the closure will be applied to each element of the - /// iterator and the return value from the closure, an [`Option`], is - /// yielded by the iterator. - /// - /// [`Option`]: ../../std/option/enum.Option.html - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter().scan(1, |state, &x| { - /// // each iteration, we'll multiply the state by the element - /// *state = *state * x; - /// - /// // the value passed on to the next iteration - /// Some(*state) - /// }); - /// - /// assert_eq!(iter.next(), Some(1)); - /// assert_eq!(iter.next(), Some(2)); - /// assert_eq!(iter.next(), Some(6)); - /// assert_eq!(iter.next(), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn scan(self, initial_state: St, f: F) -> Scan - where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option, - { - Scan{iter: self, f: f, state: initial_state} - } - - /// Creates an iterator that works like map, but flattens nested structure. - /// - /// The [`map()`] adapter is very useful, but only when the closure - /// argument produces values. If it produces an iterator instead, there's - /// an extra layer of indirection. `flat_map()` will remove this extra layer - /// on its own. - /// - /// [`map()`]: #method.map - /// - /// Another way of thinking about `flat_map()`: [`map()`]'s closure returns - /// one item for each element, and `flat_map()`'s closure returns an - /// iterator for each element. 
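To make the "extra layer of indirection" above concrete, here is a sketch contrasting `map()` and `flat_map()` on the same input (the `words` array is invented for illustration):

```rust
fn main() {
    let words = ["alpha", "beta"];

    // map() alone yields one iterator per element, so the result is nested
    let nested: Vec<Vec<char>> = words.iter()
                                      .map(|s| s.chars().collect())
                                      .collect();
    assert_eq!(nested[0], vec!['a', 'l', 'p', 'h', 'a']);

    // flat_map() flattens that layer away and yields the chars directly
    let flat: String = words.iter().flat_map(|s| s.chars()).collect();
    assert_eq!(flat, "alphabeta");
}
```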
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let words = ["alpha", "beta", "gamma"]; - /// - /// // chars() returns an iterator - /// let merged: String = words.iter() - /// .flat_map(|s| s.chars()) - /// .collect(); - /// assert_eq!(merged, "alphabetagamma"); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn flat_map(self, f: F) -> FlatMap - where Self: Sized, U: IntoIterator, F: FnMut(Self::Item) -> U, - { - FlatMap{iter: self, f: f, frontiter: None, backiter: None } - } - - /// Creates an iterator which ends after the first `None`. - /// - /// After an iterator returns `None`, future calls may or may not yield - /// `Some(T)` again. `fuse()` adapts an iterator, ensuring that after a - /// `None` is given, it will always return `None` forever. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// // an iterator which alternates between Some and None - /// struct Alternate { - /// state: i32, - /// } - /// - /// impl Iterator for Alternate { - /// type Item = i32; - /// - /// fn next(&mut self) -> Option { - /// let val = self.state; - /// self.state = self.state + 1; - /// - /// // if it's even, Some(i32), else None - /// if val % 2 == 0 { - /// Some(val) - /// } else { - /// None - /// } - /// } - /// } - /// - /// let mut iter = Alternate { state: 0 }; - /// - /// // we can see our iterator going back and forth - /// assert_eq!(iter.next(), Some(0)); - /// assert_eq!(iter.next(), None); - /// assert_eq!(iter.next(), Some(2)); - /// assert_eq!(iter.next(), None); - /// - /// // however, once we fuse it... - /// let mut iter = iter.fuse(); - /// - /// assert_eq!(iter.next(), Some(4)); - /// assert_eq!(iter.next(), None); - /// - /// // it will always return None after the first time. - /// assert_eq!(iter.next(), None); - /// assert_eq!(iter.next(), None); - /// assert_eq!(iter.next(), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn fuse(self) -> Fuse where Self: Sized { - Fuse{iter: self, done: false} - } - - /// Do something with each element of an iterator, passing the value on. - /// - /// When using iterators, you'll often chain several of them together. - /// While working on such code, you might want to check out what's - /// happening at various parts in the pipeline. To do that, insert - /// a call to `inspect()`. - /// - /// It's much more common for `inspect()` to be used as a debugging tool - /// than to exist in your final code, but never say never. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 4, 2, 3]; - /// - /// // this iterator sequence is complex. 
- /// let sum = a.iter() - /// .cloned() - /// .filter(|&x| x % 2 == 0) - /// .fold(0, |sum, i| sum + i); - /// - /// println!("{}", sum); - /// - /// // let's add some inspect() calls to investigate what's happening - /// let sum = a.iter() - /// .cloned() - /// .inspect(|x| println!("about to filter: {}", x)) - /// .filter(|&x| x % 2 == 0) - /// .inspect(|x| println!("made it through filter: {}", x)) - /// .fold(0, |sum, i| sum + i); - /// - /// println!("{}", sum); - /// ``` - /// - /// This will print: - /// - /// ```text - /// about to filter: 1 - /// about to filter: 4 - /// made it through filter: 4 - /// about to filter: 2 - /// made it through filter: 2 - /// about to filter: 3 - /// 6 - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn inspect(self, f: F) -> Inspect where - Self: Sized, F: FnMut(&Self::Item), - { - Inspect{iter: self, f: f} - } - - /// Borrows an iterator, rather than consuming it. - /// - /// This is useful to allow applying iterator adaptors while still - /// retaining ownership of the original iterator. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let iter = a.into_iter(); - /// - /// let sum: i32 = iter.take(5) - /// .fold(0, |acc, &i| acc + i ); - /// - /// assert_eq!(sum, 6); - /// - /// // if we try to use iter again, it won't work. The following line - /// // gives "error: use of moved value: `iter` - /// // assert_eq!(iter.next(), None); - /// - /// // let's try that again - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.into_iter(); - /// - /// // instead, we add in a .by_ref() - /// let sum: i32 = iter.by_ref() - /// .take(2) - /// .fold(0, |acc, &i| acc + i ); - /// - /// assert_eq!(sum, 3); - /// - /// // now this is just fine: - /// assert_eq!(iter.next(), Some(&3)); - /// assert_eq!(iter.next(), None); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - fn by_ref(&mut self) -> &mut Self where Self: Sized { self } - - /// Transforms an iterator into a collection. - /// - /// `collect()` can take anything iterable, and turn it into a relevant - /// collection. This is one of the more powerful methods in the standard - /// library, used in a variety of contexts. - /// - /// The most basic pattern in which `collect()` is used is to turn one - /// collection into another. You take a collection, call `iter()` on it, - /// do a bunch of transformations, and then `collect()` at the end. - /// - /// One of the keys to `collect()`'s power is that many things you might - /// not think of as 'collections' actually are. For example, a [`String`] - /// is a collection of [`char`]s. And a collection of [`Result`] can - /// be thought of as single `Result, E>`. See the examples - /// below for more. - /// - /// [`String`]: ../../std/string/struct.String.html - /// [`Result`]: ../../std/result/enum.Result.html - /// [`char`]: ../../std/primitive.char.html - /// - /// Because `collect()` is so general, it can cause problems with type - /// inference. As such, `collect()` is one of the few times you'll see - /// the syntax affectionately known as the 'turbofish': `::<>`. This - /// helps the inference algorithm understand specifically which collection - /// you're trying to collect into. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let doubled: Vec = a.iter() - /// .map(|&x| x * 2) - /// .collect(); - /// - /// assert_eq!(vec![2, 4, 6], doubled); - /// ``` - /// - /// Note that we needed the `: Vec` on the left-hand side. 
This is because - /// we could collect into, for example, a [`VecDeque`] instead: - /// - /// [`VecDeque`]: ../../std/collections/struct.VecDeque.html - /// - /// ``` - /// use std::collections::VecDeque; - /// - /// let a = [1, 2, 3]; - /// - /// let doubled: VecDeque = a.iter() - /// .map(|&x| x * 2) - /// .collect(); - /// - /// assert_eq!(2, doubled[0]); - /// assert_eq!(4, doubled[1]); - /// assert_eq!(6, doubled[2]); - /// ``` - /// - /// Using the 'turbofish' instead of annotating `doubled`: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let doubled = a.iter() - /// .map(|&x| x * 2) - /// .collect::>(); - /// - /// assert_eq!(vec![2, 4, 6], doubled); - /// ``` - /// - /// Because `collect()` cares about what you're collecting into, you can - /// still use a partial type hint, `_`, with the turbofish: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let doubled = a.iter() - /// .map(|&x| x * 2) - /// .collect::>(); - /// - /// assert_eq!(vec![2, 4, 6], doubled); - /// ``` - /// - /// Using `collect()` to make a [`String`]: - /// - /// ``` - /// let chars = ['g', 'd', 'k', 'k', 'n']; - /// - /// let hello: String = chars.iter() - /// .map(|&x| x as u8) - /// .map(|x| (x + 1) as char) - /// .collect(); - /// - /// assert_eq!("hello", hello); - /// ``` - /// - /// If you have a list of [`Result`]s, you can use `collect()` to - /// see if any of them failed: - /// - /// ``` - /// let results = [Ok(1), Err("nope"), Ok(3), Err("bad")]; - /// - /// let result: Result, &str> = results.iter().cloned().collect(); - /// - /// // gives us the first error - /// assert_eq!(Err("nope"), result); - /// - /// let results = [Ok(1), Ok(3)]; - /// - /// let result: Result, &str> = results.iter().cloned().collect(); - /// - /// // gives us the list of answers - /// assert_eq!(Ok(vec![1, 3]), result); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn collect>(self) -> B where Self: Sized { - FromIterator::from_iter(self) - } - - /// Consumes an iterator, creating two collections from it. - /// - /// The predicate passed to `partition()` can return `true`, or `false`. - /// `partition()` returns a pair, all of the elements for which it returned - /// `true`, and all of the elements for which it returned `false`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let (even, odd): (Vec, Vec) = a.into_iter() - /// .partition(|&n| n % 2 == 0); - /// - /// assert_eq!(even, vec![2]); - /// assert_eq!(odd, vec![1, 3]); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - fn partition(self, mut f: F) -> (B, B) where - Self: Sized, - B: Default + Extend, - F: FnMut(&Self::Item) -> bool - { - let mut left: B = Default::default(); - let mut right: B = Default::default(); - - for x in self { - if f(&x) { - left.extend(Some(x)) - } else { - right.extend(Some(x)) - } - } - - (left, right) - } - - /// An iterator adaptor that applies a function, producing a single, final value. - /// - /// `fold()` takes two arguments: an initial value, and a closure with two - /// arguments: an 'accumulator', and an element. The closure returns the value that - /// the accumulator should have for the next iteration. - /// - /// The initial value is the value the accumulator will have on the first - /// call. - /// - /// After applying this closure to every element of the iterator, `fold()` - /// returns the accumulator. - /// - /// This operation is sometimes called 'reduce' or 'inject'. 
- /// - /// Folding is useful whenever you have a collection of something, and want - /// to produce a single value from it. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// // the sum of all of the elements of a - /// let sum = a.iter() - /// .fold(0, |acc, &x| acc + x); - /// - /// assert_eq!(sum, 6); - /// ``` - /// - /// Let's walk through each step of the iteration here: - /// - /// | element | acc | x | result | - /// |---------|-----|---|--------| - /// | | 0 | | | - /// | 1 | 0 | 1 | 1 | - /// | 2 | 1 | 2 | 3 | - /// | 3 | 3 | 3 | 6 | - /// - /// And so, our final result, `6`. - /// - /// It's common for people who haven't used iterators a lot to - /// use a `for` loop with a list of things to build up a result. Those - /// can be turned into `fold()`s: - /// - /// ``` - /// let numbers = [1, 2, 3, 4, 5]; - /// - /// let mut result = 0; - /// - /// // for loop: - /// for i in &numbers { - /// result = result + i; - /// } - /// - /// // fold: - /// let result2 = numbers.iter().fold(0, |acc, &x| acc + x); - /// - /// // they're the same - /// assert_eq!(result, result2); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn fold(self, init: B, mut f: F) -> B where - Self: Sized, F: FnMut(B, Self::Item) -> B, - { - let mut accum = init; - for x in self { - accum = f(accum, x); - } - accum - } - - /// Tests if every element of the iterator matches a predicate. - /// - /// `all()` takes a closure that returns `true` or `false`. It applies - /// this closure to each element of the iterator, and if they all return - /// `true`, then so does `all()`. If any of them return `false`, it - /// returns `false`. - /// - /// `all()` is short-circuiting; in other words, it will stop processing - /// as soon as it finds a `false`, given that no matter what else happens, - /// the result will also be `false`. - /// - /// An empty iterator returns `true`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// assert!(a.iter().all(|&x| x > 0)); - /// - /// assert!(!a.iter().all(|&x| x > 2)); - /// ``` - /// - /// Stopping at the first `false`: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter(); - /// - /// assert!(!iter.all(|&x| x != 2)); - /// - /// // we can still use `iter`, as there are more elements. - /// assert_eq!(iter.next(), Some(&3)); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn all(&mut self, mut f: F) -> bool where - Self: Sized, F: FnMut(Self::Item) -> bool - { - for x in self { - if !f(x) { - return false; - } - } - true - } - - /// Tests if any element of the iterator matches a predicate. - /// - /// `any()` takes a closure that returns `true` or `false`. It applies - /// this closure to each element of the iterator, and if any of them return - /// `true`, then so does `any()`. If they all return `false`, it - /// returns `false`. - /// - /// `any()` is short-circuiting; in other words, it will stop processing - /// as soon as it finds a `true`, given that no matter what else happens, - /// the result will also be `true`. - /// - /// An empty iterator returns `false`. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// assert!(a.iter().any(|&x| x > 0)); - /// - /// assert!(!a.iter().any(|&x| x > 5)); - /// ``` - /// - /// Stopping at the first `true`: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter(); - /// - /// assert!(iter.any(|&x| x != 2)); - /// - /// // we can still use `iter`, as there are more elements. - /// assert_eq!(iter.next(), Some(&2)); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn any(&mut self, mut f: F) -> bool where - Self: Sized, - F: FnMut(Self::Item) -> bool - { - for x in self { - if f(x) { - return true; - } - } - false - } - - /// Searches for an element of an iterator that satisfies a predicate. - /// - /// `find()` takes a closure that returns `true` or `false`. It applies - /// this closure to each element of the iterator, and if any of them return - /// `true`, then `find()` returns `Some(element)`. If they all return - /// `false`, it returns `None`. - /// - /// `find()` is short-circuiting; in other words, it will stop processing - /// as soon as the closure returns `true`. - /// - /// Because `find()` takes a reference, and many iterators iterate over - /// references, this leads to a possibly confusing situation where the - /// argument is a double reference. You can see this effect in the - /// examples below, with `&&x`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// assert_eq!(a.iter().find(|&&x| x == 2), Some(&2)); - /// - /// assert_eq!(a.iter().find(|&&x| x == 5), None); - /// ``` - /// - /// Stopping at the first `true`: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter(); - /// - /// assert_eq!(iter.find(|&&x| x == 2), Some(&2)); - /// - /// // we can still use `iter`, as there are more elements. - /// assert_eq!(iter.next(), Some(&3)); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn find
<P>
(&mut self, mut predicate: P) -> Option where - Self: Sized, - P: FnMut(&Self::Item) -> bool, - { - for x in self { - if predicate(&x) { return Some(x) } - } - None - } - - /// Searches for an element in an iterator, returning its index. - /// - /// `position()` takes a closure that returns `true` or `false`. It applies - /// this closure to each element of the iterator, and if one of them - /// returns `true`, then `position()` returns `Some(index)`. If all of - /// them return `false`, it returns `None`. - /// - /// `position()` is short-circuiting; in other words, it will stop - /// processing as soon as it finds a `true`. - /// - /// # Overflow Behavior - /// - /// The method does no guarding against overflows, so if there are more - /// than `usize::MAX` non-matching elements, it either produces the wrong - /// result or panics. If debug assertions are enabled, a panic is - /// guaranteed. - /// - /// # Panics - /// - /// This function might panic if the iterator has more than `usize::MAX` - /// non-matching elements. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// assert_eq!(a.iter().position(|&x| x == 2), Some(1)); - /// - /// assert_eq!(a.iter().position(|&x| x == 5), None); - /// ``` - /// - /// Stopping at the first `true`: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter(); - /// - /// assert_eq!(iter.position(|&x| x == 2), Some(1)); - /// - /// // we can still use `iter`, as there are more elements. - /// assert_eq!(iter.next(), Some(&3)); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn position
<P>
(&mut self, mut predicate: P) -> Option where - Self: Sized, - P: FnMut(Self::Item) -> bool, - { - // `enumerate` might overflow. - for (i, x) in self.enumerate() { - if predicate(x) { - return Some(i); - } - } - None - } - - /// Searches for an element in an iterator from the right, returning its - /// index. - /// - /// `rposition()` takes a closure that returns `true` or `false`. It applies - /// this closure to each element of the iterator, starting from the end, - /// and if one of them returns `true`, then `rposition()` returns - /// `Some(index)`. If all of them return `false`, it returns `None`. - /// - /// `rposition()` is short-circuiting; in other words, it will stop - /// processing as soon as it finds a `true`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// assert_eq!(a.iter().rposition(|&x| x == 3), Some(2)); - /// - /// assert_eq!(a.iter().rposition(|&x| x == 5), None); - /// ``` - /// - /// Stopping at the first `true`: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter(); - /// - /// assert_eq!(iter.rposition(|&x| x == 2), Some(1)); - /// - /// // we can still use `iter`, as there are more elements. - /// assert_eq!(iter.next(), Some(&1)); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn rposition

(self) -> P where - P: Mul + One, - Self: Sized, - { - self.fold(One::one(), |p, e| p * e) - } - - /// Lexicographically compares the elements of this `Iterator` with those - /// of another. - #[stable(feature = "iter_order", since = "1.5.0")] - fn cmp(mut self, other: I) -> Ordering where - I: IntoIterator, - Self::Item: Ord, - Self: Sized, - { - let mut other = other.into_iter(); - - loop { - match (self.next(), other.next()) { - (None, None) => return Ordering::Equal, - (None, _ ) => return Ordering::Less, - (_ , None) => return Ordering::Greater, - (Some(x), Some(y)) => match x.cmp(&y) { - Ordering::Equal => (), - non_eq => return non_eq, - }, - } - } - } - - /// Lexicographically compares the elements of this `Iterator` with those - /// of another. - #[stable(feature = "iter_order", since = "1.5.0")] - fn partial_cmp(mut self, other: I) -> Option where - I: IntoIterator, - Self::Item: PartialOrd, - Self: Sized, - { - let mut other = other.into_iter(); - - loop { - match (self.next(), other.next()) { - (None, None) => return Some(Ordering::Equal), - (None, _ ) => return Some(Ordering::Less), - (_ , None) => return Some(Ordering::Greater), - (Some(x), Some(y)) => match x.partial_cmp(&y) { - Some(Ordering::Equal) => (), - non_eq => return non_eq, - }, - } - } - } - - /// Determines if the elements of this `Iterator` are equal to those of - /// another. - #[stable(feature = "iter_order", since = "1.5.0")] - fn eq(mut self, other: I) -> bool where - I: IntoIterator, - Self::Item: PartialEq, - Self: Sized, - { - let mut other = other.into_iter(); - - loop { - match (self.next(), other.next()) { - (None, None) => return true, - (None, _) | (_, None) => return false, - (Some(x), Some(y)) => if x != y { return false }, - } - } - } - - /// Determines if the elements of this `Iterator` are unequal to those of - /// another. - #[stable(feature = "iter_order", since = "1.5.0")] - fn ne(mut self, other: I) -> bool where - I: IntoIterator, - Self::Item: PartialEq, - Self: Sized, - { - let mut other = other.into_iter(); - - loop { - match (self.next(), other.next()) { - (None, None) => return false, - (None, _) | (_, None) => return true, - (Some(x), Some(y)) => if x.ne(&y) { return true }, - } - } - } - - /// Determines if the elements of this `Iterator` are lexicographically - /// less than those of another. - #[stable(feature = "iter_order", since = "1.5.0")] - fn lt(mut self, other: I) -> bool where - I: IntoIterator, - Self::Item: PartialOrd, - Self: Sized, - { - let mut other = other.into_iter(); - - loop { - match (self.next(), other.next()) { - (None, None) => return false, - (None, _ ) => return true, - (_ , None) => return false, - (Some(x), Some(y)) => { - match x.partial_cmp(&y) { - Some(Ordering::Less) => return true, - Some(Ordering::Equal) => {} - Some(Ordering::Greater) => return false, - None => return false, - } - }, - } - } - } - - /// Determines if the elements of this `Iterator` are lexicographically - /// less or equal to those of another. 
- #[stable(feature = "iter_order", since = "1.5.0")] - fn le(mut self, other: I) -> bool where - I: IntoIterator, - Self::Item: PartialOrd, - Self: Sized, - { - let mut other = other.into_iter(); - - loop { - match (self.next(), other.next()) { - (None, None) => return true, - (None, _ ) => return true, - (_ , None) => return false, - (Some(x), Some(y)) => { - match x.partial_cmp(&y) { - Some(Ordering::Less) => return true, - Some(Ordering::Equal) => {} - Some(Ordering::Greater) => return false, - None => return false, - } - }, - } - } - } - - /// Determines if the elements of this `Iterator` are lexicographically - /// greater than those of another. - #[stable(feature = "iter_order", since = "1.5.0")] - fn gt(mut self, other: I) -> bool where - I: IntoIterator, - Self::Item: PartialOrd, - Self: Sized, - { - let mut other = other.into_iter(); - - loop { - match (self.next(), other.next()) { - (None, None) => return false, - (None, _ ) => return false, - (_ , None) => return true, - (Some(x), Some(y)) => { - match x.partial_cmp(&y) { - Some(Ordering::Less) => return false, - Some(Ordering::Equal) => {} - Some(Ordering::Greater) => return true, - None => return false, - } - } - } - } - } - - /// Determines if the elements of this `Iterator` are lexicographically - /// greater than or equal to those of another. - #[stable(feature = "iter_order", since = "1.5.0")] - fn ge(mut self, other: I) -> bool where - I: IntoIterator, - Self::Item: PartialOrd, - Self: Sized, - { - let mut other = other.into_iter(); - - loop { - match (self.next(), other.next()) { - (None, None) => return true, - (None, _ ) => return false, - (_ , None) => return true, - (Some(x), Some(y)) => { - match x.partial_cmp(&y) { - Some(Ordering::Less) => return false, - Some(Ordering::Equal) => {} - Some(Ordering::Greater) => return true, - None => return false, - } - }, - } - } - } -} - -/// Select an element from an iterator based on the given projection -/// and "comparison" function. -/// -/// This is an idiosyncratic helper to try to factor out the -/// commonalities of {max,min}{,_by}. In particular, this avoids -/// having to implement optimizations several times. -#[inline] -fn select_fold1(mut it: I, - mut f_proj: FProj, - mut f_cmp: FCmp) -> Option<(B, I::Item)> - where I: Iterator, - FProj: FnMut(&I::Item) -> B, - FCmp: FnMut(&B, &I::Item, &B, &I::Item) -> bool -{ - // start with the first element as our selection. This avoids - // having to use `Option`s inside the loop, translating to a - // sizeable performance gain (6x in one case). - it.next().map(|mut sel| { - let mut sel_p = f_proj(&sel); - - for x in it { - let x_p = f_proj(&x); - if f_cmp(&sel_p, &sel, &x_p, &x) { - sel = x; - sel_p = x_p; - } - } - (sel_p, sel) - }) -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl<'a, I: Iterator + ?Sized> Iterator for &'a mut I { - type Item = I::Item; - fn next(&mut self) -> Option { (**self).next() } - fn size_hint(&self) -> (usize, Option) { (**self).size_hint() } -} - -/// Conversion from an `Iterator`. -/// -/// By implementing `FromIterator` for a type, you define how it will be -/// created from an iterator. This is common for types which describe a -/// collection of some kind. -/// -/// `FromIterator`'s [`from_iter()`] is rarely called explicitly, and is instead -/// used through [`Iterator`]'s [`collect()`] method. See [`collect()`]'s -/// documentation for more examples. 
-/// -/// [`from_iter()`]: #tymethod.from_iter -/// [`Iterator`]: trait.Iterator.html -/// [`collect()`]: trait.Iterator.html#method.collect -/// -/// See also: [`IntoIterator`]. -/// -/// [`IntoIterator`]: trait.IntoIterator.html -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// use std::iter::FromIterator; -/// -/// let five_fives = std::iter::repeat(5).take(5); -/// -/// let v = Vec::from_iter(five_fives); -/// -/// assert_eq!(v, vec![5, 5, 5, 5, 5]); -/// ``` -/// -/// Using [`collect()`] to implicitly use `FromIterator`: -/// -/// ``` -/// let five_fives = std::iter::repeat(5).take(5); -/// -/// let v: Vec = five_fives.collect(); -/// -/// assert_eq!(v, vec![5, 5, 5, 5, 5]); -/// ``` -/// -/// Implementing `FromIterator` for your type: -/// -/// ``` -/// use std::iter::FromIterator; -/// -/// // A sample collection, that's just a wrapper over Vec -/// #[derive(Debug)] -/// struct MyCollection(Vec); -/// -/// // Let's give it some methods so we can create one and add things -/// // to it. -/// impl MyCollection { -/// fn new() -> MyCollection { -/// MyCollection(Vec::new()) -/// } -/// -/// fn add(&mut self, elem: i32) { -/// self.0.push(elem); -/// } -/// } -/// -/// // and we'll implement FromIterator -/// impl FromIterator for MyCollection { -/// fn from_iter>(iter: I) -> Self { -/// let mut c = MyCollection::new(); -/// -/// for i in iter { -/// c.add(i); -/// } -/// -/// c -/// } -/// } -/// -/// // Now we can make a new iterator... -/// let iter = (0..5).into_iter(); -/// -/// // ... and make a MyCollection out of it -/// let c = MyCollection::from_iter(iter); -/// -/// assert_eq!(c.0, vec![0, 1, 2, 3, 4]); -/// -/// // collect works too! -/// -/// let iter = (0..5).into_iter(); -/// let c: MyCollection = iter.collect(); -/// -/// assert_eq!(c.0, vec![0, 1, 2, 3, 4]); -/// ``` -#[stable(feature = "rust1", since = "1.0.0")] -#[rustc_on_unimplemented="a collection of type `{Self}` cannot be \ - built from an iterator over elements of type `{A}`"] -pub trait FromIterator: Sized { - /// Creates a value from an iterator. - /// - /// See the [module-level documentation] for more. - /// - /// [module-level documentation]: trait.FromIterator.html - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// use std::iter::FromIterator; - /// - /// let five_fives = std::iter::repeat(5).take(5); - /// - /// let v = Vec::from_iter(five_fives); - /// - /// assert_eq!(v, vec![5, 5, 5, 5, 5]); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - fn from_iter>(iter: T) -> Self; -} - -/// Conversion into an `Iterator`. -/// -/// By implementing `IntoIterator` for a type, you define how it will be -/// converted to an iterator. This is common for types which describe a -/// collection of some kind. -/// -/// One benefit of implementing `IntoIterator` is that your type will [work -/// with Rust's `for` loop syntax](index.html#for-loops-and-intoiterator). -/// -/// See also: [`FromIterator`]. 
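A rough sketch of why implementing `IntoIterator` is what makes `for` loops work: a `for` loop is approximately a call to `into_iter()` followed by repeated `next()` calls (simplified; the real desugaring also handles patterns and labels):

```rust
fn main() {
    let v = vec![1, 2, 3];
    let mut sum = 0;

    // roughly what `for x in v { sum += x; }` expands to
    let mut iter = IntoIterator::into_iter(v);
    while let Some(x) = iter.next() {
        sum += x;
    }

    assert_eq!(sum, 6);
}
```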
-/// -/// [`FromIterator`]: trait.FromIterator.html -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// let v = vec![1, 2, 3]; -/// -/// let mut iter = v.into_iter(); -/// -/// let n = iter.next(); -/// assert_eq!(Some(1), n); -/// -/// let n = iter.next(); -/// assert_eq!(Some(2), n); -/// -/// let n = iter.next(); -/// assert_eq!(Some(3), n); -/// -/// let n = iter.next(); -/// assert_eq!(None, n); -/// ``` -/// -/// Implementing `IntoIterator` for your type: -/// -/// ``` -/// // A sample collection, that's just a wrapper over Vec -/// #[derive(Debug)] -/// struct MyCollection(Vec); -/// -/// // Let's give it some methods so we can create one and add things -/// // to it. -/// impl MyCollection { -/// fn new() -> MyCollection { -/// MyCollection(Vec::new()) -/// } -/// -/// fn add(&mut self, elem: i32) { -/// self.0.push(elem); -/// } -/// } -/// -/// // and we'll implement IntoIterator -/// impl IntoIterator for MyCollection { -/// type Item = i32; -/// type IntoIter = ::std::vec::IntoIter; -/// -/// fn into_iter(self) -> Self::IntoIter { -/// self.0.into_iter() -/// } -/// } -/// -/// // Now we can make a new collection... -/// let mut c = MyCollection::new(); -/// -/// // ... add some stuff to it ... -/// c.add(0); -/// c.add(1); -/// c.add(2); -/// -/// // ... and then turn it into an Iterator: -/// for (i, n) in c.into_iter().enumerate() { -/// assert_eq!(i as i32, n); -/// } -/// ``` -#[stable(feature = "rust1", since = "1.0.0")] -pub trait IntoIterator { - /// The type of the elements being iterated over. - #[stable(feature = "rust1", since = "1.0.0")] - type Item; - - /// Which kind of iterator are we turning this into? - #[stable(feature = "rust1", since = "1.0.0")] - type IntoIter: Iterator; - - /// Creates an iterator from a value. - /// - /// See the [module-level documentation] for more. - /// - /// [module-level documentation]: trait.IntoIterator.html - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let v = vec![1, 2, 3]; - /// - /// let mut iter = v.into_iter(); - /// - /// let n = iter.next(); - /// assert_eq!(Some(1), n); - /// - /// let n = iter.next(); - /// assert_eq!(Some(2), n); - /// - /// let n = iter.next(); - /// assert_eq!(Some(3), n); - /// - /// let n = iter.next(); - /// assert_eq!(None, n); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - fn into_iter(self) -> Self::IntoIter; -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl IntoIterator for I { - type Item = I::Item; - type IntoIter = I; - - fn into_iter(self) -> I { - self - } -} - -/// Extend a collection with the contents of an iterator. -/// -/// Iterators produce a series of values, and collections can also be thought -/// of as a series of values. The `Extend` trait bridges this gap, allowing you -/// to extend a collection by including the contents of that iterator. -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// // You can extend a String with some chars: -/// let mut message = String::from("The first three letters are: "); -/// -/// message.extend(&['a', 'b', 'c']); -/// -/// assert_eq!("abc", &message[29..32]); -/// ``` -/// -/// Implementing `Extend`: -/// -/// ``` -/// // A sample collection, that's just a wrapper over Vec -/// #[derive(Debug)] -/// struct MyCollection(Vec); -/// -/// // Let's give it some methods so we can create one and add things -/// // to it. 
-/// impl MyCollection { -/// fn new() -> MyCollection { -/// MyCollection(Vec::new()) -/// } -/// -/// fn add(&mut self, elem: i32) { -/// self.0.push(elem); -/// } -/// } -/// -/// // since MyCollection has a list of i32s, we implement Extend for i32 -/// impl Extend for MyCollection { -/// -/// // This is a bit simpler with the concrete type signature: we can call -/// // extend on anything which can be turned into an Iterator which gives -/// // us i32s. Because we need i32s to put into MyCollection. -/// fn extend>(&mut self, iter: T) { -/// -/// // The implementation is very straightforward: loop through the -/// // iterator, and add() each element to ourselves. -/// for elem in iter { -/// self.add(elem); -/// } -/// } -/// } -/// -/// let mut c = MyCollection::new(); -/// -/// c.add(5); -/// c.add(6); -/// c.add(7); -/// -/// // let's extend our collection with three more numbers -/// c.extend(vec![1, 2, 3]); -/// -/// // we've added these elements onto the end -/// assert_eq!("MyCollection([5, 6, 7, 1, 2, 3])", format!("{:?}", c)); -/// ``` -#[stable(feature = "rust1", since = "1.0.0")] -pub trait Extend { - /// Extends a collection with the contents of an iterator. - /// - /// As this is the only method for this trait, the [trait-level] docs - /// contain more details. - /// - /// [trait-level]: trait.Extend.html - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// // You can extend a String with some chars: - /// let mut message = String::from("abc"); - /// - /// message.extend(['d', 'e', 'f'].iter()); - /// - /// assert_eq!("abcdef", &message); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - fn extend>(&mut self, iter: T); -} - -/// An iterator able to yield elements from both ends. -/// -/// Something that implements `DoubleEndedIterator` has one extra capability -/// over something that implements [`Iterator`]: the ability to also take -/// `Item`s from the back, as well as the front. -/// -/// It is important to note that both back and forth work on the same range, -/// and do not cross: iteration is over when they meet in the middle. -/// -/// In a similar fashion to the [`Iterator`] protocol, once a -/// `DoubleEndedIterator` returns `None` from a `next_back()`, calling it again -/// may or may not ever return `Some` again. `next()` and `next_back()` are -/// interchangable for this purpose. -/// -/// [`Iterator`]: trait.Iterator.html -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// let numbers = vec![1, 2, 3]; -/// -/// let mut iter = numbers.iter(); -/// -/// assert_eq!(Some(&1), iter.next()); -/// assert_eq!(Some(&3), iter.next_back()); -/// assert_eq!(Some(&2), iter.next_back()); -/// assert_eq!(None, iter.next()); -/// assert_eq!(None, iter.next_back()); -/// ``` -#[stable(feature = "rust1", since = "1.0.0")] -pub trait DoubleEndedIterator: Iterator { - /// An iterator able to yield elements from both ends. - /// - /// As this is the only method for this trait, the [trait-level] docs - /// contain more details. 
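For a custom type, the `DoubleEndedIterator` contract described above only asks for `next_back()` on top of an existing `Iterator` impl, with both ends walking the same range until they meet. A minimal, made-up sketch:

```rust
// A made-up counter that can be consumed from both ends.
struct TwoEnded {
    front: u32,
    back: u32, // exclusive upper end
}

impl Iterator for TwoEnded {
    type Item = u32;

    fn next(&mut self) -> Option<u32> {
        if self.front < self.back {
            let v = self.front;
            self.front += 1;
            Some(v)
        } else {
            None
        }
    }
}

impl DoubleEndedIterator for TwoEnded {
    fn next_back(&mut self) -> Option<u32> {
        if self.front < self.back {
            self.back -= 1;
            Some(self.back)
        } else {
            None
        }
    }
}

fn main() {
    let mut it = TwoEnded { front: 0, back: 3 };

    // the two ends share one range and meet in the middle
    assert_eq!(it.next(), Some(0));
    assert_eq!(it.next_back(), Some(2));
    assert_eq!(it.next(), Some(1));
    assert_eq!(it.next(), None);
    assert_eq!(it.next_back(), None);
}
```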
- /// - /// [trait-level]: trait.DoubleEndedIterator.html - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let numbers = vec![1, 2, 3]; - /// - /// let mut iter = numbers.iter(); - /// - /// assert_eq!(Some(&1), iter.next()); - /// assert_eq!(Some(&3), iter.next_back()); - /// assert_eq!(Some(&2), iter.next_back()); - /// assert_eq!(None, iter.next()); - /// assert_eq!(None, iter.next_back()); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - fn next_back(&mut self) -> Option; -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl<'a, I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for &'a mut I { - fn next_back(&mut self) -> Option { (**self).next_back() } -} - -/// An iterator that knows its exact length. -/// -/// Many [`Iterator`]s don't know how many times they will iterate, but some do. -/// If an iterator knows how many times it can iterate, providing access to -/// that information can be useful. For example, if you want to iterate -/// backwards, a good start is to know where the end is. -/// -/// When implementing an `ExactSizeIterator`, You must also implement -/// [`Iterator`]. When doing so, the implementation of [`size_hint()`] *must* -/// return the exact size of the iterator. -/// -/// [`Iterator`]: trait.Iterator.html -/// [`size_hint()`]: trait.Iterator.html#method.size_hint -/// -/// The [`len()`] method has a default implementation, so you usually shouldn't -/// implement it. However, you may be able to provide a more performant -/// implementation than the default, so overriding it in this case makes sense. -/// -/// [`len()`]: #method.len -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// // a finite range knows exactly how many times it will iterate -/// let five = 0..5; -/// -/// assert_eq!(5, five.len()); -/// ``` -/// -/// In the [module level docs][moddocs], we implemented an [`Iterator`], -/// `Counter`. Let's implement `ExactSizeIterator` for it as well: -/// -/// [moddocs]: index.html -/// -/// ``` -/// # struct Counter { -/// # count: usize, -/// # } -/// # impl Counter { -/// # fn new() -> Counter { -/// # Counter { count: 0 } -/// # } -/// # } -/// # impl Iterator for Counter { -/// # type Item = usize; -/// # fn next(&mut self) -> Option { -/// # self.count += 1; -/// # if self.count < 6 { -/// # Some(self.count) -/// # } else { -/// # None -/// # } -/// # } -/// # } -/// impl ExactSizeIterator for Counter { -/// // We already have the number of iterations, so we can use it directly. -/// fn len(&self) -> usize { -/// self.count -/// } -/// } -/// -/// // And now we can use it! -/// -/// let counter = Counter::new(); -/// -/// assert_eq!(0, counter.len()); -/// ``` -#[stable(feature = "rust1", since = "1.0.0")] -pub trait ExactSizeIterator: Iterator { - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - /// Returns the exact number of times the iterator will iterate. - /// - /// This method has a default implementation, so you usually should not - /// implement it directly. However, if you can provide a more efficient - /// implementation, you can do so. See the [trait-level] docs for an - /// example. - /// - /// This function has the same safety guarantees as the [`size_hint()`] - /// function. 
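Because `len()` defaults to reading `size_hint()` (as the implementation just below shows), a well-behaved `ExactSizeIterator` reports matching bounds. A tiny sketch with a range:

```rust
fn main() {
    let it = 0..5;

    // for an exact iterator, both size_hint() bounds agree with len()
    assert_eq!(it.size_hint(), (5, Some(5)));
    assert_eq!(it.len(), 5);
}
```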
- /// - /// [trait-level]: trait.ExactSizeIterator.html - /// [`size_hint()`]: trait.Iterator.html#method.size_hint - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// // a finite range knows exactly how many times it will iterate - /// let five = 0..5; - /// - /// assert_eq!(5, five.len()); - /// ``` - fn len(&self) -> usize { - let (lower, upper) = self.size_hint(); - // Note: This assertion is overly defensive, but it checks the invariant - // guaranteed by the trait. If this trait were rust-internal, - // we could use debug_assert!; assert_eq! will check all Rust user - // implementations too. - assert_eq!(upper, Some(lower)); - lower - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for &'a mut I {} - -// All adaptors that preserve the size of the wrapped iterator are fine -// Adaptors that may overflow in `size_hint` are not, i.e. `Chain`. -#[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Enumerate where I: ExactSizeIterator {} -#[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Inspect where - F: FnMut(&I::Item), -{} -#[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Rev - where I: ExactSizeIterator + DoubleEndedIterator {} -#[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Map where - F: FnMut(I::Item) -> B, -{} -#[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Zip - where A: ExactSizeIterator, B: ExactSizeIterator {} - -/// An double-ended iterator with the direction inverted. -/// -/// This `struct` is created by the [`rev()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`rev()`]: trait.Iterator.html#method.rev -/// [`Iterator`]: trait.Iterator.html -#[derive(Clone, Debug)] -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -pub struct Rev { - iter: T -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Rev where I: DoubleEndedIterator { - type Item = ::Item; - - #[inline] - fn next(&mut self) -> Option<::Item> { self.iter.next_back() } - #[inline] - fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Rev where I: DoubleEndedIterator { - #[inline] - fn next_back(&mut self) -> Option<::Item> { self.iter.next() } -} - -/// An iterator that clones the elements of an underlying iterator. -/// -/// This `struct` is created by the [`cloned()`] method on [`Iterator`]. See its -/// documentation for more. 
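The blanket impls listed above mean that length-preserving adaptors keep `ExactSizeIterator`, so `len()` remains available after mapping, reversing, enumerating, or zipping. A short sketch:

```rust
fn main() {
    let a = [1, 2, 3];
    let b = [10, 20];

    // length-preserving adaptors keep the exact length around
    assert_eq!(a.iter().map(|x| x * 2).len(), 3);
    assert_eq!(a.iter().rev().enumerate().len(), 3);

    // Zip is exact too: its length is that of the shorter side
    assert_eq!(a.iter().zip(b.iter()).len(), 2);
}
```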
-/// -/// [`cloned()`]: trait.Iterator.html#method.cloned -/// [`Iterator`]: trait.Iterator.html -#[stable(feature = "iter_cloned", since = "1.1.0")] -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[derive(Clone, Debug)] -pub struct Cloned { - it: I, -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl<'a, I, T: 'a> Iterator for Cloned - where I: Iterator, T: Clone -{ - type Item = T; - - fn next(&mut self) -> Option { - self.it.next().cloned() - } - - fn size_hint(&self) -> (usize, Option) { - self.it.size_hint() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl<'a, I, T: 'a> DoubleEndedIterator for Cloned - where I: DoubleEndedIterator, T: Clone -{ - fn next_back(&mut self) -> Option { - self.it.next_back().cloned() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl<'a, I, T: 'a> ExactSizeIterator for Cloned - where I: ExactSizeIterator, T: Clone -{} - -/// An iterator that repeats endlessly. -/// -/// This `struct` is created by the [`cycle()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`cycle()`]: trait.Iterator.html#method.cycle -/// [`Iterator`]: trait.Iterator.html -#[derive(Clone, Debug)] -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -pub struct Cycle { - orig: I, - iter: I, -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Cycle where I: Clone + Iterator { - type Item = ::Item; - - #[inline] - fn next(&mut self) -> Option<::Item> { - match self.iter.next() { - None => { self.iter = self.orig.clone(); self.iter.next() } - y => y - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - // the cycle iterator is either empty or infinite - match self.orig.size_hint() { - sz @ (0, Some(0)) => sz, - (0, _) => (0, None), - _ => (usize::MAX, None) - } - } -} - -/// An iterator that strings two iterators together. -/// -/// This `struct` is created by the [`chain()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`chain()`]: trait.Iterator.html#method.chain -/// [`Iterator`]: trait.Iterator.html -#[derive(Clone, Debug)] -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -pub struct Chain { - a: A, - b: B, - state: ChainState, -} - -// The iterator protocol specifies that iteration ends with the return value -// `None` from `.next()` (or `.next_back()`) and it is unspecified what -// further calls return. The chain adaptor must account for this since it uses -// two subiterators. -// -// It uses three states: -// -// - Both: `a` and `b` are remaining -// - Front: `a` remaining -// - Back: `b` remaining -// -// The fourth state (neither iterator is remaining) only occurs after Chain has -// returned None once, so we don't need to store this state. -#[derive(Clone, Debug)] -enum ChainState { - // both front and back iterator are remaining - Both, - // only front is remaining - Front, - // only back is remaining - Back, -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Chain where - A: Iterator, - B: Iterator -{ - type Item = A::Item; - - #[inline] - fn next(&mut self) -> Option { - match self.state { - ChainState::Both => match self.a.next() { - elt @ Some(..) 
=> elt, - None => { - self.state = ChainState::Back; - self.b.next() - } - }, - ChainState::Front => self.a.next(), - ChainState::Back => self.b.next(), - } - } - - #[inline] - fn count(self) -> usize { - match self.state { - ChainState::Both => self.a.count() + self.b.count(), - ChainState::Front => self.a.count(), - ChainState::Back => self.b.count(), - } - } - - #[inline] - fn nth(&mut self, mut n: usize) -> Option { - match self.state { - ChainState::Both | ChainState::Front => { - for x in self.a.by_ref() { - if n == 0 { - return Some(x) - } - n -= 1; - } - if let ChainState::Both = self.state { - self.state = ChainState::Back; - } - } - ChainState::Back => {} - } - if let ChainState::Back = self.state { - self.b.nth(n) - } else { - None - } - } - - #[inline] - fn last(self) -> Option { - match self.state { - ChainState::Both => { - // Must exhaust a before b. - let a_last = self.a.last(); - let b_last = self.b.last(); - b_last.or(a_last) - }, - ChainState::Front => self.a.last(), - ChainState::Back => self.b.last() - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (a_lower, a_upper) = self.a.size_hint(); - let (b_lower, b_upper) = self.b.size_hint(); - - let lower = a_lower.saturating_add(b_lower); - - let upper = match (a_upper, b_upper) { - (Some(x), Some(y)) => x.checked_add(y), - _ => None - }; - - (lower, upper) - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Chain where - A: DoubleEndedIterator, - B: DoubleEndedIterator, -{ - #[inline] - fn next_back(&mut self) -> Option { - match self.state { - ChainState::Both => match self.b.next_back() { - elt @ Some(..) => elt, - None => { - self.state = ChainState::Front; - self.a.next_back() - } - }, - ChainState::Front => self.a.next_back(), - ChainState::Back => self.b.next_back(), - } - } -} - -/// An iterator that iterates two other iterators simultaneously. -/// -/// This `struct` is created by the [`zip()`] method on [`Iterator`]. See its -/// documentation for more. 
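// [Editorial sketch -- not part of the upstream patch.]
// The `Chain` adaptor above tracks which half is still live in `ChainState`,
// so `count()`, `nth()` and `size_hint()` all combine the two halves. A small
// check against the stable API:
fn main() {
    let a = [1, 2, 3];
    let b = [4, 5];

    // Bounds are added (saturating on the lower bound, checked on the upper).
    assert_eq!(a.iter().chain(b.iter()).size_hint(), (5, Some(5)));
    assert_eq!(a.iter().chain(b.iter()).count(), 5);

    // nth() walks off the end of the first half and into the second.
    let mut it = a.iter().chain(b.iter());
    assert_eq!(it.nth(3), Some(&4));
    assert_eq!(it.next(), Some(&5));
}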
-/// -/// [`zip()`]: trait.Iterator.html#method.zip -/// [`Iterator`]: trait.Iterator.html -#[derive(Clone, Debug)] -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -pub struct Zip { - a: A, - b: B -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Zip where A: Iterator, B: Iterator -{ - type Item = (A::Item, B::Item); - - #[inline] - fn next(&mut self) -> Option<(A::Item, B::Item)> { - self.a.next().and_then(|x| { - self.b.next().and_then(|y| { - Some((x, y)) - }) - }) - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (a_lower, a_upper) = self.a.size_hint(); - let (b_lower, b_upper) = self.b.size_hint(); - - let lower = cmp::min(a_lower, b_lower); - - let upper = match (a_upper, b_upper) { - (Some(x), Some(y)) => Some(cmp::min(x,y)), - (Some(x), None) => Some(x), - (None, Some(y)) => Some(y), - (None, None) => None - }; - - (lower, upper) - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Zip where - A: DoubleEndedIterator + ExactSizeIterator, - B: DoubleEndedIterator + ExactSizeIterator, -{ - #[inline] - fn next_back(&mut self) -> Option<(A::Item, B::Item)> { - let a_sz = self.a.len(); - let b_sz = self.b.len(); - if a_sz != b_sz { - // Adjust a, b to equal length - if a_sz > b_sz { - for _ in 0..a_sz - b_sz { self.a.next_back(); } - } else { - for _ in 0..b_sz - a_sz { self.b.next_back(); } - } - } - match (self.a.next_back(), self.b.next_back()) { - (Some(x), Some(y)) => Some((x, y)), - (None, None) => None, - _ => unreachable!(), - } - } -} - -/// An iterator that maps the values of `iter` with `f`. -/// -/// This `struct` is created by the [`map()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`map()`]: trait.Iterator.html#method.map -/// [`Iterator`]: trait.Iterator.html -/// -/// # Notes about side effects -/// -/// The [`map()`] iterator implements [`DoubleEndedIterator`], meaning that -/// you can also [`map()`] backwards: -/// -/// ```rust -/// let v: Vec = vec![1, 2, 3].into_iter().rev().map(|x| x + 1).collect(); -/// -/// assert_eq!(v, [4, 3, 2]); -/// ``` -/// -/// [`DoubleEndedIterator`]: trait.DoubleEndedIterator.html -/// -/// But if your closure has state, iterating backwards may act in a way you do -/// not expect. Let's go through an example. First, in the forward direction: -/// -/// ```rust -/// let mut c = 0; -/// -/// for pair in vec!['a', 'b', 'c'].into_iter() -/// .map(|letter| { c += 1; (letter, c) }) { -/// println!("{:?}", pair); -/// } -/// ``` -/// -/// This will print "('a', 1), ('b', 2), ('c', 3)". -/// -/// Now consider this twist where we add a call to `rev`. This version will -/// print `('c', 1), ('b', 2), ('a', 3)`. Note that the letters are reversed, -/// but the values of the counter still go in order. This is because `map()` is -/// still being called lazilly on each item, but we are popping items off the -/// back of the vector now, instead of shifting them from the front. 
-/// -/// ```rust -/// let mut c = 0; -/// -/// for pair in vec!['a', 'b', 'c'].into_iter() -/// .map(|letter| { c += 1; (letter, c) }) -/// .rev() { -/// println!("{:?}", pair); -/// } -/// ``` -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -#[derive(Clone)] -pub struct Map { - iter: I, - f: F, -} - -#[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for Map { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Map") - .field("iter", &self.iter) - .finish() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Map where F: FnMut(I::Item) -> B { - type Item = B; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().map(&mut self.f) - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Map where - F: FnMut(I::Item) -> B, -{ - #[inline] - fn next_back(&mut self) -> Option { - self.iter.next_back().map(&mut self.f) - } -} - -/// An iterator that filters the elements of `iter` with `predicate`. -/// -/// This `struct` is created by the [`filter()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`filter()`]: trait.Iterator.html#method.filter -/// [`Iterator`]: trait.Iterator.html -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -#[derive(Clone)] -pub struct Filter { - iter: I, - predicate: P, -} - -#[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for Filter { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Filter") - .field("iter", &self.iter) - .finish() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Filter where P: FnMut(&I::Item) -> bool { - type Item = I::Item; - - #[inline] - fn next(&mut self) -> Option { - for x in self.iter.by_ref() { - if (self.predicate)(&x) { - return Some(x); - } - } - None - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (_, upper) = self.iter.size_hint(); - (0, upper) // can't know a lower bound, due to the predicate - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Filter - where P: FnMut(&I::Item) -> bool, -{ - #[inline] - fn next_back(&mut self) -> Option { - for x in self.iter.by_ref().rev() { - if (self.predicate)(&x) { - return Some(x); - } - } - None - } -} - -/// An iterator that uses `f` to both filter and map elements from `iter`. -/// -/// This `struct` is created by the [`filter_map()`] method on [`Iterator`]. See its -/// documentation for more. 
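// [Editorial sketch -- not part of the upstream patch.]
// As the "can't know a lower bound" comment in `Filter::size_hint()` above
// notes, the predicate is opaque to the adaptor, so only the upper bound of
// the inner iterator survives. A quick check:
fn main() {
    let evens = (0..10).filter(|x| x % 2 == 0);
    // The underlying range reports (10, Some(10)); after filtering we can
    // still rule out more than ten items but cannot promise any at all.
    assert_eq!(evens.size_hint(), (0, Some(10)));
}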
-/// -/// [`filter_map()`]: trait.Iterator.html#method.filter_map -/// [`Iterator`]: trait.Iterator.html -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -#[derive(Clone)] -pub struct FilterMap { - iter: I, - f: F, -} - -#[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for FilterMap { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("FilterMap") - .field("iter", &self.iter) - .finish() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for FilterMap - where F: FnMut(I::Item) -> Option, -{ - type Item = B; - - #[inline] - fn next(&mut self) -> Option { - for x in self.iter.by_ref() { - if let Some(y) = (self.f)(x) { - return Some(y); - } - } - None - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (_, upper) = self.iter.size_hint(); - (0, upper) // can't know a lower bound, due to the predicate - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for FilterMap - where F: FnMut(I::Item) -> Option, -{ - #[inline] - fn next_back(&mut self) -> Option { - for x in self.iter.by_ref().rev() { - if let Some(y) = (self.f)(x) { - return Some(y); - } - } - None - } -} - -/// An iterator that yields the current count and the element during iteration. -/// -/// This `struct` is created by the [`enumerate()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`enumerate()`]: trait.Iterator.html#method.enumerate -/// [`Iterator`]: trait.Iterator.html -#[derive(Clone, Debug)] -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -pub struct Enumerate { - iter: I, - count: usize, -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Enumerate where I: Iterator { - type Item = (usize, ::Item); - - /// # Overflow Behavior - /// - /// The method does no guarding against overflows, so enumerating more than - /// `usize::MAX` elements either produces the wrong result or panics. If - /// debug assertions are enabled, a panic is guaranteed. - /// - /// # Panics - /// - /// Might panic if the index of the element overflows a `usize`. - #[inline] - fn next(&mut self) -> Option<(usize, ::Item)> { - self.iter.next().map(|a| { - let ret = (self.count, a); - // Possible undefined overflow. - self.count += 1; - ret - }) - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } - - #[inline] - fn nth(&mut self, n: usize) -> Option<(usize, I::Item)> { - self.iter.nth(n).map(|a| { - let i = self.count + n; - self.count = i + 1; - (i, a) - }) - } - - #[inline] - fn count(self) -> usize { - self.iter.count() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Enumerate where - I: ExactSizeIterator + DoubleEndedIterator -{ - #[inline] - fn next_back(&mut self) -> Option<(usize, ::Item)> { - self.iter.next_back().map(|a| { - let len = self.iter.len(); - // Can safely add, `ExactSizeIterator` promises that the number of - // elements fits into a `usize`. - (self.count + len, a) - }) - } -} - -/// An iterator with a `peek()` that returns an optional reference to the next -/// element. -/// -/// This `struct` is created by the [`peekable()`] method on [`Iterator`]. See its -/// documentation for more. 
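// [Editorial sketch -- not part of the upstream patch.]
// `Enumerate::nth()` above bumps `count` by `n + 1` so later items keep the
// right indices, and `next_back()` computes its index from `count` plus the
// remaining length. A small check of both behaviours:
fn main() {
    let letters = ['a', 'b', 'c', 'd'];

    let mut iter = letters.iter().enumerate();
    // Jumping ahead with nth() still reports the absolute position...
    assert_eq!(iter.nth(2), Some((2, &'c')));
    // ...and the following element carries the next index.
    assert_eq!(iter.next(), Some((3, &'d')));

    // From the back, the index comes from the length that is still left.
    let mut iter = letters.iter().enumerate();
    assert_eq!(iter.next_back(), Some((3, &'d')));
}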
-/// -/// [`peekable()`]: trait.Iterator.html#method.peekable -/// [`Iterator`]: trait.Iterator.html -#[derive(Clone, Debug)] -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -pub struct Peekable { - iter: I, - peeked: Option, -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Peekable { - type Item = I::Item; - - #[inline] - fn next(&mut self) -> Option { - match self.peeked { - Some(_) => self.peeked.take(), - None => self.iter.next(), - } - } - - #[inline] - fn count(self) -> usize { - (if self.peeked.is_some() { 1 } else { 0 }) + self.iter.count() - } - - #[inline] - fn nth(&mut self, n: usize) -> Option { - match self.peeked { - Some(_) if n == 0 => self.peeked.take(), - Some(_) => { - self.peeked = None; - self.iter.nth(n-1) - }, - None => self.iter.nth(n) - } - } - - #[inline] - fn last(self) -> Option { - self.iter.last().or(self.peeked) - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (lo, hi) = self.iter.size_hint(); - if self.peeked.is_some() { - let lo = lo.saturating_add(1); - let hi = hi.and_then(|x| x.checked_add(1)); - (lo, hi) - } else { - (lo, hi) - } - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Peekable {} - -impl Peekable { - /// Returns a reference to the next() value without advancing the iterator. - /// - /// The `peek()` method will return the value that a call to [`next()`] would - /// return, but does not advance the iterator. Like [`next()`], if there is - /// a value, it's wrapped in a `Some(T)`, but if the iterator is over, it - /// will return `None`. - /// - /// [`next()`]: trait.Iterator.html#tymethod.next - /// - /// Because `peek()` returns reference, and many iterators iterate over - /// references, this leads to a possibly confusing situation where the - /// return value is a double reference. You can see this effect in the - /// examples below, with `&&i32`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let xs = [1, 2, 3]; - /// - /// let mut iter = xs.iter().peekable(); - /// - /// // peek() lets us see into the future - /// assert_eq!(iter.peek(), Some(&&1)); - /// assert_eq!(iter.next(), Some(&1)); - /// - /// assert_eq!(iter.next(), Some(&2)); - /// - /// // we can peek() multiple times, the iterator won't advance - /// assert_eq!(iter.peek(), Some(&&3)); - /// assert_eq!(iter.peek(), Some(&&3)); - /// - /// assert_eq!(iter.next(), Some(&3)); - /// - /// // after the iterator is finished, so is peek() - /// assert_eq!(iter.peek(), None); - /// assert_eq!(iter.next(), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn peek(&mut self) -> Option<&I::Item> { - if self.peeked.is_none() { - self.peeked = self.iter.next(); - } - match self.peeked { - Some(ref value) => Some(value), - None => None, - } - } - - /// Checks if the iterator has finished iterating. - /// - /// Returns `true` if there are no more elements in the iterator, and - /// `false` if there are. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// #![feature(peekable_is_empty)] - /// - /// let xs = [1, 2, 3]; - /// - /// let mut iter = xs.iter().peekable(); - /// - /// // there are still elements to iterate over - /// assert_eq!(iter.is_empty(), false); - /// - /// // let's consume the iterator - /// iter.next(); - /// iter.next(); - /// iter.next(); - /// - /// assert_eq!(iter.is_empty(), true); - /// ``` - #[unstable(feature = "peekable_is_empty", issue = "32111")] - #[inline] - pub fn is_empty(&mut self) -> bool { - self.peek().is_none() - } -} - -/// An iterator that rejects elements while `predicate` is true. -/// -/// This `struct` is created by the [`skip_while()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`skip_while()`]: trait.Iterator.html#method.skip_while -/// [`Iterator`]: trait.Iterator.html -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -#[derive(Clone)] -pub struct SkipWhile { - iter: I, - flag: bool, - predicate: P, -} - -#[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for SkipWhile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("SkipWhile") - .field("iter", &self.iter) - .field("flag", &self.flag) - .finish() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for SkipWhile - where P: FnMut(&I::Item) -> bool -{ - type Item = I::Item; - - #[inline] - fn next(&mut self) -> Option { - for x in self.iter.by_ref() { - if self.flag || !(self.predicate)(&x) { - self.flag = true; - return Some(x); - } - } - None - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (_, upper) = self.iter.size_hint(); - (0, upper) // can't know a lower bound, due to the predicate - } -} - -/// An iterator that only accepts elements while `predicate` is true. -/// -/// This `struct` is created by the [`take_while()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`take_while()`]: trait.Iterator.html#method.take_while -/// [`Iterator`]: trait.Iterator.html -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -#[derive(Clone)] -pub struct TakeWhile { - iter: I, - flag: bool, - predicate: P, -} - -#[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for TakeWhile { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("TakeWhile") - .field("iter", &self.iter) - .field("flag", &self.flag) - .finish() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for TakeWhile - where P: FnMut(&I::Item) -> bool -{ - type Item = I::Item; - - #[inline] - fn next(&mut self) -> Option { - if self.flag { - None - } else { - self.iter.next().and_then(|x| { - if (self.predicate)(&x) { - Some(x) - } else { - self.flag = true; - None - } - }) - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (_, upper) = self.iter.size_hint(); - (0, upper) // can't know a lower bound, due to the predicate - } -} - -/// An iterator that skips over `n` elements of `iter`. -/// -/// This `struct` is created by the [`skip()`] method on [`Iterator`]. See its -/// documentation for more. 
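// [Editorial sketch -- not part of the upstream patch.]
// Both adaptors above carry a `flag`: `SkipWhile` stops consulting its
// predicate once it has let one element through, and `TakeWhile` stops
// yielding for good after the first failure, so neither acts like a
// per-element filter:
fn main() {
    let a = [1, 2, 5, 1, 2];

    // After 5 fails the `< 3` test, the later small values are kept too.
    let skipped: Vec<_> = a.iter().skip_while(|&&x| x < 3).collect();
    assert_eq!(skipped, [&5, &1, &2]);

    // take_while ends at the first failure and never resumes.
    let taken: Vec<_> = a.iter().take_while(|&&x| x < 3).collect();
    assert_eq!(taken, [&1, &2]);
}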
-/// -/// [`skip()`]: trait.Iterator.html#method.skip -/// [`Iterator`]: trait.Iterator.html -#[derive(Clone, Debug)] -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -pub struct Skip { - iter: I, - n: usize -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Skip where I: Iterator { - type Item = ::Item; - - #[inline] - fn next(&mut self) -> Option { - if self.n == 0 { - self.iter.next() - } else { - let old_n = self.n; - self.n = 0; - self.iter.nth(old_n) - } - } - - #[inline] - fn nth(&mut self, n: usize) -> Option { - // Can't just add n + self.n due to overflow. - if self.n == 0 { - self.iter.nth(n) - } else { - let to_skip = self.n; - self.n = 0; - // nth(n) skips n+1 - if self.iter.nth(to_skip-1).is_none() { - return None; - } - self.iter.nth(n) - } - } - - #[inline] - fn count(self) -> usize { - self.iter.count().saturating_sub(self.n) - } - - #[inline] - fn last(mut self) -> Option { - if self.n == 0 { - self.iter.last() - } else { - let next = self.next(); - if next.is_some() { - // recurse. n should be 0. - self.last().or(next) - } else { - None - } - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (lower, upper) = self.iter.size_hint(); - - let lower = lower.saturating_sub(self.n); - let upper = upper.map(|x| x.saturating_sub(self.n)); - - (lower, upper) - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Skip where I: ExactSizeIterator {} - -#[stable(feature = "double_ended_skip_iterator", since = "1.8.0")] -impl DoubleEndedIterator for Skip where I: DoubleEndedIterator + ExactSizeIterator { - fn next_back(&mut self) -> Option { - if self.len() > 0 { - self.iter.next_back() - } else { - None - } - } -} - -/// An iterator that only iterates over the first `n` iterations of `iter`. -/// -/// This `struct` is created by the [`take()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`take()`]: trait.Iterator.html#method.take -/// [`Iterator`]: trait.Iterator.html -#[derive(Clone, Debug)] -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -pub struct Take { - iter: I, - n: usize -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Take where I: Iterator{ - type Item = ::Item; - - #[inline] - fn next(&mut self) -> Option<::Item> { - if self.n != 0 { - self.n -= 1; - self.iter.next() - } else { - None - } - } - - #[inline] - fn nth(&mut self, n: usize) -> Option { - if self.n > n { - self.n -= n + 1; - self.iter.nth(n) - } else { - if self.n > 0 { - self.iter.nth(self.n - 1); - self.n = 0; - } - None - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (lower, upper) = self.iter.size_hint(); - - let lower = cmp::min(lower, self.n); - - let upper = match upper { - Some(x) if x < self.n => Some(x), - _ => Some(self.n) - }; - - (lower, upper) - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Take where I: ExactSizeIterator {} - - -/// An iterator to maintain state while iterating another iterator. -/// -/// This `struct` is created by the [`scan()`] method on [`Iterator`]. See its -/// documentation for more. 
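// [Editorial sketch -- not part of the upstream patch.]
// `Skip::size_hint()` above subtracts `n` from both bounds with saturating
// arithmetic, while `Take` clamps both bounds to at most `n`. The resulting
// hints on a plain range:
fn main() {
    let base = 0..10; // reports (10, Some(10))

    assert_eq!(base.clone().skip(3).size_hint(), (7, Some(7)));
    // Skipping more than is available saturates at zero instead of wrapping.
    assert_eq!(base.clone().skip(20).size_hint(), (0, Some(0)));

    assert_eq!(base.clone().take(4).size_hint(), (4, Some(4)));
    // Taking more than exists keeps the smaller, original bounds.
    assert_eq!(base.clone().take(20).size_hint(), (10, Some(10)));
}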
-/// -/// [`scan()`]: trait.Iterator.html#method.scan -/// [`Iterator`]: trait.Iterator.html -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -#[derive(Clone)] -pub struct Scan { - iter: I, - f: F, - state: St, -} - -#[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for Scan { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Scan") - .field("iter", &self.iter) - .field("state", &self.state) - .finish() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Scan where - I: Iterator, - F: FnMut(&mut St, I::Item) -> Option, -{ - type Item = B; - - #[inline] - fn next(&mut self) -> Option { - self.iter.next().and_then(|a| (self.f)(&mut self.state, a)) - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (_, upper) = self.iter.size_hint(); - (0, upper) // can't know a lower bound, due to the scan function - } -} - -/// An iterator that maps each element to an iterator, and yields the elements -/// of the produced iterators. -/// -/// This `struct` is created by the [`flat_map()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`flat_map()`]: trait.Iterator.html#method.flat_map -/// [`Iterator`]: trait.Iterator.html -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -#[derive(Clone)] -pub struct FlatMap { - iter: I, - f: F, - frontiter: Option, - backiter: Option, -} - -#[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for FlatMap - where U::IntoIter: fmt::Debug -{ - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("FlatMap") - .field("iter", &self.iter) - .field("frontiter", &self.frontiter) - .field("backiter", &self.backiter) - .finish() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for FlatMap - where F: FnMut(I::Item) -> U, -{ - type Item = U::Item; - - #[inline] - fn next(&mut self) -> Option { - loop { - if let Some(ref mut inner) = self.frontiter { - if let Some(x) = inner.by_ref().next() { - return Some(x) - } - } - match self.iter.next().map(&mut self.f) { - None => return self.backiter.as_mut().and_then(|it| it.next()), - next => self.frontiter = next.map(IntoIterator::into_iter), - } - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - let (flo, fhi) = self.frontiter.as_ref().map_or((0, Some(0)), |it| it.size_hint()); - let (blo, bhi) = self.backiter.as_ref().map_or((0, Some(0)), |it| it.size_hint()); - let lo = flo.saturating_add(blo); - match (self.iter.size_hint(), fhi, bhi) { - ((0, Some(0)), Some(a), Some(b)) => (lo, a.checked_add(b)), - _ => (lo, None) - } - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for FlatMap where - F: FnMut(I::Item) -> U, - U: IntoIterator, - U::IntoIter: DoubleEndedIterator -{ - #[inline] - fn next_back(&mut self) -> Option { - loop { - if let Some(ref mut inner) = self.backiter { - if let Some(y) = inner.next_back() { - return Some(y) - } - } - match self.iter.next_back().map(&mut self.f) { - None => return self.frontiter.as_mut().and_then(|it| it.next_back()), - next => self.backiter = next.map(IntoIterator::into_iter), - } - } - } -} - -/// An iterator that yields `None` forever after the underlying iterator -/// yields `None` once. -/// -/// This `struct` is created by the [`fuse()`] method on [`Iterator`]. See its -/// documentation for more. 
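// [Editorial sketch -- not part of the upstream patch.]
// `Scan` above threads mutable state through its closure and ends as soon as
// the closure returns `None`, which is also why its `size_hint()` only keeps
// the upper bound. A running-sum sketch:
fn main() {
    let a = [1, 2, 3, 4];

    let sums: Vec<i32> = a.iter()
                          .scan(0, |acc, &x| {
                              *acc += x;
                              Some(*acc)
                          })
                          .collect();

    assert_eq!(sums, [1, 3, 6, 10]);
}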
-/// -/// [`fuse()`]: trait.Iterator.html#method.fuse -/// [`Iterator`]: trait.Iterator.html -#[derive(Clone, Debug)] -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -pub struct Fuse { - iter: I, - done: bool -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Fuse where I: Iterator { - type Item = ::Item; - - #[inline] - fn next(&mut self) -> Option<::Item> { - if self.done { - None - } else { - let next = self.iter.next(); - self.done = next.is_none(); - next - } - } - - #[inline] - fn nth(&mut self, n: usize) -> Option { - if self.done { - None - } else { - let nth = self.iter.nth(n); - self.done = nth.is_none(); - nth - } - } - - #[inline] - fn last(self) -> Option { - if self.done { - None - } else { - self.iter.last() - } - } - - #[inline] - fn count(self) -> usize { - if self.done { - 0 - } else { - self.iter.count() - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - if self.done { - (0, Some(0)) - } else { - self.iter.size_hint() - } - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Fuse where I: DoubleEndedIterator { - #[inline] - fn next_back(&mut self) -> Option<::Item> { - if self.done { - None - } else { - let next = self.iter.next_back(); - self.done = next.is_none(); - next - } - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl ExactSizeIterator for Fuse where I: ExactSizeIterator {} - -/// An iterator that calls a function with a reference to each element before -/// yielding it. -/// -/// This `struct` is created by the [`inspect()`] method on [`Iterator`]. See its -/// documentation for more. -/// -/// [`inspect()`]: trait.Iterator.html#method.inspect -/// [`Iterator`]: trait.Iterator.html -#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] -#[stable(feature = "rust1", since = "1.0.0")] -#[derive(Clone)] -pub struct Inspect { - iter: I, - f: F, -} - -#[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for Inspect { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_struct("Inspect") - .field("iter", &self.iter) - .finish() - } -} - -impl Inspect where F: FnMut(&I::Item) { - #[inline] - fn do_inspect(&mut self, elt: Option) -> Option { - if let Some(ref a) = elt { - (self.f)(a); - } - - elt - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Inspect where F: FnMut(&I::Item) { - type Item = I::Item; - - #[inline] - fn next(&mut self) -> Option { - let next = self.iter.next(); - self.do_inspect(next) - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - self.iter.size_hint() - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Inspect - where F: FnMut(&I::Item), -{ - #[inline] - fn next_back(&mut self) -> Option { - let next = self.iter.next_back(); - self.do_inspect(next) - } -} - -/// Objects that can be stepped over in both directions. -/// -/// The `steps_between` function provides a way to efficiently compare -/// two `Step` objects. -#[unstable(feature = "step_trait", - reason = "likely to be replaced by finer-grained traits", - issue = "27741")] -pub trait Step: PartialOrd + Sized { - /// Steps `self` if possible. - fn step(&self, by: &Self) -> Option; - - /// Returns the number of steps between two step objects. The count is - /// inclusive of `start` and exclusive of `end`. - /// - /// Returns `None` if it is not possible to calculate `steps_between` - /// without overflow. 
- fn steps_between(start: &Self, end: &Self, by: &Self) -> Option; -} - -macro_rules! step_impl_unsigned { - ($($t:ty)*) => ($( - #[unstable(feature = "step_trait", - reason = "likely to be replaced by finer-grained traits", - issue = "27741")] - impl Step for $t { - #[inline] - fn step(&self, by: &$t) -> Option<$t> { - (*self).checked_add(*by) - } - #[inline] - #[allow(trivial_numeric_casts)] - fn steps_between(start: &$t, end: &$t, by: &$t) -> Option { - if *by == 0 { return None; } - if *start < *end { - // Note: We assume $t <= usize here - let diff = (*end - *start) as usize; - let by = *by as usize; - if diff % by > 0 { - Some(diff / by + 1) - } else { - Some(diff / by) - } - } else { - Some(0) - } - } - } - )*) -} -macro_rules! step_impl_signed { - ($($t:ty)*) => ($( - #[unstable(feature = "step_trait", - reason = "likely to be replaced by finer-grained traits", - issue = "27741")] - impl Step for $t { - #[inline] - fn step(&self, by: &$t) -> Option<$t> { - (*self).checked_add(*by) - } - #[inline] - #[allow(trivial_numeric_casts)] - fn steps_between(start: &$t, end: &$t, by: &$t) -> Option { - if *by == 0 { return None; } - let diff: usize; - let by_u: usize; - if *by > 0 { - if *start >= *end { - return Some(0); - } - // Note: We assume $t <= isize here - // Use .wrapping_sub and cast to usize to compute the - // difference that may not fit inside the range of isize. - diff = (*end as isize).wrapping_sub(*start as isize) as usize; - by_u = *by as usize; - } else { - if *start <= *end { - return Some(0); - } - diff = (*start as isize).wrapping_sub(*end as isize) as usize; - by_u = (*by as isize).wrapping_mul(-1) as usize; - } - if diff % by_u > 0 { - Some(diff / by_u + 1) - } else { - Some(diff / by_u) - } - } - } - )*) -} - -macro_rules! step_impl_no_between { - ($($t:ty)*) => ($( - #[unstable(feature = "step_trait", - reason = "likely to be replaced by finer-grained traits", - issue = "27741")] - impl Step for $t { - #[inline] - fn step(&self, by: &$t) -> Option<$t> { - (*self).checked_add(*by) - } - #[inline] - fn steps_between(_a: &$t, _b: &$t, _by: &$t) -> Option { - None - } - } - )*) -} - -step_impl_unsigned!(usize u8 u16 u32); -step_impl_signed!(isize i8 i16 i32); -#[cfg(target_pointer_width = "64")] -step_impl_unsigned!(u64); -#[cfg(target_pointer_width = "64")] -step_impl_signed!(i64); -// If the target pointer width is not 64-bits, we -// assume here that it is less than 64-bits. -#[cfg(not(target_pointer_width = "64"))] -step_impl_no_between!(u64 i64); - -/// An adapter for stepping range iterators by a custom amount. -/// -/// The resulting iterator handles overflow by stopping. The `A` -/// parameter is the type being iterated over, while `R` is the range -/// type (usually one of `std::ops::{Range, RangeFrom, RangeInclusive}`. -#[derive(Clone, Debug)] -#[unstable(feature = "step_by", reason = "recent addition", - issue = "27741")] -pub struct StepBy { - step_by: A, - range: R, -} - -impl ops::RangeFrom { - /// Creates an iterator starting at the same point, but stepping by - /// the given amount at each iteration. - /// - /// # Examples - /// - /// ``` - /// # #![feature(step_by)] - /// - /// for i in (0u8..).step_by(2).take(10) { - /// println!("{}", i); - /// } - /// ``` - /// - /// This prints the first ten even natural integers (0 to 18). 
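// [Editorial sketch -- not part of the upstream patch.]
// The `steps_between` implementations above count how many values a stepped
// range yields: the distance divided by the step, rounded up, counting the
// start but not the end. A plain-arithmetic check of that rounding rule
// (the helper below is hypothetical and only mirrors the unsigned branch):
fn main() {
    fn steps(start: usize, end: usize, by: usize) -> usize {
        let diff = end - start;
        if diff % by > 0 { diff / by + 1 } else { diff / by }
    }

    // 0, 3, 6, 9 -- four values before 10 is reached.
    assert_eq!(steps(0, 10, 3), 4);
    // 0, 2, 4, 6, 8 -- an exact fit still excludes the end point.
    assert_eq!(steps(0, 10, 2), 5);
}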
- #[unstable(feature = "step_by", reason = "recent addition", - issue = "27741")] - pub fn step_by(self, by: A) -> StepBy { - StepBy { - step_by: by, - range: self - } - } -} - -impl ops::Range { - /// Creates an iterator with the same range, but stepping by the - /// given amount at each iteration. - /// - /// The resulting iterator handles overflow by stopping. - /// - /// # Examples - /// - /// ``` - /// #![feature(step_by)] - /// - /// for i in (0..10).step_by(2) { - /// println!("{}", i); - /// } - /// ``` - /// - /// This prints: - /// - /// ```text - /// 0 - /// 2 - /// 4 - /// 6 - /// 8 - /// ``` - #[unstable(feature = "step_by", reason = "recent addition", - issue = "27741")] - pub fn step_by(self, by: A) -> StepBy { - StepBy { - step_by: by, - range: self - } - } -} - -impl ops::RangeInclusive { - /// Creates an iterator with the same range, but stepping by the - /// given amount at each iteration. - /// - /// The resulting iterator handles overflow by stopping. - /// - /// # Examples - /// - /// ``` - /// #![feature(step_by, inclusive_range_syntax)] - /// - /// for i in (0...10).step_by(2) { - /// println!("{}", i); - /// } - /// ``` - /// - /// This prints: - /// - /// ```text - /// 0 - /// 2 - /// 4 - /// 6 - /// 8 - /// 10 - /// ``` - #[unstable(feature = "step_by", reason = "recent addition", - issue = "27741")] - pub fn step_by(self, by: A) -> StepBy { - StepBy { - step_by: by, - range: self - } - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for StepBy> where - A: Clone, - for<'a> &'a A: Add<&'a A, Output = A> -{ - type Item = A; - - #[inline] - fn next(&mut self) -> Option { - let mut n = &self.range.start + &self.step_by; - mem::swap(&mut n, &mut self.range.start); - Some(n) - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - (usize::MAX, None) // Too bad we can't specify an infinite lower bound - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for StepBy> { - type Item = A; - - #[inline] - fn next(&mut self) -> Option { - let rev = self.step_by < A::zero(); - if (rev && self.range.start > self.range.end) || - (!rev && self.range.start < self.range.end) - { - match self.range.start.step(&self.step_by) { - Some(mut n) => { - mem::swap(&mut self.range.start, &mut n); - Some(n) - }, - None => { - let mut n = self.range.end.clone(); - mem::swap(&mut self.range.start, &mut n); - Some(n) - } - } - } else { - None - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - match Step::steps_between(&self.range.start, - &self.range.end, - &self.step_by) { - Some(hint) => (hint, Some(hint)), - None => (0, None) - } - } -} - -#[unstable(feature = "inclusive_range", - reason = "recently added, follows RFC", - issue = "28237")] -impl Iterator for StepBy> { - type Item = A; - - #[inline] - fn next(&mut self) -> Option { - use ops::RangeInclusive::*; - - // this function has a sort of odd structure due to borrowck issues - // we may need to replace self.range, so borrows of start and end need to end early - - let (finishing, n) = match self.range { - Empty { .. } => return None, // empty iterators yield no values - - NonEmpty { ref mut start, ref mut end } => { - let zero = A::zero(); - let rev = self.step_by < zero; - - // march start towards (maybe past!) 
end and yield the old value - if (rev && start >= end) || - (!rev && start <= end) - { - match start.step(&self.step_by) { - Some(mut n) => { - mem::swap(start, &mut n); - (None, Some(n)) // yield old value, remain non-empty - }, - None => { - let mut n = end.clone(); - mem::swap(start, &mut n); - (None, Some(n)) // yield old value, remain non-empty - } - } - } else { - // found range in inconsistent state (start at or past end), so become empty - (Some(mem::replace(end, zero)), None) - } - } - }; - - // turn into an empty iterator if we've reached the end - if let Some(end) = finishing { - self.range = Empty { at: end }; - } - - n - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - use ops::RangeInclusive::*; - - match self.range { - Empty { .. } => (0, Some(0)), - - NonEmpty { ref start, ref end } => - match Step::steps_between(start, - end, - &self.step_by) { - Some(hint) => (hint.saturating_add(1), hint.checked_add(1)), - None => (0, None) - } - } - } -} - -macro_rules! range_exact_iter_impl { - ($($t:ty)*) => ($( - #[stable(feature = "rust1", since = "1.0.0")] - impl ExactSizeIterator for ops::Range<$t> { } - - #[unstable(feature = "inclusive_range", - reason = "recently added, follows RFC", - issue = "28237")] - impl ExactSizeIterator for ops::RangeInclusive<$t> { } - )*) -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for ops::Range where - for<'a> &'a A: Add<&'a A, Output = A> -{ - type Item = A; - - #[inline] - fn next(&mut self) -> Option { - if self.start < self.end { - let mut n = &self.start + &A::one(); - mem::swap(&mut n, &mut self.start); - Some(n) - } else { - None - } - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - match Step::steps_between(&self.start, &self.end, &A::one()) { - Some(hint) => (hint, Some(hint)), - None => (0, None) - } - } -} - -// Ranges of u64 and i64 are excluded because they cannot guarantee having -// a length <= usize::MAX, which is required by ExactSizeIterator. -range_exact_iter_impl!(usize u8 u16 u32 isize i8 i16 i32); - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for ops::Range where - for<'a> &'a A: Add<&'a A, Output = A>, - for<'a> &'a A: Sub<&'a A, Output = A> -{ - #[inline] - fn next_back(&mut self) -> Option { - if self.start < self.end { - self.end = &self.end - &A::one(); - Some(self.end.clone()) - } else { - None - } - } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for ops::RangeFrom where - for<'a> &'a A: Add<&'a A, Output = A> -{ - type Item = A; - - #[inline] - fn next(&mut self) -> Option { - let mut n = &self.start + &A::one(); - mem::swap(&mut n, &mut self.start); - Some(n) - } -} - -#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] -impl Iterator for ops::RangeInclusive where - for<'a> &'a A: Add<&'a A, Output = A> -{ - type Item = A; - - #[inline] - fn next(&mut self) -> Option { - use ops::RangeInclusive::*; - - // this function has a sort of odd structure due to borrowck issues - // we may need to replace self, so borrows of self.start and self.end need to end early - - let (finishing, n) = match *self { - Empty { .. 
} => (None, None), // empty iterators yield no values - - NonEmpty { ref mut start, ref mut end } => { - if start == end { - (Some(mem::replace(end, A::one())), Some(mem::replace(start, A::one()))) - } else if start < end { - let one = A::one(); - let mut n = &*start + &one; - mem::swap(&mut n, start); - - // if the iterator is done iterating, it will change from NonEmpty to Empty - // to avoid unnecessary drops or clones, we'll reuse either start or end - // (they are equal now, so it doesn't matter which) - // to pull out end, we need to swap something back in -- use the previously - // created A::one() as a dummy value - - (if n == *end { Some(mem::replace(end, one)) } else { None }, - // ^ are we done yet? - Some(n)) // < the value to output - } else { - (Some(mem::replace(start, A::one())), None) - } - } - }; - - // turn into an empty iterator if this is the last value - if let Some(end) = finishing { - *self = Empty { at: end }; - } - - n - } - - #[inline] - fn size_hint(&self) -> (usize, Option) { - use ops::RangeInclusive::*; - - match *self { - Empty { .. } => (0, Some(0)), - - NonEmpty { ref start, ref end } => - match Step::steps_between(start, end, &A::one()) { - Some(hint) => (hint.saturating_add(1), hint.checked_add(1)), - None => (0, None), - } - } - } -} - -#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] -impl DoubleEndedIterator for ops::RangeInclusive where - for<'a> &'a A: Add<&'a A, Output = A>, - for<'a> &'a A: Sub<&'a A, Output = A> -{ - #[inline] - fn next_back(&mut self) -> Option { - use ops::RangeInclusive::*; - - // see Iterator::next for comments - - let (finishing, n) = match *self { - Empty { .. } => return None, - - NonEmpty { ref mut start, ref mut end } => { - if start == end { - (Some(mem::replace(start, A::one())), Some(mem::replace(end, A::one()))) - } else if start < end { - let one = A::one(); - let mut n = &*end - &one; - mem::swap(&mut n, end); - - (if n == *start { Some(mem::replace(start, one)) } else { None }, - Some(n)) - } else { - (Some(mem::replace(end, A::one())), None) - } - } - }; - - if let Some(start) = finishing { - *self = Empty { at: start }; - } - - n - } -} - -/// An iterator that repeats an element endlessly. -/// -/// This `struct` is created by the [`repeat()`] function. See its documentation for more. -/// -/// [`repeat()`]: fn.repeat.html -#[derive(Clone, Debug)] -#[stable(feature = "rust1", since = "1.0.0")] -pub struct Repeat { - element: A -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Iterator for Repeat { - type Item = A; - - #[inline] - fn next(&mut self) -> Option { Some(self.element.clone()) } - #[inline] - fn size_hint(&self) -> (usize, Option) { (usize::MAX, None) } -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl DoubleEndedIterator for Repeat { - #[inline] - fn next_back(&mut self) -> Option { Some(self.element.clone()) } -} - -/// Creates a new iterator that endlessly repeats a single element. -/// -/// The `repeat()` function repeats a single value over and over and over and -/// over and over and 🔁. -/// -/// Infinite iterators like `repeat()` are often used with adapters like -/// [`take()`], in order to make them finite. 
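// [Editorial sketch -- not part of the upstream patch.]
// As the comment next to `range_exact_iter_impl!` above explains, only ranges
// whose length is guaranteed to fit in a `usize` get `ExactSizeIterator`;
// 64-bit ranges keep `size_hint()` but deliberately do not offer `len()`.
// A quick check on a type that does qualify:
fn main() {
    let r = 0u32..1000;
    assert_eq!(r.size_hint(), (1000, Some(1000)));
    assert_eq!(r.len(), 1000);
    // `(0u64..1000).len()` would not compile here, because Range<u64> does
    // not implement ExactSizeIterator.
}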
-/// -/// [`take()`]: trait.Iterator.html#method.take -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// use std::iter; -/// -/// // the number four 4ever: -/// let mut fours = iter::repeat(4); -/// -/// assert_eq!(Some(4), fours.next()); -/// assert_eq!(Some(4), fours.next()); -/// assert_eq!(Some(4), fours.next()); -/// assert_eq!(Some(4), fours.next()); -/// assert_eq!(Some(4), fours.next()); -/// -/// // yup, still four -/// assert_eq!(Some(4), fours.next()); -/// ``` -/// -/// Going finite with [`take()`]: -/// -/// ``` -/// use std::iter; -/// -/// // that last example was too many fours. Let's only have four fours. -/// let mut four_fours = iter::repeat(4).take(4); -/// -/// assert_eq!(Some(4), four_fours.next()); -/// assert_eq!(Some(4), four_fours.next()); -/// assert_eq!(Some(4), four_fours.next()); -/// assert_eq!(Some(4), four_fours.next()); -/// -/// // ... and now we're done -/// assert_eq!(None, four_fours.next()); -/// ``` -#[inline] -#[stable(feature = "rust1", since = "1.0.0")] -pub fn repeat(elt: T) -> Repeat { - Repeat{element: elt} -} - -/// An iterator that yields nothing. -/// -/// This `struct` is created by the [`empty()`] function. See its documentation for more. -/// -/// [`empty()`]: fn.empty.html -#[stable(feature = "iter_empty", since = "1.2.0")] -pub struct Empty(marker::PhantomData); - -#[stable(feature = "core_impl_debug", since = "1.9.0")] -impl fmt::Debug for Empty { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.pad("Empty") - } -} - -#[stable(feature = "iter_empty", since = "1.2.0")] -impl Iterator for Empty { - type Item = T; - - fn next(&mut self) -> Option { - None - } - - fn size_hint(&self) -> (usize, Option){ - (0, Some(0)) - } -} - -#[stable(feature = "iter_empty", since = "1.2.0")] -impl DoubleEndedIterator for Empty { - fn next_back(&mut self) -> Option { - None - } -} - -#[stable(feature = "iter_empty", since = "1.2.0")] -impl ExactSizeIterator for Empty { - fn len(&self) -> usize { - 0 - } -} - -// not #[derive] because that adds a Clone bound on T, -// which isn't necessary. -#[stable(feature = "iter_empty", since = "1.2.0")] -impl Clone for Empty { - fn clone(&self) -> Empty { - Empty(marker::PhantomData) - } -} - -// not #[derive] because that adds a Default bound on T, -// which isn't necessary. -#[stable(feature = "iter_empty", since = "1.2.0")] -impl Default for Empty { - fn default() -> Empty { - Empty(marker::PhantomData) - } -} - -/// Creates an iterator that yields nothing. -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// use std::iter; -/// -/// // this could have been an iterator over i32, but alas, it's just not. -/// let mut nope = iter::empty::(); -/// -/// assert_eq!(None, nope.next()); -/// ``` -#[stable(feature = "iter_empty", since = "1.2.0")] -pub fn empty() -> Empty { - Empty(marker::PhantomData) -} - -/// An iterator that yields an element exactly once. -/// -/// This `struct` is created by the [`once()`] function. See its documentation for more. 
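// [Editorial sketch -- not part of the upstream patch.]
// `Empty<T>` above implements `Clone` and `Default` by hand precisely so that
// `T` itself need not be `Clone` or `Default`. A quick demonstration with a
// deliberately trait-less item type (the type name is made up for the demo):
fn main() {
    use std::iter;

    struct NoTraits;

    let e = iter::empty::<NoTraits>();
    let mut e2 = e.clone(); // fine: only the PhantomData is cloned
    assert!(e2.next().is_none()); // and it still yields nothing
}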
-/// -/// [`once()`]: fn.once.html -#[derive(Clone, Debug)] -#[stable(feature = "iter_once", since = "1.2.0")] -pub struct Once { - inner: ::option::IntoIter -} - -#[stable(feature = "iter_once", since = "1.2.0")] -impl Iterator for Once { - type Item = T; - - fn next(&mut self) -> Option { - self.inner.next() - } - - fn size_hint(&self) -> (usize, Option) { - self.inner.size_hint() - } -} - -#[stable(feature = "iter_once", since = "1.2.0")] -impl DoubleEndedIterator for Once { - fn next_back(&mut self) -> Option { - self.inner.next_back() - } -} - -#[stable(feature = "iter_once", since = "1.2.0")] -impl ExactSizeIterator for Once { - fn len(&self) -> usize { - self.inner.len() - } -} - -/// Creates an iterator that yields an element exactly once. -/// -/// This is commonly used to adapt a single value into a [`chain()`] of other -/// kinds of iteration. Maybe you have an iterator that covers almost -/// everything, but you need an extra special case. Maybe you have a function -/// which works on iterators, but you only need to process one value. -/// -/// [`chain()`]: trait.Iterator.html#method.chain -/// -/// # Examples -/// -/// Basic usage: -/// -/// ``` -/// use std::iter; -/// -/// // one is the loneliest number -/// let mut one = iter::once(1); -/// -/// assert_eq!(Some(1), one.next()); -/// -/// // just one, that's all we get -/// assert_eq!(None, one.next()); -/// ``` -/// -/// Chaining together with another iterator. Let's say that we want to iterate -/// over each file of the `.foo` directory, but also a configuration file, -/// `.foorc`: -/// -/// ```no_run -/// use std::iter; -/// use std::fs; -/// use std::path::PathBuf; -/// -/// let dirs = fs::read_dir(".foo").unwrap(); -/// -/// // we need to convert from an iterator of DirEntry-s to an iterator of -/// // PathBufs, so we use map -/// let dirs = dirs.map(|file| file.unwrap().path()); -/// -/// // now, our iterator just for our config file -/// let config = iter::once(PathBuf::from(".foorc")); -/// -/// // chain the two iterators together into one big iterator -/// let files = dirs.chain(config); -/// -/// // this will give us all of the files in .foo as well as .foorc -/// for f in files { -/// println!("{:?}", f); -/// } -/// ``` -#[stable(feature = "iter_once", since = "1.2.0")] -pub fn once(value: T) -> Once { - Once { inner: Some(value).into_iter() } -} diff --git a/src/libcore/iter/iterator.rs b/src/libcore/iter/iterator.rs new file mode 100644 index 0000000000..b80f77c0d2 --- /dev/null +++ b/src/libcore/iter/iterator.rs @@ -0,0 +1,2111 @@ +// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use clone::Clone; +use cmp::{Ord, PartialOrd, PartialEq, Ordering}; +use default::Default; +use marker; +use num::{Zero, One}; +use ops::{Add, FnMut, Mul}; +use option::Option::{self, Some, None}; +use marker::Sized; + +use super::{Chain, Cycle, Cloned, Enumerate, Filter, FilterMap, FlatMap, Fuse, + Inspect, Map, Peekable, Scan, Skip, SkipWhile, Take, TakeWhile, Rev, + Zip}; +use super::ChainState; +use super::{DoubleEndedIterator, ExactSizeIterator, Extend, FromIterator, + IntoIterator}; + +fn _assert_is_object_safe(_: &Iterator) {} + +/// An interface for dealing with iterators. +/// +/// This is the main iterator trait. 
For more about the concept of iterators +/// generally, please see the [module-level documentation]. In particular, you +/// may want to know how to [implement `Iterator`][impl]. +/// +/// [module-level documentation]: index.html +/// [impl]: index.html#implementing-iterator +#[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented = "`{Self}` is not an iterator; maybe try calling \ + `.iter()` or a similar method"] +pub trait Iterator { + /// The type of the elements being iterated over. + #[stable(feature = "rust1", since = "1.0.0")] + type Item; + + /// Advances the iterator and returns the next value. + /// + /// Returns `None` when iteration is finished. Individual iterator + /// implementations may choose to resume iteration, and so calling `next()` + /// again may or may not eventually start returning `Some(Item)` again at some + /// point. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter(); + /// + /// // A call to next() returns the next value... + /// assert_eq!(Some(&1), iter.next()); + /// assert_eq!(Some(&2), iter.next()); + /// assert_eq!(Some(&3), iter.next()); + /// + /// // ... and then None once it's over. + /// assert_eq!(None, iter.next()); + /// + /// // More calls may or may not return None. Here, they always will. + /// assert_eq!(None, iter.next()); + /// assert_eq!(None, iter.next()); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + fn next(&mut self) -> Option; + + /// Returns the bounds on the remaining length of the iterator. + /// + /// Specifically, `size_hint()` returns a tuple where the first element + /// is the lower bound, and the second element is the upper bound. + /// + /// The second half of the tuple that is returned is an `Option`. A + /// `None` here means that either there is no known upper bound, or the + /// upper bound is larger than `usize`. + /// + /// # Implementation notes + /// + /// It is not enforced that an iterator implementation yields the declared + /// number of elements. A buggy iterator may yield less than the lower bound + /// or more than the upper bound of elements. + /// + /// `size_hint()` is primarily intended to be used for optimizations such as + /// reserving space for the elements of the iterator, but must not be + /// trusted to e.g. omit bounds checks in unsafe code. An incorrect + /// implementation of `size_hint()` should not lead to memory safety + /// violations. + /// + /// That said, the implementation should provide a correct estimation, + /// because otherwise it would be a violation of the trait's protocol. + /// + /// The default implementation returns `(0, None)` which is correct for any + /// iterator. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// let iter = a.iter(); + /// + /// assert_eq!((3, Some(3)), iter.size_hint()); + /// ``` + /// + /// A more complex example: + /// + /// ``` + /// // The even numbers from zero to ten. + /// let iter = (0..10).filter(|x| x % 2 == 0); + /// + /// // We might iterate from zero to ten times. Knowing that it's five + /// // exactly wouldn't be possible without executing filter(). 
+ /// assert_eq!((0, Some(10)), iter.size_hint()); + /// + /// // Let's add one five more numbers with chain() + /// let iter = (0..10).filter(|x| x % 2 == 0).chain(15..20); + /// + /// // now both bounds are increased by five + /// assert_eq!((5, Some(15)), iter.size_hint()); + /// ``` + /// + /// Returning `None` for an upper bound: + /// + /// ``` + /// // an infinite iterator has no upper bound + /// let iter = 0..; + /// + /// assert_eq!((0, None), iter.size_hint()); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn size_hint(&self) -> (usize, Option) { (0, None) } + + /// Consumes the iterator, counting the number of iterations and returning it. + /// + /// This method will evaluate the iterator until its [`next()`] returns + /// `None`. Once `None` is encountered, `count()` returns the number of + /// times it called [`next()`]. + /// + /// [`next()`]: #tymethod.next + /// + /// # Overflow Behavior + /// + /// The method does no guarding against overflows, so counting elements of + /// an iterator with more than `usize::MAX` elements either produces the + /// wrong result or panics. If debug assertions are enabled, a panic is + /// guaranteed. + /// + /// # Panics + /// + /// This function might panic if the iterator has more than `usize::MAX` + /// elements. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// assert_eq!(a.iter().count(), 3); + /// + /// let a = [1, 2, 3, 4, 5]; + /// assert_eq!(a.iter().count(), 5); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn count(self) -> usize where Self: Sized { + // Might overflow. + self.fold(0, |cnt, _| cnt + 1) + } + + /// Consumes the iterator, returning the last element. + /// + /// This method will evaluate the iterator until it returns `None`. While + /// doing so, it keeps track of the current element. After `None` is + /// returned, `last()` will then return the last element it saw. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// assert_eq!(a.iter().last(), Some(&3)); + /// + /// let a = [1, 2, 3, 4, 5]; + /// assert_eq!(a.iter().last(), Some(&5)); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn last(self) -> Option where Self: Sized { + let mut last = None; + for x in self { last = Some(x); } + last + } + + /// Consumes the `n` first elements of the iterator, then returns the + /// `next()` one. + /// + /// This method will evaluate the iterator `n` times, discarding those elements. + /// After it does so, it will call [`next()`] and return its value. + /// + /// [`next()`]: #tymethod.next + /// + /// Like most indexing operations, the count starts from zero, so `nth(0)` + /// returns the first value, `nth(1)` the second, and so on. + /// + /// `nth()` will return `None` if `n` is greater than or equal to the length of the + /// iterator. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// assert_eq!(a.iter().nth(1), Some(&2)); + /// ``` + /// + /// Calling `nth()` multiple times doesn't rewind the iterator: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter(); + /// + /// assert_eq!(iter.nth(1), Some(&2)); + /// assert_eq!(iter.nth(1), None); + /// ``` + /// + /// Returning `None` if there are less than `n + 1` elements: + /// + /// ``` + /// let a = [1, 2, 3]; + /// assert_eq!(a.iter().nth(10), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn nth(&mut self, mut n: usize) -> Option where Self: Sized { + for x in self { + if n == 0 { return Some(x) } + n -= 1; + } + None + } + + /// Takes two iterators and creates a new iterator over both in sequence. + /// + /// `chain()` will return a new iterator which will first iterate over + /// values from the first iterator and then over values from the second + /// iterator. + /// + /// In other words, it links two iterators together, in a chain. 🔗 + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a1 = [1, 2, 3]; + /// let a2 = [4, 5, 6]; + /// + /// let mut iter = a1.iter().chain(a2.iter()); + /// + /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), Some(&4)); + /// assert_eq!(iter.next(), Some(&5)); + /// assert_eq!(iter.next(), Some(&6)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// Since the argument to `chain()` uses [`IntoIterator`], we can pass + /// anything that can be converted into an [`Iterator`], not just an + /// [`Iterator`] itself. For example, slices (`&[T]`) implement + /// [`IntoIterator`], and so can be passed to `chain()` directly: + /// + /// [`IntoIterator`]: trait.IntoIterator.html + /// [`Iterator`]: trait.Iterator.html + /// + /// ``` + /// let s1 = &[1, 2, 3]; + /// let s2 = &[4, 5, 6]; + /// + /// let mut iter = s1.iter().chain(s2); + /// + /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), Some(&4)); + /// assert_eq!(iter.next(), Some(&5)); + /// assert_eq!(iter.next(), Some(&6)); + /// assert_eq!(iter.next(), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn chain(self, other: U) -> Chain where + Self: Sized, U: IntoIterator, + { + Chain{a: self, b: other.into_iter(), state: ChainState::Both} + } + + /// 'Zips up' two iterators into a single iterator of pairs. + /// + /// `zip()` returns a new iterator that will iterate over two other + /// iterators, returning a tuple where the first element comes from the + /// first iterator, and the second element comes from the second iterator. + /// + /// In other words, it zips two iterators together, into a single one. + /// + /// When either iterator returns `None`, all further calls to `next()` + /// will return `None`. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a1 = [1, 2, 3]; + /// let a2 = [4, 5, 6]; + /// + /// let mut iter = a1.iter().zip(a2.iter()); + /// + /// assert_eq!(iter.next(), Some((&1, &4))); + /// assert_eq!(iter.next(), Some((&2, &5))); + /// assert_eq!(iter.next(), Some((&3, &6))); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// Since the argument to `zip()` uses [`IntoIterator`], we can pass + /// anything that can be converted into an [`Iterator`], not just an + /// [`Iterator`] itself. For example, slices (`&[T]`) implement + /// [`IntoIterator`], and so can be passed to `zip()` directly: + /// + /// [`IntoIterator`]: trait.IntoIterator.html + /// [`Iterator`]: trait.Iterator.html + /// + /// ``` + /// let s1 = &[1, 2, 3]; + /// let s2 = &[4, 5, 6]; + /// + /// let mut iter = s1.iter().zip(s2); + /// + /// assert_eq!(iter.next(), Some((&1, &4))); + /// assert_eq!(iter.next(), Some((&2, &5))); + /// assert_eq!(iter.next(), Some((&3, &6))); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// `zip()` is often used to zip an infinite iterator to a finite one. + /// This works because the finite iterator will eventually return `None`, + /// ending the zipper. Zipping with `(0..)` can look a lot like [`enumerate()`]: + /// + /// ``` + /// let enumerate: Vec<_> = "foo".chars().enumerate().collect(); + /// + /// let zipper: Vec<_> = (0..).zip("foo".chars()).collect(); + /// + /// assert_eq!((0, 'f'), enumerate[0]); + /// assert_eq!((0, 'f'), zipper[0]); + /// + /// assert_eq!((1, 'o'), enumerate[1]); + /// assert_eq!((1, 'o'), zipper[1]); + /// + /// assert_eq!((2, 'o'), enumerate[2]); + /// assert_eq!((2, 'o'), zipper[2]); + /// ``` + /// + /// [`enumerate()`]: trait.Iterator.html#method.enumerate + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn zip(self, other: U) -> Zip where + Self: Sized, U: IntoIterator + { + Zip{a: self, b: other.into_iter()} + } + + /// Takes a closure and creates an iterator which calls that closure on each + /// element. + /// + /// `map()` transforms one iterator into another, by means of its argument: + /// something that implements `FnMut`. It produces a new iterator which + /// calls this closure on each element of the original iterator. + /// + /// If you are good at thinking in types, you can think of `map()` like this: + /// If you have an iterator that gives you elements of some type `A`, and + /// you want an iterator of some other type `B`, you can use `map()`, + /// passing a closure that takes an `A` and returns a `B`. + /// + /// `map()` is conceptually similar to a [`for`] loop. However, as `map()` is + /// lazy, it is best used when you're already working with other iterators. + /// If you're doing some sort of looping for a side effect, it's considered + /// more idiomatic to use [`for`] than `map()`. + /// + /// [`for`]: ../../book/loops.html#for + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.into_iter().map(|x| 2 * x); + /// + /// assert_eq!(iter.next(), Some(2)); + /// assert_eq!(iter.next(), Some(4)); + /// assert_eq!(iter.next(), Some(6)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// If you're doing some sort of side effect, prefer [`for`] to `map()`: + /// + /// ``` + /// # #![allow(unused_must_use)] + /// // don't do this: + /// (0..5).map(|x| println!("{}", x)); + /// + /// // it won't even execute, as it is lazy. Rust will warn you about this. 
+ /// + /// // Instead, use for: + /// for x in 0..5 { + /// println!("{}", x); + /// } + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn map(self, f: F) -> Map where + Self: Sized, F: FnMut(Self::Item) -> B, + { + Map{iter: self, f: f} + } + + /// Creates an iterator which uses a closure to determine if an element + /// should be yielded. + /// + /// The closure must return `true` or `false`. `filter()` creates an + /// iterator which calls this closure on each element. If the closure + /// returns `true`, then the element is returned. If the closure returns + /// `false`, it will try again, and call the closure on the next element, + /// seeing if it passes the test. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [0i32, 1, 2]; + /// + /// let mut iter = a.into_iter().filter(|x| x.is_positive()); + /// + /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// Because the closure passed to `filter()` takes a reference, and many + /// iterators iterate over references, this leads to a possibly confusing + /// situation, where the type of the closure is a double reference: + /// + /// ``` + /// let a = [0, 1, 2]; + /// + /// let mut iter = a.into_iter().filter(|x| **x > 1); // need two *s! + /// + /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// It's common to instead use destructuring on the argument to strip away + /// one: + /// + /// ``` + /// let a = [0, 1, 2]; + /// + /// let mut iter = a.into_iter().filter(|&x| *x > 1); // both & and * + /// + /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// or both: + /// + /// ``` + /// let a = [0, 1, 2]; + /// + /// let mut iter = a.into_iter().filter(|&&x| x > 1); // two &s + /// + /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// of these layers. + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn filter
<P>
(self, predicate: P) -> Filter where + Self: Sized, P: FnMut(&Self::Item) -> bool, + { + Filter{iter: self, predicate: predicate} + } + + /// Creates an iterator that both filters and maps. + /// + /// The closure must return an [`Option`]. `filter_map()` creates an + /// iterator which calls this closure on each element. If the closure + /// returns `Some(element)`, then that element is returned. If the + /// closure returns `None`, it will try again, and call the closure on the + /// next element, seeing if it will return `Some`. + /// + /// [`Option`]: ../../std/option/enum.Option.html + /// + /// Why `filter_map()` and not just [`filter()`].[`map()`]? The key is in this + /// part: + /// + /// [`filter()`]: #method.filter + /// [`map()`]: #method.map + /// + /// > If the closure returns `Some(element)`, then that element is returned. + /// + /// In other words, it removes the [`Option`] layer automatically. If your + /// mapping is already returning an [`Option`] and you want to skip over + /// `None`s, then `filter_map()` is much, much nicer to use. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = ["1", "2", "lol"]; + /// + /// let mut iter = a.iter().filter_map(|s| s.parse().ok()); + /// + /// assert_eq!(iter.next(), Some(1)); + /// assert_eq!(iter.next(), Some(2)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// Here's the same example, but with [`filter()`] and [`map()`]: + /// + /// ``` + /// let a = ["1", "2", "lol"]; + /// + /// let mut iter = a.iter() + /// .map(|s| s.parse().ok()) + /// .filter(|s| s.is_some()); + /// + /// assert_eq!(iter.next(), Some(Some(1))); + /// assert_eq!(iter.next(), Some(Some(2))); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// There's an extra layer of `Some` in there. + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn filter_map(self, f: F) -> FilterMap where + Self: Sized, F: FnMut(Self::Item) -> Option, + { + FilterMap { iter: self, f: f } + } + + /// Creates an iterator which gives the current iteration count as well as + /// the next value. + /// + /// The iterator returned yields pairs `(i, val)`, where `i` is the + /// current index of iteration and `val` is the value returned by the + /// iterator. + /// + /// `enumerate()` keeps its count as a [`usize`]. If you want to count by a + /// different sized integer, the [`zip()`] function provides similar + /// functionality. + /// + /// [`usize`]: ../../std/primitive.usize.html + /// [`zip()`]: #method.zip + /// + /// # Overflow Behavior + /// + /// The method does no guarding against overflows, so enumerating more than + /// [`usize::MAX`] elements either produces the wrong result or panics. If + /// debug assertions are enabled, a panic is guaranteed. + /// + /// [`usize::MAX`]: ../../std/usize/constant.MAX.html + /// + /// # Panics + /// + /// The returned iterator might panic if the to-be-returned index would + /// overflow a `usize`. + /// + /// # Examples + /// + /// ``` + /// let a = ['a', 'b', 'c']; + /// + /// let mut iter = a.iter().enumerate(); + /// + /// assert_eq!(iter.next(), Some((0, &'a'))); + /// assert_eq!(iter.next(), Some((1, &'b'))); + /// assert_eq!(iter.next(), Some((2, &'c'))); + /// assert_eq!(iter.next(), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn enumerate(self) -> Enumerate where Self: Sized { + Enumerate { iter: self, count: 0 } + } + + /// Creates an iterator which can use `peek` to look at the next element of + /// the iterator without consuming it. 
+ /// + /// Adds a [`peek()`] method to an iterator. See its documentation for + /// more information. + /// + /// Note that the underlying iterator is still advanced when `peek` is + /// called for the first time: In order to retrieve the next element, + /// `next` is called on the underlying iterator, hence any side effects of + /// the `next` method will occur. + /// + /// [`peek()`]: struct.Peekable.html#method.peek + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let xs = [1, 2, 3]; + /// + /// let mut iter = xs.iter().peekable(); + /// + /// // peek() lets us see into the future + /// assert_eq!(iter.peek(), Some(&&1)); + /// assert_eq!(iter.next(), Some(&1)); + /// + /// assert_eq!(iter.next(), Some(&2)); + /// + /// // we can peek() multiple times, the iterator won't advance + /// assert_eq!(iter.peek(), Some(&&3)); + /// assert_eq!(iter.peek(), Some(&&3)); + /// + /// assert_eq!(iter.next(), Some(&3)); + /// + /// // after the iterator is finished, so is peek() + /// assert_eq!(iter.peek(), None); + /// assert_eq!(iter.next(), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn peekable(self) -> Peekable where Self: Sized { + Peekable{iter: self, peeked: None} + } + + /// Creates an iterator that [`skip()`]s elements based on a predicate. + /// + /// [`skip()`]: #method.skip + /// + /// `skip_while()` takes a closure as an argument. It will call this + /// closure on each element of the iterator, and ignore elements + /// until it returns `false`. + /// + /// After `false` is returned, `skip_while()`'s job is over, and the + /// rest of the elements are yielded. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [-1i32, 0, 1]; + /// + /// let mut iter = a.into_iter().skip_while(|x| x.is_negative()); + /// + /// assert_eq!(iter.next(), Some(&0)); + /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// Because the closure passed to `skip_while()` takes a reference, and many + /// iterators iterate over references, this leads to a possibly confusing + /// situation, where the type of the closure is a double reference: + /// + /// ``` + /// let a = [-1, 0, 1]; + /// + /// let mut iter = a.into_iter().skip_while(|x| **x < 0); // need two *s! + /// + /// assert_eq!(iter.next(), Some(&0)); + /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// Stopping after an initial `false`: + /// + /// ``` + /// let a = [-1, 0, 1, -2]; + /// + /// let mut iter = a.into_iter().skip_while(|x| **x < 0); + /// + /// assert_eq!(iter.next(), Some(&0)); + /// assert_eq!(iter.next(), Some(&1)); + /// + /// // while this would have been false, since we already got a false, + /// // skip_while() isn't used any more + /// assert_eq!(iter.next(), Some(&-2)); + /// + /// assert_eq!(iter.next(), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn skip_while
<P>
(self, predicate: P) -> SkipWhile where + Self: Sized, P: FnMut(&Self::Item) -> bool, + { + SkipWhile{iter: self, flag: false, predicate: predicate} + } + + /// Creates an iterator that yields elements based on a predicate. + /// + /// `take_while()` takes a closure as an argument. It will call this + /// closure on each element of the iterator, and yield elements + /// while it returns `true`. + /// + /// After `false` is returned, `take_while()`'s job is over, and the + /// rest of the elements are ignored. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [-1i32, 0, 1]; + /// + /// let mut iter = a.into_iter().take_while(|x| x.is_negative()); + /// + /// assert_eq!(iter.next(), Some(&-1)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// Because the closure passed to `take_while()` takes a reference, and many + /// iterators iterate over references, this leads to a possibly confusing + /// situation, where the type of the closure is a double reference: + /// + /// ``` + /// let a = [-1, 0, 1]; + /// + /// let mut iter = a.into_iter().take_while(|x| **x < 0); // need two *s! + /// + /// assert_eq!(iter.next(), Some(&-1)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// Stopping after an initial `false`: + /// + /// ``` + /// let a = [-1, 0, 1, -2]; + /// + /// let mut iter = a.into_iter().take_while(|x| **x < 0); + /// + /// assert_eq!(iter.next(), Some(&-1)); + /// + /// // We have more elements that are less than zero, but since we already + /// // got a false, take_while() isn't used any more + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// Because `take_while()` needs to look at the value in order to see if it + /// should be included or not, consuming iterators will see that it is + /// removed: + /// + /// ``` + /// let a = [1, 2, 3, 4]; + /// let mut iter = a.into_iter(); + /// + /// let result: Vec = iter.by_ref() + /// .take_while(|n| **n != 3) + /// .cloned() + /// .collect(); + /// + /// assert_eq!(result, &[1, 2]); + /// + /// let result: Vec = iter.cloned().collect(); + /// + /// assert_eq!(result, &[4]); + /// ``` + /// + /// The `3` is no longer there, because it was consumed in order to see if + /// the iteration should stop, but wasn't placed back into the iterator or + /// some similar thing. + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn take_while
<P>
(self, predicate: P) -> TakeWhile where + Self: Sized, P: FnMut(&Self::Item) -> bool, + { + TakeWhile{iter: self, flag: false, predicate: predicate} + } + + /// Creates an iterator that skips the first `n` elements. + /// + /// After they have been consumed, the rest of the elements are yielded. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter().skip(2); + /// + /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn skip(self, n: usize) -> Skip where Self: Sized { + Skip{iter: self, n: n} + } + + /// Creates an iterator that yields its first `n` elements. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter().take(2); + /// + /// assert_eq!(iter.next(), Some(&1)); + /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), None); + /// ``` + /// + /// `take()` is often used with an infinite iterator, to make it finite: + /// + /// ``` + /// let mut iter = (0..).take(3); + /// + /// assert_eq!(iter.next(), Some(0)); + /// assert_eq!(iter.next(), Some(1)); + /// assert_eq!(iter.next(), Some(2)); + /// assert_eq!(iter.next(), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn take(self, n: usize) -> Take where Self: Sized, { + Take{iter: self, n: n} + } + + /// An iterator adaptor similar to [`fold()`] that holds internal state and + /// produces a new iterator. + /// + /// [`fold()`]: #method.fold + /// + /// `scan()` takes two arguments: an initial value which seeds the internal + /// state, and a closure with two arguments, the first being a mutable + /// reference to the internal state and the second an iterator element. + /// The closure can assign to the internal state to share state between + /// iterations. + /// + /// On iteration, the closure will be applied to each element of the + /// iterator and the return value from the closure, an [`Option`], is + /// yielded by the iterator. + /// + /// [`Option`]: ../../std/option/enum.Option.html + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter().scan(1, |state, &x| { + /// // each iteration, we'll multiply the state by the element + /// *state = *state * x; + /// + /// // the value passed on to the next iteration + /// Some(*state) + /// }); + /// + /// assert_eq!(iter.next(), Some(1)); + /// assert_eq!(iter.next(), Some(2)); + /// assert_eq!(iter.next(), Some(6)); + /// assert_eq!(iter.next(), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn scan(self, initial_state: St, f: F) -> Scan + where Self: Sized, F: FnMut(&mut St, Self::Item) -> Option, + { + Scan{iter: self, f: f, state: initial_state} + } + + /// Creates an iterator that works like map, but flattens nested structure. + /// + /// The [`map()`] adapter is very useful, but only when the closure + /// argument produces values. If it produces an iterator instead, there's + /// an extra layer of indirection. `flat_map()` will remove this extra layer + /// on its own. + /// + /// [`map()`]: #method.map + /// + /// Another way of thinking about `flat_map()`: [`map()`]'s closure returns + /// one item for each element, and `flat_map()`'s closure returns an + /// iterator for each element. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let words = ["alpha", "beta", "gamma"]; + /// + /// // chars() returns an iterator + /// let merged: String = words.iter() + /// .flat_map(|s| s.chars()) + /// .collect(); + /// assert_eq!(merged, "alphabetagamma"); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn flat_map(self, f: F) -> FlatMap + where Self: Sized, U: IntoIterator, F: FnMut(Self::Item) -> U, + { + FlatMap{iter: self, f: f, frontiter: None, backiter: None } + } + + /// Creates an iterator which ends after the first `None`. + /// + /// After an iterator returns `None`, future calls may or may not yield + /// `Some(T)` again. `fuse()` adapts an iterator, ensuring that after a + /// `None` is given, it will always return `None` forever. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// // an iterator which alternates between Some and None + /// struct Alternate { + /// state: i32, + /// } + /// + /// impl Iterator for Alternate { + /// type Item = i32; + /// + /// fn next(&mut self) -> Option { + /// let val = self.state; + /// self.state = self.state + 1; + /// + /// // if it's even, Some(i32), else None + /// if val % 2 == 0 { + /// Some(val) + /// } else { + /// None + /// } + /// } + /// } + /// + /// let mut iter = Alternate { state: 0 }; + /// + /// // we can see our iterator going back and forth + /// assert_eq!(iter.next(), Some(0)); + /// assert_eq!(iter.next(), None); + /// assert_eq!(iter.next(), Some(2)); + /// assert_eq!(iter.next(), None); + /// + /// // however, once we fuse it... + /// let mut iter = iter.fuse(); + /// + /// assert_eq!(iter.next(), Some(4)); + /// assert_eq!(iter.next(), None); + /// + /// // it will always return None after the first time. + /// assert_eq!(iter.next(), None); + /// assert_eq!(iter.next(), None); + /// assert_eq!(iter.next(), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn fuse(self) -> Fuse where Self: Sized { + Fuse{iter: self, done: false} + } + + /// Do something with each element of an iterator, passing the value on. + /// + /// When using iterators, you'll often chain several of them together. + /// While working on such code, you might want to check out what's + /// happening at various parts in the pipeline. To do that, insert + /// a call to `inspect()`. + /// + /// It's much more common for `inspect()` to be used as a debugging tool + /// than to exist in your final code, but never say never. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 4, 2, 3]; + /// + /// // this iterator sequence is complex. 
+ /// let sum = a.iter() + /// .cloned() + /// .filter(|&x| x % 2 == 0) + /// .fold(0, |sum, i| sum + i); + /// + /// println!("{}", sum); + /// + /// // let's add some inspect() calls to investigate what's happening + /// let sum = a.iter() + /// .cloned() + /// .inspect(|x| println!("about to filter: {}", x)) + /// .filter(|&x| x % 2 == 0) + /// .inspect(|x| println!("made it through filter: {}", x)) + /// .fold(0, |sum, i| sum + i); + /// + /// println!("{}", sum); + /// ``` + /// + /// This will print: + /// + /// ```text + /// about to filter: 1 + /// about to filter: 4 + /// made it through filter: 4 + /// about to filter: 2 + /// made it through filter: 2 + /// about to filter: 3 + /// 6 + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn inspect(self, f: F) -> Inspect where + Self: Sized, F: FnMut(&Self::Item), + { + Inspect{iter: self, f: f} + } + + /// Borrows an iterator, rather than consuming it. + /// + /// This is useful to allow applying iterator adaptors while still + /// retaining ownership of the original iterator. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let iter = a.into_iter(); + /// + /// let sum: i32 = iter.take(5) + /// .fold(0, |acc, &i| acc + i ); + /// + /// assert_eq!(sum, 6); + /// + /// // if we try to use iter again, it won't work. The following line + /// // gives "error: use of moved value: `iter` + /// // assert_eq!(iter.next(), None); + /// + /// // let's try that again + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.into_iter(); + /// + /// // instead, we add in a .by_ref() + /// let sum: i32 = iter.by_ref() + /// .take(2) + /// .fold(0, |acc, &i| acc + i ); + /// + /// assert_eq!(sum, 3); + /// + /// // now this is just fine: + /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), None); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + fn by_ref(&mut self) -> &mut Self where Self: Sized { self } + + /// Transforms an iterator into a collection. + /// + /// `collect()` can take anything iterable, and turn it into a relevant + /// collection. This is one of the more powerful methods in the standard + /// library, used in a variety of contexts. + /// + /// The most basic pattern in which `collect()` is used is to turn one + /// collection into another. You take a collection, call `iter()` on it, + /// do a bunch of transformations, and then `collect()` at the end. + /// + /// One of the keys to `collect()`'s power is that many things you might + /// not think of as 'collections' actually are. For example, a [`String`] + /// is a collection of [`char`]s. And a collection of [`Result`] can + /// be thought of as single `Result, E>`. See the examples + /// below for more. + /// + /// [`String`]: ../../std/string/struct.String.html + /// [`Result`]: ../../std/result/enum.Result.html + /// [`char`]: ../../std/primitive.char.html + /// + /// Because `collect()` is so general, it can cause problems with type + /// inference. As such, `collect()` is one of the few times you'll see + /// the syntax affectionately known as the 'turbofish': `::<>`. This + /// helps the inference algorithm understand specifically which collection + /// you're trying to collect into. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let doubled: Vec = a.iter() + /// .map(|&x| x * 2) + /// .collect(); + /// + /// assert_eq!(vec![2, 4, 6], doubled); + /// ``` + /// + /// Note that we needed the `: Vec` on the left-hand side. 
This is because + /// we could collect into, for example, a [`VecDeque`] instead: + /// + /// [`VecDeque`]: ../../std/collections/struct.VecDeque.html + /// + /// ``` + /// use std::collections::VecDeque; + /// + /// let a = [1, 2, 3]; + /// + /// let doubled: VecDeque = a.iter() + /// .map(|&x| x * 2) + /// .collect(); + /// + /// assert_eq!(2, doubled[0]); + /// assert_eq!(4, doubled[1]); + /// assert_eq!(6, doubled[2]); + /// ``` + /// + /// Using the 'turbofish' instead of annotating `doubled`: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let doubled = a.iter() + /// .map(|&x| x * 2) + /// .collect::>(); + /// + /// assert_eq!(vec![2, 4, 6], doubled); + /// ``` + /// + /// Because `collect()` cares about what you're collecting into, you can + /// still use a partial type hint, `_`, with the turbofish: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let doubled = a.iter() + /// .map(|&x| x * 2) + /// .collect::>(); + /// + /// assert_eq!(vec![2, 4, 6], doubled); + /// ``` + /// + /// Using `collect()` to make a [`String`]: + /// + /// ``` + /// let chars = ['g', 'd', 'k', 'k', 'n']; + /// + /// let hello: String = chars.iter() + /// .map(|&x| x as u8) + /// .map(|x| (x + 1) as char) + /// .collect(); + /// + /// assert_eq!("hello", hello); + /// ``` + /// + /// If you have a list of [`Result`]s, you can use `collect()` to + /// see if any of them failed: + /// + /// ``` + /// let results = [Ok(1), Err("nope"), Ok(3), Err("bad")]; + /// + /// let result: Result, &str> = results.iter().cloned().collect(); + /// + /// // gives us the first error + /// assert_eq!(Err("nope"), result); + /// + /// let results = [Ok(1), Ok(3)]; + /// + /// let result: Result, &str> = results.iter().cloned().collect(); + /// + /// // gives us the list of answers + /// assert_eq!(Ok(vec![1, 3]), result); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn collect>(self) -> B where Self: Sized { + FromIterator::from_iter(self) + } + + /// Consumes an iterator, creating two collections from it. + /// + /// The predicate passed to `partition()` can return `true`, or `false`. + /// `partition()` returns a pair, all of the elements for which it returned + /// `true`, and all of the elements for which it returned `false`. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let (even, odd): (Vec, Vec) = a.into_iter() + /// .partition(|&n| n % 2 == 0); + /// + /// assert_eq!(even, vec![2]); + /// assert_eq!(odd, vec![1, 3]); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + fn partition(self, mut f: F) -> (B, B) where + Self: Sized, + B: Default + Extend, + F: FnMut(&Self::Item) -> bool + { + let mut left: B = Default::default(); + let mut right: B = Default::default(); + + for x in self { + if f(&x) { + left.extend(Some(x)) + } else { + right.extend(Some(x)) + } + } + + (left, right) + } + + /// An iterator adaptor that applies a function, producing a single, final value. + /// + /// `fold()` takes two arguments: an initial value, and a closure with two + /// arguments: an 'accumulator', and an element. The closure returns the value that + /// the accumulator should have for the next iteration. + /// + /// The initial value is the value the accumulator will have on the first + /// call. + /// + /// After applying this closure to every element of the iterator, `fold()` + /// returns the accumulator. + /// + /// This operation is sometimes called 'reduce' or 'inject'. 
+ /// + /// Folding is useful whenever you have a collection of something, and want + /// to produce a single value from it. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// // the sum of all of the elements of a + /// let sum = a.iter() + /// .fold(0, |acc, &x| acc + x); + /// + /// assert_eq!(sum, 6); + /// ``` + /// + /// Let's walk through each step of the iteration here: + /// + /// | element | acc | x | result | + /// |---------|-----|---|--------| + /// | | 0 | | | + /// | 1 | 0 | 1 | 1 | + /// | 2 | 1 | 2 | 3 | + /// | 3 | 3 | 3 | 6 | + /// + /// And so, our final result, `6`. + /// + /// It's common for people who haven't used iterators a lot to + /// use a `for` loop with a list of things to build up a result. Those + /// can be turned into `fold()`s: + /// + /// ``` + /// let numbers = [1, 2, 3, 4, 5]; + /// + /// let mut result = 0; + /// + /// // for loop: + /// for i in &numbers { + /// result = result + i; + /// } + /// + /// // fold: + /// let result2 = numbers.iter().fold(0, |acc, &x| acc + x); + /// + /// // they're the same + /// assert_eq!(result, result2); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn fold(self, init: B, mut f: F) -> B where + Self: Sized, F: FnMut(B, Self::Item) -> B, + { + let mut accum = init; + for x in self { + accum = f(accum, x); + } + accum + } + + /// Tests if every element of the iterator matches a predicate. + /// + /// `all()` takes a closure that returns `true` or `false`. It applies + /// this closure to each element of the iterator, and if they all return + /// `true`, then so does `all()`. If any of them return `false`, it + /// returns `false`. + /// + /// `all()` is short-circuiting; in other words, it will stop processing + /// as soon as it finds a `false`, given that no matter what else happens, + /// the result will also be `false`. + /// + /// An empty iterator returns `true`. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// assert!(a.iter().all(|&x| x > 0)); + /// + /// assert!(!a.iter().all(|&x| x > 2)); + /// ``` + /// + /// Stopping at the first `false`: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter(); + /// + /// assert!(!iter.all(|&x| x != 2)); + /// + /// // we can still use `iter`, as there are more elements. + /// assert_eq!(iter.next(), Some(&3)); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn all(&mut self, mut f: F) -> bool where + Self: Sized, F: FnMut(Self::Item) -> bool + { + for x in self { + if !f(x) { + return false; + } + } + true + } + + /// Tests if any element of the iterator matches a predicate. + /// + /// `any()` takes a closure that returns `true` or `false`. It applies + /// this closure to each element of the iterator, and if any of them return + /// `true`, then so does `any()`. If they all return `false`, it + /// returns `false`. + /// + /// `any()` is short-circuiting; in other words, it will stop processing + /// as soon as it finds a `true`, given that no matter what else happens, + /// the result will also be `true`. + /// + /// An empty iterator returns `false`. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// assert!(a.iter().any(|&x| x > 0)); + /// + /// assert!(!a.iter().any(|&x| x > 5)); + /// ``` + /// + /// Stopping at the first `true`: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter(); + /// + /// assert!(iter.any(|&x| x != 2)); + /// + /// // we can still use `iter`, as there are more elements. + /// assert_eq!(iter.next(), Some(&2)); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn any(&mut self, mut f: F) -> bool where + Self: Sized, + F: FnMut(Self::Item) -> bool + { + for x in self { + if f(x) { + return true; + } + } + false + } + + /// Searches for an element of an iterator that satisfies a predicate. + /// + /// `find()` takes a closure that returns `true` or `false`. It applies + /// this closure to each element of the iterator, and if any of them return + /// `true`, then `find()` returns `Some(element)`. If they all return + /// `false`, it returns `None`. + /// + /// `find()` is short-circuiting; in other words, it will stop processing + /// as soon as the closure returns `true`. + /// + /// Because `find()` takes a reference, and many iterators iterate over + /// references, this leads to a possibly confusing situation where the + /// argument is a double reference. You can see this effect in the + /// examples below, with `&&x`. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// assert_eq!(a.iter().find(|&&x| x == 2), Some(&2)); + /// + /// assert_eq!(a.iter().find(|&&x| x == 5), None); + /// ``` + /// + /// Stopping at the first `true`: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter(); + /// + /// assert_eq!(iter.find(|&&x| x == 2), Some(&2)); + /// + /// // we can still use `iter`, as there are more elements. + /// assert_eq!(iter.next(), Some(&3)); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn find
<P>
(&mut self, mut predicate: P) -> Option where + Self: Sized, + P: FnMut(&Self::Item) -> bool, + { + for x in self { + if predicate(&x) { return Some(x) } + } + None + } + + /// Searches for an element in an iterator, returning its index. + /// + /// `position()` takes a closure that returns `true` or `false`. It applies + /// this closure to each element of the iterator, and if one of them + /// returns `true`, then `position()` returns `Some(index)`. If all of + /// them return `false`, it returns `None`. + /// + /// `position()` is short-circuiting; in other words, it will stop + /// processing as soon as it finds a `true`. + /// + /// # Overflow Behavior + /// + /// The method does no guarding against overflows, so if there are more + /// than `usize::MAX` non-matching elements, it either produces the wrong + /// result or panics. If debug assertions are enabled, a panic is + /// guaranteed. + /// + /// # Panics + /// + /// This function might panic if the iterator has more than `usize::MAX` + /// non-matching elements. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// assert_eq!(a.iter().position(|&x| x == 2), Some(1)); + /// + /// assert_eq!(a.iter().position(|&x| x == 5), None); + /// ``` + /// + /// Stopping at the first `true`: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter(); + /// + /// assert_eq!(iter.position(|&x| x == 2), Some(1)); + /// + /// // we can still use `iter`, as there are more elements. + /// assert_eq!(iter.next(), Some(&3)); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn position
<P>
(&mut self, mut predicate: P) -> Option where + Self: Sized, + P: FnMut(Self::Item) -> bool, + { + // `enumerate` might overflow. + for (i, x) in self.enumerate() { + if predicate(x) { + return Some(i); + } + } + None + } + + /// Searches for an element in an iterator from the right, returning its + /// index. + /// + /// `rposition()` takes a closure that returns `true` or `false`. It applies + /// this closure to each element of the iterator, starting from the end, + /// and if one of them returns `true`, then `rposition()` returns + /// `Some(index)`. If all of them return `false`, it returns `None`. + /// + /// `rposition()` is short-circuiting; in other words, it will stop + /// processing as soon as it finds a `true`. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// assert_eq!(a.iter().rposition(|&x| x == 3), Some(2)); + /// + /// assert_eq!(a.iter().rposition(|&x| x == 5), None); + /// ``` + /// + /// Stopping at the first `true`: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter(); + /// + /// assert_eq!(iter.rposition(|&x| x == 2), Some(1)); + /// + /// // we can still use `iter`, as there are more elements. + /// assert_eq!(iter.next(), Some(&1)); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn rposition
<P>
(&mut self, mut predicate: P) -> Option where + P: FnMut(Self::Item) -> bool, + Self: Sized + ExactSizeIterator + DoubleEndedIterator + { + let mut i = self.len(); + + while let Some(v) = self.next_back() { + if predicate(v) { + return Some(i - 1); + } + // No need for an overflow check here, because `ExactSizeIterator` + // implies that the number of elements fits into a `usize`. + i -= 1; + } + None + } + + /// Returns the maximum element of an iterator. + /// + /// If the two elements are equally maximum, the latest element is + /// returned. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// assert_eq!(a.iter().max(), Some(&3)); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn max(self) -> Option where Self: Sized, Self::Item: Ord + { + select_fold1(self, + |_| (), + // switch to y even if it is only equal, to preserve + // stability. + |_, x, _, y| *x <= *y) + .map(|(_, x)| x) + } + + /// Returns the minimum element of an iterator. + /// + /// If the two elements are equally minimum, the first element is + /// returned. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// assert_eq!(a.iter().min(), Some(&1)); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn min(self) -> Option where Self: Sized, Self::Item: Ord + { + select_fold1(self, + |_| (), + // only switch to y if it is strictly smaller, to + // preserve stability. + |_, x, _, y| *x > *y) + .map(|(_, x)| x) + } + + /// Returns the element that gives the maximum value from the + /// specified function. + /// + /// Returns the rightmost element if the comparison determines two elements + /// to be equally maximum. + /// + /// # Examples + /// + /// ``` + /// let a = [-3_i32, 0, 1, 5, -10]; + /// assert_eq!(*a.iter().max_by_key(|x| x.abs()).unwrap(), -10); + /// ``` + #[inline] + #[stable(feature = "iter_cmp_by_key", since = "1.6.0")] + fn max_by_key(self, f: F) -> Option + where Self: Sized, F: FnMut(&Self::Item) -> B, + { + select_fold1(self, + f, + // switch to y even if it is only equal, to preserve + // stability. + |x_p, _, y_p, _| x_p <= y_p) + .map(|(_, x)| x) + } + + /// Returns the element that gives the minimum value from the + /// specified function. + /// + /// Returns the latest element if the comparison determines two elements + /// to be equally minimum. + /// + /// # Examples + /// + /// ``` + /// let a = [-3_i32, 0, 1, 5, -10]; + /// assert_eq!(*a.iter().min_by_key(|x| x.abs()).unwrap(), 0); + /// ``` + #[stable(feature = "iter_cmp_by_key", since = "1.6.0")] + fn min_by_key(self, f: F) -> Option + where Self: Sized, F: FnMut(&Self::Item) -> B, + { + select_fold1(self, + f, + // only switch to y if it is strictly smaller, to + // preserve stability. + |x_p, _, y_p, _| x_p > y_p) + .map(|(_, x)| x) + } + + /// Reverses an iterator's direction. + /// + /// Usually, iterators iterate from left to right. After using `rev()`, + /// an iterator will instead iterate from right to left. + /// + /// This is only possible if the iterator has an end, so `rev()` only + /// works on [`DoubleEndedIterator`]s. 
+ /// + /// [`DoubleEndedIterator`]: trait.DoubleEndedIterator.html + /// + /// # Examples + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut iter = a.iter().rev(); + /// + /// assert_eq!(iter.next(), Some(&3)); + /// assert_eq!(iter.next(), Some(&2)); + /// assert_eq!(iter.next(), Some(&1)); + /// + /// assert_eq!(iter.next(), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + fn rev(self) -> Rev where Self: Sized + DoubleEndedIterator { + Rev{iter: self} + } + + /// Converts an iterator of pairs into a pair of containers. + /// + /// `unzip()` consumes an entire iterator of pairs, producing two + /// collections: one from the left elements of the pairs, and one + /// from the right elements. + /// + /// This function is, in some sense, the opposite of [`zip()`]. + /// + /// [`zip()`]: #method.zip + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [(1, 2), (3, 4)]; + /// + /// let (left, right): (Vec<_>, Vec<_>) = a.iter().cloned().unzip(); + /// + /// assert_eq!(left, [1, 3]); + /// assert_eq!(right, [2, 4]); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + fn unzip(self) -> (FromA, FromB) where + FromA: Default + Extend, + FromB: Default + Extend, + Self: Sized + Iterator, + { + struct SizeHint(usize, Option, marker::PhantomData); + impl Iterator for SizeHint { + type Item = A; + + fn next(&mut self) -> Option { None } + fn size_hint(&self) -> (usize, Option) { + (self.0, self.1) + } + } + + let (lo, hi) = self.size_hint(); + let mut ts: FromA = Default::default(); + let mut us: FromB = Default::default(); + + ts.extend(SizeHint(lo, hi, marker::PhantomData)); + us.extend(SizeHint(lo, hi, marker::PhantomData)); + + for (t, u) in self { + ts.extend(Some(t)); + us.extend(Some(u)); + } + + (ts, us) + } + + /// Creates an iterator which `clone()`s all of its elements. + /// + /// This is useful when you have an iterator over `&T`, but you need an + /// iterator over `T`. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let v_cloned: Vec<_> = a.iter().cloned().collect(); + /// + /// // cloned is the same as .map(|&x| x), for integers + /// let v_map: Vec<_> = a.iter().map(|&x| x).collect(); + /// + /// assert_eq!(v_cloned, vec![1, 2, 3]); + /// assert_eq!(v_map, vec![1, 2, 3]); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + fn cloned<'a, T: 'a>(self) -> Cloned + where Self: Sized + Iterator, T: Clone + { + Cloned { it: self } + } + + /// Repeats an iterator endlessly. + /// + /// Instead of stopping at `None`, the iterator will instead start again, + /// from the beginning. After iterating again, it will start at the + /// beginning again. And again. And again. Forever. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let a = [1, 2, 3]; + /// + /// let mut it = a.iter().cycle(); + /// + /// assert_eq!(it.next(), Some(&1)); + /// assert_eq!(it.next(), Some(&2)); + /// assert_eq!(it.next(), Some(&3)); + /// assert_eq!(it.next(), Some(&1)); + /// assert_eq!(it.next(), Some(&2)); + /// assert_eq!(it.next(), Some(&3)); + /// assert_eq!(it.next(), Some(&1)); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + #[inline] + fn cycle(self) -> Cycle where Self: Sized + Clone { + Cycle{orig: self.clone(), iter: self} + } + + /// Sums the elements of an iterator. + /// + /// Takes each element, adds them together, and returns the result. + /// + /// An empty iterator returns the zero value of the type. 
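+ ///
+ /// As a minimal sketch of that last point, summing an empty slice of
+ /// `i32` yields `0`:
+ ///
+ /// ```
+ /// #![feature(iter_arith)]
+ ///
+ /// let empty: &[i32] = &[];
+ /// let total: i32 = empty.iter().sum();
+ ///
+ /// assert_eq!(total, 0);
+ /// ```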
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(iter_arith)] + /// + /// let a = [1, 2, 3]; + /// let sum: i32 = a.iter().sum(); + /// + /// assert_eq!(sum, 6); + /// ``` + #[unstable(feature = "iter_arith", reason = "bounds recently changed", + issue = "27739")] + fn sum(self) -> S where + S: Add + Zero, + Self: Sized, + { + self.fold(Zero::zero(), |s, e| s + e) + } + + /// Iterates over the entire iterator, multiplying all the elements + /// + /// An empty iterator returns the one value of the type. + /// + /// # Examples + /// + /// ``` + /// #![feature(iter_arith)] + /// + /// fn factorial(n: u32) -> u32 { + /// (1..).take_while(|&i| i <= n).product() + /// } + /// assert_eq!(factorial(0), 1); + /// assert_eq!(factorial(1), 1); + /// assert_eq!(factorial(5), 120); + /// ``` + #[unstable(feature="iter_arith", reason = "bounds recently changed", + issue = "27739")] + fn product
<P>
(self) -> P where + P: Mul + One, + Self: Sized, + { + self.fold(One::one(), |p, e| p * e) + } + + /// Lexicographically compares the elements of this `Iterator` with those + /// of another. + #[stable(feature = "iter_order", since = "1.5.0")] + fn cmp(mut self, other: I) -> Ordering where + I: IntoIterator, + Self::Item: Ord, + Self: Sized, + { + let mut other = other.into_iter(); + + loop { + match (self.next(), other.next()) { + (None, None) => return Ordering::Equal, + (None, _ ) => return Ordering::Less, + (_ , None) => return Ordering::Greater, + (Some(x), Some(y)) => match x.cmp(&y) { + Ordering::Equal => (), + non_eq => return non_eq, + }, + } + } + } + + /// Lexicographically compares the elements of this `Iterator` with those + /// of another. + #[stable(feature = "iter_order", since = "1.5.0")] + fn partial_cmp(mut self, other: I) -> Option where + I: IntoIterator, + Self::Item: PartialOrd, + Self: Sized, + { + let mut other = other.into_iter(); + + loop { + match (self.next(), other.next()) { + (None, None) => return Some(Ordering::Equal), + (None, _ ) => return Some(Ordering::Less), + (_ , None) => return Some(Ordering::Greater), + (Some(x), Some(y)) => match x.partial_cmp(&y) { + Some(Ordering::Equal) => (), + non_eq => return non_eq, + }, + } + } + } + + /// Determines if the elements of this `Iterator` are equal to those of + /// another. + #[stable(feature = "iter_order", since = "1.5.0")] + fn eq(mut self, other: I) -> bool where + I: IntoIterator, + Self::Item: PartialEq, + Self: Sized, + { + let mut other = other.into_iter(); + + loop { + match (self.next(), other.next()) { + (None, None) => return true, + (None, _) | (_, None) => return false, + (Some(x), Some(y)) => if x != y { return false }, + } + } + } + + /// Determines if the elements of this `Iterator` are unequal to those of + /// another. + #[stable(feature = "iter_order", since = "1.5.0")] + fn ne(mut self, other: I) -> bool where + I: IntoIterator, + Self::Item: PartialEq, + Self: Sized, + { + let mut other = other.into_iter(); + + loop { + match (self.next(), other.next()) { + (None, None) => return false, + (None, _) | (_, None) => return true, + (Some(x), Some(y)) => if x.ne(&y) { return true }, + } + } + } + + /// Determines if the elements of this `Iterator` are lexicographically + /// less than those of another. + #[stable(feature = "iter_order", since = "1.5.0")] + fn lt(mut self, other: I) -> bool where + I: IntoIterator, + Self::Item: PartialOrd, + Self: Sized, + { + let mut other = other.into_iter(); + + loop { + match (self.next(), other.next()) { + (None, None) => return false, + (None, _ ) => return true, + (_ , None) => return false, + (Some(x), Some(y)) => { + match x.partial_cmp(&y) { + Some(Ordering::Less) => return true, + Some(Ordering::Equal) => {} + Some(Ordering::Greater) => return false, + None => return false, + } + }, + } + } + } + + /// Determines if the elements of this `Iterator` are lexicographically + /// less or equal to those of another. 
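+ ///
+ /// # Examples
+ ///
+ /// A minimal sketch of the lexicographic rule, comparing slice iterators
+ /// element by element:
+ ///
+ /// ```
+ /// assert!([1, 2].iter().le([1, 2].iter()));
+ /// assert!([1, 2].iter().le([1, 2, 3].iter()));
+ /// assert!(![1, 3].iter().le([1, 2, 3].iter()));
+ /// ```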
+ #[stable(feature = "iter_order", since = "1.5.0")] + fn le(mut self, other: I) -> bool where + I: IntoIterator, + Self::Item: PartialOrd, + Self: Sized, + { + let mut other = other.into_iter(); + + loop { + match (self.next(), other.next()) { + (None, None) => return true, + (None, _ ) => return true, + (_ , None) => return false, + (Some(x), Some(y)) => { + match x.partial_cmp(&y) { + Some(Ordering::Less) => return true, + Some(Ordering::Equal) => {} + Some(Ordering::Greater) => return false, + None => return false, + } + }, + } + } + } + + /// Determines if the elements of this `Iterator` are lexicographically + /// greater than those of another. + #[stable(feature = "iter_order", since = "1.5.0")] + fn gt(mut self, other: I) -> bool where + I: IntoIterator, + Self::Item: PartialOrd, + Self: Sized, + { + let mut other = other.into_iter(); + + loop { + match (self.next(), other.next()) { + (None, None) => return false, + (None, _ ) => return false, + (_ , None) => return true, + (Some(x), Some(y)) => { + match x.partial_cmp(&y) { + Some(Ordering::Less) => return false, + Some(Ordering::Equal) => {} + Some(Ordering::Greater) => return true, + None => return false, + } + } + } + } + } + + /// Determines if the elements of this `Iterator` are lexicographically + /// greater than or equal to those of another. + #[stable(feature = "iter_order", since = "1.5.0")] + fn ge(mut self, other: I) -> bool where + I: IntoIterator, + Self::Item: PartialOrd, + Self: Sized, + { + let mut other = other.into_iter(); + + loop { + match (self.next(), other.next()) { + (None, None) => return true, + (None, _ ) => return false, + (_ , None) => return true, + (Some(x), Some(y)) => { + match x.partial_cmp(&y) { + Some(Ordering::Less) => return false, + Some(Ordering::Equal) => {} + Some(Ordering::Greater) => return true, + None => return false, + } + }, + } + } + } +} + +/// Select an element from an iterator based on the given projection +/// and "comparison" function. +/// +/// This is an idiosyncratic helper to try to factor out the +/// commonalities of {max,min}{,_by}. In particular, this avoids +/// having to implement optimizations several times. +#[inline] +fn select_fold1(mut it: I, + mut f_proj: FProj, + mut f_cmp: FCmp) -> Option<(B, I::Item)> + where I: Iterator, + FProj: FnMut(&I::Item) -> B, + FCmp: FnMut(&B, &I::Item, &B, &I::Item) -> bool +{ + // start with the first element as our selection. This avoids + // having to use `Option`s inside the loop, translating to a + // sizeable performance gain (6x in one case). + it.next().map(|mut sel| { + let mut sel_p = f_proj(&sel); + + for x in it { + let x_p = f_proj(&x); + if f_cmp(&sel_p, &sel, &x_p, &x) { + sel = x; + sel_p = x_p; + } + } + (sel_p, sel) + }) +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<'a, I: Iterator + ?Sized> Iterator for &'a mut I { + type Item = I::Item; + fn next(&mut self) -> Option { (**self).next() } + fn size_hint(&self) -> (usize, Option) { (**self).size_hint() } +} diff --git a/src/libcore/iter/mod.rs b/src/libcore/iter/mod.rs new file mode 100644 index 0000000000..f964527b4b --- /dev/null +++ b/src/libcore/iter/mod.rs @@ -0,0 +1,1675 @@ +// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! 
Composable external iteration. +//! +//! If you've found yourself with a collection of some kind, and needed to +//! perform an operation on the elements of said collection, you'll quickly run +//! into 'iterators'. Iterators are heavily used in idiomatic Rust code, so +//! it's worth becoming familiar with them. +//! +//! Before explaining more, let's talk about how this module is structured: +//! +//! # Organization +//! +//! This module is largely organized by type: +//! +//! * [Traits] are the core portion: these traits define what kind of iterators +//! exist and what you can do with them. The methods of these traits are worth +//! putting some extra study time into. +//! * [Functions] provide some helpful ways to create some basic iterators. +//! * [Structs] are often the return types of the various methods on this +//! module's traits. You'll usually want to look at the method that creates +//! the `struct`, rather than the `struct` itself. For more detail about why, +//! see '[Implementing Iterator](#implementing-iterator)'. +//! +//! [Traits]: #traits +//! [Functions]: #functions +//! [Structs]: #structs +//! +//! That's it! Let's dig into iterators. +//! +//! # Iterator +//! +//! The heart and soul of this module is the [`Iterator`] trait. The core of +//! [`Iterator`] looks like this: +//! +//! ``` +//! trait Iterator { +//! type Item; +//! fn next(&mut self) -> Option; +//! } +//! ``` +//! +//! An iterator has a method, [`next()`], which when called, returns an +//! [`Option`]``. [`next()`] will return `Some(Item)` as long as there +//! are elements, and once they've all been exhausted, will return `None` to +//! indicate that iteration is finished. Individual iterators may choose to +//! resume iteration, and so calling [`next()`] again may or may not eventually +//! start returning `Some(Item)` again at some point. +//! +//! [`Iterator`]'s full definition includes a number of other methods as well, +//! but they are default methods, built on top of [`next()`], and so you get +//! them for free. +//! +//! Iterators are also composable, and it's common to chain them together to do +//! more complex forms of processing. See the [Adapters](#adapters) section +//! below for more details. +//! +//! [`Iterator`]: trait.Iterator.html +//! [`next()`]: trait.Iterator.html#tymethod.next +//! [`Option`]: ../../std/option/enum.Option.html +//! +//! # The three forms of iteration +//! +//! There are three common methods which can create iterators from a collection: +//! +//! * `iter()`, which iterates over `&T`. +//! * `iter_mut()`, which iterates over `&mut T`. +//! * `into_iter()`, which iterates over `T`. +//! +//! Various things in the standard library may implement one or more of the +//! three, where appropriate. +//! +//! # Implementing Iterator +//! +//! Creating an iterator of your own involves two steps: creating a `struct` to +//! hold the iterator's state, and then `impl`ementing [`Iterator`] for that +//! `struct`. This is why there are so many `struct`s in this module: there is +//! one for each iterator and iterator adapter. +//! +//! Let's make an iterator named `Counter` which counts from `1` to `5`: +//! +//! ``` +//! // First, the struct: +//! +//! /// An iterator which counts from one to five +//! struct Counter { +//! count: usize, +//! } +//! +//! // we want our count to start at one, so let's add a new() method to help. +//! // This isn't strictly necessary, but is convenient. Note that we start +//! 
// `count` at zero, we'll see why in `next()`'s implementation below. +//! impl Counter { +//! fn new() -> Counter { +//! Counter { count: 0 } +//! } +//! } +//! +//! // Then, we implement `Iterator` for our `Counter`: +//! +//! impl Iterator for Counter { +//! // we will be counting with usize +//! type Item = usize; +//! +//! // next() is the only required method +//! fn next(&mut self) -> Option { +//! // increment our count. This is why we started at zero. +//! self.count += 1; +//! +//! // check to see if we've finished counting or not. +//! if self.count < 6 { +//! Some(self.count) +//! } else { +//! None +//! } +//! } +//! } +//! +//! // And now we can use it! +//! +//! let mut counter = Counter::new(); +//! +//! let x = counter.next().unwrap(); +//! println!("{}", x); +//! +//! let x = counter.next().unwrap(); +//! println!("{}", x); +//! +//! let x = counter.next().unwrap(); +//! println!("{}", x); +//! +//! let x = counter.next().unwrap(); +//! println!("{}", x); +//! +//! let x = counter.next().unwrap(); +//! println!("{}", x); +//! ``` +//! +//! This will print `1` through `5`, each on their own line. +//! +//! Calling `next()` this way gets repetitive. Rust has a construct which can +//! call `next()` on your iterator, until it reaches `None`. Let's go over that +//! next. +//! +//! # for Loops and IntoIterator +//! +//! Rust's `for` loop syntax is actually sugar for iterators. Here's a basic +//! example of `for`: +//! +//! ``` +//! let values = vec![1, 2, 3, 4, 5]; +//! +//! for x in values { +//! println!("{}", x); +//! } +//! ``` +//! +//! This will print the numbers one through five, each on their own line. But +//! you'll notice something here: we never called anything on our vector to +//! produce an iterator. What gives? +//! +//! There's a trait in the standard library for converting something into an +//! iterator: [`IntoIterator`]. This trait has one method, [`into_iter()`], +//! which converts the thing implementing [`IntoIterator`] into an iterator. +//! Let's take a look at that `for` loop again, and what the compiler converts +//! it into: +//! +//! [`IntoIterator`]: trait.IntoIterator.html +//! [`into_iter()`]: trait.IntoIterator.html#tymethod.into_iter +//! +//! ``` +//! let values = vec![1, 2, 3, 4, 5]; +//! +//! for x in values { +//! println!("{}", x); +//! } +//! ``` +//! +//! Rust de-sugars this into: +//! +//! ``` +//! let values = vec![1, 2, 3, 4, 5]; +//! { +//! let result = match IntoIterator::into_iter(values) { +//! mut iter => loop { +//! match iter.next() { +//! Some(x) => { println!("{}", x); }, +//! None => break, +//! } +//! }, +//! }; +//! result +//! } +//! ``` +//! +//! First, we call `into_iter()` on the value. Then, we match on the iterator +//! that returns, calling [`next()`] over and over until we see a `None`. At +//! that point, we `break` out of the loop, and we're done iterating. +//! +//! There's one more subtle bit here: the standard library contains an +//! interesting implementation of [`IntoIterator`]: +//! +//! ```ignore +//! impl IntoIterator for I +//! ``` +//! +//! In other words, all [`Iterator`]s implement [`IntoIterator`], by just +//! returning themselves. This means two things: +//! +//! 1. If you're writing an [`Iterator`], you can use it with a `for` loop. +//! 2. If you're creating a collection, implementing [`IntoIterator`] for it +//! will allow your collection to be used with the `for` loop. +//! +//! # Adapters +//! +//! Functions which take an [`Iterator`] and return another [`Iterator`] are +//! 
often called 'iterator adapters', as they're a form of the 'adapter +//! pattern'. +//! +//! Common iterator adapters include [`map()`], [`take()`], and [`collect()`]. +//! For more, see their documentation. +//! +//! [`map()`]: trait.Iterator.html#method.map +//! [`take()`]: trait.Iterator.html#method.take +//! [`collect()`]: trait.Iterator.html#method.collect +//! +//! # Laziness +//! +//! Iterators (and iterator [adapters](#adapters)) are *lazy*. This means that +//! just creating an iterator doesn't _do_ a whole lot. Nothing really happens +//! until you call [`next()`]. This is sometimes a source of confusion when +//! creating an iterator solely for its side effects. For example, the [`map()`] +//! method calls a closure on each element it iterates over: +//! +//! ``` +//! # #![allow(unused_must_use)] +//! let v = vec![1, 2, 3, 4, 5]; +//! v.iter().map(|x| println!("{}", x)); +//! ``` +//! +//! This will not print any values, as we only created an iterator, rather than +//! using it. The compiler will warn us about this kind of behavior: +//! +//! ```text +//! warning: unused result which must be used: iterator adaptors are lazy and +//! do nothing unless consumed +//! ``` +//! +//! The idiomatic way to write a [`map()`] for its side effects is to use a +//! `for` loop instead: +//! +//! ``` +//! let v = vec![1, 2, 3, 4, 5]; +//! +//! for x in &v { +//! println!("{}", x); +//! } +//! ``` +//! +//! [`map()`]: trait.Iterator.html#method.map +//! +//! The two most common ways to evaluate an iterator are to use a `for` loop +//! like this, or using the [`collect()`] adapter to produce a new collection. +//! +//! [`collect()`]: trait.Iterator.html#method.collect +//! +//! # Infinity +//! +//! Iterators do not have to be finite. As an example, an open-ended range is +//! an infinite iterator: +//! +//! ``` +//! let numbers = 0..; +//! ``` +//! +//! It is common to use the [`take()`] iterator adapter to turn an infinite +//! iterator into a finite one: +//! +//! ``` +//! let numbers = 0..; +//! let five_numbers = numbers.take(5); +//! +//! for number in five_numbers { +//! println!("{}", number); +//! } +//! ``` +//! +//! This will print the numbers `0` through `4`, each on their own line. +//! +//! [`take()`]: trait.Iterator.html#method.take + +#![stable(feature = "rust1", since = "1.0.0")] + +use clone::Clone; +use cmp; +use fmt; +use ops::FnMut; +use option::Option::{self, Some, None}; +use usize; + +#[stable(feature = "rust1", since = "1.0.0")] +pub use self::iterator::Iterator; + +#[unstable(feature = "step_trait", + reason = "likely to be replaced by finer-grained traits", + issue = "27741")] +pub use self::range::Step; +#[unstable(feature = "step_by", reason = "recent addition", + issue = "27741")] +pub use self::range::StepBy; + +#[stable(feature = "rust1", since = "1.0.0")] +pub use self::sources::{Repeat, repeat}; +#[stable(feature = "iter_empty", since = "1.2.0")] +pub use self::sources::{Empty, empty}; +#[stable(feature = "iter_once", since = "1.2.0")] +pub use self::sources::{Once, once}; + +#[stable(feature = "rust1", since = "1.0.0")] +pub use self::traits::{FromIterator, IntoIterator, DoubleEndedIterator, Extend, + ExactSizeIterator}; + +mod iterator; +mod range; +mod sources; +mod traits; + +/// An double-ended iterator with the direction inverted. +/// +/// This `struct` is created by the [`rev()`] method on [`Iterator`]. See its +/// documentation for more. 
+/// +/// [`rev()`]: trait.Iterator.html#method.rev +/// [`Iterator`]: trait.Iterator.html +#[derive(Clone, Debug)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +pub struct Rev { + iter: T +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Rev where I: DoubleEndedIterator { + type Item = ::Item; + + #[inline] + fn next(&mut self) -> Option<::Item> { self.iter.next_back() } + #[inline] + fn size_hint(&self) -> (usize, Option) { self.iter.size_hint() } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for Rev where I: DoubleEndedIterator { + #[inline] + fn next_back(&mut self) -> Option<::Item> { self.iter.next() } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExactSizeIterator for Rev + where I: ExactSizeIterator + DoubleEndedIterator {} + +/// An iterator that clones the elements of an underlying iterator. +/// +/// This `struct` is created by the [`cloned()`] method on [`Iterator`]. See its +/// documentation for more. +/// +/// [`cloned()`]: trait.Iterator.html#method.cloned +/// [`Iterator`]: trait.Iterator.html +#[stable(feature = "iter_cloned", since = "1.1.0")] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[derive(Clone, Debug)] +pub struct Cloned { + it: I, +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<'a, I, T: 'a> Iterator for Cloned + where I: Iterator, T: Clone +{ + type Item = T; + + fn next(&mut self) -> Option { + self.it.next().cloned() + } + + fn size_hint(&self) -> (usize, Option) { + self.it.size_hint() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<'a, I, T: 'a> DoubleEndedIterator for Cloned + where I: DoubleEndedIterator, T: Clone +{ + fn next_back(&mut self) -> Option { + self.it.next_back().cloned() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<'a, I, T: 'a> ExactSizeIterator for Cloned + where I: ExactSizeIterator, T: Clone +{} + +/// An iterator that repeats endlessly. +/// +/// This `struct` is created by the [`cycle()`] method on [`Iterator`]. See its +/// documentation for more. +/// +/// [`cycle()`]: trait.Iterator.html#method.cycle +/// [`Iterator`]: trait.Iterator.html +#[derive(Clone, Debug)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +pub struct Cycle { + orig: I, + iter: I, +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Cycle where I: Clone + Iterator { + type Item = ::Item; + + #[inline] + fn next(&mut self) -> Option<::Item> { + match self.iter.next() { + None => { self.iter = self.orig.clone(); self.iter.next() } + y => y + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + // the cycle iterator is either empty or infinite + match self.orig.size_hint() { + sz @ (0, Some(0)) => sz, + (0, _) => (0, None), + _ => (usize::MAX, None) + } + } +} + +/// An iterator that strings two iterators together. +/// +/// This `struct` is created by the [`chain()`] method on [`Iterator`]. See its +/// documentation for more. 
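Before `Chain`, a short usage sketch of the three adapters defined just above (`Rev`, `Cloned`, `Cycle`), using only the stable `Iterator` methods that create them:

```rust
fn main() {
    let v = vec![1, 2, 3];

    // `rev()` walks the same elements from the back.
    let backwards: Vec<&i32> = v.iter().rev().collect();
    assert_eq!(backwards, [&3, &2, &1]);

    // `cloned()` turns an iterator over `&i32` into one over `i32`.
    let owned: Vec<i32> = v.iter().cloned().collect();
    assert_eq!(owned, [1, 2, 3]);

    // `cycle()` repeats forever, so it is normally paired with `take()`.
    let looped: Vec<i32> = v.iter().cloned().cycle().take(7).collect();
    assert_eq!(looped, [1, 2, 3, 1, 2, 3, 1]);
}
```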
+/// +/// [`chain()`]: trait.Iterator.html#method.chain +/// [`Iterator`]: trait.Iterator.html +#[derive(Clone, Debug)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +pub struct Chain { + a: A, + b: B, + state: ChainState, +} + +// The iterator protocol specifies that iteration ends with the return value +// `None` from `.next()` (or `.next_back()`) and it is unspecified what +// further calls return. The chain adaptor must account for this since it uses +// two subiterators. +// +// It uses three states: +// +// - Both: `a` and `b` are remaining +// - Front: `a` remaining +// - Back: `b` remaining +// +// The fourth state (neither iterator is remaining) only occurs after Chain has +// returned None once, so we don't need to store this state. +#[derive(Clone, Debug)] +enum ChainState { + // both front and back iterator are remaining + Both, + // only front is remaining + Front, + // only back is remaining + Back, +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Chain where + A: Iterator, + B: Iterator +{ + type Item = A::Item; + + #[inline] + fn next(&mut self) -> Option { + match self.state { + ChainState::Both => match self.a.next() { + elt @ Some(..) => elt, + None => { + self.state = ChainState::Back; + self.b.next() + } + }, + ChainState::Front => self.a.next(), + ChainState::Back => self.b.next(), + } + } + + #[inline] + fn count(self) -> usize { + match self.state { + ChainState::Both => self.a.count() + self.b.count(), + ChainState::Front => self.a.count(), + ChainState::Back => self.b.count(), + } + } + + #[inline] + fn nth(&mut self, mut n: usize) -> Option { + match self.state { + ChainState::Both | ChainState::Front => { + for x in self.a.by_ref() { + if n == 0 { + return Some(x) + } + n -= 1; + } + if let ChainState::Both = self.state { + self.state = ChainState::Back; + } + } + ChainState::Back => {} + } + if let ChainState::Back = self.state { + self.b.nth(n) + } else { + None + } + } + + #[inline] + fn find
<P>
(&mut self, mut predicate: P) -> Option where + P: FnMut(&Self::Item) -> bool, + { + match self.state { + ChainState::Both => match self.a.find(&mut predicate) { + None => { + self.state = ChainState::Back; + self.b.find(predicate) + } + v => v + }, + ChainState::Front => self.a.find(predicate), + ChainState::Back => self.b.find(predicate), + } + } + + #[inline] + fn last(self) -> Option { + match self.state { + ChainState::Both => { + // Must exhaust a before b. + let a_last = self.a.last(); + let b_last = self.b.last(); + b_last.or(a_last) + }, + ChainState::Front => self.a.last(), + ChainState::Back => self.b.last() + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (a_lower, a_upper) = self.a.size_hint(); + let (b_lower, b_upper) = self.b.size_hint(); + + let lower = a_lower.saturating_add(b_lower); + + let upper = match (a_upper, b_upper) { + (Some(x), Some(y)) => x.checked_add(y), + _ => None + }; + + (lower, upper) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for Chain where + A: DoubleEndedIterator, + B: DoubleEndedIterator, +{ + #[inline] + fn next_back(&mut self) -> Option { + match self.state { + ChainState::Both => match self.b.next_back() { + elt @ Some(..) => elt, + None => { + self.state = ChainState::Front; + self.a.next_back() + } + }, + ChainState::Front => self.a.next_back(), + ChainState::Back => self.b.next_back(), + } + } +} + +/// An iterator that iterates two other iterators simultaneously. +/// +/// This `struct` is created by the [`zip()`] method on [`Iterator`]. See its +/// documentation for more. +/// +/// [`zip()`]: trait.Iterator.html#method.zip +/// [`Iterator`]: trait.Iterator.html +#[derive(Clone, Debug)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +pub struct Zip { + a: A, + b: B +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Zip where A: Iterator, B: Iterator +{ + type Item = (A::Item, B::Item); + + #[inline] + fn next(&mut self) -> Option<(A::Item, B::Item)> { + self.a.next().and_then(|x| { + self.b.next().and_then(|y| { + Some((x, y)) + }) + }) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (a_lower, a_upper) = self.a.size_hint(); + let (b_lower, b_upper) = self.b.size_hint(); + + let lower = cmp::min(a_lower, b_lower); + + let upper = match (a_upper, b_upper) { + (Some(x), Some(y)) => Some(cmp::min(x,y)), + (Some(x), None) => Some(x), + (None, Some(y)) => Some(y), + (None, None) => None + }; + + (lower, upper) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for Zip where + A: DoubleEndedIterator + ExactSizeIterator, + B: DoubleEndedIterator + ExactSizeIterator, +{ + #[inline] + fn next_back(&mut self) -> Option<(A::Item, B::Item)> { + let a_sz = self.a.len(); + let b_sz = self.b.len(); + if a_sz != b_sz { + // Adjust a, b to equal length + if a_sz > b_sz { + for _ in 0..a_sz - b_sz { self.a.next_back(); } + } else { + for _ in 0..b_sz - a_sz { self.b.next_back(); } + } + } + match (self.a.next_back(), self.b.next_back()) { + (Some(x), Some(y)) => Some((x, y)), + (None, None) => None, + _ => unreachable!(), + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExactSizeIterator for Zip + where A: ExactSizeIterator, B: ExactSizeIterator {} + +/// An iterator that maps the values of `iter` with `f`. +/// +/// This `struct` is created by the [`map()`] method on [`Iterator`]. See its +/// documentation for more. 
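Before moving on to `Map`, a small usage sketch of the two adapters just defined, `Chain` and `Zip`. It also shows why the `size_hint()` implementations above add the two bounds for `Chain` but take the minimum for `Zip`:

```rust
fn main() {
    let a = [1, 2, 3];
    let b = [10, 20];

    // `chain()` yields everything from `a`, then everything from `b`.
    let chained: Vec<i32> = a.iter().chain(b.iter()).cloned().collect();
    assert_eq!(chained, [1, 2, 3, 10, 20]);

    // `zip()` stops as soon as either input runs out, which is why its
    // `size_hint()` uses the minimum of the two bounds.
    let zipped: Vec<(i32, i32)> = a.iter().zip(b.iter())
        .map(|(&x, &y)| (x, y))
        .collect();
    assert_eq!(zipped, [(1, 10), (2, 20)]);
}
```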
+/// +/// [`map()`]: trait.Iterator.html#method.map +/// [`Iterator`]: trait.Iterator.html +/// +/// # Notes about side effects +/// +/// The [`map()`] iterator implements [`DoubleEndedIterator`], meaning that +/// you can also [`map()`] backwards: +/// +/// ```rust +/// let v: Vec = vec![1, 2, 3].into_iter().rev().map(|x| x + 1).collect(); +/// +/// assert_eq!(v, [4, 3, 2]); +/// ``` +/// +/// [`DoubleEndedIterator`]: trait.DoubleEndedIterator.html +/// +/// But if your closure has state, iterating backwards may act in a way you do +/// not expect. Let's go through an example. First, in the forward direction: +/// +/// ```rust +/// let mut c = 0; +/// +/// for pair in vec!['a', 'b', 'c'].into_iter() +/// .map(|letter| { c += 1; (letter, c) }) { +/// println!("{:?}", pair); +/// } +/// ``` +/// +/// This will print "('a', 1), ('b', 2), ('c', 3)". +/// +/// Now consider this twist where we add a call to `rev`. This version will +/// print `('c', 1), ('b', 2), ('a', 3)`. Note that the letters are reversed, +/// but the values of the counter still go in order. This is because `map()` is +/// still being called lazilly on each item, but we are popping items off the +/// back of the vector now, instead of shifting them from the front. +/// +/// ```rust +/// let mut c = 0; +/// +/// for pair in vec!['a', 'b', 'c'].into_iter() +/// .map(|letter| { c += 1; (letter, c) }) +/// .rev() { +/// println!("{:?}", pair); +/// } +/// ``` +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +#[derive(Clone)] +pub struct Map { + iter: I, + f: F, +} + +#[stable(feature = "core_impl_debug", since = "1.9.0")] +impl fmt::Debug for Map { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Map") + .field("iter", &self.iter) + .finish() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Map where F: FnMut(I::Item) -> B { + type Item = B; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next().map(&mut self.f) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for Map where + F: FnMut(I::Item) -> B, +{ + #[inline] + fn next_back(&mut self) -> Option { + self.iter.next_back().map(&mut self.f) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExactSizeIterator for Map + where F: FnMut(I::Item) -> B {} + +/// An iterator that filters the elements of `iter` with `predicate`. +/// +/// This `struct` is created by the [`filter()`] method on [`Iterator`]. See its +/// documentation for more. 
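A small sketch of what `Filter` can and cannot promise about its length: until the predicate has actually run, it cannot know how many elements will survive, which is reflected in the `(0, upper)` bound returned by the `size_hint()` implementation that follows.

```rust
fn main() {
    let v = vec![1, 2, 3, 4, 5];

    let evens = v.iter().filter(|&&x| x % 2 == 0);

    // The lower bound is 0 (perhaps nothing passes the predicate) and the
    // upper bound is the length of the underlying iterator.
    assert_eq!(evens.size_hint(), (0, Some(5)));
}
```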
+/// +/// [`filter()`]: trait.Iterator.html#method.filter +/// [`Iterator`]: trait.Iterator.html +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +#[derive(Clone)] +pub struct Filter { + iter: I, + predicate: P, +} + +#[stable(feature = "core_impl_debug", since = "1.9.0")] +impl fmt::Debug for Filter { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Filter") + .field("iter", &self.iter) + .finish() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Filter where P: FnMut(&I::Item) -> bool { + type Item = I::Item; + + #[inline] + fn next(&mut self) -> Option { + for x in self.iter.by_ref() { + if (self.predicate)(&x) { + return Some(x); + } + } + None + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (_, upper) = self.iter.size_hint(); + (0, upper) // can't know a lower bound, due to the predicate + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for Filter + where P: FnMut(&I::Item) -> bool, +{ + #[inline] + fn next_back(&mut self) -> Option { + for x in self.iter.by_ref().rev() { + if (self.predicate)(&x) { + return Some(x); + } + } + None + } +} + +/// An iterator that uses `f` to both filter and map elements from `iter`. +/// +/// This `struct` is created by the [`filter_map()`] method on [`Iterator`]. See its +/// documentation for more. +/// +/// [`filter_map()`]: trait.Iterator.html#method.filter_map +/// [`Iterator`]: trait.Iterator.html +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +#[derive(Clone)] +pub struct FilterMap { + iter: I, + f: F, +} + +#[stable(feature = "core_impl_debug", since = "1.9.0")] +impl fmt::Debug for FilterMap { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("FilterMap") + .field("iter", &self.iter) + .finish() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for FilterMap + where F: FnMut(I::Item) -> Option, +{ + type Item = B; + + #[inline] + fn next(&mut self) -> Option { + for x in self.iter.by_ref() { + if let Some(y) = (self.f)(x) { + return Some(y); + } + } + None + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (_, upper) = self.iter.size_hint(); + (0, upper) // can't know a lower bound, due to the predicate + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for FilterMap + where F: FnMut(I::Item) -> Option, +{ + #[inline] + fn next_back(&mut self) -> Option { + for x in self.iter.by_ref().rev() { + if let Some(y) = (self.f)(x) { + return Some(y); + } + } + None + } +} + +/// An iterator that yields the current count and the element during iteration. +/// +/// This `struct` is created by the [`enumerate()`] method on [`Iterator`]. See its +/// documentation for more. +/// +/// [`enumerate()`]: trait.Iterator.html#method.enumerate +/// [`Iterator`]: trait.Iterator.html +#[derive(Clone, Debug)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +pub struct Enumerate { + iter: I, + count: usize, +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Enumerate where I: Iterator { + type Item = (usize, ::Item); + + /// # Overflow Behavior + /// + /// The method does no guarding against overflows, so enumerating more than + /// `usize::MAX` elements either produces the wrong result or panics. 
If + /// debug assertions are enabled, a panic is guaranteed. + /// + /// # Panics + /// + /// Might panic if the index of the element overflows a `usize`. + #[inline] + fn next(&mut self) -> Option<(usize, ::Item)> { + self.iter.next().map(|a| { + let ret = (self.count, a); + // Possible undefined overflow. + self.count += 1; + ret + }) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } + + #[inline] + fn nth(&mut self, n: usize) -> Option<(usize, I::Item)> { + self.iter.nth(n).map(|a| { + let i = self.count + n; + self.count = i + 1; + (i, a) + }) + } + + #[inline] + fn count(self) -> usize { + self.iter.count() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for Enumerate where + I: ExactSizeIterator + DoubleEndedIterator +{ + #[inline] + fn next_back(&mut self) -> Option<(usize, ::Item)> { + self.iter.next_back().map(|a| { + let len = self.iter.len(); + // Can safely add, `ExactSizeIterator` promises that the number of + // elements fits into a `usize`. + (self.count + len, a) + }) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExactSizeIterator for Enumerate where I: ExactSizeIterator {} + +/// An iterator with a `peek()` that returns an optional reference to the next +/// element. +/// +/// This `struct` is created by the [`peekable()`] method on [`Iterator`]. See its +/// documentation for more. +/// +/// [`peekable()`]: trait.Iterator.html#method.peekable +/// [`Iterator`]: trait.Iterator.html +#[derive(Clone, Debug)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +pub struct Peekable { + iter: I, + peeked: Option, +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Peekable { + type Item = I::Item; + + #[inline] + fn next(&mut self) -> Option { + match self.peeked { + Some(_) => self.peeked.take(), + None => self.iter.next(), + } + } + + #[inline] + fn count(self) -> usize { + (if self.peeked.is_some() { 1 } else { 0 }) + self.iter.count() + } + + #[inline] + fn nth(&mut self, n: usize) -> Option { + match self.peeked { + Some(_) if n == 0 => self.peeked.take(), + Some(_) => { + self.peeked = None; + self.iter.nth(n-1) + }, + None => self.iter.nth(n) + } + } + + #[inline] + fn last(self) -> Option { + self.iter.last().or(self.peeked) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (lo, hi) = self.iter.size_hint(); + if self.peeked.is_some() { + let lo = lo.saturating_add(1); + let hi = hi.and_then(|x| x.checked_add(1)); + (lo, hi) + } else { + (lo, hi) + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExactSizeIterator for Peekable {} + +impl Peekable { + /// Returns a reference to the next() value without advancing the iterator. + /// + /// The `peek()` method will return the value that a call to [`next()`] would + /// return, but does not advance the iterator. Like [`next()`], if there is + /// a value, it's wrapped in a `Some(T)`, but if the iterator is over, it + /// will return `None`. + /// + /// [`next()`]: trait.Iterator.html#tymethod.next + /// + /// Because `peek()` returns reference, and many iterators iterate over + /// references, this leads to a possibly confusing situation where the + /// return value is a double reference. You can see this effect in the + /// examples below, with `&&i32`. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let xs = [1, 2, 3]; + /// + /// let mut iter = xs.iter().peekable(); + /// + /// // peek() lets us see into the future + /// assert_eq!(iter.peek(), Some(&&1)); + /// assert_eq!(iter.next(), Some(&1)); + /// + /// assert_eq!(iter.next(), Some(&2)); + /// + /// // we can peek() multiple times, the iterator won't advance + /// assert_eq!(iter.peek(), Some(&&3)); + /// assert_eq!(iter.peek(), Some(&&3)); + /// + /// assert_eq!(iter.next(), Some(&3)); + /// + /// // after the iterator is finished, so is peek() + /// assert_eq!(iter.peek(), None); + /// assert_eq!(iter.next(), None); + /// ``` + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + pub fn peek(&mut self) -> Option<&I::Item> { + if self.peeked.is_none() { + self.peeked = self.iter.next(); + } + match self.peeked { + Some(ref value) => Some(value), + None => None, + } + } + + /// Checks if the iterator has finished iterating. + /// + /// Returns `true` if there are no more elements in the iterator, and + /// `false` if there are. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// #![feature(peekable_is_empty)] + /// + /// let xs = [1, 2, 3]; + /// + /// let mut iter = xs.iter().peekable(); + /// + /// // there are still elements to iterate over + /// assert_eq!(iter.is_empty(), false); + /// + /// // let's consume the iterator + /// iter.next(); + /// iter.next(); + /// iter.next(); + /// + /// assert_eq!(iter.is_empty(), true); + /// ``` + #[unstable(feature = "peekable_is_empty", issue = "32111")] + #[inline] + #[rustc_deprecated(since = "1.10.0", reason = "replaced by .peek().is_none()")] + pub fn is_empty(&mut self) -> bool { + self.peek().is_none() + } +} + +/// An iterator that rejects elements while `predicate` is true. +/// +/// This `struct` is created by the [`skip_while()`] method on [`Iterator`]. See its +/// documentation for more. +/// +/// [`skip_while()`]: trait.Iterator.html#method.skip_while +/// [`Iterator`]: trait.Iterator.html +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +#[derive(Clone)] +pub struct SkipWhile { + iter: I, + flag: bool, + predicate: P, +} + +#[stable(feature = "core_impl_debug", since = "1.9.0")] +impl fmt::Debug for SkipWhile { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("SkipWhile") + .field("iter", &self.iter) + .field("flag", &self.flag) + .finish() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for SkipWhile + where P: FnMut(&I::Item) -> bool +{ + type Item = I::Item; + + #[inline] + fn next(&mut self) -> Option { + for x in self.iter.by_ref() { + if self.flag || !(self.predicate)(&x) { + self.flag = true; + return Some(x); + } + } + None + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (_, upper) = self.iter.size_hint(); + (0, upper) // can't know a lower bound, due to the predicate + } +} + +/// An iterator that only accepts elements while `predicate` is true. +/// +/// This `struct` is created by the [`take_while()`] method on [`Iterator`]. See its +/// documentation for more. 
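A usage sketch contrasting the two predicate-driven adapters, `skip_while()` and `take_while()` (the input values are illustrative):

```rust
fn main() {
    let v = [1, 2, 3, 10, 2, 1];

    // `skip_while()` drops elements only until the predicate fails once;
    // after that point everything is yielded, even later small values.
    let skipped: Vec<i32> = v.iter().cloned().skip_while(|&x| x < 3).collect();
    assert_eq!(skipped, [3, 10, 2, 1]);

    // `take_while()` is the mirror image: it stops at the first element
    // that fails the predicate and never resumes.
    let taken: Vec<i32> = v.iter().cloned().take_while(|&x| x < 3).collect();
    assert_eq!(taken, [1, 2]);
}
```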
+/// +/// [`take_while()`]: trait.Iterator.html#method.take_while +/// [`Iterator`]: trait.Iterator.html +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +#[derive(Clone)] +pub struct TakeWhile { + iter: I, + flag: bool, + predicate: P, +} + +#[stable(feature = "core_impl_debug", since = "1.9.0")] +impl fmt::Debug for TakeWhile { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("TakeWhile") + .field("iter", &self.iter) + .field("flag", &self.flag) + .finish() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for TakeWhile + where P: FnMut(&I::Item) -> bool +{ + type Item = I::Item; + + #[inline] + fn next(&mut self) -> Option { + if self.flag { + None + } else { + self.iter.next().and_then(|x| { + if (self.predicate)(&x) { + Some(x) + } else { + self.flag = true; + None + } + }) + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (_, upper) = self.iter.size_hint(); + (0, upper) // can't know a lower bound, due to the predicate + } +} + +/// An iterator that skips over `n` elements of `iter`. +/// +/// This `struct` is created by the [`skip()`] method on [`Iterator`]. See its +/// documentation for more. +/// +/// [`skip()`]: trait.Iterator.html#method.skip +/// [`Iterator`]: trait.Iterator.html +#[derive(Clone, Debug)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +pub struct Skip { + iter: I, + n: usize +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Skip where I: Iterator { + type Item = ::Item; + + #[inline] + fn next(&mut self) -> Option { + if self.n == 0 { + self.iter.next() + } else { + let old_n = self.n; + self.n = 0; + self.iter.nth(old_n) + } + } + + #[inline] + fn nth(&mut self, n: usize) -> Option { + // Can't just add n + self.n due to overflow. + if self.n == 0 { + self.iter.nth(n) + } else { + let to_skip = self.n; + self.n = 0; + // nth(n) skips n+1 + if self.iter.nth(to_skip-1).is_none() { + return None; + } + self.iter.nth(n) + } + } + + #[inline] + fn count(self) -> usize { + self.iter.count().saturating_sub(self.n) + } + + #[inline] + fn last(mut self) -> Option { + if self.n == 0 { + self.iter.last() + } else { + let next = self.next(); + if next.is_some() { + // recurse. n should be 0. + self.last().or(next) + } else { + None + } + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (lower, upper) = self.iter.size_hint(); + + let lower = lower.saturating_sub(self.n); + let upper = upper.map(|x| x.saturating_sub(self.n)); + + (lower, upper) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExactSizeIterator for Skip where I: ExactSizeIterator {} + +#[stable(feature = "double_ended_skip_iterator", since = "1.8.0")] +impl DoubleEndedIterator for Skip where I: DoubleEndedIterator + ExactSizeIterator { + fn next_back(&mut self) -> Option { + if self.len() > 0 { + self.iter.next_back() + } else { + None + } + } +} + +/// An iterator that only iterates over the first `n` iterations of `iter`. +/// +/// This `struct` is created by the [`take()`] method on [`Iterator`]. See its +/// documentation for more. 
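A brief sketch combining `skip()` with `take()`, a common way to select a window of elements out of a longer iterator:

```rust
fn main() {
    // Select the second "page" of three elements: skip the first page,
    // then take one page's worth.
    let items = 0..10;
    let page: Vec<i32> = items.skip(3).take(3).collect();
    assert_eq!(page, [3, 4, 5]);
}
```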
+/// +/// [`take()`]: trait.Iterator.html#method.take +/// [`Iterator`]: trait.Iterator.html +#[derive(Clone, Debug)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +pub struct Take { + iter: I, + n: usize +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Take where I: Iterator{ + type Item = ::Item; + + #[inline] + fn next(&mut self) -> Option<::Item> { + if self.n != 0 { + self.n -= 1; + self.iter.next() + } else { + None + } + } + + #[inline] + fn nth(&mut self, n: usize) -> Option { + if self.n > n { + self.n -= n + 1; + self.iter.nth(n) + } else { + if self.n > 0 { + self.iter.nth(self.n - 1); + self.n = 0; + } + None + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (lower, upper) = self.iter.size_hint(); + + let lower = cmp::min(lower, self.n); + + let upper = match upper { + Some(x) if x < self.n => Some(x), + _ => Some(self.n) + }; + + (lower, upper) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExactSizeIterator for Take where I: ExactSizeIterator {} + + +/// An iterator to maintain state while iterating another iterator. +/// +/// This `struct` is created by the [`scan()`] method on [`Iterator`]. See its +/// documentation for more. +/// +/// [`scan()`]: trait.Iterator.html#method.scan +/// [`Iterator`]: trait.Iterator.html +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +#[derive(Clone)] +pub struct Scan { + iter: I, + f: F, + state: St, +} + +#[stable(feature = "core_impl_debug", since = "1.9.0")] +impl fmt::Debug for Scan { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Scan") + .field("iter", &self.iter) + .field("state", &self.state) + .finish() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Scan where + I: Iterator, + F: FnMut(&mut St, I::Item) -> Option, +{ + type Item = B; + + #[inline] + fn next(&mut self) -> Option { + self.iter.next().and_then(|a| (self.f)(&mut self.state, a)) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (_, upper) = self.iter.size_hint(); + (0, upper) // can't know a lower bound, due to the scan function + } +} + +/// An iterator that maps each element to an iterator, and yields the elements +/// of the produced iterators. +/// +/// This `struct` is created by the [`flat_map()`] method on [`Iterator`]. See its +/// documentation for more. 
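A minimal sketch of `scan()`, the adapter implemented just above: it threads mutable state through the iteration and yields whatever the closure returns at each step. Here the state is a running total.

```rust
fn main() {
    let v = [1, 2, 3, 4];

    // Each step adds the element to the running total and yields the
    // total so far.
    let running_totals: Vec<i32> = v.iter()
        .scan(0, |total, &x| {
            *total += x;
            Some(*total)
        })
        .collect();

    assert_eq!(running_totals, [1, 3, 6, 10]);
}
```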
+/// +/// [`flat_map()`]: trait.Iterator.html#method.flat_map +/// [`Iterator`]: trait.Iterator.html +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +#[derive(Clone)] +pub struct FlatMap { + iter: I, + f: F, + frontiter: Option, + backiter: Option, +} + +#[stable(feature = "core_impl_debug", since = "1.9.0")] +impl fmt::Debug for FlatMap + where U::IntoIter: fmt::Debug +{ + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("FlatMap") + .field("iter", &self.iter) + .field("frontiter", &self.frontiter) + .field("backiter", &self.backiter) + .finish() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for FlatMap + where F: FnMut(I::Item) -> U, +{ + type Item = U::Item; + + #[inline] + fn next(&mut self) -> Option { + loop { + if let Some(ref mut inner) = self.frontiter { + if let Some(x) = inner.by_ref().next() { + return Some(x) + } + } + match self.iter.next().map(&mut self.f) { + None => return self.backiter.as_mut().and_then(|it| it.next()), + next => self.frontiter = next.map(IntoIterator::into_iter), + } + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + let (flo, fhi) = self.frontiter.as_ref().map_or((0, Some(0)), |it| it.size_hint()); + let (blo, bhi) = self.backiter.as_ref().map_or((0, Some(0)), |it| it.size_hint()); + let lo = flo.saturating_add(blo); + match (self.iter.size_hint(), fhi, bhi) { + ((0, Some(0)), Some(a), Some(b)) => (lo, a.checked_add(b)), + _ => (lo, None) + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for FlatMap where + F: FnMut(I::Item) -> U, + U: IntoIterator, + U::IntoIter: DoubleEndedIterator +{ + #[inline] + fn next_back(&mut self) -> Option { + loop { + if let Some(ref mut inner) = self.backiter { + if let Some(y) = inner.next_back() { + return Some(y) + } + } + match self.iter.next_back().map(&mut self.f) { + None => return self.frontiter.as_mut().and_then(|it| it.next_back()), + next => self.backiter = next.map(IntoIterator::into_iter), + } + } + } +} + +/// An iterator that yields `None` forever after the underlying iterator +/// yields `None` once. +/// +/// This `struct` is created by the [`fuse()`] method on [`Iterator`]. See its +/// documentation for more. 
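A short usage sketch of `flat_map()`: each element is mapped to an inner iterator, and the elements of those inner iterators are yielded one after another.

```rust
fn main() {
    let words = ["alpha", "beta"];

    // Map each word to its character iterator, then flatten the
    // characters into a single `String`.
    let letters: String = words.iter().flat_map(|s| s.chars()).collect();
    assert_eq!(letters, "alphabeta");
}
```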
+/// +/// [`fuse()`]: trait.Iterator.html#method.fuse +/// [`Iterator`]: trait.Iterator.html +#[derive(Clone, Debug)] +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +pub struct Fuse { + iter: I, + done: bool +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Fuse where I: Iterator { + type Item = ::Item; + + #[inline] + fn next(&mut self) -> Option<::Item> { + if self.done { + None + } else { + let next = self.iter.next(); + self.done = next.is_none(); + next + } + } + + #[inline] + fn nth(&mut self, n: usize) -> Option { + if self.done { + None + } else { + let nth = self.iter.nth(n); + self.done = nth.is_none(); + nth + } + } + + #[inline] + fn last(self) -> Option { + if self.done { + None + } else { + self.iter.last() + } + } + + #[inline] + fn count(self) -> usize { + if self.done { + 0 + } else { + self.iter.count() + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + if self.done { + (0, Some(0)) + } else { + self.iter.size_hint() + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for Fuse where I: DoubleEndedIterator { + #[inline] + fn next_back(&mut self) -> Option<::Item> { + if self.done { + None + } else { + let next = self.iter.next_back(); + self.done = next.is_none(); + next + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExactSizeIterator for Fuse where I: ExactSizeIterator {} + +/// An iterator that calls a function with a reference to each element before +/// yielding it. +/// +/// This `struct` is created by the [`inspect()`] method on [`Iterator`]. See its +/// documentation for more. +/// +/// [`inspect()`]: trait.Iterator.html#method.inspect +/// [`Iterator`]: trait.Iterator.html +#[must_use = "iterator adaptors are lazy and do nothing unless consumed"] +#[stable(feature = "rust1", since = "1.0.0")] +#[derive(Clone)] +pub struct Inspect { + iter: I, + f: F, +} + +#[stable(feature = "core_impl_debug", since = "1.9.0")] +impl fmt::Debug for Inspect { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_struct("Inspect") + .field("iter", &self.iter) + .finish() + } +} + +impl Inspect where F: FnMut(&I::Item) { + #[inline] + fn do_inspect(&mut self, elt: Option) -> Option { + if let Some(ref a) = elt { + (self.f)(a); + } + + elt + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Inspect where F: FnMut(&I::Item) { + type Item = I::Item; + + #[inline] + fn next(&mut self) -> Option { + let next = self.iter.next(); + self.do_inspect(next) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + self.iter.size_hint() + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for Inspect + where F: FnMut(&I::Item), +{ + #[inline] + fn next_back(&mut self) -> Option { + let next = self.iter.next_back(); + self.do_inspect(next) + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExactSizeIterator for Inspect + where F: FnMut(&I::Item) {} diff --git a/src/libcore/iter/range.rs b/src/libcore/iter/range.rs new file mode 100644 index 0000000000..08143567be --- /dev/null +++ b/src/libcore/iter/range.rs @@ -0,0 +1,548 @@ +// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +use clone::Clone; +use cmp::PartialOrd; +use mem; +use num::{Zero, One}; +use ops::{self, Add, Sub}; +use option::Option::{self, Some, None}; +use marker::Sized; +use usize; + +use super::{DoubleEndedIterator, ExactSizeIterator, Iterator}; + +/// Objects that can be stepped over in both directions. +/// +/// The `steps_between` function provides a way to efficiently compare +/// two `Step` objects. +#[unstable(feature = "step_trait", + reason = "likely to be replaced by finer-grained traits", + issue = "27741")] +pub trait Step: PartialOrd + Sized { + /// Steps `self` if possible. + fn step(&self, by: &Self) -> Option; + + /// Returns the number of steps between two step objects. The count is + /// inclusive of `start` and exclusive of `end`. + /// + /// Returns `None` if it is not possible to calculate `steps_between` + /// without overflow. + fn steps_between(start: &Self, end: &Self, by: &Self) -> Option; +} + +macro_rules! step_impl_unsigned { + ($($t:ty)*) => ($( + #[unstable(feature = "step_trait", + reason = "likely to be replaced by finer-grained traits", + issue = "27741")] + impl Step for $t { + #[inline] + fn step(&self, by: &$t) -> Option<$t> { + (*self).checked_add(*by) + } + #[inline] + #[allow(trivial_numeric_casts)] + fn steps_between(start: &$t, end: &$t, by: &$t) -> Option { + if *by == 0 { return None; } + if *start < *end { + // Note: We assume $t <= usize here + let diff = (*end - *start) as usize; + let by = *by as usize; + if diff % by > 0 { + Some(diff / by + 1) + } else { + Some(diff / by) + } + } else { + Some(0) + } + } + } + )*) +} +macro_rules! step_impl_signed { + ($($t:ty)*) => ($( + #[unstable(feature = "step_trait", + reason = "likely to be replaced by finer-grained traits", + issue = "27741")] + impl Step for $t { + #[inline] + fn step(&self, by: &$t) -> Option<$t> { + (*self).checked_add(*by) + } + #[inline] + #[allow(trivial_numeric_casts)] + fn steps_between(start: &$t, end: &$t, by: &$t) -> Option { + if *by == 0 { return None; } + let diff: usize; + let by_u: usize; + if *by > 0 { + if *start >= *end { + return Some(0); + } + // Note: We assume $t <= isize here + // Use .wrapping_sub and cast to usize to compute the + // difference that may not fit inside the range of isize. + diff = (*end as isize).wrapping_sub(*start as isize) as usize; + by_u = *by as usize; + } else { + if *start <= *end { + return Some(0); + } + diff = (*start as isize).wrapping_sub(*end as isize) as usize; + by_u = (*by as isize).wrapping_mul(-1) as usize; + } + if diff % by_u > 0 { + Some(diff / by_u + 1) + } else { + Some(diff / by_u) + } + } + } + )*) +} + +macro_rules! step_impl_no_between { + ($($t:ty)*) => ($( + #[unstable(feature = "step_trait", + reason = "likely to be replaced by finer-grained traits", + issue = "27741")] + impl Step for $t { + #[inline] + fn step(&self, by: &$t) -> Option<$t> { + (*self).checked_add(*by) + } + #[inline] + fn steps_between(_a: &$t, _b: &$t, _by: &$t) -> Option { + None + } + } + )*) +} + +step_impl_unsigned!(usize u8 u16 u32); +step_impl_signed!(isize i8 i16 i32); +#[cfg(target_pointer_width = "64")] +step_impl_unsigned!(u64); +#[cfg(target_pointer_width = "64")] +step_impl_signed!(i64); +// If the target pointer width is not 64-bits, we +// assume here that it is less than 64-bits. +#[cfg(not(target_pointer_width = "64"))] +step_impl_no_between!(u64 i64); + +/// An adapter for stepping range iterators by a custom amount. 
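The unsigned `steps_between` arm above boils down to a ceiling division: how many steps of size `by` are needed to walk from `start` up to, but not including, `end`. A standalone sketch of that arithmetic follows; the function name is ours for illustration and is not part of the `Step` trait.

```rust
// Illustrative re-statement of the unsigned `steps_between` arithmetic
// shown above; not part of the `Step` trait itself.
fn steps_between_unsigned(start: usize, end: usize, by: usize) -> Option<usize> {
    if by == 0 {
        return None; // a zero-sized step would never terminate
    }
    if start >= end {
        return Some(0); // nothing to walk over
    }
    let diff = end - start;
    if diff % by > 0 {
        Some(diff / by + 1) // round up when the last step is partial
    } else {
        Some(diff / by)
    }
}

fn main() {
    assert_eq!(steps_between_unsigned(0, 10, 3), Some(4)); // 0, 3, 6, 9
    assert_eq!(steps_between_unsigned(0, 9, 3), Some(3));  // 0, 3, 6
    assert_eq!(steps_between_unsigned(5, 5, 1), Some(0));
    assert_eq!(steps_between_unsigned(0, 10, 0), None);
}
```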
+/// +/// The resulting iterator handles overflow by stopping. The `A` +/// parameter is the type being iterated over, while `R` is the range +/// type (usually one of `std::ops::{Range, RangeFrom, RangeInclusive}`. +#[derive(Clone, Debug)] +#[unstable(feature = "step_by", reason = "recent addition", + issue = "27741")] +pub struct StepBy { + step_by: A, + range: R, +} + +impl ops::RangeFrom { + /// Creates an iterator starting at the same point, but stepping by + /// the given amount at each iteration. + /// + /// # Examples + /// + /// ``` + /// # #![feature(step_by)] + /// + /// for i in (0u8..).step_by(2).take(10) { + /// println!("{}", i); + /// } + /// ``` + /// + /// This prints the first ten even natural integers (0 to 18). + #[unstable(feature = "step_by", reason = "recent addition", + issue = "27741")] + pub fn step_by(self, by: A) -> StepBy { + StepBy { + step_by: by, + range: self + } + } +} + +impl ops::Range { + /// Creates an iterator with the same range, but stepping by the + /// given amount at each iteration. + /// + /// The resulting iterator handles overflow by stopping. + /// + /// # Examples + /// + /// ``` + /// #![feature(step_by)] + /// + /// for i in (0..10).step_by(2) { + /// println!("{}", i); + /// } + /// ``` + /// + /// This prints: + /// + /// ```text + /// 0 + /// 2 + /// 4 + /// 6 + /// 8 + /// ``` + #[unstable(feature = "step_by", reason = "recent addition", + issue = "27741")] + pub fn step_by(self, by: A) -> StepBy { + StepBy { + step_by: by, + range: self + } + } +} + +impl ops::RangeInclusive { + /// Creates an iterator with the same range, but stepping by the + /// given amount at each iteration. + /// + /// The resulting iterator handles overflow by stopping. + /// + /// # Examples + /// + /// ``` + /// #![feature(step_by, inclusive_range_syntax)] + /// + /// for i in (0...10).step_by(2) { + /// println!("{}", i); + /// } + /// ``` + /// + /// This prints: + /// + /// ```text + /// 0 + /// 2 + /// 4 + /// 6 + /// 8 + /// 10 + /// ``` + #[unstable(feature = "step_by", reason = "recent addition", + issue = "27741")] + pub fn step_by(self, by: A) -> StepBy { + StepBy { + step_by: by, + range: self + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for StepBy> where + A: Clone, + for<'a> &'a A: Add<&'a A, Output = A> +{ + type Item = A; + + #[inline] + fn next(&mut self) -> Option { + let mut n = &self.range.start + &self.step_by; + mem::swap(&mut n, &mut self.range.start); + Some(n) + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + (usize::MAX, None) // Too bad we can't specify an infinite lower bound + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for StepBy> { + type Item = A; + + #[inline] + fn next(&mut self) -> Option { + let rev = self.step_by < A::zero(); + if (rev && self.range.start > self.range.end) || + (!rev && self.range.start < self.range.end) + { + match self.range.start.step(&self.step_by) { + Some(mut n) => { + mem::swap(&mut self.range.start, &mut n); + Some(n) + }, + None => { + let mut n = self.range.end.clone(); + mem::swap(&mut self.range.start, &mut n); + Some(n) + } + } + } else { + None + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + match Step::steps_between(&self.range.start, + &self.range.end, + &self.step_by) { + Some(hint) => (hint, Some(hint)), + None => (0, None) + } + } +} + +#[unstable(feature = "inclusive_range", + reason = "recently added, follows RFC", + issue = "28237")] +impl Iterator for StepBy> { + type Item = A; + + #[inline] + fn 
next(&mut self) -> Option { + use ops::RangeInclusive::*; + + // this function has a sort of odd structure due to borrowck issues + // we may need to replace self.range, so borrows of start and end need to end early + + let (finishing, n) = match self.range { + Empty { .. } => return None, // empty iterators yield no values + + NonEmpty { ref mut start, ref mut end } => { + let zero = A::zero(); + let rev = self.step_by < zero; + + // march start towards (maybe past!) end and yield the old value + if (rev && start >= end) || + (!rev && start <= end) + { + match start.step(&self.step_by) { + Some(mut n) => { + mem::swap(start, &mut n); + (None, Some(n)) // yield old value, remain non-empty + }, + None => { + let mut n = end.clone(); + mem::swap(start, &mut n); + (None, Some(n)) // yield old value, remain non-empty + } + } + } else { + // found range in inconsistent state (start at or past end), so become empty + (Some(mem::replace(end, zero)), None) + } + } + }; + + // turn into an empty iterator if we've reached the end + if let Some(end) = finishing { + self.range = Empty { at: end }; + } + + n + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + use ops::RangeInclusive::*; + + match self.range { + Empty { .. } => (0, Some(0)), + + NonEmpty { ref start, ref end } => + match Step::steps_between(start, + end, + &self.step_by) { + Some(hint) => (hint.saturating_add(1), hint.checked_add(1)), + None => (0, None) + } + } + } +} + +macro_rules! range_exact_iter_impl { + ($($t:ty)*) => ($( + #[stable(feature = "rust1", since = "1.0.0")] + impl ExactSizeIterator for ops::Range<$t> { } + + #[unstable(feature = "inclusive_range", + reason = "recently added, follows RFC", + issue = "28237")] + impl ExactSizeIterator for ops::RangeInclusive<$t> { } + )*) +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for ops::Range where + for<'a> &'a A: Add<&'a A, Output = A> +{ + type Item = A; + + #[inline] + fn next(&mut self) -> Option { + if self.start < self.end { + let mut n = &self.start + &A::one(); + mem::swap(&mut n, &mut self.start); + Some(n) + } else { + None + } + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + match Step::steps_between(&self.start, &self.end, &A::one()) { + Some(hint) => (hint, Some(hint)), + None => (0, None) + } + } +} + +// Ranges of u64 and i64 are excluded because they cannot guarantee having +// a length <= usize::MAX, which is required by ExactSizeIterator. 
+range_exact_iter_impl!(usize u8 u16 u32 isize i8 i16 i32); + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for ops::Range where + for<'a> &'a A: Add<&'a A, Output = A>, + for<'a> &'a A: Sub<&'a A, Output = A> +{ + #[inline] + fn next_back(&mut self) -> Option { + if self.start < self.end { + self.end = &self.end - &A::one(); + Some(self.end.clone()) + } else { + None + } + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for ops::RangeFrom where + for<'a> &'a A: Add<&'a A, Output = A> +{ + type Item = A; + + #[inline] + fn next(&mut self) -> Option { + let mut n = &self.start + &A::one(); + mem::swap(&mut n, &mut self.start); + Some(n) + } +} + +#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] +impl Iterator for ops::RangeInclusive where + for<'a> &'a A: Add<&'a A, Output = A> +{ + type Item = A; + + #[inline] + fn next(&mut self) -> Option { + use ops::RangeInclusive::*; + + // this function has a sort of odd structure due to borrowck issues + // we may need to replace self, so borrows of self.start and self.end need to end early + + let (finishing, n) = match *self { + Empty { .. } => (None, None), // empty iterators yield no values + + NonEmpty { ref mut start, ref mut end } => { + if start == end { + (Some(mem::replace(end, A::one())), Some(mem::replace(start, A::one()))) + } else if start < end { + let one = A::one(); + let mut n = &*start + &one; + mem::swap(&mut n, start); + + // if the iterator is done iterating, it will change from NonEmpty to Empty + // to avoid unnecessary drops or clones, we'll reuse either start or end + // (they are equal now, so it doesn't matter which) + // to pull out end, we need to swap something back in -- use the previously + // created A::one() as a dummy value + + (if n == *end { Some(mem::replace(end, one)) } else { None }, + // ^ are we done yet? + Some(n)) // < the value to output + } else { + (Some(mem::replace(start, A::one())), None) + } + } + }; + + // turn into an empty iterator if this is the last value + if let Some(end) = finishing { + *self = Empty { at: end }; + } + + n + } + + #[inline] + fn size_hint(&self) -> (usize, Option) { + use ops::RangeInclusive::*; + + match *self { + Empty { .. } => (0, Some(0)), + + NonEmpty { ref start, ref end } => + match Step::steps_between(start, end, &A::one()) { + Some(hint) => (hint.saturating_add(1), hint.checked_add(1)), + None => (0, None), + } + } + } +} + +#[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] +impl DoubleEndedIterator for ops::RangeInclusive where + for<'a> &'a A: Add<&'a A, Output = A>, + for<'a> &'a A: Sub<&'a A, Output = A> +{ + #[inline] + fn next_back(&mut self) -> Option { + use ops::RangeInclusive::*; + + // see Iterator::next for comments + + let (finishing, n) = match *self { + Empty { .. 
} => return None, + + NonEmpty { ref mut start, ref mut end } => { + if start == end { + (Some(mem::replace(start, A::one())), Some(mem::replace(end, A::one()))) + } else if start < end { + let one = A::one(); + let mut n = &*end - &one; + mem::swap(&mut n, end); + + (if n == *start { Some(mem::replace(start, one)) } else { None }, + Some(n)) + } else { + (Some(mem::replace(end, A::one())), None) + } + } + }; + + if let Some(start) = finishing { + *self = Empty { at: start }; + } + + n + } +} + diff --git a/src/libcore/iter/sources.rs b/src/libcore/iter/sources.rs new file mode 100644 index 0000000000..ecd4a78b9e --- /dev/null +++ b/src/libcore/iter/sources.rs @@ -0,0 +1,270 @@ +// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use clone::Clone; +use default::Default; +use fmt; +use marker; +use option::Option::{self, Some, None}; +use usize; + +use super::{DoubleEndedIterator, IntoIterator, Iterator, ExactSizeIterator}; + +/// An iterator that repeats an element endlessly. +/// +/// This `struct` is created by the [`repeat()`] function. See its documentation for more. +/// +/// [`repeat()`]: fn.repeat.html +#[derive(Clone, Debug)] +#[stable(feature = "rust1", since = "1.0.0")] +pub struct Repeat { + element: A +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl Iterator for Repeat { + type Item = A; + + #[inline] + fn next(&mut self) -> Option { Some(self.element.clone()) } + #[inline] + fn size_hint(&self) -> (usize, Option) { (usize::MAX, None) } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl DoubleEndedIterator for Repeat { + #[inline] + fn next_back(&mut self) -> Option { Some(self.element.clone()) } +} + +/// Creates a new iterator that endlessly repeats a single element. +/// +/// The `repeat()` function repeats a single value over and over and over and +/// over and over and 🔁. +/// +/// Infinite iterators like `repeat()` are often used with adapters like +/// [`take()`], in order to make them finite. +/// +/// [`take()`]: trait.Iterator.html#method.take +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// use std::iter; +/// +/// // the number four 4ever: +/// let mut fours = iter::repeat(4); +/// +/// assert_eq!(Some(4), fours.next()); +/// assert_eq!(Some(4), fours.next()); +/// assert_eq!(Some(4), fours.next()); +/// assert_eq!(Some(4), fours.next()); +/// assert_eq!(Some(4), fours.next()); +/// +/// // yup, still four +/// assert_eq!(Some(4), fours.next()); +/// ``` +/// +/// Going finite with [`take()`]: +/// +/// ``` +/// use std::iter; +/// +/// // that last example was too many fours. Let's only have four fours. +/// let mut four_fours = iter::repeat(4).take(4); +/// +/// assert_eq!(Some(4), four_fours.next()); +/// assert_eq!(Some(4), four_fours.next()); +/// assert_eq!(Some(4), four_fours.next()); +/// assert_eq!(Some(4), four_fours.next()); +/// +/// // ... and now we're done +/// assert_eq!(None, four_fours.next()); +/// ``` +#[inline] +#[stable(feature = "rust1", since = "1.0.0")] +pub fn repeat(elt: T) -> Repeat { + Repeat{element: elt} +} + +/// An iterator that yields nothing. +/// +/// This `struct` is created by the [`empty()`] function. See its documentation for more. 
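One further use of `repeat()` beyond the examples above, sketched here for illustration: pairing every element of a finite iterator with the same value via `zip()`, which ends as soon as the finite side is exhausted.

```rust
fn main() {
    use std::iter;

    // Label every number in the range with the same tag; `zip()` stops
    // when the finite range runs out, so the infinite `repeat()` is safe.
    let labelled: Vec<(&str, i32)> = iter::repeat("x").zip(1..4).collect();
    assert_eq!(labelled, [("x", 1), ("x", 2), ("x", 3)]);
}
```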
+/// +/// [`empty()`]: fn.empty.html +#[stable(feature = "iter_empty", since = "1.2.0")] +pub struct Empty(marker::PhantomData); + +#[stable(feature = "core_impl_debug", since = "1.9.0")] +impl fmt::Debug for Empty { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.pad("Empty") + } +} + +#[stable(feature = "iter_empty", since = "1.2.0")] +impl Iterator for Empty { + type Item = T; + + fn next(&mut self) -> Option { + None + } + + fn size_hint(&self) -> (usize, Option){ + (0, Some(0)) + } +} + +#[stable(feature = "iter_empty", since = "1.2.0")] +impl DoubleEndedIterator for Empty { + fn next_back(&mut self) -> Option { + None + } +} + +#[stable(feature = "iter_empty", since = "1.2.0")] +impl ExactSizeIterator for Empty { + fn len(&self) -> usize { + 0 + } +} + +// not #[derive] because that adds a Clone bound on T, +// which isn't necessary. +#[stable(feature = "iter_empty", since = "1.2.0")] +impl Clone for Empty { + fn clone(&self) -> Empty { + Empty(marker::PhantomData) + } +} + +// not #[derive] because that adds a Default bound on T, +// which isn't necessary. +#[stable(feature = "iter_empty", since = "1.2.0")] +impl Default for Empty { + fn default() -> Empty { + Empty(marker::PhantomData) + } +} + +/// Creates an iterator that yields nothing. +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// use std::iter; +/// +/// // this could have been an iterator over i32, but alas, it's just not. +/// let mut nope = iter::empty::(); +/// +/// assert_eq!(None, nope.next()); +/// ``` +#[stable(feature = "iter_empty", since = "1.2.0")] +pub fn empty() -> Empty { + Empty(marker::PhantomData) +} + +/// An iterator that yields an element exactly once. +/// +/// This `struct` is created by the [`once()`] function. See its documentation for more. +/// +/// [`once()`]: fn.once.html +#[derive(Clone, Debug)] +#[stable(feature = "iter_once", since = "1.2.0")] +pub struct Once { + inner: ::option::IntoIter +} + +#[stable(feature = "iter_once", since = "1.2.0")] +impl Iterator for Once { + type Item = T; + + fn next(&mut self) -> Option { + self.inner.next() + } + + fn size_hint(&self) -> (usize, Option) { + self.inner.size_hint() + } +} + +#[stable(feature = "iter_once", since = "1.2.0")] +impl DoubleEndedIterator for Once { + fn next_back(&mut self) -> Option { + self.inner.next_back() + } +} + +#[stable(feature = "iter_once", since = "1.2.0")] +impl ExactSizeIterator for Once { + fn len(&self) -> usize { + self.inner.len() + } +} + +/// Creates an iterator that yields an element exactly once. +/// +/// This is commonly used to adapt a single value into a [`chain()`] of other +/// kinds of iteration. Maybe you have an iterator that covers almost +/// everything, but you need an extra special case. Maybe you have a function +/// which works on iterators, but you only need to process one value. +/// +/// [`chain()`]: trait.Iterator.html#method.chain +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// use std::iter; +/// +/// // one is the loneliest number +/// let mut one = iter::once(1); +/// +/// assert_eq!(Some(1), one.next()); +/// +/// // just one, that's all we get +/// assert_eq!(None, one.next()); +/// ``` +/// +/// Chaining together with another iterator. 
Let's say that we want to iterate +/// over each file of the `.foo` directory, but also a configuration file, +/// `.foorc`: +/// +/// ```no_run +/// use std::iter; +/// use std::fs; +/// use std::path::PathBuf; +/// +/// let dirs = fs::read_dir(".foo").unwrap(); +/// +/// // we need to convert from an iterator of DirEntry-s to an iterator of +/// // PathBufs, so we use map +/// let dirs = dirs.map(|file| file.unwrap().path()); +/// +/// // now, our iterator just for our config file +/// let config = iter::once(PathBuf::from(".foorc")); +/// +/// // chain the two iterators together into one big iterator +/// let files = dirs.chain(config); +/// +/// // this will give us all of the files in .foo as well as .foorc +/// for f in files { +/// println!("{:?}", f); +/// } +/// ``` +#[stable(feature = "iter_once", since = "1.2.0")] +pub fn once(value: T) -> Once { + Once { inner: Some(value).into_iter() } +} diff --git a/src/libcore/iter/traits.rs b/src/libcore/iter/traits.rs new file mode 100644 index 0000000000..6750398445 --- /dev/null +++ b/src/libcore/iter/traits.rs @@ -0,0 +1,526 @@ +// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use option::Option::{self, Some}; +use marker::Sized; + +use super::Iterator; + +/// Conversion from an `Iterator`. +/// +/// By implementing `FromIterator` for a type, you define how it will be +/// created from an iterator. This is common for types which describe a +/// collection of some kind. +/// +/// `FromIterator`'s [`from_iter()`] is rarely called explicitly, and is instead +/// used through [`Iterator`]'s [`collect()`] method. See [`collect()`]'s +/// documentation for more examples. +/// +/// [`from_iter()`]: #tymethod.from_iter +/// [`Iterator`]: trait.Iterator.html +/// [`collect()`]: trait.Iterator.html#method.collect +/// +/// See also: [`IntoIterator`]. +/// +/// [`IntoIterator`]: trait.IntoIterator.html +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// use std::iter::FromIterator; +/// +/// let five_fives = std::iter::repeat(5).take(5); +/// +/// let v = Vec::from_iter(five_fives); +/// +/// assert_eq!(v, vec![5, 5, 5, 5, 5]); +/// ``` +/// +/// Using [`collect()`] to implicitly use `FromIterator`: +/// +/// ``` +/// let five_fives = std::iter::repeat(5).take(5); +/// +/// let v: Vec = five_fives.collect(); +/// +/// assert_eq!(v, vec![5, 5, 5, 5, 5]); +/// ``` +/// +/// Implementing `FromIterator` for your type: +/// +/// ``` +/// use std::iter::FromIterator; +/// +/// // A sample collection, that's just a wrapper over Vec +/// #[derive(Debug)] +/// struct MyCollection(Vec); +/// +/// // Let's give it some methods so we can create one and add things +/// // to it. +/// impl MyCollection { +/// fn new() -> MyCollection { +/// MyCollection(Vec::new()) +/// } +/// +/// fn add(&mut self, elem: i32) { +/// self.0.push(elem); +/// } +/// } +/// +/// // and we'll implement FromIterator +/// impl FromIterator for MyCollection { +/// fn from_iter>(iter: I) -> Self { +/// let mut c = MyCollection::new(); +/// +/// for i in iter { +/// c.add(i); +/// } +/// +/// c +/// } +/// } +/// +/// // Now we can make a new iterator... +/// let iter = (0..5).into_iter(); +/// +/// // ... 
and make a MyCollection out of it +/// let c = MyCollection::from_iter(iter); +/// +/// assert_eq!(c.0, vec![0, 1, 2, 3, 4]); +/// +/// // collect works too! +/// +/// let iter = (0..5).into_iter(); +/// let c: MyCollection = iter.collect(); +/// +/// assert_eq!(c.0, vec![0, 1, 2, 3, 4]); +/// ``` +#[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented="a collection of type `{Self}` cannot be \ + built from an iterator over elements of type `{A}`"] +pub trait FromIterator: Sized { + /// Creates a value from an iterator. + /// + /// See the [module-level documentation] for more. + /// + /// [module-level documentation]: trait.FromIterator.html + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// use std::iter::FromIterator; + /// + /// let five_fives = std::iter::repeat(5).take(5); + /// + /// let v = Vec::from_iter(five_fives); + /// + /// assert_eq!(v, vec![5, 5, 5, 5, 5]); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + fn from_iter>(iter: T) -> Self; +} + +/// Conversion into an `Iterator`. +/// +/// By implementing `IntoIterator` for a type, you define how it will be +/// converted to an iterator. This is common for types which describe a +/// collection of some kind. +/// +/// One benefit of implementing `IntoIterator` is that your type will [work +/// with Rust's `for` loop syntax](index.html#for-loops-and-intoiterator). +/// +/// See also: [`FromIterator`]. +/// +/// [`FromIterator`]: trait.FromIterator.html +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// let v = vec![1, 2, 3]; +/// +/// let mut iter = v.into_iter(); +/// +/// let n = iter.next(); +/// assert_eq!(Some(1), n); +/// +/// let n = iter.next(); +/// assert_eq!(Some(2), n); +/// +/// let n = iter.next(); +/// assert_eq!(Some(3), n); +/// +/// let n = iter.next(); +/// assert_eq!(None, n); +/// ``` +/// +/// Implementing `IntoIterator` for your type: +/// +/// ``` +/// // A sample collection, that's just a wrapper over Vec +/// #[derive(Debug)] +/// struct MyCollection(Vec); +/// +/// // Let's give it some methods so we can create one and add things +/// // to it. +/// impl MyCollection { +/// fn new() -> MyCollection { +/// MyCollection(Vec::new()) +/// } +/// +/// fn add(&mut self, elem: i32) { +/// self.0.push(elem); +/// } +/// } +/// +/// // and we'll implement IntoIterator +/// impl IntoIterator for MyCollection { +/// type Item = i32; +/// type IntoIter = ::std::vec::IntoIter; +/// +/// fn into_iter(self) -> Self::IntoIter { +/// self.0.into_iter() +/// } +/// } +/// +/// // Now we can make a new collection... +/// let mut c = MyCollection::new(); +/// +/// // ... add some stuff to it ... +/// c.add(0); +/// c.add(1); +/// c.add(2); +/// +/// // ... and then turn it into an Iterator: +/// for (i, n) in c.into_iter().enumerate() { +/// assert_eq!(i as i32, n); +/// } +/// ``` +#[stable(feature = "rust1", since = "1.0.0")] +pub trait IntoIterator { + /// The type of the elements being iterated over. + #[stable(feature = "rust1", since = "1.0.0")] + type Item; + + /// Which kind of iterator are we turning this into? + #[stable(feature = "rust1", since = "1.0.0")] + type IntoIter: Iterator; + + /// Creates an iterator from a value. + /// + /// See the [module-level documentation] for more. 
+ /// + /// [module-level documentation]: trait.IntoIterator.html + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let v = vec![1, 2, 3]; + /// + /// let mut iter = v.into_iter(); + /// + /// let n = iter.next(); + /// assert_eq!(Some(1), n); + /// + /// let n = iter.next(); + /// assert_eq!(Some(2), n); + /// + /// let n = iter.next(); + /// assert_eq!(Some(3), n); + /// + /// let n = iter.next(); + /// assert_eq!(None, n); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + fn into_iter(self) -> Self::IntoIter; +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl IntoIterator for I { + type Item = I::Item; + type IntoIter = I; + + fn into_iter(self) -> I { + self + } +} + +/// Extend a collection with the contents of an iterator. +/// +/// Iterators produce a series of values, and collections can also be thought +/// of as a series of values. The `Extend` trait bridges this gap, allowing you +/// to extend a collection by including the contents of that iterator. +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// // You can extend a String with some chars: +/// let mut message = String::from("The first three letters are: "); +/// +/// message.extend(&['a', 'b', 'c']); +/// +/// assert_eq!("abc", &message[29..32]); +/// ``` +/// +/// Implementing `Extend`: +/// +/// ``` +/// // A sample collection, that's just a wrapper over Vec +/// #[derive(Debug)] +/// struct MyCollection(Vec); +/// +/// // Let's give it some methods so we can create one and add things +/// // to it. +/// impl MyCollection { +/// fn new() -> MyCollection { +/// MyCollection(Vec::new()) +/// } +/// +/// fn add(&mut self, elem: i32) { +/// self.0.push(elem); +/// } +/// } +/// +/// // since MyCollection has a list of i32s, we implement Extend for i32 +/// impl Extend for MyCollection { +/// +/// // This is a bit simpler with the concrete type signature: we can call +/// // extend on anything which can be turned into an Iterator which gives +/// // us i32s. Because we need i32s to put into MyCollection. +/// fn extend>(&mut self, iter: T) { +/// +/// // The implementation is very straightforward: loop through the +/// // iterator, and add() each element to ourselves. +/// for elem in iter { +/// self.add(elem); +/// } +/// } +/// } +/// +/// let mut c = MyCollection::new(); +/// +/// c.add(5); +/// c.add(6); +/// c.add(7); +/// +/// // let's extend our collection with three more numbers +/// c.extend(vec![1, 2, 3]); +/// +/// // we've added these elements onto the end +/// assert_eq!("MyCollection([5, 6, 7, 1, 2, 3])", format!("{:?}", c)); +/// ``` +#[stable(feature = "rust1", since = "1.0.0")] +pub trait Extend { + /// Extends a collection with the contents of an iterator. + /// + /// As this is the only method for this trait, the [trait-level] docs + /// contain more details. + /// + /// [trait-level]: trait.Extend.html + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// // You can extend a String with some chars: + /// let mut message = String::from("abc"); + /// + /// message.extend(['d', 'e', 'f'].iter()); + /// + /// assert_eq!("abcdef", &message); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + fn extend>(&mut self, iter: T); +} + +/// An iterator able to yield elements from both ends. +/// +/// Something that implements `DoubleEndedIterator` has one extra capability +/// over something that implements [`Iterator`]: the ability to also take +/// `Item`s from the back, as well as the front. 
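One consequence of the blanket `impl<I: Iterator> IntoIterator for I` above that the docs only state indirectly: any iterator, not just a collection, can be fed straight to a `for` loop, because `into_iter` is simply the identity. A tiny sketch:

```
fn main() {
    // An adaptor chain is an Iterator, and therefore IntoIterator as well.
    let doubled = (1..4).map(|x| x * 2);

    // The `for` loop calls `IntoIterator::into_iter(doubled)`, which for a
    // plain iterator is a no-op returning the iterator itself.
    for n in doubled {
        println!("{}", n); // prints 2, 4, 6
    }
}
```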
+/// +/// It is important to note that both back and forth work on the same range, +/// and do not cross: iteration is over when they meet in the middle. +/// +/// In a similar fashion to the [`Iterator`] protocol, once a +/// `DoubleEndedIterator` returns `None` from a `next_back()`, calling it again +/// may or may not ever return `Some` again. `next()` and `next_back()` are +/// interchangable for this purpose. +/// +/// [`Iterator`]: trait.Iterator.html +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// let numbers = vec![1, 2, 3]; +/// +/// let mut iter = numbers.iter(); +/// +/// assert_eq!(Some(&1), iter.next()); +/// assert_eq!(Some(&3), iter.next_back()); +/// assert_eq!(Some(&2), iter.next_back()); +/// assert_eq!(None, iter.next()); +/// assert_eq!(None, iter.next_back()); +/// ``` +#[stable(feature = "rust1", since = "1.0.0")] +pub trait DoubleEndedIterator: Iterator { + /// An iterator able to yield elements from both ends. + /// + /// As this is the only method for this trait, the [trait-level] docs + /// contain more details. + /// + /// [trait-level]: trait.DoubleEndedIterator.html + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// let numbers = vec![1, 2, 3]; + /// + /// let mut iter = numbers.iter(); + /// + /// assert_eq!(Some(&1), iter.next()); + /// assert_eq!(Some(&3), iter.next_back()); + /// assert_eq!(Some(&2), iter.next_back()); + /// assert_eq!(None, iter.next()); + /// assert_eq!(None, iter.next_back()); + /// ``` + #[stable(feature = "rust1", since = "1.0.0")] + fn next_back(&mut self) -> Option; +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<'a, I: DoubleEndedIterator + ?Sized> DoubleEndedIterator for &'a mut I { + fn next_back(&mut self) -> Option { (**self).next_back() } +} + +/// An iterator that knows its exact length. +/// +/// Many [`Iterator`]s don't know how many times they will iterate, but some do. +/// If an iterator knows how many times it can iterate, providing access to +/// that information can be useful. For example, if you want to iterate +/// backwards, a good start is to know where the end is. +/// +/// When implementing an `ExactSizeIterator`, You must also implement +/// [`Iterator`]. When doing so, the implementation of [`size_hint()`] *must* +/// return the exact size of the iterator. +/// +/// [`Iterator`]: trait.Iterator.html +/// [`size_hint()`]: trait.Iterator.html#method.size_hint +/// +/// The [`len()`] method has a default implementation, so you usually shouldn't +/// implement it. However, you may be able to provide a more performant +/// implementation than the default, so overriding it in this case makes sense. +/// +/// [`len()`]: #method.len +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// // a finite range knows exactly how many times it will iterate +/// let five = 0..5; +/// +/// assert_eq!(5, five.len()); +/// ``` +/// +/// In the [module level docs][moddocs], we implemented an [`Iterator`], +/// `Counter`. 
Let's implement `ExactSizeIterator` for it as well: +/// +/// [moddocs]: index.html +/// +/// ``` +/// # struct Counter { +/// # count: usize, +/// # } +/// # impl Counter { +/// # fn new() -> Counter { +/// # Counter { count: 0 } +/// # } +/// # } +/// # impl Iterator for Counter { +/// # type Item = usize; +/// # fn next(&mut self) -> Option { +/// # self.count += 1; +/// # if self.count < 6 { +/// # Some(self.count) +/// # } else { +/// # None +/// # } +/// # } +/// # } +/// impl ExactSizeIterator for Counter { +/// // We already have the number of iterations, so we can use it directly. +/// fn len(&self) -> usize { +/// self.count +/// } +/// } +/// +/// // And now we can use it! +/// +/// let counter = Counter::new(); +/// +/// assert_eq!(0, counter.len()); +/// ``` +#[stable(feature = "rust1", since = "1.0.0")] +pub trait ExactSizeIterator: Iterator { + #[inline] + #[stable(feature = "rust1", since = "1.0.0")] + /// Returns the exact number of times the iterator will iterate. + /// + /// This method has a default implementation, so you usually should not + /// implement it directly. However, if you can provide a more efficient + /// implementation, you can do so. See the [trait-level] docs for an + /// example. + /// + /// This function has the same safety guarantees as the [`size_hint()`] + /// function. + /// + /// [trait-level]: trait.ExactSizeIterator.html + /// [`size_hint()`]: trait.Iterator.html#method.size_hint + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// // a finite range knows exactly how many times it will iterate + /// let five = 0..5; + /// + /// assert_eq!(5, five.len()); + /// ``` + fn len(&self) -> usize { + let (lower, upper) = self.size_hint(); + // Note: This assertion is overly defensive, but it checks the invariant + // guaranteed by the trait. If this trait were rust-internal, + // we could use debug_assert!; assert_eq! will check all Rust user + // implementations too. + assert_eq!(upper, Some(lower)); + lower + } +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl<'a, I: ExactSizeIterator + ?Sized> ExactSizeIterator for &'a mut I {} + diff --git a/src/libcore/lib.rs b/src/libcore/lib.rs index fa5e90562d..a054e41b20 100644 --- a/src/libcore/lib.rs +++ b/src/libcore/lib.rs @@ -43,6 +43,7 @@ // Since libcore defines many fundamental lang items, all tests live in a // separate crate, libcoretest, to avoid bizarre issues. +#![cfg_attr(stage0, allow(unused_attributes))] #![crate_name = "core"] #![stable(feature = "core", since = "1.6.0")] #![crate_type = "rlib"] @@ -60,9 +61,12 @@ #![cfg_attr(not(stage0), deny(warnings))] #![feature(allow_internal_unstable)] +#![feature(asm)] #![feature(associated_type_defaults)] +#![feature(cfg_target_feature)] #![feature(concat_idents)] #![feature(const_fn)] +#![feature(cfg_target_has_atomic)] #![feature(custom_attribute)] #![feature(fundamental)] #![feature(inclusive_range_syntax)] diff --git a/src/libcore/macros.rs b/src/libcore/macros.rs index f923668688..a40608b076 100644 --- a/src/libcore/macros.rs +++ b/src/libcore/macros.rs @@ -86,7 +86,7 @@ macro_rules! assert { #[stable(feature = "rust1", since = "1.0.0")] macro_rules! assert_eq { ($left:expr , $right:expr) => ({ - match (&($left), &($right)) { + match (&$left, &$right) { (left_val, right_val) => { if !(*left_val == *right_val) { panic!("assertion failed: `(left == right)` \ @@ -182,7 +182,7 @@ macro_rules! 
debug_assert_eq { /// fn write_to_file_using_match() -> Result<(), io::Error> { /// let mut file = try!(File::create("my_best_friends.txt")); /// match file.write_all(b"This is a list of my best friends.") { -/// Ok(_) => (), +/// Ok(v) => v, /// Err(e) => return Err(e), /// } /// println!("I wrote to the file"); diff --git a/src/libcore/mem.rs b/src/libcore/mem.rs index 2c648d1516..a0f2a2adcb 100644 --- a/src/libcore/mem.rs +++ b/src/libcore/mem.rs @@ -110,6 +110,7 @@ pub use intrinsics::transmute; /// } /// } /// ``` +#[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn forget(t: T) { unsafe { intrinsics::forget(t) } @@ -117,6 +118,9 @@ pub fn forget(t: T) { /// Returns the size of a type in bytes. /// +/// More specifically, this is the offset in bytes between successive +/// items of the same type, including alignment padding. +/// /// # Examples /// /// ``` diff --git a/src/libcore/num/bignum.rs b/src/libcore/num/bignum.rs index 66c6deb361..a881b539ce 100644 --- a/src/libcore/num/bignum.rs +++ b/src/libcore/num/bignum.rs @@ -33,7 +33,7 @@ use mem; use intrinsics; /// Arithmetic operations required by bignums. -pub trait FullOps { +pub trait FullOps: Sized { /// Returns `(carry', v')` such that `carry' * 2^W + v' = self + other + carry`, /// where `W` is the number of bits in `Self`. fn full_add(self, other: Self, carry: bool) -> (bool /*carry*/, Self); diff --git a/src/libcore/num/dec2flt/algorithm.rs b/src/libcore/num/dec2flt/algorithm.rs index e33c2814bf..c7af46a1e4 100644 --- a/src/libcore/num/dec2flt/algorithm.rs +++ b/src/libcore/num/dec2flt/algorithm.rs @@ -32,19 +32,80 @@ fn power_of_ten(e: i16) -> Fp { Fp { f: sig, e: exp } } +// In most architectures, floating point operations have an explicit bit size, therefore the +// precision of the computation is determined on a per-operation basis. +#[cfg(any(not(target_arch="x86"), target_feature="sse2"))] +mod fpu_precision { + pub fn set_precision() { } +} + +// On x86, the x87 FPU is used for float operations if the SSE/SSE2 extensions are not available. +// The x87 FPU operates with 80 bits of precision by default, which means that operations will +// round to 80 bits causing double rounding to happen when values are eventually represented as +// 32/64 bit float values. To overcome this, the FPU control word can be set so that the +// computations are performed in the desired precision. +#[cfg(all(target_arch="x86", not(target_feature="sse2")))] +mod fpu_precision { + use mem::size_of; + use ops::Drop; + + /// A structure used to preserve the original value of the FPU control word, so that it can be + /// restored when the structure is dropped. + /// + /// The x87 FPU is a 16-bits register whose fields are as follows: + /// + /// | 12-15 | 10-11 | 8-9 | 6-7 | 5 | 4 | 3 | 2 | 1 | 0 | + /// |------:|------:|----:|----:|---:|---:|---:|---:|---:|---:| + /// | | RC | PC | | PM | UM | OM | ZM | DM | IM | + /// + /// The documentation for all of the fields is available in the IA-32 Architectures Software + /// Developer's Manual (Volume 1). + /// + /// The only field which is relevant for the following code is PC, Precision Control. This + /// field determines the precision of the operations performed by the FPU. It can be set to: + /// - 0b00, single precision i.e. 32-bits + /// - 0b10, double precision i.e. 64-bits + /// - 0b11, double extended precision i.e. 80-bits (default state) + /// The 0b01 value is reserved and should not be used. 
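As a stand-alone sanity check of the PC-field layout just described (not part of the patch), the arithmetic that `set_precision` performs below can be reproduced with plain integers; the `0x0200`/`0x0300` constants and the `0xFCFF` mask are the same ones used in the code, while `0x037F` is only a typical power-on control-word value assumed for illustration:

```
fn main() {
    // PC lives in bits 8-9 of the control word.
    let single = 0x0000u16;   // 0b00 << 8: 32-bit precision
    let double = 0x0200u16;   // 0b10 << 8: 64-bit precision
    let extended = 0x0300u16; // 0b11 << 8: 80-bit precision (default)

    // Replacing only the PC field: clear bits 8-9, then OR in the new value.
    let cw = 0x037Fu16; // assumed typical default (exceptions masked, PC = 0b11)
    assert_eq!((cw & 0xFCFF) | double, 0x027F);
    assert_eq!((cw & 0xFCFF) | single, 0x007F);
    assert_eq!(cw & 0x0300, extended); // the assumed default is extended precision
}
```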
+ pub struct FPUControlWord(u16); + + fn set_cw(cw: u16) { + unsafe { asm!("fldcw $0" :: "m" (cw) :: "volatile") } + } + + /// Set the precision field of the FPU to `T` and return a `FPUControlWord` + pub fn set_precision() -> FPUControlWord { + let cw = 0u16; + + // Compute the value for the Precision Control field that is appropriate for `T`. + let cw_precision = match size_of::() { + 4 => 0x0000, // 32 bits + 8 => 0x0200, // 64 bits + _ => 0x0300, // default, 80 bits + }; + + // Get the original value of the control word to restore it later, when the + // `FPUControlWord` structure is dropped + unsafe { asm!("fnstcw $0" : "=*m" (&cw) ::: "volatile") } + + // Set the control word to the desired precision. This is achieved by masking away the old + // precision (bits 8 and 9, 0x300) and replacing it with the precision flag computed above. + set_cw((cw & 0xFCFF) | cw_precision); + + FPUControlWord(cw) + } + + impl Drop for FPUControlWord { + fn drop(&mut self) { + set_cw(self.0) + } + } +} + /// The fast path of Bellerophon using machine-sized integers and floats. /// /// This is extracted into a separate function so that it can be attempted before constructing /// a bignum. -/// -/// The fast path crucially depends on arithmetic being correctly rounded, so on x86 -/// without SSE or SSE2 it will be **wrong** (as in, off by one ULP occasionally), because the x87 -/// FPU stack will round to 80 bit first before rounding to 64/32 bit. However, as such hardware -/// is extremely rare nowadays and in fact all in-tree target triples assume an SSE2-capable -/// microarchitecture, there is little incentive to deal with that. There's a test that will fail -/// when SSE or SSE2 is disabled, so people building their own non-SSE copy will get a heads up. -/// -/// FIXME: It would nevertheless be nice if we had a good way to detect and deal with x87. pub fn fast_path(integral: &[u8], fractional: &[u8], e: i64) -> Option { let num_digits = integral.len() + fractional.len(); // log_10(f64::max_sig) ~ 15.95. We compare the exact value to max_sig near the end, @@ -60,9 +121,17 @@ pub fn fast_path(integral: &[u8], fractional: &[u8], e: i64) -> Opt if f > T::max_sig() { return None; } + + // The fast path crucially depends on arithmetic being rounded to the correct number of bits + // without any intermediate rounding. On x86 (without SSE or SSE2) this requires the precision + // of the x87 FPU stack to be changed so that it directly rounds to 64/32 bit. + // The `set_precision` function takes care of setting the precision on architectures which + // require setting it by changing the global state (like the control word of the x87 FPU). + let _cw = fpu_precision::set_precision::(); + // The case e < 0 cannot be folded into the other branch. Negative powers result in // a repeating fractional part in binary, which are rounded, which causes real - // (and occasioally quite significant!) errors in the final result. + // (and occasionally quite significant!) errors in the final result. if e >= 0 { Some(T::from_int(f) * T::short_fast_pow10(e as usize)) } else { diff --git a/src/libcore/num/int_macros.rs b/src/libcore/num/int_macros.rs index 42349257ab..fb1a3bbe3b 100644 --- a/src/libcore/num/int_macros.rs +++ b/src/libcore/num/int_macros.rs @@ -10,6 +10,7 @@ #![doc(hidden)] +#[cfg(stage0)] macro_rules! 
int_module { ($T:ty, $bits:expr) => ( // FIXME(#11621): Should be deprecated once CTFE is implemented in favour of @@ -25,3 +26,15 @@ pub const MIN: $T = (-1 as $T) << ($bits - 1); pub const MAX: $T = !MIN; ) } + +#[cfg(not(stage0))] +macro_rules! int_module { ($T:ident, $bits:expr) => ( + +#[stable(feature = "rust1", since = "1.0.0")] +#[allow(missing_docs)] +pub const MIN: $T = $T::min_value(); +#[stable(feature = "rust1", since = "1.0.0")] +#[allow(missing_docs)] +pub const MAX: $T = $T::max_value(); + +) } diff --git a/src/libcore/num/mod.rs b/src/libcore/num/mod.rs index 229a864d71..9b6f6698de 100644 --- a/src/libcore/num/mod.rs +++ b/src/libcore/num/mod.rs @@ -14,8 +14,8 @@ #![allow(missing_docs)] use char::CharExt; -use cmp::{Eq, PartialOrd}; -use convert::From; +use cmp::PartialOrd; +use convert::{From, TryFrom}; use fmt; use intrinsics; use marker::{Copy, Sized}; @@ -37,10 +37,35 @@ use slice::SliceExt; /// `wrapping_add`, or through the `Wrapping` type, which says that /// all standard arithmetic operations on the underlying value are /// intended to have wrapping semantics. +/// +/// # Examples +/// +/// ``` +/// use std::num::Wrapping; +/// +/// let zero = Wrapping(0u32); +/// let one = Wrapping(1u32); +/// +/// assert_eq!(std::u32::MAX, (zero - one).0); +/// ``` #[stable(feature = "rust1", since = "1.0.0")] -#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Debug, Default)] +#[derive(PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Default, Hash)] pub struct Wrapping(#[stable(feature = "rust1", since = "1.0.0")] pub T); +#[stable(feature = "rust1", since = "1.0.0")] +impl fmt::Debug for Wrapping { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.0.fmt(f) + } +} + +#[stable(feature = "wrapping_display", since = "1.10.0")] +impl fmt::Display for Wrapping { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + self.0.fmt(f) + } +} + mod wrapping; // All these modules are technically private and only exposed for libcoretest: @@ -149,7 +174,7 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// assert_eq!(u32::from_str_radix("A", 16), Ok(10)); + /// assert_eq!(i32::from_str_radix("A", 16), Ok(10)); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn from_str_radix(src: &str, radix: u32) -> Result { @@ -163,9 +188,9 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// let n = 0b01001100u8; + /// let n = -0b1000_0000i8; /// - /// assert_eq!(n.count_ones(), 3); + /// assert_eq!(n.count_ones(), 1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -178,9 +203,9 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// let n = 0b01001100u8; + /// let n = -0b1000_0000i8; /// - /// assert_eq!(n.count_zeros(), 5); + /// assert_eq!(n.count_zeros(), 7); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -196,9 +221,9 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// let n = 0b0101000u16; + /// let n = -1i16; /// - /// assert_eq!(n.leading_zeros(), 10); + /// assert_eq!(n.leading_zeros(), 0); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -214,9 +239,9 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// let n = 0b0101000u16; + /// let n = -4i8; /// - /// assert_eq!(n.trailing_zeros(), 3); + /// assert_eq!(n.trailing_zeros(), 2); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -232,10 +257,10 @@ macro_rules! 
int_impl { /// Basic usage: /// /// ``` - /// let n = 0x0123456789ABCDEFu64; - /// let m = 0x3456789ABCDEF012u64; + /// let n = 0x0123456789ABCDEFi64; + /// let m = -0x76543210FEDCBA99i64; /// - /// assert_eq!(n.rotate_left(12), m); + /// assert_eq!(n.rotate_left(32), m); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -252,10 +277,10 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// let n = 0x0123456789ABCDEFu64; - /// let m = 0xDEF0123456789ABCu64; + /// let n = 0x0123456789ABCDEFi64; + /// let m = -0xFEDCBA987654322i64; /// - /// assert_eq!(n.rotate_right(12), m); + /// assert_eq!(n.rotate_right(4), m); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -270,8 +295,8 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// let n = 0x0123456789ABCDEFu64; - /// let m = 0xEFCDAB8967452301u64; + /// let n = 0x0123456789ABCDEFi64; + /// let m = -0x1032547698BADCFFi64; /// /// assert_eq!(n.swap_bytes(), m); /// ``` @@ -291,12 +316,12 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// let n = 0x0123456789ABCDEFu64; + /// let n = 0x0123456789ABCDEFi64; /// /// if cfg!(target_endian = "big") { - /// assert_eq!(u64::from_be(n), n) + /// assert_eq!(i64::from_be(n), n) /// } else { - /// assert_eq!(u64::from_be(n), n.swap_bytes()) + /// assert_eq!(i64::from_be(n), n.swap_bytes()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] @@ -315,12 +340,12 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// let n = 0x0123456789ABCDEFu64; + /// let n = 0x0123456789ABCDEFi64; /// /// if cfg!(target_endian = "little") { - /// assert_eq!(u64::from_le(n), n) + /// assert_eq!(i64::from_le(n), n) /// } else { - /// assert_eq!(u64::from_le(n), n.swap_bytes()) + /// assert_eq!(i64::from_le(n), n.swap_bytes()) /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] @@ -339,7 +364,7 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// let n = 0x0123456789ABCDEFu64; + /// let n = 0x0123456789ABCDEFi64; /// /// if cfg!(target_endian = "big") { /// assert_eq!(n.to_be(), n) @@ -363,7 +388,7 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// let n = 0x0123456789ABCDEFu64; + /// let n = 0x0123456789ABCDEFi64; /// /// if cfg!(target_endian = "little") { /// assert_eq!(n.to_le(), n) @@ -385,8 +410,8 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// assert_eq!(5u16.checked_add(65530), Some(65535)); - /// assert_eq!(6u16.checked_add(65530), None); + /// assert_eq!(7i16.checked_add(32760), Some(32767)); + /// assert_eq!(8i16.checked_add(32760), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -421,8 +446,8 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// assert_eq!(5u8.checked_mul(51), Some(255)); - /// assert_eq!(5u8.checked_mul(52), None); + /// assert_eq!(6i8.checked_mul(21), Some(126)); + /// assert_eq!(6i8.checked_mul(22), None); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] @@ -753,8 +778,8 @@ macro_rules! int_impl { /// Basic usage: /// /// ``` - /// assert_eq!(1u8.wrapping_shl(7), 128); - /// assert_eq!(1u8.wrapping_shl(8), 1); + /// assert_eq!((-1i8).wrapping_shl(7), -128); + /// assert_eq!((-1i8).wrapping_shl(8), -1); /// ``` #[stable(feature = "num_wrapping", since = "1.2.0")] #[inline(always)] @@ -778,8 +803,8 @@ macro_rules! 
int_impl { /// Basic usage: /// /// ``` - /// assert_eq!(128u8.wrapping_shr(7), 1); - /// assert_eq!(128u8.wrapping_shr(8), 128); + /// assert_eq!((-128i8).wrapping_shr(7), -1); + /// assert_eq!((-128i8).wrapping_shr(8), -128); /// ``` #[stable(feature = "num_wrapping", since = "1.2.0")] #[inline(always)] @@ -1193,15 +1218,13 @@ macro_rules! uint_impl { /// /// Leading and trailing whitespace represent an error. /// - /// # Arguments - /// - /// * src - A string slice - /// * radix - The base to use. Must lie in the range [2 .. 36] + /// # Examples /// - /// # Return value + /// Basic usage: /// - /// `Err(ParseIntError)` if the string did not represent a valid number. - /// Otherwise, `Ok(n)` where `n` is the integer represented by `src`. + /// ``` + /// assert_eq!(u32::from_str_radix("A", 16), Ok(10)); + /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn from_str_radix(src: &str, radix: u32) -> Result { from_str_radix(src, radix) @@ -1745,7 +1768,7 @@ macro_rules! uint_impl { /// Basic usage: /// /// ``` - /// assert_eq!(100i8.wrapping_rem(10), 0); + /// assert_eq!(100u8.wrapping_rem(10), 0); /// ``` #[stable(feature = "num_wrapping", since = "1.2.0")] #[inline(always)] @@ -1783,6 +1806,13 @@ macro_rules! uint_impl { /// where `mask` removes any high-order bits of `rhs` that /// would cause the shift to exceed the bitwidth of the type. /// + /// Note that this is *not* the same as a rotate-left; the + /// RHS of a wrapping shift-left is restricted to the range + /// of the type, rather than the bits shifted out of the LHS + /// being returned to the other end. The primitive integer + /// types all implement a `rotate_left` function, which may + /// be what you want instead. + /// /// # Examples /// /// Basic usage: @@ -1801,6 +1831,13 @@ macro_rules! uint_impl { /// where `mask` removes any high-order bits of `rhs` that /// would cause the shift to exceed the bitwidth of the type. /// + /// Note that this is *not* the same as a rotate-right; the + /// RHS of a wrapping shift-right is restricted to the range + /// of the type, rather than the bits shifted out of the LHS + /// being returned to the other end. The primitive integer + /// types all implement a `rotate_right` function, which may + /// be what you want instead. + /// /// # Examples /// /// Basic usage: @@ -2315,9 +2352,101 @@ macro_rules! from_str_radix_int_impl { } from_str_radix_int_impl! { isize i8 i16 i32 i64 usize u8 u16 u32 u64 } +/// The error type returned when a checked integral type conversion fails. +#[unstable(feature = "try_from", issue = "33417")] +#[derive(Debug, Copy, Clone)] +pub struct TryFromIntError(()); + +impl TryFromIntError { + #[unstable(feature = "int_error_internals", + reason = "available through Error trait and this method should \ + not be exposed publicly", + issue = "0")] + #[doc(hidden)] + pub fn __description(&self) -> &str { + "out of range integral type conversion attempted" + } +} + +#[unstable(feature = "try_from", issue = "33417")] +impl fmt::Display for TryFromIntError { + fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { + self.__description().fmt(fmt) + } +} + +macro_rules! 
same_sign_from_int_impl { + ($storage:ty, $target:ty, $($source:ty),*) => {$( + #[stable(feature = "rust1", since = "1.0.0")] + impl TryFrom<$source> for $target { + type Err = TryFromIntError; + + fn try_from(u: $source) -> Result<$target, TryFromIntError> { + let min = <$target as FromStrRadixHelper>::min_value() as $storage; + let max = <$target as FromStrRadixHelper>::max_value() as $storage; + if u as $storage < min || u as $storage > max { + Err(TryFromIntError(())) + } else { + Ok(u as $target) + } + } + } + )*} +} + +same_sign_from_int_impl!(u64, u8, u8, u16, u32, u64, usize); +same_sign_from_int_impl!(i64, i8, i8, i16, i32, i64, isize); +same_sign_from_int_impl!(u64, u16, u8, u16, u32, u64, usize); +same_sign_from_int_impl!(i64, i16, i8, i16, i32, i64, isize); +same_sign_from_int_impl!(u64, u32, u8, u16, u32, u64, usize); +same_sign_from_int_impl!(i64, i32, i8, i16, i32, i64, isize); +same_sign_from_int_impl!(u64, u64, u8, u16, u32, u64, usize); +same_sign_from_int_impl!(i64, i64, i8, i16, i32, i64, isize); +same_sign_from_int_impl!(u64, usize, u8, u16, u32, u64, usize); +same_sign_from_int_impl!(i64, isize, i8, i16, i32, i64, isize); + +macro_rules! cross_sign_from_int_impl { + ($unsigned:ty, $($signed:ty),*) => {$( + #[stable(feature = "rust1", since = "1.0.0")] + impl TryFrom<$unsigned> for $signed { + type Err = TryFromIntError; + + fn try_from(u: $unsigned) -> Result<$signed, TryFromIntError> { + let max = <$signed as FromStrRadixHelper>::max_value() as u64; + if u as u64 > max { + Err(TryFromIntError(())) + } else { + Ok(u as $signed) + } + } + } + + #[stable(feature = "rust1", since = "1.0.0")] + impl TryFrom<$signed> for $unsigned { + type Err = TryFromIntError; + + fn try_from(u: $signed) -> Result<$unsigned, TryFromIntError> { + let max = <$unsigned as FromStrRadixHelper>::max_value() as u64; + if u < 0 || u as u64 > max { + Err(TryFromIntError(())) + } else { + Ok(u as $unsigned) + } + } + } + )*} +} + +cross_sign_from_int_impl!(u8, i8, i16, i32, i64, isize); +cross_sign_from_int_impl!(u16, i8, i16, i32, i64, isize); +cross_sign_from_int_impl!(u32, i8, i16, i32, i64, isize); +cross_sign_from_int_impl!(u64, i8, i16, i32, i64, isize); +cross_sign_from_int_impl!(usize, i8, i16, i32, i64, isize); + #[doc(hidden)] trait FromStrRadixHelper: PartialOrd + Copy { fn min_value() -> Self; + fn max_value() -> Self; fn from_u32(u: u32) -> Self; fn checked_mul(&self, other: u32) -> Option; fn checked_sub(&self, other: u32) -> Option; @@ -2327,6 +2456,7 @@ trait FromStrRadixHelper: PartialOrd + Copy { macro_rules! doit { ($($t:ty)*) => ($(impl FromStrRadixHelper for $t { fn min_value() -> Self { Self::min_value() } + fn max_value() -> Self { Self::max_value() } fn from_u32(u: u32) -> Self { u as Self } fn checked_mul(&self, other: u32) -> Option { Self::checked_mul(*self, other as Self) diff --git a/src/libcore/num/uint_macros.rs b/src/libcore/num/uint_macros.rs index 6479836cbe..af6b1b89f9 100644 --- a/src/libcore/num/uint_macros.rs +++ b/src/libcore/num/uint_macros.rs @@ -10,6 +10,7 @@ #![doc(hidden)] +#[cfg(stage0)] macro_rules! uint_module { ($T:ty, $bits:expr) => ( #[stable(feature = "rust1", since = "1.0.0")] @@ -20,3 +21,15 @@ pub const MIN: $T = 0 as $T; pub const MAX: $T = !0 as $T; ) } + +#[cfg(not(stage0))] +macro_rules! 
uint_module { ($T:ident, $bits:expr) => ( + +#[stable(feature = "rust1", since = "1.0.0")] +#[allow(missing_docs)] +pub const MIN: $T = $T::min_value(); +#[stable(feature = "rust1", since = "1.0.0")] +#[allow(missing_docs)] +pub const MAX: $T = $T::max_value(); + +) } diff --git a/src/libcore/num/wrapping.rs b/src/libcore/num/wrapping.rs index e28a36af2f..04e8bc4913 100644 --- a/src/libcore/num/wrapping.rs +++ b/src/libcore/num/wrapping.rs @@ -275,6 +275,15 @@ macro_rules! wrapping_impl { *self = *self & other; } } + + #[stable(feature = "wrapping_neg", since = "1.10.0")] + impl Neg for Wrapping<$t> { + type Output = Self; + #[inline(always)] + fn neg(self) -> Self { + Wrapping(0) - self + } + } )*) } diff --git a/src/libcore/ops.rs b/src/libcore/ops.rs index 44c498ef6d..a2f84230af 100644 --- a/src/libcore/ops.rs +++ b/src/libcore/ops.rs @@ -1539,6 +1539,11 @@ impl> Range { /// /// See the [`contains()`](#method.contains) method for its characterization. /// +/// Note: Currently, no overflow checking is done for the iterator +/// implementation; if you use an integer range and the integer overflows, it +/// might panic in debug mode or create an endless loop in release mode. This +/// overflow behavior might change in the future. +/// /// # Examples /// /// ``` diff --git a/src/libcore/option.rs b/src/libcore/option.rs index beed2075d0..045c1f9fea 100644 --- a/src/libcore/option.rs +++ b/src/libcore/option.rs @@ -142,7 +142,6 @@ use self::Option::*; use clone::Clone; -use cmp::{Eq, Ord}; use default::Default; use iter::ExactSizeIterator; use iter::{Iterator, DoubleEndedIterator, FromIterator, IntoIterator}; diff --git a/src/libcore/ptr.rs b/src/libcore/ptr.rs index a6b5355d94..8b3a14b24d 100644 --- a/src/libcore/ptr.rs +++ b/src/libcore/ptr.rs @@ -119,6 +119,17 @@ pub unsafe fn replace(dest: *mut T, mut src: T) -> T { /// `src` is not used before the data is overwritten again (e.g. with `write`, /// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use /// because it will attempt to drop the value previously at `*src`. +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// let x = 12; +/// let y = &x as *const i32; +/// +/// unsafe { println!("{}", std::ptr::read(y)); } +/// ``` #[inline(always)] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn read(src: *const T) -> T { @@ -155,6 +166,21 @@ pub unsafe fn read_and_drop(dest: *mut T) -> T { /// /// This is appropriate for initializing uninitialized memory, or overwriting /// memory that has previously been `read` from. +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// let mut x = 0; +/// let y = &mut x as *mut i32; +/// let z = 12; +/// +/// unsafe { +/// std::ptr::write(y, z); +/// println!("{}", std::ptr::read(y)); +/// } +/// ``` #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub unsafe fn write(dst: *mut T, src: T) { @@ -185,6 +211,17 @@ pub unsafe fn write(dst: *mut T, src: T) { /// `src` is not used before the data is overwritten again (e.g. with `write`, /// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use /// because it will attempt to drop the value previously at `*src`. 
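For the `Neg` impl on `Wrapping` added in the wrapping.rs hunk above, a short illustration of what `Wrapping(0) - self` means for an unsigned type, together with the new `Debug` forwarding to the inner value:

```
use std::num::Wrapping;

fn main() {
    let one = Wrapping(1u8);

    // Negation is defined as Wrapping(0) - self, so it wraps around.
    assert_eq!(-one, Wrapping(255u8));

    // Debug now forwards to the inner value, so this prints just "255".
    println!("{:?}", -one);
}
```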
+/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// let x = 12; +/// let y = &x as *const i32; +/// +/// unsafe { println!("{}", std::ptr::read_volatile(y)); } +/// ``` #[inline] #[stable(feature = "volatile", since = "1.9.0")] pub unsafe fn read_volatile(src: *const T) -> T { @@ -217,6 +254,21 @@ pub unsafe fn read_volatile(src: *const T) -> T { /// /// This is appropriate for initializing uninitialized memory, or overwriting /// memory that has previously been `read` from. +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// let mut x = 0; +/// let y = &mut x as *mut i32; +/// let z = 12; +/// +/// unsafe { +/// std::ptr::write_volatile(y, z); +/// println!("{}", std::ptr::read_volatile(y)); +/// } +/// ``` #[inline] #[stable(feature = "volatile", since = "1.9.0")] pub unsafe fn write_volatile(dst: *mut T, src: T) { @@ -407,6 +459,9 @@ impl *mut T { /// ``` /// let mut s = [1, 2, 3]; /// let ptr: *mut u32 = s.as_mut_ptr(); + /// let first_value = unsafe { ptr.as_mut().unwrap() }; + /// *first_value = 4; + /// println!("{:?}", s); // It'll print: "[4, 2, 3]". /// ``` #[stable(feature = "ptr_as_ref", since = "1.9.0")] #[inline] diff --git a/src/libcore/result.rs b/src/libcore/result.rs index 7f8cf531d2..4d9f042fdd 100644 --- a/src/libcore/result.rs +++ b/src/libcore/result.rs @@ -19,7 +19,7 @@ //! # #[allow(dead_code)] //! enum Result { //! Ok(T), -//! Err(E) +//! Err(E), //! } //! ``` //! @@ -39,7 +39,7 @@ //! None => Err("invalid header length"), //! Some(&1) => Ok(Version::Version1), //! Some(&2) => Ok(Version::Version2), -//! Some(_) => Err("invalid version") +//! Some(_) => Err("invalid version"), //! } //! } //! @@ -254,7 +254,7 @@ pub enum Result { /// Contains the error value #[stable(feature = "rust1", since = "1.0.0")] - Err(#[stable(feature = "rust1", since = "1.0.0")] E) + Err(#[stable(feature = "rust1", since = "1.0.0")] E), } ///////////////////////////////////////////////////////////////////////////// @@ -270,6 +270,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let x: Result = Ok(-3); /// assert_eq!(x.is_ok(), true); @@ -290,6 +292,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let x: Result = Ok(-3); /// assert_eq!(x.is_err(), false); @@ -314,6 +318,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let x: Result = Ok(2); /// assert_eq!(x.ok(), Some(2)); @@ -337,6 +343,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let x: Result = Ok(2); /// assert_eq!(x.err(), None); @@ -362,6 +370,10 @@ impl Result { /// Produces a new `Result`, containing a reference /// into the original, leaving the original in place. 
/// + /// # Examples + /// + /// Basic usage: + /// /// ``` /// let x: Result = Ok(2); /// assert_eq!(x.as_ref(), Ok(&2)); @@ -380,6 +392,10 @@ impl Result { /// Converts from `Result` to `Result<&mut T, &mut E>` /// + /// # Examples + /// + /// Basic usage: + /// /// ``` /// fn mutate(r: &mut Result) { /// match r.as_mut() { @@ -445,6 +461,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// fn stringify(x: u32) -> String { format!("error code: {}", x) } /// @@ -471,6 +489,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let x: Result = Ok(7); /// assert_eq!(x.iter().next(), Some(&7)); @@ -488,6 +508,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let mut x: Result = Ok(7); /// match x.iter_mut().next() { @@ -513,6 +535,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let x: Result = Ok(2); /// let y: Result<&str, &str> = Err("late error"); @@ -545,6 +569,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// fn sq(x: u32) -> Result { Ok(x * x) } /// fn err(x: u32) -> Result { Err(x) } @@ -567,6 +593,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let x: Result = Ok(2); /// let y: Result = Err("late error"); @@ -599,6 +627,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// fn sq(x: u32) -> Result { Ok(x * x) } /// fn err(x: u32) -> Result { Err(x) } @@ -622,6 +652,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let optb = 2; /// let x: Result = Ok(9); @@ -644,6 +676,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// fn count(x: &str) -> usize { x.len() } /// @@ -670,6 +704,8 @@ impl Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let x: Result = Ok(2); /// assert_eq!(x.unwrap(), 2); @@ -696,6 +732,9 @@ impl Result { /// passed message, and the content of the `Err`. 
/// /// # Examples + /// + /// Basic usage: + /// /// ```{.should_panic} /// let x: Result = Err("emergency failure"); /// x.expect("Testing expect"); // panics with `Testing expect: emergency failure` @@ -759,6 +798,8 @@ impl IntoIterator for Result { /// /// # Examples /// + /// Basic usage: + /// /// ``` /// let x: Result = Ok(5); /// let v: Vec = x.into_iter().collect(); diff --git a/src/libcore/slice.rs b/src/libcore/slice.rs index ca1abb4fe0..a0e978f783 100644 --- a/src/libcore/slice.rs +++ b/src/libcore/slice.rs @@ -41,7 +41,7 @@ use default::Default; use fmt; use intrinsics::assume; use iter::*; -use ops::{FnMut, self, Index}; +use ops::{FnMut, self}; use ops::RangeFull; use option::Option; use option::Option::{None, Some}; @@ -106,6 +106,10 @@ pub trait SliceExt { #[stable(feature = "core", since = "1.6.0")] fn binary_search_by(&self, f: F) -> Result where F: FnMut(&Self::Item) -> Ordering; + #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")] + fn binary_search_by_key(&self, b: &B, f: F) -> Result + where F: FnMut(&Self::Item) -> B, + B: Ord; #[stable(feature = "core", since = "1.6.0")] fn len(&self) -> usize; #[stable(feature = "core", since = "1.6.0")] @@ -507,9 +511,18 @@ impl SliceExt for [T] { src.as_ptr(), self.as_mut_ptr(), self.len()); } } + + #[inline] + fn binary_search_by_key(&self, b: &B, mut f: F) -> Result + where F: FnMut(&Self::Item) -> B, + B: Ord + { + self.binary_search_by(|k| f(k).cmp(b)) + } } #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::Index for [T] { type Output = T; @@ -520,6 +533,7 @@ impl ops::Index for [T] { } #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::IndexMut for [T] { #[inline] fn index_mut(&mut self, index: usize) -> &mut T { @@ -553,6 +567,7 @@ fn slice_index_order_fail(index: usize, end: usize) -> ! { /// Requires that `begin <= end` and `end <= self.len()`, /// otherwise slicing will panic. #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::Index> for [T] { type Output = [T]; @@ -579,6 +594,7 @@ impl ops::Index> for [T] { /// /// Equivalent to `&self[0 .. end]` #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::Index> for [T] { type Output = [T]; @@ -594,6 +610,7 @@ impl ops::Index> for [T] { /// /// Equivalent to `&self[begin .. self.len()]` #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::Index> for [T] { type Output = [T]; @@ -619,6 +636,7 @@ impl ops::Index for [T] { } #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::Index> for [T] { type Output = [T]; @@ -634,6 +652,7 @@ impl ops::Index> for [T] { } } #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::Index> for [T] { type Output = [T]; @@ -654,6 +673,7 @@ impl ops::Index> for [T] { /// Requires that `begin <= end` and `end <= self.len()`, /// otherwise slicing will panic. 
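The new `binary_search_by_key` on slices (stabilized in this release per the hunk above) is a thin wrapper over `binary_search_by` that compares extracted keys; a short usage sketch:

```
fn main() {
    // A slice sorted by the first tuple field; search by that key alone.
    let pairs = [(1, "one"), (3, "three"), (5, "five"), (7, "seven")];

    // Found: Ok(index of the matching element).
    assert_eq!(pairs.binary_search_by_key(&5, |&(k, _)| k), Ok(2));

    // Not found: Err(index where the key could be inserted to keep order).
    assert_eq!(pairs.binary_search_by_key(&4, |&(k, _)| k), Err(2));
}
```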
#[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::IndexMut> for [T] { #[inline] fn index_mut(&mut self, index: ops::Range) -> &mut [T] { @@ -678,6 +698,7 @@ impl ops::IndexMut> for [T] { /// /// Equivalent to `&mut self[0 .. end]` #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::IndexMut> for [T] { #[inline] fn index_mut(&mut self, index: ops::RangeTo) -> &mut [T] { @@ -691,6 +712,7 @@ impl ops::IndexMut> for [T] { /// /// Equivalent to `&mut self[begin .. self.len()]` #[stable(feature = "rust1", since = "1.0.0")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::IndexMut> for [T] { #[inline] fn index_mut(&mut self, index: ops::RangeFrom) -> &mut [T] { @@ -713,6 +735,7 @@ impl ops::IndexMut for [T] { } #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::IndexMut> for [T] { #[inline] fn index_mut(&mut self, index: ops::RangeInclusive) -> &mut [T] { @@ -726,6 +749,7 @@ impl ops::IndexMut> for [T] { } } #[unstable(feature = "inclusive_range", reason = "recently added, follows RFC", issue = "28237")] +#[rustc_on_unimplemented = "slice indices are of type `usize`"] impl ops::IndexMut> for [T] { #[inline] fn index_mut(&mut self, index: ops::RangeToInclusive) -> &mut [T] { @@ -877,6 +901,20 @@ macro_rules! make_mut_slice { } /// Immutable slice iterator +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// // First, we declare a type which has `iter` method to get the `Iter` struct (&[usize here]): +/// let slice = &[1, 2, 3]; +/// +/// // Then, we iterate over it: +/// for element in slice.iter() { +/// println!("{}", element); +/// } +/// ``` #[stable(feature = "rust1", since = "1.0.0")] pub struct Iter<'a, T: 'a> { ptr: *const T, @@ -903,6 +941,26 @@ impl<'a, T> Iter<'a, T> { /// /// This has the same lifetime as the original slice, and so the /// iterator can continue to be used while this exists. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// // First, we declare a type which has the `iter` method to get the `Iter` + /// // struct (&[usize here]): + /// let slice = &[1, 2, 3]; + /// + /// // Then, we get the iterator: + /// let mut iter = slice.iter(); + /// // So if we print what `as_slice` method returns here, we have "[1, 2, 3]": + /// println!("{:?}", iter.as_slice()); + /// + /// // Next, we move to the second element of the slice: + /// iter.next(); + /// // Now `as_slice` returns "[2, 3]": + /// println!("{:?}", iter.as_slice()); + /// ``` #[stable(feature = "iter_to_slice", since = "1.4.0")] pub fn as_slice(&self) -> &'a [T] { make_slice!(self.ptr, self.end) @@ -934,6 +992,24 @@ impl<'a, T> Clone for Iter<'a, T> { } /// Mutable slice iterator. +/// +/// # Examples +/// +/// Basic usage: +/// +/// ``` +/// // First, we declare a type which has `iter_mut` method to get the `IterMut` +/// // struct (&[usize here]): +/// let mut slice = &mut [1, 2, 3]; +/// +/// // Then, we iterate over it and increment each element value: +/// for element in slice.iter_mut() { +/// *element += 1; +/// } +/// +/// // We now have "[2, 3, 4]": +/// println!("{:?}", slice); +/// ``` #[stable(feature = "rust1", since = "1.0.0")] pub struct IterMut<'a, T: 'a> { ptr: *mut T, @@ -962,6 +1038,35 @@ impl<'a, T> IterMut<'a, T> { /// to consume the iterator. 
Consider using the `Slice` and /// `SliceMut` implementations for obtaining slices with more /// restricted lifetimes that do not consume the iterator. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ``` + /// // First, we declare a type which has `iter_mut` method to get the `IterMut` + /// // struct (&[usize here]): + /// let mut slice = &mut [1, 2, 3]; + /// + /// { + /// // Then, we get the iterator: + /// let mut iter = slice.iter_mut(); + /// // We move to next element: + /// iter.next(); + /// // So if we print what `into_slice` method returns here, we have "[2, 3]": + /// println!("{:?}", iter.into_slice()); + /// } + /// + /// // Now let's modify a value of the slice: + /// { + /// // First we get back the iterator: + /// let mut iter = slice.iter_mut(); + /// // We change the value of the first element of the slice returned by the `next` method: + /// *iter.next().unwrap() += 1; + /// } + /// // Now slice is "[2, 2, 3]": + /// println!("{:?}", slice); + /// ``` #[stable(feature = "iter_to_slice", since = "1.4.0")] pub fn into_slice(self) -> &'a mut [T] { make_mut_slice!(self.ptr, self.end) @@ -1835,4 +1940,3 @@ macro_rules! impl_marker_for { impl_marker_for!(BytewiseEquality, u8 i8 u16 i16 u32 i32 u64 i64 usize isize char bool); - diff --git a/src/libcore/str/mod.rs b/src/libcore/str/mod.rs index ef8670df91..2c34caf63b 100644 --- a/src/libcore/str/mod.rs +++ b/src/libcore/str/mod.rs @@ -19,7 +19,6 @@ use self::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher}; use char::{self, CharExt}; use clone::Clone; -use cmp::Eq; use convert::AsRef; use default::Default; use fmt; @@ -1319,7 +1318,6 @@ Section: Trait implementations mod traits { use cmp::{Ord, Ordering, PartialEq, PartialOrd, Eq}; - use iter::Iterator; use option::Option; use option::Option::Some; use ops; @@ -1942,7 +1940,8 @@ impl StrExt for str { if index == 0 || index == self.len() { return true; } match self.as_bytes().get(index) { None => false, - Some(&b) => b < 128 || b >= 192, + // This is bit magic equivalent to: b < 128 || b >= 192 + Some(&b) => (b as i8) >= -0x40, } } diff --git a/src/libcore/sync/atomic.rs b/src/libcore/sync/atomic.rs index 483c3822df..d0a64de07e 100644 --- a/src/libcore/sync/atomic.rs +++ b/src/libcore/sync/atomic.rs @@ -26,8 +26,9 @@ //! [1]: http://llvm.org/docs/LangRef.html#memory-model-for-concurrent-operations //! //! Atomic variables are safe to share between threads (they implement `Sync`) -//! but they do not themselves provide the mechanism for sharing. The most -//! common way to share an atomic variable is to put it into an `Arc` (an +//! but they do not themselves provide the mechanism for sharing and follow the +//! [threading model](../../../std/thread/index.html#the-threading-model) of rust. +//! The most common way to share an atomic variable is to put it into an `Arc` (an //! atomically-reference-counted shared pointer). //! //! Most atomic types may be stored in static variables, initialized using @@ -48,12 +49,16 @@ //! let spinlock = Arc::new(AtomicUsize::new(1)); //! //! let spinlock_clone = spinlock.clone(); -//! thread::spawn(move|| { +//! let thread = thread::spawn(move|| { //! spinlock_clone.store(0, Ordering::SeqCst); //! }); //! //! // Wait for the other thread to release the lock //! while spinlock.load(Ordering::SeqCst) != 0 {} +//! +//! if let Err(panic) = thread.join() { +//! println!("Thread had an error: {:?}", panic); +//! } //! } //! ``` //! 
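The `is_char_boundary` change above swaps `b < 128 || b >= 192` for the bit trick `(b as i8) >= -0x40`; the two are equivalent because every UTF-8 continuation byte has the form `0b10xxxxxx`, i.e. lies in `128..192`, which as a signed byte is exactly the range below `-0x40`. A quick exhaustive check:

```
fn main() {
    for byte in 0..256u32 {
        let b = byte as u8;
        let old = b < 128 || b >= 192; // "not a continuation byte"
        let new = (b as i8) >= -0x40;  // the bit-magic replacement
        assert_eq!(old, new);
    }
}
```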
@@ -83,62 +88,33 @@ use default::Default; use fmt; /// A boolean type which can be safely shared between threads. +#[cfg(any(stage0, target_has_atomic = "8"))] #[stable(feature = "rust1", since = "1.0.0")] pub struct AtomicBool { - v: UnsafeCell, + v: UnsafeCell, } +#[cfg(any(stage0, target_has_atomic = "8"))] #[stable(feature = "rust1", since = "1.0.0")] impl Default for AtomicBool { fn default() -> Self { - Self::new(Default::default()) + Self::new(false) } } // Send is implicitly implemented for AtomicBool. +#[cfg(any(stage0, target_has_atomic = "8"))] #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Sync for AtomicBool {} -/// A signed integer type which can be safely shared between threads. -#[stable(feature = "rust1", since = "1.0.0")] -pub struct AtomicIsize { - v: UnsafeCell, -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Default for AtomicIsize { - fn default() -> Self { - Self::new(Default::default()) - } -} - -// Send is implicitly implemented for AtomicIsize. -#[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync for AtomicIsize {} - -/// An unsigned integer type which can be safely shared between threads. -#[stable(feature = "rust1", since = "1.0.0")] -pub struct AtomicUsize { - v: UnsafeCell, -} - -#[stable(feature = "rust1", since = "1.0.0")] -impl Default for AtomicUsize { - fn default() -> Self { - Self::new(Default::default()) - } -} - -// Send is implicitly implemented for AtomicUsize. -#[stable(feature = "rust1", since = "1.0.0")] -unsafe impl Sync for AtomicUsize {} - /// A raw pointer type which can be safely shared between threads. +#[cfg(any(stage0, target_has_atomic = "ptr"))] #[stable(feature = "rust1", since = "1.0.0")] pub struct AtomicPtr { p: UnsafeCell<*mut T>, } +#[cfg(any(stage0, target_has_atomic = "ptr"))] #[stable(feature = "rust1", since = "1.0.0")] impl Default for AtomicPtr { fn default() -> AtomicPtr { @@ -146,8 +122,10 @@ impl Default for AtomicPtr { } } +#[cfg(any(stage0, target_has_atomic = "ptr"))] #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Send for AtomicPtr {} +#[cfg(any(stage0, target_has_atomic = "ptr"))] #[stable(feature = "rust1", since = "1.0.0")] unsafe impl Sync for AtomicPtr {} @@ -169,13 +147,13 @@ pub enum Ordering { #[stable(feature = "rust1", since = "1.0.0")] Relaxed, /// When coupled with a store, all previous writes become visible - /// to another thread that performs a load with `Acquire` ordering + /// to the other threads that perform a load with `Acquire` ordering /// on the same value. #[stable(feature = "rust1", since = "1.0.0")] Release, /// When coupled with a load, all subsequent loads will see data /// written before a store with `Release` ordering on the same value - /// in another thread. + /// in other threads. #[stable(feature = "rust1", since = "1.0.0")] Acquire, /// When coupled with a load, uses `Acquire` ordering, and with a store @@ -189,18 +167,11 @@ pub enum Ordering { } /// An `AtomicBool` initialized to `false`. +#[cfg(any(stage0, target_has_atomic = "8"))] #[stable(feature = "rust1", since = "1.0.0")] pub const ATOMIC_BOOL_INIT: AtomicBool = AtomicBool::new(false); -/// An `AtomicIsize` initialized to `0`. -#[stable(feature = "rust1", since = "1.0.0")] -pub const ATOMIC_ISIZE_INIT: AtomicIsize = AtomicIsize::new(0); -/// An `AtomicUsize` initialized to `0`. -#[stable(feature = "rust1", since = "1.0.0")] -pub const ATOMIC_USIZE_INIT: AtomicUsize = AtomicUsize::new(0); - -// NB: Needs to be -1 (0b11111111...) 
to make fetch_nand work correctly -const UINT_TRUE: usize = !0; +#[cfg(any(stage0, target_has_atomic = "8"))] impl AtomicBool { /// Creates a new `AtomicBool`. /// @@ -215,7 +186,7 @@ impl AtomicBool { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub const fn new(v: bool) -> AtomicBool { - AtomicBool { v: UnsafeCell::new(-(v as isize) as usize) } + AtomicBool { v: UnsafeCell::new(v as u8) } } /// Loads a value from the bool. @@ -238,7 +209,7 @@ impl AtomicBool { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn load(&self, order: Ordering) -> bool { - unsafe { atomic_load(self.v.get(), order) > 0 } + unsafe { atomic_load(self.v.get(), order) != 0 } } /// Stores a value into the bool. @@ -262,9 +233,7 @@ impl AtomicBool { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn store(&self, val: bool, order: Ordering) { - let val = if val { UINT_TRUE } else { 0 }; - - unsafe { atomic_store(self.v.get(), val, order); } + unsafe { atomic_store(self.v.get(), val as u8, order); } } /// Stores a value into the bool, returning the old value. @@ -284,9 +253,7 @@ impl AtomicBool { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn swap(&self, val: bool, order: Ordering) -> bool { - let val = if val { UINT_TRUE } else { 0 }; - - unsafe { atomic_swap(self.v.get(), val, order) > 0 } + unsafe { atomic_swap(self.v.get(), val as u8, order) != 0 } } /// Stores a value into the `bool` if the current value is the same as the `current` value. @@ -332,7 +299,6 @@ impl AtomicBool { /// # Examples /// /// ``` - /// # #![feature(extended_compare_and_swap)] /// use std::sync::atomic::{AtomicBool, Ordering}; /// /// let some_bool = AtomicBool::new(true); @@ -351,18 +317,16 @@ impl AtomicBool { /// assert_eq!(some_bool.load(Ordering::Relaxed), false); /// ``` #[inline] - #[unstable(feature = "extended_compare_and_swap", reason = "recently added", issue = "31767")] + #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] pub fn compare_exchange(&self, current: bool, new: bool, success: Ordering, failure: Ordering) -> Result { - let current = if current { UINT_TRUE } else { 0 }; - let new = if new { UINT_TRUE } else { 0 }; - - match unsafe { atomic_compare_exchange(self.v.get(), current, new, success, failure) } { - Ok(x) => Ok(x > 0), - Err(x) => Err(x > 0), + match unsafe { atomic_compare_exchange(self.v.get(), current as u8, new as u8, + success, failure) } { + Ok(x) => Ok(x != 0), + Err(x) => Err(x != 0), } } @@ -382,7 +346,6 @@ impl AtomicBool { /// # Examples /// /// ``` - /// # #![feature(extended_compare_and_swap)] /// use std::sync::atomic::{AtomicBool, Ordering}; /// /// let val = AtomicBool::new(false); @@ -397,19 +360,16 @@ impl AtomicBool { /// } /// ``` #[inline] - #[unstable(feature = "extended_compare_and_swap", reason = "recently added", issue = "31767")] + #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] pub fn compare_exchange_weak(&self, current: bool, new: bool, success: Ordering, failure: Ordering) -> Result { - let current = if current { UINT_TRUE } else { 0 }; - let new = if new { UINT_TRUE } else { 0 }; - - match unsafe { atomic_compare_exchange_weak(self.v.get(), current, new, + match unsafe { atomic_compare_exchange_weak(self.v.get(), current as u8, new as u8, success, failure) } { - Ok(x) => Ok(x > 0), - Err(x) => Err(x > 0), + Ok(x) => Ok(x != 0), + Err(x) => Err(x != 0), } } @@ -440,9 +400,7 @@ impl AtomicBool { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn fetch_and(&self, val: bool, order: 
Ordering) -> bool { - let val = if val { UINT_TRUE } else { 0 }; - - unsafe { atomic_and(self.v.get(), val, order) > 0 } + unsafe { atomic_and(self.v.get(), val as u8, order) != 0 } } /// Logical "nand" with a boolean value. @@ -473,9 +431,20 @@ impl AtomicBool { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn fetch_nand(&self, val: bool, order: Ordering) -> bool { - let val = if val { UINT_TRUE } else { 0 }; - - unsafe { atomic_nand(self.v.get(), val, order) > 0 } + // We can't use atomic_nand here because it can result in a bool with + // an invalid value. This happens because the atomic operation is done + // with an 8-bit integer internally, which would set the upper 7 bits. + // So we just use a compare-exchange loop instead, which is what the + // intrinsic actually expands to anyways on many platforms. + let mut old = self.load(Relaxed); + loop { + let new = !(old && val); + match self.compare_exchange_weak(old, new, order, Relaxed) { + Ok(_) => break, + Err(x) => old = x, + } + } + old } /// Logical "or" with a boolean value. @@ -505,9 +474,7 @@ impl AtomicBool { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn fetch_or(&self, val: bool, order: Ordering) -> bool { - let val = if val { UINT_TRUE } else { 0 }; - - unsafe { atomic_or(self.v.get(), val, order) > 0 } + unsafe { atomic_or(self.v.get(), val as u8, order) != 0 } } /// Logical "xor" with a boolean value. @@ -537,563 +504,11 @@ impl AtomicBool { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn fetch_xor(&self, val: bool, order: Ordering) -> bool { - let val = if val { UINT_TRUE } else { 0 }; - - unsafe { atomic_xor(self.v.get(), val, order) > 0 } - } -} - -impl AtomicIsize { - /// Creates a new `AtomicIsize`. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::AtomicIsize; - /// - /// let atomic_forty_two = AtomicIsize::new(42); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub const fn new(v: isize) -> AtomicIsize { - AtomicIsize {v: UnsafeCell::new(v)} - } - - /// Loads a value from the isize. - /// - /// `load` takes an `Ordering` argument which describes the memory ordering of this operation. - /// - /// # Panics - /// - /// Panics if `order` is `Release` or `AcqRel`. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let some_isize = AtomicIsize::new(5); - /// - /// assert_eq!(some_isize.load(Ordering::Relaxed), 5); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn load(&self, order: Ordering) -> isize { - unsafe { atomic_load(self.v.get(), order) } - } - - /// Stores a value into the isize. - /// - /// `store` takes an `Ordering` argument which describes the memory ordering of this operation. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let some_isize = AtomicIsize::new(5); - /// - /// some_isize.store(10, Ordering::Relaxed); - /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); - /// ``` - /// - /// # Panics - /// - /// Panics if `order` is `Acquire` or `AcqRel`. - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn store(&self, val: isize, order: Ordering) { - unsafe { atomic_store(self.v.get(), val, order); } - } - - /// Stores a value into the isize, returning the old value. - /// - /// `swap` takes an `Ordering` argument which describes the memory ordering of this operation. 
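The `fetch_nand` rewrite above keeps the observable behaviour (store `!(old & val)`, return `old`) but reaches it through a `compare_exchange_weak` loop so that the stored byte is always a valid `bool`; from the caller's side nothing changes:

```
use std::sync::atomic::{AtomicBool, Ordering};

fn main() {
    let flag = AtomicBool::new(true);

    // old = true, new stored value = !(true && true) = false
    assert_eq!(flag.fetch_nand(true, Ordering::SeqCst), true);
    assert_eq!(flag.load(Ordering::SeqCst), false);

    // old = false, new stored value = !(false && true) = true
    assert_eq!(flag.fetch_nand(true, Ordering::SeqCst), false);
    assert_eq!(flag.load(Ordering::SeqCst), true);
}
```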
- /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let some_isize = AtomicIsize::new(5); - /// - /// assert_eq!(some_isize.swap(10, Ordering::Relaxed), 5); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn swap(&self, val: isize, order: Ordering) -> isize { - unsafe { atomic_swap(self.v.get(), val, order) } - } - - /// Stores a value into the `isize` if the current value is the same as the `current` value. - /// - /// The return value is always the previous value. If it is equal to `current`, then the value - /// was updated. - /// - /// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of - /// this operation. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let some_isize = AtomicIsize::new(5); - /// - /// assert_eq!(some_isize.compare_and_swap(5, 10, Ordering::Relaxed), 5); - /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); - /// - /// assert_eq!(some_isize.compare_and_swap(6, 12, Ordering::Relaxed), 10); - /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn compare_and_swap(&self, current: isize, new: isize, order: Ordering) -> isize { - match self.compare_exchange(current, new, order, strongest_failure_ordering(order)) { - Ok(x) => x, - Err(x) => x, - } - } - - /// Stores a value into the `isize` if the current value is the same as the `current` value. - /// - /// The return value is a result indicating whether the new value was written and containing - /// the previous value. On success this value is guaranteed to be equal to `new`. - /// - /// `compare_exchange` takes two `Ordering` arguments to describe the memory ordering of this - /// operation. The first describes the required ordering if the operation succeeds while the - /// second describes the required ordering when the operation fails. The failure ordering can't - /// be `Release` or `AcqRel` and must be equivalent or weaker than the success ordering. - /// - /// # Examples - /// - /// ``` - /// # #![feature(extended_compare_and_swap)] - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let some_isize = AtomicIsize::new(5); - /// - /// assert_eq!(some_isize.compare_exchange(5, 10, - /// Ordering::Acquire, - /// Ordering::Relaxed), - /// Ok(5)); - /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); - /// - /// assert_eq!(some_isize.compare_exchange(6, 12, - /// Ordering::SeqCst, - /// Ordering::Acquire), - /// Err(10)); - /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); - /// ``` - #[inline] - #[unstable(feature = "extended_compare_and_swap", reason = "recently added", issue = "31767")] - pub fn compare_exchange(&self, - current: isize, - new: isize, - success: Ordering, - failure: Ordering) -> Result { - unsafe { atomic_compare_exchange(self.v.get(), current, new, success, failure) } - } - - /// Stores a value into the `isize` if the current value is the same as the `current` value. - /// - /// Unlike `compare_exchange`, this function is allowed to spuriously fail even when the - /// comparison succeeds, which can result in more efficient code on some platforms. The - /// return value is a result indicating whether the new value was written and containing the - /// previous value. - /// - /// `compare_exchange_weak` takes two `Ordering` arguments to describe the memory - /// ordering of this operation. 
The first describes the required ordering if the operation - /// succeeds while the second describes the required ordering when the operation fails. The - /// failure ordering can't be `Release` or `AcqRel` and must be equivalent or weaker than the - /// success ordering. - /// - /// # Examples - /// - /// ``` - /// # #![feature(extended_compare_and_swap)] - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let val = AtomicIsize::new(4); - /// - /// let mut old = val.load(Ordering::Relaxed); - /// loop { - /// let new = old * 2; - /// match val.compare_exchange_weak(old, new, Ordering::SeqCst, Ordering::Relaxed) { - /// Ok(_) => break, - /// Err(x) => old = x, - /// } - /// } - /// ``` - #[inline] - #[unstable(feature = "extended_compare_and_swap", reason = "recently added", issue = "31767")] - pub fn compare_exchange_weak(&self, - current: isize, - new: isize, - success: Ordering, - failure: Ordering) -> Result { - unsafe { atomic_compare_exchange_weak(self.v.get(), current, new, success, failure) } - } - - /// Add an isize to the current value, returning the previous value. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let foo = AtomicIsize::new(0); - /// assert_eq!(foo.fetch_add(10, Ordering::SeqCst), 0); - /// assert_eq!(foo.load(Ordering::SeqCst), 10); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn fetch_add(&self, val: isize, order: Ordering) -> isize { - unsafe { atomic_add(self.v.get(), val, order) } - } - - /// Subtract an isize from the current value, returning the previous value. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let foo = AtomicIsize::new(0); - /// assert_eq!(foo.fetch_sub(10, Ordering::SeqCst), 0); - /// assert_eq!(foo.load(Ordering::SeqCst), -10); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn fetch_sub(&self, val: isize, order: Ordering) -> isize { - unsafe { atomic_sub(self.v.get(), val, order) } - } - - /// Bitwise and with the current isize, returning the previous value. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let foo = AtomicIsize::new(0b101101); - /// assert_eq!(foo.fetch_and(0b110011, Ordering::SeqCst), 0b101101); - /// assert_eq!(foo.load(Ordering::SeqCst), 0b100001); - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn fetch_and(&self, val: isize, order: Ordering) -> isize { - unsafe { atomic_and(self.v.get(), val, order) } - } - - /// Bitwise or with the current isize, returning the previous value. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let foo = AtomicIsize::new(0b101101); - /// assert_eq!(foo.fetch_or(0b110011, Ordering::SeqCst), 0b101101); - /// assert_eq!(foo.load(Ordering::SeqCst), 0b111111); - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn fetch_or(&self, val: isize, order: Ordering) -> isize { - unsafe { atomic_or(self.v.get(), val, order) } - } - - /// Bitwise xor with the current isize, returning the previous value. 
- /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicIsize, Ordering}; - /// - /// let foo = AtomicIsize::new(0b101101); - /// assert_eq!(foo.fetch_xor(0b110011, Ordering::SeqCst), 0b101101); - /// assert_eq!(foo.load(Ordering::SeqCst), 0b011110); - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn fetch_xor(&self, val: isize, order: Ordering) -> isize { - unsafe { atomic_xor(self.v.get(), val, order) } - } -} - -impl AtomicUsize { - /// Creates a new `AtomicUsize`. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::AtomicUsize; - /// - /// let atomic_forty_two = AtomicUsize::new(42); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub const fn new(v: usize) -> AtomicUsize { - AtomicUsize { v: UnsafeCell::new(v) } - } - - /// Loads a value from the usize. - /// - /// `load` takes an `Ordering` argument which describes the memory ordering of this operation. - /// - /// # Panics - /// - /// Panics if `order` is `Release` or `AcqRel`. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let some_usize = AtomicUsize::new(5); - /// - /// assert_eq!(some_usize.load(Ordering::Relaxed), 5); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn load(&self, order: Ordering) -> usize { - unsafe { atomic_load(self.v.get(), order) } - } - - /// Stores a value into the usize. - /// - /// `store` takes an `Ordering` argument which describes the memory ordering of this operation. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let some_usize = AtomicUsize::new(5); - /// - /// some_usize.store(10, Ordering::Relaxed); - /// assert_eq!(some_usize.load(Ordering::Relaxed), 10); - /// ``` - /// - /// # Panics - /// - /// Panics if `order` is `Acquire` or `AcqRel`. - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn store(&self, val: usize, order: Ordering) { - unsafe { atomic_store(self.v.get(), val, order); } - } - - /// Stores a value into the usize, returning the old value. - /// - /// `swap` takes an `Ordering` argument which describes the memory ordering of this operation. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let some_usize = AtomicUsize::new(5); - /// - /// assert_eq!(some_usize.swap(10, Ordering::Relaxed), 5); - /// assert_eq!(some_usize.load(Ordering::Relaxed), 10); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn swap(&self, val: usize, order: Ordering) -> usize { - unsafe { atomic_swap(self.v.get(), val, order) } - } - - /// Stores a value into the `usize` if the current value is the same as the `current` value. - /// - /// The return value is always the previous value. If it is equal to `current`, then the value - /// was updated. - /// - /// `compare_and_swap` also takes an `Ordering` argument which describes the memory ordering of - /// this operation. 
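For reference, both the hand-written impls being removed here and the `atomic_int!` macro that replaces them later in this patch define `compare_and_swap` as a thin wrapper over `compare_exchange`, picking the failure ordering via `strongest_failure_ordering`. A short usage sketch contrasting the two calls on `AtomicUsize` (values and orderings are illustrative):

```
use std::sync::atomic::{AtomicUsize, Ordering};

fn main() {
    let a = AtomicUsize::new(5);

    // compare_and_swap always returns the previous value; comparing it with
    // `current` tells you whether the store actually happened.
    assert_eq!(a.compare_and_swap(5, 10, Ordering::SeqCst), 5);

    // compare_exchange makes the outcome explicit: Ok(previous) on success,
    // Err(actual) on failure, with separate success and failure orderings.
    assert_eq!(a.compare_exchange(5, 7, Ordering::SeqCst, Ordering::Relaxed),
               Err(10));
    assert_eq!(a.load(Ordering::SeqCst), 10);
}
```

`compare_and_swap` folds both outcomes into the returned previous value, while `compare_exchange` keeps success and failure distinct and lets the caller choose a weaker ordering for the failure path.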
- /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let some_usize = AtomicUsize::new(5); - /// - /// assert_eq!(some_usize.compare_and_swap(5, 10, Ordering::Relaxed), 5); - /// assert_eq!(some_usize.load(Ordering::Relaxed), 10); - /// - /// assert_eq!(some_usize.compare_and_swap(6, 12, Ordering::Relaxed), 10); - /// assert_eq!(some_usize.load(Ordering::Relaxed), 10); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn compare_and_swap(&self, current: usize, new: usize, order: Ordering) -> usize { - match self.compare_exchange(current, new, order, strongest_failure_ordering(order)) { - Ok(x) => x, - Err(x) => x, - } - } - - /// Stores a value into the `usize` if the current value is the same as the `current` value. - /// - /// The return value is a result indicating whether the new value was written and containing - /// the previous value. On success this value is guaranteed to be equal to `new`. - /// - /// `compare_exchange` takes two `Ordering` arguments to describe the memory ordering of this - /// operation. The first describes the required ordering if the operation succeeds while the - /// second describes the required ordering when the operation fails. The failure ordering can't - /// be `Release` or `AcqRel` and must be equivalent or weaker than the success ordering. - /// - /// # Examples - /// - /// ``` - /// # #![feature(extended_compare_and_swap)] - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let some_isize = AtomicUsize::new(5); - /// - /// assert_eq!(some_isize.compare_exchange(5, 10, - /// Ordering::Acquire, - /// Ordering::Relaxed), - /// Ok(5)); - /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); - /// - /// assert_eq!(some_isize.compare_exchange(6, 12, - /// Ordering::SeqCst, - /// Ordering::Acquire), - /// Err(10)); - /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); - /// ``` - #[inline] - #[unstable(feature = "extended_compare_and_swap", reason = "recently added", issue = "31767")] - pub fn compare_exchange(&self, - current: usize, - new: usize, - success: Ordering, - failure: Ordering) -> Result { - unsafe { atomic_compare_exchange(self.v.get(), current, new, success, failure) } - } - - /// Stores a value into the `usize` if the current value is the same as the `current` value. - /// - /// Unlike `compare_exchange`, this function is allowed to spuriously fail even when the - /// comparison succeeds, which can result in more efficient code on some platforms. The - /// return value is a result indicating whether the new value was written and containing the - /// previous value. - /// - /// `compare_exchange_weak` takes two `Ordering` arguments to describe the memory - /// ordering of this operation. The first describes the required ordering if the operation - /// succeeds while the second describes the required ordering when the operation fails. The - /// failure ordering can't be `Release` or `AcqRel` and must be equivalent or weaker than the - /// success ordering. 
- /// - /// # Examples - /// - /// ``` - /// # #![feature(extended_compare_and_swap)] - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let val = AtomicUsize::new(4); - /// - /// let mut old = val.load(Ordering::Relaxed); - /// loop { - /// let new = old * 2; - /// match val.compare_exchange_weak(old, new, Ordering::SeqCst, Ordering::Relaxed) { - /// Ok(_) => break, - /// Err(x) => old = x, - /// } - /// } - /// ``` - #[inline] - #[unstable(feature = "extended_compare_and_swap", reason = "recently added", issue = "31767")] - pub fn compare_exchange_weak(&self, - current: usize, - new: usize, - success: Ordering, - failure: Ordering) -> Result { - unsafe { atomic_compare_exchange_weak(self.v.get(), current, new, success, failure) } - } - - /// Add to the current usize, returning the previous value. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let foo = AtomicUsize::new(0); - /// assert_eq!(foo.fetch_add(10, Ordering::SeqCst), 0); - /// assert_eq!(foo.load(Ordering::SeqCst), 10); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn fetch_add(&self, val: usize, order: Ordering) -> usize { - unsafe { atomic_add(self.v.get(), val, order) } - } - - /// Subtract from the current usize, returning the previous value. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let foo = AtomicUsize::new(10); - /// assert_eq!(foo.fetch_sub(10, Ordering::SeqCst), 10); - /// assert_eq!(foo.load(Ordering::SeqCst), 0); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn fetch_sub(&self, val: usize, order: Ordering) -> usize { - unsafe { atomic_sub(self.v.get(), val, order) } - } - - /// Bitwise and with the current usize, returning the previous value. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let foo = AtomicUsize::new(0b101101); - /// assert_eq!(foo.fetch_and(0b110011, Ordering::SeqCst), 0b101101); - /// assert_eq!(foo.load(Ordering::SeqCst), 0b100001); - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn fetch_and(&self, val: usize, order: Ordering) -> usize { - unsafe { atomic_and(self.v.get(), val, order) } - } - - /// Bitwise or with the current usize, returning the previous value. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let foo = AtomicUsize::new(0b101101); - /// assert_eq!(foo.fetch_or(0b110011, Ordering::SeqCst), 0b101101); - /// assert_eq!(foo.load(Ordering::SeqCst), 0b111111); - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn fetch_or(&self, val: usize, order: Ordering) -> usize { - unsafe { atomic_or(self.v.get(), val, order) } - } - - /// Bitwise xor with the current usize, returning the previous value. - /// - /// # Examples - /// - /// ``` - /// use std::sync::atomic::{AtomicUsize, Ordering}; - /// - /// let foo = AtomicUsize::new(0b101101); - /// assert_eq!(foo.fetch_xor(0b110011, Ordering::SeqCst), 0b101101); - /// assert_eq!(foo.load(Ordering::SeqCst), 0b011110); - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - pub fn fetch_xor(&self, val: usize, order: Ordering) -> usize { - unsafe { atomic_xor(self.v.get(), val, order) } + unsafe { atomic_xor(self.v.get(), val as u8, order) != 0 } } } +#[cfg(any(stage0, target_has_atomic = "ptr"))] impl AtomicPtr { /// Creates a new `AtomicPtr`. 
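The attribute changes in these hunks stabilize the `extended_compare_and_swap` methods as of 1.10.0, which is why the `#![feature(extended_compare_and_swap)]` lines disappear from the doc examples. A small standalone sketch of `AtomicPtr::compare_exchange` under that assumption (variable names and values are illustrative):

```
use std::sync::atomic::{AtomicPtr, Ordering};

fn main() {
    let mut five = 5;
    let mut ten = 10;
    let five_ptr: *mut i32 = &mut five;
    let ten_ptr: *mut i32 = &mut ten;

    let some_ptr = AtomicPtr::new(five_ptr);

    // Ok(previous) means the pointer was swapped; Err(actual) means it was not.
    let value = some_ptr.compare_exchange(five_ptr, ten_ptr,
                                          Ordering::SeqCst, Ordering::Relaxed);
    assert_eq!(value, Ok(five_ptr));
    assert_eq!(some_ptr.load(Ordering::SeqCst), ten_ptr);
}
```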
/// @@ -1228,7 +643,6 @@ impl AtomicPtr { /// # Examples /// /// ``` - /// # #![feature(extended_compare_and_swap)] /// use std::sync::atomic::{AtomicPtr, Ordering}; /// /// let ptr = &mut 5; @@ -1241,7 +655,7 @@ impl AtomicPtr { /// Ordering::SeqCst, Ordering::Relaxed); /// ``` #[inline] - #[unstable(feature = "extended_compare_and_swap", reason = "recently added", issue = "31767")] + #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] pub fn compare_exchange(&self, current: *mut T, new: *mut T, @@ -1276,7 +690,6 @@ impl AtomicPtr { /// # Examples /// /// ``` - /// # #![feature(extended_compare_and_swap)] /// use std::sync::atomic::{AtomicPtr, Ordering}; /// /// let some_ptr = AtomicPtr::new(&mut 5); @@ -1291,7 +704,7 @@ impl AtomicPtr { /// } /// ``` #[inline] - #[unstable(feature = "extended_compare_and_swap", reason = "recently added", issue = "31767")] + #[stable(feature = "extended_compare_and_swap", since = "1.10.0")] pub fn compare_exchange_weak(&self, current: *mut T, new: *mut T, @@ -1311,6 +724,403 @@ impl AtomicPtr { } } +macro_rules! atomic_int { + ($stable:meta, + $stable_cxchg:meta, + $stable_debug:meta, + $int_type:ident $atomic_type:ident $atomic_init:ident) => { + /// An integer type which can be safely shared between threads. + #[$stable] + pub struct $atomic_type { + v: UnsafeCell<$int_type>, + } + + /// An atomic integer initialized to `0`. + #[$stable] + pub const $atomic_init: $atomic_type = $atomic_type::new(0); + + #[$stable] + impl Default for $atomic_type { + fn default() -> Self { + Self::new(Default::default()) + } + } + + #[$stable_debug] + impl fmt::Debug for $atomic_type { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_tuple(stringify!($atomic_type)) + .field(&self.load(Ordering::SeqCst)) + .finish() + } + } + + // Send is implicitly implemented. + #[$stable] + unsafe impl Sync for $atomic_type {} + + impl $atomic_type { + /// Creates a new atomic integer. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::AtomicIsize; + /// + /// let atomic_forty_two = AtomicIsize::new(42); + /// ``` + #[inline] + #[$stable] + pub const fn new(v: $int_type) -> Self { + $atomic_type {v: UnsafeCell::new(v)} + } + + /// Loads a value from the atomic integer. + /// + /// `load` takes an `Ordering` argument which describes the memory ordering of this + /// operation. + /// + /// # Panics + /// + /// Panics if `order` is `Release` or `AcqRel`. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let some_isize = AtomicIsize::new(5); + /// + /// assert_eq!(some_isize.load(Ordering::Relaxed), 5); + /// ``` + #[inline] + #[$stable] + pub fn load(&self, order: Ordering) -> $int_type { + unsafe { atomic_load(self.v.get(), order) } + } + + /// Stores a value into the atomic integer. + /// + /// `store` takes an `Ordering` argument which describes the memory ordering of this + /// operation. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let some_isize = AtomicIsize::new(5); + /// + /// some_isize.store(10, Ordering::Relaxed); + /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); + /// ``` + /// + /// # Panics + /// + /// Panics if `order` is `Acquire` or `AcqRel`. + #[inline] + #[$stable] + pub fn store(&self, val: $int_type, order: Ordering) { + unsafe { atomic_store(self.v.get(), val, order); } + } + + /// Stores a value into the atomic integer, returning the old value. 
+ /// + /// `swap` takes an `Ordering` argument which describes the memory ordering of this + /// operation. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let some_isize = AtomicIsize::new(5); + /// + /// assert_eq!(some_isize.swap(10, Ordering::Relaxed), 5); + /// ``` + #[inline] + #[$stable] + pub fn swap(&self, val: $int_type, order: Ordering) -> $int_type { + unsafe { atomic_swap(self.v.get(), val, order) } + } + + /// Stores a value into the atomic integer if the current value is the same as the + /// `current` value. + /// + /// The return value is always the previous value. If it is equal to `current`, then the + /// value was updated. + /// + /// `compare_and_swap` also takes an `Ordering` argument which describes the memory + /// ordering of this operation. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let some_isize = AtomicIsize::new(5); + /// + /// assert_eq!(some_isize.compare_and_swap(5, 10, Ordering::Relaxed), 5); + /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); + /// + /// assert_eq!(some_isize.compare_and_swap(6, 12, Ordering::Relaxed), 10); + /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); + /// ``` + #[inline] + #[$stable] + pub fn compare_and_swap(&self, + current: $int_type, + new: $int_type, + order: Ordering) -> $int_type { + match self.compare_exchange(current, + new, + order, + strongest_failure_ordering(order)) { + Ok(x) => x, + Err(x) => x, + } + } + + /// Stores a value into the atomic integer if the current value is the same as the + /// `current` value. + /// + /// The return value is a result indicating whether the new value was written and + /// containing the previous value. On success this value is guaranteed to be equal to + /// `new`. + /// + /// `compare_exchange` takes two `Ordering` arguments to describe the memory ordering of + /// this operation. The first describes the required ordering if the operation succeeds + /// while the second describes the required ordering when the operation fails. The + /// failure ordering can't be `Release` or `AcqRel` and must be equivalent or weaker + /// than the success ordering. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let some_isize = AtomicIsize::new(5); + /// + /// assert_eq!(some_isize.compare_exchange(5, 10, + /// Ordering::Acquire, + /// Ordering::Relaxed), + /// Ok(5)); + /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); + /// + /// assert_eq!(some_isize.compare_exchange(6, 12, + /// Ordering::SeqCst, + /// Ordering::Acquire), + /// Err(10)); + /// assert_eq!(some_isize.load(Ordering::Relaxed), 10); + /// ``` + #[inline] + #[$stable_cxchg] + pub fn compare_exchange(&self, + current: $int_type, + new: $int_type, + success: Ordering, + failure: Ordering) -> Result<$int_type, $int_type> { + unsafe { atomic_compare_exchange(self.v.get(), current, new, success, failure) } + } + + /// Stores a value into the atomic integer if the current value is the same as the + /// `current` value. + /// + /// Unlike `compare_exchange`, this function is allowed to spuriously fail even when the + /// comparison succeeds, which can result in more efficient code on some platforms. The + /// return value is a result indicating whether the new value was written and containing + /// the previous value. + /// + /// `compare_exchange_weak` takes two `Ordering` arguments to describe the memory + /// ordering of this operation. 
The first describes the required ordering if the + /// operation succeeds while the second describes the required ordering when the + /// operation fails. The failure ordering can't be `Release` or `AcqRel` and must be + /// equivalent or weaker than the success ordering. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let val = AtomicIsize::new(4); + /// + /// let mut old = val.load(Ordering::Relaxed); + /// loop { + /// let new = old * 2; + /// match val.compare_exchange_weak(old, new, Ordering::SeqCst, Ordering::Relaxed) { + /// Ok(_) => break, + /// Err(x) => old = x, + /// } + /// } + /// ``` + #[inline] + #[$stable_cxchg] + pub fn compare_exchange_weak(&self, + current: $int_type, + new: $int_type, + success: Ordering, + failure: Ordering) -> Result<$int_type, $int_type> { + unsafe { + atomic_compare_exchange_weak(self.v.get(), current, new, success, failure) + } + } + + /// Add to the current value, returning the previous value. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let foo = AtomicIsize::new(0); + /// assert_eq!(foo.fetch_add(10, Ordering::SeqCst), 0); + /// assert_eq!(foo.load(Ordering::SeqCst), 10); + /// ``` + #[inline] + #[$stable] + pub fn fetch_add(&self, val: $int_type, order: Ordering) -> $int_type { + unsafe { atomic_add(self.v.get(), val, order) } + } + + /// Subtract from the current value, returning the previous value. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let foo = AtomicIsize::new(0); + /// assert_eq!(foo.fetch_sub(10, Ordering::SeqCst), 0); + /// assert_eq!(foo.load(Ordering::SeqCst), -10); + /// ``` + #[inline] + #[$stable] + pub fn fetch_sub(&self, val: $int_type, order: Ordering) -> $int_type { + unsafe { atomic_sub(self.v.get(), val, order) } + } + + /// Bitwise and with the current value, returning the previous value. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let foo = AtomicIsize::new(0b101101); + /// assert_eq!(foo.fetch_and(0b110011, Ordering::SeqCst), 0b101101); + /// assert_eq!(foo.load(Ordering::SeqCst), 0b100001); + #[inline] + #[$stable] + pub fn fetch_and(&self, val: $int_type, order: Ordering) -> $int_type { + unsafe { atomic_and(self.v.get(), val, order) } + } + + /// Bitwise or with the current value, returning the previous value. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let foo = AtomicIsize::new(0b101101); + /// assert_eq!(foo.fetch_or(0b110011, Ordering::SeqCst), 0b101101); + /// assert_eq!(foo.load(Ordering::SeqCst), 0b111111); + #[inline] + #[$stable] + pub fn fetch_or(&self, val: $int_type, order: Ordering) -> $int_type { + unsafe { atomic_or(self.v.get(), val, order) } + } + + /// Bitwise xor with the current value, returning the previous value. + /// + /// # Examples + /// + /// ``` + /// use std::sync::atomic::{AtomicIsize, Ordering}; + /// + /// let foo = AtomicIsize::new(0b101101); + /// assert_eq!(foo.fetch_xor(0b110011, Ordering::SeqCst), 0b101101); + /// assert_eq!(foo.load(Ordering::SeqCst), 0b011110); + #[inline] + #[$stable] + pub fn fetch_xor(&self, val: $int_type, order: Ordering) -> $int_type { + unsafe { atomic_xor(self.v.get(), val, order) } + } + } + } +} + +#[cfg(target_has_atomic = "8")] +atomic_int! 
{ + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + i8 AtomicI8 ATOMIC_I8_INIT +} +#[cfg(target_has_atomic = "8")] +atomic_int! { + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + u8 AtomicU8 ATOMIC_U8_INIT +} +#[cfg(target_has_atomic = "16")] +atomic_int! { + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + i16 AtomicI16 ATOMIC_I16_INIT +} +#[cfg(target_has_atomic = "16")] +atomic_int! { + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + u16 AtomicU16 ATOMIC_U16_INIT +} +#[cfg(target_has_atomic = "32")] +atomic_int! { + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + i32 AtomicI32 ATOMIC_I32_INIT +} +#[cfg(target_has_atomic = "32")] +atomic_int! { + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + u32 AtomicU32 ATOMIC_U32_INIT +} +#[cfg(target_has_atomic = "64")] +atomic_int! { + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + i64 AtomicI64 ATOMIC_I64_INIT +} +#[cfg(target_has_atomic = "64")] +atomic_int! 
{ + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + unstable(feature = "integer_atomics", issue = "32976"), + u64 AtomicU64 ATOMIC_U64_INIT +} +#[cfg(any(stage0, target_has_atomic = "ptr"))] +atomic_int!{ + stable(feature = "rust1", since = "1.0.0"), + stable(feature = "extended_compare_and_swap", since = "1.10.0"), + stable(feature = "atomic_debug", since = "1.3.0"), + isize AtomicIsize ATOMIC_ISIZE_INIT +} +#[cfg(any(stage0, target_has_atomic = "ptr"))] +atomic_int!{ + stable(feature = "rust1", since = "1.0.0"), + stable(feature = "extended_compare_and_swap", since = "1.10.0"), + stable(feature = "atomic_debug", since = "1.3.0"), + usize AtomicUsize ATOMIC_USIZE_INIT +} + #[inline] fn strongest_failure_ordering(order: Ordering) -> Ordering { match order { @@ -1380,7 +1190,6 @@ unsafe fn atomic_sub(dst: *mut T, val: T, order: Ordering) -> T { } #[inline] -#[cfg(any(not(stage0), cargobuild))] unsafe fn atomic_compare_exchange(dst: *mut T, old: T, new: T, @@ -1407,29 +1216,6 @@ unsafe fn atomic_compare_exchange(dst: *mut T, } } -#[inline] -#[cfg(all(stage0, not(cargobuild)))] -unsafe fn atomic_compare_exchange(dst: *mut T, - old: T, - new: T, - success: Ordering, - _: Ordering) -> Result - where T: ::cmp::Eq + ::marker::Copy -{ - let val = match success { - Acquire => intrinsics::atomic_cxchg_acq(dst, old, new), - Release => intrinsics::atomic_cxchg_rel(dst, old, new), - AcqRel => intrinsics::atomic_cxchg_acqrel(dst, old, new), - Relaxed => intrinsics::atomic_cxchg_relaxed(dst, old, new), - SeqCst => intrinsics::atomic_cxchg(dst, old, new), - }; - if val == old { - Ok(val) - } else { - Err(val) - } -} - #[inline] unsafe fn atomic_compare_exchange_weak(dst: *mut T, old: T, @@ -1468,18 +1254,6 @@ unsafe fn atomic_and(dst: *mut T, val: T, order: Ordering) -> T { } } -#[inline] -unsafe fn atomic_nand(dst: *mut T, val: T, order: Ordering) -> T { - match order { - Acquire => intrinsics::atomic_nand_acq(dst, val), - Release => intrinsics::atomic_nand_rel(dst, val), - AcqRel => intrinsics::atomic_nand_acqrel(dst, val), - Relaxed => intrinsics::atomic_nand_relaxed(dst, val), - SeqCst => intrinsics::atomic_nand(dst, val) - } -} - - #[inline] unsafe fn atomic_or(dst: *mut T, val: T, order: Ordering) -> T { match order { @@ -1491,7 +1265,6 @@ unsafe fn atomic_or(dst: *mut T, val: T, order: Ordering) -> T { } } - #[inline] unsafe fn atomic_xor(dst: *mut T, val: T, order: Ordering) -> T { match order { @@ -1503,7 +1276,6 @@ unsafe fn atomic_xor(dst: *mut T, val: T, order: Ordering) -> T { } } - /// An atomic fence. /// /// A fence 'A' which has `Release` ordering semantics, synchronizes with a @@ -1538,19 +1310,16 @@ pub fn fence(order: Ordering) { } } -macro_rules! 
impl_Debug { - ($($t:ident)*) => ($( - #[stable(feature = "atomic_debug", since = "1.3.0")] - impl fmt::Debug for $t { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.debug_tuple(stringify!($t)).field(&self.load(Ordering::SeqCst)).finish() - } - } - )*); -} -impl_Debug!{ AtomicUsize AtomicIsize AtomicBool } +#[cfg(any(stage0, target_has_atomic = "8"))] +#[stable(feature = "atomic_debug", since = "1.3.0")] +impl fmt::Debug for AtomicBool { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.debug_tuple("AtomicBool").field(&self.load(Ordering::SeqCst)).finish() + } +} +#[cfg(any(stage0, target_has_atomic = "ptr"))] #[stable(feature = "atomic_debug", since = "1.3.0")] impl fmt::Debug for AtomicPtr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { diff --git a/src/libcoretest/char.rs b/src/libcoretest/char.rs index 41fd742c9e..e959e71daf 100644 --- a/src/libcoretest/char.rs +++ b/src/libcoretest/char.rs @@ -262,4 +262,37 @@ fn ed_iterator_specializations() { assert_eq!('\''.escape_default().last(), Some('\'')); } +#[test] +fn eu_iterator_specializations() { + fn check(c: char) { + let len = c.escape_unicode().count(); + + // Check OoB + assert_eq!(c.escape_unicode().nth(len), None); + + // For all possible in-bound offsets + let mut iter = c.escape_unicode(); + for offset in 0..len { + // Check last + assert_eq!(iter.clone().last(), Some('}')); + // Check counting + assert_eq!(iter.clone().count(), len - offset); + + // Check nth + assert_eq!(c.escape_unicode().nth(offset), iter.next()); + } + + // Check post-last + assert_eq!(iter.clone().last(), None); + assert_eq!(iter.clone().count(), 0); + } + + check('\u{0}'); + check('\u{1}'); + check('\u{12}'); + check('\u{123}'); + check('\u{1234}'); + check('\u{12340}'); + check('\u{10FFFF}'); +} diff --git a/src/libcoretest/iter.rs b/src/libcoretest/iter.rs index 6c0cb03b5f..54fca291e5 100644 --- a/src/libcoretest/iter.rs +++ b/src/libcoretest/iter.rs @@ -133,6 +133,19 @@ fn test_iterator_chain_count() { assert_eq!(zs.iter().chain(&ys).count(), 4); } +#[test] +fn test_iterator_chain_find() { + let xs = [0, 1, 2, 3, 4, 5]; + let ys = [30, 40, 50, 60]; + let mut iter = xs.iter().chain(&ys); + assert_eq!(iter.find(|&&i| i == 4), Some(&4)); + assert_eq!(iter.next(), Some(&5)); + assert_eq!(iter.find(|&&i| i == 40), Some(&40)); + assert_eq!(iter.next(), Some(&50)); + assert_eq!(iter.find(|&&i| i == 100), None); + assert_eq!(iter.next(), None); +} + #[test] fn test_filter_map() { let it = (0..).step_by(1).take(10) @@ -889,15 +902,6 @@ fn test_range_step() { assert_eq!((isize::MIN..isize::MAX).step_by(1).size_hint(), (usize::MAX, Some(usize::MAX))); } -#[test] -fn test_peekable_is_empty() { - let a = [1]; - let mut it = a.iter().peekable(); - assert!( !it.is_empty() ); - it.next(); - assert!( it.is_empty() ); -} - #[test] fn test_repeat() { let mut it = repeat(42); diff --git a/src/libcoretest/lib.rs b/src/libcoretest/lib.rs index aa7ab4b4e3..88d73df937 100644 --- a/src/libcoretest/lib.rs +++ b/src/libcoretest/lib.rs @@ -15,7 +15,6 @@ #![feature(box_syntax)] #![feature(cell_extras)] #![feature(const_fn)] -#![feature(core_float)] #![feature(core_private_bignum)] #![feature(core_private_diy_float)] #![feature(dec2flt)] @@ -25,7 +24,6 @@ #![feature(iter_arith)] #![feature(libc)] #![feature(nonzero)] -#![feature(peekable_is_empty)] #![feature(rand)] #![feature(raw)] #![feature(slice_patterns)] @@ -34,6 +32,7 @@ #![feature(unboxed_closures)] #![feature(unicode)] #![feature(unique)] +#![feature(try_from)] extern crate core; extern 
crate test; diff --git a/src/libcoretest/num/mod.rs b/src/libcoretest/num/mod.rs index 11c1bd667f..4834c0e072 100644 --- a/src/libcoretest/num/mod.rs +++ b/src/libcoretest/num/mod.rs @@ -8,10 +8,13 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use core::convert::TryFrom; use core::cmp::PartialEq; use core::fmt::Debug; -use core::ops::{Add, Sub, Mul, Div, Rem}; use core::marker::Copy; +use core::ops::{Add, Sub, Mul, Div, Rem}; +use core::option::Option; +use core::option::Option::{Some, None}; #[macro_use] mod int_macros; @@ -48,169 +51,318 @@ pub fn test_num(ten: T, two: T) where assert_eq!(ten.rem(two), ten % two); } -#[cfg(test)] -mod tests { - use core::option::Option; - use core::option::Option::{Some, None}; - use core::num::Float; - - #[test] - fn from_str_issue7588() { - let u : Option = u8::from_str_radix("1000", 10).ok(); - assert_eq!(u, None); - let s : Option = i16::from_str_radix("80000", 10).ok(); - assert_eq!(s, None); - } +#[test] +fn from_str_issue7588() { + let u : Option = u8::from_str_radix("1000", 10).ok(); + assert_eq!(u, None); + let s : Option = i16::from_str_radix("80000", 10).ok(); + assert_eq!(s, None); +} - #[test] - fn test_int_from_str_overflow() { - let mut i8_val: i8 = 127; - assert_eq!("127".parse::().ok(), Some(i8_val)); - assert_eq!("128".parse::().ok(), None); +#[test] +fn test_int_from_str_overflow() { + let mut i8_val: i8 = 127; + assert_eq!("127".parse::().ok(), Some(i8_val)); + assert_eq!("128".parse::().ok(), None); - i8_val = i8_val.wrapping_add(1); - assert_eq!("-128".parse::().ok(), Some(i8_val)); - assert_eq!("-129".parse::().ok(), None); + i8_val = i8_val.wrapping_add(1); + assert_eq!("-128".parse::().ok(), Some(i8_val)); + assert_eq!("-129".parse::().ok(), None); - let mut i16_val: i16 = 32_767; - assert_eq!("32767".parse::().ok(), Some(i16_val)); - assert_eq!("32768".parse::().ok(), None); + let mut i16_val: i16 = 32_767; + assert_eq!("32767".parse::().ok(), Some(i16_val)); + assert_eq!("32768".parse::().ok(), None); - i16_val = i16_val.wrapping_add(1); - assert_eq!("-32768".parse::().ok(), Some(i16_val)); - assert_eq!("-32769".parse::().ok(), None); + i16_val = i16_val.wrapping_add(1); + assert_eq!("-32768".parse::().ok(), Some(i16_val)); + assert_eq!("-32769".parse::().ok(), None); - let mut i32_val: i32 = 2_147_483_647; - assert_eq!("2147483647".parse::().ok(), Some(i32_val)); - assert_eq!("2147483648".parse::().ok(), None); + let mut i32_val: i32 = 2_147_483_647; + assert_eq!("2147483647".parse::().ok(), Some(i32_val)); + assert_eq!("2147483648".parse::().ok(), None); - i32_val = i32_val.wrapping_add(1); - assert_eq!("-2147483648".parse::().ok(), Some(i32_val)); - assert_eq!("-2147483649".parse::().ok(), None); + i32_val = i32_val.wrapping_add(1); + assert_eq!("-2147483648".parse::().ok(), Some(i32_val)); + assert_eq!("-2147483649".parse::().ok(), None); - let mut i64_val: i64 = 9_223_372_036_854_775_807; - assert_eq!("9223372036854775807".parse::().ok(), Some(i64_val)); - assert_eq!("9223372036854775808".parse::().ok(), None); + let mut i64_val: i64 = 9_223_372_036_854_775_807; + assert_eq!("9223372036854775807".parse::().ok(), Some(i64_val)); + assert_eq!("9223372036854775808".parse::().ok(), None); - i64_val = i64_val.wrapping_add(1); - assert_eq!("-9223372036854775808".parse::().ok(), Some(i64_val)); - assert_eq!("-9223372036854775809".parse::().ok(), None); - } + i64_val = i64_val.wrapping_add(1); + assert_eq!("-9223372036854775808".parse::().ok(), Some(i64_val)); + 
assert_eq!("-9223372036854775809".parse::().ok(), None); +} + +#[test] +fn test_leading_plus() { + assert_eq!("+127".parse::().ok(), Some(127)); + assert_eq!("+9223372036854775807".parse::().ok(), Some(9223372036854775807)); +} - #[test] - fn test_leading_plus() { - assert_eq!("+127".parse::().ok(), Some(127)); - assert_eq!("+9223372036854775807".parse::().ok(), Some(9223372036854775807)); +#[test] +fn test_invalid() { + assert_eq!("--129".parse::().ok(), None); + assert_eq!("++129".parse::().ok(), None); + assert_eq!("Съешь".parse::().ok(), None); +} + +#[test] +fn test_empty() { + assert_eq!("-".parse::().ok(), None); + assert_eq!("+".parse::().ok(), None); + assert_eq!("".parse::().ok(), None); +} + +macro_rules! test_impl_from { + ($fn_name: ident, $Small: ty, $Large: ty) => { + #[test] + fn $fn_name() { + let small_max = <$Small>::max_value(); + let small_min = <$Small>::min_value(); + let large_max: $Large = small_max.into(); + let large_min: $Large = small_min.into(); + assert_eq!(large_max as $Small, small_max); + assert_eq!(large_min as $Small, small_min); + } } +} + +// Unsigned -> Unsigned +test_impl_from! { test_u8u16, u8, u16 } +test_impl_from! { test_u8u32, u8, u32 } +test_impl_from! { test_u8u64, u8, u64 } +test_impl_from! { test_u8usize, u8, usize } +test_impl_from! { test_u16u32, u16, u32 } +test_impl_from! { test_u16u64, u16, u64 } +test_impl_from! { test_u32u64, u32, u64 } + +// Signed -> Signed +test_impl_from! { test_i8i16, i8, i16 } +test_impl_from! { test_i8i32, i8, i32 } +test_impl_from! { test_i8i64, i8, i64 } +test_impl_from! { test_i8isize, i8, isize } +test_impl_from! { test_i16i32, i16, i32 } +test_impl_from! { test_i16i64, i16, i64 } +test_impl_from! { test_i32i64, i32, i64 } + +// Unsigned -> Signed +test_impl_from! { test_u8i16, u8, i16 } +test_impl_from! { test_u8i32, u8, i32 } +test_impl_from! { test_u8i64, u8, i64 } +test_impl_from! { test_u16i32, u16, i32 } +test_impl_from! { test_u16i64, u16, i64 } +test_impl_from! { test_u32i64, u32, i64 } + +// Signed -> Float +test_impl_from! { test_i8f32, i8, f32 } +test_impl_from! { test_i8f64, i8, f64 } +test_impl_from! { test_i16f32, i16, f32 } +test_impl_from! { test_i16f64, i16, f64 } +test_impl_from! { test_i32f64, i32, f64 } + +// Unsigned -> Float +test_impl_from! { test_u8f32, u8, f32 } +test_impl_from! { test_u8f64, u8, f64 } +test_impl_from! { test_u16f32, u16, f32 } +test_impl_from! { test_u16f64, u16, f64 } +test_impl_from! 
{ test_u32f64, u32, f64 } + +// Float -> Float +#[test] +fn test_f32f64() { + use core::f32; + + let max: f64 = f32::MAX.into(); + assert_eq!(max as f32, f32::MAX); + assert!(max.is_normal()); + + let min: f64 = f32::MIN.into(); + assert_eq!(min as f32, f32::MIN); + assert!(min.is_normal()); + + let min_positive: f64 = f32::MIN_POSITIVE.into(); + assert_eq!(min_positive as f32, f32::MIN_POSITIVE); + assert!(min_positive.is_normal()); + + let epsilon: f64 = f32::EPSILON.into(); + assert_eq!(epsilon as f32, f32::EPSILON); + assert!(epsilon.is_normal()); + + let zero: f64 = (0.0f32).into(); + assert_eq!(zero as f32, 0.0f32); + assert!(zero.is_sign_positive()); + + let neg_zero: f64 = (-0.0f32).into(); + assert_eq!(neg_zero as f32, -0.0f32); + assert!(neg_zero.is_sign_negative()); + + let infinity: f64 = f32::INFINITY.into(); + assert_eq!(infinity as f32, f32::INFINITY); + assert!(infinity.is_infinite()); + assert!(infinity.is_sign_positive()); + + let neg_infinity: f64 = f32::NEG_INFINITY.into(); + assert_eq!(neg_infinity as f32, f32::NEG_INFINITY); + assert!(neg_infinity.is_infinite()); + assert!(neg_infinity.is_sign_negative()); + + let nan: f64 = f32::NAN.into(); + assert!(nan.is_nan()); +} - #[test] - fn test_invalid() { - assert_eq!("--129".parse::().ok(), None); - assert_eq!("++129".parse::().ok(), None); - assert_eq!("Съешь".parse::().ok(), None); +macro_rules! test_impl_try_from_always_ok { + ($fn_name:ident, $source:ty, $target: ty) => { + #[test] + fn $fn_name() { + let max = <$source>::max_value(); + let min = <$source>::min_value(); + let zero: $source = 0; + assert_eq!(<$target as TryFrom<$source>>::try_from(max).unwrap(), + max as $target); + assert_eq!(<$target as TryFrom<$source>>::try_from(min).unwrap(), + min as $target); + assert_eq!(<$target as TryFrom<$source>>::try_from(zero).unwrap(), + zero as $target); + } } +} - #[test] - fn test_empty() { - assert_eq!("-".parse::().ok(), None); - assert_eq!("+".parse::().ok(), None); - assert_eq!("".parse::().ok(), None); +test_impl_try_from_always_ok! { test_try_u8u8, u8, u8 } +test_impl_try_from_always_ok! { test_try_u8u16, u8, u16 } +test_impl_try_from_always_ok! { test_try_u8u32, u8, u32 } +test_impl_try_from_always_ok! { test_try_u8u64, u8, u64 } +test_impl_try_from_always_ok! { test_try_u8i16, u8, i16 } +test_impl_try_from_always_ok! { test_try_u8i32, u8, i32 } +test_impl_try_from_always_ok! { test_try_u8i64, u8, i64 } + +test_impl_try_from_always_ok! { test_try_u16u16, u16, u16 } +test_impl_try_from_always_ok! { test_try_u16u32, u16, u32 } +test_impl_try_from_always_ok! { test_try_u16u64, u16, u64 } +test_impl_try_from_always_ok! { test_try_u16i32, u16, i32 } +test_impl_try_from_always_ok! { test_try_u16i64, u16, i64 } + +test_impl_try_from_always_ok! { test_try_u32u32, u32, u32 } +test_impl_try_from_always_ok! { test_try_u32u64, u32, u64 } +test_impl_try_from_always_ok! { test_try_u32i64, u32, i64 } + +test_impl_try_from_always_ok! { test_try_u64u64, u64, u64 } + +test_impl_try_from_always_ok! { test_try_i8i8, i8, i8 } +test_impl_try_from_always_ok! { test_try_i8i16, i8, i16 } +test_impl_try_from_always_ok! { test_try_i8i32, i8, i32 } +test_impl_try_from_always_ok! { test_try_i8i64, i8, i64 } + +test_impl_try_from_always_ok! { test_try_i16i16, i16, i16 } +test_impl_try_from_always_ok! { test_try_i16i32, i16, i32 } +test_impl_try_from_always_ok! { test_try_i16i64, i16, i64 } + +test_impl_try_from_always_ok! { test_try_i32i32, i32, i32 } +test_impl_try_from_always_ok! 
{ test_try_i32i64, i32, i64 } + +test_impl_try_from_always_ok! { test_try_i64i64, i64, i64 } + +macro_rules! test_impl_try_from_signed_to_unsigned_upper_ok { + ($fn_name:ident, $source:ty, $target:ty) => { + #[test] + fn $fn_name() { + let max = <$source>::max_value(); + let min = <$source>::min_value(); + let zero: $source = 0; + let neg_one: $source = -1; + assert_eq!(<$target as TryFrom<$source>>::try_from(max).unwrap(), + max as $target); + assert!(<$target as TryFrom<$source>>::try_from(min).is_err()); + assert_eq!(<$target as TryFrom<$source>>::try_from(zero).unwrap(), + zero as $target); + assert!(<$target as TryFrom<$source>>::try_from(neg_one).is_err()); + } } +} - macro_rules! test_impl_from { - ($fn_name: ident, $Small: ty, $Large: ty) => { - #[test] - fn $fn_name() { - let small_max = <$Small>::max_value(); - let small_min = <$Small>::min_value(); - let large_max: $Large = small_max.into(); - let large_min: $Large = small_min.into(); - assert_eq!(large_max as $Small, small_max); - assert_eq!(large_min as $Small, small_min); - } +test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i8u8, i8, u8 } +test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i8u16, i8, u16 } +test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i8u32, i8, u32 } +test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i8u64, i8, u64 } + +test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i16u16, i16, u16 } +test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i16u32, i16, u32 } +test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i16u64, i16, u64 } + +test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i32u32, i32, u32 } +test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i32u64, i32, u64 } + +test_impl_try_from_signed_to_unsigned_upper_ok! { test_try_i64u64, i64, u64 } + +macro_rules! test_impl_try_from_unsigned_to_signed_upper_err { + ($fn_name:ident, $source:ty, $target:ty) => { + #[test] + fn $fn_name() { + let max = <$source>::max_value(); + let min = <$source>::min_value(); + let zero: $source = 0; + assert!(<$target as TryFrom<$source>>::try_from(max).is_err()); + assert_eq!(<$target as TryFrom<$source>>::try_from(min).unwrap(), + min as $target); + assert_eq!(<$target as TryFrom<$source>>::try_from(zero).unwrap(), + zero as $target); } } +} - // Unsigned -> Unsigned - test_impl_from! { test_u8u16, u8, u16 } - test_impl_from! { test_u8u32, u8, u32 } - test_impl_from! { test_u8u64, u8, u64 } - test_impl_from! { test_u8usize, u8, usize } - test_impl_from! { test_u16u32, u16, u32 } - test_impl_from! { test_u16u64, u16, u64 } - test_impl_from! { test_u32u64, u32, u64 } - - // Signed -> Signed - test_impl_from! { test_i8i16, i8, i16 } - test_impl_from! { test_i8i32, i8, i32 } - test_impl_from! { test_i8i64, i8, i64 } - test_impl_from! { test_i8isize, i8, isize } - test_impl_from! { test_i16i32, i16, i32 } - test_impl_from! { test_i16i64, i16, i64 } - test_impl_from! { test_i32i64, i32, i64 } - - // Unsigned -> Signed - test_impl_from! { test_u8i16, u8, i16 } - test_impl_from! { test_u8i32, u8, i32 } - test_impl_from! { test_u8i64, u8, i64 } - test_impl_from! { test_u16i32, u16, i32 } - test_impl_from! { test_u16i64, u16, i64 } - test_impl_from! { test_u32i64, u32, i64 } - - // Signed -> Float - test_impl_from! { test_i8f32, i8, f32 } - test_impl_from! { test_i8f64, i8, f64 } - test_impl_from! { test_i16f32, i16, f32 } - test_impl_from! { test_i16f64, i16, f64 } - test_impl_from! { test_i32f64, i32, f64 } - - // Unsigned -> Float - test_impl_from! 
{ test_u8f32, u8, f32 } - test_impl_from! { test_u8f64, u8, f64 } - test_impl_from! { test_u16f32, u16, f32 } - test_impl_from! { test_u16f64, u16, f64 } - test_impl_from! { test_u32f64, u32, f64 } - - // Float -> Float - #[test] - fn test_f32f64() { - use core::f32; - - let max: f64 = f32::MAX.into(); - assert_eq!(max as f32, f32::MAX); - assert!(max.is_normal()); - - let min: f64 = f32::MIN.into(); - assert_eq!(min as f32, f32::MIN); - assert!(min.is_normal()); - - let min_positive: f64 = f32::MIN_POSITIVE.into(); - assert_eq!(min_positive as f32, f32::MIN_POSITIVE); - assert!(min_positive.is_normal()); - - let epsilon: f64 = f32::EPSILON.into(); - assert_eq!(epsilon as f32, f32::EPSILON); - assert!(epsilon.is_normal()); - - let zero: f64 = (0.0f32).into(); - assert_eq!(zero as f32, 0.0f32); - assert!(zero.is_sign_positive()); - - let neg_zero: f64 = (-0.0f32).into(); - assert_eq!(neg_zero as f32, -0.0f32); - assert!(neg_zero.is_sign_negative()); - - let infinity: f64 = f32::INFINITY.into(); - assert_eq!(infinity as f32, f32::INFINITY); - assert!(infinity.is_infinite()); - assert!(infinity.is_sign_positive()); - - let neg_infinity: f64 = f32::NEG_INFINITY.into(); - assert_eq!(neg_infinity as f32, f32::NEG_INFINITY); - assert!(neg_infinity.is_infinite()); - assert!(neg_infinity.is_sign_negative()); - - let nan: f64 = f32::NAN.into(); - assert!(nan.is_nan()); +test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u8i8, u8, i8 } + +test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u16i8, u16, i8 } +test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u16i16, u16, i16 } + +test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u32i8, u32, i8 } +test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u32i16, u32, i16 } +test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u32i32, u32, i32 } + +test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u64i8, u64, i8 } +test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u64i16, u64, i16 } +test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u64i32, u64, i32 } +test_impl_try_from_unsigned_to_signed_upper_err! { test_try_u64i64, u64, i64 } + +macro_rules! test_impl_try_from_same_sign_err { + ($fn_name:ident, $source:ty, $target:ty) => { + #[test] + fn $fn_name() { + let max = <$source>::max_value(); + let min = <$source>::min_value(); + let zero: $source = 0; + let t_max = <$target>::max_value(); + let t_min = <$target>::min_value(); + assert!(<$target as TryFrom<$source>>::try_from(max).is_err()); + if min != 0 { + assert!(<$target as TryFrom<$source>>::try_from(min).is_err()); + } + assert_eq!(<$target as TryFrom<$source>>::try_from(zero).unwrap(), + zero as $target); + assert_eq!(<$target as TryFrom<$source>>::try_from(t_max as $source) + .unwrap(), + t_max as $target); + assert_eq!(<$target as TryFrom<$source>>::try_from(t_min as $source) + .unwrap(), + t_min as $target); + } } } + +test_impl_try_from_same_sign_err! { test_try_u16u8, u16, u8 } + +test_impl_try_from_same_sign_err! { test_try_u32u8, u32, u8 } +test_impl_try_from_same_sign_err! { test_try_u32u16, u32, u16 } + +test_impl_try_from_same_sign_err! { test_try_u64u8, u64, u8 } +test_impl_try_from_same_sign_err! { test_try_u64u16, u64, u16 } +test_impl_try_from_same_sign_err! { test_try_u64u32, u64, u32 } + +test_impl_try_from_same_sign_err! { test_try_i16i8, i16, i8 } + +test_impl_try_from_same_sign_err! { test_try_i32i8, i32, i8 } +test_impl_try_from_same_sign_err! 
{ test_try_i32i16, i32, i16 } + +test_impl_try_from_same_sign_err! { test_try_i64i8, i64, i8 } +test_impl_try_from_same_sign_err! { test_try_i64i16, i64, i16 } +test_impl_try_from_same_sign_err! { test_try_i64i32, i64, i32 } diff --git a/src/libflate/lib.rs b/src/libflate/lib.rs index 1cc008c5ee..b578b064d6 100644 --- a/src/libflate/lib.rs +++ b/src/libflate/lib.rs @@ -27,11 +27,7 @@ #![feature(libc)] #![feature(staged_api)] #![feature(unique)] -#![cfg_attr(test, feature(rustc_private, rand))] - -#[cfg(test)] -#[macro_use] -extern crate log; +#![cfg_attr(test, feature(rand))] extern crate libc; @@ -175,14 +171,8 @@ mod tests { for _ in 0..2000 { input.extend_from_slice(r.choose(&words).unwrap()); } - debug!("de/inflate of {} bytes of random word-sequences", - input.len()); let cmp = deflate_bytes(&input); let out = inflate_bytes(&cmp).unwrap(); - debug!("{} bytes deflated to {} ({:.1}% size)", - input.len(), - cmp.len(), - 100.0 * ((cmp.len() as f64) / (input.len() as f64))); assert_eq!(&*input, &*out); } } diff --git a/src/liblibc/.travis.yml b/src/liblibc/.travis.yml index 11a830e41f..0d76d4cdcc 100644 --- a/src/liblibc/.travis.yml +++ b/src/liblibc/.travis.yml @@ -14,8 +14,8 @@ script: elif [[ $TRAVIS_RUST_VERSION = "1.0.0" ]]; then cargo build; else - cargo build; - cargo build --no-default-features; + cargo build && + cargo build --no-default-features && rustc ci/style.rs && ./style src; fi os: diff --git a/src/liblibc/Cargo.toml b/src/liblibc/Cargo.toml index 36c6d199d3..3faf83f87c 100644 --- a/src/liblibc/Cargo.toml +++ b/src/liblibc/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "libc" -version = "0.2.9" +version = "0.2.11" authors = ["The Rust Project Developers"] license = "MIT/Apache-2.0" readme = "README.md" diff --git a/src/liblibc/README.md b/src/liblibc/README.md index a822744dce..09b9a567b4 100644 --- a/src/liblibc/README.md +++ b/src/liblibc/README.md @@ -88,6 +88,16 @@ With that in mind, the steps for adding a new API are: 4. Wait for CI to pass, fixing errors. 5. Wait for a merge! +### Test before you commit + +We have two automated tests running on [Travis](https://travis-ci.org/rust-lang/libc): + +1. [`libc-test`](https://github.com/alexcrichton/ctest) + - `cd libc-test && cargo run` + - Use the `skip_*()` functions in `build.rs` if you really need a workaround. +2. Style checker + - `rustc ci/style.rs && ./style src` + ## Platforms and Documentation The following platforms are currently tested and have documentation available: @@ -101,9 +111,8 @@ Tested: * [`i686-apple-darwin`](https://doc.rust-lang.org/libc/i686-apple-darwin/libc/) * [`x86_64-apple-darwin`](https://doc.rust-lang.org/libc/x86_64-apple-darwin/libc/) (OSX) - * [`i686-apple-ios`](https://doc.rust-lang.org/libc/i686-apple-ios/libc/) - * [`x86_64-apple-ios`](https://doc.rust-lang.org/libc/x86_64-apple-ios/libc/) - (iOS) + * `i686-apple-ios` + * `x86_64-apple-ios` * [`i686-unknown-linux-gnu`](https://doc.rust-lang.org/libc/i686-unknown-linux-gnu/libc/) * [`x86_64-unknown-linux-gnu`](https://doc.rust-lang.org/libc/x86_64-unknown-linux-gnu/libc/) (Linux) diff --git a/src/liblibc/ci/run-travis.sh b/src/liblibc/ci/run-travis.sh index d9f708aa12..3881e6ea0b 100644 --- a/src/liblibc/ci/run-travis.sh +++ b/src/liblibc/ci/run-travis.sh @@ -109,9 +109,9 @@ cp ci/cargo-config .cargo/config # Next up we need to install the standard library for the version of Rust that # we're testing. 
if [ "$TRAVIS" = "true" ]; then - curl -s $MAIN_TARGETS/rust-std-$TRAVIS_RUST_VERSION-$TARGET.tar.gz | \ - tar xzf - -C $HOME/rust/lib/rustlib --strip-components=4 \ - rust-std-$TRAVIS_RUST_VERSION-$TARGET/rust-std-$TARGET/lib/rustlib/$TARGET + curl https://static.rust-lang.org/rustup.sh | \ + sh -s -- --add-target=$TARGET --disable-sudo -y \ + --prefix=`rustc --print sysroot` fi # If we're testing with a docker image, then run tests entirely within that diff --git a/src/liblibc/libc-test/Cargo.lock b/src/liblibc/libc-test/Cargo.lock index 007ba034ec..e2b6664a90 100644 --- a/src/liblibc/libc-test/Cargo.lock +++ b/src/liblibc/libc-test/Cargo.lock @@ -3,7 +3,7 @@ name = "libc-test" version = "0.1.0" dependencies = [ "ctest 0.1.0 (git+https://github.com/alexcrichton/ctest)", - "libc 0.2.9", + "libc 0.2.11", ] [[package]] @@ -14,20 +14,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "ctest" version = "0.1.0" -source = "git+https://github.com/alexcrichton/ctest#50ac771acb7bb45cf0c182a5a9c8188a15c89efc" +source = "git+https://github.com/alexcrichton/ctest#a6becb6d7fd23d9863cba86eac31d1ffc4082734" dependencies = [ - "gcc 0.3.25 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "syntex_syntax 0.19.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] name = "gcc" -version = "0.3.25" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "kernel32-sys" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", @@ -41,24 +41,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] name = "libc" -version = "0.2.8" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "libc" -version = "0.2.9" +version = "0.2.11" [[package]] name = "log" -version = "0.3.5" +version = "0.3.6" source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "libc 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)", -] [[package]] name = "rustc-serialize" -version = "0.3.18" +version = "0.3.19" source = "registry+https://github.com/rust-lang/crates.io-index" [[package]] @@ -68,8 +60,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ "bitflags 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)", - "log 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)", - "rustc-serialize 0.3.18 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)", + "rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)", "term 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)", "unicode-xid 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -79,7 +71,7 @@ name = "term" version = "0.2.14" source = "registry+https://github.com/rust-lang/crates.io-index" dependencies = [ - "kernel32-sys 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", "winapi 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)", ] diff --git a/src/liblibc/libc-test/build.rs b/src/liblibc/libc-test/build.rs index 
984a63ac8f..a3626937ba 100644 --- a/src/liblibc/libc-test/build.rs +++ b/src/liblibc/libc-test/build.rs @@ -95,15 +95,18 @@ fn main() { cfg.header("sched.h"); cfg.header("termios.h"); cfg.header("poll.h"); + cfg.header("syslog.h"); } if android { cfg.header("arpa/inet.h"); cfg.header("time64.h"); + cfg.header("xlocale.h"); } else if !windows { cfg.header("glob.h"); cfg.header("ifaddrs.h"); cfg.header("sys/statvfs.h"); + cfg.header("langinfo.h"); if !openbsd && !freebsd && !dragonfly { cfg.header("sys/quota.h"); @@ -114,6 +117,7 @@ fn main() { if !netbsd && !openbsd { cfg.header("execinfo.h"); + cfg.header("xlocale.h"); } } } @@ -338,6 +342,7 @@ fn main() { "getrlimit" | "getrlimit64" | // non-int in 1st arg "setrlimit" | "setrlimit64" | // non-int in 1st arg + "prlimit" | "prlimit64" | // non-int in 2nd arg "strerror_r" if linux => true, // actually xpg-something-or-other // typed 2nd arg on linux and android @@ -394,12 +399,6 @@ fn main() { cfg.skip_fn_ptrcheck(move |name| { match name { - // This used to be called bsd_signal in rev 18 of the android - // platform and is now just called signal, the old `bsd_signal` - // symbol, however, still remains, just gives a different function - // pointer. - "signal" if android => true, - // dllimport weirdness? _ if windows => true, diff --git a/src/liblibc/src/lib.rs b/src/liblibc/src/lib.rs index 2593e3ba6b..2886e159da 100644 --- a/src/liblibc/src/lib.rs +++ b/src/liblibc/src/lib.rs @@ -232,6 +232,7 @@ extern { pub fn strpbrk(cs: *const c_char, ct: *const c_char) -> *mut c_char; pub fn strstr(cs: *const c_char, ct: *const c_char) -> *mut c_char; pub fn strlen(cs: *const c_char) -> size_t; + pub fn strnlen(cs: *const c_char, maxlen: size_t) -> size_t; #[cfg_attr(all(target_os = "macos", target_arch = "x86"), link_name = "strerror$UNIX2003")] pub fn strerror(n: c_int) -> *mut c_char; diff --git a/src/liblibc/src/unix/bsd/apple/mod.rs b/src/liblibc/src/unix/bsd/apple/mod.rs index f4c3e197d1..dbf8b6e5fd 100644 --- a/src/liblibc/src/unix/bsd/apple/mod.rs +++ b/src/liblibc/src/unix/bsd/apple/mod.rs @@ -18,6 +18,7 @@ pub type fsblkcnt_t = ::c_uint; pub type fsfilcnt_t = ::c_uint; pub type speed_t = ::c_ulong; pub type tcflag_t = ::c_ulong; +pub type nl_item = ::c_int; pub enum timezone {} @@ -286,6 +287,88 @@ s! 
{ } } +pub const LC_COLLATE_MASK: ::c_int = (1 << 0); +pub const LC_CTYPE_MASK: ::c_int = (1 << 1); +pub const LC_MESSAGES_MASK: ::c_int = (1 << 2); +pub const LC_MONETARY_MASK: ::c_int = (1 << 3); +pub const LC_NUMERIC_MASK: ::c_int = (1 << 4); +pub const LC_TIME_MASK: ::c_int = (1 << 5); +pub const LC_ALL_MASK: ::c_int = LC_COLLATE_MASK + | LC_CTYPE_MASK + | LC_MESSAGES_MASK + | LC_MONETARY_MASK + | LC_NUMERIC_MASK + | LC_TIME_MASK; + +pub const CODESET: ::nl_item = 0; +pub const D_T_FMT: ::nl_item = 1; +pub const D_FMT: ::nl_item = 2; +pub const T_FMT: ::nl_item = 3; +pub const T_FMT_AMPM: ::nl_item = 4; +pub const AM_STR: ::nl_item = 5; +pub const PM_STR: ::nl_item = 6; + +pub const DAY_1: ::nl_item = 7; +pub const DAY_2: ::nl_item = 8; +pub const DAY_3: ::nl_item = 9; +pub const DAY_4: ::nl_item = 10; +pub const DAY_5: ::nl_item = 11; +pub const DAY_6: ::nl_item = 12; +pub const DAY_7: ::nl_item = 13; + +pub const ABDAY_1: ::nl_item = 14; +pub const ABDAY_2: ::nl_item = 15; +pub const ABDAY_3: ::nl_item = 16; +pub const ABDAY_4: ::nl_item = 17; +pub const ABDAY_5: ::nl_item = 18; +pub const ABDAY_6: ::nl_item = 19; +pub const ABDAY_7: ::nl_item = 20; + +pub const MON_1: ::nl_item = 21; +pub const MON_2: ::nl_item = 22; +pub const MON_3: ::nl_item = 23; +pub const MON_4: ::nl_item = 24; +pub const MON_5: ::nl_item = 25; +pub const MON_6: ::nl_item = 26; +pub const MON_7: ::nl_item = 27; +pub const MON_8: ::nl_item = 28; +pub const MON_9: ::nl_item = 29; +pub const MON_10: ::nl_item = 30; +pub const MON_11: ::nl_item = 31; +pub const MON_12: ::nl_item = 32; + +pub const ABMON_1: ::nl_item = 33; +pub const ABMON_2: ::nl_item = 34; +pub const ABMON_3: ::nl_item = 35; +pub const ABMON_4: ::nl_item = 36; +pub const ABMON_5: ::nl_item = 37; +pub const ABMON_6: ::nl_item = 38; +pub const ABMON_7: ::nl_item = 39; +pub const ABMON_8: ::nl_item = 40; +pub const ABMON_9: ::nl_item = 41; +pub const ABMON_10: ::nl_item = 42; +pub const ABMON_11: ::nl_item = 43; +pub const ABMON_12: ::nl_item = 44; + +pub const ERA: ::nl_item = 45; +pub const ERA_D_FMT: ::nl_item = 46; +pub const ERA_D_T_FMT: ::nl_item = 47; +pub const ERA_T_FMT: ::nl_item = 48; +pub const ALT_DIGITS: ::nl_item = 49; + +pub const RADIXCHAR: ::nl_item = 50; +pub const THOUSEP: ::nl_item = 51; + +pub const YESEXPR: ::nl_item = 52; +pub const NOEXPR: ::nl_item = 53; + +pub const YESSTR: ::nl_item = 54; +pub const NOSTR: ::nl_item = 55; + +pub const CRNCYSTR: ::nl_item = 56; + +pub const D_MD_ORDER: ::nl_item = 57; + pub const EXIT_FAILURE: ::c_int = 1; pub const EXIT_SUCCESS: ::c_int = 0; pub const RAND_MAX: ::c_int = 2147483647; @@ -355,6 +438,7 @@ pub const SIGINT: ::c_int = 2; pub const SIGQUIT: ::c_int = 3; pub const SIGILL: ::c_int = 4; pub const SIGABRT: ::c_int = 6; +pub const SIGEMT: ::c_int = 7; pub const SIGFPE: ::c_int = 8; pub const SIGKILL: ::c_int = 9; pub const SIGSEGV: ::c_int = 11; @@ -642,6 +726,7 @@ pub const RLIMIT_DATA: ::c_int = 2; pub const RLIMIT_STACK: ::c_int = 3; pub const RLIMIT_CORE: ::c_int = 4; pub const RLIMIT_AS: ::c_int = 5; +pub const RLIMIT_RSS: ::c_int = RLIMIT_AS; pub const RLIMIT_MEMLOCK: ::c_int = 6; pub const RLIMIT_NPROC: ::c_int = 7; pub const RLIMIT_NOFILE: ::c_int = 8; @@ -869,8 +954,6 @@ pub const FD_SETSIZE: usize = 1024; pub const ST_NOSUID: ::c_ulong = 2; -pub const HW_AVAILCPU: ::c_int = 25; - pub const EVFILT_AIO: ::int16_t = 0xfffd; pub const EVFILT_PROC: ::int16_t = 0xfffb; pub const EVFILT_READ: ::int16_t = 0xffff; @@ -942,6 +1025,241 @@ pub const RTLD_NODELETE: ::c_int = 0x80; pub 
const RTLD_NOLOAD: ::c_int = 0x10; pub const RTLD_GLOBAL: ::c_int = 0x8; +pub const _WSTOPPED: ::c_int = 0o177; + +pub const LOG_NETINFO: ::c_int = 12 << 3; +pub const LOG_REMOTEAUTH: ::c_int = 13 << 3; +pub const LOG_INSTALL: ::c_int = 14 << 3; +pub const LOG_RAS: ::c_int = 15 << 3; +pub const LOG_LAUNCHD: ::c_int = 24 << 3; +pub const LOG_NFACILITIES: ::c_int = 25; + +pub const CTLTYPE: ::c_int = 0xf; +pub const CTLTYPE_NODE: ::c_int = 1; +pub const CTLTYPE_INT: ::c_int = 2; +pub const CTLTYPE_STRING: ::c_int = 3; +pub const CTLTYPE_QUAD: ::c_int = 4; +pub const CTLTYPE_OPAQUE: ::c_int = 5; +pub const CTLTYPE_STRUCT: ::c_int = CTLTYPE_OPAQUE; +pub const CTLFLAG_RD: ::c_int = 0x80000000; +pub const CTLFLAG_WR: ::c_int = 0x40000000; +pub const CTLFLAG_RW: ::c_int = CTLFLAG_RD | CTLFLAG_WR; +pub const CTLFLAG_NOLOCK: ::c_int = 0x20000000; +pub const CTLFLAG_ANYBODY: ::c_int = 0x10000000; +pub const CTLFLAG_SECURE: ::c_int = 0x08000000; +pub const CTLFLAG_MASKED: ::c_int = 0x04000000; +pub const CTLFLAG_NOAUTO: ::c_int = 0x02000000; +pub const CTLFLAG_KERN: ::c_int = 0x01000000; +pub const CTLFLAG_LOCKED: ::c_int = 0x00800000; +pub const CTLFLAG_OID2: ::c_int = 0x00400000; +pub const CTL_UNSPEC: ::c_int = 0; +pub const CTL_KERN: ::c_int = 1; +pub const CTL_VM: ::c_int = 2; +pub const CTL_VFS: ::c_int = 3; +pub const CTL_NET: ::c_int = 4; +pub const CTL_DEBUG: ::c_int = 5; +pub const CTL_HW: ::c_int = 6; +pub const CTL_MACHDEP: ::c_int = 7; +pub const CTL_USER: ::c_int = 8; +pub const CTL_MAXID: ::c_int = 9; +pub const KERN_OSTYPE: ::c_int = 1; +pub const KERN_OSRELEASE: ::c_int = 2; +pub const KERN_OSREV: ::c_int = 3; +pub const KERN_VERSION: ::c_int = 4; +pub const KERN_MAXVNODES: ::c_int = 5; +pub const KERN_MAXPROC: ::c_int = 6; +pub const KERN_MAXFILES: ::c_int = 7; +pub const KERN_ARGMAX: ::c_int = 8; +pub const KERN_SECURELVL: ::c_int = 9; +pub const KERN_HOSTNAME: ::c_int = 10; +pub const KERN_HOSTID: ::c_int = 11; +pub const KERN_CLOCKRATE: ::c_int = 12; +pub const KERN_VNODE: ::c_int = 13; +pub const KERN_PROC: ::c_int = 14; +pub const KERN_FILE: ::c_int = 15; +pub const KERN_PROF: ::c_int = 16; +pub const KERN_POSIX1: ::c_int = 17; +pub const KERN_NGROUPS: ::c_int = 18; +pub const KERN_JOB_CONTROL: ::c_int = 19; +pub const KERN_SAVED_IDS: ::c_int = 20; +pub const KERN_BOOTTIME: ::c_int = 21; +pub const KERN_NISDOMAINNAME: ::c_int = 22; +pub const KERN_DOMAINNAME: ::c_int = KERN_NISDOMAINNAME; +pub const KERN_MAXPARTITIONS: ::c_int = 23; +pub const KERN_KDEBUG: ::c_int = 24; +pub const KERN_UPDATEINTERVAL: ::c_int = 25; +pub const KERN_OSRELDATE: ::c_int = 26; +pub const KERN_NTP_PLL: ::c_int = 27; +pub const KERN_BOOTFILE: ::c_int = 28; +pub const KERN_MAXFILESPERPROC: ::c_int = 29; +pub const KERN_MAXPROCPERUID: ::c_int = 30; +pub const KERN_DUMPDEV: ::c_int = 31; +pub const KERN_IPC: ::c_int = 32; +pub const KERN_DUMMY: ::c_int = 33; +pub const KERN_PS_STRINGS: ::c_int = 34; +pub const KERN_USRSTACK32: ::c_int = 35; +pub const KERN_LOGSIGEXIT: ::c_int = 36; +pub const KERN_SYMFILE: ::c_int = 37; +pub const KERN_PROCARGS: ::c_int = 38; +pub const KERN_NETBOOT: ::c_int = 40; +pub const KERN_SYSV: ::c_int = 42; +pub const KERN_AFFINITY: ::c_int = 43; +pub const KERN_TRANSLATE: ::c_int = 44; +pub const KERN_CLASSIC: ::c_int = KERN_TRANSLATE; +pub const KERN_EXEC: ::c_int = 45; +pub const KERN_CLASSICHANDLER: ::c_int = KERN_EXEC; +pub const KERN_AIOMAX: ::c_int = 46; +pub const KERN_AIOPROCMAX: ::c_int = 47; +pub const KERN_AIOTHREADS: ::c_int = 48; +pub const KERN_COREFILE: ::c_int = 
50; +pub const KERN_COREDUMP: ::c_int = 51; +pub const KERN_SUGID_COREDUMP: ::c_int = 52; +pub const KERN_PROCDELAYTERM: ::c_int = 53; +pub const KERN_SHREG_PRIVATIZABLE: ::c_int = 54; +pub const KERN_LOW_PRI_WINDOW: ::c_int = 56; +pub const KERN_LOW_PRI_DELAY: ::c_int = 57; +pub const KERN_POSIX: ::c_int = 58; +pub const KERN_USRSTACK64: ::c_int = 59; +pub const KERN_NX_PROTECTION: ::c_int = 60; +pub const KERN_TFP: ::c_int = 61; +pub const KERN_PROCNAME: ::c_int = 62; +pub const KERN_THALTSTACK: ::c_int = 63; +pub const KERN_SPECULATIVE_READS: ::c_int = 64; +pub const KERN_OSVERSION: ::c_int = 65; +pub const KERN_SAFEBOOT: ::c_int = 66; +pub const KERN_RAGEVNODE: ::c_int = 68; +pub const KERN_TTY: ::c_int = 69; +pub const KERN_CHECKOPENEVT: ::c_int = 70; +pub const KERN_THREADNAME: ::c_int = 71; +pub const KERN_MAXID: ::c_int = 72; +pub const KERN_RAGE_PROC: ::c_int = 1; +pub const KERN_RAGE_THREAD: ::c_int = 2; +pub const KERN_UNRAGE_PROC: ::c_int = 3; +pub const KERN_UNRAGE_THREAD: ::c_int = 4; +pub const KERN_OPENEVT_PROC: ::c_int = 1; +pub const KERN_UNOPENEVT_PROC: ::c_int = 2; +pub const KERN_TFP_POLICY: ::c_int = 1; +pub const KERN_TFP_POLICY_DENY: ::c_int = 0; +pub const KERN_TFP_POLICY_DEFAULT: ::c_int = 2; +pub const KERN_KDEFLAGS: ::c_int = 1; +pub const KERN_KDDFLAGS: ::c_int = 2; +pub const KERN_KDENABLE: ::c_int = 3; +pub const KERN_KDSETBUF: ::c_int = 4; +pub const KERN_KDGETBUF: ::c_int = 5; +pub const KERN_KDSETUP: ::c_int = 6; +pub const KERN_KDREMOVE: ::c_int = 7; +pub const KERN_KDSETREG: ::c_int = 8; +pub const KERN_KDGETREG: ::c_int = 9; +pub const KERN_KDREADTR: ::c_int = 10; +pub const KERN_KDPIDTR: ::c_int = 11; +pub const KERN_KDTHRMAP: ::c_int = 12; +pub const KERN_KDPIDEX: ::c_int = 14; +pub const KERN_KDSETRTCDEC: ::c_int = 15; +pub const KERN_KDGETENTROPY: ::c_int = 16; +pub const KERN_KDWRITETR: ::c_int = 17; +pub const KERN_KDWRITEMAP: ::c_int = 18; +pub const KERN_KDENABLE_BG_TRACE: ::c_int = 19; +pub const KERN_KDDISABLE_BG_TRACE: ::c_int = 20; +pub const KERN_KDREADCURTHRMAP: ::c_int = 21; +pub const KERN_KDSET_TYPEFILTER: ::c_int = 22; +pub const KERN_KDBUFWAIT: ::c_int = 23; +pub const KERN_KDCPUMAP: ::c_int = 24; +pub const KERN_PROC_ALL: ::c_int = 0; +pub const KERN_PROC_PID: ::c_int = 1; +pub const KERN_PROC_PGRP: ::c_int = 2; +pub const KERN_PROC_SESSION: ::c_int = 3; +pub const KERN_PROC_TTY: ::c_int = 4; +pub const KERN_PROC_UID: ::c_int = 5; +pub const KERN_PROC_RUID: ::c_int = 6; +pub const KERN_PROC_LCID: ::c_int = 7; +pub const KIPC_MAXSOCKBUF: ::c_int = 1; +pub const KIPC_SOCKBUF_WASTE: ::c_int = 2; +pub const KIPC_SOMAXCONN: ::c_int = 3; +pub const KIPC_MAX_LINKHDR: ::c_int = 4; +pub const KIPC_MAX_PROTOHDR: ::c_int = 5; +pub const KIPC_MAX_HDR: ::c_int = 6; +pub const KIPC_MAX_DATALEN: ::c_int = 7; +pub const KIPC_MBSTAT: ::c_int = 8; +pub const KIPC_NMBCLUSTERS: ::c_int = 9; +pub const KIPC_SOQLIMITCOMPAT: ::c_int = 10; +pub const VM_METER: ::c_int = 1; +pub const VM_LOADAVG: ::c_int = 2; +pub const VM_MACHFACTOR: ::c_int = 4; +pub const VM_SWAPUSAGE: ::c_int = 5; +pub const VM_MAXID: ::c_int = 6; +pub const HW_MACHINE: ::c_int = 1; +pub const HW_MODEL: ::c_int = 2; +pub const HW_NCPU: ::c_int = 3; +pub const HW_BYTEORDER: ::c_int = 4; +pub const HW_PHYSMEM: ::c_int = 5; +pub const HW_USERMEM: ::c_int = 6; +pub const HW_PAGESIZE: ::c_int = 7; +pub const HW_DISKNAMES: ::c_int = 8; +pub const HW_DISKSTATS: ::c_int = 9; +pub const HW_EPOCH: ::c_int = 10; +pub const HW_FLOATINGPT: ::c_int = 11; +pub const HW_MACHINE_ARCH: ::c_int = 12; +pub 
const HW_VECTORUNIT: ::c_int = 13; +pub const HW_BUS_FREQ: ::c_int = 14; +pub const HW_CPU_FREQ: ::c_int = 15; +pub const HW_CACHELINE: ::c_int = 16; +pub const HW_L1ICACHESIZE: ::c_int = 17; +pub const HW_L1DCACHESIZE: ::c_int = 18; +pub const HW_L2SETTINGS: ::c_int = 19; +pub const HW_L2CACHESIZE: ::c_int = 20; +pub const HW_L3SETTINGS: ::c_int = 21; +pub const HW_L3CACHESIZE: ::c_int = 22; +pub const HW_TB_FREQ: ::c_int = 23; +pub const HW_MEMSIZE: ::c_int = 24; +pub const HW_AVAILCPU: ::c_int = 25; +pub const HW_MAXID: ::c_int = 26; +pub const USER_CS_PATH: ::c_int = 1; +pub const USER_BC_BASE_MAX: ::c_int = 2; +pub const USER_BC_DIM_MAX: ::c_int = 3; +pub const USER_BC_SCALE_MAX: ::c_int = 4; +pub const USER_BC_STRING_MAX: ::c_int = 5; +pub const USER_COLL_WEIGHTS_MAX: ::c_int = 6; +pub const USER_EXPR_NEST_MAX: ::c_int = 7; +pub const USER_LINE_MAX: ::c_int = 8; +pub const USER_RE_DUP_MAX: ::c_int = 9; +pub const USER_POSIX2_VERSION: ::c_int = 10; +pub const USER_POSIX2_C_BIND: ::c_int = 11; +pub const USER_POSIX2_C_DEV: ::c_int = 12; +pub const USER_POSIX2_CHAR_TERM: ::c_int = 13; +pub const USER_POSIX2_FORT_DEV: ::c_int = 14; +pub const USER_POSIX2_FORT_RUN: ::c_int = 15; +pub const USER_POSIX2_LOCALEDEF: ::c_int = 16; +pub const USER_POSIX2_SW_DEV: ::c_int = 17; +pub const USER_POSIX2_UPE: ::c_int = 18; +pub const USER_STREAM_MAX: ::c_int = 19; +pub const USER_TZNAME_MAX: ::c_int = 20; +pub const USER_MAXID: ::c_int = 21; +pub const CTL_DEBUG_NAME: ::c_int = 0; +pub const CTL_DEBUG_VALUE: ::c_int = 1; +pub const CTL_DEBUG_MAXID: ::c_int = 20; + +f! { + pub fn WSTOPSIG(status: ::c_int) -> ::c_int { + status >> 8 + } + + pub fn _WSTATUS(status: ::c_int) -> ::c_int { + status & 0x7f + } + + pub fn WIFCONTINUED(status: ::c_int) -> bool { + _WSTATUS(status) == _WSTOPPED && WSTOPSIG(status) == 0x13 + } + + pub fn WIFSIGNALED(status: ::c_int) -> bool { + _WSTATUS(status) != _WSTOPPED && _WSTATUS(status) != 0 + } + + pub fn WIFSTOPPED(status: ::c_int) -> bool { + _WSTATUS(status) == _WSTOPPED && WSTOPSIG(status) != 0x13 + } +} + extern { pub fn getnameinfo(sa: *const ::sockaddr, salen: ::socklen_t, @@ -1027,6 +1345,14 @@ extern { name: *mut ::c_char, termp: *mut termios, winp: *mut ::winsize) -> ::pid_t; + pub fn duplocale(base: ::locale_t) -> ::locale_t; + pub fn freelocale(loc: ::locale_t) -> ::c_int; + pub fn localeconv_l(loc: ::locale_t) -> *mut lconv; + pub fn newlocale(mask: ::c_int, + locale: *const ::c_char, + base: ::locale_t) -> ::locale_t; + pub fn uselocale(loc: ::locale_t) -> ::locale_t; + pub fn querylocale(mask: ::c_int, loc: ::locale_t) -> *const ::c_char; } cfg_if! { diff --git a/src/liblibc/src/unix/bsd/freebsdlike/dragonfly/mod.rs b/src/liblibc/src/unix/bsd/freebsdlike/dragonfly/mod.rs index 141b2ad25d..6d31ad8db8 100644 --- a/src/liblibc/src/unix/bsd/freebsdlike/dragonfly/mod.rs +++ b/src/liblibc/src/unix/bsd/freebsdlike/dragonfly/mod.rs @@ -83,7 +83,6 @@ s! 
{ pub const RAND_MAX: ::c_int = 0x7fff_ffff; pub const PTHREAD_STACK_MIN: ::size_t = 1024; -pub const KERN_PROC_PATHNAME: ::c_int = 9; pub const SIGSTKSZ: ::size_t = 40960; pub const MADV_INVAL: ::c_int = 10; pub const O_CLOEXEC: ::c_int = 0x00020000; @@ -112,6 +111,138 @@ pub const CLOCK_SECOND: clockid_t = 13; pub const CLOCK_THREAD_CPUTIME_ID: clockid_t = 14; pub const CLOCK_PROCESS_CPUTIME_ID: clockid_t = 15; +pub const CTL_UNSPEC: ::c_int = 0; +pub const CTL_KERN: ::c_int = 1; +pub const CTL_VM: ::c_int = 2; +pub const CTL_VFS: ::c_int = 3; +pub const CTL_NET: ::c_int = 4; +pub const CTL_DEBUG: ::c_int = 5; +pub const CTL_HW: ::c_int = 6; +pub const CTL_MACHDEP: ::c_int = 7; +pub const CTL_USER: ::c_int = 8; +pub const CTL_P1003_1B: ::c_int = 9; +pub const CTL_LWKT: ::c_int = 10; +pub const CTL_MAXID: ::c_int = 11; +pub const KERN_OSTYPE: ::c_int = 1; +pub const KERN_OSRELEASE: ::c_int = 2; +pub const KERN_OSREV: ::c_int = 3; +pub const KERN_VERSION: ::c_int = 4; +pub const KERN_MAXVNODES: ::c_int = 5; +pub const KERN_MAXPROC: ::c_int = 6; +pub const KERN_MAXFILES: ::c_int = 7; +pub const KERN_ARGMAX: ::c_int = 8; +pub const KERN_SECURELVL: ::c_int = 9; +pub const KERN_HOSTNAME: ::c_int = 10; +pub const KERN_HOSTID: ::c_int = 11; +pub const KERN_CLOCKRATE: ::c_int = 12; +pub const KERN_VNODE: ::c_int = 13; +pub const KERN_PROC: ::c_int = 14; +pub const KERN_FILE: ::c_int = 15; +pub const KERN_PROF: ::c_int = 16; +pub const KERN_POSIX1: ::c_int = 17; +pub const KERN_NGROUPS: ::c_int = 18; +pub const KERN_JOB_CONTROL: ::c_int = 19; +pub const KERN_SAVED_IDS: ::c_int = 20; +pub const KERN_BOOTTIME: ::c_int = 21; +pub const KERN_NISDOMAINNAME: ::c_int = 22; +pub const KERN_UPDATEINTERVAL: ::c_int = 23; +pub const KERN_OSRELDATE: ::c_int = 24; +pub const KERN_NTP_PLL: ::c_int = 25; +pub const KERN_BOOTFILE: ::c_int = 26; +pub const KERN_MAXFILESPERPROC: ::c_int = 27; +pub const KERN_MAXPROCPERUID: ::c_int = 28; +pub const KERN_DUMPDEV: ::c_int = 29; +pub const KERN_IPC: ::c_int = 30; +pub const KERN_DUMMY: ::c_int = 31; +pub const KERN_PS_STRINGS: ::c_int = 32; +pub const KERN_USRSTACK: ::c_int = 33; +pub const KERN_LOGSIGEXIT: ::c_int = 34; +pub const KERN_IOV_MAX: ::c_int = 35; +pub const KERN_MAXPOSIXLOCKSPERUID: ::c_int = 36; +pub const KERN_MAXID: ::c_int = 37; +pub const KERN_PROC_ALL: ::c_int = 0; +pub const KERN_PROC_PID: ::c_int = 1; +pub const KERN_PROC_PGRP: ::c_int = 2; +pub const KERN_PROC_SESSION: ::c_int = 3; +pub const KERN_PROC_TTY: ::c_int = 4; +pub const KERN_PROC_UID: ::c_int = 5; +pub const KERN_PROC_RUID: ::c_int = 6; +pub const KERN_PROC_ARGS: ::c_int = 7; +pub const KERN_PROC_CWD: ::c_int = 8; +pub const KERN_PROC_PATHNAME: ::c_int = 9; +pub const KERN_PROC_FLAGMASK: ::c_int = 0x10; +pub const KERN_PROC_FLAG_LWP: ::c_int = 0x10; +pub const KIPC_MAXSOCKBUF: ::c_int = 1; +pub const KIPC_SOCKBUF_WASTE: ::c_int = 2; +pub const KIPC_SOMAXCONN: ::c_int = 3; +pub const KIPC_MAX_LINKHDR: ::c_int = 4; +pub const KIPC_MAX_PROTOHDR: ::c_int = 5; +pub const KIPC_MAX_HDR: ::c_int = 6; +pub const KIPC_MAX_DATALEN: ::c_int = 7; +pub const KIPC_MBSTAT: ::c_int = 8; +pub const KIPC_NMBCLUSTERS: ::c_int = 9; +pub const HW_MACHINE: ::c_int = 1; +pub const HW_MODEL: ::c_int = 2; +pub const HW_NCPU: ::c_int = 3; +pub const HW_BYTEORDER: ::c_int = 4; +pub const HW_PHYSMEM: ::c_int = 5; +pub const HW_USERMEM: ::c_int = 6; +pub const HW_PAGESIZE: ::c_int = 7; +pub const HW_DISKNAMES: ::c_int = 8; +pub const HW_DISKSTATS: ::c_int = 9; +pub const HW_FLOATINGPT: ::c_int = 10; +pub const 
HW_MACHINE_ARCH: ::c_int = 11; +pub const HW_MACHINE_PLATFORM: ::c_int = 12; +pub const HW_SENSORS: ::c_int = 13; +pub const HW_MAXID: ::c_int = 14; +pub const USER_CS_PATH: ::c_int = 1; +pub const USER_BC_BASE_MAX: ::c_int = 2; +pub const USER_BC_DIM_MAX: ::c_int = 3; +pub const USER_BC_SCALE_MAX: ::c_int = 4; +pub const USER_BC_STRING_MAX: ::c_int = 5; +pub const USER_COLL_WEIGHTS_MAX: ::c_int = 6; +pub const USER_EXPR_NEST_MAX: ::c_int = 7; +pub const USER_LINE_MAX: ::c_int = 8; +pub const USER_RE_DUP_MAX: ::c_int = 9; +pub const USER_POSIX2_VERSION: ::c_int = 10; +pub const USER_POSIX2_C_BIND: ::c_int = 11; +pub const USER_POSIX2_C_DEV: ::c_int = 12; +pub const USER_POSIX2_CHAR_TERM: ::c_int = 13; +pub const USER_POSIX2_FORT_DEV: ::c_int = 14; +pub const USER_POSIX2_FORT_RUN: ::c_int = 15; +pub const USER_POSIX2_LOCALEDEF: ::c_int = 16; +pub const USER_POSIX2_SW_DEV: ::c_int = 17; +pub const USER_POSIX2_UPE: ::c_int = 18; +pub const USER_STREAM_MAX: ::c_int = 19; +pub const USER_TZNAME_MAX: ::c_int = 20; +pub const USER_MAXID: ::c_int = 21; +pub const CTL_P1003_1B_ASYNCHRONOUS_IO: ::c_int = 1; +pub const CTL_P1003_1B_MAPPED_FILES: ::c_int = 2; +pub const CTL_P1003_1B_MEMLOCK: ::c_int = 3; +pub const CTL_P1003_1B_MEMLOCK_RANGE: ::c_int = 4; +pub const CTL_P1003_1B_MEMORY_PROTECTION: ::c_int = 5; +pub const CTL_P1003_1B_MESSAGE_PASSING: ::c_int = 6; +pub const CTL_P1003_1B_PRIORITIZED_IO: ::c_int = 7; +pub const CTL_P1003_1B_PRIORITY_SCHEDULING: ::c_int = 8; +pub const CTL_P1003_1B_REALTIME_SIGNALS: ::c_int = 9; +pub const CTL_P1003_1B_SEMAPHORES: ::c_int = 10; +pub const CTL_P1003_1B_FSYNC: ::c_int = 11; +pub const CTL_P1003_1B_SHARED_MEMORY_OBJECTS: ::c_int = 12; +pub const CTL_P1003_1B_SYNCHRONIZED_IO: ::c_int = 13; +pub const CTL_P1003_1B_TIMERS: ::c_int = 14; +pub const CTL_P1003_1B_AIO_LISTIO_MAX: ::c_int = 15; +pub const CTL_P1003_1B_AIO_MAX: ::c_int = 16; +pub const CTL_P1003_1B_AIO_PRIO_DELTA_MAX: ::c_int = 17; +pub const CTL_P1003_1B_DELAYTIMER_MAX: ::c_int = 18; +pub const CTL_P1003_1B_UNUSED1: ::c_int = 19; +pub const CTL_P1003_1B_PAGESIZE: ::c_int = 20; +pub const CTL_P1003_1B_RTSIG_MAX: ::c_int = 21; +pub const CTL_P1003_1B_SEM_NSEMS_MAX: ::c_int = 22; +pub const CTL_P1003_1B_SEM_VALUE_MAX: ::c_int = 23; +pub const CTL_P1003_1B_SIGQUEUE_MAX: ::c_int = 24; +pub const CTL_P1003_1B_TIMER_MAX: ::c_int = 25; +pub const CTL_P1003_1B_MAXID: ::c_int = 26; + extern { pub fn mprotect(addr: *mut ::c_void, len: ::size_t, prot: ::c_int) -> ::c_int; diff --git a/src/liblibc/src/unix/bsd/freebsdlike/freebsd/mod.rs b/src/liblibc/src/unix/bsd/freebsdlike/freebsd/mod.rs index ae521f28c5..1596a93cf8 100644 --- a/src/liblibc/src/unix/bsd/freebsdlike/freebsd/mod.rs +++ b/src/liblibc/src/unix/bsd/freebsdlike/freebsd/mod.rs @@ -34,7 +34,6 @@ s! 
{ pub const RAND_MAX: ::c_int = 0x7fff_fffd; pub const PTHREAD_STACK_MIN: ::size_t = 2048; -pub const KERN_PROC_PATHNAME: ::c_int = 12; pub const SIGSTKSZ: ::size_t = 34816; pub const SF_NODISKIO: ::c_int = 0x00000001; pub const SF_MNOWAIT: ::c_int = 0x00000002; @@ -76,6 +75,150 @@ pub const CLOCK_SECOND: clockid_t = 13; pub const CLOCK_THREAD_CPUTIME_ID: clockid_t = 14; pub const CLOCK_PROCESS_CPUTIME_ID: clockid_t = 15; +pub const CTL_UNSPEC: ::c_int = 0; +pub const CTL_KERN: ::c_int = 1; +pub const CTL_VM: ::c_int = 2; +pub const CTL_VFS: ::c_int = 3; +pub const CTL_NET: ::c_int = 4; +pub const CTL_DEBUG: ::c_int = 5; +pub const CTL_HW: ::c_int = 6; +pub const CTL_MACHDEP: ::c_int = 7; +pub const CTL_USER: ::c_int = 8; +pub const CTL_P1003_1B: ::c_int = 9; +pub const CTL_MAXID: ::c_int = 10; +pub const KERN_OSTYPE: ::c_int = 1; +pub const KERN_OSRELEASE: ::c_int = 2; +pub const KERN_OSREV: ::c_int = 3; +pub const KERN_VERSION: ::c_int = 4; +pub const KERN_MAXVNODES: ::c_int = 5; +pub const KERN_MAXPROC: ::c_int = 6; +pub const KERN_MAXFILES: ::c_int = 7; +pub const KERN_ARGMAX: ::c_int = 8; +pub const KERN_SECURELVL: ::c_int = 9; +pub const KERN_HOSTNAME: ::c_int = 10; +pub const KERN_HOSTID: ::c_int = 11; +pub const KERN_CLOCKRATE: ::c_int = 12; +pub const KERN_VNODE: ::c_int = 13; +pub const KERN_PROC: ::c_int = 14; +pub const KERN_FILE: ::c_int = 15; +pub const KERN_PROF: ::c_int = 16; +pub const KERN_POSIX1: ::c_int = 17; +pub const KERN_NGROUPS: ::c_int = 18; +pub const KERN_JOB_CONTROL: ::c_int = 19; +pub const KERN_SAVED_IDS: ::c_int = 20; +pub const KERN_BOOTTIME: ::c_int = 21; +pub const KERN_NISDOMAINNAME: ::c_int = 22; +pub const KERN_UPDATEINTERVAL: ::c_int = 23; +pub const KERN_OSRELDATE: ::c_int = 24; +pub const KERN_NTP_PLL: ::c_int = 25; +pub const KERN_BOOTFILE: ::c_int = 26; +pub const KERN_MAXFILESPERPROC: ::c_int = 27; +pub const KERN_MAXPROCPERUID: ::c_int = 28; +pub const KERN_DUMPDEV: ::c_int = 29; +pub const KERN_IPC: ::c_int = 30; +pub const KERN_DUMMY: ::c_int = 31; +pub const KERN_PS_STRINGS: ::c_int = 32; +pub const KERN_USRSTACK: ::c_int = 33; +pub const KERN_LOGSIGEXIT: ::c_int = 34; +pub const KERN_IOV_MAX: ::c_int = 35; +pub const KERN_HOSTUUID: ::c_int = 36; +pub const KERN_ARND: ::c_int = 37; +pub const KERN_MAXID: ::c_int = 38; +pub const KERN_PROC_ALL: ::c_int = 0; +pub const KERN_PROC_PID: ::c_int = 1; +pub const KERN_PROC_PGRP: ::c_int = 2; +pub const KERN_PROC_SESSION: ::c_int = 3; +pub const KERN_PROC_TTY: ::c_int = 4; +pub const KERN_PROC_UID: ::c_int = 5; +pub const KERN_PROC_RUID: ::c_int = 6; +pub const KERN_PROC_ARGS: ::c_int = 7; +pub const KERN_PROC_PROC: ::c_int = 8; +pub const KERN_PROC_SV_NAME: ::c_int = 9; +pub const KERN_PROC_RGID: ::c_int = 10; +pub const KERN_PROC_GID: ::c_int = 11; +pub const KERN_PROC_PATHNAME: ::c_int = 12; +pub const KERN_PROC_OVMMAP: ::c_int = 13; +pub const KERN_PROC_OFILEDESC: ::c_int = 14; +pub const KERN_PROC_KSTACK: ::c_int = 15; +pub const KERN_PROC_INC_THREAD: ::c_int = 0x10; +pub const KERN_PROC_VMMAP: ::c_int = 32; +pub const KERN_PROC_FILEDESC: ::c_int = 33; +pub const KERN_PROC_GROUPS: ::c_int = 34; +pub const KERN_PROC_ENV: ::c_int = 35; +pub const KERN_PROC_AUXV: ::c_int = 36; +pub const KERN_PROC_RLIMIT: ::c_int = 37; +pub const KERN_PROC_PS_STRINGS: ::c_int = 38; +pub const KERN_PROC_UMASK: ::c_int = 39; +pub const KERN_PROC_OSREL: ::c_int = 40; +pub const KERN_PROC_SIGTRAMP: ::c_int = 41; +pub const KIPC_MAXSOCKBUF: ::c_int = 1; +pub const KIPC_SOCKBUF_WASTE: ::c_int = 2; +pub const 
KIPC_SOMAXCONN: ::c_int = 3; +pub const KIPC_MAX_LINKHDR: ::c_int = 4; +pub const KIPC_MAX_PROTOHDR: ::c_int = 5; +pub const KIPC_MAX_HDR: ::c_int = 6; +pub const KIPC_MAX_DATALEN: ::c_int = 7; +pub const HW_MACHINE: ::c_int = 1; +pub const HW_MODEL: ::c_int = 2; +pub const HW_NCPU: ::c_int = 3; +pub const HW_BYTEORDER: ::c_int = 4; +pub const HW_PHYSMEM: ::c_int = 5; +pub const HW_USERMEM: ::c_int = 6; +pub const HW_PAGESIZE: ::c_int = 7; +pub const HW_DISKNAMES: ::c_int = 8; +pub const HW_DISKSTATS: ::c_int = 9; +pub const HW_FLOATINGPT: ::c_int = 10; +pub const HW_MACHINE_ARCH: ::c_int = 11; +pub const HW_REALMEM: ::c_int = 12; +pub const HW_MAXID: ::c_int = 13; +pub const USER_CS_PATH: ::c_int = 1; +pub const USER_BC_BASE_MAX: ::c_int = 2; +pub const USER_BC_DIM_MAX: ::c_int = 3; +pub const USER_BC_SCALE_MAX: ::c_int = 4; +pub const USER_BC_STRING_MAX: ::c_int = 5; +pub const USER_COLL_WEIGHTS_MAX: ::c_int = 6; +pub const USER_EXPR_NEST_MAX: ::c_int = 7; +pub const USER_LINE_MAX: ::c_int = 8; +pub const USER_RE_DUP_MAX: ::c_int = 9; +pub const USER_POSIX2_VERSION: ::c_int = 10; +pub const USER_POSIX2_C_BIND: ::c_int = 11; +pub const USER_POSIX2_C_DEV: ::c_int = 12; +pub const USER_POSIX2_CHAR_TERM: ::c_int = 13; +pub const USER_POSIX2_FORT_DEV: ::c_int = 14; +pub const USER_POSIX2_FORT_RUN: ::c_int = 15; +pub const USER_POSIX2_LOCALEDEF: ::c_int = 16; +pub const USER_POSIX2_SW_DEV: ::c_int = 17; +pub const USER_POSIX2_UPE: ::c_int = 18; +pub const USER_STREAM_MAX: ::c_int = 19; +pub const USER_TZNAME_MAX: ::c_int = 20; +pub const USER_MAXID: ::c_int = 21; +pub const CTL_P1003_1B_ASYNCHRONOUS_IO: ::c_int = 1; +pub const CTL_P1003_1B_MAPPED_FILES: ::c_int = 2; +pub const CTL_P1003_1B_MEMLOCK: ::c_int = 3; +pub const CTL_P1003_1B_MEMLOCK_RANGE: ::c_int = 4; +pub const CTL_P1003_1B_MEMORY_PROTECTION: ::c_int = 5; +pub const CTL_P1003_1B_MESSAGE_PASSING: ::c_int = 6; +pub const CTL_P1003_1B_PRIORITIZED_IO: ::c_int = 7; +pub const CTL_P1003_1B_PRIORITY_SCHEDULING: ::c_int = 8; +pub const CTL_P1003_1B_REALTIME_SIGNALS: ::c_int = 9; +pub const CTL_P1003_1B_SEMAPHORES: ::c_int = 10; +pub const CTL_P1003_1B_FSYNC: ::c_int = 11; +pub const CTL_P1003_1B_SHARED_MEMORY_OBJECTS: ::c_int = 12; +pub const CTL_P1003_1B_SYNCHRONIZED_IO: ::c_int = 13; +pub const CTL_P1003_1B_TIMERS: ::c_int = 14; +pub const CTL_P1003_1B_AIO_LISTIO_MAX: ::c_int = 15; +pub const CTL_P1003_1B_AIO_MAX: ::c_int = 16; +pub const CTL_P1003_1B_AIO_PRIO_DELTA_MAX: ::c_int = 17; +pub const CTL_P1003_1B_DELAYTIMER_MAX: ::c_int = 18; +pub const CTL_P1003_1B_MQ_OPEN_MAX: ::c_int = 19; +pub const CTL_P1003_1B_PAGESIZE: ::c_int = 20; +pub const CTL_P1003_1B_RTSIG_MAX: ::c_int = 21; +pub const CTL_P1003_1B_SEM_NSEMS_MAX: ::c_int = 22; +pub const CTL_P1003_1B_SEM_VALUE_MAX: ::c_int = 23; +pub const CTL_P1003_1B_SIGQUEUE_MAX: ::c_int = 24; +pub const CTL_P1003_1B_TIMER_MAX: ::c_int = 25; +pub const CTL_P1003_1B_MAXID: ::c_int = 26; + extern { pub fn __error() -> *mut ::c_int; diff --git a/src/liblibc/src/unix/bsd/freebsdlike/mod.rs b/src/liblibc/src/unix/bsd/freebsdlike/mod.rs index 8828401e4b..43181c0a5f 100644 --- a/src/liblibc/src/unix/bsd/freebsdlike/mod.rs +++ b/src/liblibc/src/unix/bsd/freebsdlike/mod.rs @@ -9,6 +9,7 @@ pub type pthread_rwlock_t = *mut ::c_void; pub type pthread_key_t = ::c_int; pub type tcflag_t = ::c_uint; pub type speed_t = ::c_uint; +pub type nl_item = ::c_int; pub enum timezone {} @@ -149,6 +150,101 @@ s! 
{ } } +pub const LC_COLLATE_MASK: ::c_int = (1 << 0); +pub const LC_CTYPE_MASK: ::c_int = (1 << 1); +pub const LC_MESSAGES_MASK: ::c_int = (1 << 2); +pub const LC_MONETARY_MASK: ::c_int = (1 << 3); +pub const LC_NUMERIC_MASK: ::c_int = (1 << 4); +pub const LC_TIME_MASK: ::c_int = (1 << 5); +pub const LC_ALL_MASK: ::c_int = LC_COLLATE_MASK + | LC_CTYPE_MASK + | LC_MESSAGES_MASK + | LC_MONETARY_MASK + | LC_NUMERIC_MASK + | LC_TIME_MASK; + +pub const CODESET: ::nl_item = 0; +pub const D_T_FMT: ::nl_item = 1; +pub const D_FMT: ::nl_item = 2; +pub const T_FMT: ::nl_item = 3; +pub const T_FMT_AMPM: ::nl_item = 4; +pub const AM_STR: ::nl_item = 5; +pub const PM_STR: ::nl_item = 6; + +pub const DAY_1: ::nl_item = 7; +pub const DAY_2: ::nl_item = 8; +pub const DAY_3: ::nl_item = 9; +pub const DAY_4: ::nl_item = 10; +pub const DAY_5: ::nl_item = 11; +pub const DAY_6: ::nl_item = 12; +pub const DAY_7: ::nl_item = 13; + +pub const ABDAY_1: ::nl_item = 14; +pub const ABDAY_2: ::nl_item = 15; +pub const ABDAY_3: ::nl_item = 16; +pub const ABDAY_4: ::nl_item = 17; +pub const ABDAY_5: ::nl_item = 18; +pub const ABDAY_6: ::nl_item = 19; +pub const ABDAY_7: ::nl_item = 20; + +pub const MON_1: ::nl_item = 21; +pub const MON_2: ::nl_item = 22; +pub const MON_3: ::nl_item = 23; +pub const MON_4: ::nl_item = 24; +pub const MON_5: ::nl_item = 25; +pub const MON_6: ::nl_item = 26; +pub const MON_7: ::nl_item = 27; +pub const MON_8: ::nl_item = 28; +pub const MON_9: ::nl_item = 29; +pub const MON_10: ::nl_item = 30; +pub const MON_11: ::nl_item = 31; +pub const MON_12: ::nl_item = 32; + +pub const ABMON_1: ::nl_item = 33; +pub const ABMON_2: ::nl_item = 34; +pub const ABMON_3: ::nl_item = 35; +pub const ABMON_4: ::nl_item = 36; +pub const ABMON_5: ::nl_item = 37; +pub const ABMON_6: ::nl_item = 38; +pub const ABMON_7: ::nl_item = 39; +pub const ABMON_8: ::nl_item = 40; +pub const ABMON_9: ::nl_item = 41; +pub const ABMON_10: ::nl_item = 42; +pub const ABMON_11: ::nl_item = 43; +pub const ABMON_12: ::nl_item = 44; + +pub const ERA: ::nl_item = 45; +pub const ERA_D_FMT: ::nl_item = 46; +pub const ERA_D_T_FMT: ::nl_item = 47; +pub const ERA_T_FMT: ::nl_item = 48; +pub const ALT_DIGITS: ::nl_item = 49; + +pub const RADIXCHAR: ::nl_item = 50; +pub const THOUSEP: ::nl_item = 51; + +pub const YESEXPR: ::nl_item = 52; +pub const NOEXPR: ::nl_item = 53; + +pub const YESSTR: ::nl_item = 54; +pub const NOSTR: ::nl_item = 55; + +pub const CRNCYSTR: ::nl_item = 56; + +pub const D_MD_ORDER: ::nl_item = 57; + +pub const ALTMON_1: ::nl_item = 58; +pub const ALTMON_2: ::nl_item = 59; +pub const ALTMON_3: ::nl_item = 60; +pub const ALTMON_4: ::nl_item = 61; +pub const ALTMON_5: ::nl_item = 62; +pub const ALTMON_6: ::nl_item = 63; +pub const ALTMON_7: ::nl_item = 64; +pub const ALTMON_8: ::nl_item = 65; +pub const ALTMON_9: ::nl_item = 66; +pub const ALTMON_10: ::nl_item = 67; +pub const ALTMON_11: ::nl_item = 68; +pub const ALTMON_12: ::nl_item = 69; + pub const EXIT_FAILURE: ::c_int = 1; pub const EXIT_SUCCESS: ::c_int = 0; pub const EOF: ::c_int = -1; @@ -481,8 +577,6 @@ pub const LOCK_UN: ::c_int = 8; pub const O_SYNC: ::c_int = 128; pub const O_NONBLOCK: ::c_int = 4; -pub const CTL_KERN: ::c_int = 1; -pub const KERN_PROC: ::c_int = 14; pub const MAP_COPY: ::c_int = 0x0002; pub const MAP_RENAME: ::c_int = 0x0020; @@ -547,6 +641,7 @@ pub const _SC_SEM_NSEMS_MAX: ::c_int = 49; pub const _SC_SEM_VALUE_MAX: ::c_int = 50; pub const _SC_SIGQUEUE_MAX: ::c_int = 51; pub const _SC_TIMER_MAX: ::c_int = 52; +pub const _SC_HOST_NAME_MAX: 
::c_int = 72; pub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = 0 as *mut _; pub const PTHREAD_COND_INITIALIZER: pthread_cond_t = 0 as *mut _; @@ -568,6 +663,11 @@ pub const RTLD_NODELETE: ::c_int = 0x1000; pub const RTLD_NOLOAD: ::c_int = 0x2000; pub const RTLD_GLOBAL: ::c_int = 0x100; +pub const LOG_NTP: ::c_int = 12 << 3; +pub const LOG_SECURITY: ::c_int = 13 << 3; +pub const LOG_CONSOLE: ::c_int = 14 << 3; +pub const LOG_NFACILITIES: ::c_int = 24; + #[link(name = "util")] extern { pub fn getnameinfo(sa: *const ::sockaddr, @@ -626,7 +726,14 @@ extern { name: *mut ::c_char, termp: *mut termios, winp: *mut ::winsize) -> ::pid_t; - + pub fn nl_langinfo_l(item: ::nl_item, locale: ::locale_t) -> *mut ::c_char; + pub fn duplocale(base: ::locale_t) -> ::locale_t; + pub fn freelocale(loc: ::locale_t) -> ::c_int; + pub fn newlocale(mask: ::c_int, + locale: *const ::c_char, + base: ::locale_t) -> ::locale_t; + pub fn uselocale(loc: ::locale_t) -> ::locale_t; + pub fn querylocale(mask: ::c_int, loc: ::locale_t) -> *const ::c_char; pub fn pthread_set_name_np(tid: ::pthread_t, name: *const ::c_char); pub fn pthread_attr_get_np(tid: ::pthread_t, attr: *mut ::pthread_attr_t) -> ::c_int; diff --git a/src/liblibc/src/unix/bsd/mod.rs b/src/liblibc/src/unix/bsd/mod.rs index a280fc9263..d60bbc58bf 100644 --- a/src/liblibc/src/unix/bsd/mod.rs +++ b/src/liblibc/src/unix/bsd/mod.rs @@ -163,6 +163,7 @@ pub const SIGXFSZ: ::c_int = 25; pub const SIGVTALRM: ::c_int = 26; pub const SIGPROF: ::c_int = 27; pub const SIGWINCH: ::c_int = 28; +pub const SIGINFO: ::c_int = 29; pub const SIG_SETMASK: ::c_int = 3; pub const SIG_BLOCK: ::c_int = 0x1; @@ -173,9 +174,6 @@ pub const IPV6_V6ONLY: ::c_int = 27; pub const ST_RDONLY: ::c_ulong = 1; -pub const CTL_HW: ::c_int = 6; -pub const HW_NCPU: ::c_int = 3; - pub const EV_ADD: ::uint16_t = 0x1; pub const EV_CLEAR: ::uint16_t = 0x20; pub const EV_DELETE: ::uint16_t = 0x2; @@ -292,6 +290,13 @@ pub const WNOHANG: ::c_int = 1; pub const RTLD_NOW: ::c_int = 0x2; pub const RTLD_DEFAULT: *mut ::c_void = -2isize as *mut ::c_void; +pub const LOG_CRON: ::c_int = 9 << 3; +pub const LOG_AUTHPRIV: ::c_int = 10 << 3; +pub const LOG_FTP: ::c_int = 11 << 3; +pub const LOG_PERROR: ::c_int = 0x20; + +pub const PIPE_BUF: usize = 512; + f! { pub fn FD_CLR(fd: ::c_int, set: *mut fd_set) -> () { let bits = mem::size_of_val(&(*set).fds_bits[0]) * 8; @@ -330,6 +335,10 @@ f! { pub fn WTERMSIG(status: ::c_int) -> ::c_int { status & 0o177 } + + pub fn WCOREDUMP(status: ::c_int) -> bool { + (status & 0o200) != 0 + } } extern { @@ -345,6 +354,9 @@ extern { buf: *mut ::c_char, buflen: ::size_t, result: *mut *mut passwd) -> ::c_int; + pub fn getprogname() -> *const ::c_char; + pub fn setprogname(name: *const ::c_char); + pub fn getloadavg(loadavg: *mut ::c_double, nelem: ::c_int) -> ::c_int; } cfg_if! { diff --git a/src/liblibc/src/unix/bsd/openbsdlike/bitrig.rs b/src/liblibc/src/unix/bsd/openbsdlike/bitrig.rs index aeba9fcfec..7b3b5e44ca 100644 --- a/src/liblibc/src/unix/bsd/openbsdlike/bitrig.rs +++ b/src/liblibc/src/unix/bsd/openbsdlike/bitrig.rs @@ -135,6 +135,40 @@ s! 
{ } } +pub const LC_COLLATE_MASK: ::c_int = (1 << 0); +pub const LC_CTYPE_MASK: ::c_int = (1 << 1); +pub const LC_MESSAGES_MASK: ::c_int = (1 << 2); +pub const LC_MONETARY_MASK: ::c_int = (1 << 3); +pub const LC_NUMERIC_MASK: ::c_int = (1 << 4); +pub const LC_TIME_MASK: ::c_int = (1 << 5); +pub const LC_ALL_MASK: ::c_int = LC_COLLATE_MASK + | LC_CTYPE_MASK + | LC_MESSAGES_MASK + | LC_MONETARY_MASK + | LC_NUMERIC_MASK + | LC_TIME_MASK; + +pub const ERA: ::nl_item = 52; +pub const ERA_D_FMT: ::nl_item = 53; +pub const ERA_D_T_FMT: ::nl_item = 54; +pub const ERA_T_FMT: ::nl_item = 55; +pub const ALT_DIGITS: ::nl_item = 56; + +pub const D_MD_ORDER: ::nl_item = 57; + +pub const ALTMON_1: ::nl_item = 58; +pub const ALTMON_2: ::nl_item = 59; +pub const ALTMON_3: ::nl_item = 60; +pub const ALTMON_4: ::nl_item = 61; +pub const ALTMON_5: ::nl_item = 62; +pub const ALTMON_6: ::nl_item = 63; +pub const ALTMON_7: ::nl_item = 64; +pub const ALTMON_8: ::nl_item = 65; +pub const ALTMON_9: ::nl_item = 66; +pub const ALTMON_10: ::nl_item = 67; +pub const ALTMON_11: ::nl_item = 68; +pub const ALTMON_12: ::nl_item = 69; + pub const O_CLOEXEC: ::c_int = 0x10000; pub const MS_SYNC : ::c_int = 0x0002; @@ -158,7 +192,6 @@ pub const RLIM_NLIMITS: ::c_int = 9; pub const SO_SNDTIMEO: ::c_int = 0x1005; pub const SO_RCVTIMEO: ::c_int = 0x1006; -pub const KERN_PROC : ::c_int = 66; pub const O_DSYNC : ::c_int = 128; pub const MAP_RENAME : ::c_int = 0x0000; @@ -228,6 +261,7 @@ pub const _SC_REALTIME_SIGNALS : ::c_int = 64; pub const _SC_RTSIG_MAX : ::c_int = 66; pub const _SC_SIGQUEUE_MAX : ::c_int = 70; pub const _SC_TIMER_MAX : ::c_int = 93; +pub const _SC_HOST_NAME_MAX: ::c_int = 33; pub const FD_SETSIZE: usize = 1024; @@ -238,12 +272,125 @@ pub const PTHREAD_COND_INITIALIZER: pthread_cond_t = 0 as *mut _; pub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = 0 as *mut _; pub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 2; -pub const KERN_PROC_ARGS: ::c_int = 55; - pub const TMP_MAX : ::c_uint = 0x7fffffff; pub const NI_MAXHOST: ::size_t = 256; +pub const CTL_MAXNAME: ::c_int = 12; +pub const CTLTYPE_NODE: ::c_int = 1; +pub const CTLTYPE_INT: ::c_int = 2; +pub const CTLTYPE_STRING: ::c_int = 3; +pub const CTLTYPE_QUAD: ::c_int = 4; +pub const CTLTYPE_STRUCT: ::c_int = 5; +pub const CTL_UNSPEC: ::c_int = 0; +pub const CTL_KERN: ::c_int = 1; +pub const CTL_VM: ::c_int = 2; +pub const CTL_FS: ::c_int = 3; +pub const CTL_NET: ::c_int = 4; +pub const CTL_DEBUG: ::c_int = 5; +pub const CTL_HW: ::c_int = 6; +pub const CTL_MACHDEP: ::c_int = 7; +pub const CTL_DDB: ::c_int = 9; +pub const CTL_VFS: ::c_int = 10; +pub const CTL_MAXID: ::c_int = 11; +pub const KERN_OSTYPE: ::c_int = 1; +pub const KERN_OSRELEASE: ::c_int = 2; +pub const KERN_OSREV: ::c_int = 3; +pub const KERN_VERSION: ::c_int = 4; +pub const KERN_MAXVNODES: ::c_int = 5; +pub const KERN_MAXPROC: ::c_int = 6; +pub const KERN_MAXFILES: ::c_int = 7; +pub const KERN_ARGMAX: ::c_int = 8; +pub const KERN_SECURELVL: ::c_int = 9; +pub const KERN_HOSTNAME: ::c_int = 10; +pub const KERN_HOSTID: ::c_int = 11; +pub const KERN_CLOCKRATE: ::c_int = 12; +pub const KERN_PROF: ::c_int = 16; +pub const KERN_POSIX1: ::c_int = 17; +pub const KERN_NGROUPS: ::c_int = 18; +pub const KERN_JOB_CONTROL: ::c_int = 19; +pub const KERN_SAVED_IDS: ::c_int = 20; +pub const KERN_BOOTTIME: ::c_int = 21; +pub const KERN_DOMAINNAME: ::c_int = 22; +pub const KERN_MAXPARTITIONS: ::c_int = 23; +pub const KERN_RAWPARTITION: ::c_int = 24; +pub const KERN_MAXTHREAD: ::c_int = 25; +pub const KERN_NTHREADS: 
::c_int = 26; +pub const KERN_OSVERSION: ::c_int = 27; +pub const KERN_SOMAXCONN: ::c_int = 28; +pub const KERN_SOMINCONN: ::c_int = 29; +pub const KERN_USERMOUNT: ::c_int = 30; +pub const KERN_RND: ::c_int = 31; +pub const KERN_NOSUIDCOREDUMP: ::c_int = 32; +pub const KERN_FSYNC: ::c_int = 33; +pub const KERN_SYSVMSG: ::c_int = 34; +pub const KERN_SYSVSEM: ::c_int = 35; +pub const KERN_SYSVSHM: ::c_int = 36; +pub const KERN_ARND: ::c_int = 37; +pub const KERN_MSGBUFSIZE: ::c_int = 38; +pub const KERN_MALLOCSTATS: ::c_int = 39; +pub const KERN_CPTIME: ::c_int = 40; +pub const KERN_NCHSTATS: ::c_int = 41; +pub const KERN_FORKSTAT: ::c_int = 42; +pub const KERN_NSELCOLL: ::c_int = 43; +pub const KERN_TTY: ::c_int = 44; +pub const KERN_CCPU: ::c_int = 45; +pub const KERN_FSCALE: ::c_int = 46; +pub const KERN_NPROCS: ::c_int = 47; +pub const KERN_MSGBUF: ::c_int = 48; +pub const KERN_POOL: ::c_int = 49; +pub const KERN_STACKGAPRANDOM: ::c_int = 50; +pub const KERN_SYSVIPC_INFO: ::c_int = 51; +pub const KERN_SPLASSERT: ::c_int = 54; +pub const KERN_PROC_ARGS: ::c_int = 55; +pub const KERN_NFILES: ::c_int = 56; +pub const KERN_TTYCOUNT: ::c_int = 57; +pub const KERN_NUMVNODES: ::c_int = 58; +pub const KERN_MBSTAT: ::c_int = 59; +pub const KERN_SEMINFO: ::c_int = 61; +pub const KERN_SHMINFO: ::c_int = 62; +pub const KERN_INTRCNT: ::c_int = 63; +pub const KERN_WATCHDOG: ::c_int = 64; +pub const KERN_PROC: ::c_int = 66; +pub const KERN_MAXCLUSTERS: ::c_int = 67; +pub const KERN_EVCOUNT: ::c_int = 68; +pub const KERN_TIMECOUNTER: ::c_int = 69; +pub const KERN_MAXLOCKSPERUID: ::c_int = 70; +pub const KERN_CPTIME2: ::c_int = 71; +pub const KERN_CACHEPCT: ::c_int = 72; +pub const KERN_FILE: ::c_int = 73; +pub const KERN_CONSDEV: ::c_int = 75; +pub const KERN_NETLIVELOCKS: ::c_int = 76; +pub const KERN_POOL_DEBUG: ::c_int = 77; +pub const KERN_PROC_CWD: ::c_int = 78; +pub const KERN_PROC_NOBROADCASTKILL: ::c_int = 79; +pub const KERN_PROC_VMMAP: ::c_int = 80; +pub const KERN_GLOBAL_PTRACE: ::c_int = 81; +pub const KERN_CONSBUFSIZE: ::c_int = 82; +pub const KERN_CONSBUF: ::c_int = 83; +pub const KERN_MAXID: ::c_int = 84; +pub const KERN_PROC_ALL: ::c_int = 0; +pub const KERN_PROC_PID: ::c_int = 1; +pub const KERN_PROC_PGRP: ::c_int = 2; +pub const KERN_PROC_SESSION: ::c_int = 3; +pub const KERN_PROC_TTY: ::c_int = 4; +pub const KERN_PROC_UID: ::c_int = 5; +pub const KERN_PROC_RUID: ::c_int = 6; +pub const KERN_PROC_KTHREAD: ::c_int = 7; +pub const KERN_PROC_SHOW_THREADS: ::c_int = 0x40000000; +pub const KERN_SYSVIPC_MSG_INFO: ::c_int = 1; +pub const KERN_SYSVIPC_SEM_INFO: ::c_int = 2; +pub const KERN_SYSVIPC_SHM_INFO: ::c_int = 3; +pub const KERN_PROC_ARGV: ::c_int = 1; +pub const KERN_PROC_NARGV: ::c_int = 2; +pub const KERN_PROC_ENV: ::c_int = 3; +pub const KERN_PROC_NENV: ::c_int = 4; +pub const KI_NGROUPS: ::c_int = 16; +pub const KI_MAXCOMLEN: ::c_int = 24; +pub const KI_WMESGLEN: ::c_int = 8; +pub const KI_MAXLOGNAME: ::c_int = 32; +pub const KI_EMULNAMELEN: ::c_int = 8; + extern { pub fn getnameinfo(sa: *const ::sockaddr, salen: ::socklen_t, @@ -271,4 +418,12 @@ extern { newp: *mut ::c_void, newlen: ::size_t) -> ::c_int; + pub fn nl_langinfo_l(item: ::nl_item, locale: ::locale_t) -> *mut ::c_char; + pub fn duplocale(base: ::locale_t) -> ::locale_t; + pub fn freelocale(loc: ::locale_t) -> ::c_int; + pub fn newlocale(mask: ::c_int, + locale: *const ::c_char, + base: ::locale_t) -> ::locale_t; + pub fn uselocale(loc: ::locale_t) -> ::locale_t; + pub fn querylocale(mask: ::c_int, loc: ::locale_t) -> 
*const ::c_char; } diff --git a/src/liblibc/src/unix/bsd/openbsdlike/mod.rs b/src/liblibc/src/unix/bsd/openbsdlike/mod.rs index c772e2e9c7..8bb15cffc0 100644 --- a/src/liblibc/src/unix/bsd/openbsdlike/mod.rs +++ b/src/liblibc/src/unix/bsd/openbsdlike/mod.rs @@ -8,6 +8,7 @@ pub type pthread_key_t = ::c_int; pub type rlim_t = u64; pub type speed_t = ::c_uint; pub type tcflag_t = ::c_uint; +pub type nl_item = c_long; pub type clockid_t = ::c_int; pub enum timezone {} @@ -52,6 +53,65 @@ s! { } } +pub const D_T_FMT: ::nl_item = 0; +pub const D_FMT: ::nl_item = 1; +pub const T_FMT: ::nl_item = 2; +pub const T_FMT_AMPM: ::nl_item = 3; +pub const AM_STR: ::nl_item = 4; +pub const PM_STR: ::nl_item = 5; + +pub const DAY_1: ::nl_item = 6; +pub const DAY_2: ::nl_item = 7; +pub const DAY_3: ::nl_item = 8; +pub const DAY_4: ::nl_item = 9; +pub const DAY_5: ::nl_item = 10; +pub const DAY_6: ::nl_item = 11; +pub const DAY_7: ::nl_item = 12; + +pub const ABDAY_1: ::nl_item = 13; +pub const ABDAY_2: ::nl_item = 14; +pub const ABDAY_3: ::nl_item = 15; +pub const ABDAY_4: ::nl_item = 16; +pub const ABDAY_5: ::nl_item = 17; +pub const ABDAY_6: ::nl_item = 18; +pub const ABDAY_7: ::nl_item = 19; + +pub const MON_1: ::nl_item = 20; +pub const MON_2: ::nl_item = 21; +pub const MON_3: ::nl_item = 22; +pub const MON_4: ::nl_item = 23; +pub const MON_5: ::nl_item = 24; +pub const MON_6: ::nl_item = 25; +pub const MON_7: ::nl_item = 26; +pub const MON_8: ::nl_item = 27; +pub const MON_9: ::nl_item = 28; +pub const MON_10: ::nl_item = 29; +pub const MON_11: ::nl_item = 30; +pub const MON_12: ::nl_item = 31; + +pub const ABMON_1: ::nl_item = 32; +pub const ABMON_2: ::nl_item = 33; +pub const ABMON_3: ::nl_item = 34; +pub const ABMON_4: ::nl_item = 35; +pub const ABMON_5: ::nl_item = 36; +pub const ABMON_6: ::nl_item = 37; +pub const ABMON_7: ::nl_item = 38; +pub const ABMON_8: ::nl_item = 39; +pub const ABMON_9: ::nl_item = 40; +pub const ABMON_10: ::nl_item = 41; +pub const ABMON_11: ::nl_item = 42; +pub const ABMON_12: ::nl_item = 43; + +pub const RADIXCHAR: ::nl_item = 44; +pub const THOUSEP: ::nl_item = 45; +pub const YESSTR: ::nl_item = 46; +pub const YESEXPR: ::nl_item = 47; +pub const NOSTR: ::nl_item = 48; +pub const NOEXPR: ::nl_item = 49; +pub const CRNCYSTR: ::nl_item = 50; + +pub const CODESET: ::nl_item = 51; + pub const EXIT_FAILURE : ::c_int = 1; pub const EXIT_SUCCESS : ::c_int = 0; pub const RAND_MAX : ::c_int = 2147483647; @@ -341,7 +401,6 @@ pub const LOCK_NB: ::c_int = 4; pub const LOCK_UN: ::c_int = 8; pub const O_NONBLOCK : ::c_int = 4; -pub const CTL_KERN : ::c_int = 1; pub const IPPROTO_RAW : ::c_int = 255; @@ -375,16 +434,15 @@ pub const _SC_PAGESIZE : ::c_int = 28; pub const _SC_PAGE_SIZE: ::c_int = _SC_PAGESIZE; pub const _SC_FSYNC : ::c_int = 29; -pub const KERN_PROC_ARGV: ::c_int = 1; -pub const KERN_PROC_NARGV: ::c_int = 2; -pub const KERN_PROC_ENV: ::c_int = 3; -pub const KERN_PROC_NENV: ::c_int = 4; - pub const Q_GETQUOTA: ::c_int = 0x300; pub const Q_SETQUOTA: ::c_int = 0x400; pub const RTLD_GLOBAL: ::c_int = 0x100; +pub const LOG_NFACILITIES: ::c_int = 24; + +pub const HW_NCPU: ::c_int = 3; + #[link(name = "util")] extern { pub fn mincore(addr: *mut ::c_void, len: ::size_t, diff --git a/src/liblibc/src/unix/bsd/openbsdlike/netbsd.rs b/src/liblibc/src/unix/bsd/openbsdlike/netbsd.rs index 2d1def9c74..330143bcc9 100644 --- a/src/liblibc/src/unix/bsd/openbsdlike/netbsd.rs +++ b/src/liblibc/src/unix/bsd/openbsdlike/netbsd.rs @@ -219,6 +219,20 @@ s! 
{ } } +pub const LC_COLLATE_MASK: ::c_int = (1 << ::LC_COLLATE); +pub const LC_CTYPE_MASK: ::c_int = (1 << ::LC_CTYPE); +pub const LC_MONETARY_MASK: ::c_int = (1 << ::LC_MONETARY); +pub const LC_NUMERIC_MASK: ::c_int = (1 << ::LC_NUMERIC); +pub const LC_TIME_MASK: ::c_int = (1 << ::LC_TIME); +pub const LC_MESSAGES_MASK: ::c_int = (1 << ::LC_MESSAGES); +pub const LC_ALL_MASK: ::c_int = !0; + +pub const ERA: ::nl_item = 52; +pub const ERA_D_FMT: ::nl_item = 53; +pub const ERA_D_T_FMT: ::nl_item = 54; +pub const ERA_T_FMT: ::nl_item = 55; +pub const ALT_DIGITS: ::nl_item = 56; + pub const O_CLOEXEC: ::c_int = 0x400000; pub const O_ALT_IO: ::c_int = 0x40000; pub const O_NOSIGPIPE: ::c_int = 0x1000000; @@ -253,7 +267,6 @@ pub const IPV6_LEAVE_GROUP: ::c_int = 13; pub const SO_SNDTIMEO: ::c_int = 0x100b; pub const SO_RCVTIMEO: ::c_int = 0x100c; -pub const KERN_PROC : ::c_int = 14; pub const O_DSYNC : ::c_int = 0x10000; pub const MAP_RENAME : ::c_int = 0x20; @@ -296,6 +309,7 @@ pub const _SC_SEMAPHORES : ::c_int = 42; pub const _SC_SHARED_MEMORY_OBJECTS : ::c_int = 87; pub const _SC_SYNCHRONIZED_IO : ::c_int = 31; pub const _SC_TIMERS : ::c_int = 44; +pub const _SC_HOST_NAME_MAX : ::c_int = 69; pub const FD_SETSIZE: usize = 0x100; @@ -332,7 +346,6 @@ pub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = pthread_rwlock_t { ptr_private: 0 as *mut _, }; pub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 2; -pub const KERN_PROC_ARGS: ::c_int = 48; pub const EVFILT_AIO: ::int16_t = 2; pub const EVFILT_PROC: ::int16_t = 4; @@ -354,6 +367,151 @@ pub const NI_MAXHOST: ::socklen_t = 1025; pub const RTLD_NOLOAD: ::c_int = 0x2000; pub const RTLD_LOCAL: ::c_int = 0x200; +pub const CTL_MAXNAME: ::c_int = 12; +pub const SYSCTL_NAMELEN: ::c_int = 32; +pub const SYSCTL_DEFSIZE: ::c_int = 8; +pub const CTLTYPE_NODE: ::c_int = 1; +pub const CTLTYPE_INT: ::c_int = 2; +pub const CTLTYPE_STRING: ::c_int = 3; +pub const CTLTYPE_QUAD: ::c_int = 4; +pub const CTLTYPE_STRUCT: ::c_int = 5; +pub const CTLTYPE_BOOL: ::c_int = 6; +pub const CTLFLAG_READONLY: ::c_int = 0x00000000; +pub const CTLFLAG_READWRITE: ::c_int = 0x00000070; +pub const CTLFLAG_ANYWRITE: ::c_int = 0x00000080; +pub const CTLFLAG_PRIVATE: ::c_int = 0x00000100; +pub const CTLFLAG_PERMANENT: ::c_int = 0x00000200; +pub const CTLFLAG_OWNDATA: ::c_int = 0x00000400; +pub const CTLFLAG_IMMEDIATE: ::c_int = 0x00000800; +pub const CTLFLAG_HEX: ::c_int = 0x00001000; +pub const CTLFLAG_ROOT: ::c_int = 0x00002000; +pub const CTLFLAG_ANYNUMBER: ::c_int = 0x00004000; +pub const CTLFLAG_HIDDEN: ::c_int = 0x00008000; +pub const CTLFLAG_ALIAS: ::c_int = 0x00010000; +pub const CTLFLAG_MMAP: ::c_int = 0x00020000; +pub const CTLFLAG_OWNDESC: ::c_int = 0x00040000; +pub const CTLFLAG_UNSIGNED: ::c_int = 0x00080000; +pub const SYSCTL_VERS_MASK: ::c_int = 0xff000000; +pub const SYSCTL_VERS_0: ::c_int = 0x00000000; +pub const SYSCTL_VERS_1: ::c_int = 0x01000000; +pub const SYSCTL_VERSION: ::c_int = SYSCTL_VERS_1; +pub const CTL_EOL: ::c_int = -1; +pub const CTL_QUERY: ::c_int = -2; +pub const CTL_CREATE: ::c_int = -3; +pub const CTL_CREATESYM: ::c_int = -4; +pub const CTL_DESTROY: ::c_int = -5; +pub const CTL_MMAP: ::c_int = -6; +pub const CTL_DESCRIBE: ::c_int = -7; +pub const CTL_UNSPEC: ::c_int = 0; +pub const CTL_KERN: ::c_int = 1; +pub const CTL_VM: ::c_int = 2; +pub const CTL_VFS: ::c_int = 3; +pub const CTL_NET: ::c_int = 4; +pub const CTL_DEBUG: ::c_int = 5; +pub const CTL_HW: ::c_int = 6; +pub const CTL_MACHDEP: ::c_int = 7; +pub const CTL_USER: ::c_int = 8; +pub const 
CTL_DDB: ::c_int = 9; +pub const CTL_PROC: ::c_int = 10; +pub const CTL_VENDOR: ::c_int = 11; +pub const CTL_EMUL: ::c_int = 12; +pub const CTL_SECURITY: ::c_int = 13; +pub const CTL_MAXID: ::c_int = 14; +pub const KERN_OSTYPE: ::c_int = 1; +pub const KERN_OSRELEASE: ::c_int = 2; +pub const KERN_OSREV: ::c_int = 3; +pub const KERN_VERSION: ::c_int = 4; +pub const KERN_MAXVNODES: ::c_int = 5; +pub const KERN_MAXPROC: ::c_int = 6; +pub const KERN_MAXFILES: ::c_int = 7; +pub const KERN_ARGMAX: ::c_int = 8; +pub const KERN_SECURELVL: ::c_int = 9; +pub const KERN_HOSTNAME: ::c_int = 10; +pub const KERN_HOSTID: ::c_int = 11; +pub const KERN_CLOCKRATE: ::c_int = 12; +pub const KERN_VNODE: ::c_int = 13; +pub const KERN_PROC: ::c_int = 14; +pub const KERN_FILE: ::c_int = 15; +pub const KERN_PROF: ::c_int = 16; +pub const KERN_POSIX1: ::c_int = 17; +pub const KERN_NGROUPS: ::c_int = 18; +pub const KERN_JOB_CONTROL: ::c_int = 19; +pub const KERN_SAVED_IDS: ::c_int = 20; +pub const KERN_OBOOTTIME: ::c_int = 21; +pub const KERN_DOMAINNAME: ::c_int = 22; +pub const KERN_MAXPARTITIONS: ::c_int = 23; +pub const KERN_RAWPARTITION: ::c_int = 24; +pub const KERN_NTPTIME: ::c_int = 25; +pub const KERN_TIMEX: ::c_int = 26; +pub const KERN_AUTONICETIME: ::c_int = 27; +pub const KERN_AUTONICEVAL: ::c_int = 28; +pub const KERN_RTC_OFFSET: ::c_int = 29; +pub const KERN_ROOT_DEVICE: ::c_int = 30; +pub const KERN_MSGBUFSIZE: ::c_int = 31; +pub const KERN_FSYNC: ::c_int = 32; +pub const KERN_OLDSYSVMSG: ::c_int = 33; +pub const KERN_OLDSYSVSEM: ::c_int = 34; +pub const KERN_OLDSYSVSHM: ::c_int = 35; +pub const KERN_OLDSHORTCORENAME: ::c_int = 36; +pub const KERN_SYNCHRONIZED_IO: ::c_int = 37; +pub const KERN_IOV_MAX: ::c_int = 38; +pub const KERN_MBUF: ::c_int = 39; +pub const KERN_MAPPED_FILES: ::c_int = 40; +pub const KERN_MEMLOCK: ::c_int = 41; +pub const KERN_MEMLOCK_RANGE: ::c_int = 42; +pub const KERN_MEMORY_PROTECTION: ::c_int = 43; +pub const KERN_LOGIN_NAME_MAX: ::c_int = 44; +pub const KERN_DEFCORENAME: ::c_int = 45; +pub const KERN_LOGSIGEXIT: ::c_int = 46; +pub const KERN_PROC2: ::c_int = 47; +pub const KERN_PROC_ARGS: ::c_int = 48; +pub const KERN_FSCALE: ::c_int = 49; +pub const KERN_CCPU: ::c_int = 50; +pub const KERN_CP_TIME: ::c_int = 51; +pub const KERN_OLDSYSVIPC_INFO: ::c_int = 52; +pub const KERN_MSGBUF: ::c_int = 53; +pub const KERN_CONSDEV: ::c_int = 54; +pub const KERN_MAXPTYS: ::c_int = 55; +pub const KERN_PIPE: ::c_int = 56; +pub const KERN_MAXPHYS: ::c_int = 57; +pub const KERN_SBMAX: ::c_int = 58; +pub const KERN_TKSTAT: ::c_int = 59; +pub const KERN_MONOTONIC_CLOCK: ::c_int = 60; +pub const KERN_URND: ::c_int = 61; +pub const KERN_LABELSECTOR: ::c_int = 62; +pub const KERN_LABELOFFSET: ::c_int = 63; +pub const KERN_LWP: ::c_int = 64; +pub const KERN_FORKFSLEEP: ::c_int = 65; +pub const KERN_POSIX_THREADS: ::c_int = 66; +pub const KERN_POSIX_SEMAPHORES: ::c_int = 67; +pub const KERN_POSIX_BARRIERS: ::c_int = 68; +pub const KERN_POSIX_TIMERS: ::c_int = 69; +pub const KERN_POSIX_SPIN_LOCKS: ::c_int = 70; +pub const KERN_POSIX_READER_WRITER_LOCKS: ::c_int = 71; +pub const KERN_DUMP_ON_PANIC: ::c_int = 72; +pub const KERN_SOMAXKVA: ::c_int = 73; +pub const KERN_ROOT_PARTITION: ::c_int = 74; +pub const KERN_DRIVERS: ::c_int = 75; +pub const KERN_BUF: ::c_int = 76; +pub const KERN_FILE2: ::c_int = 77; +pub const KERN_VERIEXEC: ::c_int = 78; +pub const KERN_CP_ID: ::c_int = 79; +pub const KERN_HARDCLOCK_TICKS: ::c_int = 80; +pub const KERN_ARND: ::c_int = 81; +pub const KERN_SYSVIPC: ::c_int = 
82; +pub const KERN_BOOTTIME: ::c_int = 83; +pub const KERN_EVCNT: ::c_int = 84; +pub const KERN_MAXID: ::c_int = 85; +pub const KERN_PROC_ALL: ::c_int = 0; +pub const KERN_PROC_PID: ::c_int = 1; +pub const KERN_PROC_PGRP: ::c_int = 2; +pub const KERN_PROC_SESSION: ::c_int = 3; +pub const KERN_PROC_TTY: ::c_int = 4; +pub const KERN_PROC_UID: ::c_int = 5; +pub const KERN_PROC_RUID: ::c_int = 6; +pub const KERN_PROC_GID: ::c_int = 7; +pub const KERN_PROC_RGID: ::c_int = 8; + extern { pub fn getnameinfo(sa: *const ::sockaddr, salen: ::socklen_t, @@ -411,4 +569,10 @@ extern { timeout: *const ::timespec) -> ::c_int; pub fn sigwaitinfo(set: *const sigset_t, info: *mut siginfo_t) -> ::c_int; + pub fn duplocale(base: ::locale_t) -> ::locale_t; + pub fn freelocale(loc: ::locale_t); + pub fn localeconv_l(loc: ::locale_t) -> *mut lconv; + pub fn newlocale(mask: ::c_int, + locale: *const ::c_char, + base: ::locale_t) -> ::locale_t; } diff --git a/src/liblibc/src/unix/bsd/openbsdlike/openbsd.rs b/src/liblibc/src/unix/bsd/openbsdlike/openbsd.rs index 0277145569..9f8450ee67 100644 --- a/src/liblibc/src/unix/bsd/openbsdlike/openbsd.rs +++ b/src/liblibc/src/unix/bsd/openbsdlike/openbsd.rs @@ -164,7 +164,6 @@ pub const SO_RCVTIMEO: ::c_int = 0x1006; pub const IPV6_JOIN_GROUP: ::c_int = 12; pub const IPV6_LEAVE_GROUP: ::c_int = 13; -pub const KERN_PROC : ::c_int = 66; pub const O_DSYNC : ::c_int = 128; pub const MAP_RENAME : ::c_int = 0x0000; @@ -231,6 +230,7 @@ pub const _SC_REALTIME_SIGNALS : ::c_int = 64; pub const _SC_RTSIG_MAX : ::c_int = 66; pub const _SC_SIGQUEUE_MAX : ::c_int = 70; pub const _SC_TIMER_MAX : ::c_int = 93; +pub const _SC_HOST_NAME_MAX : ::c_int = 33; pub const FD_SETSIZE: usize = 1024; @@ -241,13 +241,125 @@ pub const PTHREAD_COND_INITIALIZER: pthread_cond_t = 0 as *mut _; pub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = 0 as *mut _; pub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 2; -pub const KERN_PROC_ARGS: ::c_int = 55; - pub const TMP_MAX : ::c_uint = 0x7fffffff; pub const NI_MAXHOST: ::size_t = 256; pub const RTLD_LOCAL: ::c_int = 0; +pub const CTL_MAXNAME: ::c_int = 12; +pub const CTLTYPE_NODE: ::c_int = 1; +pub const CTLTYPE_INT: ::c_int = 2; +pub const CTLTYPE_STRING: ::c_int = 3; +pub const CTLTYPE_QUAD: ::c_int = 4; +pub const CTLTYPE_STRUCT: ::c_int = 5; +pub const CTL_UNSPEC: ::c_int = 0; +pub const CTL_KERN: ::c_int = 1; +pub const CTL_VM: ::c_int = 2; +pub const CTL_FS: ::c_int = 3; +pub const CTL_NET: ::c_int = 4; +pub const CTL_DEBUG: ::c_int = 5; +pub const CTL_HW: ::c_int = 6; +pub const CTL_MACHDEP: ::c_int = 7; +pub const CTL_DDB: ::c_int = 9; +pub const CTL_VFS: ::c_int = 10; +pub const CTL_MAXID: ::c_int = 11; +pub const KERN_OSTYPE: ::c_int = 1; +pub const KERN_OSRELEASE: ::c_int = 2; +pub const KERN_OSREV: ::c_int = 3; +pub const KERN_VERSION: ::c_int = 4; +pub const KERN_MAXVNODES: ::c_int = 5; +pub const KERN_MAXPROC: ::c_int = 6; +pub const KERN_MAXFILES: ::c_int = 7; +pub const KERN_ARGMAX: ::c_int = 8; +pub const KERN_SECURELVL: ::c_int = 9; +pub const KERN_HOSTNAME: ::c_int = 10; +pub const KERN_HOSTID: ::c_int = 11; +pub const KERN_CLOCKRATE: ::c_int = 12; +pub const KERN_PROF: ::c_int = 16; +pub const KERN_POSIX1: ::c_int = 17; +pub const KERN_NGROUPS: ::c_int = 18; +pub const KERN_JOB_CONTROL: ::c_int = 19; +pub const KERN_SAVED_IDS: ::c_int = 20; +pub const KERN_BOOTTIME: ::c_int = 21; +pub const KERN_DOMAINNAME: ::c_int = 22; +pub const KERN_MAXPARTITIONS: ::c_int = 23; +pub const KERN_RAWPARTITION: ::c_int = 24; +pub const KERN_MAXTHREAD: 
::c_int = 25; +pub const KERN_NTHREADS: ::c_int = 26; +pub const KERN_OSVERSION: ::c_int = 27; +pub const KERN_SOMAXCONN: ::c_int = 28; +pub const KERN_SOMINCONN: ::c_int = 29; +pub const KERN_USERMOUNT: ::c_int = 30; +pub const KERN_RND: ::c_int = 31; +pub const KERN_NOSUIDCOREDUMP: ::c_int = 32; +pub const KERN_FSYNC: ::c_int = 33; +pub const KERN_SYSVMSG: ::c_int = 34; +pub const KERN_SYSVSEM: ::c_int = 35; +pub const KERN_SYSVSHM: ::c_int = 36; +pub const KERN_ARND: ::c_int = 37; +pub const KERN_MSGBUFSIZE: ::c_int = 38; +pub const KERN_MALLOCSTATS: ::c_int = 39; +pub const KERN_CPTIME: ::c_int = 40; +pub const KERN_NCHSTATS: ::c_int = 41; +pub const KERN_FORKSTAT: ::c_int = 42; +pub const KERN_NSELCOLL: ::c_int = 43; +pub const KERN_TTY: ::c_int = 44; +pub const KERN_CCPU: ::c_int = 45; +pub const KERN_FSCALE: ::c_int = 46; +pub const KERN_NPROCS: ::c_int = 47; +pub const KERN_MSGBUF: ::c_int = 48; +pub const KERN_POOL: ::c_int = 49; +pub const KERN_STACKGAPRANDOM: ::c_int = 50; +pub const KERN_SYSVIPC_INFO: ::c_int = 51; +pub const KERN_SPLASSERT: ::c_int = 54; +pub const KERN_PROC_ARGS: ::c_int = 55; +pub const KERN_NFILES: ::c_int = 56; +pub const KERN_TTYCOUNT: ::c_int = 57; +pub const KERN_NUMVNODES: ::c_int = 58; +pub const KERN_MBSTAT: ::c_int = 59; +pub const KERN_SEMINFO: ::c_int = 61; +pub const KERN_SHMINFO: ::c_int = 62; +pub const KERN_INTRCNT: ::c_int = 63; +pub const KERN_WATCHDOG: ::c_int = 64; +pub const KERN_PROC: ::c_int = 66; +pub const KERN_MAXCLUSTERS: ::c_int = 67; +pub const KERN_EVCOUNT: ::c_int = 68; +pub const KERN_TIMECOUNTER: ::c_int = 69; +pub const KERN_MAXLOCKSPERUID: ::c_int = 70; +pub const KERN_CPTIME2: ::c_int = 71; +pub const KERN_CACHEPCT: ::c_int = 72; +pub const KERN_FILE: ::c_int = 73; +pub const KERN_CONSDEV: ::c_int = 75; +pub const KERN_NETLIVELOCKS: ::c_int = 76; +pub const KERN_POOL_DEBUG: ::c_int = 77; +pub const KERN_PROC_CWD: ::c_int = 78; +pub const KERN_PROC_NOBROADCASTKILL: ::c_int = 79; +pub const KERN_PROC_VMMAP: ::c_int = 80; +pub const KERN_GLOBAL_PTRACE: ::c_int = 81; +pub const KERN_CONSBUFSIZE: ::c_int = 82; +pub const KERN_CONSBUF: ::c_int = 83; +pub const KERN_MAXID: ::c_int = 84; +pub const KERN_PROC_ALL: ::c_int = 0; +pub const KERN_PROC_PID: ::c_int = 1; +pub const KERN_PROC_PGRP: ::c_int = 2; +pub const KERN_PROC_SESSION: ::c_int = 3; +pub const KERN_PROC_TTY: ::c_int = 4; +pub const KERN_PROC_UID: ::c_int = 5; +pub const KERN_PROC_RUID: ::c_int = 6; +pub const KERN_PROC_KTHREAD: ::c_int = 7; +pub const KERN_PROC_SHOW_THREADS: ::c_int = 0x40000000; +pub const KERN_SYSVIPC_MSG_INFO: ::c_int = 1; +pub const KERN_SYSVIPC_SEM_INFO: ::c_int = 2; +pub const KERN_SYSVIPC_SHM_INFO: ::c_int = 3; +pub const KERN_PROC_ARGV: ::c_int = 1; +pub const KERN_PROC_NARGV: ::c_int = 2; +pub const KERN_PROC_ENV: ::c_int = 3; +pub const KERN_PROC_NENV: ::c_int = 4; +pub const KI_NGROUPS: ::c_int = 16; +pub const KI_MAXCOMLEN: ::c_int = 24; +pub const KI_WMESGLEN: ::c_int = 8; +pub const KI_MAXLOGNAME: ::c_int = 32; +pub const KI_EMULNAMELEN: ::c_int = 8; extern { pub fn getnameinfo(sa: *const ::sockaddr, diff --git a/src/liblibc/src/unix/mod.rs b/src/liblibc/src/unix/mod.rs index 3170363ccf..29de88bf44 100644 --- a/src/liblibc/src/unix/mod.rs +++ b/src/liblibc/src/unix/mod.rs @@ -14,6 +14,7 @@ pub type sighandler_t = ::size_t; pub type cc_t = ::c_uchar; pub enum DIR {} +pub enum locale_t {} s! { pub struct utimbuf { @@ -54,7 +55,9 @@ s! 
{ pub ru_nvcsw: c_long, pub ru_nivcsw: c_long, - #[cfg(target_env = "musl")] + #[cfg(any(target_env = "musl", + target_env = "musleabi", + target_env = "musleabihf"))] __reserved: [c_long; 16], } @@ -142,20 +145,57 @@ pub const IF_NAMESIZE: ::size_t = 16; pub const RTLD_LAZY: ::c_int = 0x1; +pub const LOG_EMERG: ::c_int = 0; +pub const LOG_ALERT: ::c_int = 1; +pub const LOG_CRIT: ::c_int = 2; +pub const LOG_ERR: ::c_int = 3; +pub const LOG_WARNING: ::c_int = 4; +pub const LOG_NOTICE: ::c_int = 5; +pub const LOG_INFO: ::c_int = 6; +pub const LOG_DEBUG: ::c_int = 7; + +pub const LOG_KERN: ::c_int = 0; +pub const LOG_USER: ::c_int = 1 << 3; +pub const LOG_MAIL: ::c_int = 2 << 3; +pub const LOG_DAEMON: ::c_int = 3 << 3; +pub const LOG_AUTH: ::c_int = 4 << 3; +pub const LOG_SYSLOG: ::c_int = 5 << 3; +pub const LOG_LPR: ::c_int = 6 << 3; +pub const LOG_NEWS: ::c_int = 7 << 3; +pub const LOG_UUCP: ::c_int = 8 << 3; +pub const LOG_LOCAL0: ::c_int = 16 << 3; +pub const LOG_LOCAL1: ::c_int = 17 << 3; +pub const LOG_LOCAL2: ::c_int = 18 << 3; +pub const LOG_LOCAL3: ::c_int = 19 << 3; +pub const LOG_LOCAL4: ::c_int = 20 << 3; +pub const LOG_LOCAL5: ::c_int = 21 << 3; +pub const LOG_LOCAL6: ::c_int = 22 << 3; +pub const LOG_LOCAL7: ::c_int = 23 << 3; + +pub const LOG_PID: ::c_int = 0x01; +pub const LOG_CONS: ::c_int = 0x02; +pub const LOG_ODELAY: ::c_int = 0x04; +pub const LOG_NDELAY: ::c_int = 0x08; +pub const LOG_NOWAIT: ::c_int = 0x10; + +pub const LOG_PRIMASK: ::c_int = 7; +pub const LOG_FACMASK: ::c_int = 0x3f8; + cfg_if! { if #[cfg(dox)] { // on dox builds don't pull in anything } else if #[cfg(all(not(stdbuild), feature = "use_std"))] { // cargo build, don't pull in anything extra as the libstd dep // already pulls in all libs. - } else if #[cfg(all(target_env = "musl", not(any(target_arch = "mips", - target_arch = "arm"))))] { + } else if #[cfg(any(all(target_env = "musl", not(target_arch = "mips")), + target_env = "musleabi", + target_env = "musleabihf"))] { #[link(name = "c", kind = "static")] extern {} } else if #[cfg(target_os = "emscripten")] { #[link(name = "c")] extern {} - } else if #[cfg(all(target_vendor = "rumprun", target_os = "netbsd"))] { + } else if #[cfg(all(target_os = "netbsd", target_vendor = "rumprun"))] { // Since we don't use -nodefaultlibs on Rumprun, libc is always pulled // in automatically by the linker. We avoid passing it explicitly, as it // causes some versions of binutils to crash with an assertion failure. @@ -178,6 +218,16 @@ cfg_if! { } extern { + pub fn fprintf(stream: *mut ::FILE, + format: *const ::c_char, ...) -> ::c_int; + pub fn printf(format: *const ::c_char, ...) -> ::c_int; + pub fn snprintf(s: *mut ::c_char, n: ::size_t, + format: *const ::c_char, ...) -> ::c_int; + pub fn sprintf(s: *mut ::c_char, format: *const ::c_char, ...) -> ::c_int; + pub fn fscanf(stream: *mut ::FILE, format: *const ::c_char, ...) -> ::c_int; + pub fn scanf(format: *const ::c_char, ...) -> ::c_int; + pub fn sscanf(s: *const ::c_char, format: *const ::c_char, ...) 
-> ::c_int; + #[cfg_attr(target_os = "netbsd", link_name = "__socket30")] pub fn socket(domain: ::c_int, ty: ::c_int, protocol: ::c_int) -> ::c_int; #[cfg_attr(all(target_os = "macos", target_arch = "x86"), @@ -410,7 +460,6 @@ extern { pub fn ftruncate(fd: ::c_int, length: off_t) -> ::c_int; - #[cfg_attr(target_os = "android", link_name = "bsd_signal")] pub fn signal(signum: ::c_int, handler: sighandler_t) -> sighandler_t; #[cfg_attr(all(target_os = "macos", target_arch = "x86"), @@ -718,6 +767,12 @@ extern { pub fn mkstemps(template: *mut ::c_char, suffixlen: ::c_int) -> ::c_int; pub fn mkdtemp(template: *mut ::c_char) -> *mut ::c_char; pub fn futimes(fd: ::c_int, times: *const ::timeval) -> ::c_int; + pub fn nl_langinfo(item: ::nl_item) -> *mut ::c_char; + + pub fn openlog(ident: *const ::c_char, logopt: ::c_int, facility: ::c_int); + pub fn closelog(); + pub fn setlogmask(maskpri: ::c_int) -> ::c_int; + pub fn syslog(priority: ::c_int, message: *const ::c_char, ...); } cfg_if! { diff --git a/src/liblibc/src/unix/notbsd/android/b32.rs b/src/liblibc/src/unix/notbsd/android/b32.rs index 267060d422..3a295cc720 100644 --- a/src/liblibc/src/unix/notbsd/android/b32.rs +++ b/src/liblibc/src/unix/notbsd/android/b32.rs @@ -1,4 +1,7 @@ +pub type c_long = i32; +pub type c_ulong = u32; pub type mode_t = u16; +pub type off64_t = ::c_longlong; s! { pub struct sigaction { @@ -7,6 +10,119 @@ s! { pub sa_flags: ::c_ulong, pub sa_restorer: ::dox::Option, } + + pub struct stat { + pub st_dev: ::c_ulonglong, + __pad0: [::c_uchar; 4], + __st_ino: ::ino_t, + pub st_mode: ::c_uint, + pub st_nlink: ::c_uint, + pub st_uid: ::uid_t, + pub st_gid: ::gid_t, + pub st_rdev: ::c_ulonglong, + __pad3: [::c_uchar; 4], + pub st_size: ::c_longlong, + pub st_blksize: ::blksize_t, + pub st_blocks: ::c_ulonglong, + pub st_atime: ::c_ulong, + pub st_atime_nsec: ::c_ulong, + pub st_mtime: ::c_ulong, + pub st_mtime_nsec: ::c_ulong, + pub st_ctime: ::c_ulong, + pub st_ctime_nsec: ::c_ulong, + pub st_ino: ::c_ulonglong, + } + + pub struct stat64 { + pub st_dev: ::c_ulonglong, + __pad0: [::c_uchar; 4], + __st_ino: ::ino_t, + pub st_mode: ::c_uint, + pub st_nlink: ::c_uint, + pub st_uid: ::uid_t, + pub st_gid: ::gid_t, + pub st_rdev: ::c_ulonglong, + __pad3: [::c_uchar; 4], + pub st_size: ::c_longlong, + pub st_blksize: ::blksize_t, + pub st_blocks: ::c_ulonglong, + pub st_atime: ::c_ulong, + pub st_atime_nsec: ::c_ulong, + pub st_mtime: ::c_ulong, + pub st_mtime_nsec: ::c_ulong, + pub st_ctime: ::c_ulong, + pub st_ctime_nsec: ::c_ulong, + pub st_ino: ::c_ulonglong, + } + + pub struct pthread_attr_t { + pub flags: ::uint32_t, + pub stack_base: *mut ::c_void, + pub stack_size: ::size_t, + pub guard_size: ::size_t, + pub sched_policy: ::int32_t, + pub sched_priority: ::int32_t, + } + + pub struct pthread_mutex_t { value: ::c_int } + + pub struct pthread_cond_t { value: ::c_int } + + pub struct pthread_rwlock_t { + lock: pthread_mutex_t, + cond: pthread_cond_t, + numLocks: ::c_int, + writerThreadId: ::c_int, + pendingReaders: ::c_int, + pendingWriters: ::c_int, + attr: i32, + __reserved: [::c_char; 12], + } + + pub struct passwd { + pub pw_name: *mut ::c_char, + pub pw_passwd: *mut ::c_char, + pub pw_uid: ::uid_t, + pub pw_gid: ::gid_t, + pub pw_dir: *mut ::c_char, + pub pw_shell: *mut ::c_char, + } + + pub struct statfs { + pub f_type: ::uint32_t, + pub f_bsize: ::uint32_t, + pub f_blocks: ::uint64_t, + pub f_bfree: ::uint64_t, + pub f_bavail: ::uint64_t, + pub f_files: ::uint64_t, + pub f_ffree: ::uint64_t, + pub f_fsid: ::__fsid_t, 
+ pub f_namelen: ::uint32_t, + pub f_frsize: ::uint32_t, + pub f_flags: ::uint32_t, + pub f_spare: [::uint32_t; 4], + } } pub const SYS_gettid: ::c_long = 224; +pub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = pthread_mutex_t { + value: 0, +}; +pub const PTHREAD_COND_INITIALIZER: pthread_cond_t = pthread_cond_t { + value: 0, +}; +pub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = pthread_rwlock_t { + lock: PTHREAD_MUTEX_INITIALIZER, + cond: PTHREAD_COND_INITIALIZER, + numLocks: 0, + writerThreadId: 0, + pendingReaders: 0, + pendingWriters: 0, + attr: 0, + __reserved: [0; 12], +}; +pub const PTHREAD_STACK_MIN: ::size_t = 4096 * 2; + +extern { + pub fn timegm64(tm: *const ::tm) -> ::time64_t; +} diff --git a/src/liblibc/src/unix/notbsd/android/b64.rs b/src/liblibc/src/unix/notbsd/android/b64.rs index 5cf4f9d0c8..98b643f0b0 100644 --- a/src/liblibc/src/unix/notbsd/android/b64.rs +++ b/src/liblibc/src/unix/notbsd/android/b64.rs @@ -1,4 +1,9 @@ +// The following definitions are correct for aarch64 and may be wrong for x86_64 + +pub type c_long = i64; +pub type c_ulong = u64; pub type mode_t = u32; +pub type off64_t = i64; s! { pub struct sigaction { @@ -7,6 +12,127 @@ s! { pub sa_mask: ::sigset_t, _restorer: *mut ::c_void, } + + pub struct stat { + pub st_dev: ::dev_t, + pub st_ino: ::ino_t, + pub st_mode: ::c_uint, + pub st_nlink: ::c_uint, + pub st_uid: ::uid_t, + pub st_gid: ::gid_t, + pub st_rdev: ::dev_t, + __pad1: ::c_ulong, + pub st_size: ::off64_t, + pub st_blksize: ::c_int, + __pad2: ::c_int, + pub st_blocks: ::c_long, + pub st_atime: ::time_t, + pub st_atime_nsec: ::c_ulong, + pub st_mtime: ::time_t, + pub st_mtime_nsec: ::c_ulong, + pub st_ctime: ::time_t, + pub st_ctime_nsec: ::c_ulong, + __unused4: ::c_uint, + __unused5: ::c_uint, + } + + pub struct stat64 { + pub st_dev: ::dev_t, + pub st_ino: ::ino_t, + pub st_mode: ::c_uint, + pub st_nlink: ::c_uint, + pub st_uid: ::uid_t, + pub st_gid: ::gid_t, + pub st_rdev: ::dev_t, + __pad1: ::c_ulong, + pub st_size: ::off64_t, + pub st_blksize: ::c_int, + __pad2: ::c_int, + pub st_blocks: ::c_long, + pub st_atime: ::time_t, + pub st_atime_nsec: ::c_ulong, + pub st_mtime: ::time_t, + pub st_mtime_nsec: ::c_ulong, + pub st_ctime: ::time_t, + pub st_ctime_nsec: ::c_ulong, + __unused4: ::c_uint, + __unused5: ::c_uint, + } + + pub struct pthread_attr_t { + pub flags: ::uint32_t, + pub stack_base: *mut ::c_void, + pub stack_size: ::size_t, + pub guard_size: ::size_t, + pub sched_policy: ::int32_t, + pub sched_priority: ::int32_t, + __reserved: [::c_char; 16], + } + + pub struct pthread_mutex_t { + value: ::c_int, + __reserved: [::c_char; 36], + } + + pub struct pthread_cond_t { + value: ::c_int, + __reserved: [::c_char; 44], + } + + pub struct pthread_rwlock_t { + numLocks: ::c_int, + writerThreadId: ::c_int, + pendingReaders: ::c_int, + pendingWriters: ::c_int, + attr: i32, + __reserved: [::c_char; 36], + } + + pub struct passwd { + pub pw_name: *mut ::c_char, + pub pw_passwd: *mut ::c_char, + pub pw_uid: ::uid_t, + pub pw_gid: ::gid_t, + pub pw_gecos: *mut ::c_char, + pub pw_dir: *mut ::c_char, + pub pw_shell: *mut ::c_char, + } + + pub struct statfs { + pub f_type: ::uint64_t, + pub f_bsize: ::uint64_t, + pub f_blocks: ::uint64_t, + pub f_bfree: ::uint64_t, + pub f_bavail: ::uint64_t, + pub f_files: ::uint64_t, + pub f_ffree: ::uint64_t, + pub f_fsid: ::__fsid_t, + pub f_namelen: ::uint64_t, + pub f_frsize: ::uint64_t, + pub f_flags: ::uint64_t, + pub f_spare: [::uint64_t; 4], + } } pub const SYS_gettid: ::c_long = 178; +pub const 
PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = pthread_mutex_t { + value: 0, + __reserved: [0; 36], +}; +pub const PTHREAD_COND_INITIALIZER: pthread_cond_t = pthread_cond_t { + value: 0, + __reserved: [0; 44], +}; +pub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = pthread_rwlock_t { + numLocks: 0, + writerThreadId: 0, + pendingReaders: 0, + pendingWriters: 0, + attr: 0, + __reserved: [0; 36], +}; +pub const PTHREAD_STACK_MIN: ::size_t = 4096 * 4; + +extern { + pub fn timegm(tm: *const ::tm) -> ::time64_t; +} diff --git a/src/liblibc/src/unix/notbsd/android/mod.rs b/src/liblibc/src/unix/notbsd/android/mod.rs index 8963ddf914..7cfde94427 100644 --- a/src/liblibc/src/unix/notbsd/android/mod.rs +++ b/src/liblibc/src/unix/notbsd/android/mod.rs @@ -1,75 +1,28 @@ //! Android-specific definitions for linux-like values pub type c_char = u8; -pub type c_long = i32; -pub type c_ulong = u32; -pub type clock_t = i32; -pub type time_t = i32; -pub type suseconds_t = i32; +pub type clock_t = ::c_long; +pub type time_t = ::c_long; +pub type suseconds_t = ::c_long; pub type wchar_t = u32; -pub type off_t = i32; -pub type off64_t = i64; -pub type ino_t = u32; -pub type blkcnt_t = u32; -pub type blksize_t = u32; -pub type dev_t = u32; +pub type off_t = ::c_long; +pub type blkcnt_t = ::c_ulong; +pub type blksize_t = ::c_ulong; pub type nlink_t = u32; pub type useconds_t = u32; pub type socklen_t = i32; -pub type pthread_t = c_long; +pub type pthread_t = ::c_long; pub type pthread_mutexattr_t = ::c_long; -pub type sigset_t = c_ulong; -pub type time64_t = i64; +pub type sigset_t = ::c_ulong; +pub type time64_t = i64; // N/A on android pub type fsfilcnt_t = ::c_ulong; pub type fsblkcnt_t = ::c_ulong; pub type nfds_t = ::c_uint; -pub type rlim_t = c_ulong; +pub type rlim_t = ::c_ulong; +pub type dev_t = ::c_ulong; +pub type ino_t = ::c_ulong; s! { - pub struct stat { - pub st_dev: ::c_ulonglong, - __pad0: [::c_uchar; 4], - __st_ino: ::ino_t, - pub st_mode: ::c_uint, - pub st_nlink: ::c_uint, - pub st_uid: ::uid_t, - pub st_gid: ::gid_t, - pub st_rdev: ::c_ulonglong, - __pad3: [::c_uchar; 4], - pub st_size: ::c_longlong, - pub st_blksize: blksize_t, - pub st_blocks: ::c_ulonglong, - pub st_atime: ::c_ulong, - pub st_atime_nsec: ::c_ulong, - pub st_mtime: ::c_ulong, - pub st_mtime_nsec: ::c_ulong, - pub st_ctime: ::c_ulong, - pub st_ctime_nsec: ::c_ulong, - pub st_ino: ::c_ulonglong, - } - - pub struct stat64 { - pub st_dev: ::c_ulonglong, - __pad0: [::c_uchar; 4], - __st_ino: ::ino_t, - pub st_mode: ::c_uint, - pub st_nlink: ::c_uint, - pub st_uid: ::uid_t, - pub st_gid: ::gid_t, - pub st_rdev: ::c_ulonglong, - __pad3: [::c_uchar; 4], - pub st_size: ::c_longlong, - pub st_blksize: blksize_t, - pub st_blocks: ::c_ulonglong, - pub st_atime: ::c_ulong, - pub st_atime_nsec: ::c_ulong, - pub st_mtime: ::c_ulong, - pub st_mtime_nsec: ::c_ulong, - pub st_ctime: ::c_ulong, - pub st_ctime_nsec: ::c_ulong, - pub st_ino: ::c_ulonglong, - } - pub struct dirent { pub d_ino: u64, pub d_off: i64, @@ -91,38 +44,6 @@ s! 
{ pub rlim_max: u64, } - pub struct pthread_attr_t { - pub flags: ::uint32_t, - pub stack_base: *mut ::c_void, - pub stack_size: ::size_t, - pub guard_size: ::size_t, - pub sched_policy: ::int32_t, - pub sched_priority: ::int32_t, - } - - pub struct pthread_mutex_t { value: ::c_int } - - pub struct pthread_cond_t { value: ::c_int } - - pub struct pthread_rwlock_t { - lock: pthread_mutex_t, - cond: pthread_cond_t, - numLocks: ::c_int, - writerThreadId: ::c_int, - pendingReaders: ::c_int, - pendingWriters: ::c_int, - reserved: [*mut ::c_void; 4], - } - - pub struct passwd { - pub pw_name: *mut ::c_char, - pub pw_passwd: *mut ::c_char, - pub pw_uid: ::uid_t, - pub pw_gid: ::gid_t, - pub pw_dir: *mut ::c_char, - pub pw_shell: *mut ::c_char, - } - pub struct stack_t { pub ss_sp: *mut ::c_void, pub ss_flags: ::c_int, @@ -136,21 +57,6 @@ s! { pub _pad: [::c_int; 29], } - pub struct statfs { - pub f_type: ::uint32_t, - pub f_bsize: ::uint32_t, - pub f_blocks: ::uint64_t, - pub f_bfree: ::uint64_t, - pub f_bavail: ::uint64_t, - pub f_files: ::uint64_t, - pub f_ffree: ::uint64_t, - pub f_fsid: ::__fsid_t, - pub f_namelen: ::uint32_t, - pub f_frsize: ::uint32_t, - pub f_flags: ::uint32_t, - pub f_spare: [::uint32_t; 4], - } - pub struct __fsid_t { __val: [::c_int; 2], } @@ -251,22 +157,6 @@ pub const _SC_THREAD_PRIO_PROTECT: ::c_int = 84; pub const _SC_THREAD_SAFE_FUNCTIONS: ::c_int = 85; pub const _SC_NPROCESSORS_ONLN: ::c_int = 97; -pub const PTHREAD_STACK_MIN: ::size_t = 8192; -pub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = pthread_mutex_t { - value: 0, -}; -pub const PTHREAD_COND_INITIALIZER: pthread_cond_t = pthread_cond_t { - value: 0, -}; -pub const PTHREAD_RWLOCK_INITIALIZER: pthread_rwlock_t = pthread_rwlock_t { - lock: PTHREAD_MUTEX_INITIALIZER, - cond: PTHREAD_COND_INITIALIZER, - numLocks: 0, - writerThreadId: 0, - pendingReaders: 0, - pendingWriters: 0, - reserved: [0 as *mut _; 4], -}; pub const PTHREAD_MUTEX_RECURSIVE: ::c_int = 1; pub const FIOCLEX: ::c_int = 0x5451; @@ -308,6 +198,24 @@ pub const LC_ADDRESS: ::c_int = 9; pub const LC_TELEPHONE: ::c_int = 10; pub const LC_MEASUREMENT: ::c_int = 11; pub const LC_IDENTIFICATION: ::c_int = 12; +pub const LC_PAPER_MASK: ::c_int = (1 << LC_PAPER); +pub const LC_NAME_MASK: ::c_int = (1 << LC_NAME); +pub const LC_ADDRESS_MASK: ::c_int = (1 << LC_ADDRESS); +pub const LC_TELEPHONE_MASK: ::c_int = (1 << LC_TELEPHONE); +pub const LC_MEASUREMENT_MASK: ::c_int = (1 << LC_MEASUREMENT); +pub const LC_IDENTIFICATION_MASK: ::c_int = (1 << LC_IDENTIFICATION); +pub const LC_ALL_MASK: ::c_int = ::LC_CTYPE_MASK + | ::LC_NUMERIC_MASK + | ::LC_TIME_MASK + | ::LC_COLLATE_MASK + | ::LC_MONETARY_MASK + | ::LC_MESSAGES_MASK + | LC_PAPER_MASK + | LC_NAME_MASK + | LC_ADDRESS_MASK + | LC_TELEPHONE_MASK + | LC_MEASUREMENT_MASK + | LC_IDENTIFICATION_MASK; pub const MAP_ANON: ::c_int = 0x0020; pub const MAP_ANONYMOUS: ::c_int = 0x0020; @@ -607,6 +515,10 @@ f! { } } +extern { + static mut __progname: *mut ::c_char; +} + extern { pub fn madvise(addr: *const ::c_void, len: ::size_t, advice: ::c_int) -> ::c_int; @@ -630,31 +542,7 @@ extern { serv: *mut ::c_char, sevlen: ::size_t, flags: ::c_int) -> ::c_int; - pub fn timegm64(tm: *const ::tm) -> time64_t; - pub fn eventfd(init: ::c_uint, flags: ::c_int) -> ::c_int; pub fn ptrace(request: ::c_int, ...) -> ::c_long; - pub fn fstat64(fildes: ::c_int, buf: *mut stat64) -> ::c_int; - pub fn stat64(path: *const c_char, buf: *mut stat64) -> ::c_int; - pub fn open64(path: *const c_char, oflag: ::c_int, ...) 
-> ::c_int; - pub fn creat64(path: *const c_char, mode: mode_t) -> ::c_int; - pub fn lseek64(fd: ::c_int, offset: off64_t, whence: ::c_int) -> off64_t; - pub fn pread64(fd: ::c_int, buf: *mut ::c_void, count: ::size_t, - offset: off64_t) -> ::ssize_t; - pub fn pwrite64(fd: ::c_int, buf: *const ::c_void, count: ::size_t, - offset: off64_t) -> ::ssize_t; - pub fn mmap64(addr: *mut ::c_void, - len: ::size_t, - prot: ::c_int, - flags: ::c_int, - fd: ::c_int, - offset: off64_t) - -> *mut ::c_void; - pub fn lstat64(path: *const c_char, buf: *mut stat64) -> ::c_int; - pub fn ftruncate64(fd: ::c_int, length: off64_t) -> ::c_int; - pub fn readdir64_r(dirp: *mut ::DIR, entry: *mut ::dirent64, - result: *mut *mut ::dirent64) -> ::c_int; - pub fn getrlimit64(resource: ::c_int, rlim: *mut rlimit64) -> ::c_int; - pub fn setrlimit64(resource: ::c_int, rlim: *const rlimit64) -> ::c_int; } cfg_if! { diff --git a/src/liblibc/src/unix/notbsd/linux/mips.rs b/src/liblibc/src/unix/notbsd/linux/mips.rs index 728c1a38da..a6611d17fe 100644 --- a/src/liblibc/src/unix/notbsd/linux/mips.rs +++ b/src/liblibc/src/unix/notbsd/linux/mips.rs @@ -300,6 +300,24 @@ pub const LC_ADDRESS: ::c_int = 9; pub const LC_TELEPHONE: ::c_int = 10; pub const LC_MEASUREMENT: ::c_int = 11; pub const LC_IDENTIFICATION: ::c_int = 12; +pub const LC_PAPER_MASK: ::c_int = (1 << LC_PAPER); +pub const LC_NAME_MASK: ::c_int = (1 << LC_NAME); +pub const LC_ADDRESS_MASK: ::c_int = (1 << LC_ADDRESS); +pub const LC_TELEPHONE_MASK: ::c_int = (1 << LC_TELEPHONE); +pub const LC_MEASUREMENT_MASK: ::c_int = (1 << LC_MEASUREMENT); +pub const LC_IDENTIFICATION_MASK: ::c_int = (1 << LC_IDENTIFICATION); +pub const LC_ALL_MASK: ::c_int = ::LC_CTYPE_MASK + | ::LC_NUMERIC_MASK + | ::LC_TIME_MASK + | ::LC_COLLATE_MASK + | ::LC_MONETARY_MASK + | ::LC_MESSAGES_MASK + | LC_PAPER_MASK + | LC_NAME_MASK + | LC_ADDRESS_MASK + | LC_TELEPHONE_MASK + | LC_MEASUREMENT_MASK + | LC_IDENTIFICATION_MASK; pub const MAP_NORESERVE: ::c_int = 0x400; pub const MAP_ANON: ::c_int = 0x800; @@ -484,6 +502,7 @@ pub const RTLD_NOLOAD: ::c_int = 0x8; pub const SYS_gettid: ::c_long = 4222; // Valid for O32 +#[link(name = "util")] extern { pub fn sysctl(name: *mut ::c_int, namelen: ::c_int, @@ -502,14 +521,6 @@ extern { -> ::c_int>, pglob: *mut glob64_t) -> ::c_int; pub fn globfree64(pglob: *mut glob64_t); - pub fn getnameinfo(sa: *const ::sockaddr, - salen: ::socklen_t, - host: *mut ::c_char, - hostlen: ::socklen_t, - serv: *mut ::c_char, - sevlen: ::socklen_t, - flags: ::c_int) -> ::c_int; - pub fn eventfd(init: ::c_uint, flags: ::c_int) -> ::c_int; pub fn ptrace(request: ::c_uint, ...) -> ::c_long; pub fn pthread_attr_getaffinity_np(attr: *const ::pthread_attr_t, cpusetsize: ::size_t, diff --git a/src/liblibc/src/unix/notbsd/linux/mod.rs b/src/liblibc/src/unix/notbsd/linux/mod.rs index 374e0019d7..9fd5cf4f60 100644 --- a/src/liblibc/src/unix/notbsd/linux/mod.rs +++ b/src/liblibc/src/unix/notbsd/linux/mod.rs @@ -15,6 +15,7 @@ pub type key_t = ::c_int; pub type shmatt_t = ::c_ulong; pub type mqd_t = ::c_int; pub type nfds_t = ::c_ulong; +pub type nl_item = ::c_int; pub enum fpos64_t {} // TODO: fill this out with a struct @@ -92,9 +93,13 @@ s! 
{ } pub struct pthread_cond_t { - #[cfg(target_env = "musl")] + #[cfg(any(target_env = "musl", + target_env = "musleabi", + target_env = "musleabihf"))] __align: [*const ::c_void; 0], - #[cfg(not(target_env = "musl"))] + #[cfg(not(any(target_env = "musl", + target_env = "musleabi", + target_env = "musleabihf")))] __align: [::c_longlong; 0], size: [u8; __SIZEOF_PTHREAD_COND_T], } @@ -178,6 +183,74 @@ s! { } } +pub const ABDAY_1: ::nl_item = 0x20000; +pub const ABDAY_2: ::nl_item = 0x20001; +pub const ABDAY_3: ::nl_item = 0x20002; +pub const ABDAY_4: ::nl_item = 0x20003; +pub const ABDAY_5: ::nl_item = 0x20004; +pub const ABDAY_6: ::nl_item = 0x20005; +pub const ABDAY_7: ::nl_item = 0x20006; + +pub const DAY_1: ::nl_item = 0x20007; +pub const DAY_2: ::nl_item = 0x20008; +pub const DAY_3: ::nl_item = 0x20009; +pub const DAY_4: ::nl_item = 0x2000A; +pub const DAY_5: ::nl_item = 0x2000B; +pub const DAY_6: ::nl_item = 0x2000C; +pub const DAY_7: ::nl_item = 0x2000D; + +pub const ABMON_1: ::nl_item = 0x2000E; +pub const ABMON_2: ::nl_item = 0x2000F; +pub const ABMON_3: ::nl_item = 0x20010; +pub const ABMON_4: ::nl_item = 0x20011; +pub const ABMON_5: ::nl_item = 0x20012; +pub const ABMON_6: ::nl_item = 0x20013; +pub const ABMON_7: ::nl_item = 0x20014; +pub const ABMON_8: ::nl_item = 0x20015; +pub const ABMON_9: ::nl_item = 0x20016; +pub const ABMON_10: ::nl_item = 0x20017; +pub const ABMON_11: ::nl_item = 0x20018; +pub const ABMON_12: ::nl_item = 0x20019; + +pub const MON_1: ::nl_item = 0x2001A; +pub const MON_2: ::nl_item = 0x2001B; +pub const MON_3: ::nl_item = 0x2001C; +pub const MON_4: ::nl_item = 0x2001D; +pub const MON_5: ::nl_item = 0x2001E; +pub const MON_6: ::nl_item = 0x2001F; +pub const MON_7: ::nl_item = 0x20020; +pub const MON_8: ::nl_item = 0x20021; +pub const MON_9: ::nl_item = 0x20022; +pub const MON_10: ::nl_item = 0x20023; +pub const MON_11: ::nl_item = 0x20024; +pub const MON_12: ::nl_item = 0x20025; + +pub const AM_STR: ::nl_item = 0x20026; +pub const PM_STR: ::nl_item = 0x20027; + +pub const D_T_FMT: ::nl_item = 0x20028; +pub const D_FMT: ::nl_item = 0x20029; +pub const T_FMT: ::nl_item = 0x2002A; +pub const T_FMT_AMPM: ::nl_item = 0x2002B; + +pub const ERA: ::nl_item = 0x2002C; +pub const ERA_D_FMT: ::nl_item = 0x2002E; +pub const ALT_DIGITS: ::nl_item = 0x2002F; +pub const ERA_D_T_FMT: ::nl_item = 0x20030; +pub const ERA_T_FMT: ::nl_item = 0x20031; + +pub const CODESET: ::nl_item = 14; + +pub const CRNCYSTR: ::nl_item = 0x4000F; + +pub const RADIXCHAR: ::nl_item = 0x10000; +pub const THOUSEP: ::nl_item = 0x10001; + +pub const YESEXPR: ::nl_item = 0x50000; +pub const NOEXPR: ::nl_item = 0x50001; +pub const YESSTR: ::nl_item = 0x50002; +pub const NOSTR: ::nl_item = 0x50003; + pub const FILENAME_MAX: ::c_uint = 4096; pub const L_tmpnam: ::c_uint = 20; pub const _PC_NAME_MAX: ::c_int = 3; @@ -266,6 +339,7 @@ pub const _SC_XBS5_LPBIG_OFFBIG: ::c_int = 128; pub const _SC_XOPEN_LEGACY: ::c_int = 129; pub const _SC_XOPEN_REALTIME: ::c_int = 130; pub const _SC_XOPEN_REALTIME_THREADS: ::c_int = 131; +pub const _SC_HOST_NAME_MAX: ::c_int = 180; pub const RLIM_SAVED_MAX: ::rlim_t = RLIM_INFINITY; pub const RLIM_SAVED_CUR: ::rlim_t = RLIM_INFINITY; @@ -373,6 +447,8 @@ pub const NCCS: usize = 32; pub const AF_NETLINK: ::c_int = 16; +pub const LOG_NFACILITIES: ::c_int = 24; + f! { pub fn CPU_ZERO(cpuset: &mut cpu_set_t) -> () { for slot in cpuset.bits.iter_mut() { @@ -381,15 +457,15 @@ f! 
{ } pub fn CPU_SET(cpu: usize, cpuset: &mut cpu_set_t) -> () { - let size = mem::size_of_val(&cpuset.bits[0]); - let (idx, offset) = (cpu / size, cpu % size); + let size_in_bits = 8 * mem::size_of_val(&cpuset.bits[0]); // 32, 64 etc + let (idx, offset) = (cpu / size_in_bits, cpu % size_in_bits); cpuset.bits[idx] |= 1 << offset; () } pub fn CPU_ISSET(cpu: usize, cpuset: &cpu_set_t) -> bool { - let size = mem::size_of_val(&cpuset.bits[0]); - let (idx, offset) = (cpu / size, cpu % size); + let size_in_bits = 8 * mem::size_of_val(&cpuset.bits[0]); + let (idx, offset) = (cpu / size_in_bits, cpu % size_in_bits); 0 != (cpuset.bits[idx] & (1 << offset)) } @@ -398,7 +474,10 @@ f! { } } -#[link(name = "util")] +extern { + static mut program_invocation_short_name: *mut ::c_char; +} + extern { pub fn shm_open(name: *const c_char, oflag: ::c_int, mode: mode_t) -> ::c_int; @@ -421,29 +500,6 @@ extern { pub fn tmpfile64() -> *mut ::FILE; pub fn fgetpos64(stream: *mut ::FILE, ptr: *mut fpos64_t) -> ::c_int; pub fn fsetpos64(stream: *mut ::FILE, ptr: *const fpos64_t) -> ::c_int; - pub fn fstat64(fildes: ::c_int, buf: *mut stat64) -> ::c_int; - pub fn stat64(path: *const c_char, buf: *mut stat64) -> ::c_int; - pub fn open64(path: *const c_char, oflag: ::c_int, ...) -> ::c_int; - pub fn creat64(path: *const c_char, mode: mode_t) -> ::c_int; - pub fn lseek64(fd: ::c_int, offset: off64_t, whence: ::c_int) -> off64_t; - pub fn pread64(fd: ::c_int, buf: *mut ::c_void, count: ::size_t, - offset: off64_t) -> ::ssize_t; - pub fn pwrite64(fd: ::c_int, buf: *const ::c_void, count: ::size_t, - offset: off64_t) -> ::ssize_t; - pub fn mmap64(addr: *mut ::c_void, - len: ::size_t, - prot: ::c_int, - flags: ::c_int, - fd: ::c_int, - offset: off64_t) - -> *mut ::c_void; - pub fn lstat64(path: *const c_char, buf: *mut stat64) -> ::c_int; - pub fn ftruncate64(fd: ::c_int, length: off64_t) -> ::c_int; - pub fn readdir64_r(dirp: *mut ::DIR, entry: *mut ::dirent64, - result: *mut *mut ::dirent64) -> ::c_int; - - pub fn getrlimit64(resource: ::c_int, rlim: *mut rlimit64) -> ::c_int; - pub fn setrlimit64(resource: ::c_int, rlim: *const rlimit64) -> ::c_int; pub fn fseeko64(stream: *mut ::FILE, offset: ::off64_t, whence: ::c_int) -> ::c_int; @@ -541,10 +597,39 @@ extern { name: *mut ::c_char, termp: *const termios, winp: *const ::winsize) -> ::pid_t; + pub fn nl_langinfo_l(item: ::nl_item, locale: ::locale_t) -> *mut ::c_char; + pub fn getnameinfo(sa: *const ::sockaddr, + salen: ::socklen_t, + host: *mut ::c_char, + hostlen: ::socklen_t, + serv: *mut ::c_char, + sevlen: ::socklen_t, + flags: ::c_int) -> ::c_int; + pub fn prlimit(pid: ::pid_t, resource: ::c_int, new_limit: *const ::rlimit, + old_limit: *mut ::rlimit) -> ::c_int; + pub fn prlimit64(pid: ::pid_t, + resource: ::c_int, + new_limit: *const ::rlimit64, + old_limit: *mut ::rlimit64) -> ::c_int; + pub fn getloadavg(loadavg: *mut ::c_double, nelem: ::c_int) -> ::c_int; + pub fn process_vm_readv(pid: ::pid_t, + local_iov: *const ::iovec, + liovcnt: ::c_ulong, + remote_iov: *const ::iovec, + riovcnt: ::c_ulong, + flags: ::c_ulong) -> isize; + pub fn process_vm_writev(pid: ::pid_t, + local_iov: *const ::iovec, + liovcnt: ::c_ulong, + remote_iov: *const ::iovec, + riovcnt: ::c_ulong, + flags: ::c_ulong) -> isize; } cfg_if! 
{ if #[cfg(any(target_env = "musl", + target_env = "musleabi", + target_env = "musleabihf", target_os = "emscripten"))] { mod musl; pub use self::musl::*; diff --git a/src/liblibc/src/unix/notbsd/linux/musl/b32/x86.rs b/src/liblibc/src/unix/notbsd/linux/musl/b32/x86.rs index 2785547820..aae6c04573 100644 --- a/src/liblibc/src/unix/notbsd/linux/musl/b32/x86.rs +++ b/src/liblibc/src/unix/notbsd/linux/musl/b32/x86.rs @@ -82,6 +82,19 @@ s! { pub f_flags: ::c_ulong, pub f_spare: [::c_ulong; 4], } + + pub struct mcontext_t { + __private: [u32; 22] + } + + pub struct ucontext_t { + pub uc_flags: ::c_ulong, + pub uc_link: *mut ucontext_t, + pub uc_stack: ::stack_t, + pub uc_mcontext: mcontext_t, + pub uc_sigmask: ::sigset_t, + __private: [u8; 112], + } } pub const O_DIRECT: ::c_int = 0x4000; diff --git a/src/liblibc/src/unix/notbsd/linux/musl/b64/x86_64.rs b/src/liblibc/src/unix/notbsd/linux/musl/b64/x86_64.rs index 168e242927..02324dae30 100644 --- a/src/liblibc/src/unix/notbsd/linux/musl/b64/x86_64.rs +++ b/src/liblibc/src/unix/notbsd/linux/musl/b64/x86_64.rs @@ -1,3 +1,18 @@ +s! { + pub struct mcontext_t { + __private: [u64; 32], + } + + pub struct ucontext_t { + pub uc_flags: ::c_ulong, + pub uc_link: *mut ucontext_t, + pub uc_stack: ::stack_t, + pub uc_mcontext: mcontext_t, + pub uc_sigmask: ::sigset_t, + __private: [u8; 512], + } +} + pub const SYS_gettid: ::c_long = 186; pub const SYS_perf_event_open: ::c_long = 298; diff --git a/src/liblibc/src/unix/notbsd/linux/musl/mod.rs b/src/liblibc/src/unix/notbsd/linux/musl/mod.rs index cd04d851a5..631a39a493 100644 --- a/src/liblibc/src/unix/notbsd/linux/musl/mod.rs +++ b/src/liblibc/src/unix/notbsd/linux/musl/mod.rs @@ -154,15 +154,7 @@ pub const CLOCK_SGI_CYCLE: ::clockid_t = 10; pub const CLOCK_TAI: ::clockid_t = 11; extern { - pub fn getnameinfo(sa: *const ::sockaddr, - salen: ::socklen_t, - host: *mut ::c_char, - hostlen: ::socklen_t, - serv: *mut ::c_char, - sevlen: ::socklen_t, - flags: ::c_int) -> ::c_int; pub fn ioctl(fd: ::c_int, request: ::c_int, ...) -> ::c_int; - pub fn eventfd(init: ::c_uint, flags: ::c_int) -> ::c_int; pub fn ptrace(request: ::c_int, ...) -> ::c_long; } diff --git a/src/liblibc/src/unix/notbsd/linux/other/b32/x86.rs b/src/liblibc/src/unix/notbsd/linux/other/b32/x86.rs index 1b215a977d..e6d78deae3 100644 --- a/src/liblibc/src/unix/notbsd/linux/other/b32/x86.rs +++ b/src/liblibc/src/unix/notbsd/linux/other/b32/x86.rs @@ -1,9 +1,30 @@ pub type c_char = i8; pub type wchar_t = i32; +pub type greg_t = i32; s! { + pub struct _libc_fpreg { + pub significand: [u16; 4], + pub exponent: u16, + } + + pub struct _libc_fpstate { + pub cw: ::c_ulong, + pub sw: ::c_ulong, + pub tag: ::c_ulong, + pub ipoff: ::c_ulong, + pub cssel: ::c_ulong, + pub dataoff: ::c_ulong, + pub datasel: ::c_ulong, + pub _st: [_libc_fpreg; 8], + pub status: ::c_ulong, + } + pub struct mcontext_t { - __private: [u32; 22] + pub gregs: [greg_t; 19], + pub fpregs: *mut _libc_fpstate, + pub oldmask: ::c_ulong, + pub cr2: ::c_ulong, } pub struct ucontext_t { diff --git a/src/liblibc/src/unix/notbsd/linux/other/b64/x86_64.rs b/src/liblibc/src/unix/notbsd/linux/other/b64/x86_64.rs index 6903c62751..7103267d2c 100644 --- a/src/liblibc/src/unix/notbsd/linux/other/b64/x86_64.rs +++ b/src/liblibc/src/unix/notbsd/linux/other/b64/x86_64.rs @@ -4,6 +4,7 @@ pub type c_char = i8; pub type wchar_t = i32; pub type nlink_t = u64; pub type blksize_t = i64; +pub type greg_t = i64; s! { pub struct stat { @@ -52,8 +53,34 @@ s! 
{ __size: [u64; 7] } + pub struct _libc_fpxreg { + pub significand: [u16; 4], + pub exponent: u16, + __private: [u16; 3], + } + + pub struct _libc_xmmreg { + pub element: [u32; 4], + } + + pub struct _libc_fpstate { + pub cwd: u16, + pub swd: u16, + pub ftw: u16, + pub fop: u16, + pub rip: u64, + pub rdp: u64, + pub mxcsr: u32, + pub mxcr_mask: u32, + pub _st: [_libc_fpxreg; 8], + pub _xmm: [_libc_xmmreg; 16], + __private: [u64; 12], + } + pub struct mcontext_t { - __private: [u64; 32], + pub gregs: [greg_t; 23], + pub fpregs: *mut _libc_fpstate, + __private: [u64; 8], } pub struct ucontext_t { diff --git a/src/liblibc/src/unix/notbsd/linux/other/mod.rs b/src/liblibc/src/unix/notbsd/linux/other/mod.rs index 337b625936..63a3e92017 100644 --- a/src/liblibc/src/unix/notbsd/linux/other/mod.rs +++ b/src/liblibc/src/unix/notbsd/linux/other/mod.rs @@ -149,6 +149,24 @@ pub const LC_ADDRESS: ::c_int = 9; pub const LC_TELEPHONE: ::c_int = 10; pub const LC_MEASUREMENT: ::c_int = 11; pub const LC_IDENTIFICATION: ::c_int = 12; +pub const LC_PAPER_MASK: ::c_int = (1 << LC_PAPER); +pub const LC_NAME_MASK: ::c_int = (1 << LC_NAME); +pub const LC_ADDRESS_MASK: ::c_int = (1 << LC_ADDRESS); +pub const LC_TELEPHONE_MASK: ::c_int = (1 << LC_TELEPHONE); +pub const LC_MEASUREMENT_MASK: ::c_int = (1 << LC_MEASUREMENT); +pub const LC_IDENTIFICATION_MASK: ::c_int = (1 << LC_IDENTIFICATION); +pub const LC_ALL_MASK: ::c_int = ::LC_CTYPE_MASK + | ::LC_NUMERIC_MASK + | ::LC_TIME_MASK + | ::LC_COLLATE_MASK + | ::LC_MONETARY_MASK + | ::LC_MESSAGES_MASK + | LC_PAPER_MASK + | LC_NAME_MASK + | LC_ADDRESS_MASK + | LC_TELEPHONE_MASK + | LC_MEASUREMENT_MASK + | LC_IDENTIFICATION_MASK; pub const MAP_ANON: ::c_int = 0x0020; pub const MAP_ANONYMOUS: ::c_int = 0x0020; @@ -451,6 +469,7 @@ cfg_if! { } } +#[link(name = "util")] extern { pub fn sysctl(name: *mut ::c_int, namelen: ::c_int, @@ -469,14 +488,6 @@ extern { -> ::c_int>, pglob: *mut glob64_t) -> ::c_int; pub fn globfree64(pglob: *mut glob64_t); - pub fn getnameinfo(sa: *const ::sockaddr, - salen: ::socklen_t, - host: *mut ::c_char, - hostlen: ::socklen_t, - serv: *mut ::c_char, - sevlen: ::socklen_t, - flags: ::c_int) -> ::c_int; - pub fn eventfd(init: ::c_uint, flags: ::c_int) -> ::c_int; pub fn ptrace(request: ::c_uint, ...) -> ::c_long; pub fn pthread_attr_getaffinity_np(attr: *const ::pthread_attr_t, cpusetsize: ::size_t, diff --git a/src/liblibc/src/unix/notbsd/mod.rs b/src/liblibc/src/unix/notbsd/mod.rs index c2405e3d1b..94868d8a43 100644 --- a/src/liblibc/src/unix/notbsd/mod.rs +++ b/src/liblibc/src/unix/notbsd/mod.rs @@ -99,13 +99,21 @@ s! 
{ pub struct sched_param { pub sched_priority: ::c_int, - #[cfg(target_env = "musl")] + #[cfg(any(target_env = "musl", + target_env = "musleabi", + target_env = "musleabihf"))] pub sched_ss_low_priority: ::c_int, - #[cfg(target_env = "musl")] + #[cfg(any(target_env = "musl", + target_env = "musleabi", + target_env = "musleabihf"))] pub sched_ss_repl_period: ::timespec, - #[cfg(target_env = "musl")] + #[cfg(any(target_env = "musl", + target_env = "musleabi", + target_env = "musleabihf"))] pub sched_ss_init_budget: ::timespec, - #[cfg(target_env = "musl")] + #[cfg(any(target_env = "musl", + target_env = "musleabi", + target_env = "musleabihf"))] pub sched_ss_max_repl: ::c_int, } @@ -291,6 +299,13 @@ pub const LC_COLLATE: ::c_int = 3; pub const LC_MONETARY: ::c_int = 4; pub const LC_MESSAGES: ::c_int = 5; pub const LC_ALL: ::c_int = 6; +pub const LC_CTYPE_MASK: ::c_int = (1 << LC_CTYPE); +pub const LC_NUMERIC_MASK: ::c_int = (1 << LC_NUMERIC); +pub const LC_TIME_MASK: ::c_int = (1 << LC_TIME); +pub const LC_COLLATE_MASK: ::c_int = (1 << LC_COLLATE); +pub const LC_MONETARY_MASK: ::c_int = (1 << LC_MONETARY); +pub const LC_MESSAGES_MASK: ::c_int = (1 << LC_MESSAGES); +// LC_ALL_MASK defined per platform pub const MAP_FILE: ::c_int = 0x0000; pub const MAP_SHARED: ::c_int = 0x0001; @@ -630,6 +645,13 @@ pub const POSIX_FADV_NOREUSE: ::c_int = 5; pub const AT_FDCWD: ::c_int = -100; pub const AT_SYMLINK_NOFOLLOW: ::c_int = 0x100; +pub const LOG_CRON: ::c_int = 9 << 3; +pub const LOG_AUTHPRIV: ::c_int = 10 << 3; +pub const LOG_FTP: ::c_int = 11 << 3; +pub const LOG_PERROR: ::c_int = 0x20; + +pub const PIPE_BUF: usize = 4096; + f! { pub fn FD_CLR(fd: ::c_int, set: *mut fd_set) -> () { let fd = fd as usize; @@ -749,6 +771,35 @@ extern { pub fn futimens(fd: ::c_int, times: *const ::timespec) -> ::c_int; pub fn utimensat(dirfd: ::c_int, path: *const ::c_char, times: *const ::timespec, flag: ::c_int) -> ::c_int; + pub fn duplocale(base: ::locale_t) -> ::locale_t; + pub fn freelocale(loc: ::locale_t); + pub fn newlocale(mask: ::c_int, + locale: *const ::c_char, + base: ::locale_t) -> ::locale_t; + pub fn uselocale(loc: ::locale_t) -> ::locale_t; + pub fn creat64(path: *const c_char, mode: mode_t) -> ::c_int; + pub fn fstat64(fildes: ::c_int, buf: *mut stat64) -> ::c_int; + pub fn ftruncate64(fd: ::c_int, length: off64_t) -> ::c_int; + pub fn getrlimit64(resource: ::c_int, rlim: *mut rlimit64) -> ::c_int; + pub fn lseek64(fd: ::c_int, offset: off64_t, whence: ::c_int) -> off64_t; + pub fn lstat64(path: *const c_char, buf: *mut stat64) -> ::c_int; + pub fn mmap64(addr: *mut ::c_void, + len: ::size_t, + prot: ::c_int, + flags: ::c_int, + fd: ::c_int, + offset: off64_t) + -> *mut ::c_void; + pub fn open64(path: *const c_char, oflag: ::c_int, ...) -> ::c_int; + pub fn pread64(fd: ::c_int, buf: *mut ::c_void, count: ::size_t, + offset: off64_t) -> ::ssize_t; + pub fn pwrite64(fd: ::c_int, buf: *const ::c_void, count: ::size_t, + offset: off64_t) -> ::ssize_t; + pub fn readdir64_r(dirp: *mut ::DIR, entry: *mut ::dirent64, + result: *mut *mut ::dirent64) -> ::c_int; + pub fn setrlimit64(resource: ::c_int, rlim: *const rlimit64) -> ::c_int; + pub fn stat64(path: *const c_char, buf: *mut stat64) -> ::c_int; + pub fn eventfd(init: ::c_uint, flags: ::c_int) -> ::c_int; } cfg_if! 
{ diff --git a/src/liblibc/src/unix/solaris/mod.rs b/src/liblibc/src/unix/solaris/mod.rs index a47d1baaaa..0cb7e54436 100644 --- a/src/liblibc/src/unix/solaris/mod.rs +++ b/src/liblibc/src/unix/solaris/mod.rs @@ -31,6 +31,7 @@ pub type pthread_t = ::uintptr_t; pub type pthread_key_t = ::c_uint; pub type blksize_t = u32; pub type fflags_t = u32; +pub type nl_item = ::c_int; pub enum timezone {} @@ -272,8 +273,121 @@ s! { pub c_lflag: ::tcflag_t, pub c_cc: [::cc_t; ::NCCS] } + + pub struct lconv { + pub decimal_point: *mut ::c_char, + pub thousands_sep: *mut ::c_char, + pub grouping: *mut ::c_char, + pub int_curr_symbol: *mut ::c_char, + pub currency_symbol: *mut ::c_char, + pub mon_decimal_point: *mut ::c_char, + pub mon_thousands_sep: *mut ::c_char, + pub mon_grouping: *mut ::c_char, + pub positive_sign: *mut ::c_char, + pub negative_sign: *mut ::c_char, + pub int_frac_digits: ::c_char, + pub frac_digits: ::c_char, + pub p_cs_precedes: ::c_char, + pub p_sep_by_space: ::c_char, + pub n_cs_precedes: ::c_char, + pub n_sep_by_space: ::c_char, + pub p_sign_posn: ::c_char, + pub n_sign_posn: ::c_char, + pub int_p_cs_precedes: ::c_char, + pub int_p_sep_by_space: ::c_char, + pub int_n_cs_precedes: ::c_char, + pub int_n_sep_by_space: ::c_char, + pub int_p_sign_posn: ::c_char, + pub int_n_sign_posn: ::c_char, + } } +pub const LC_CTYPE: ::c_int = 0; +pub const LC_NUMERIC: ::c_int = 1; +pub const LC_TIME: ::c_int = 2; +pub const LC_COLLATE: ::c_int = 3; +pub const LC_MONETARY: ::c_int = 4; +pub const LC_MESSAGES: ::c_int = 5; +pub const LC_ALL: ::c_int = 6; +pub const LC_CTYPE_MASK: ::c_int = (1 << LC_CTYPE); +pub const LC_NUMERIC_MASK: ::c_int = (1 << LC_NUMERIC); +pub const LC_TIME_MASK: ::c_int = (1 << LC_TIME); +pub const LC_COLLATE_MASK: ::c_int = (1 << LC_COLLATE); +pub const LC_MONETARY_MASK: ::c_int = (1 << LC_MONETARY); +pub const LC_MESSAGES_MASK: ::c_int = (1 << LC_MESSAGES); +pub const LC_ALL_MASK: ::c_int = LC_CTYPE_MASK + | LC_NUMERIC_MASK + | LC_TIME_MASK + | LC_COLLATE_MASK + | LC_MONETARY_MASK + | LC_MESSAGES_MASK; + +pub const DAY_1: ::nl_item = 1; +pub const DAY_2: ::nl_item = 2; +pub const DAY_3: ::nl_item = 3; +pub const DAY_4: ::nl_item = 4; +pub const DAY_5: ::nl_item = 5; +pub const DAY_6: ::nl_item = 6; +pub const DAY_7: ::nl_item = 7; + +pub const ABDAY_1: ::nl_item = 8; +pub const ABDAY_2: ::nl_item = 9; +pub const ABDAY_3: ::nl_item = 10; +pub const ABDAY_4: ::nl_item = 11; +pub const ABDAY_5: ::nl_item = 12; +pub const ABDAY_6: ::nl_item = 13; +pub const ABDAY_7: ::nl_item = 14; + +pub const MON_1: ::nl_item = 15; +pub const MON_2: ::nl_item = 16; +pub const MON_3: ::nl_item = 17; +pub const MON_4: ::nl_item = 18; +pub const MON_5: ::nl_item = 19; +pub const MON_6: ::nl_item = 20; +pub const MON_7: ::nl_item = 21; +pub const MON_8: ::nl_item = 22; +pub const MON_9: ::nl_item = 23; +pub const MON_10: ::nl_item = 24; +pub const MON_11: ::nl_item = 25; +pub const MON_12: ::nl_item = 26; + +pub const ABMON_1: ::nl_item = 27; +pub const ABMON_2: ::nl_item = 28; +pub const ABMON_3: ::nl_item = 29; +pub const ABMON_4: ::nl_item = 30; +pub const ABMON_5: ::nl_item = 31; +pub const ABMON_6: ::nl_item = 32; +pub const ABMON_7: ::nl_item = 33; +pub const ABMON_8: ::nl_item = 34; +pub const ABMON_9: ::nl_item = 35; +pub const ABMON_10: ::nl_item = 36; +pub const ABMON_11: ::nl_item = 37; +pub const ABMON_12: ::nl_item = 38; + +pub const RADIXCHAR: ::nl_item = 39; +pub const THOUSEP: ::nl_item = 40; +pub const YESSTR: ::nl_item = 41; +pub const NOSTR: ::nl_item = 42; +pub const 
CRNCYSTR: ::nl_item = 43; + +pub const D_T_FMT: ::nl_item = 44; +pub const D_FMT: ::nl_item = 45; +pub const T_FMT: ::nl_item = 46; +pub const AM_STR: ::nl_item = 47; +pub const PM_STR: ::nl_item = 48; + +pub const CODESET: ::nl_item = 49; +pub const T_FMT_AMPM: ::nl_item = 50; +pub const ERA: ::nl_item = 51; +pub const ERA_D_FMT: ::nl_item = 52; +pub const ERA_D_T_FMT: ::nl_item = 53; +pub const ERA_T_FMT: ::nl_item = 54; +pub const ALT_DIGITS: ::nl_item = 55; +pub const YESEXPR: ::nl_item = 56; +pub const NOEXPR: ::nl_item = 57; +pub const _DATE_FMT: ::nl_item = 58; +pub const MAXSTRMSG: ::nl_item = 58; + pub const SA_ONSTACK: ::c_int = 0x00000001; pub const SA_RESETHAND: ::c_int = 0x00000002; pub const SA_RESTART: ::c_int = 0x00000004; @@ -692,6 +806,8 @@ pub const _RWL_MAGIC: u16 = 0x5257; // RW pub const NCCS: usize = 19; +pub const LOG_CRON: ::c_int = 15 << 3; + pub const PTHREAD_MUTEX_INITIALIZER: pthread_mutex_t = pthread_mutex_t { __pthread_mutex_flag1: 0, __pthread_mutex_flag2: 0, @@ -778,5 +894,15 @@ extern { buflen: ::size_t) -> *const passwd; pub fn readdir(dirp: *mut ::DIR) -> *const ::dirent; pub fn fdatasync(fd: ::c_int) -> ::c_int; + pub fn nl_langinfo_l(item: ::nl_item, locale: ::locale_t) -> *mut ::c_char; + pub fn duplocale(base: ::locale_t) -> ::locale_t; + pub fn freelocale(loc: ::locale_t); + pub fn newlocale(mask: ::c_int, + locale: *const ::c_char, + base: ::locale_t) -> ::locale_t; + pub fn uselocale(loc: ::locale_t) -> ::locale_t; + pub fn getprogname() -> *const ::c_char; + pub fn setprogname(name: *const ::c_char); + pub fn getloadavg(loadavg: *mut ::c_double, nelem: ::c_int) -> ::c_int; } diff --git a/src/liblog/lib.rs b/src/liblog/lib.rs index 011f5a744d..a71f6efe54 100644 --- a/src/liblog/lib.rs +++ b/src/liblog/lib.rs @@ -170,10 +170,7 @@ #![deny(missing_docs)] #![cfg_attr(not(stage0), deny(warnings))] -#![feature(box_syntax)] -#![feature(const_fn)] #![feature(staged_api)] -#![feature(static_mutex)] use std::cell::RefCell; use std::fmt; @@ -181,9 +178,8 @@ use std::io::{self, Stderr}; use std::io::prelude::*; use std::mem; use std::env; -use std::ptr; use std::slice; -use std::sync::{Once, StaticMutex}; +use std::sync::{Once, Mutex, ONCE_INIT}; use directive::LOG_LEVEL_NAMES; @@ -199,18 +195,13 @@ pub const MAX_LOG_LEVEL: u32 = 255; /// The default logging level of a crate if no other is specified. const DEFAULT_LOG_LEVEL: u32 = 1; -static LOCK: StaticMutex = StaticMutex::new(); +static mut LOCK: *mut Mutex<(Vec, Option)> = 0 as *mut _; /// An unsafe constant that is the maximum logging level of any module /// specified. This is the first line of defense to determining whether a /// logging statement should be run. static mut LOG_LEVEL: u32 = MAX_LOG_LEVEL; -static mut DIRECTIVES: *mut Vec = ptr::null_mut(); - -/// Optional filter. -static mut FILTER: *mut String = ptr::null_mut(); - /// Debug log level pub const DEBUG: u32 = 4; /// Info log level @@ -287,14 +278,10 @@ pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) { // Test the literal string from args against the current filter, if there // is one. 
unsafe { - let _g = LOCK.lock(); - match FILTER as usize { - 0 => {} - n => { - let filter = mem::transmute::<_, &String>(n); - if !args.to_string().contains(filter) { - return; - } + let filter = (*LOCK).lock().unwrap(); + if let Some(ref filter) = filter.1 { + if !args.to_string().contains(filter) { + return; } } } @@ -302,10 +289,10 @@ pub fn log(level: u32, loc: &'static LogLocation, args: fmt::Arguments) { // Completely remove the local logger from TLS in case anyone attempts to // frob the slot while we're doing the logging. This will destroy any logger // set during logging. - let mut logger: Box = LOCAL_LOGGER.with(|s| s.borrow_mut().take()) - .unwrap_or_else(|| { - box DefaultLogger { handle: io::stderr() } - }); + let logger = LOCAL_LOGGER.with(|s| s.borrow_mut().take()); + let mut logger = logger.unwrap_or_else(|| { + Box::new(DefaultLogger { handle: io::stderr() }) + }); logger.log(&LogRecord { level: LogLevel(level), args: args, @@ -363,7 +350,7 @@ pub struct LogLocation { /// module's log statement should be emitted or not. #[doc(hidden)] pub fn mod_enabled(level: u32, module: &str) -> bool { - static INIT: Once = Once::new(); + static INIT: Once = ONCE_INIT; INIT.call_once(init); // It's possible for many threads are in this function, only one of them @@ -378,10 +365,9 @@ pub fn mod_enabled(level: u32, module: &str) -> bool { // This assertion should never get tripped unless we're in an at_exit // handler after logging has been torn down and a logging attempt was made. - let _g = LOCK.lock(); unsafe { - assert!(DIRECTIVES as usize != 0); - enabled(level, module, (*DIRECTIVES).iter()) + let directives = (*LOCK).lock().unwrap(); + enabled(level, module, directives.0.iter()) } } @@ -422,14 +408,8 @@ fn init() { unsafe { LOG_LEVEL = max_level; - assert!(FILTER.is_null()); - match filter { - Some(f) => FILTER = Box::into_raw(box f), - None => {} - } - - assert!(DIRECTIVES.is_null()); - DIRECTIVES = Box::into_raw(box directives); + assert!(LOCK.is_null()); + LOCK = Box::into_raw(Box::new(Mutex::new((directives, filter)))); } } diff --git a/src/libpanic_abort/Cargo.toml b/src/libpanic_abort/Cargo.toml new file mode 100644 index 0000000000..9d62be64fc --- /dev/null +++ b/src/libpanic_abort/Cargo.toml @@ -0,0 +1,12 @@ +[package] +authors = ["The Rust Project Developers"] +name = "panic_abort" +version = "0.0.0" + +[lib] +path = "lib.rs" +test = false + +[dependencies] +core = { path = "../libcore" } +libc = { path = "../rustc/libc_shim" } diff --git a/src/libpanic_abort/lib.rs b/src/libpanic_abort/lib.rs new file mode 100644 index 0000000000..c085ddeb75 --- /dev/null +++ b/src/libpanic_abort/lib.rs @@ -0,0 +1,135 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Implementation of Rust panics via process aborts +//! +//! When compared to the implementation via unwinding, this crate is *much* +//! simpler! That being said, it's not quite as versatile, but here goes! 
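The liblog hunk above drops `StaticMutex` and the raw `FILTER`/`DIRECTIVES` pointers in favour of a single heap-allocated `Mutex` holding both the directive list and the optional filter string, leaked via `Box::into_raw` behind a `static mut` pointer and initialised exactly once. A minimal sketch of that pattern follows; the `Config` type and its field names are placeholders rather than the crate's own, and `ONCE_INIT` mirrors the patch, which targets a compiler where `Once::new()` cannot yet be used in a `static`.

    use std::sync::{Mutex, Once, ONCE_INIT};

    // Placeholder standing in for liblog's directive list and optional filter.
    struct Config {
        directives: Vec<String>,
        filter: Option<String>,
    }

    // One leaked Mutex behind a raw pointer: it is never freed, so the lock
    // stays valid for late logging (e.g. from at_exit handlers) and no const
    // Mutex constructor is required.
    static mut CONFIG: *mut Mutex<Config> = 0 as *mut _;
    static INIT: Once = ONCE_INIT;

    fn config() -> &'static Mutex<Config> {
        INIT.call_once(|| unsafe {
            CONFIG = Box::into_raw(Box::new(Mutex::new(Config {
                directives: Vec::new(),
                filter: None,
            })));
        });
        unsafe { &*CONFIG }
    }

    fn main() {
        let guard = config().lock().unwrap();
        println!("directives: {}, filter set: {}",
                 guard.directives.len(), guard.filter.is_some());
    }

Leaking the allocation is deliberate: it sidesteps the need for a const `Mutex` constructor on stable Rust of that era and keeps the lock usable for the whole lifetime of the process.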
+ +#![no_std] +#![crate_name = "panic_abort"] +#![crate_type = "rlib"] +#![unstable(feature = "panic_abort", issue = "32837")] +#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/", + issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")] +#![cfg_attr(not(stage0), deny(warnings))] + +#![feature(staged_api)] + +#![cfg_attr(not(stage0), panic_runtime)] +#![cfg_attr(not(stage0), feature(panic_runtime))] +#![cfg_attr(unix, feature(libc))] +#![cfg_attr(windows, feature(core_intrinsics))] + +// Rust's "try" function, but if we're aborting on panics we just call the +// function as there's nothing else we need to do here. +#[no_mangle] +pub unsafe extern fn __rust_maybe_catch_panic(f: fn(*mut u8), + data: *mut u8, + _data_ptr: *mut usize, + _vtable_ptr: *mut usize) -> u32 { + f(data); + 0 +} + +// "Leak" the payload and shim to the relevant abort on the platform in +// question. +// +// For Unix we just use `abort` from libc as it'll trigger debuggers, core +// dumps, etc, as one might expect. On Windows, however, the best option we have +// is the `__fastfail` intrinsics, but that's unfortunately not defined in LLVM, +// and the `RaiseFailFastException` function isn't available until Windows 7 +// which would break compat with XP. For now just use `intrinsics::abort` which +// will kill us with an illegal instruction, which will do a good enough job for +// now hopefully. +#[no_mangle] +pub unsafe extern fn __rust_start_panic(_data: usize, _vtable: usize) -> u32 { + return abort(); + + #[cfg(unix)] + unsafe fn abort() -> ! { + extern crate libc; + libc::abort(); + } + + #[cfg(windows)] + unsafe fn abort() -> ! { + core::intrinsics::abort(); + } +} + +// This... is a bit of an oddity. The tl;dr; is that this is required to link +// correctly, the longer explanation is below. +// +// Right now the binaries of libcore/libstd that we ship are all compiled with +// `-C panic=unwind`. This is done to ensure that the binaries are maximally +// compatible with as many situations as possible. The compiler, however, +// requires a "personality function" for all functions compiled with `-C +// panic=unwind`. This personality function is hardcoded to the symbol +// `rust_eh_personality` and is defined by the `eh_personality` lang item. +// +// So... why not just define that lang item here? Good question! The way that +// panic runtimes are linked in is actually a little subtle in that they're +// "sort of" in the compiler's crate store, but only actually linked if another +// isn't actually linked. This ends up meaning that both this crate and the +// panic_unwind crate can appear in the compiler's crate store, and if both +// define the `eh_personality` lang item then that'll hit an error. +// +// To handle this the compiler only requires the `eh_personality` is defined if +// the panic runtime being linked in is the unwinding runtime, and otherwise +// it's not required to be defined (rightfully so). In this case, however, this +// library just defines this symbol so there's at least some personality +// somewhere. +// +// Essentially this symbol is just defined to get wired up to libcore/libstd +// binaries, but it should never be called as we don't link in an unwinding +// runtime at all. 
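The comments above pin down the abort runtime's contract: `__rust_maybe_catch_panic` simply runs the closure and reports success, and `__rust_start_panic` never hands control back, so no panic is ever caught. The observable difference shows up directly in `std::panic::catch_unwind`; a small demonstration, assuming the same program is built once with `-C panic=unwind` and once with `-C panic=abort`:

    use std::panic;

    fn main() {
        // With the unwinding runtime this prints `caught: true`; with the
        // abort runtime added above, the process dies inside the closure and
        // the final println! is never reached.
        let result = panic::catch_unwind(|| {
            panic!("boom");
        });
        println!("caught: {}", result.is_err());
    }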
+#[cfg(not(stage0))] +pub mod personalities { + + #[no_mangle] + #[cfg(not(all(target_os = "windows", + target_env = "gnu", + target_arch = "x86_64")))] + pub extern fn rust_eh_personality() {} + + // On x86_64-pc-windows-gnu we use our own personality function that needs + // to return `ExceptionContinueSearch` as we're passing on all our frames. + #[no_mangle] + #[cfg(all(target_os = "windows", + target_env = "gnu", + target_arch = "x86_64"))] + pub extern fn rust_eh_personality(_record: usize, + _frame: usize, + _context: usize, + _dispatcher: usize) -> u32 { + 1 // `ExceptionContinueSearch` + } + + // Similar to above, this corresponds to the `eh_unwind_resume` lang item + // that's only used on Windows currently. + // + // Note that we don't execute landing pads, so this is never called, so it's + // body is empty. + #[no_mangle] + #[cfg(all(target_os = "windows", target_env = "gnu"))] + pub extern fn rust_eh_unwind_resume() {} + + // These two are called by our startup objects on i686-pc-windows-gnu, but + // they don't need to do anything so the bodies are nops. + #[no_mangle] + #[cfg(all(target_os = "windows", target_env = "gnu", target_arch = "x86"))] + pub extern fn rust_eh_register_frames() {} + #[no_mangle] + #[cfg(all(target_os = "windows", target_env = "gnu", target_arch = "x86"))] + pub extern fn rust_eh_unregister_frames() {} +} diff --git a/src/libpanic_unwind/Cargo.lock b/src/libpanic_unwind/Cargo.lock new file mode 100644 index 0000000000..20d826d4a4 --- /dev/null +++ b/src/libpanic_unwind/Cargo.lock @@ -0,0 +1,27 @@ +[root] +name = "panic_unwind" +version = "0.0.0" +dependencies = [ + "alloc 0.0.0", + "core 0.0.0", + "libc 0.0.0", +] + +[[package]] +name = "alloc" +version = "0.0.0" +dependencies = [ + "core 0.0.0", +] + +[[package]] +name = "core" +version = "0.0.0" + +[[package]] +name = "libc" +version = "0.0.0" +dependencies = [ + "core 0.0.0", +] + diff --git a/src/libpanic_unwind/Cargo.toml b/src/libpanic_unwind/Cargo.toml new file mode 100644 index 0000000000..18f37a8bb1 --- /dev/null +++ b/src/libpanic_unwind/Cargo.toml @@ -0,0 +1,14 @@ +[package] +authors = ["The Rust Project Developers"] +name = "panic_unwind" +version = "0.0.0" + +[lib] +path = "lib.rs" +test = false + +[dependencies] +alloc = { path = "../liballoc" } +core = { path = "../libcore" } +libc = { path = "../rustc/libc_shim" } +unwind = { path = "../libunwind" } diff --git a/src/libstd/sys/common/dwarf/eh.rs b/src/libpanic_unwind/dwarf/eh.rs similarity index 99% rename from src/libstd/sys/common/dwarf/eh.rs rename to src/libpanic_unwind/dwarf/eh.rs index 319be245bd..1c3fca98a1 100644 --- a/src/libstd/sys/common/dwarf/eh.rs +++ b/src/libpanic_unwind/dwarf/eh.rs @@ -21,8 +21,7 @@ #![allow(non_upper_case_globals)] #![allow(unused)] -use prelude::v1::*; -use sys_common::dwarf::DwarfReader; +use dwarf::DwarfReader; use core::mem; pub const DW_EH_PE_omit : u8 = 0xFF; diff --git a/src/libstd/sys/common/dwarf/mod.rs b/src/libpanic_unwind/dwarf/mod.rs similarity index 99% rename from src/libstd/sys/common/dwarf/mod.rs rename to src/libpanic_unwind/dwarf/mod.rs index 822826bcc8..cde21f9081 100644 --- a/src/libstd/sys/common/dwarf/mod.rs +++ b/src/libpanic_unwind/dwarf/mod.rs @@ -18,7 +18,6 @@ pub mod eh; -use prelude::v1::*; use core::mem; pub struct DwarfReader { diff --git a/src/libstd/sys/common/unwind/gcc.rs b/src/libpanic_unwind/gcc.rs similarity index 68% rename from src/libstd/sys/common/unwind/gcc.rs rename to src/libpanic_unwind/gcc.rs index da7a340af3..50b2e1534d 100644 --- 
a/src/libstd/sys/common/unwind/gcc.rs +++ b/src/libpanic_unwind/gcc.rs @@ -8,30 +8,76 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Implementation of panics backed by libgcc/libunwind (in some form) +//! +//! For background on exception handling and stack unwinding please see +//! "Exception Handling in LLVM" (llvm.org/docs/ExceptionHandling.html) and +//! documents linked from it. +//! These are also good reads: +//! http://mentorembedded.github.io/cxx-abi/abi-eh.html +//! http://monoinfinito.wordpress.com/series/exception-handling-in-c/ +//! http://www.airs.com/blog/index.php?s=exception+frames +//! +//! ## A brief summary +//! +//! Exception handling happens in two phases: a search phase and a cleanup +//! phase. +//! +//! In both phases the unwinder walks stack frames from top to bottom using +//! information from the stack frame unwind sections of the current process's +//! modules ("module" here refers to an OS module, i.e. an executable or a +//! dynamic library). +//! +//! For each stack frame, it invokes the associated "personality routine", whose +//! address is also stored in the unwind info section. +//! +//! In the search phase, the job of a personality routine is to examine +//! exception object being thrown, and to decide whether it should be caught at +//! that stack frame. Once the handler frame has been identified, cleanup phase +//! begins. +//! +//! In the cleanup phase, the unwinder invokes each personality routine again. +//! This time it decides which (if any) cleanup code needs to be run for +//! the current stack frame. If so, the control is transferred to a special +//! branch in the function body, the "landing pad", which invokes destructors, +//! frees memory, etc. At the end of the landing pad, control is transferred +//! back to the unwinder and unwinding resumes. +//! +//! Once stack has been unwound down to the handler frame level, unwinding stops +//! and the last personality routine transfers control to the catch block. +//! +//! ## `eh_personality` and `eh_unwind_resume` +//! +//! These language items are used by the compiler when generating unwind info. +//! The first one is the personality routine described above. The second one +//! allows compilation target to customize the process of resuming unwind at the +//! end of the landing pads. `eh_unwind_resume` is used only if +//! `custom_unwind_resume` flag in the target options is set. + #![allow(private_no_mangle_fns)] -use prelude::v1::*; +use core::any::Any; +use alloc::boxed::Box; -use any::Any; -use sys_common::libunwind as uw; +use unwind as uw; +#[repr(C)] struct Exception { - uwe: uw::_Unwind_Exception, - cause: Option>, + _uwe: uw::_Unwind_Exception, + cause: Option>, } -pub unsafe fn panic(data: Box) -> ! 
{ - let exception: Box<_> = box Exception { - uwe: uw::_Unwind_Exception { +pub unsafe fn panic(data: Box) -> u32 { + let exception = Box::new(Exception { + _uwe: uw::_Unwind_Exception { exception_class: rust_exception_class(), exception_cleanup: exception_cleanup, private: [0; uw::unwinder_private_data_size], }, cause: Some(data), - }; + }); let exception_param = Box::into_raw(exception) as *mut uw::_Unwind_Exception; - let error = uw::_Unwind_RaiseException(exception_param); - rtabort!("Could not unwind stack, error = {}", error as isize); + return uw::_Unwind_RaiseException(exception_param) as u32; extern fn exception_cleanup(_unwind_code: uw::_Unwind_Reason_Code, exception: *mut uw::_Unwind_Exception) { @@ -45,7 +91,7 @@ pub fn payload() -> *mut u8 { 0 as *mut u8 } -pub unsafe fn cleanup(ptr: *mut u8) -> Box { +pub unsafe fn cleanup(ptr: *mut u8) -> Box { let my_ep = ptr as *mut Exception; let cause = (*my_ep).cause.take(); uw::_Unwind_DeleteException(ptr as *mut _); @@ -59,7 +105,7 @@ fn rust_exception_class() -> uw::_Unwind_Exception_Class { 0x4d4f5a_00_52555354 } -// We could implement our personality routine in pure Rust, however exception +// We could implement our personality routine in Rust, however exception // info decoding is tedious. More importantly, personality routines have to // handle various platform quirks, which are not fun to maintain. For this // reason, we attempt to reuse personality routine of the C language: @@ -79,10 +125,9 @@ fn rust_exception_class() -> uw::_Unwind_Exception_Class { // See also: rustc_trans::trans::intrinsic::trans_gnu_try #[cfg(all(not(target_arch = "arm"), - not(all(windows, target_arch = "x86_64")), - not(test)))] + not(all(windows, target_arch = "x86_64"))))] pub mod eabi { - use sys_common::libunwind as uw; + use unwind as uw; use libc::c_int; extern { @@ -136,9 +181,9 @@ pub mod eabi { // iOS on armv7 is using SjLj exceptions and therefore requires to use // a specialized personality routine: __gcc_personality_sj0 -#[cfg(all(target_os = "ios", target_arch = "arm", not(test)))] +#[cfg(all(target_os = "ios", target_arch = "arm"))] pub mod eabi { - use sys_common::libunwind as uw; + use unwind as uw; use libc::c_int; extern { @@ -191,9 +236,9 @@ pub mod eabi { // ARM EHABI uses a slightly different personality routine signature, // but otherwise works the same. -#[cfg(all(target_arch = "arm", not(target_os = "ios"), not(test)))] +#[cfg(all(target_arch = "arm", not(target_os = "ios")))] pub mod eabi { - use sys_common::libunwind as uw; + use unwind as uw; use libc::c_int; extern { @@ -242,19 +287,31 @@ pub mod eabi { } // See docs in the `unwind` module. -#[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu", not(test)))] +#[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))] #[lang = "eh_unwind_resume"] #[unwind] unsafe extern fn rust_eh_unwind_resume(panic_ctx: *mut u8) -> ! { uw::_Unwind_Resume(panic_ctx as *mut uw::_Unwind_Exception); } +// Frame unwind info registration +// +// Each module's image contains a frame unwind info section (usually +// ".eh_frame"). When a module is loaded/unloaded into the process, the +// unwinder must be informed about the location of this section in memory. The +// methods of achieving that vary by the platform. On some (e.g. 
Linux), the +// unwinder can discover unwind info sections on its own (by dynamically +// enumerating currently loaded modules via the dl_iterate_phdr() API and +// finding their ".eh_frame" sections); others, like Windows, require modules +// to actively register their unwind info sections via unwinder API. +// +// This module defines two symbols which are referenced and called from +// rsbegin.rs to register our information with the GCC runtime. The +// implementation of stack unwinding is (for now) deferred to libgcc_eh, however +// Rust crates use these Rust-specific entry points to avoid potential clashes +// with any GCC runtime. #[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))] pub mod eh_frame_registry { - // The implementation of stack unwinding is (for now) deferred to libgcc_eh, however Rust - // crates use these Rust-specific entry points to avoid potential clashes with GCC runtime. - // See also: rtbegin.rs, `unwind` module. - #[link(name = "gcc_eh")] #[cfg(not(cargobuild))] extern {} @@ -263,16 +320,14 @@ pub mod eh_frame_registry { fn __register_frame_info(eh_frame_begin: *const u8, object: *mut u8); fn __deregister_frame_info(eh_frame_begin: *const u8, object: *mut u8); } - #[cfg(not(test))] + #[no_mangle] - #[unstable(feature = "libstd_sys_internals", issue = "0")] pub unsafe extern fn rust_eh_register_frames(eh_frame_begin: *const u8, object: *mut u8) { __register_frame_info(eh_frame_begin, object); } - #[cfg(not(test))] + #[no_mangle] - #[unstable(feature = "libstd_sys_internals", issue = "0")] pub unsafe extern fn rust_eh_unregister_frames(eh_frame_begin: *const u8, object: *mut u8) { __deregister_frame_info(eh_frame_begin, object); diff --git a/src/libpanic_unwind/lib.rs b/src/libpanic_unwind/lib.rs new file mode 100644 index 0000000000..17cbd2e0d4 --- /dev/null +++ b/src/libpanic_unwind/lib.rs @@ -0,0 +1,109 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Implementation of panics via stack unwinding +//! +//! This crate is an implementation of panics in Rust using the "most native" stack +//! unwinding mechanism of the platform this is being compiled for. This +//! essentially gets categorized into three buckets currently: +//! +//! 1. MSVC targets use SEH in the `seh.rs` file. +//! 2. The 64-bit MinGW target half-uses SEH and half-uses gcc-like information +//! in the `seh64_gnu.rs` module. +//! 3. All other targets use libunwind/libgcc in the `gcc/mod.rs` module. +//! +//! More documentation about each implementation can be found in the respective +//! module.
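To make the search/cleanup protocol described in the gcc.rs and lib.rs docs above concrete, here is a minimal, hypothetical sketch of the personality-routine shape the unwinder expects. The signature and numeric constants follow the Itanium C++ ABI / libunwind convention; the identifiers are illustrative and are not the `unwind` crate's actual bindings.

```rust
// Sketch only: a do-nothing personality routine in the Itanium C++ ABI shape.
// The real Rust personality (gcc.rs above) delegates to the C personality
// rather than decoding the LSDA itself.
#![allow(dead_code)]

type UnwindAction = i32;
const UA_SEARCH_PHASE: UnwindAction = 1; // phase 1: find the handler frame
const UA_CLEANUP_PHASE: UnwindAction = 2; // phase 2: run landing pads

#[repr(i32)]
enum UnwindReasonCode {
    HandlerFound = 6,   // phase 1 answer: this frame wants the exception
    InstallContext = 7, // phase 2 answer: jump into this frame's landing pad
    ContinueUnwind = 8, // nothing to do in this frame, keep walking the stack
}

enum UnwindException {} // opaque, owned by the unwinder
enum UnwindContext {}   // opaque, owned by the unwinder

// Called by the unwinder once per stack frame in each phase.
unsafe extern "C" fn toy_personality(_version: i32,
                                     actions: UnwindAction,
                                     _exception_class: u64,
                                     _exception: *mut UnwindException,
                                     _context: *mut UnwindContext)
                                     -> UnwindReasonCode {
    if actions & UA_SEARCH_PHASE != 0 {
        // A real routine would consult this frame's LSDA to decide whether a
        // catch handler lives here; this toy one never claims the exception.
        UnwindReasonCode::ContinueUnwind
    } else {
        // Cleanup phase: no landing pad to install, so unwinding continues.
        UnwindReasonCode::ContinueUnwind
    }
}
```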
+ +#![no_std] +#![crate_name = "panic_unwind"] +#![crate_type = "rlib"] +#![unstable(feature = "panic_unwind", issue = "32837")] +#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png", + html_favicon_url = "https://doc.rust-lang.org/favicon.ico", + html_root_url = "https://doc.rust-lang.org/nightly/", + issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")] +#![cfg_attr(not(stage0), deny(warnings))] + +#![feature(alloc)] +#![feature(core_intrinsics)] +#![feature(lang_items)] +#![feature(libc)] +#![feature(panic_unwind)] +#![feature(raw)] +#![feature(staged_api)] +#![feature(unwind_attributes)] +#![cfg_attr(target_env = "msvc", feature(raw))] + +#![cfg_attr(not(stage0), panic_runtime)] +#![cfg_attr(not(stage0), feature(panic_runtime))] + +extern crate alloc; +extern crate libc; +extern crate unwind; + +use core::intrinsics; +use core::mem; +use core::raw; + +// Rust runtime's startup objects depend on these symbols, so make them public. +#[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))] +pub use imp::eh_frame_registry::*; + +// *-pc-windows-msvc +#[cfg(target_env = "msvc")] +#[path = "seh.rs"] +mod imp; + +// x86_64-pc-windows-gnu +#[cfg(all(windows, target_arch = "x86_64", target_env = "gnu"))] +#[path = "seh64_gnu.rs"] +mod imp; + +// i686-pc-windows-gnu and all others +#[cfg(any(unix, all(windows, target_arch = "x86", target_env = "gnu")))] +#[path = "gcc.rs"] +mod imp; + +mod dwarf; +mod windows; + +// Entry point for catching an exception, implemented using the `try` intrinsic +// in the compiler. +// +// The interaction between the `payload` function and the compiler is pretty +// hairy and tightly coupled, for more information see the compiler's +// implementation of this. +#[no_mangle] +pub unsafe extern fn __rust_maybe_catch_panic(f: fn(*mut u8), + data: *mut u8, + data_ptr: *mut usize, + vtable_ptr: *mut usize) + -> u32 { + let mut payload = imp::payload(); + if intrinsics::try(f, data, &mut payload as *mut _ as *mut _) == 0 { + 0 + } else { + let obj = mem::transmute::<_, raw::TraitObject>(imp::cleanup(payload)); + *data_ptr = obj.data as usize; + *vtable_ptr = obj.vtable as usize; + 1 + } +} + +// Entry point for raising an exception, just delegates to the platform-specific +// implementation. +#[no_mangle] +pub unsafe extern fn __rust_start_panic(data: usize, vtable: usize) -> u32 { + imp::panic(mem::transmute(raw::TraitObject { + data: data as *mut (), + vtable: vtable as *mut (), + })) +} diff --git a/src/libpanic_unwind/seh.rs b/src/libpanic_unwind/seh.rs new file mode 100644 index 0000000000..04a3f7b966 --- /dev/null +++ b/src/libpanic_unwind/seh.rs @@ -0,0 +1,326 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Windows SEH +//! +//! On Windows (currently only on MSVC), the default exception handling +//! mechanism is Structured Exception Handling (SEH). This is quite different +//! than Dwarf-based exception handling (e.g. what other unix platforms use) in +//! terms of compiler internals, so LLVM is required to have a good deal of +//! extra support for SEH. +//! +//! In a nutshell, what happens here is: +//! +//! 1. The `panic` function calls the standard Windows function +//! 
`_CxxThrowException` to throw a C++-like exception, triggering the +//! unwinding process. +//! 2. All landing pads generated by the compiler use the personality function +//! `__CxxFrameHandler3`, a function in the CRT, and the unwinding code in +//! Windows will use this personality function to execute all cleanup code on +//! the stack. +//! 3. All compiler-generated calls to `invoke` have a landing pad set as a +//! `cleanuppad` LLVM instruction, which indicates the start of the cleanup +//! routine. The personality (in step 2, defined in the CRT) is responsible +//! for running the cleanup routines. +//! 4. Eventually the "catch" code in the `try` intrinsic (generated by the +//! compiler) is executed and indicates that control should come back to +//! Rust. This is done via a `catchswitch` plus a `catchpad` instruction in +//! LLVM IR terms, finally returning normal control to the program with a +//! `catchret` instruction. +//! +//! Some specific differences from the gcc-based exception handling are: +//! +//! * Rust has no custom personality function, it is instead *always* +//! `__CxxFrameHandler3`. Additionally, no extra filtering is performed, so we +//! end up catching any C++ exceptions that happen to look like the kind we're +//! throwing. Note that throwing an exception into Rust is undefined behavior +//! anyway, so this should be fine. +//! * We've got some data to transmit across the unwinding boundary, +//! specifically a `Box`. Like with Dwarf exceptions +//! these two pointers are stored as a payload in the exception itself. On +//! MSVC, however, there's no need for an extra heap allocation because the +//! call stack is preserved while filter functions are being executed. This +//! means that the pointers are passed directly to `_CxxThrowException` which +//! are then recovered in the filter function to be written to the stack frame +//! of the `try` intrinsic. +//! +//! [win64]: http://msdn.microsoft.com/en-us/library/1eyas8tf.aspx +//! [llvm]: http://llvm.org/docs/ExceptionHandling.html#background-on-windows-exceptions + +#![allow(bad_style)] +#![allow(private_no_mangle_fns)] + +use alloc::boxed::Box; +use core::any::Any; +use core::mem; +use core::raw; + +use windows as c; +use libc::{c_int, c_uint}; + +// First up, a whole bunch of type definitions. There's a few platform-specific +// oddities here, and a lot that's just blatantly copied from LLVM. The purpose +// of all this is to implement the `panic` function below through a call to +// `_CxxThrowException`. +// +// This function takes two arguments. The first is a pointer to the data we're +// passing in, which in this case is our trait object. Pretty easy to find! The +// next, however, is more complicated. This is a pointer to a `_ThrowInfo` +// structure, and it generally is just intended to just describe the exception +// being thrown. +// +// Currently the definition of this type [1] is a little hairy, and the main +// oddity (and difference from the online article) is that on 32-bit the +// pointers are pointers but on 64-bit the pointers are expressed as 32-bit +// offsets from the `__ImageBase` symbol. The `ptr_t` and `ptr!` macro in the +// modules below are used to express this. +// +// The maze of type definitions also closely follows what LLVM emits for this +// sort of operation. For example, if you compile this C++ code on MSVC and emit +// the LLVM IR: +// +// #include +// +// void foo() { +// uint64_t a[2] = {0, 1}; +// throw a; +// } +// +// That's essentially what we're trying to emulate. 
Most of the constant values +// below were just copied from LLVM, I'm at least not 100% sure what's going on +// everywhere. For example the `.PA_K\0` and `.PEA_K\0` strings below (stuck in +// the names of a few of these) I'm not actually sure what they do, but it seems +// to mirror what LLVM does! +// +// In any case, these structures are all constructed in a similar manner, and +// it's just somewhat verbose for us. +// +// [1]: http://www.geoffchappell.com/studies/msvc/language/predefined/ + +#[cfg(target_arch = "x86")] +#[macro_use] +mod imp { + pub type ptr_t = *mut u8; + pub const OFFSET: i32 = 4; + + pub const NAME1: [u8; 7] = [b'.', b'P', b'A', b'_', b'K', 0, 0]; + pub const NAME2: [u8; 7] = [b'.', b'P', b'A', b'X', 0, 0, 0]; + + macro_rules! ptr { + (0) => (0 as *mut u8); + ($e:expr) => ($e as *mut u8); + } +} + +#[cfg(target_arch = "x86_64")] +#[macro_use] +mod imp { + pub type ptr_t = u32; + pub const OFFSET: i32 = 8; + + pub const NAME1: [u8; 7] = [b'.', b'P', b'E', b'A', b'_', b'K', 0]; + pub const NAME2: [u8; 7] = [b'.', b'P', b'E', b'A', b'X', 0, 0]; + + extern { + pub static __ImageBase: u8; + } + + macro_rules! ptr { + (0) => (0); + ($e:expr) => { + (($e as usize) - (&imp::__ImageBase as *const _ as usize)) as u32 + } + } +} + +#[repr(C)] +pub struct _ThrowInfo { + pub attribues: c_uint, + pub pnfnUnwind: imp::ptr_t, + pub pForwardCompat: imp::ptr_t, + pub pCatchableTypeArray: imp::ptr_t, +} + +#[repr(C)] +pub struct _CatchableTypeArray { + pub nCatchableTypes: c_int, + pub arrayOfCatchableTypes: [imp::ptr_t; 2], +} + +#[repr(C)] +pub struct _CatchableType { + pub properties: c_uint, + pub pType: imp::ptr_t, + pub thisDisplacement: _PMD, + pub sizeOrOffset: c_int, + pub copy_function: imp::ptr_t, +} + +#[repr(C)] +pub struct _PMD { + pub mdisp: c_int, + pub pdisp: c_int, + pub vdisp: c_int, +} + +#[repr(C)] +pub struct _TypeDescriptor { + pub pVFTable: *const u8, + pub spare: *mut u8, + pub name: [u8; 7], +} + +static mut THROW_INFO: _ThrowInfo = _ThrowInfo { + attribues: 0, + pnfnUnwind: ptr!(0), + pForwardCompat: ptr!(0), + pCatchableTypeArray: ptr!(0), +}; + +static mut CATCHABLE_TYPE_ARRAY: _CatchableTypeArray = _CatchableTypeArray { + nCatchableTypes: 2, + arrayOfCatchableTypes: [ + ptr!(0), + ptr!(0), + ], +}; + +static mut CATCHABLE_TYPE1: _CatchableType = _CatchableType { + properties: 1, + pType: ptr!(0), + thisDisplacement: _PMD { + mdisp: 0, + pdisp: -1, + vdisp: 0, + }, + sizeOrOffset: imp::OFFSET, + copy_function: ptr!(0), +}; + +static mut CATCHABLE_TYPE2: _CatchableType = _CatchableType { + properties: 1, + pType: ptr!(0), + thisDisplacement: _PMD { + mdisp: 0, + pdisp: -1, + vdisp: 0, + }, + sizeOrOffset: imp::OFFSET, + copy_function: ptr!(0), +}; + +extern { + // The leading `\x01` byte here is actually a magical signal to LLVM to + // *not* apply any other mangling like prefixing with a `_` character. + // + // This symbol is the vtable used by C++'s `std::type_info`. Objects of type + // `std::type_info`, type descriptors, have a pointer to this table. Type + // descriptors are referenced by the C++ EH structures defined above and + // that we construct below. + #[link_name = "\x01??_7type_info@@6B@"] + static TYPE_INFO_VTABLE: *const u8; +} + +// We use #[lang = "msvc_try_filter"] here as this is the type descriptor which +// we'll use in LLVM's `catchpad` instruction which ends up also being passed as +// an argument to the C++ personality function. +// +// Again, I'm not entirely sure what this is describing, it just seems to work. 
+#[cfg_attr(all(not(test), not(stage0)), + lang = "msvc_try_filter")] +static mut TYPE_DESCRIPTOR1: _TypeDescriptor = _TypeDescriptor { + pVFTable: &TYPE_INFO_VTABLE as *const _ as *const _, + spare: 0 as *mut _, + name: imp::NAME1, +}; + +static mut TYPE_DESCRIPTOR2: _TypeDescriptor = _TypeDescriptor { + pVFTable: &TYPE_INFO_VTABLE as *const _ as *const _, + spare: 0 as *mut _, + name: imp::NAME2, +}; + +pub unsafe fn panic(data: Box<Any + Send>) -> u32 { + use core::intrinsics::atomic_store; + + // _CxxThrowException executes entirely on this stack frame, so there's no + // need to otherwise transfer `data` to the heap. We just pass a stack + // pointer to this function. + // + // The first argument is the payload being thrown (our two pointers), and + // the second argument is the type information object describing the + // exception (constructed above). + let ptrs = mem::transmute::<_, raw::TraitObject>(data); + let mut ptrs = [ptrs.data as u64, ptrs.vtable as u64]; + let mut ptrs_ptr = ptrs.as_mut_ptr(); + + // This... may seem surprising, and justifiably so. On 32-bit MSVC the + // pointers between these structures are just that, pointers. On 64-bit MSVC, + // however, the pointers between structures are rather expressed as 32-bit + // offsets from `__ImageBase`. + // + // Consequently, on 32-bit MSVC we can declare all these pointers in the + // `static`s above. On 64-bit MSVC, we would have to express subtraction of + // pointers in statics, which Rust does not currently allow, so we can't + // actually do that. + // + // The next best thing, then, is to fill in these structures at runtime + // (panicking is already the "slow path" anyway). So here we reinterpret all + // of these pointer fields as 32-bit integers and then store the + // relevant value into it (atomically, as concurrent panics may be + // happening). Technically the runtime will probably do a nonatomic read of + // these fields, but in theory they never read the *wrong* value so it + // shouldn't be too bad... + // + // In any case, we basically need to do something like this until we can + // express more operations in statics (and we may never be able to). + atomic_store(&mut THROW_INFO.pCatchableTypeArray as *mut _ as *mut u32, + ptr!(&CATCHABLE_TYPE_ARRAY as *const _) as u32); + atomic_store(&mut CATCHABLE_TYPE_ARRAY.arrayOfCatchableTypes[0] as *mut _ as *mut u32, + ptr!(&CATCHABLE_TYPE1 as *const _) as u32); + atomic_store(&mut CATCHABLE_TYPE_ARRAY.arrayOfCatchableTypes[1] as *mut _ as *mut u32, + ptr!(&CATCHABLE_TYPE2 as *const _) as u32); + atomic_store(&mut CATCHABLE_TYPE1.pType as *mut _ as *mut u32, + ptr!(&TYPE_DESCRIPTOR1 as *const _) as u32); + atomic_store(&mut CATCHABLE_TYPE2.pType as *mut _ as *mut u32, + ptr!(&TYPE_DESCRIPTOR2 as *const _) as u32); + + c::_CxxThrowException(&mut ptrs_ptr as *mut _ as *mut _, + &mut THROW_INFO as *mut _ as *mut _); + u32::max_value() +} + +pub fn payload() -> [u64; 2] { + [0; 2] +} + +pub unsafe fn cleanup(payload: [u64; 2]) -> Box<Any + Send> { + mem::transmute(raw::TraitObject { + data: payload[0] as *mut _, + vtable: payload[1] as *mut _, + }) +} + +#[lang = "msvc_try_filter"] +#[cfg(stage0)] +unsafe extern fn __rust_try_filter(_eh_ptrs: *mut u8, + _payload: *mut u8) -> i32 { + return 0 +} + +// This is required by the compiler to exist (e.g. it's a lang item), but +// it's never actually called by the compiler because __C_specific_handler +// or _except_handler3 is the personality function that is always used. +// Hence this is just an aborting stub.
+#[lang = "eh_personality"] +#[cfg(not(test))] +fn rust_eh_personality() { + unsafe { ::core::intrinsics::abort() } +} diff --git a/src/libstd/sys/common/unwind/seh64_gnu.rs b/src/libpanic_unwind/seh64_gnu.rs similarity index 92% rename from src/libstd/sys/common/unwind/seh64_gnu.rs rename to src/libpanic_unwind/seh64_gnu.rs index 57281d67eb..adb38d857e 100644 --- a/src/libstd/sys/common/unwind/seh64_gnu.rs +++ b/src/libpanic_unwind/seh64_gnu.rs @@ -14,13 +14,12 @@ #![allow(bad_style)] #![allow(private_no_mangle_fns)] -use prelude::v1::*; +use alloc::boxed::Box; -use any::Any; -use sys_common::dwarf::eh; -use core::mem; -use core::ptr; -use sys::c; +use core::any::Any; +use core::intrinsics; +use dwarf::eh; +use windows as c; // Define our exception codes: // according to http://msdn.microsoft.com/en-us/library/het71c37(v=VS.80).aspx, @@ -37,24 +36,24 @@ const RUST_PANIC: c::DWORD = ETYPE | (1 << 24) | MAGIC; #[repr(C)] struct PanicData { - data: Box + data: Box } -pub unsafe fn panic(data: Box) -> ! { +pub unsafe fn panic(data: Box) -> u32 { let panic_ctx = Box::new(PanicData { data: data }); let params = [Box::into_raw(panic_ctx) as c::ULONG_PTR]; c::RaiseException(RUST_PANIC, c::EXCEPTION_NONCONTINUABLE, params.len() as c::DWORD, ¶ms as *const c::ULONG_PTR); - rtabort!("could not unwind stack"); + u32::max_value() } pub fn payload() -> *mut u8 { 0 as *mut u8 } -pub unsafe fn cleanup(ptr: *mut u8) -> Box { +pub unsafe fn cleanup(ptr: *mut u8) -> Box { let panic_ctx = Box::from_raw(ptr as *mut PanicData); return panic_ctx.data; } @@ -115,14 +114,12 @@ unsafe extern fn rust_eh_personality( er.ExceptionInformation[0] as c::LPVOID, // pointer to PanicData contextRecord, dc.HistoryTable); - rtabort!("could not unwind"); } } } c::ExceptionContinueSearch } -#[cfg(not(test))] #[lang = "eh_unwind_resume"] #[unwind] unsafe extern fn rust_eh_unwind_resume(panic_ctx: c::LPVOID) -> ! { @@ -131,7 +128,7 @@ unsafe extern fn rust_eh_unwind_resume(panic_ctx: c::LPVOID) -> ! { c::EXCEPTION_NONCONTINUABLE, params.len() as c::DWORD, ¶ms as *const c::ULONG_PTR); - rtabort!("could not resume unwind"); + intrinsics::abort(); } unsafe fn find_landing_pad(dc: &c::DISPATCHER_CONTEXT) -> Option { diff --git a/src/libpanic_unwind/windows.rs b/src/libpanic_unwind/windows.rs new file mode 100644 index 0000000000..9cca018ff1 --- /dev/null +++ b/src/libpanic_unwind/windows.rs @@ -0,0 +1,98 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![allow(bad_style)] +#![allow(dead_code)] +#![cfg(windows)] + +use libc::{c_void, c_ulong, c_long, c_ulonglong}; + +pub type DWORD = c_ulong; +pub type LONG = c_long; +pub type ULONG_PTR = c_ulonglong; +pub type LPVOID = *mut c_void; + +pub const EXCEPTION_MAXIMUM_PARAMETERS: usize = 15; +pub const EXCEPTION_NONCONTINUABLE: DWORD = 0x1; // Noncontinuable exception +pub const EXCEPTION_UNWINDING: DWORD = 0x2; // Unwind is in progress +pub const EXCEPTION_EXIT_UNWIND: DWORD = 0x4; // Exit unwind is in progress +pub const EXCEPTION_TARGET_UNWIND: DWORD = 0x20; // Target unwind in progress +pub const EXCEPTION_COLLIDED_UNWIND: DWORD = 0x40; // Collided exception handler call +pub const EXCEPTION_UNWIND: DWORD = EXCEPTION_UNWINDING | + EXCEPTION_EXIT_UNWIND | + EXCEPTION_TARGET_UNWIND | + EXCEPTION_COLLIDED_UNWIND; + +#[repr(C)] +pub struct EXCEPTION_RECORD { + pub ExceptionCode: DWORD, + pub ExceptionFlags: DWORD, + pub ExceptionRecord: *mut EXCEPTION_RECORD, + pub ExceptionAddress: LPVOID, + pub NumberParameters: DWORD, + pub ExceptionInformation: [LPVOID; EXCEPTION_MAXIMUM_PARAMETERS] +} + +#[repr(C)] +pub struct EXCEPTION_POINTERS { + pub ExceptionRecord: *mut EXCEPTION_RECORD, + pub ContextRecord: *mut CONTEXT, +} + +pub enum UNWIND_HISTORY_TABLE {} + +#[repr(C)] +pub struct RUNTIME_FUNCTION { + pub BeginAddress: DWORD, + pub EndAddress: DWORD, + pub UnwindData: DWORD, +} + +pub enum CONTEXT {} + +#[repr(C)] +pub struct DISPATCHER_CONTEXT { + pub ControlPc: LPVOID, + pub ImageBase: LPVOID, + pub FunctionEntry: *const RUNTIME_FUNCTION, + pub EstablisherFrame: LPVOID, + pub TargetIp: LPVOID, + pub ContextRecord: *const CONTEXT, + pub LanguageHandler: LPVOID, + pub HandlerData: *const u8, + pub HistoryTable: *const UNWIND_HISTORY_TABLE, +} + +#[repr(C)] +pub enum EXCEPTION_DISPOSITION { + ExceptionContinueExecution, + ExceptionContinueSearch, + ExceptionNestedException, + ExceptionCollidedUnwind +} +pub use self::EXCEPTION_DISPOSITION::*; + +extern "system" { + #[unwind] + pub fn RaiseException(dwExceptionCode: DWORD, + dwExceptionFlags: DWORD, + nNumberOfArguments: DWORD, + lpArguments: *const ULONG_PTR); + #[unwind] + pub fn RtlUnwindEx(TargetFrame: LPVOID, + TargetIp: LPVOID, + ExceptionRecord: *const EXCEPTION_RECORD, + ReturnValue: LPVOID, + OriginalContext: *const CONTEXT, + HistoryTable: *const UNWIND_HISTORY_TABLE); + #[unwind] + pub fn _CxxThrowException(pExceptionObject: *mut c_void, + pThrowInfo: *mut u8); +} diff --git a/src/librand/Cargo.toml b/src/librand/Cargo.toml index 784654c085..86b061db05 100644 --- a/src/librand/Cargo.toml +++ b/src/librand/Cargo.toml @@ -6,7 +6,6 @@ version = "0.0.0" [lib] name = "rand" path = "lib.rs" -test = false [dependencies] core = { path = "../libcore" } diff --git a/src/librand/distributions/exponential.rs b/src/librand/distributions/exponential.rs index f02b945178..12dbbfdb0e 100644 --- a/src/librand/distributions/exponential.rs +++ b/src/librand/distributions/exponential.rs @@ -10,6 +10,7 @@ //! The exponential distribution. 
+#[cfg(not(test))] // only necessary for no_std use FloatMath; use {Rng, Rand}; diff --git a/src/librand/distributions/gamma.rs b/src/librand/distributions/gamma.rs index 8cd7ac06f9..cf48823656 100644 --- a/src/librand/distributions/gamma.rs +++ b/src/librand/distributions/gamma.rs @@ -13,6 +13,7 @@ use self::GammaRepr::*; use self::ChiSquaredRepr::*; +#[cfg(not(test))] // only necessary for no_std use FloatMath; use {Rng, Open01}; diff --git a/src/librand/distributions/mod.rs b/src/librand/distributions/mod.rs index a54c8df235..2557d39c55 100644 --- a/src/librand/distributions/mod.rs +++ b/src/librand/distributions/mod.rs @@ -17,7 +17,9 @@ //! internally. The `IndependentSample` trait is for generating values //! that do not need to record state. +#[cfg(not(test))] // only necessary for no_std use core::num::Float; + use core::marker::PhantomData; use {Rng, Rand}; diff --git a/src/librand/distributions/normal.rs b/src/librand/distributions/normal.rs index b2ccc5eb60..86840c568e 100644 --- a/src/librand/distributions/normal.rs +++ b/src/librand/distributions/normal.rs @@ -10,6 +10,7 @@ //! The normal and derived distributions. +#[cfg(not(test))] // only necessary for no_std use FloatMath; use {Rng, Rand, Open01}; diff --git a/src/librand/lib.rs b/src/librand/lib.rs index e651f5bc18..d8517fb4c5 100644 --- a/src/librand/lib.rs +++ b/src/librand/lib.rs @@ -28,13 +28,13 @@ #![unstable(feature = "rand", reason = "use `rand` from crates.io", issue = "27703")] -#![feature(core_float)] #![feature(core_intrinsics)] #![feature(staged_api)] #![feature(step_by)] #![feature(custom_attribute)] #![allow(unused_attributes)] +#![cfg_attr(not(test), feature(core_float))] // only necessary for no_std #![cfg_attr(test, feature(test, rand))] #![allow(deprecated)] diff --git a/src/librustc/Cargo.toml b/src/librustc/Cargo.toml index d77268219f..9291227a73 100644 --- a/src/librustc/Cargo.toml +++ b/src/librustc/Cargo.toml @@ -19,5 +19,6 @@ rustc_back = { path = "../librustc_back" } rustc_bitflags = { path = "../librustc_bitflags" } rustc_const_math = { path = "../librustc_const_math" } rustc_data_structures = { path = "../librustc_data_structures" } +rustc_llvm = { path = "../librustc_llvm" } serialize = { path = "../libserialize" } syntax = { path = "../libsyntax" } diff --git a/src/librustc/cfg/construct.rs b/src/librustc/cfg/construct.rs index 7049749135..76699f1395 100644 --- a/src/librustc/cfg/construct.rs +++ b/src/librustc/cfg/construct.rs @@ -19,7 +19,7 @@ use syntax::ptr::P; use hir::{self, PatKind}; struct CFGBuilder<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, graph: CFGGraph, fn_exit: CFGIndex, loop_scopes: Vec, @@ -32,8 +32,8 @@ struct LoopScope { break_index: CFGIndex, // where to go on a `break } -pub fn construct(tcx: &TyCtxt, - blk: &hir::Block) -> CFG { +pub fn construct<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + blk: &hir::Block) -> CFG { let mut graph = graph::Graph::new(); let entry = graph.add_node(CFGNodeData::Entry); @@ -285,7 +285,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } hir::ExprBreak(label) => { - let loop_scope = self.find_scope(expr, label.map(|l| l.node.name)); + let loop_scope = self.find_scope(expr, label.map(|l| l.node)); let b = self.add_ast_node(expr.id, &[pred]); self.add_exiting_edge(expr, b, loop_scope, loop_scope.break_index); @@ -293,7 +293,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> { } hir::ExprAgain(label) => { - let loop_scope = self.find_scope(expr, label.map(|l| l.node.name)); + let loop_scope = self.find_scope(expr, label.map(|l| l.node)); 
let a = self.add_ast_node(expr.id, &[pred]); self.add_exiting_edge(expr, a, loop_scope, loop_scope.continue_index); diff --git a/src/librustc/cfg/mod.rs b/src/librustc/cfg/mod.rs index e86bf6ebc5..d06f51073d 100644 --- a/src/librustc/cfg/mod.rs +++ b/src/librustc/cfg/mod.rs @@ -58,13 +58,13 @@ pub type CFGNode = graph::Node; pub type CFGEdge = graph::Edge; impl CFG { - pub fn new(tcx: &TyCtxt, - blk: &hir::Block) -> CFG { + pub fn new<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + blk: &hir::Block) -> CFG { construct::construct(tcx, blk) } pub fn node_is_reachable(&self, id: ast::NodeId) -> bool { - self.graph.depth_traverse(self.entry) + self.graph.depth_traverse(self.entry, graph::OUTGOING) .any(|idx| self.graph.node_data(idx).id() == id) } } diff --git a/src/librustc/dep_graph/debug.rs b/src/librustc/dep_graph/debug.rs new file mode 100644 index 0000000000..15b0380374 --- /dev/null +++ b/src/librustc/dep_graph/debug.rs @@ -0,0 +1,69 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Code for debugging the dep-graph. + +use super::dep_node::DepNode; +use std::error::Error; +use std::fmt::Debug; + +/// A dep-node filter goes from a user-defined string to a query over +/// nodes. Right now the format is like this: +/// +/// x & y & z +/// +/// where the format-string of the dep-node must contain `x`, `y`, and +/// `z`. +#[derive(Debug)] +pub struct DepNodeFilter { + text: String +} + +impl DepNodeFilter { + pub fn new(text: &str) -> Self { + DepNodeFilter { + text: text.trim().to_string() + } + } + + /// True if all nodes always pass the filter. + pub fn accepts_all(&self) -> bool { + self.text.is_empty() + } + + /// Tests whether `node` meets the filter, returning true if so. + pub fn test(&self, node: &DepNode) -> bool { + let debug_str = format!("{:?}", node); + self.text.split("&") + .map(|s| s.trim()) + .all(|f| debug_str.contains(f)) + } +} + +/// A filter like `F -> G` where `F` and `G` are valid dep-node +/// filters. This can be used to test the source/target independently. +pub struct EdgeFilter { + pub source: DepNodeFilter, + pub target: DepNodeFilter, +} + +impl EdgeFilter { + pub fn new(test: &str) -> Result> { + let parts: Vec<_> = test.split("->").collect(); + if parts.len() != 2 { + Err(format!("expected a filter like `a&b -> c&d`, not `{}`", test).into()) + } else { + Ok(EdgeFilter { + source: DepNodeFilter::new(parts[0]), + target: DepNodeFilter::new(parts[1]), + }) + } + } +} diff --git a/src/librustc/dep_graph/dep_node.rs b/src/librustc/dep_graph/dep_node.rs index 536c739bf1..84c84a7ed5 100644 --- a/src/librustc/dep_graph/dep_node.rs +++ b/src/librustc/dep_graph/dep_node.rs @@ -32,6 +32,10 @@ pub enum DepNode { // Represents the HIR node with the given node-id Hir(D), + // Represents the metadata for a given HIR node, typically found + // in an extern crate. + MetaData(D), + // Represents different phases in the compiler. 
CrateReader, CollectLanguageItems, @@ -59,11 +63,13 @@ pub enum DepNode { TypeckItemBody(D), Dropck, DropckImpl(D), + UnusedTraitCheck, CheckConst(D), Privacy, IntrinsicCheck(D), MatchCheck(D), MirMapConstruction(D), + MirPass(D), MirTypeck(D), BorrowCheck(D), RvalueCheck(D), @@ -75,6 +81,7 @@ pub enum DepNode { TransCrateItem(D), TransInlinedItem(D), TransWriteMetadata, + LinkBinary, // Nodes representing bits of computed IR in the tcx. Each shared // table in the tcx (or elsewhere) maps to one of these @@ -88,6 +95,7 @@ pub enum DepNode { ImplOrTraitItems(D), ItemSignature(D), FieldTy(D), + SizedConstraint(D), TraitItemDefIds(D), InherentImpls(D), ImplItems(D), @@ -163,6 +171,7 @@ impl DepNode { CheckEntryFn => Some(CheckEntryFn), Variance => Some(Variance), Dropck => Some(Dropck), + UnusedTraitCheck => Some(UnusedTraitCheck), Privacy => Some(Privacy), Reachability => Some(Reachability), DeadCheck => Some(DeadCheck), @@ -170,7 +179,9 @@ impl DepNode { LateLintCheck => Some(LateLintCheck), TransCrate => Some(TransCrate), TransWriteMetadata => Some(TransWriteMetadata), + LinkBinary => Some(LinkBinary), Hir(ref d) => op(d).map(Hir), + MetaData(ref d) => op(d).map(MetaData), CollectItem(ref d) => op(d).map(CollectItem), CoherenceCheckImpl(ref d) => op(d).map(CoherenceCheckImpl), CoherenceOverlapCheck(ref d) => op(d).map(CoherenceOverlapCheck), @@ -185,6 +196,7 @@ impl DepNode { IntrinsicCheck(ref d) => op(d).map(IntrinsicCheck), MatchCheck(ref d) => op(d).map(MatchCheck), MirMapConstruction(ref d) => op(d).map(MirMapConstruction), + MirPass(ref d) => op(d).map(MirPass), MirTypeck(ref d) => op(d).map(MirTypeck), BorrowCheck(ref d) => op(d).map(BorrowCheck), RvalueCheck(ref d) => op(d).map(RvalueCheck), @@ -193,6 +205,7 @@ impl DepNode { ImplOrTraitItems(ref d) => op(d).map(ImplOrTraitItems), ItemSignature(ref d) => op(d).map(ItemSignature), FieldTy(ref d) => op(d).map(FieldTy), + SizedConstraint(ref d) => op(d).map(SizedConstraint), TraitItemDefIds(ref d) => op(d).map(TraitItemDefIds), InherentImpls(ref d) => op(d).map(InherentImpls), ImplItems(ref d) => op(d).map(ImplItems), diff --git a/src/librustc/dep_graph/mod.rs b/src/librustc/dep_graph/mod.rs index 49481dcb79..e65f6bbcf7 100644 --- a/src/librustc/dep_graph/mod.rs +++ b/src/librustc/dep_graph/mod.rs @@ -8,6 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +pub mod debug; mod dep_node; mod dep_tracking_map; mod edges; @@ -22,3 +23,4 @@ pub use self::dep_node::DepNode; pub use self::graph::DepGraph; pub use self::query::DepGraphQuery; pub use self::visit::visit_all_items_in_krate; +pub use self::raii::DepTask; diff --git a/src/librustc/dep_graph/query.rs b/src/librustc/dep_graph/query.rs index acc6660da6..93248edb19 100644 --- a/src/librustc/dep_graph/query.rs +++ b/src/librustc/dep_graph/query.rs @@ -9,7 +9,7 @@ // except according to those terms. use rustc_data_structures::fnv::FnvHashMap; -use rustc_data_structures::graph::{Graph, NodeIndex}; +use rustc_data_structures::graph::{Direction, INCOMING, Graph, NodeIndex, OUTGOING}; use std::fmt::Debug; use std::hash::Hash; @@ -63,11 +63,9 @@ impl DepGraphQuery { .collect() } - /// All nodes reachable from `node`. In other words, things that - /// will have to be recomputed if `node` changes. 
- pub fn transitive_dependents(&self, node: DepNode) -> Vec> { + fn reachable_nodes(&self, node: DepNode, direction: Direction) -> Vec> { if let Some(&index) = self.indices.get(&node) { - self.graph.depth_traverse(index) + self.graph.depth_traverse(index, direction) .map(|s| self.graph.node_data(s).clone()) .collect() } else { @@ -75,8 +73,19 @@ impl DepGraphQuery { } } + /// All nodes reachable from `node`. In other words, things that + /// will have to be recomputed if `node` changes. + pub fn transitive_successors(&self, node: DepNode) -> Vec> { + self.reachable_nodes(node, OUTGOING) + } + + /// All nodes that can reach `node`. + pub fn transitive_predecessors(&self, node: DepNode) -> Vec> { + self.reachable_nodes(node, INCOMING) + } + /// Just the outgoing edges from `node`. - pub fn immediate_dependents(&self, node: DepNode) -> Vec> { + pub fn immediate_successors(&self, node: DepNode) -> Vec> { if let Some(&index) = self.indices.get(&node) { self.graph.successor_nodes(index) .map(|s| self.graph.node_data(s).clone()) diff --git a/src/librustc/dep_graph/thread.rs b/src/librustc/dep_graph/thread.rs index 5b0e4a909c..70d0a4e315 100644 --- a/src/librustc/dep_graph/thread.rs +++ b/src/librustc/dep_graph/thread.rs @@ -28,6 +28,7 @@ use super::DepGraphQuery; use super::DepNode; use super::edges::DepGraphEdges; +#[derive(Debug)] pub enum DepMessage { Read(DepNode), Write(DepNode), @@ -117,6 +118,8 @@ impl DepGraphThreadData { /// the buffer is full, this may swap.) #[inline] pub fn enqueue(&self, message: DepMessage) { + debug!("enqueue: {:?} tasks_pushed={}", message, self.tasks_pushed.get()); + // Regardless of whether dep graph construction is enabled, we // still want to check that we always have a valid task on the // stack when a read/write/etc event occurs. @@ -176,6 +179,9 @@ pub fn main(swap_in: Receiver>, DepMessage::Query => query_out.send(edges.query()).unwrap(), } } - swap_out.send(messages).unwrap(); + if let Err(_) = swap_out.send(messages) { + // the receiver must have been dropped already + break; + } } } diff --git a/src/librustc/dep_graph/visit.rs b/src/librustc/dep_graph/visit.rs index cdc1621679..9133b4d22e 100644 --- a/src/librustc/dep_graph/visit.rs +++ b/src/librustc/dep_graph/visit.rs @@ -22,13 +22,13 @@ use super::dep_node::DepNode; /// read edge from the corresponding AST node. This is used in /// compiler passes to automatically record the item that they are /// working on. 
-pub fn visit_all_items_in_krate<'tcx,V,F>(tcx: &TyCtxt<'tcx>, - mut dep_node_fn: F, - visitor: &mut V) +pub fn visit_all_items_in_krate<'a, 'tcx, V, F>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + mut dep_node_fn: F, + visitor: &mut V) where F: FnMut(DefId) -> DepNode, V: Visitor<'tcx> { struct TrackingVisitor<'visit, 'tcx: 'visit, F: 'visit, V: 'visit> { - tcx: &'visit TyCtxt<'tcx>, + tcx: TyCtxt<'visit, 'tcx, 'tcx>, dep_node_fn: &'visit mut F, visitor: &'visit mut V } @@ -42,7 +42,8 @@ pub fn visit_all_items_in_krate<'tcx,V,F>(tcx: &TyCtxt<'tcx>, let _task = self.tcx.dep_graph.in_task(task_id); debug!("Started task {:?}", task_id); self.tcx.dep_graph.read(DepNode::Hir(item_def_id)); - self.visitor.visit_item(i) + self.visitor.visit_item(i); + debug!("Ended task {:?}", task_id); } } diff --git a/src/librustc/diagnostics.rs b/src/librustc/diagnostics.rs index 0ef130127a..c64f0ddac5 100644 --- a/src/librustc/diagnostics.rs +++ b/src/librustc/diagnostics.rs @@ -115,7 +115,7 @@ trait Trait { Now, `foo()` can no longer be called on a trait object, but you will now be allowed to make a trait object, and that will be able to call any object-safe -methods". With such a bound, one can still call `foo()` on types implementing +methods. With such a bound, one can still call `foo()` on types implementing that trait that aren't behind trait objects. ### Method has generic type parameters @@ -292,7 +292,7 @@ E0072: r##" When defining a recursive struct or enum, any use of the type being defined from inside the definition must occur behind a pointer (like `Box` or `&`). This is because structs and enums must have a well-defined size, and without -the pointer the size of the type would need to be unbounded. +the pointer, the size of the type would need to be unbounded. Consider the following erroneous definition of a type for a list of bytes: @@ -635,7 +635,17 @@ fn foo(x: u8) -> u8 { ``` It is advisable to find out what the unhandled cases are and check for them, -returning an appropriate value or panicking if necessary. +returning an appropriate value or panicking if necessary. Check if you need +to remove a semicolon from the last expression, like in this case: + +```ignore +fn foo(x: u8) -> u8 { + inner(2*x + 1); +} +``` + +The semicolon discards the return value of `inner`, instead of returning +it from `foo`. "##, E0270: r##" @@ -1569,4 +1579,5 @@ register_diagnostics! { E0490, // a value of type `..` is borrowed for too long E0491, // in type `..`, reference has a longer lifetime than the data it... 
E0495, // cannot infer an appropriate lifetime due to conflicting requirements + E0525, // expected a closure that implements `..` but this closure only implements `..` } diff --git a/src/librustc/hir/def.rs b/src/librustc/hir/def.rs index c270a4413b..a056ba588b 100644 --- a/src/librustc/hir/def.rs +++ b/src/librustc/hir/def.rs @@ -17,8 +17,7 @@ use hir; #[derive(Clone, Copy, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Def { Fn(DefId), - SelfTy(Option, // trait id - Option<(ast::NodeId, ast::NodeId)>), // (impl id, self type id) + SelfTy(Option /* trait */, Option /* impl */), Mod(DefId), ForeignMod(DefId), Static(DefId, bool /* is_mutbl */), diff --git a/src/librustc/hir/fold.rs b/src/librustc/hir/fold.rs index a6ff716488..a91d16f25a 100644 --- a/src/librustc/hir/fold.rs +++ b/src/librustc/hir/fold.rs @@ -18,7 +18,7 @@ use syntax::attr::ThinAttributesExt; use hir; use syntax::codemap::{respan, Span, Spanned}; use syntax::ptr::P; -use syntax::parse::token; +use syntax::parse::token::keywords; use syntax::util::move_map::MoveMap; pub trait Folder : Sized { @@ -126,10 +126,6 @@ pub trait Folder : Sized { noop_fold_name(n, self) } - fn fold_ident(&mut self, i: Ident) -> Ident { - noop_fold_ident(i, self) - } - fn fold_usize(&mut self, i: usize) -> usize { noop_fold_usize(i, self) } @@ -158,14 +154,6 @@ pub trait Folder : Sized { noop_fold_local(l, self) } - fn fold_explicit_self(&mut self, es: ExplicitSelf) -> ExplicitSelf { - noop_fold_explicit_self(es, self) - } - - fn fold_explicit_self_underscore(&mut self, es: ExplicitSelf_) -> ExplicitSelf_ { - noop_fold_explicit_self_underscore(es, self) - } - fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime { noop_fold_lifetime(l, self) } @@ -415,10 +403,6 @@ pub fn noop_fold_name(n: Name, _: &mut T) -> Name { n } -pub fn noop_fold_ident(i: Ident, _: &mut T) -> Ident { - i -} - pub fn noop_fold_usize(i: usize, _: &mut T) -> usize { i } @@ -426,9 +410,9 @@ pub fn noop_fold_usize(i: usize, _: &mut T) -> usize { pub fn noop_fold_path(Path { global, segments, span }: Path, fld: &mut T) -> Path { Path { global: global, - segments: segments.move_map(|PathSegment { identifier, parameters }| { + segments: segments.move_map(|PathSegment { name, parameters }| { PathSegment { - identifier: fld.fold_ident(identifier), + name: fld.fold_name(name), parameters: fld.fold_path_parameters(parameters), } }), @@ -495,29 +479,6 @@ pub fn noop_fold_attribute(at: Attribute, fld: &mut T) -> Option(es: ExplicitSelf_, - fld: &mut T) - -> ExplicitSelf_ { - match es { - SelfStatic | SelfValue(_) => es, - SelfRegion(lifetime, m, name) => { - SelfRegion(fld.fold_opt_lifetime(lifetime), m, name) - } - SelfExplicit(typ, name) => { - SelfExplicit(fld.fold_ty(typ), name) - } - } -} - -pub fn noop_fold_explicit_self(Spanned { span, node }: ExplicitSelf, - fld: &mut T) - -> ExplicitSelf { - Spanned { - node: fld.fold_explicit_self_underscore(node), - span: fld.new_span(span), - } -} - pub fn noop_fold_meta_item(mi: P, fld: &mut T) -> P { mi.map(|Spanned { node, span }| { Spanned { @@ -867,7 +828,7 @@ pub fn noop_fold_crate(Crate { module, attrs, config, span, let config = folder.fold_meta_items(config); let crate_mod = folder.fold_item(hir::Item { - name: token::special_idents::invalid.name, + name: keywords::Invalid.name(), attrs: attrs, id: DUMMY_NODE_ID, vis: hir::Public, @@ -941,7 +902,6 @@ pub fn noop_fold_method_sig(sig: MethodSig, folder: &mut T) -> Method MethodSig { generics: folder.fold_generics(sig.generics), abi: sig.abi, - explicit_self: 
folder.fold_explicit_self(sig.explicit_self), unsafety: sig.unsafety, constness: sig.constness, decl: folder.fold_fn_decl(sig.decl), @@ -958,7 +918,7 @@ pub fn noop_fold_pat(p: P, folder: &mut T) -> P { PatKind::Ident(binding_mode, Spanned { span: folder.new_span(pth1.span), - node: folder.fold_ident(pth1.node), + node: folder.fold_name(pth1.node), }, sub.map(|x| folder.fold_pat(x))) } @@ -1046,24 +1006,25 @@ pub fn noop_fold_expr(Expr { id, node, span, attrs }: Expr, folder: & folder.fold_block(tr), fl.map(|x| folder.fold_expr(x))) } - ExprWhile(cond, body, opt_ident) => { + ExprWhile(cond, body, opt_name) => { ExprWhile(folder.fold_expr(cond), folder.fold_block(body), - opt_ident.map(|i| folder.fold_ident(i))) + opt_name.map(|i| folder.fold_name(i))) } - ExprLoop(body, opt_ident) => { + ExprLoop(body, opt_name) => { ExprLoop(folder.fold_block(body), - opt_ident.map(|i| folder.fold_ident(i))) + opt_name.map(|i| folder.fold_name(i))) } ExprMatch(expr, arms, source) => { ExprMatch(folder.fold_expr(expr), arms.move_map(|x| folder.fold_arm(x)), source) } - ExprClosure(capture_clause, decl, body) => { + ExprClosure(capture_clause, decl, body, fn_decl_span) => { ExprClosure(capture_clause, folder.fold_fn_decl(decl), - folder.fold_block(body)) + folder.fold_block(body), + folder.new_span(fn_decl_span)) } ExprBlock(blk) => ExprBlock(folder.fold_block(blk)), ExprAssign(el, er) => { @@ -1092,11 +1053,11 @@ pub fn noop_fold_expr(Expr { id, node, span, attrs }: Expr, folder: & }); ExprPath(qself, folder.fold_path(path)) } - ExprBreak(opt_ident) => ExprBreak(opt_ident.map(|label| { - respan(folder.new_span(label.span), folder.fold_ident(label.node)) + ExprBreak(opt_name) => ExprBreak(opt_name.map(|label| { + respan(folder.new_span(label.span), folder.fold_name(label.node)) })), - ExprAgain(opt_ident) => ExprAgain(opt_ident.map(|label| { - respan(folder.new_span(label.span), folder.fold_ident(label.node)) + ExprAgain(opt_name) => ExprAgain(opt_name.map(|label| { + respan(folder.new_span(label.span), folder.fold_name(label.node)) })), ExprRet(e) => ExprRet(e.map(|x| folder.fold_expr(x))), ExprInlineAsm(asm, outputs, inputs) => { diff --git a/src/librustc/hir/intravisit.rs b/src/librustc/hir/intravisit.rs index 55fd58da86..2e9e433b83 100644 --- a/src/librustc/hir/intravisit.rs +++ b/src/librustc/hir/intravisit.rs @@ -97,9 +97,6 @@ pub trait Visitor<'v> : Sized { fn visit_name(&mut self, _span: Span, _name: Name) { // Nothing to do. 
} - fn visit_ident(&mut self, span: Span, ident: Ident) { - walk_ident(self, span, ident); - } fn visit_mod(&mut self, m: &'v Mod, _s: Span, _n: NodeId) { walk_mod(self, m) } @@ -180,9 +177,6 @@ pub trait Visitor<'v> : Sized { fn visit_lifetime_def(&mut self, lifetime: &'v LifetimeDef) { walk_lifetime_def(self, lifetime) } - fn visit_explicit_self(&mut self, es: &'v ExplicitSelf) { - walk_explicit_self(self, es) - } fn visit_path(&mut self, path: &'v Path, _id: NodeId) { walk_path(self, path) } @@ -203,6 +197,9 @@ pub trait Visitor<'v> : Sized { fn visit_macro_def(&mut self, macro_def: &'v MacroDef) { walk_macro_def(self, macro_def) } + fn visit_vis(&mut self, vis: &'v Visibility) { + walk_vis(self, vis) + } } pub fn walk_opt_name<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_name: Option) { @@ -211,16 +208,6 @@ pub fn walk_opt_name<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_name: } } -pub fn walk_opt_ident<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_ident: Option) { - for ident in opt_ident { - visitor.visit_ident(span, ident); - } -} - -pub fn walk_ident<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, ident: Ident) { - visitor.visit_name(span, ident.name); -} - /// Walks the contents of a crate. See also `Crate::visit_all_items`. pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) { visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID); @@ -255,23 +242,6 @@ pub fn walk_lifetime_def<'v, V: Visitor<'v>>(visitor: &mut V, lifetime_def: &'v walk_list!(visitor, visit_lifetime, &lifetime_def.bounds); } -pub fn walk_explicit_self<'v, V: Visitor<'v>>(visitor: &mut V, explicit_self: &'v ExplicitSelf) { - match explicit_self.node { - SelfStatic => {} - SelfValue(name) => { - visitor.visit_name(explicit_self.span, name) - } - SelfRegion(ref opt_lifetime, _, name) => { - visitor.visit_name(explicit_self.span, name); - walk_list!(visitor, visit_lifetime, opt_lifetime); - } - SelfExplicit(ref typ, name) => { - visitor.visit_name(explicit_self.span, name); - visitor.visit_ty(typ) - } - } -} - pub fn walk_poly_trait_ref<'v, V>(visitor: &mut V, trait_ref: &'v PolyTraitRef, _modifier: &'v TraitBoundModifier) @@ -288,6 +258,7 @@ pub fn walk_trait_ref<'v, V>(visitor: &mut V, trait_ref: &'v TraitRef) } pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { + visitor.visit_vis(&item.vis); visitor.visit_name(item.span, item.name); match item.node { ItemExternCrate(opt_name) => { @@ -455,7 +426,7 @@ pub fn walk_path_list_item<'v, V: Visitor<'v>>(visitor: &mut V, pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V, path_span: Span, segment: &'v PathSegment) { - visitor.visit_ident(path_span, segment.identifier); + visitor.visit_name(path_span, segment.name); visitor.visit_path_parameters(path_span, &segment.parameters); } @@ -511,7 +482,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { visitor.visit_pat(subpattern) } PatKind::Ident(_, ref pth1, ref optional_subpattern) => { - visitor.visit_ident(pth1.span, pth1.node); + visitor.visit_name(pth1.span, pth1.node); walk_list!(visitor, visit_pat, optional_subpattern); } PatKind::Lit(ref expression) => visitor.visit_expr(expression), @@ -529,6 +500,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { } pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, foreign_item: &'v ForeignItem) { + visitor.visit_vis(&foreign_item.vis); visitor.visit_name(foreign_item.span, foreign_item.name); match foreign_item.node { @@ -615,7 +587,6 @@ 
pub fn walk_fn_kind<'v, V: Visitor<'v>>(visitor: &mut V, function_kind: FnKind<' } FnKind::Method(_, sig, _, _) => { visitor.visit_generics(&sig.generics); - visitor.visit_explicit_self(&sig.explicit_self); } FnKind::Closure(_) => {} } @@ -640,7 +611,6 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai walk_list!(visitor, visit_expr, default); } MethodTraitItem(ref sig, None) => { - visitor.visit_explicit_self(&sig.explicit_self); visitor.visit_generics(&sig.generics); walk_fn_decl(visitor, &sig.decl); } @@ -662,6 +632,7 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai } pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplItem) { + visitor.visit_vis(&impl_item.vis); visitor.visit_name(impl_item.span, impl_item.name); walk_list!(visitor, visit_attribute, &impl_item.attrs); match impl_item.node { @@ -690,6 +661,7 @@ pub fn walk_struct_def<'v, V: Visitor<'v>>(visitor: &mut V, struct_definition: & } pub fn walk_struct_field<'v, V: Visitor<'v>>(visitor: &mut V, struct_field: &'v StructField) { + visitor.visit_vis(&struct_field.vis); visitor.visit_name(struct_field.span, struct_field.name); visitor.visit_ty(&struct_field.ty); walk_list!(visitor, visit_attribute, &struct_field.attrs); @@ -765,20 +737,20 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_block(if_block); walk_list!(visitor, visit_expr, optional_else); } - ExprWhile(ref subexpression, ref block, opt_ident) => { + ExprWhile(ref subexpression, ref block, opt_name) => { visitor.visit_expr(subexpression); visitor.visit_block(block); - walk_opt_ident(visitor, expression.span, opt_ident) + walk_opt_name(visitor, expression.span, opt_name) } - ExprLoop(ref block, opt_ident) => { + ExprLoop(ref block, opt_name) => { visitor.visit_block(block); - walk_opt_ident(visitor, expression.span, opt_ident) + walk_opt_name(visitor, expression.span, opt_name) } ExprMatch(ref subexpression, ref arms, _) => { visitor.visit_expr(subexpression); walk_list!(visitor, visit_arm, arms); } - ExprClosure(_, ref function_declaration, ref body) => { + ExprClosure(_, ref function_declaration, ref body, _fn_decl_span) => { visitor.visit_fn(FnKind::Closure(expression.attrs.as_attr_slice()), function_declaration, body, @@ -811,9 +783,9 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { } visitor.visit_path(path, expression.id) } - ExprBreak(ref opt_sp_ident) | ExprAgain(ref opt_sp_ident) => { - for sp_ident in opt_sp_ident { - visitor.visit_ident(sp_ident.span, sp_ident.node); + ExprBreak(ref opt_sp_name) | ExprAgain(ref opt_sp_name) => { + for sp_name in opt_sp_name { + visitor.visit_name(sp_name.span, sp_name.node); } } ExprRet(ref optional_expression) => { @@ -839,6 +811,12 @@ pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) { walk_list!(visitor, visit_attribute, &arm.attrs); } +pub fn walk_vis<'v, V: Visitor<'v>>(visitor: &mut V, vis: &'v Visibility) { + if let Visibility::Restricted { ref path, id } = *vis { + visitor.visit_path(path, id) + } +} + #[derive(Copy, Clone, RustcEncodable, RustcDecodable, Debug)] pub struct IdRange { pub min: NodeId, diff --git a/src/librustc/hir/lowering.rs b/src/librustc/hir/lowering.rs index 738a04dea5..28506fd20f 100644 --- a/src/librustc/hir/lowering.rs +++ b/src/librustc/hir/lowering.rs @@ -29,27 +29,6 @@ // are unique). Every new node must have a unique id. Avoid cloning HIR nodes. 
// If you do, you must then set the new node's id to a fresh one. // -// Lowering must be reproducable (the compiler only lowers once, but tools and -// custom lints may lower an AST node to a HIR node to interact with the -// compiler). The most interesting bit of this is ids - if you lower an AST node -// and create new HIR nodes with fresh ids, when re-lowering the same node, you -// must ensure you get the same ids! To do this, we keep track of the next id -// when we translate a node which requires new ids. By checking this cache and -// using node ids starting with the cached id, we ensure ids are reproducible. -// To use this system, you just need to hold on to a CachedIdSetter object -// whilst lowering. This is an RAII object that takes care of setting and -// restoring the cached id, etc. -// -// This whole system relies on node ids being incremented one at a time and -// all increments being for lowering. This means that you should not call any -// non-lowering function which will use new node ids. -// -// We must also cache gensym'ed Idents to ensure that we get the same Ident -// every time we lower a node with gensym'ed names. One consequence of this is -// that you can only gensym a name once in a lowering (you don't need to worry -// about nested lowering though). That's because we cache based on the name and -// the currently cached node id, which is unique per lowered node. -// // Spans are used for error messages and for tools to map semantics back to // source code. It is therefore not as important with spans as ids to be strict // about use (you can't break the compiler by screwing up a span). Obviously, a @@ -62,1028 +41,986 @@ // in the HIR, especially for multiple identifiers. use hir; +use hir::map::Definitions; +use hir::map::definitions::DefPathData; +use hir::def_id::{DefIndex, DefId}; +use hir::def::{Def, PathResolution}; +use session::Session; use std::collections::BTreeMap; -use std::collections::HashMap; use std::iter; use syntax::ast::*; use syntax::attr::{ThinAttributes, ThinAttributesExt}; -use syntax::errors::Handler; use syntax::ext::mtwt; use syntax::ptr::P; use syntax::codemap::{respan, Spanned, Span}; -use syntax::parse::token; +use syntax::parse::token::{self, keywords}; use syntax::std_inject; use syntax::visit::{self, Visitor}; -use std::cell::{Cell, RefCell}; - pub struct LoweringContext<'a> { crate_root: Option<&'static str>, - // Map AST ids to ids used for expanded nodes. - id_cache: RefCell>, - // Use if there are no cached ids for the current node. + // Use to assign ids to hir nodes that do not directly correspond to an ast node id_assigner: &'a NodeIdAssigner, - // 0 == no cached id. Must be incremented to align with previous id - // incrementing. - cached_id: Cell, - // Keep track of gensym'ed idents. - gensym_cache: RefCell>, - // A copy of cached_id, but is also set to an id while a node is lowered for - // the first time. - gensym_key: Cell, + // As we walk the AST we must keep track of the current 'parent' def id (in + // the form of a DefIndex) so that if we create a new node which introduces + // a definition, then we can properly create the def id. 
+ parent_def: Option, + resolver: &'a mut Resolver, } -impl<'a, 'hir> LoweringContext<'a> { - pub fn new(id_assigner: &'a NodeIdAssigner, c: Option<&Crate>) -> LoweringContext<'a> { - let crate_root = c.and_then(|c| { - if std_inject::no_core(c) { - None - } else if std_inject::no_std(c) { - Some("core") - } else { - Some("std") - } - }); +pub trait Resolver { + // Resolve a global hir path generated by the lowerer when expanding `for`, `if let`, etc. + fn resolve_generated_global_path(&mut self, path: &hir::Path, is_value: bool) -> Def; + + // Obtain the resolution for a node id + fn get_resolution(&mut self, id: NodeId) -> Option; + + // Record the resolution of a path or binding generated by the lowerer when expanding. + fn record_resolution(&mut self, id: NodeId, def: Def); + + // We must keep the set of definitions up to date as we add nodes that weren't in the AST. + // This should only return `None` during testing. + fn definitions(&mut self) -> Option<&mut Definitions>; +} + +pub struct DummyResolver; +impl Resolver for DummyResolver { + fn resolve_generated_global_path(&mut self, _path: &hir::Path, _is_value: bool) -> Def { + Def::Err + } + fn get_resolution(&mut self, _id: NodeId) -> Option { + None + } + fn record_resolution(&mut self, _id: NodeId, _def: Def) {} + fn definitions(&mut self) -> Option<&mut Definitions> { + None + } +} +pub fn lower_crate(sess: &Session, + krate: &Crate, + id_assigner: &NodeIdAssigner, + resolver: &mut Resolver) + -> hir::Crate { + // We're constructing the HIR here; we don't care what we will + // read, since we haven't even constructed the *input* to + // incr. comp. yet. + let _ignore = sess.dep_graph.in_ignore(); + + LoweringContext { + crate_root: if std_inject::no_core(krate) { + None + } else if std_inject::no_std(krate) { + Some("core") + } else { + Some("std") + }, + id_assigner: id_assigner, + parent_def: None, + resolver: resolver, + }.lower_crate(krate) +} + +impl<'a> LoweringContext<'a> { + pub fn testing_context(id_assigner: &'a NodeIdAssigner, resolver: &'a mut Resolver) -> Self { LoweringContext { - crate_root: crate_root, - id_cache: RefCell::new(HashMap::new()), + crate_root: None, id_assigner: id_assigner, - cached_id: Cell::new(0), - gensym_cache: RefCell::new(HashMap::new()), - gensym_key: Cell::new(0), + parent_def: None, + resolver: resolver, } } - fn next_id(&self) -> NodeId { - let cached_id = self.cached_id.get(); - if cached_id == 0 { - return self.id_assigner.next_node_id(); + fn lower_crate(&mut self, c: &Crate) -> hir::Crate { + struct ItemLowerer<'lcx, 'interner: 'lcx> { + items: BTreeMap, + lctx: &'lcx mut LoweringContext<'interner>, } - self.cached_id.set(cached_id + 1); - cached_id - } - - fn str_to_ident(&self, s: &'static str) -> hir::Ident { - let gensym_key = self.gensym_key.get(); - if gensym_key == 0 { - return hir::Ident::from_name(token::gensym(s)); + impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> { + fn visit_item(&mut self, item: &'lcx Item) { + self.items.insert(item.id, self.lctx.lower_item(item)); + visit::walk_item(self, item); + } } - let cached = self.gensym_cache.borrow().contains_key(&(gensym_key, s)); - if cached { - self.gensym_cache.borrow()[&(gensym_key, s)] - } else { - let result = hir::Ident::from_name(token::gensym(s)); - self.gensym_cache.borrow_mut().insert((gensym_key, s), result); - result + let items = { + let mut item_lowerer = ItemLowerer { items: BTreeMap::new(), lctx: self }; + visit::walk_crate(&mut item_lowerer, c); + item_lowerer.items + }; + + hir::Crate 
{ + module: self.lower_mod(&c.module), + attrs: self.lower_attrs(&c.attrs), + config: c.config.clone().into(), + span: c.span, + exported_macros: c.exported_macros.iter().map(|m| self.lower_macro_def(m)).collect(), + items: items, } } - // Panics if this LoweringContext's NodeIdAssigner is not able to emit diagnostics. - fn diagnostic(&self) -> &Handler { - self.id_assigner.diagnostic() + fn next_id(&self) -> NodeId { + self.id_assigner.next_node_id() } -} -// Utility fn for setting and unsetting the cached id. -fn cache_ids<'a, OP, R>(lctx: &LoweringContext, expr_id: NodeId, op: OP) -> R - where OP: FnOnce(&LoweringContext) -> R -{ - // Only reset the id if it was previously 0, i.e., was not cached. - // If it was cached, we are in a nested node, but our id count will - // still count towards the parent's count. - let reset_cached_id = lctx.cached_id.get() == 0; - // We always reset gensym_key so that if we use the same name in a nested - // node and after that node, they get different values. - let old_gensym_key = lctx.gensym_key.get(); + fn str_to_ident(&self, s: &'static str) -> Name { + token::gensym(s) + } + fn with_parent_def(&mut self, parent_id: NodeId, f: F) -> T + where F: FnOnce(&mut LoweringContext) -> T { - let id_cache: &mut HashMap<_, _> = &mut lctx.id_cache.borrow_mut(); - - if id_cache.contains_key(&expr_id) { - let cached_id = lctx.cached_id.get(); - if cached_id == 0 { - // We're entering a node where we need to track ids, but are not - // yet tracking. - lctx.cached_id.set(id_cache[&expr_id]); - } else { - // We're already tracking - check that the tracked id is the same - // as the expected id. - assert!(cached_id == id_cache[&expr_id], "id mismatch"); - } - lctx.gensym_key.set(id_cache[&expr_id]); - } else { - // We've never lowered this node before, remember it for next time. - let next_id = lctx.id_assigner.peek_node_id(); - id_cache.insert(expr_id, next_id); - lctx.gensym_key.set(next_id); - // self.cached_id is not set when we lower a node for the first time, - // only on re-lowering. 
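// A simplified, self-contained sketch (hypothetical types, not rustc's own) of
// the stub-resolver pattern used by `Resolver`/`DummyResolver` above: the
// lowering code depends only on a narrow trait, so a do-nothing implementation
// is enough to drive it in tests while the real driver supplies the full
// resolver.
trait Resolver {
    fn get_resolution(&mut self, id: u32) -> Option<&'static str>;
    fn record_resolution(&mut self, id: u32, def: &'static str);
}

struct DummyResolver;

impl Resolver for DummyResolver {
    fn get_resolution(&mut self, _id: u32) -> Option<&'static str> {
        None
    }
    fn record_resolution(&mut self, _id: u32, _def: &'static str) {}
}

fn lower_binding(resolver: &mut dyn Resolver, id: u32, name: &'static str) -> &'static str {
    // With the stub, nothing is ever recorded, so we fall back to the raw name.
    resolver.get_resolution(id).unwrap_or(name)
}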
- } - } + let old_def = self.parent_def; + self.parent_def = match self.resolver.definitions() { + Some(defs) => Some(defs.opt_def_index(parent_id).unwrap()), + None => old_def, + }; - let result = op(lctx); + let result = f(self); - if reset_cached_id { - lctx.cached_id.set(0); + self.parent_def = old_def; + result } - lctx.gensym_key.set(old_gensym_key); - - result -} -pub fn lower_ident(_lctx: &LoweringContext, ident: Ident) -> hir::Ident { - hir::Ident { - name: mtwt::resolve(ident), - unhygienic_name: ident.name, + fn lower_ident(&mut self, ident: Ident) -> Name { + if ident.name != keywords::Invalid.name() { + mtwt::resolve(ident) + } else { + ident.name + } } -} -pub fn lower_attrs(_lctx: &LoweringContext, attrs: &Vec) -> hir::HirVec { - attrs.clone().into() -} - -pub fn lower_view_path(lctx: &LoweringContext, view_path: &ViewPath) -> P { - P(Spanned { - node: match view_path.node { - ViewPathSimple(ident, ref path) => { - hir::ViewPathSimple(ident.name, lower_path(lctx, path)) - } - ViewPathGlob(ref path) => { - hir::ViewPathGlob(lower_path(lctx, path)) - } - ViewPathList(ref path, ref path_list_idents) => { - hir::ViewPathList(lower_path(lctx, path), - path_list_idents.iter() - .map(lower_path_list_item) - .collect()) - } - }, - span: view_path.span, - }) -} + fn lower_attrs(&mut self, attrs: &Vec) -> hir::HirVec { + attrs.clone().into() + } -fn lower_path_list_item(path_list_ident: &PathListItem) -> hir::PathListItem { - Spanned { - node: match path_list_ident.node { - PathListItemKind::Ident { id, name, rename } => hir::PathListIdent { - id: id, - name: name.name, - rename: rename.map(|x| x.name), + fn lower_view_path(&mut self, view_path: &ViewPath) -> P { + P(Spanned { + node: match view_path.node { + ViewPathSimple(ident, ref path) => { + hir::ViewPathSimple(ident.name, self.lower_path(path)) + } + ViewPathGlob(ref path) => { + hir::ViewPathGlob(self.lower_path(path)) + } + ViewPathList(ref path, ref path_list_idents) => { + hir::ViewPathList(self.lower_path(path), + path_list_idents.iter() + .map(|item| self.lower_path_list_item(item)) + .collect()) + } }, - PathListItemKind::Mod { id, rename } => hir::PathListMod { - id: id, - rename: rename.map(|x| x.name), + span: view_path.span, + }) + } + + fn lower_path_list_item(&mut self, path_list_ident: &PathListItem) -> hir::PathListItem { + Spanned { + node: match path_list_ident.node { + PathListItemKind::Ident { id, name, rename } => hir::PathListIdent { + id: id, + name: name.name, + rename: rename.map(|x| x.name), + }, + PathListItemKind::Mod { id, rename } => hir::PathListMod { + id: id, + rename: rename.map(|x| x.name), + }, }, - }, - span: path_list_ident.span, + span: path_list_ident.span, + } } -} -pub fn lower_arm(lctx: &LoweringContext, arm: &Arm) -> hir::Arm { - hir::Arm { - attrs: lower_attrs(lctx, &arm.attrs), - pats: arm.pats.iter().map(|x| lower_pat(lctx, x)).collect(), - guard: arm.guard.as_ref().map(|ref x| lower_expr(lctx, x)), - body: lower_expr(lctx, &arm.body), + fn lower_arm(&mut self, arm: &Arm) -> hir::Arm { + hir::Arm { + attrs: self.lower_attrs(&arm.attrs), + pats: arm.pats.iter().map(|x| self.lower_pat(x)).collect(), + guard: arm.guard.as_ref().map(|ref x| self.lower_expr(x)), + body: self.lower_expr(&arm.body), + } } -} -pub fn lower_decl(lctx: &LoweringContext, d: &Decl) -> P { - match d.node { - DeclKind::Local(ref l) => P(Spanned { - node: hir::DeclLocal(lower_local(lctx, l)), - span: d.span, - }), - DeclKind::Item(ref it) => P(Spanned { - node: hir::DeclItem(lower_item_id(lctx, it)), - span: 
d.span, - }), + fn lower_decl(&mut self, d: &Decl) -> P { + match d.node { + DeclKind::Local(ref l) => P(Spanned { + node: hir::DeclLocal(self.lower_local(l)), + span: d.span, + }), + DeclKind::Item(ref it) => P(Spanned { + node: hir::DeclItem(self.lower_item_id(it)), + span: d.span, + }), + } } -} -pub fn lower_ty_binding(lctx: &LoweringContext, b: &TypeBinding) -> hir::TypeBinding { - hir::TypeBinding { - id: b.id, - name: b.ident.name, - ty: lower_ty(lctx, &b.ty), - span: b.span, + fn lower_ty_binding(&mut self, b: &TypeBinding) -> hir::TypeBinding { + hir::TypeBinding { + id: b.id, + name: b.ident.name, + ty: self.lower_ty(&b.ty), + span: b.span, + } } -} -pub fn lower_ty(lctx: &LoweringContext, t: &Ty) -> P { - use syntax::ast::TyKind::*; - P(hir::Ty { - id: t.id, - node: match t.node { - Infer => hir::TyInfer, - Vec(ref ty) => hir::TyVec(lower_ty(lctx, ty)), - Ptr(ref mt) => hir::TyPtr(lower_mt(lctx, mt)), - Rptr(ref region, ref mt) => { - hir::TyRptr(lower_opt_lifetime(lctx, region), lower_mt(lctx, mt)) - } - BareFn(ref f) => { - hir::TyBareFn(P(hir::BareFnTy { - lifetimes: lower_lifetime_defs(lctx, &f.lifetimes), - unsafety: lower_unsafety(lctx, f.unsafety), - abi: f.abi, - decl: lower_fn_decl(lctx, &f.decl), - })) - } - Tup(ref tys) => hir::TyTup(tys.iter().map(|ty| lower_ty(lctx, ty)).collect()), - Paren(ref ty) => { - return lower_ty(lctx, ty); - } - Path(ref qself, ref path) => { - let qself = qself.as_ref().map(|&QSelf { ref ty, position }| { - hir::QSelf { - ty: lower_ty(lctx, ty), - position: position, - } - }); - hir::TyPath(qself, lower_path(lctx, path)) - } - ObjectSum(ref ty, ref bounds) => { - hir::TyObjectSum(lower_ty(lctx, ty), lower_bounds(lctx, bounds)) - } - FixedLengthVec(ref ty, ref e) => { - hir::TyFixedLengthVec(lower_ty(lctx, ty), lower_expr(lctx, e)) - } - Typeof(ref expr) => { - hir::TyTypeof(lower_expr(lctx, expr)) - } - PolyTraitRef(ref bounds) => { - hir::TyPolyTraitRef(bounds.iter().map(|b| lower_ty_param_bound(lctx, b)).collect()) - } - Mac(_) => panic!("TyMac should have been expanded by now."), - }, - span: t.span, - }) -} + fn lower_ty(&mut self, t: &Ty) -> P { + use syntax::ast::TyKind::*; + P(hir::Ty { + id: t.id, + node: match t.node { + Infer => hir::TyInfer, + Vec(ref ty) => hir::TyVec(self.lower_ty(ty)), + Ptr(ref mt) => hir::TyPtr(self.lower_mt(mt)), + Rptr(ref region, ref mt) => { + hir::TyRptr(self.lower_opt_lifetime(region), self.lower_mt(mt)) + } + BareFn(ref f) => { + hir::TyBareFn(P(hir::BareFnTy { + lifetimes: self.lower_lifetime_defs(&f.lifetimes), + unsafety: self.lower_unsafety(f.unsafety), + abi: f.abi, + decl: self.lower_fn_decl(&f.decl), + })) + } + Tup(ref tys) => hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty)).collect()), + Paren(ref ty) => { + return self.lower_ty(ty); + } + Path(ref qself, ref path) => { + let qself = qself.as_ref().map(|&QSelf { ref ty, position }| { + hir::QSelf { + ty: self.lower_ty(ty), + position: position, + } + }); + hir::TyPath(qself, self.lower_path(path)) + } + ObjectSum(ref ty, ref bounds) => { + hir::TyObjectSum(self.lower_ty(ty), self.lower_bounds(bounds)) + } + FixedLengthVec(ref ty, ref e) => { + hir::TyFixedLengthVec(self.lower_ty(ty), self.lower_expr(e)) + } + Typeof(ref expr) => { + hir::TyTypeof(self.lower_expr(expr)) + } + PolyTraitRef(ref bounds) => { + let bounds = bounds.iter().map(|b| self.lower_ty_param_bound(b)).collect(); + hir::TyPolyTraitRef(bounds) + } + Mac(_) => panic!("TyMac should have been expanded by now."), + }, + span: t.span, + }) + } -pub fn lower_foreign_mod(lctx: 
&LoweringContext, fm: &ForeignMod) -> hir::ForeignMod { - hir::ForeignMod { - abi: fm.abi, - items: fm.items.iter().map(|x| lower_foreign_item(lctx, x)).collect(), + fn lower_foreign_mod(&mut self, fm: &ForeignMod) -> hir::ForeignMod { + hir::ForeignMod { + abi: fm.abi, + items: fm.items.iter().map(|x| self.lower_foreign_item(x)).collect(), + } } -} -pub fn lower_variant(lctx: &LoweringContext, v: &Variant) -> hir::Variant { - Spanned { - node: hir::Variant_ { - name: v.node.name.name, - attrs: lower_attrs(lctx, &v.node.attrs), - data: lower_variant_data(lctx, &v.node.data), - disr_expr: v.node.disr_expr.as_ref().map(|e| lower_expr(lctx, e)), - }, - span: v.span, + fn lower_variant(&mut self, v: &Variant) -> hir::Variant { + Spanned { + node: hir::Variant_ { + name: v.node.name.name, + attrs: self.lower_attrs(&v.node.attrs), + data: self.lower_variant_data(&v.node.data), + disr_expr: v.node.disr_expr.as_ref().map(|e| self.lower_expr(e)), + }, + span: v.span, + } } -} -// Path segments are usually unhygienic, hygienic path segments can occur only in -// identifier-like paths originating from `ExprPath`. -// Make life simpler for rustc_resolve by renaming only such segments. -pub fn lower_path_full(lctx: &LoweringContext, p: &Path, maybe_hygienic: bool) -> hir::Path { - let maybe_hygienic = maybe_hygienic && !p.global && p.segments.len() == 1; - hir::Path { - global: p.global, - segments: p.segments - .iter() - .map(|&PathSegment { identifier, ref parameters }| { - hir::PathSegment { - identifier: if maybe_hygienic { - lower_ident(lctx, identifier) - } else { - hir::Ident::from_name(identifier.name) - }, - parameters: lower_path_parameters(lctx, parameters), - } - }) - .collect(), - span: p.span, + fn lower_path_full(&mut self, p: &Path, rename: bool) -> hir::Path { + hir::Path { + global: p.global, + segments: p.segments + .iter() + .map(|&PathSegment { identifier, ref parameters }| { + hir::PathSegment { + name: if rename { + self.lower_ident(identifier) + } else { + identifier.name + }, + parameters: self.lower_path_parameters(parameters), + } + }) + .collect(), + span: p.span, + } } -} -pub fn lower_path(lctx: &LoweringContext, p: &Path) -> hir::Path { - lower_path_full(lctx, p, false) -} + fn lower_path(&mut self, p: &Path) -> hir::Path { + self.lower_path_full(p, false) + } -pub fn lower_path_parameters(lctx: &LoweringContext, - path_parameters: &PathParameters) - -> hir::PathParameters { - match *path_parameters { - PathParameters::AngleBracketed(ref data) => - hir::AngleBracketedParameters(lower_angle_bracketed_parameter_data(lctx, data)), - PathParameters::Parenthesized(ref data) => - hir::ParenthesizedParameters(lower_parenthesized_parameter_data(lctx, data)), + fn lower_path_parameters(&mut self, path_parameters: &PathParameters) -> hir::PathParameters { + match *path_parameters { + PathParameters::AngleBracketed(ref data) => + hir::AngleBracketedParameters(self.lower_angle_bracketed_parameter_data(data)), + PathParameters::Parenthesized(ref data) => + hir::ParenthesizedParameters(self.lower_parenthesized_parameter_data(data)), + } } -} -pub fn lower_angle_bracketed_parameter_data(lctx: &LoweringContext, + fn lower_angle_bracketed_parameter_data(&mut self, data: &AngleBracketedParameterData) -> hir::AngleBracketedParameterData { - let &AngleBracketedParameterData { ref lifetimes, ref types, ref bindings } = data; - hir::AngleBracketedParameterData { - lifetimes: lower_lifetimes(lctx, lifetimes), - types: types.iter().map(|ty| lower_ty(lctx, ty)).collect(), - bindings: 
bindings.iter().map(|b| lower_ty_binding(lctx, b)).collect(), + let &AngleBracketedParameterData { ref lifetimes, ref types, ref bindings } = data; + hir::AngleBracketedParameterData { + lifetimes: self.lower_lifetimes(lifetimes), + types: types.iter().map(|ty| self.lower_ty(ty)).collect(), + bindings: bindings.iter().map(|b| self.lower_ty_binding(b)).collect(), + } } -} -pub fn lower_parenthesized_parameter_data(lctx: &LoweringContext, + fn lower_parenthesized_parameter_data(&mut self, data: &ParenthesizedParameterData) -> hir::ParenthesizedParameterData { - let &ParenthesizedParameterData { ref inputs, ref output, span } = data; - hir::ParenthesizedParameterData { - inputs: inputs.iter().map(|ty| lower_ty(lctx, ty)).collect(), - output: output.as_ref().map(|ty| lower_ty(lctx, ty)), - span: span, + let &ParenthesizedParameterData { ref inputs, ref output, span } = data; + hir::ParenthesizedParameterData { + inputs: inputs.iter().map(|ty| self.lower_ty(ty)).collect(), + output: output.as_ref().map(|ty| self.lower_ty(ty)), + span: span, + } } -} -pub fn lower_local(lctx: &LoweringContext, l: &Local) -> P { - P(hir::Local { - id: l.id, - ty: l.ty.as_ref().map(|t| lower_ty(lctx, t)), - pat: lower_pat(lctx, &l.pat), - init: l.init.as_ref().map(|e| lower_expr(lctx, e)), - span: l.span, - attrs: l.attrs.clone(), - }) -} + fn lower_local(&mut self, l: &Local) -> P { + P(hir::Local { + id: l.id, + ty: l.ty.as_ref().map(|t| self.lower_ty(t)), + pat: self.lower_pat(&l.pat), + init: l.init.as_ref().map(|e| self.lower_expr(e)), + span: l.span, + attrs: l.attrs.clone(), + }) + } -pub fn lower_explicit_self_underscore(lctx: &LoweringContext, - es: &SelfKind) - -> hir::ExplicitSelf_ { - match *es { - SelfKind::Static => hir::SelfStatic, - SelfKind::Value(v) => hir::SelfValue(v.name), - SelfKind::Region(ref lifetime, m, ident) => { - hir::SelfRegion(lower_opt_lifetime(lctx, lifetime), - lower_mutability(lctx, m), - ident.name) - } - SelfKind::Explicit(ref typ, ident) => { - hir::SelfExplicit(lower_ty(lctx, typ), ident.name) + fn lower_mutability(&mut self, m: Mutability) -> hir::Mutability { + match m { + Mutability::Mutable => hir::MutMutable, + Mutability::Immutable => hir::MutImmutable, } } -} -pub fn lower_mutability(_lctx: &LoweringContext, m: Mutability) -> hir::Mutability { - match m { - Mutability::Mutable => hir::MutMutable, - Mutability::Immutable => hir::MutImmutable, + fn lower_arg(&mut self, arg: &Arg) -> hir::Arg { + hir::Arg { + id: arg.id, + pat: self.lower_pat(&arg.pat), + ty: self.lower_ty(&arg.ty), + } } -} -pub fn lower_explicit_self(lctx: &LoweringContext, s: &ExplicitSelf) -> hir::ExplicitSelf { - Spanned { - node: lower_explicit_self_underscore(lctx, &s.node), - span: s.span, + fn lower_fn_decl(&mut self, decl: &FnDecl) -> P { + P(hir::FnDecl { + inputs: decl.inputs.iter().map(|x| self.lower_arg(x)).collect(), + output: match decl.output { + FunctionRetTy::Ty(ref ty) => hir::Return(self.lower_ty(ty)), + FunctionRetTy::Default(span) => hir::DefaultReturn(span), + FunctionRetTy::None(span) => hir::NoReturn(span), + }, + variadic: decl.variadic, + }) } -} -pub fn lower_arg(lctx: &LoweringContext, arg: &Arg) -> hir::Arg { - hir::Arg { - id: arg.id, - pat: lower_pat(lctx, &arg.pat), - ty: lower_ty(lctx, &arg.ty), + fn lower_ty_param_bound(&mut self, tpb: &TyParamBound) -> hir::TyParamBound { + match *tpb { + TraitTyParamBound(ref ty, modifier) => { + hir::TraitTyParamBound(self.lower_poly_trait_ref(ty), + self.lower_trait_bound_modifier(modifier)) + } + RegionTyParamBound(ref lifetime) 
=> { + hir::RegionTyParamBound(self.lower_lifetime(lifetime)) + } + } } -} - -pub fn lower_fn_decl(lctx: &LoweringContext, decl: &FnDecl) -> P { - P(hir::FnDecl { - inputs: decl.inputs.iter().map(|x| lower_arg(lctx, x)).collect(), - output: match decl.output { - FunctionRetTy::Ty(ref ty) => hir::Return(lower_ty(lctx, ty)), - FunctionRetTy::Default(span) => hir::DefaultReturn(span), - FunctionRetTy::None(span) => hir::NoReturn(span), - }, - variadic: decl.variadic, - }) -} -pub fn lower_ty_param_bound(lctx: &LoweringContext, tpb: &TyParamBound) -> hir::TyParamBound { - match *tpb { - TraitTyParamBound(ref ty, modifier) => { - hir::TraitTyParamBound(lower_poly_trait_ref(lctx, ty), - lower_trait_bound_modifier(lctx, modifier)) - } - RegionTyParamBound(ref lifetime) => { - hir::RegionTyParamBound(lower_lifetime(lctx, lifetime)) + fn lower_ty_param(&mut self, tp: &TyParam) -> hir::TyParam { + hir::TyParam { + id: tp.id, + name: tp.ident.name, + bounds: self.lower_bounds(&tp.bounds), + default: tp.default.as_ref().map(|x| self.lower_ty(x)), + span: tp.span, } } -} -pub fn lower_ty_param(lctx: &LoweringContext, tp: &TyParam) -> hir::TyParam { - hir::TyParam { - id: tp.id, - name: tp.ident.name, - bounds: lower_bounds(lctx, &tp.bounds), - default: tp.default.as_ref().map(|x| lower_ty(lctx, x)), - span: tp.span, + fn lower_ty_params(&mut self, tps: &P<[TyParam]>) -> hir::HirVec { + tps.iter().map(|tp| self.lower_ty_param(tp)).collect() } -} -pub fn lower_ty_params(lctx: &LoweringContext, - tps: &P<[TyParam]>) - -> hir::HirVec { - tps.iter().map(|tp| lower_ty_param(lctx, tp)).collect() -} - -pub fn lower_lifetime(_lctx: &LoweringContext, l: &Lifetime) -> hir::Lifetime { - hir::Lifetime { - id: l.id, - name: l.name, - span: l.span, + fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime { + hir::Lifetime { + id: l.id, + name: l.name, + span: l.span, + } } -} -pub fn lower_lifetime_def(lctx: &LoweringContext, l: &LifetimeDef) -> hir::LifetimeDef { - hir::LifetimeDef { - lifetime: lower_lifetime(lctx, &l.lifetime), - bounds: lower_lifetimes(lctx, &l.bounds), + fn lower_lifetime_def(&mut self, l: &LifetimeDef) -> hir::LifetimeDef { + hir::LifetimeDef { + lifetime: self.lower_lifetime(&l.lifetime), + bounds: self.lower_lifetimes(&l.bounds), + } } -} - -pub fn lower_lifetimes(lctx: &LoweringContext, lts: &Vec) -> hir::HirVec { - lts.iter().map(|l| lower_lifetime(lctx, l)).collect() -} - -pub fn lower_lifetime_defs(lctx: &LoweringContext, - lts: &Vec) - -> hir::HirVec { - lts.iter().map(|l| lower_lifetime_def(lctx, l)).collect() -} -pub fn lower_opt_lifetime(lctx: &LoweringContext, - o_lt: &Option) - -> Option { - o_lt.as_ref().map(|lt| lower_lifetime(lctx, lt)) -} + fn lower_lifetimes(&mut self, lts: &Vec) -> hir::HirVec { + lts.iter().map(|l| self.lower_lifetime(l)).collect() + } -pub fn lower_generics(lctx: &LoweringContext, g: &Generics) -> hir::Generics { - hir::Generics { - ty_params: lower_ty_params(lctx, &g.ty_params), - lifetimes: lower_lifetime_defs(lctx, &g.lifetimes), - where_clause: lower_where_clause(lctx, &g.where_clause), + fn lower_lifetime_defs(&mut self, lts: &Vec) -> hir::HirVec { + lts.iter().map(|l| self.lower_lifetime_def(l)).collect() } -} -pub fn lower_where_clause(lctx: &LoweringContext, wc: &WhereClause) -> hir::WhereClause { - hir::WhereClause { - id: wc.id, - predicates: wc.predicates - .iter() - .map(|predicate| lower_where_predicate(lctx, predicate)) - .collect(), + fn lower_opt_lifetime(&mut self, o_lt: &Option) -> Option { + o_lt.as_ref().map(|lt| 
self.lower_lifetime(lt)) } -} -pub fn lower_where_predicate(lctx: &LoweringContext, - pred: &WherePredicate) - -> hir::WherePredicate { - match *pred { - WherePredicate::BoundPredicate(WhereBoundPredicate{ ref bound_lifetimes, - ref bounded_ty, - ref bounds, - span}) => { - hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate { - bound_lifetimes: lower_lifetime_defs(lctx, bound_lifetimes), - bounded_ty: lower_ty(lctx, bounded_ty), - bounds: bounds.iter().map(|x| lower_ty_param_bound(lctx, x)).collect(), - span: span, - }) - } - WherePredicate::RegionPredicate(WhereRegionPredicate{ ref lifetime, - ref bounds, - span}) => { - hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate { - span: span, - lifetime: lower_lifetime(lctx, lifetime), - bounds: bounds.iter().map(|bound| lower_lifetime(lctx, bound)).collect(), - }) - } - WherePredicate::EqPredicate(WhereEqPredicate{ id, - ref path, - ref ty, - span}) => { - hir::WherePredicate::EqPredicate(hir::WhereEqPredicate { - id: id, - path: lower_path(lctx, path), - ty: lower_ty(lctx, ty), - span: span, - }) + fn lower_generics(&mut self, g: &Generics) -> hir::Generics { + hir::Generics { + ty_params: self.lower_ty_params(&g.ty_params), + lifetimes: self.lower_lifetime_defs(&g.lifetimes), + where_clause: self.lower_where_clause(&g.where_clause), } } -} -pub fn lower_variant_data(lctx: &LoweringContext, vdata: &VariantData) -> hir::VariantData { - match *vdata { - VariantData::Struct(ref fields, id) => { - hir::VariantData::Struct(fields.iter() - .enumerate() - .map(|f| lower_struct_field(lctx, f)) - .collect(), - id) + fn lower_where_clause(&mut self, wc: &WhereClause) -> hir::WhereClause { + hir::WhereClause { + id: wc.id, + predicates: wc.predicates + .iter() + .map(|predicate| self.lower_where_predicate(predicate)) + .collect(), } - VariantData::Tuple(ref fields, id) => { - hir::VariantData::Tuple(fields.iter() - .enumerate() - .map(|f| lower_struct_field(lctx, f)) - .collect(), - id) - } - VariantData::Unit(id) => hir::VariantData::Unit(id), } -} -pub fn lower_trait_ref(lctx: &LoweringContext, p: &TraitRef) -> hir::TraitRef { - hir::TraitRef { - path: lower_path(lctx, &p.path), - ref_id: p.ref_id, + fn lower_where_predicate(&mut self, pred: &WherePredicate) -> hir::WherePredicate { + match *pred { + WherePredicate::BoundPredicate(WhereBoundPredicate{ ref bound_lifetimes, + ref bounded_ty, + ref bounds, + span}) => { + hir::WherePredicate::BoundPredicate(hir::WhereBoundPredicate { + bound_lifetimes: self.lower_lifetime_defs(bound_lifetimes), + bounded_ty: self.lower_ty(bounded_ty), + bounds: bounds.iter().map(|x| self.lower_ty_param_bound(x)).collect(), + span: span, + }) + } + WherePredicate::RegionPredicate(WhereRegionPredicate{ ref lifetime, + ref bounds, + span}) => { + hir::WherePredicate::RegionPredicate(hir::WhereRegionPredicate { + span: span, + lifetime: self.lower_lifetime(lifetime), + bounds: bounds.iter().map(|bound| self.lower_lifetime(bound)).collect(), + }) + } + WherePredicate::EqPredicate(WhereEqPredicate{ id, + ref path, + ref ty, + span}) => { + hir::WherePredicate::EqPredicate(hir::WhereEqPredicate { + id: id, + path: self.lower_path(path), + ty: self.lower_ty(ty), + span: span, + }) + } + } } -} -pub fn lower_poly_trait_ref(lctx: &LoweringContext, p: &PolyTraitRef) -> hir::PolyTraitRef { - hir::PolyTraitRef { - bound_lifetimes: lower_lifetime_defs(lctx, &p.bound_lifetimes), - trait_ref: lower_trait_ref(lctx, &p.trait_ref), - span: p.span, + fn lower_variant_data(&mut self, vdata: &VariantData) -> 
hir::VariantData { + match *vdata { + VariantData::Struct(ref fields, id) => { + hir::VariantData::Struct(fields.iter() + .enumerate() + .map(|f| self.lower_struct_field(f)) + .collect(), + id) + } + VariantData::Tuple(ref fields, id) => { + hir::VariantData::Tuple(fields.iter() + .enumerate() + .map(|f| self.lower_struct_field(f)) + .collect(), + id) + } + VariantData::Unit(id) => hir::VariantData::Unit(id), + } } -} -pub fn lower_struct_field(lctx: &LoweringContext, - (index, f): (usize, &StructField)) - -> hir::StructField { - hir::StructField { - span: f.span, - id: f.id, - name: f.ident.map(|ident| ident.name).unwrap_or(token::intern(&index.to_string())), - vis: lower_visibility(lctx, &f.vis), - ty: lower_ty(lctx, &f.ty), - attrs: lower_attrs(lctx, &f.attrs), + fn lower_trait_ref(&mut self, p: &TraitRef) -> hir::TraitRef { + hir::TraitRef { + path: self.lower_path(&p.path), + ref_id: p.ref_id, + } } -} -pub fn lower_field(lctx: &LoweringContext, f: &Field) -> hir::Field { - hir::Field { - name: respan(f.ident.span, f.ident.node.name), - expr: lower_expr(lctx, &f.expr), - span: f.span, + fn lower_poly_trait_ref(&mut self, p: &PolyTraitRef) -> hir::PolyTraitRef { + hir::PolyTraitRef { + bound_lifetimes: self.lower_lifetime_defs(&p.bound_lifetimes), + trait_ref: self.lower_trait_ref(&p.trait_ref), + span: p.span, + } } -} -pub fn lower_mt(lctx: &LoweringContext, mt: &MutTy) -> hir::MutTy { - hir::MutTy { - ty: lower_ty(lctx, &mt.ty), - mutbl: lower_mutability(lctx, mt.mutbl), + fn lower_struct_field(&mut self, (index, f): (usize, &StructField)) -> hir::StructField { + hir::StructField { + span: f.span, + id: f.id, + name: f.ident.map(|ident| ident.name).unwrap_or(token::intern(&index.to_string())), + vis: self.lower_visibility(&f.vis), + ty: self.lower_ty(&f.ty), + attrs: self.lower_attrs(&f.attrs), + } } -} -pub fn lower_opt_bounds(lctx: &LoweringContext, - b: &Option) - -> Option { - b.as_ref().map(|ref bounds| lower_bounds(lctx, bounds)) -} + fn lower_field(&mut self, f: &Field) -> hir::Field { + hir::Field { + name: respan(f.ident.span, f.ident.node.name), + expr: self.lower_expr(&f.expr), + span: f.span, + } + } -fn lower_bounds(lctx: &LoweringContext, bounds: &TyParamBounds) -> hir::TyParamBounds { - bounds.iter().map(|bound| lower_ty_param_bound(lctx, bound)).collect() -} + fn lower_mt(&mut self, mt: &MutTy) -> hir::MutTy { + hir::MutTy { + ty: self.lower_ty(&mt.ty), + mutbl: self.lower_mutability(mt.mutbl), + } + } -pub fn lower_block(lctx: &LoweringContext, b: &Block) -> P { - P(hir::Block { - id: b.id, - stmts: b.stmts.iter().map(|s| lower_stmt(lctx, s)).collect(), - expr: b.expr.as_ref().map(|ref x| lower_expr(lctx, x)), - rules: lower_block_check_mode(lctx, &b.rules), - span: b.span, - }) -} + fn lower_bounds(&mut self, bounds: &TyParamBounds) -> hir::TyParamBounds { + bounds.iter().map(|bound| self.lower_ty_param_bound(bound)).collect() + } -pub fn lower_item_kind(lctx: &LoweringContext, i: &ItemKind) -> hir::Item_ { - match *i { - ItemKind::ExternCrate(string) => hir::ItemExternCrate(string), - ItemKind::Use(ref view_path) => { - hir::ItemUse(lower_view_path(lctx, view_path)) - } - ItemKind::Static(ref t, m, ref e) => { - hir::ItemStatic(lower_ty(lctx, t), - lower_mutability(lctx, m), - lower_expr(lctx, e)) - } - ItemKind::Const(ref t, ref e) => { - hir::ItemConst(lower_ty(lctx, t), lower_expr(lctx, e)) - } - ItemKind::Fn(ref decl, unsafety, constness, abi, ref generics, ref body) => { - hir::ItemFn(lower_fn_decl(lctx, decl), - lower_unsafety(lctx, unsafety), - 
lower_constness(lctx, constness), - abi, - lower_generics(lctx, generics), - lower_block(lctx, body)) - } - ItemKind::Mod(ref m) => hir::ItemMod(lower_mod(lctx, m)), - ItemKind::ForeignMod(ref nm) => hir::ItemForeignMod(lower_foreign_mod(lctx, nm)), - ItemKind::Ty(ref t, ref generics) => { - hir::ItemTy(lower_ty(lctx, t), lower_generics(lctx, generics)) - } - ItemKind::Enum(ref enum_definition, ref generics) => { - hir::ItemEnum(hir::EnumDef { - variants: enum_definition.variants - .iter() - .map(|x| lower_variant(lctx, x)) - .collect(), - }, - lower_generics(lctx, generics)) - } - ItemKind::Struct(ref struct_def, ref generics) => { - let struct_def = lower_variant_data(lctx, struct_def); - hir::ItemStruct(struct_def, lower_generics(lctx, generics)) - } - ItemKind::DefaultImpl(unsafety, ref trait_ref) => { - hir::ItemDefaultImpl(lower_unsafety(lctx, unsafety), - lower_trait_ref(lctx, trait_ref)) - } - ItemKind::Impl(unsafety, polarity, ref generics, ref ifce, ref ty, ref impl_items) => { - let new_impl_items = impl_items.iter() - .map(|item| lower_impl_item(lctx, item)) - .collect(); - let ifce = ifce.as_ref().map(|trait_ref| lower_trait_ref(lctx, trait_ref)); - hir::ItemImpl(lower_unsafety(lctx, unsafety), - lower_impl_polarity(lctx, polarity), - lower_generics(lctx, generics), - ifce, - lower_ty(lctx, ty), - new_impl_items) - } - ItemKind::Trait(unsafety, ref generics, ref bounds, ref items) => { - let bounds = lower_bounds(lctx, bounds); - let items = items.iter().map(|item| lower_trait_item(lctx, item)).collect(); - hir::ItemTrait(lower_unsafety(lctx, unsafety), - lower_generics(lctx, generics), - bounds, - items) - } - ItemKind::Mac(_) => panic!("Shouldn't still be around"), + fn lower_block(&mut self, b: &Block) -> P { + P(hir::Block { + id: b.id, + stmts: b.stmts.iter().map(|s| self.lower_stmt(s)).collect(), + expr: b.expr.as_ref().map(|ref x| self.lower_expr(x)), + rules: self.lower_block_check_mode(&b.rules), + span: b.span, + }) } -} -pub fn lower_trait_item(lctx: &LoweringContext, i: &TraitItem) -> hir::TraitItem { - hir::TraitItem { - id: i.id, - name: i.ident.name, - attrs: lower_attrs(lctx, &i.attrs), - node: match i.node { - TraitItemKind::Const(ref ty, ref default) => { - hir::ConstTraitItem(lower_ty(lctx, ty), - default.as_ref().map(|x| lower_expr(lctx, x))) + fn lower_item_kind(&mut self, i: &ItemKind) -> hir::Item_ { + match *i { + ItemKind::ExternCrate(string) => hir::ItemExternCrate(string), + ItemKind::Use(ref view_path) => { + hir::ItemUse(self.lower_view_path(view_path)) } - TraitItemKind::Method(ref sig, ref body) => { - hir::MethodTraitItem(lower_method_sig(lctx, sig), - body.as_ref().map(|x| lower_block(lctx, x))) + ItemKind::Static(ref t, m, ref e) => { + hir::ItemStatic(self.lower_ty(t), + self.lower_mutability(m), + self.lower_expr(e)) } - TraitItemKind::Type(ref bounds, ref default) => { - hir::TypeTraitItem(lower_bounds(lctx, bounds), - default.as_ref().map(|x| lower_ty(lctx, x))) + ItemKind::Const(ref t, ref e) => { + hir::ItemConst(self.lower_ty(t), self.lower_expr(e)) } - }, - span: i.span, - } -} - -pub fn lower_impl_item(lctx: &LoweringContext, i: &ImplItem) -> hir::ImplItem { - hir::ImplItem { - id: i.id, - name: i.ident.name, - attrs: lower_attrs(lctx, &i.attrs), - vis: lower_visibility(lctx, &i.vis), - defaultness: lower_defaultness(lctx, i.defaultness), - node: match i.node { - ImplItemKind::Const(ref ty, ref expr) => { - hir::ImplItemKind::Const(lower_ty(lctx, ty), lower_expr(lctx, expr)) + ItemKind::Fn(ref decl, unsafety, constness, abi, ref 
generics, ref body) => { + hir::ItemFn(self.lower_fn_decl(decl), + self.lower_unsafety(unsafety), + self.lower_constness(constness), + abi, + self.lower_generics(generics), + self.lower_block(body)) } - ImplItemKind::Method(ref sig, ref body) => { - hir::ImplItemKind::Method(lower_method_sig(lctx, sig), lower_block(lctx, body)) + ItemKind::Mod(ref m) => hir::ItemMod(self.lower_mod(m)), + ItemKind::ForeignMod(ref nm) => hir::ItemForeignMod(self.lower_foreign_mod(nm)), + ItemKind::Ty(ref t, ref generics) => { + hir::ItemTy(self.lower_ty(t), self.lower_generics(generics)) } - ImplItemKind::Type(ref ty) => hir::ImplItemKind::Type(lower_ty(lctx, ty)), - ImplItemKind::Macro(..) => panic!("Shouldn't exist any more"), - }, - span: i.span, + ItemKind::Enum(ref enum_definition, ref generics) => { + hir::ItemEnum(hir::EnumDef { + variants: enum_definition.variants + .iter() + .map(|x| self.lower_variant(x)) + .collect(), + }, + self.lower_generics(generics)) + } + ItemKind::Struct(ref struct_def, ref generics) => { + let struct_def = self.lower_variant_data(struct_def); + hir::ItemStruct(struct_def, self.lower_generics(generics)) + } + ItemKind::DefaultImpl(unsafety, ref trait_ref) => { + hir::ItemDefaultImpl(self.lower_unsafety(unsafety), + self.lower_trait_ref(trait_ref)) + } + ItemKind::Impl(unsafety, polarity, ref generics, ref ifce, ref ty, ref impl_items) => { + let new_impl_items = impl_items.iter() + .map(|item| self.lower_impl_item(item)) + .collect(); + let ifce = ifce.as_ref().map(|trait_ref| self.lower_trait_ref(trait_ref)); + hir::ItemImpl(self.lower_unsafety(unsafety), + self.lower_impl_polarity(polarity), + self.lower_generics(generics), + ifce, + self.lower_ty(ty), + new_impl_items) + } + ItemKind::Trait(unsafety, ref generics, ref bounds, ref items) => { + let bounds = self.lower_bounds(bounds); + let items = items.iter().map(|item| self.lower_trait_item(item)).collect(); + hir::ItemTrait(self.lower_unsafety(unsafety), + self.lower_generics(generics), + bounds, + items) + } + ItemKind::Mac(_) => panic!("Shouldn't still be around"), + } } -} -pub fn lower_mod(lctx: &LoweringContext, m: &Mod) -> hir::Mod { - hir::Mod { - inner: m.inner, - item_ids: m.items.iter().map(|x| lower_item_id(lctx, x)).collect(), + fn lower_trait_item(&mut self, i: &TraitItem) -> hir::TraitItem { + self.with_parent_def(i.id, |this| { + hir::TraitItem { + id: i.id, + name: i.ident.name, + attrs: this.lower_attrs(&i.attrs), + node: match i.node { + TraitItemKind::Const(ref ty, ref default) => { + hir::ConstTraitItem(this.lower_ty(ty), + default.as_ref().map(|x| this.lower_expr(x))) + } + TraitItemKind::Method(ref sig, ref body) => { + hir::MethodTraitItem(this.lower_method_sig(sig), + body.as_ref().map(|x| this.lower_block(x))) + } + TraitItemKind::Type(ref bounds, ref default) => { + hir::TypeTraitItem(this.lower_bounds(bounds), + default.as_ref().map(|x| this.lower_ty(x))) + } + }, + span: i.span, + } + }) + } + + fn lower_impl_item(&mut self, i: &ImplItem) -> hir::ImplItem { + self.with_parent_def(i.id, |this| { + hir::ImplItem { + id: i.id, + name: i.ident.name, + attrs: this.lower_attrs(&i.attrs), + vis: this.lower_visibility(&i.vis), + defaultness: this.lower_defaultness(i.defaultness), + node: match i.node { + ImplItemKind::Const(ref ty, ref expr) => { + hir::ImplItemKind::Const(this.lower_ty(ty), this.lower_expr(expr)) + } + ImplItemKind::Method(ref sig, ref body) => { + hir::ImplItemKind::Method(this.lower_method_sig(sig), + this.lower_block(body)) + } + ImplItemKind::Type(ref ty) => 
hir::ImplItemKind::Type(this.lower_ty(ty)), + ImplItemKind::Macro(..) => panic!("Shouldn't exist any more"), + }, + span: i.span, + } + }) } -} - -struct ItemLowerer<'lcx, 'interner: 'lcx> { - items: BTreeMap, - lctx: &'lcx LoweringContext<'interner>, -} -impl<'lcx, 'interner> Visitor<'lcx> for ItemLowerer<'lcx, 'interner> { - fn visit_item(&mut self, item: &'lcx Item) { - self.items.insert(item.id, lower_item(self.lctx, item)); - visit::walk_item(self, item); + fn lower_mod(&mut self, m: &Mod) -> hir::Mod { + hir::Mod { + inner: m.inner, + item_ids: m.items.iter().map(|x| self.lower_item_id(x)).collect(), + } } -} -pub fn lower_crate(lctx: &LoweringContext, c: &Crate) -> hir::Crate { - let items = { - let mut item_lowerer = ItemLowerer { items: BTreeMap::new(), lctx: lctx }; - visit::walk_crate(&mut item_lowerer, c); - item_lowerer.items - }; - - hir::Crate { - module: lower_mod(lctx, &c.module), - attrs: lower_attrs(lctx, &c.attrs), - config: c.config.clone().into(), - span: c.span, - exported_macros: c.exported_macros.iter().map(|m| lower_macro_def(lctx, m)).collect(), - items: items, + fn lower_macro_def(&mut self, m: &MacroDef) -> hir::MacroDef { + hir::MacroDef { + name: m.ident.name, + attrs: self.lower_attrs(&m.attrs), + id: m.id, + span: m.span, + imported_from: m.imported_from.map(|x| x.name), + export: m.export, + use_locally: m.use_locally, + allow_internal_unstable: m.allow_internal_unstable, + body: m.body.clone().into(), + } } -} -pub fn lower_macro_def(lctx: &LoweringContext, m: &MacroDef) -> hir::MacroDef { - hir::MacroDef { - name: m.ident.name, - attrs: lower_attrs(lctx, &m.attrs), - id: m.id, - span: m.span, - imported_from: m.imported_from.map(|x| x.name), - export: m.export, - use_locally: m.use_locally, - allow_internal_unstable: m.allow_internal_unstable, - body: m.body.clone().into(), + fn lower_item_id(&mut self, i: &Item) -> hir::ItemId { + hir::ItemId { id: i.id } } -} - -pub fn lower_item_id(_lctx: &LoweringContext, i: &Item) -> hir::ItemId { - hir::ItemId { id: i.id } -} -pub fn lower_item(lctx: &LoweringContext, i: &Item) -> hir::Item { - let node = lower_item_kind(lctx, &i.node); + pub fn lower_item(&mut self, i: &Item) -> hir::Item { + let node = self.with_parent_def(i.id, |this| { + this.lower_item_kind(&i.node) + }); - hir::Item { - id: i.id, - name: i.ident.name, - attrs: lower_attrs(lctx, &i.attrs), - node: node, - vis: lower_visibility(lctx, &i.vis), - span: i.span, + hir::Item { + id: i.id, + name: i.ident.name, + attrs: self.lower_attrs(&i.attrs), + node: node, + vis: self.lower_visibility(&i.vis), + span: i.span, + } } -} -pub fn lower_foreign_item(lctx: &LoweringContext, i: &ForeignItem) -> hir::ForeignItem { - hir::ForeignItem { - id: i.id, - name: i.ident.name, - attrs: lower_attrs(lctx, &i.attrs), - node: match i.node { - ForeignItemKind::Fn(ref fdec, ref generics) => { - hir::ForeignItemFn(lower_fn_decl(lctx, fdec), lower_generics(lctx, generics)) - } - ForeignItemKind::Static(ref t, m) => { - hir::ForeignItemStatic(lower_ty(lctx, t), m) + fn lower_foreign_item(&mut self, i: &ForeignItem) -> hir::ForeignItem { + self.with_parent_def(i.id, |this| { + hir::ForeignItem { + id: i.id, + name: i.ident.name, + attrs: this.lower_attrs(&i.attrs), + node: match i.node { + ForeignItemKind::Fn(ref fdec, ref generics) => { + hir::ForeignItemFn(this.lower_fn_decl(fdec), this.lower_generics(generics)) + } + ForeignItemKind::Static(ref t, m) => { + hir::ForeignItemStatic(this.lower_ty(t), m) + } + }, + vis: this.lower_visibility(&i.vis), + span: i.span, } - }, 
- vis: lower_visibility(lctx, &i.vis), - span: i.span, + }) } -} -pub fn lower_method_sig(lctx: &LoweringContext, sig: &MethodSig) -> hir::MethodSig { - hir::MethodSig { - generics: lower_generics(lctx, &sig.generics), - abi: sig.abi, - explicit_self: lower_explicit_self(lctx, &sig.explicit_self), - unsafety: lower_unsafety(lctx, sig.unsafety), - constness: lower_constness(lctx, sig.constness), - decl: lower_fn_decl(lctx, &sig.decl), + fn lower_method_sig(&mut self, sig: &MethodSig) -> hir::MethodSig { + // Check for `self: _` and `self: &_` + if let SelfKind::Explicit(ref ty, _) = sig.explicit_self.node { + match sig.decl.inputs.get(0).and_then(Arg::to_self).map(|eself| eself.node) { + Some(SelfKind::Value(..)) | Some(SelfKind::Region(..)) => { + self.id_assigner.diagnostic().span_err(ty.span, + "the type placeholder `_` is not allowed within types on item signatures"); + } + _ => {} + } + } + hir::MethodSig { + generics: self.lower_generics(&sig.generics), + abi: sig.abi, + unsafety: self.lower_unsafety(sig.unsafety), + constness: self.lower_constness(sig.constness), + decl: self.lower_fn_decl(&sig.decl), + } } -} -pub fn lower_unsafety(_lctx: &LoweringContext, u: Unsafety) -> hir::Unsafety { - match u { - Unsafety::Unsafe => hir::Unsafety::Unsafe, - Unsafety::Normal => hir::Unsafety::Normal, + fn lower_unsafety(&mut self, u: Unsafety) -> hir::Unsafety { + match u { + Unsafety::Unsafe => hir::Unsafety::Unsafe, + Unsafety::Normal => hir::Unsafety::Normal, + } } -} -pub fn lower_constness(_lctx: &LoweringContext, c: Constness) -> hir::Constness { - match c { - Constness::Const => hir::Constness::Const, - Constness::NotConst => hir::Constness::NotConst, + fn lower_constness(&mut self, c: Constness) -> hir::Constness { + match c { + Constness::Const => hir::Constness::Const, + Constness::NotConst => hir::Constness::NotConst, + } } -} -pub fn lower_unop(_lctx: &LoweringContext, u: UnOp) -> hir::UnOp { - match u { - UnOp::Deref => hir::UnDeref, - UnOp::Not => hir::UnNot, - UnOp::Neg => hir::UnNeg, + fn lower_unop(&mut self, u: UnOp) -> hir::UnOp { + match u { + UnOp::Deref => hir::UnDeref, + UnOp::Not => hir::UnNot, + UnOp::Neg => hir::UnNeg, + } } -} -pub fn lower_binop(_lctx: &LoweringContext, b: BinOp) -> hir::BinOp { - Spanned { - node: match b.node { - BinOpKind::Add => hir::BiAdd, - BinOpKind::Sub => hir::BiSub, - BinOpKind::Mul => hir::BiMul, - BinOpKind::Div => hir::BiDiv, - BinOpKind::Rem => hir::BiRem, - BinOpKind::And => hir::BiAnd, - BinOpKind::Or => hir::BiOr, - BinOpKind::BitXor => hir::BiBitXor, - BinOpKind::BitAnd => hir::BiBitAnd, - BinOpKind::BitOr => hir::BiBitOr, - BinOpKind::Shl => hir::BiShl, - BinOpKind::Shr => hir::BiShr, - BinOpKind::Eq => hir::BiEq, - BinOpKind::Lt => hir::BiLt, - BinOpKind::Le => hir::BiLe, - BinOpKind::Ne => hir::BiNe, - BinOpKind::Ge => hir::BiGe, - BinOpKind::Gt => hir::BiGt, - }, - span: b.span, + fn lower_binop(&mut self, b: BinOp) -> hir::BinOp { + Spanned { + node: match b.node { + BinOpKind::Add => hir::BiAdd, + BinOpKind::Sub => hir::BiSub, + BinOpKind::Mul => hir::BiMul, + BinOpKind::Div => hir::BiDiv, + BinOpKind::Rem => hir::BiRem, + BinOpKind::And => hir::BiAnd, + BinOpKind::Or => hir::BiOr, + BinOpKind::BitXor => hir::BiBitXor, + BinOpKind::BitAnd => hir::BiBitAnd, + BinOpKind::BitOr => hir::BiBitOr, + BinOpKind::Shl => hir::BiShl, + BinOpKind::Shr => hir::BiShr, + BinOpKind::Eq => hir::BiEq, + BinOpKind::Lt => hir::BiLt, + BinOpKind::Le => hir::BiLe, + BinOpKind::Ne => hir::BiNe, + BinOpKind::Ge => hir::BiGe, + BinOpKind::Gt => 
hir::BiGt, + }, + span: b.span, + } } -} -pub fn lower_pat(lctx: &LoweringContext, p: &Pat) -> P { - P(hir::Pat { - id: p.id, - node: match p.node { - PatKind::Wild => hir::PatKind::Wild, - PatKind::Ident(ref binding_mode, pth1, ref sub) => { - hir::PatKind::Ident(lower_binding_mode(lctx, binding_mode), - respan(pth1.span, lower_ident(lctx, pth1.node)), - sub.as_ref().map(|x| lower_pat(lctx, x))) - } - PatKind::Lit(ref e) => hir::PatKind::Lit(lower_expr(lctx, e)), - PatKind::TupleStruct(ref pth, ref pats) => { - hir::PatKind::TupleStruct(lower_path(lctx, pth), - pats.as_ref() - .map(|pats| pats.iter().map(|x| lower_pat(lctx, x)).collect())) - } - PatKind::Path(ref pth) => { - hir::PatKind::Path(lower_path(lctx, pth)) - } - PatKind::QPath(ref qself, ref pth) => { - let qself = hir::QSelf { - ty: lower_ty(lctx, &qself.ty), - position: qself.position, - }; - hir::PatKind::QPath(qself, lower_path(lctx, pth)) - } - PatKind::Struct(ref pth, ref fields, etc) => { - let pth = lower_path(lctx, pth); - let fs = fields.iter() - .map(|f| { - Spanned { - span: f.span, - node: hir::FieldPat { - name: f.node.ident.name, - pat: lower_pat(lctx, &f.node.pat), - is_shorthand: f.node.is_shorthand, - }, - } - }) - .collect(); - hir::PatKind::Struct(pth, fs, etc) - } - PatKind::Tup(ref elts) => { - hir::PatKind::Tup(elts.iter().map(|x| lower_pat(lctx, x)).collect()) - } - PatKind::Box(ref inner) => hir::PatKind::Box(lower_pat(lctx, inner)), - PatKind::Ref(ref inner, mutbl) => { - hir::PatKind::Ref(lower_pat(lctx, inner), lower_mutability(lctx, mutbl)) - } - PatKind::Range(ref e1, ref e2) => { - hir::PatKind::Range(lower_expr(lctx, e1), lower_expr(lctx, e2)) - } - PatKind::Vec(ref before, ref slice, ref after) => { - hir::PatKind::Vec(before.iter().map(|x| lower_pat(lctx, x)).collect(), - slice.as_ref().map(|x| lower_pat(lctx, x)), - after.iter().map(|x| lower_pat(lctx, x)).collect()) - } - PatKind::Mac(_) => panic!("Shouldn't exist here"), - }, - span: p.span, - }) -} - -pub fn lower_expr(lctx: &LoweringContext, e: &Expr) -> P { - P(hir::Expr { - id: e.id, - node: match e.node { - // Issue #22181: - // Eventually a desugaring for `box EXPR` - // (similar to the desugaring above for `in PLACE BLOCK`) - // should go here, desugaring - // - // to: - // - // let mut place = BoxPlace::make_place(); - // let raw_place = Place::pointer(&mut place); - // let value = $value; - // unsafe { - // ::std::ptr::write(raw_place, value); - // Boxed::finalize(place) - // } - // - // But for now there are type-inference issues doing that. 
- ExprKind::Box(ref e) => { - hir::ExprBox(lower_expr(lctx, e)) - } - - // Desugar ExprBox: `in (PLACE) EXPR` - ExprKind::InPlace(ref placer, ref value_expr) => { + fn lower_pat(&mut self, p: &Pat) -> P { + P(hir::Pat { + id: p.id, + node: match p.node { + PatKind::Wild => hir::PatKind::Wild, + PatKind::Ident(ref binding_mode, pth1, ref sub) => { + self.with_parent_def(p.id, |this| { + let name = match this.resolver.get_resolution(p.id).map(|d| d.full_def()) { + // Only pattern bindings are renamed + None | Some(Def::Local(..)) => this.lower_ident(pth1.node), + _ => pth1.node.name, + }; + hir::PatKind::Ident(this.lower_binding_mode(binding_mode), + respan(pth1.span, name), + sub.as_ref().map(|x| this.lower_pat(x))) + }) + } + PatKind::Lit(ref e) => hir::PatKind::Lit(self.lower_expr(e)), + PatKind::TupleStruct(ref pth, ref pats) => { + hir::PatKind::TupleStruct(self.lower_path(pth), + pats.as_ref() + .map(|pats| pats.iter().map(|x| self.lower_pat(x)).collect())) + } + PatKind::Path(ref pth) => { + hir::PatKind::Path(self.lower_path(pth)) + } + PatKind::QPath(ref qself, ref pth) => { + let qself = hir::QSelf { + ty: self.lower_ty(&qself.ty), + position: qself.position, + }; + hir::PatKind::QPath(qself, self.lower_path(pth)) + } + PatKind::Struct(ref pth, ref fields, etc) => { + let pth = self.lower_path(pth); + let fs = fields.iter() + .map(|f| { + Spanned { + span: f.span, + node: hir::FieldPat { + name: f.node.ident.name, + pat: self.lower_pat(&f.node.pat), + is_shorthand: f.node.is_shorthand, + }, + } + }) + .collect(); + hir::PatKind::Struct(pth, fs, etc) + } + PatKind::Tup(ref elts) => { + hir::PatKind::Tup(elts.iter().map(|x| self.lower_pat(x)).collect()) + } + PatKind::Box(ref inner) => hir::PatKind::Box(self.lower_pat(inner)), + PatKind::Ref(ref inner, mutbl) => { + hir::PatKind::Ref(self.lower_pat(inner), self.lower_mutability(mutbl)) + } + PatKind::Range(ref e1, ref e2) => { + hir::PatKind::Range(self.lower_expr(e1), self.lower_expr(e2)) + } + PatKind::Vec(ref before, ref slice, ref after) => { + hir::PatKind::Vec(before.iter().map(|x| self.lower_pat(x)).collect(), + slice.as_ref().map(|x| self.lower_pat(x)), + after.iter().map(|x| self.lower_pat(x)).collect()) + } + PatKind::Mac(_) => panic!("Shouldn't exist here"), + }, + span: p.span, + }) + } + + fn lower_expr(&mut self, e: &Expr) -> P { + P(hir::Expr { + id: e.id, + node: match e.node { + // Issue #22181: + // Eventually a desugaring for `box EXPR` + // (similar to the desugaring above for `in PLACE BLOCK`) + // should go here, desugaring + // // to: // - // let p = PLACE; - // let mut place = Placer::make_place(p); + // let mut place = BoxPlace::make_place(); // let raw_place = Place::pointer(&mut place); - // push_unsafe!({ - // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR )); - // InPlace::finalize(place) - // }) - return cache_ids(lctx, e.id, |lctx| { - let placer_expr = lower_expr(lctx, placer); - let value_expr = lower_expr(lctx, value_expr); - - let placer_ident = lctx.str_to_ident("placer"); - let place_ident = lctx.str_to_ident("place"); - let p_ptr_ident = lctx.str_to_ident("p_ptr"); + // let value = $value; + // unsafe { + // ::std::ptr::write(raw_place, value); + // Boxed::finalize(place) + // } + // + // But for now there are type-inference issues doing that. 
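// A compilable, self-contained sketch of the protocol that the
// `ExprKind::InPlace` arm below stitches together when it desugars
// `in (PLACE) EXPR`. The trait names here are hypothetical stand-ins for the
// `core::ops` lang items, and `std::ptr::write` stands in for the unstable
// `move_val_init` intrinsic; the trailing comments map each line back to the
// let-statements and unsafe block that the lowering builds.
trait Placer<T> {
    type Place: InPlace<T>;
    fn make_place(self) -> Self::Place;
}

trait Place<T> {
    fn pointer(&mut self) -> *mut T;
}

trait InPlace<T>: Place<T> {
    type Owner;
    unsafe fn finalize(self) -> Self::Owner;
}

fn emplace<T, P: Placer<T>>(placer: P, value: T) -> <P::Place as InPlace<T>>::Owner {
    let mut place = placer.make_place(); // let mut place = Placer::make_place(placer);
    let p_ptr = place.pointer();         // let p_ptr = Place::pointer(&mut place);
    unsafe {
        std::ptr::write(p_ptr, value);   // move_val_init(p_ptr, EXPR)
        place.finalize()                 // InPlace::finalize(place)
    }
}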
+ ExprKind::Box(ref e) => { + hir::ExprBox(self.lower_expr(e)) + } + + // Desugar ExprBox: `in (PLACE) EXPR` + ExprKind::InPlace(ref placer, ref value_expr) => { + // to: + // + // let p = PLACE; + // let mut place = Placer::make_place(p); + // let raw_place = Place::pointer(&mut place); + // push_unsafe!({ + // std::intrinsics::move_val_init(raw_place, pop_unsafe!( EXPR )); + // InPlace::finalize(place) + // }) + let placer_expr = self.lower_expr(placer); + let value_expr = self.lower_expr(value_expr); + + let placer_ident = self.str_to_ident("placer"); + let place_ident = self.str_to_ident("place"); + let p_ptr_ident = self.str_to_ident("p_ptr"); let make_place = ["ops", "Placer", "make_place"]; let place_pointer = ["ops", "Place", "pointer"]; let move_val_init = ["intrinsics", "move_val_init"]; let inplace_finalize = ["ops", "InPlace", "finalize"]; - let make_call = |lctx: &LoweringContext, p, args| { - let path = core_path(lctx, e.span, p); - let path = expr_path(lctx, path, None); - expr_call(lctx, e.span, path, args, None) + let make_call = |this: &mut LoweringContext, p, args| { + let path = this.core_path(e.span, p); + let path = this.expr_path(path, None); + this.expr_call(e.span, path, args, None) }; - let mk_stmt_let = |lctx: &LoweringContext, bind, expr| { - stmt_let(lctx, e.span, false, bind, expr, None) + let mk_stmt_let = |this: &mut LoweringContext, bind, expr| { + this.stmt_let(e.span, false, bind, expr, None) }; - let mk_stmt_let_mut = |lctx: &LoweringContext, bind, expr| { - stmt_let(lctx, e.span, true, bind, expr, None) + let mk_stmt_let_mut = |this: &mut LoweringContext, bind, expr| { + this.stmt_let(e.span, true, bind, expr, None) }; // let placer = ; - let s1 = { - let placer_expr = signal_block_expr(lctx, - hir_vec![], - placer_expr, - e.span, - hir::PopUnstableBlock, - None); - mk_stmt_let(lctx, placer_ident, placer_expr) + let (s1, placer_binding) = { + let placer_expr = self.signal_block_expr(hir_vec![], + placer_expr, + e.span, + hir::PopUnstableBlock, + None); + mk_stmt_let(self, placer_ident, placer_expr) }; // let mut place = Placer::make_place(placer); - let s2 = { - let placer = expr_ident(lctx, e.span, placer_ident, None); - let call = make_call(lctx, &make_place, hir_vec![placer]); - mk_stmt_let_mut(lctx, place_ident, call) + let (s2, place_binding) = { + let placer = self.expr_ident(e.span, placer_ident, None, placer_binding); + let call = make_call(self, &make_place, hir_vec![placer]); + mk_stmt_let_mut(self, place_ident, call) }; // let p_ptr = Place::pointer(&mut place); - let s3 = { - let agent = expr_ident(lctx, e.span, place_ident, None); - let args = hir_vec![expr_mut_addr_of(lctx, e.span, agent, None)]; - let call = make_call(lctx, &place_pointer, args); - mk_stmt_let(lctx, p_ptr_ident, call) + let (s3, p_ptr_binding) = { + let agent = self.expr_ident(e.span, place_ident, None, place_binding); + let args = hir_vec![self.expr_mut_addr_of(e.span, agent, None)]; + let call = make_call(self, &place_pointer, args); + mk_stmt_let(self, p_ptr_ident, call) }; // pop_unsafe!(EXPR)); let pop_unsafe_expr = { - let value_expr = signal_block_expr(lctx, - hir_vec![], - value_expr, - e.span, - hir::PopUnstableBlock, - None); - signal_block_expr(lctx, - hir_vec![], - value_expr, - e.span, - hir::PopUnsafeBlock(hir::CompilerGenerated), None) + let value_expr = self.signal_block_expr(hir_vec![], + value_expr, + e.span, + hir::PopUnstableBlock, + None); + self.signal_block_expr(hir_vec![], + value_expr, + e.span, + hir::PopUnsafeBlock(hir::CompilerGenerated), None) 
}; // push_unsafe!({ @@ -1091,87 +1028,83 @@ pub fn lower_expr(lctx: &LoweringContext, e: &Expr) -> P { // InPlace::finalize(place) // }) let expr = { - let ptr = expr_ident(lctx, e.span, p_ptr_ident, None); + let ptr = self.expr_ident(e.span, p_ptr_ident, None, p_ptr_binding); let call_move_val_init = hir::StmtSemi( - make_call(lctx, &move_val_init, hir_vec![ptr, pop_unsafe_expr]), - lctx.next_id()); + make_call(self, &move_val_init, hir_vec![ptr, pop_unsafe_expr]), + self.next_id()); let call_move_val_init = respan(e.span, call_move_val_init); - let place = expr_ident(lctx, e.span, place_ident, None); - let call = make_call(lctx, &inplace_finalize, hir_vec![place]); - signal_block_expr(lctx, - hir_vec![call_move_val_init], - call, - e.span, - hir::PushUnsafeBlock(hir::CompilerGenerated), None) + let place = self.expr_ident(e.span, place_ident, None, place_binding); + let call = make_call(self, &inplace_finalize, hir_vec![place]); + self.signal_block_expr(hir_vec![call_move_val_init], + call, + e.span, + hir::PushUnsafeBlock(hir::CompilerGenerated), None) }; - signal_block_expr(lctx, - hir_vec![s1, s2, s3], - expr, - e.span, - hir::PushUnstableBlock, - e.attrs.clone()) - }); - } + return self.signal_block_expr(hir_vec![s1, s2, s3], + expr, + e.span, + hir::PushUnstableBlock, + e.attrs.clone()); + } - ExprKind::Vec(ref exprs) => { - hir::ExprVec(exprs.iter().map(|x| lower_expr(lctx, x)).collect()) - } - ExprKind::Repeat(ref expr, ref count) => { - let expr = lower_expr(lctx, expr); - let count = lower_expr(lctx, count); - hir::ExprRepeat(expr, count) - } - ExprKind::Tup(ref elts) => { - hir::ExprTup(elts.iter().map(|x| lower_expr(lctx, x)).collect()) - } - ExprKind::Call(ref f, ref args) => { - let f = lower_expr(lctx, f); - hir::ExprCall(f, args.iter().map(|x| lower_expr(lctx, x)).collect()) - } - ExprKind::MethodCall(i, ref tps, ref args) => { - let tps = tps.iter().map(|x| lower_ty(lctx, x)).collect(); - let args = args.iter().map(|x| lower_expr(lctx, x)).collect(); - hir::ExprMethodCall(respan(i.span, i.node.name), tps, args) - } - ExprKind::Binary(binop, ref lhs, ref rhs) => { - let binop = lower_binop(lctx, binop); - let lhs = lower_expr(lctx, lhs); - let rhs = lower_expr(lctx, rhs); - hir::ExprBinary(binop, lhs, rhs) - } - ExprKind::Unary(op, ref ohs) => { - let op = lower_unop(lctx, op); - let ohs = lower_expr(lctx, ohs); - hir::ExprUnary(op, ohs) - } - ExprKind::Lit(ref l) => hir::ExprLit(P((**l).clone())), - ExprKind::Cast(ref expr, ref ty) => { - let expr = lower_expr(lctx, expr); - hir::ExprCast(expr, lower_ty(lctx, ty)) - } - ExprKind::Type(ref expr, ref ty) => { - let expr = lower_expr(lctx, expr); - hir::ExprType(expr, lower_ty(lctx, ty)) - } - ExprKind::AddrOf(m, ref ohs) => { - let m = lower_mutability(lctx, m); - let ohs = lower_expr(lctx, ohs); - hir::ExprAddrOf(m, ohs) - } - // More complicated than you might expect because the else branch - // might be `if let`. - ExprKind::If(ref cond, ref blk, ref else_opt) => { - let else_opt = else_opt.as_ref().map(|els| { - match els.node { - ExprKind::IfLet(..) 
=> { - cache_ids(lctx, e.id, |lctx| { + ExprKind::Vec(ref exprs) => { + hir::ExprVec(exprs.iter().map(|x| self.lower_expr(x)).collect()) + } + ExprKind::Repeat(ref expr, ref count) => { + let expr = self.lower_expr(expr); + let count = self.lower_expr(count); + hir::ExprRepeat(expr, count) + } + ExprKind::Tup(ref elts) => { + hir::ExprTup(elts.iter().map(|x| self.lower_expr(x)).collect()) + } + ExprKind::Call(ref f, ref args) => { + let f = self.lower_expr(f); + hir::ExprCall(f, args.iter().map(|x| self.lower_expr(x)).collect()) + } + ExprKind::MethodCall(i, ref tps, ref args) => { + let tps = tps.iter().map(|x| self.lower_ty(x)).collect(); + let args = args.iter().map(|x| self.lower_expr(x)).collect(); + hir::ExprMethodCall(respan(i.span, i.node.name), tps, args) + } + ExprKind::Binary(binop, ref lhs, ref rhs) => { + let binop = self.lower_binop(binop); + let lhs = self.lower_expr(lhs); + let rhs = self.lower_expr(rhs); + hir::ExprBinary(binop, lhs, rhs) + } + ExprKind::Unary(op, ref ohs) => { + let op = self.lower_unop(op); + let ohs = self.lower_expr(ohs); + hir::ExprUnary(op, ohs) + } + ExprKind::Lit(ref l) => hir::ExprLit(P((**l).clone())), + ExprKind::Cast(ref expr, ref ty) => { + let expr = self.lower_expr(expr); + hir::ExprCast(expr, self.lower_ty(ty)) + } + ExprKind::Type(ref expr, ref ty) => { + let expr = self.lower_expr(expr); + hir::ExprType(expr, self.lower_ty(ty)) + } + ExprKind::AddrOf(m, ref ohs) => { + let m = self.lower_mutability(m); + let ohs = self.lower_expr(ohs); + hir::ExprAddrOf(m, ohs) + } + // More complicated than you might expect because the else branch + // might be `if let`. + ExprKind::If(ref cond, ref blk, ref else_opt) => { + let else_opt = else_opt.as_ref().map(|els| { + match els.node { + ExprKind::IfLet(..) => { // wrap the if-let expr in a block let span = els.span; - let els = lower_expr(lctx, els); - let id = lctx.next_id(); + let els = self.lower_expr(els); + let id = self.next_id(); let blk = P(hir::Block { stmts: hir_vec![], expr: Some(els), @@ -1179,208 +1112,214 @@ pub fn lower_expr(lctx: &LoweringContext, e: &Expr) -> P { rules: hir::DefaultBlock, span: span, }); - expr_block(lctx, blk, None) - }) + self.expr_block(blk, None) + } + _ => self.lower_expr(els), } - _ => lower_expr(lctx, els), - } - }); - - hir::ExprIf(lower_expr(lctx, cond), lower_block(lctx, blk), else_opt) - } - ExprKind::While(ref cond, ref body, opt_ident) => { - hir::ExprWhile(lower_expr(lctx, cond), lower_block(lctx, body), - opt_ident.map(|ident| lower_ident(lctx, ident))) - } - ExprKind::Loop(ref body, opt_ident) => { - hir::ExprLoop(lower_block(lctx, body), - opt_ident.map(|ident| lower_ident(lctx, ident))) - } - ExprKind::Match(ref expr, ref arms) => { - hir::ExprMatch(lower_expr(lctx, expr), - arms.iter().map(|x| lower_arm(lctx, x)).collect(), - hir::MatchSource::Normal) - } - ExprKind::Closure(capture_clause, ref decl, ref body) => { - hir::ExprClosure(lower_capture_clause(lctx, capture_clause), - lower_fn_decl(lctx, decl), - lower_block(lctx, body)) - } - ExprKind::Block(ref blk) => hir::ExprBlock(lower_block(lctx, blk)), - ExprKind::Assign(ref el, ref er) => { - hir::ExprAssign(lower_expr(lctx, el), lower_expr(lctx, er)) - } - ExprKind::AssignOp(op, ref el, ref er) => { - hir::ExprAssignOp(lower_binop(lctx, op), - lower_expr(lctx, el), - lower_expr(lctx, er)) - } - ExprKind::Field(ref el, ident) => { - hir::ExprField(lower_expr(lctx, el), respan(ident.span, ident.node.name)) - } - ExprKind::TupField(ref el, ident) => { - hir::ExprTupField(lower_expr(lctx, el), 
ident) - } - ExprKind::Index(ref el, ref er) => { - hir::ExprIndex(lower_expr(lctx, el), lower_expr(lctx, er)) - } - ExprKind::Range(ref e1, ref e2, lims) => { - fn make_struct(lctx: &LoweringContext, - ast_expr: &Expr, - path: &[&str], - fields: &[(&str, &P)]) -> P { - let strs = std_path(lctx, &iter::once(&"ops") - .chain(path) - .map(|s| *s) - .collect::>()); - - let structpath = path_global(ast_expr.span, strs); - - let hir_expr = if fields.len() == 0 { - expr_path(lctx, - structpath, - ast_expr.attrs.clone()) - } else { - expr_struct(lctx, - ast_expr.span, - structpath, - fields.into_iter().map(|&(s, e)| { - field(token::intern(s), - signal_block_expr(lctx, - hir_vec![], - lower_expr(lctx, &**e), - e.span, - hir::PopUnstableBlock, - None), - ast_expr.span) - }).collect(), - None, - ast_expr.attrs.clone()) - }; + }); - signal_block_expr(lctx, - hir_vec![], - hir_expr, - ast_expr.span, - hir::PushUnstableBlock, - None) + hir::ExprIf(self.lower_expr(cond), self.lower_block(blk), else_opt) + } + ExprKind::While(ref cond, ref body, opt_ident) => { + hir::ExprWhile(self.lower_expr(cond), self.lower_block(body), + opt_ident.map(|ident| self.lower_ident(ident))) + } + ExprKind::Loop(ref body, opt_ident) => { + hir::ExprLoop(self.lower_block(body), + opt_ident.map(|ident| self.lower_ident(ident))) + } + ExprKind::Match(ref expr, ref arms) => { + hir::ExprMatch(self.lower_expr(expr), + arms.iter().map(|x| self.lower_arm(x)).collect(), + hir::MatchSource::Normal) + } + ExprKind::Closure(capture_clause, ref decl, ref body, fn_decl_span) => { + self.with_parent_def(e.id, |this| { + hir::ExprClosure(this.lower_capture_clause(capture_clause), + this.lower_fn_decl(decl), + this.lower_block(body), + fn_decl_span) + }) } + ExprKind::Block(ref blk) => hir::ExprBlock(self.lower_block(blk)), + ExprKind::Assign(ref el, ref er) => { + hir::ExprAssign(self.lower_expr(el), self.lower_expr(er)) + } + ExprKind::AssignOp(op, ref el, ref er) => { + hir::ExprAssignOp(self.lower_binop(op), + self.lower_expr(el), + self.lower_expr(er)) + } + ExprKind::Field(ref el, ident) => { + hir::ExprField(self.lower_expr(el), respan(ident.span, ident.node.name)) + } + ExprKind::TupField(ref el, ident) => { + hir::ExprTupField(self.lower_expr(el), ident) + } + ExprKind::Index(ref el, ref er) => { + hir::ExprIndex(self.lower_expr(el), self.lower_expr(er)) + } + ExprKind::Range(ref e1, ref e2, lims) => { + fn make_struct(this: &mut LoweringContext, + ast_expr: &Expr, + path: &[&str], + fields: &[(&str, &P)]) -> P { + let strs = this.std_path(&iter::once(&"ops") + .chain(path) + .map(|s| *s) + .collect::>()); + + let structpath = this.path_global(ast_expr.span, strs); + + let hir_expr = if fields.len() == 0 { + this.expr_path(structpath, ast_expr.attrs.clone()) + } else { + let fields = fields.into_iter().map(|&(s, e)| { + let expr = this.lower_expr(&e); + let signal_block = this.signal_block_expr(hir_vec![], + expr, + e.span, + hir::PopUnstableBlock, + None); + this.field(token::intern(s), signal_block, ast_expr.span) + }).collect(); + let attrs = ast_expr.attrs.clone(); + + this.expr_struct(ast_expr.span, structpath, fields, None, attrs) + }; + + this.signal_block_expr(hir_vec![], + hir_expr, + ast_expr.span, + hir::PushUnstableBlock, + None) + } - return cache_ids(lctx, e.id, |lctx| { use syntax::ast::RangeLimits::*; - match (e1, e2, lims) { + return match (e1, e2, lims) { (&None, &None, HalfOpen) => - make_struct(lctx, e, &["RangeFull"], - &[]), + make_struct(self, e, &["RangeFull"], &[]), (&Some(ref e1), &None, HalfOpen) => - 
make_struct(lctx, e, &["RangeFrom"], + make_struct(self, e, &["RangeFrom"], &[("start", e1)]), (&None, &Some(ref e2), HalfOpen) => - make_struct(lctx, e, &["RangeTo"], + make_struct(self, e, &["RangeTo"], &[("end", e2)]), (&Some(ref e1), &Some(ref e2), HalfOpen) => - make_struct(lctx, e, &["Range"], + make_struct(self, e, &["Range"], &[("start", e1), ("end", e2)]), (&None, &Some(ref e2), Closed) => - make_struct(lctx, e, &["RangeToInclusive"], + make_struct(self, e, &["RangeToInclusive"], &[("end", e2)]), (&Some(ref e1), &Some(ref e2), Closed) => - make_struct(lctx, e, &["RangeInclusive", "NonEmpty"], + make_struct(self, e, &["RangeInclusive", "NonEmpty"], &[("start", e1), ("end", e2)]), - _ => panic!(lctx.diagnostic().span_fatal(e.span, - "inclusive range with no end")) - } - }); - } - ExprKind::Path(ref qself, ref path) => { - let hir_qself = qself.as_ref().map(|&QSelf { ref ty, position }| { - hir::QSelf { - ty: lower_ty(lctx, ty), - position: position, - } - }); - hir::ExprPath(hir_qself, lower_path_full(lctx, path, qself.is_none())) - } - ExprKind::Break(opt_ident) => hir::ExprBreak(opt_ident.map(|sp_ident| { - respan(sp_ident.span, lower_ident(lctx, sp_ident.node)) - })), - ExprKind::Again(opt_ident) => hir::ExprAgain(opt_ident.map(|sp_ident| { - respan(sp_ident.span, lower_ident(lctx, sp_ident.node)) - })), - ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| lower_expr(lctx, x))), - ExprKind::InlineAsm(InlineAsm { - ref inputs, - ref outputs, - ref asm, - asm_str_style, - ref clobbers, - volatile, - alignstack, - dialect, - expn_id, - }) => hir::ExprInlineAsm(hir::InlineAsm { - inputs: inputs.iter().map(|&(ref c, _)| c.clone()).collect(), - outputs: outputs.iter() - .map(|out| { - hir::InlineAsmOutput { - constraint: out.constraint.clone(), - is_rw: out.is_rw, - is_indirect: out.is_indirect, - } - }) - .collect(), - asm: asm.clone(), - asm_str_style: asm_str_style, - clobbers: clobbers.clone().into(), - volatile: volatile, - alignstack: alignstack, - dialect: dialect, - expn_id: expn_id, - }, outputs.iter().map(|out| lower_expr(lctx, &out.expr)).collect(), - inputs.iter().map(|&(_, ref input)| lower_expr(lctx, input)).collect()), - ExprKind::Struct(ref path, ref fields, ref maybe_expr) => { - hir::ExprStruct(lower_path(lctx, path), - fields.iter().map(|x| lower_field(lctx, x)).collect(), - maybe_expr.as_ref().map(|x| lower_expr(lctx, x))) - } - ExprKind::Paren(ref ex) => { - // merge attributes into the inner expression. 
- return lower_expr(lctx, ex).map(|mut ex| { - ex.attrs.update(|attrs| { - attrs.prepend(e.attrs.clone()) + _ => panic!(self.id_assigner.diagnostic() + .span_fatal(e.span, "inclusive range with no end")), + }; + } + ExprKind::Path(ref qself, ref path) => { + let hir_qself = qself.as_ref().map(|&QSelf { ref ty, position }| { + hir::QSelf { + ty: self.lower_ty(ty), + position: position, + } }); - ex - }); - } + let rename = if path.segments.len() == 1 { + // Only local variables are renamed + match self.resolver.get_resolution(e.id).map(|d| d.full_def()) { + Some(Def::Local(..)) | Some(Def::Upvar(..)) => true, + _ => false, + } + } else { + false + }; + hir::ExprPath(hir_qself, self.lower_path_full(path, rename)) + } + ExprKind::Break(opt_ident) => hir::ExprBreak(opt_ident.map(|sp_ident| { + respan(sp_ident.span, self.lower_ident(sp_ident.node)) + })), + ExprKind::Again(opt_ident) => hir::ExprAgain(opt_ident.map(|sp_ident| { + respan(sp_ident.span, self.lower_ident(sp_ident.node)) + })), + ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| self.lower_expr(x))), + ExprKind::InlineAsm(InlineAsm { + ref inputs, + ref outputs, + ref asm, + asm_str_style, + ref clobbers, + volatile, + alignstack, + dialect, + expn_id, + }) => hir::ExprInlineAsm(hir::InlineAsm { + inputs: inputs.iter().map(|&(ref c, _)| c.clone()).collect(), + outputs: outputs.iter() + .map(|out| { + hir::InlineAsmOutput { + constraint: out.constraint.clone(), + is_rw: out.is_rw, + is_indirect: out.is_indirect, + } + }) + .collect(), + asm: asm.clone(), + asm_str_style: asm_str_style, + clobbers: clobbers.clone().into(), + volatile: volatile, + alignstack: alignstack, + dialect: dialect, + expn_id: expn_id, + }, outputs.iter().map(|out| self.lower_expr(&out.expr)).collect(), + inputs.iter().map(|&(_, ref input)| self.lower_expr(input)).collect()), + ExprKind::Struct(ref path, ref fields, ref maybe_expr) => { + hir::ExprStruct(self.lower_path(path), + fields.iter().map(|x| self.lower_field(x)).collect(), + maybe_expr.as_ref().map(|x| self.lower_expr(x))) + } + ExprKind::Paren(ref ex) => { + return self.lower_expr(ex).map(|mut ex| { + // include parens in span, but only if it is a super-span. + if e.span.contains(ex.span) { + ex.span = e.span; + } + // merge attributes into the inner expression. 
+ ex.attrs.update(|attrs| { + attrs.prepend(e.attrs.clone()) + }); + ex + }); + } - // Desugar ExprIfLet - // From: `if let = []` - ExprKind::IfLet(ref pat, ref sub_expr, ref body, ref else_opt) => { - // to: - // - // match { - // => , - // [_ if => ,] - // _ => [ | ()] - // } + // Desugar ExprIfLet + // From: `if let = []` + ExprKind::IfLet(ref pat, ref sub_expr, ref body, ref else_opt) => { + // to: + // + // match { + // => , + // [_ if => ,] + // _ => [ | ()] + // } - return cache_ids(lctx, e.id, |lctx| { // ` => ` let pat_arm = { - let body = lower_block(lctx, body); - let body_expr = expr_block(lctx, body, None); - arm(hir_vec![lower_pat(lctx, pat)], body_expr) + let body = self.lower_block(body); + let body_expr = self.expr_block(body, None); + let pat = self.lower_pat(pat); + self.arm(hir_vec![pat], body_expr) }; // `[_ if => ,]` - let mut else_opt = else_opt.as_ref().map(|e| lower_expr(lctx, e)); + let mut else_opt = else_opt.as_ref().map(|e| self.lower_expr(e)); let else_if_arms = { let mut arms = vec![]; loop { @@ -1389,12 +1328,12 @@ pub fn lower_expr(lctx: &LoweringContext, e: &Expr) -> P { match els.node { // else if hir::ExprIf(cond, then, else_opt) => { - let pat_under = pat_wild(lctx, e.span); + let pat_under = self.pat_wild(e.span); arms.push(hir::Arm { attrs: hir_vec![], pats: hir_vec![pat_under], guard: Some(cond), - body: expr_block(lctx, then, None), + body: self.expr_block(then, None), }); else_opt.map(|else_opt| (else_opt, true)) } @@ -1423,11 +1362,10 @@ pub fn lower_expr(lctx: &LoweringContext, e: &Expr) -> P { // `_ => [ | ()]` let else_arm = { - let pat_under = pat_wild(lctx, e.span); + let pat_under = self.pat_wild(e.span); let else_expr = - else_opt.unwrap_or_else( - || expr_tuple(lctx, e.span, hir_vec![], None)); - arm(hir_vec![pat_under], else_expr) + else_opt.unwrap_or_else(|| self.expr_tuple(e.span, hir_vec![], None)); + self.arm(hir_vec![pat_under], else_expr) }; let mut arms = Vec::with_capacity(else_if_arms.len() + 2); @@ -1435,714 +1373,609 @@ pub fn lower_expr(lctx: &LoweringContext, e: &Expr) -> P { arms.extend(else_if_arms); arms.push(else_arm); - let sub_expr = lower_expr(lctx, sub_expr); + let sub_expr = self.lower_expr(sub_expr); // add attributes to the outer returned expr node - expr(lctx, - e.span, - hir::ExprMatch(sub_expr, - arms.into(), - hir::MatchSource::IfLetDesugar { - contains_else_clause: contains_else_clause, - }), - e.attrs.clone()) - }); - } + return self.expr(e.span, + hir::ExprMatch(sub_expr, + arms.into(), + hir::MatchSource::IfLetDesugar { + contains_else_clause: contains_else_clause, + }), + e.attrs.clone()); + } + + // Desugar ExprWhileLet + // From: `[opt_ident]: while let = ` + ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => { + // to: + // + // [opt_ident]: loop { + // match { + // => , + // _ => break + // } + // } - // Desugar ExprWhileLet - // From: `[opt_ident]: while let = ` - ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => { - // to: - // - // [opt_ident]: loop { - // match { - // => , - // _ => break - // } - // } - - return cache_ids(lctx, e.id, |lctx| { // ` => ` let pat_arm = { - let body = lower_block(lctx, body); - let body_expr = expr_block(lctx, body, None); - arm(hir_vec![lower_pat(lctx, pat)], body_expr) + let body = self.lower_block(body); + let body_expr = self.expr_block(body, None); + let pat = self.lower_pat(pat); + self.arm(hir_vec![pat], body_expr) }; // `_ => break` let break_arm = { - let pat_under = pat_wild(lctx, e.span); - let break_expr = expr_break(lctx, 
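// Surface-level picture of the IfLetDesugar rewrite performed above:
// `if let <pat> = <sub_expr> <body> [else <else>]` becomes a `match` with a
// pattern arm and a wildcard arm. A small, hedged illustration (the function
// names below are invented for the example, not part of this patch):
fn describe(opt: Option<i32>) -> String {
    if let Some(x) = opt {
        format!("got {}", x)
    } else {
        "nothing".to_string()
    }
}

fn describe_desugared(opt: Option<i32>) -> String {
    // The shape the lowering above produces (MatchSource::IfLetDesugar).
    match opt {
        Some(x) => format!("got {}", x),
        _ => "nothing".to_string(),
    }
}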
e.span, None); - arm(hir_vec![pat_under], break_expr) + let pat_under = self.pat_wild(e.span); + let break_expr = self.expr_break(e.span, None); + self.arm(hir_vec![pat_under], break_expr) }; // `match { ... }` let arms = hir_vec![pat_arm, break_arm]; - let sub_expr = lower_expr(lctx, sub_expr); - let match_expr = expr(lctx, - e.span, - hir::ExprMatch(sub_expr, - arms, - hir::MatchSource::WhileLetDesugar), - None); + let sub_expr = self.lower_expr(sub_expr); + let match_expr = self.expr(e.span, + hir::ExprMatch(sub_expr, + arms, + hir::MatchSource::WhileLetDesugar), + None); // `[opt_ident]: loop { ... }` - let loop_block = block_expr(lctx, match_expr); + let loop_block = self.block_expr(match_expr); let loop_expr = hir::ExprLoop(loop_block, - opt_ident.map(|ident| lower_ident(lctx, ident))); + opt_ident.map(|ident| self.lower_ident(ident))); // add attributes to the outer returned expr node - expr(lctx, e.span, loop_expr, e.attrs.clone()) - }); - } + let attrs = e.attrs.clone(); + return P(hir::Expr { id: e.id, node: loop_expr, span: e.span, attrs: attrs }); + } + + // Desugar ExprForLoop + // From: `[opt_ident]: for in ` + ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => { + // to: + // + // { + // let result = match ::std::iter::IntoIterator::into_iter() { + // mut iter => { + // [opt_ident]: loop { + // match ::std::iter::Iterator::next(&mut iter) { + // ::std::option::Option::Some() => , + // ::std::option::Option::None => break + // } + // } + // } + // }; + // result + // } - // Desugar ExprForLoop - // From: `[opt_ident]: for in ` - ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => { - // to: - // - // { - // let result = match ::std::iter::IntoIterator::into_iter() { - // mut iter => { - // [opt_ident]: loop { - // match ::std::iter::Iterator::next(&mut iter) { - // ::std::option::Option::Some() => , - // ::std::option::Option::None => break - // } - // } - // } - // }; - // result - // } - - return cache_ids(lctx, e.id, |lctx| { // expand - let head = lower_expr(lctx, head); + let head = self.lower_expr(head); - let iter = lctx.str_to_ident("iter"); + let iter = self.str_to_ident("iter"); // `::std::option::Option::Some() => ` let pat_arm = { - let body_block = lower_block(lctx, body); + let body_block = self.lower_block(body); let body_span = body_block.span; let body_expr = P(hir::Expr { - id: lctx.next_id(), + id: self.next_id(), node: hir::ExprBlock(body_block), span: body_span, attrs: None, }); - let pat = lower_pat(lctx, pat); - let some_pat = pat_some(lctx, e.span, pat); + let pat = self.lower_pat(pat); + let some_pat = self.pat_some(e.span, pat); - arm(hir_vec![some_pat], body_expr) + self.arm(hir_vec![some_pat], body_expr) }; // `::std::option::Option::None => break` let break_arm = { - let break_expr = expr_break(lctx, e.span, None); - - arm(hir_vec![pat_none(lctx, e.span)], break_expr) + let break_expr = self.expr_break(e.span, None); + let pat = self.pat_none(e.span); + self.arm(hir_vec![pat], break_expr) }; + // `mut iter` + let iter_pat = self.pat_ident_binding_mode(e.span, iter, + hir::BindByValue(hir::MutMutable)); + // `match ::std::iter::Iterator::next(&mut iter) { ... 
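// The WhileLetDesugar rewrite above turns
// `[opt_ident]: while let <pat> = <sub_expr> <body>` into a loop around a
// match whose wildcard arm breaks. A hedged, self-contained sketch
// (function names invented for illustration):
fn drain_sum(mut stack: Vec<i32>) -> i32 {
    let mut sum = 0;
    while let Some(top) = stack.pop() {
        sum += top;
    }
    sum
}

fn drain_sum_desugared(mut stack: Vec<i32>) -> i32 {
    let mut sum = 0;
    loop {
        match stack.pop() {
            Some(top) => sum += top,
            _ => break,
        }
    }
    sum
}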
}` let match_expr = { let next_path = { - let strs = std_path(lctx, &["iter", "Iterator", "next"]); + let strs = self.std_path(&["iter", "Iterator", "next"]); - path_global(e.span, strs) + self.path_global(e.span, strs) }; - let iter = expr_ident(lctx, e.span, iter, None); - let ref_mut_iter = expr_mut_addr_of(lctx, e.span, iter, None); - let next_path = expr_path(lctx, next_path, None); - let next_expr = expr_call(lctx, - e.span, - next_path, - hir_vec![ref_mut_iter], - None); + let iter = self.expr_ident(e.span, iter, None, iter_pat.id); + let ref_mut_iter = self.expr_mut_addr_of(e.span, iter, None); + let next_path = self.expr_path(next_path, None); + let next_expr = self.expr_call(e.span, + next_path, + hir_vec![ref_mut_iter], + None); let arms = hir_vec![pat_arm, break_arm]; - expr(lctx, - e.span, - hir::ExprMatch(next_expr, arms, hir::MatchSource::ForLoopDesugar), - None) + self.expr(e.span, + hir::ExprMatch(next_expr, arms, hir::MatchSource::ForLoopDesugar), + None) }; // `[opt_ident]: loop { ... }` - let loop_block = block_expr(lctx, match_expr); + let loop_block = self.block_expr(match_expr); let loop_expr = hir::ExprLoop(loop_block, - opt_ident.map(|ident| lower_ident(lctx, ident))); - let loop_expr = expr(lctx, e.span, loop_expr, None); + opt_ident.map(|ident| self.lower_ident(ident))); + let loop_expr = + P(hir::Expr { id: e.id, node: loop_expr, span: e.span, attrs: None }); // `mut iter => { ... }` - let iter_arm = { - let iter_pat = pat_ident_binding_mode(lctx, - e.span, - iter, - hir::BindByValue(hir::MutMutable)); - arm(hir_vec![iter_pat], loop_expr) - }; + let iter_arm = self.arm(hir_vec![iter_pat], loop_expr); // `match ::std::iter::IntoIterator::into_iter() { ... }` let into_iter_expr = { let into_iter_path = { - let strs = std_path(lctx, &["iter", "IntoIterator", "into_iter"]); + let strs = self.std_path(&["iter", "IntoIterator", "into_iter"]); - path_global(e.span, strs) + self.path_global(e.span, strs) }; - let into_iter = expr_path(lctx, into_iter_path, None); - expr_call(lctx, e.span, into_iter, hir_vec![head], None) + let into_iter = self.expr_path(into_iter_path, None); + self.expr_call(e.span, into_iter, hir_vec![head], None) }; - let match_expr = expr_match(lctx, - e.span, - into_iter_expr, - hir_vec![iter_arm], - hir::MatchSource::ForLoopDesugar, - None); + let match_expr = self.expr_match(e.span, + into_iter_expr, + hir_vec![iter_arm], + hir::MatchSource::ForLoopDesugar, + None); // `{ let _result = ...; _result }` // underscore prevents an unused_variables lint if the head diverges - let result_ident = lctx.str_to_ident("_result"); - let let_stmt = stmt_let(lctx, e.span, false, result_ident, match_expr, None); - let result = expr_ident(lctx, e.span, result_ident, None); - let block = block_all(lctx, e.span, hir_vec![let_stmt], Some(result)); + let result_ident = self.str_to_ident("_result"); + let (let_stmt, let_stmt_binding) = + self.stmt_let(e.span, false, result_ident, match_expr, None); + + let result = self.expr_ident(e.span, result_ident, None, let_stmt_binding); + let block = self.block_all(e.span, hir_vec![let_stmt], Some(result)); // add the attributes to the outer returned expr node - expr_block(lctx, block, e.attrs.clone()) - }); - } + return self.expr_block(block, e.attrs.clone()); + } - // Desugar ExprKind::Try - // From: `?` - ExprKind::Try(ref sub_expr) => { - // to: - // - // { - // match { - // Ok(val) => val, - // Err(err) => { - // return Err(From::from(err)) - // } - // } - // } + // Desugar ExprKind::Try + // From: `?` + ExprKind::Try(ref 
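// The ForLoopDesugar expansion above rewrites
// `[opt_ident]: for <pat> in <head> <body>` into the IntoIterator/Iterator
// match-and-loop shown in the comment. A hedged sketch of the same shape in
// surface Rust (illustration only; function names are made up):
fn total(values: Vec<i32>) -> i32 {
    let mut sum = 0;
    for v in values {
        sum += v;
    }
    sum
}

fn total_desugared(values: Vec<i32>) -> i32 {
    let mut sum = 0;
    let _result = match ::std::iter::IntoIterator::into_iter(values) {
        mut iter => loop {
            match ::std::iter::Iterator::next(&mut iter) {
                ::std::option::Option::Some(v) => sum += v,
                ::std::option::Option::None => break,
            }
        },
    };
    sum
}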
sub_expr) => { + // to: + // + // { + // match { + // Ok(val) => val, + // Err(err) => { + // return Err(From::from(err)) + // } + // } + // } - return cache_ids(lctx, e.id, |lctx| { // expand - let sub_expr = lower_expr(lctx, sub_expr); + let sub_expr = self.lower_expr(sub_expr); // Ok(val) => val let ok_arm = { - let val_ident = lctx.str_to_ident("val"); - let val_pat = pat_ident(lctx, e.span, val_ident); - let val_expr = expr_ident(lctx, e.span, val_ident, None); - let ok_pat = pat_ok(lctx, e.span, val_pat); + let val_ident = self.str_to_ident("val"); + let val_pat = self.pat_ident(e.span, val_ident); + let val_expr = self.expr_ident(e.span, val_ident, None, val_pat.id); + let ok_pat = self.pat_ok(e.span, val_pat); - arm(hir_vec![ok_pat], val_expr) + self.arm(hir_vec![ok_pat], val_expr) }; // Err(err) => return Err(From::from(err)) let err_arm = { - let err_ident = lctx.str_to_ident("err"); + let err_ident = self.str_to_ident("err"); + let err_local = self.pat_ident(e.span, err_ident); let from_expr = { - let path = std_path(lctx, &["convert", "From", "from"]); - let path = path_global(e.span, path); - let from = expr_path(lctx, path, None); - let err_expr = expr_ident(lctx, e.span, err_ident, None); + let path = self.std_path(&["convert", "From", "from"]); + let path = self.path_global(e.span, path); + let from = self.expr_path(path, None); + let err_expr = self.expr_ident(e.span, err_ident, None, err_local.id); - expr_call(lctx, e.span, from, hir_vec![err_expr], None) + self.expr_call(e.span, from, hir_vec![err_expr], None) }; let err_expr = { - let path = std_path(lctx, &["result", "Result", "Err"]); - let path = path_global(e.span, path); - let err_ctor = expr_path(lctx, path, None); - expr_call(lctx, e.span, err_ctor, hir_vec![from_expr], None) + let path = self.std_path(&["result", "Result", "Err"]); + let path = self.path_global(e.span, path); + let err_ctor = self.expr_path(path, None); + self.expr_call(e.span, err_ctor, hir_vec![from_expr], None) }; - let err_pat = pat_err(lctx, e.span, pat_ident(lctx, e.span, err_ident)); - let ret_expr = expr(lctx, e.span, - hir::Expr_::ExprRet(Some(err_expr)), None); + let err_pat = self.pat_err(e.span, err_local); + let ret_expr = self.expr(e.span, + hir::Expr_::ExprRet(Some(err_expr)), None); - arm(hir_vec![err_pat], ret_expr) + self.arm(hir_vec![err_pat], ret_expr) }; - expr_match(lctx, e.span, sub_expr, hir_vec![err_arm, ok_arm], - hir::MatchSource::TryDesugar, None) - }) - } + return self.expr_match(e.span, sub_expr, hir_vec![err_arm, ok_arm], + hir::MatchSource::TryDesugar, None); + } - ExprKind::Mac(_) => panic!("Shouldn't exist here"), - }, - span: e.span, - attrs: e.attrs.clone(), - }) -} + ExprKind::Mac(_) => panic!("Shouldn't exist here"), + }, + span: e.span, + attrs: e.attrs.clone(), + }) + } -pub fn lower_stmt(lctx: &LoweringContext, s: &Stmt) -> hir::Stmt { - match s.node { - StmtKind::Decl(ref d, id) => { - Spanned { - node: hir::StmtDecl(lower_decl(lctx, d), id), - span: s.span, + fn lower_stmt(&mut self, s: &Stmt) -> hir::Stmt { + match s.node { + StmtKind::Decl(ref d, id) => { + Spanned { + node: hir::StmtDecl(self.lower_decl(d), id), + span: s.span, + } } - } - StmtKind::Expr(ref e, id) => { - Spanned { - node: hir::StmtExpr(lower_expr(lctx, e), id), - span: s.span, + StmtKind::Expr(ref e, id) => { + Spanned { + node: hir::StmtExpr(self.lower_expr(e), id), + span: s.span, + } } - } - StmtKind::Semi(ref e, id) => { - Spanned { - node: hir::StmtSemi(lower_expr(lctx, e), id), - span: s.span, + StmtKind::Semi(ref e, id) => { 
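// The TryDesugar rewrite above lowers `<expr>?` into a match over Ok/Err in
// which the Err arm returns `Err(From::from(err))`. A hedged, self-contained
// sketch (`parse_port` is an invented helper, not part of this patch; the `?`
// operator itself was still feature-gated in this release):
use std::num::ParseIntError;

fn parse_port(s: &str) -> Result<u16, ParseIntError> {
    let n = s.parse::<u16>()?;
    Ok(n)
}

fn parse_port_desugared(s: &str) -> Result<u16, ParseIntError> {
    let n = match s.parse::<u16>() {
        Ok(val) => val,
        Err(err) => return Err(From::from(err)),
    };
    Ok(n)
}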
+ Spanned { + node: hir::StmtSemi(self.lower_expr(e), id), + span: s.span, + } } + StmtKind::Mac(..) => panic!("Shouldn't exist here"), } - StmtKind::Mac(..) => panic!("Shouldn't exist here"), } -} -pub fn lower_capture_clause(_lctx: &LoweringContext, c: CaptureBy) -> hir::CaptureClause { - match c { - CaptureBy::Value => hir::CaptureByValue, - CaptureBy::Ref => hir::CaptureByRef, + fn lower_capture_clause(&mut self, c: CaptureBy) -> hir::CaptureClause { + match c { + CaptureBy::Value => hir::CaptureByValue, + CaptureBy::Ref => hir::CaptureByRef, + } } -} -pub fn lower_visibility(lctx: &LoweringContext, v: &Visibility) -> hir::Visibility { - match *v { - Visibility::Public => hir::Public, - Visibility::Inherited => hir::Inherited, - _ => panic!(lctx.diagnostic().fatal("pub(restricted) is not implemented yet!")) + fn lower_visibility(&mut self, v: &Visibility) -> hir::Visibility { + match *v { + Visibility::Public => hir::Public, + Visibility::Crate(_) => hir::Visibility::Crate, + Visibility::Restricted { ref path, id } => + hir::Visibility::Restricted { path: P(self.lower_path(path)), id: id }, + Visibility::Inherited => hir::Inherited, + } } -} -pub fn lower_defaultness(_lctx: &LoweringContext, d: Defaultness) -> hir::Defaultness { - match d { - Defaultness::Default => hir::Defaultness::Default, - Defaultness::Final => hir::Defaultness::Final, + fn lower_defaultness(&mut self, d: Defaultness) -> hir::Defaultness { + match d { + Defaultness::Default => hir::Defaultness::Default, + Defaultness::Final => hir::Defaultness::Final, + } } -} -pub fn lower_block_check_mode(lctx: &LoweringContext, b: &BlockCheckMode) -> hir::BlockCheckMode { - match *b { - BlockCheckMode::Default => hir::DefaultBlock, - BlockCheckMode::Unsafe(u) => hir::UnsafeBlock(lower_unsafe_source(lctx, u)), + fn lower_block_check_mode(&mut self, b: &BlockCheckMode) -> hir::BlockCheckMode { + match *b { + BlockCheckMode::Default => hir::DefaultBlock, + BlockCheckMode::Unsafe(u) => hir::UnsafeBlock(self.lower_unsafe_source(u)), + } } -} -pub fn lower_binding_mode(lctx: &LoweringContext, b: &BindingMode) -> hir::BindingMode { - match *b { - BindingMode::ByRef(m) => hir::BindByRef(lower_mutability(lctx, m)), - BindingMode::ByValue(m) => hir::BindByValue(lower_mutability(lctx, m)), + fn lower_binding_mode(&mut self, b: &BindingMode) -> hir::BindingMode { + match *b { + BindingMode::ByRef(m) => hir::BindByRef(self.lower_mutability(m)), + BindingMode::ByValue(m) => hir::BindByValue(self.lower_mutability(m)), + } } -} -pub fn lower_unsafe_source(_lctx: &LoweringContext, u: UnsafeSource) -> hir::UnsafeSource { - match u { - CompilerGenerated => hir::CompilerGenerated, - UserProvided => hir::UserProvided, + fn lower_unsafe_source(&mut self, u: UnsafeSource) -> hir::UnsafeSource { + match u { + CompilerGenerated => hir::CompilerGenerated, + UserProvided => hir::UserProvided, + } } -} -pub fn lower_impl_polarity(_lctx: &LoweringContext, i: ImplPolarity) -> hir::ImplPolarity { - match i { - ImplPolarity::Positive => hir::ImplPolarity::Positive, - ImplPolarity::Negative => hir::ImplPolarity::Negative, + fn lower_impl_polarity(&mut self, i: ImplPolarity) -> hir::ImplPolarity { + match i { + ImplPolarity::Positive => hir::ImplPolarity::Positive, + ImplPolarity::Negative => hir::ImplPolarity::Negative, + } } -} -pub fn lower_trait_bound_modifier(_lctx: &LoweringContext, - f: TraitBoundModifier) - -> hir::TraitBoundModifier { - match f { - TraitBoundModifier::None => hir::TraitBoundModifier::None, - TraitBoundModifier::Maybe => 
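// lower_visibility above now maps all four AST visibilities (Public, Crate,
// Restricted and Inherited) into HIR instead of panicking on pub(restricted).
// The surface forms it covers look roughly like the following
// (pub(restricted) was still feature-gated at the time; item names here are
// invented for illustration):
mod outer {
    pub mod inner {
        pub fn public() {}             // Visibility::Public
        pub(crate) fn crate_wide() {}  // Visibility::Crate
        pub(super) fn to_parent() {}   // Visibility::Restricted { path: super, .. }
        fn private() {}                // Visibility::Inherited
    }

    pub fn call_them() {
        inner::public();
        inner::crate_wide();
        inner::to_parent();
    }
}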
hir::TraitBoundModifier::Maybe, + fn lower_trait_bound_modifier(&mut self, f: TraitBoundModifier) -> hir::TraitBoundModifier { + match f { + TraitBoundModifier::None => hir::TraitBoundModifier::None, + TraitBoundModifier::Maybe => hir::TraitBoundModifier::Maybe, + } } -} -// Helper methods for building HIR. + // Helper methods for building HIR. -fn arm(pats: hir::HirVec>, expr: P) -> hir::Arm { - hir::Arm { - attrs: hir_vec![], - pats: pats, - guard: None, - body: expr, + fn arm(&mut self, pats: hir::HirVec>, expr: P) -> hir::Arm { + hir::Arm { + attrs: hir_vec![], + pats: pats, + guard: None, + body: expr, + } } -} -fn field(name: Name, expr: P, span: Span) -> hir::Field { - hir::Field { - name: Spanned { - node: name, + fn field(&mut self, name: Name, expr: P, span: Span) -> hir::Field { + hir::Field { + name: Spanned { + node: name, + span: span, + }, span: span, - }, - span: span, - expr: expr, + expr: expr, + } } -} -fn expr_break(lctx: &LoweringContext, span: Span, - attrs: ThinAttributes) -> P { - expr(lctx, span, hir::ExprBreak(None), attrs) -} - -fn expr_call(lctx: &LoweringContext, - span: Span, - e: P, - args: hir::HirVec>, - attrs: ThinAttributes) - -> P { - expr(lctx, span, hir::ExprCall(e, args), attrs) -} - -fn expr_ident(lctx: &LoweringContext, span: Span, id: hir::Ident, - attrs: ThinAttributes) -> P { - expr_path(lctx, path_ident(span, id), attrs) -} - -fn expr_mut_addr_of(lctx: &LoweringContext, span: Span, e: P, - attrs: ThinAttributes) -> P { - expr(lctx, span, hir::ExprAddrOf(hir::MutMutable, e), attrs) -} - -fn expr_path(lctx: &LoweringContext, path: hir::Path, - attrs: ThinAttributes) -> P { - expr(lctx, path.span, hir::ExprPath(None, path), attrs) -} - -fn expr_match(lctx: &LoweringContext, - span: Span, - arg: P, - arms: hir::HirVec, - source: hir::MatchSource, - attrs: ThinAttributes) - -> P { - expr(lctx, span, hir::ExprMatch(arg, arms, source), attrs) -} - -fn expr_block(lctx: &LoweringContext, b: P, - attrs: ThinAttributes) -> P { - expr(lctx, b.span, hir::ExprBlock(b), attrs) -} - -fn expr_tuple(lctx: &LoweringContext, sp: Span, exprs: hir::HirVec>, - attrs: ThinAttributes) -> P { - expr(lctx, sp, hir::ExprTup(exprs), attrs) -} + fn expr_break(&mut self, span: Span, attrs: ThinAttributes) -> P { + self.expr(span, hir::ExprBreak(None), attrs) + } -fn expr_struct(lctx: &LoweringContext, - sp: Span, - path: hir::Path, - fields: hir::HirVec, - e: Option>, - attrs: ThinAttributes) -> P { - expr(lctx, sp, hir::ExprStruct(path, fields, e), attrs) -} + fn expr_call(&mut self, + span: Span, + e: P, + args: hir::HirVec>, + attrs: ThinAttributes) + -> P { + self.expr(span, hir::ExprCall(e, args), attrs) + } -fn expr(lctx: &LoweringContext, span: Span, node: hir::Expr_, - attrs: ThinAttributes) -> P { - P(hir::Expr { - id: lctx.next_id(), - node: node, - span: span, - attrs: attrs, - }) -} + fn expr_ident(&mut self, span: Span, id: Name, attrs: ThinAttributes, binding: NodeId) + -> P { + let expr_path = hir::ExprPath(None, self.path_ident(span, id)); + let expr = self.expr(span, expr_path, attrs); -fn stmt_let(lctx: &LoweringContext, - sp: Span, - mutbl: bool, - ident: hir::Ident, - ex: P, - attrs: ThinAttributes) - -> hir::Stmt { - let pat = if mutbl { - pat_ident_binding_mode(lctx, sp, ident, hir::BindByValue(hir::MutMutable)) - } else { - pat_ident(lctx, sp, ident) - }; - let local = P(hir::Local { - pat: pat, - ty: None, - init: Some(ex), - id: lctx.next_id(), - span: sp, - attrs: attrs, - }); - let decl = respan(sp, hir::DeclLocal(local)); - respan(sp, 
hir::StmtDecl(P(decl), lctx.next_id())) -} + let def = self.resolver.definitions().map(|defs| { + Def::Local(defs.local_def_id(binding), binding) + }).unwrap_or(Def::Err); + self.resolver.record_resolution(expr.id, def); -fn block_expr(lctx: &LoweringContext, expr: P) -> P { - block_all(lctx, expr.span, hir::HirVec::new(), Some(expr)) -} + expr + } -fn block_all(lctx: &LoweringContext, - span: Span, - stmts: hir::HirVec, - expr: Option>) - -> P { - P(hir::Block { - stmts: stmts, - expr: expr, - id: lctx.next_id(), - rules: hir::DefaultBlock, - span: span, - }) -} + fn expr_mut_addr_of(&mut self, span: Span, e: P, attrs: ThinAttributes) + -> P { + self.expr(span, hir::ExprAddrOf(hir::MutMutable, e), attrs) + } -fn pat_ok(lctx: &LoweringContext, span: Span, pat: P) -> P { - let ok = std_path(lctx, &["result", "Result", "Ok"]); - let path = path_global(span, ok); - pat_enum(lctx, span, path, hir_vec![pat]) -} + fn expr_path(&mut self, path: hir::Path, attrs: ThinAttributes) -> P { + let def = self.resolver.resolve_generated_global_path(&path, true); + let expr = self.expr(path.span, hir::ExprPath(None, path), attrs); + self.resolver.record_resolution(expr.id, def); + expr + } -fn pat_err(lctx: &LoweringContext, span: Span, pat: P) -> P { - let err = std_path(lctx, &["result", "Result", "Err"]); - let path = path_global(span, err); - pat_enum(lctx, span, path, hir_vec![pat]) -} + fn expr_match(&mut self, + span: Span, + arg: P, + arms: hir::HirVec, + source: hir::MatchSource, + attrs: ThinAttributes) + -> P { + self.expr(span, hir::ExprMatch(arg, arms, source), attrs) + } -fn pat_some(lctx: &LoweringContext, span: Span, pat: P) -> P { - let some = std_path(lctx, &["option", "Option", "Some"]); - let path = path_global(span, some); - pat_enum(lctx, span, path, hir_vec![pat]) -} + fn expr_block(&mut self, b: P, attrs: ThinAttributes) -> P { + self.expr(b.span, hir::ExprBlock(b), attrs) + } -fn pat_none(lctx: &LoweringContext, span: Span) -> P { - let none = std_path(lctx, &["option", "Option", "None"]); - let path = path_global(span, none); - pat_enum(lctx, span, path, hir_vec![]) -} + fn expr_tuple(&mut self, sp: Span, exprs: hir::HirVec>, attrs: ThinAttributes) + -> P { + self.expr(sp, hir::ExprTup(exprs), attrs) + } -fn pat_enum(lctx: &LoweringContext, - span: Span, - path: hir::Path, - subpats: hir::HirVec>) - -> P { - let pt = if subpats.is_empty() { - hir::PatKind::Path(path) - } else { - hir::PatKind::TupleStruct(path, Some(subpats)) - }; - pat(lctx, span, pt) -} + fn expr_struct(&mut self, + sp: Span, + path: hir::Path, + fields: hir::HirVec, + e: Option>, + attrs: ThinAttributes) -> P { + let def = self.resolver.resolve_generated_global_path(&path, false); + let expr = self.expr(sp, hir::ExprStruct(path, fields, e), attrs); + self.resolver.record_resolution(expr.id, def); + expr + } -fn pat_ident(lctx: &LoweringContext, span: Span, ident: hir::Ident) -> P { - pat_ident_binding_mode(lctx, span, ident, hir::BindByValue(hir::MutImmutable)) -} + fn expr(&mut self, span: Span, node: hir::Expr_, attrs: ThinAttributes) -> P { + P(hir::Expr { + id: self.next_id(), + node: node, + span: span, + attrs: attrs, + }) + } + + fn stmt_let(&mut self, + sp: Span, + mutbl: bool, + ident: Name, + ex: P, + attrs: ThinAttributes) + -> (hir::Stmt, NodeId) { + let pat = if mutbl { + self.pat_ident_binding_mode(sp, ident, hir::BindByValue(hir::MutMutable)) + } else { + self.pat_ident(sp, ident) + }; + let pat_id = pat.id; + let local = P(hir::Local { + pat: pat, + ty: None, + init: Some(ex), + id: 
self.next_id(), + span: sp, + attrs: attrs, + }); + let decl = respan(sp, hir::DeclLocal(local)); + (respan(sp, hir::StmtDecl(P(decl), self.next_id())), pat_id) + } -fn pat_ident_binding_mode(lctx: &LoweringContext, - span: Span, - ident: hir::Ident, - bm: hir::BindingMode) - -> P { - let pat_ident = hir::PatKind::Ident(bm, - Spanned { - span: span, - node: ident, - }, - None); - pat(lctx, span, pat_ident) -} + fn block_expr(&mut self, expr: P) -> P { + self.block_all(expr.span, hir::HirVec::new(), Some(expr)) + } -fn pat_wild(lctx: &LoweringContext, span: Span) -> P { - pat(lctx, span, hir::PatKind::Wild) -} + fn block_all(&mut self, span: Span, stmts: hir::HirVec, expr: Option>) + -> P { + P(hir::Block { + stmts: stmts, + expr: expr, + id: self.next_id(), + rules: hir::DefaultBlock, + span: span, + }) + } -fn pat(lctx: &LoweringContext, span: Span, pat: hir::PatKind) -> P { - P(hir::Pat { - id: lctx.next_id(), - node: pat, - span: span, - }) -} + fn pat_ok(&mut self, span: Span, pat: P) -> P { + let ok = self.std_path(&["result", "Result", "Ok"]); + let path = self.path_global(span, ok); + self.pat_enum(span, path, hir_vec![pat]) + } -fn path_ident(span: Span, id: hir::Ident) -> hir::Path { - path(span, vec![id]) -} + fn pat_err(&mut self, span: Span, pat: P) -> P { + let err = self.std_path(&["result", "Result", "Err"]); + let path = self.path_global(span, err); + self.pat_enum(span, path, hir_vec![pat]) + } -fn path(span: Span, strs: Vec) -> hir::Path { - path_all(span, false, strs, hir::HirVec::new(), hir::HirVec::new(), hir::HirVec::new()) -} + fn pat_some(&mut self, span: Span, pat: P) -> P { + let some = self.std_path(&["option", "Option", "Some"]); + let path = self.path_global(span, some); + self.pat_enum(span, path, hir_vec![pat]) + } -fn path_global(span: Span, strs: Vec) -> hir::Path { - path_all(span, true, strs, hir::HirVec::new(), hir::HirVec::new(), hir::HirVec::new()) -} + fn pat_none(&mut self, span: Span) -> P { + let none = self.std_path(&["option", "Option", "None"]); + let path = self.path_global(span, none); + self.pat_enum(span, path, hir_vec![]) + } -fn path_all(sp: Span, - global: bool, - mut idents: Vec, - lifetimes: hir::HirVec, - types: hir::HirVec>, - bindings: hir::HirVec) - -> hir::Path { - let last_identifier = idents.pop().unwrap(); - let mut segments: Vec = idents.into_iter() - .map(|ident| { - hir::PathSegment { - identifier: ident, - parameters: hir::PathParameters::none(), - } - }) - .collect(); - segments.push(hir::PathSegment { - identifier: last_identifier, - parameters: hir::AngleBracketedParameters(hir::AngleBracketedParameterData { - lifetimes: lifetimes, - types: types, - bindings: bindings, - }), - }); - hir::Path { - span: sp, - global: global, - segments: segments.into(), + fn pat_enum(&mut self, span: Span, path: hir::Path, subpats: hir::HirVec>) + -> P { + let def = self.resolver.resolve_generated_global_path(&path, true); + let pt = if subpats.is_empty() { + hir::PatKind::Path(path) + } else { + hir::PatKind::TupleStruct(path, Some(subpats)) + }; + let pat = self.pat(span, pt); + self.resolver.record_resolution(pat.id, def); + pat } -} -fn std_path(lctx: &LoweringContext, components: &[&str]) -> Vec { - let mut v = Vec::new(); - if let Some(s) = lctx.crate_root { - v.push(hir::Ident::from_name(token::intern(s))); + fn pat_ident(&mut self, span: Span, name: Name) -> P { + self.pat_ident_binding_mode(span, name, hir::BindByValue(hir::MutImmutable)) } - v.extend(components.iter().map(|s| hir::Ident::from_name(token::intern(s)))); - return v; 
-} -// Given suffix ["b","c","d"], returns path `::std::b::c::d` when -// `fld.cx.use_std`, and `::core::b::c::d` otherwise. -fn core_path(lctx: &LoweringContext, span: Span, components: &[&str]) -> hir::Path { - let idents = std_path(lctx, components); - path_global(span, idents) -} + fn pat_ident_binding_mode(&mut self, span: Span, name: Name, bm: hir::BindingMode) + -> P { + let pat_ident = hir::PatKind::Ident(bm, + Spanned { + span: span, + node: name, + }, + None); -fn signal_block_expr(lctx: &LoweringContext, - stmts: hir::HirVec, - expr: P, - span: Span, - rule: hir::BlockCheckMode, - attrs: ThinAttributes) - -> P { - let id = lctx.next_id(); - expr_block(lctx, - P(hir::Block { - rules: rule, - span: span, - id: id, - stmts: stmts, - expr: Some(expr), - }), - attrs) -} + let pat = self.pat(span, pat_ident); + let parent_def = self.parent_def; + let def = self.resolver.definitions().map(|defs| { + let def_path_data = DefPathData::Binding(name); + let def_index = defs.create_def_with_parent(parent_def, pat.id, def_path_data); + Def::Local(DefId::local(def_index), pat.id) + }).unwrap_or(Def::Err); + self.resolver.record_resolution(pat.id, def); + pat + } -#[cfg(test)] -mod test { - use super::*; - use syntax::ast::{self, NodeId, NodeIdAssigner}; - use syntax::{parse, codemap}; - use syntax::fold::Folder; - use std::cell::Cell; + fn pat_wild(&mut self, span: Span) -> P { + self.pat(span, hir::PatKind::Wild) + } - struct MockAssigner { - next_id: Cell, + fn pat(&mut self, span: Span, pat: hir::PatKind) -> P { + P(hir::Pat { + id: self.next_id(), + node: pat, + span: span, + }) } - impl MockAssigner { - fn new() -> MockAssigner { - MockAssigner { next_id: Cell::new(0) } - } + fn path_ident(&mut self, span: Span, id: Name) -> hir::Path { + self.path(span, vec![id]) } - trait FakeExtCtxt { - fn call_site(&self) -> codemap::Span; - fn cfg(&self) -> ast::CrateConfig; - fn ident_of(&self, st: &str) -> ast::Ident; - fn name_of(&self, st: &str) -> ast::Name; - fn parse_sess(&self) -> &parse::ParseSess; + fn path(&mut self, span: Span, strs: Vec) -> hir::Path { + self.path_all(span, false, strs, hir::HirVec::new(), hir::HirVec::new(), hir::HirVec::new()) } - impl FakeExtCtxt for parse::ParseSess { - fn call_site(&self) -> codemap::Span { - codemap::Span { - lo: codemap::BytePos(0), - hi: codemap::BytePos(0), - expn_id: codemap::NO_EXPANSION, - } - } - fn cfg(&self) -> ast::CrateConfig { - Vec::new() - } - fn ident_of(&self, st: &str) -> ast::Ident { - parse::token::str_to_ident(st) - } - fn name_of(&self, st: &str) -> ast::Name { - parse::token::intern(st) - } - fn parse_sess(&self) -> &parse::ParseSess { - self - } + fn path_global(&mut self, span: Span, strs: Vec) -> hir::Path { + self.path_all(span, true, strs, hir::HirVec::new(), hir::HirVec::new(), hir::HirVec::new()) } - impl NodeIdAssigner for MockAssigner { - fn next_node_id(&self) -> NodeId { - let result = self.next_id.get(); - self.next_id.set(result + 1); - result - } + fn path_all(&mut self, + sp: Span, + global: bool, + mut names: Vec, + lifetimes: hir::HirVec, + types: hir::HirVec>, + bindings: hir::HirVec) + -> hir::Path { + let last_identifier = names.pop().unwrap(); + let mut segments: Vec = names.into_iter().map(|name| { + hir::PathSegment { + name: name, + parameters: hir::PathParameters::none(), + } + }).collect(); - fn peek_node_id(&self) -> NodeId { - self.next_id.get() + segments.push(hir::PathSegment { + name: last_identifier, + parameters: hir::AngleBracketedParameters(hir::AngleBracketedParameterData { + lifetimes: 
lifetimes, + types: types, + bindings: bindings, + }), + }); + hir::Path { + span: sp, + global: global, + segments: segments.into(), } } - impl Folder for MockAssigner { - fn new_id(&mut self, old_id: NodeId) -> NodeId { - assert_eq!(old_id, ast::DUMMY_NODE_ID); - self.next_node_id() + fn std_path(&mut self, components: &[&str]) -> Vec { + let mut v = Vec::new(); + if let Some(s) = self.crate_root { + v.push(token::intern(s)); } - } - - #[test] - fn test_preserves_ids() { - let cx = parse::ParseSess::new(); - let mut assigner = MockAssigner::new(); - - let ast_if_let = quote_expr!(&cx, - if let Some(foo) = baz { - bar(foo); - }); - let ast_if_let = assigner.fold_expr(ast_if_let); - let ast_while_let = quote_expr!(&cx, - while let Some(foo) = baz { - bar(foo); - }); - let ast_while_let = assigner.fold_expr(ast_while_let); - let ast_for = quote_expr!(&cx, - for i in 0..10 { - for j in 0..10 { - foo(i, j); - } - }); - let ast_for = assigner.fold_expr(ast_for); - let ast_in = quote_expr!(&cx, in HEAP { foo() }); - let ast_in = assigner.fold_expr(ast_in); - - let lctx = LoweringContext::new(&assigner, None); - let hir1 = lower_expr(&lctx, &ast_if_let); - let hir2 = lower_expr(&lctx, &ast_if_let); - assert!(hir1 == hir2); - - let hir1 = lower_expr(&lctx, &ast_while_let); - let hir2 = lower_expr(&lctx, &ast_while_let); - assert!(hir1 == hir2); - - let hir1 = lower_expr(&lctx, &ast_for); - let hir2 = lower_expr(&lctx, &ast_for); - assert!(hir1 == hir2); - - let hir1 = lower_expr(&lctx, &ast_in); - let hir2 = lower_expr(&lctx, &ast_in); - assert!(hir1 == hir2); + v.extend(components.iter().map(|s| token::intern(s))); + return v; + } + + // Given suffix ["b","c","d"], returns path `::std::b::c::d` when + // `fld.cx.use_std`, and `::core::b::c::d` otherwise. + fn core_path(&mut self, span: Span, components: &[&str]) -> hir::Path { + let idents = self.std_path(components); + self.path_global(span, idents) + } + + fn signal_block_expr(&mut self, + stmts: hir::HirVec, + expr: P, + span: Span, + rule: hir::BlockCheckMode, + attrs: ThinAttributes) + -> P { + let id = self.next_id(); + let block = P(hir::Block { + rules: rule, + span: span, + id: id, + stmts: stmts, + expr: Some(expr), + }); + self.expr_block(block, attrs) } } diff --git a/src/librustc/hir/map/blocks.rs b/src/librustc/hir/map/blocks.rs index 8c626226bd..bac96c68e4 100644 --- a/src/librustc/hir/map/blocks.rs +++ b/src/librustc/hir/map/blocks.rs @@ -133,7 +133,13 @@ struct ClosureParts<'a> { impl<'a> ClosureParts<'a> { fn new(d: &'a FnDecl, b: &'a Block, id: NodeId, s: Span, attrs: &'a [Attribute]) -> Self { - ClosureParts { decl: d, body: b, id: id, span: s, attrs: attrs } + ClosureParts { + decl: d, + body: b, + id: id, + span: s, + attrs: attrs, + } } } @@ -250,7 +256,7 @@ impl<'a> FnLikeNode<'a> { } } map::NodeExpr(e) => match e.node { - ast::ExprClosure(_, ref decl, ref block) => + ast::ExprClosure(_, ref decl, ref block, _fn_decl_span) => closure(ClosureParts::new(&decl, &block, e.id, diff --git a/src/librustc/hir/map/collector.rs b/src/librustc/hir/map/collector.rs index 94fa393ae3..99e5f32e26 100644 --- a/src/librustc/hir/map/collector.rs +++ b/src/librustc/hir/map/collector.rs @@ -1,4 +1,4 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// Copyright 2015-2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. 
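// std_path/core_path above compose a global path by prepending the crate
// root ("std" or "core") to the given suffix components. A toy string-based
// illustration of that composition (not rustc code; names invented for the
// example):
fn global_path(crate_root: Option<&str>, components: &[&str]) -> String {
    let mut parts: Vec<&str> = Vec::new();
    if let Some(root) = crate_root {
        parts.push(root);
    }
    parts.extend_from_slice(components);
    format!("::{}", parts.join("::"))
}

fn main() {
    assert_eq!(global_path(Some("std"), &["iter", "Iterator", "next"]),
               "::std::iter::Iterator::next");
    assert_eq!(global_path(Some("core"), &["ops", "InPlace", "finalize"]),
               "::core::ops::InPlace::finalize");
}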
// @@ -13,18 +13,19 @@ use super::MapEntry::*; use hir::*; use hir::intravisit::Visitor; -use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex}; +use hir::def_id::DefId; use middle::cstore::InlinedItem; use std::iter::repeat; -use syntax::ast::{NodeId, CRATE_NODE_ID, DUMMY_NODE_ID}; +use syntax::ast::{NodeId, CRATE_NODE_ID}; use syntax::codemap::Span; -/// A Visitor that walks over an AST and collects Node's into an AST -/// Map. +/// A Visitor that walks over the HIR and collects Nodes into a HIR map pub struct NodeCollector<'ast> { + /// The crate pub krate: &'ast Crate, + /// The node map pub map: Vec>, - pub definitions: Definitions, + /// The parent of this node pub parent_node: NodeId, } @@ -33,16 +34,10 @@ impl<'ast> NodeCollector<'ast> { let mut collector = NodeCollector { krate: krate, map: vec![], - definitions: Definitions::new(), parent_node: CRATE_NODE_ID, }; collector.insert_entry(CRATE_NODE_ID, RootCrate); - let result = collector.create_def_with_parent(None, CRATE_NODE_ID, DefPathData::CrateRoot); - assert_eq!(result, CRATE_DEF_INDEX); - - collector.create_def_with_parent(Some(CRATE_DEF_INDEX), DUMMY_NODE_ID, DefPathData::Misc); - collector } @@ -51,53 +46,20 @@ impl<'ast> NodeCollector<'ast> { parent_node: NodeId, parent_def_path: DefPath, parent_def_id: DefId, - map: Vec>, - definitions: Definitions) + map: Vec>) -> NodeCollector<'ast> { let mut collector = NodeCollector { krate: krate, map: map, parent_node: parent_node, - definitions: definitions, }; assert_eq!(parent_def_path.krate, parent_def_id.krate); - let root_path = Box::new(InlinedRootPath { - data: parent_def_path.data, - def_id: parent_def_id, - }); - collector.insert_entry(parent_node, RootInlinedParent(parent)); - collector.create_def(parent_node, DefPathData::InlinedRoot(root_path)); collector } - fn parent_def(&self) -> Option { - let mut parent_node = Some(self.parent_node); - while let Some(p) = parent_node { - if let Some(q) = self.definitions.opt_def_index(p) { - return Some(q); - } - parent_node = self.map[p as usize].parent_node(); - } - None - } - - fn create_def(&mut self, node_id: NodeId, data: DefPathData) -> DefIndex { - let parent_def = self.parent_def(); - debug!("create_def(node_id={:?}, data={:?}, parent_def={:?})", node_id, data, parent_def); - self.definitions.create_def_with_parent(parent_def, node_id, data) - } - - fn create_def_with_parent(&mut self, - parent: Option, - node_id: NodeId, - data: DefPathData) - -> DefIndex { - self.definitions.create_def_with_parent(parent, node_id, data) - } - fn insert_entry(&mut self, id: NodeId, entry: MapEntry<'ast>) { debug!("ast_map: {:?} => {:?}", id, entry); let len = self.map.len(); @@ -107,15 +69,17 @@ impl<'ast> NodeCollector<'ast> { self.map[id as usize] = entry; } - fn insert_def(&mut self, id: NodeId, node: Node<'ast>, data: DefPathData) -> DefIndex { - self.insert(id, node); - self.create_def(id, data) - } - fn insert(&mut self, id: NodeId, node: Node<'ast>) { let entry = MapEntry::from_node(self.parent_node, node); self.insert_entry(id, entry); } + + fn with_parent(&mut self, parent_id: NodeId, f: F) { + let parent_node = self.parent_node; + self.parent_node = parent_id; + f(self); + self.parent_node = parent_node; + } } impl<'ast> Visitor<'ast> for NodeCollector<'ast> { @@ -130,187 +94,104 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { fn visit_item(&mut self, i: &'ast Item) { debug!("visit_item: {:?}", i); - // Pick the def data. 
This need not be unique, but the more - // information we encapsulate into - let def_data = match i.node { - ItemDefaultImpl(..) | ItemImpl(..) => - DefPathData::Impl, - ItemEnum(..) | ItemStruct(..) | ItemTrait(..) | - ItemExternCrate(..) | ItemMod(..) | ItemForeignMod(..) | - ItemTy(..) => - DefPathData::TypeNs(i.name), - ItemStatic(..) | ItemConst(..) | ItemFn(..) => - DefPathData::ValueNs(i.name), - ItemUse(..) => - DefPathData::Misc, - }; - - self.insert_def(i.id, NodeItem(i), def_data); + self.insert(i.id, NodeItem(i)); - let parent_node = self.parent_node; - self.parent_node = i.id; - - match i.node { - ItemImpl(..) => {} - ItemEnum(ref enum_definition, _) => { - for v in &enum_definition.variants { - let variant_def_index = - self.insert_def(v.node.data.id(), - NodeVariant(v), - DefPathData::EnumVariant(v.node.name)); - - for field in v.node.data.fields() { - self.create_def_with_parent( - Some(variant_def_index), - field.id, - DefPathData::Field(field.name)); + self.with_parent(i.id, |this| { + match i.node { + ItemEnum(ref enum_definition, _) => { + for v in &enum_definition.variants { + this.insert(v.node.data.id(), NodeVariant(v)); } } - } - ItemForeignMod(..) => { - } - ItemStruct(ref struct_def, _) => { - // If this is a tuple-like struct, register the constructor. - if !struct_def.is_struct() { - self.insert_def(struct_def.id(), - NodeStructCtor(struct_def), - DefPathData::StructCtor); - } - - for field in struct_def.fields() { - self.create_def(field.id, DefPathData::Field(field.name)); + ItemStruct(ref struct_def, _) => { + // If this is a tuple-like struct, register the constructor. + if !struct_def.is_struct() { + this.insert(struct_def.id(), NodeStructCtor(struct_def)); + } } - } - ItemTrait(_, _, ref bounds, _) => { - for b in bounds.iter() { - if let TraitTyParamBound(ref t, TraitBoundModifier::None) = *b { - self.insert(t.trait_ref.ref_id, NodeItem(i)); + ItemTrait(_, _, ref bounds, _) => { + for b in bounds.iter() { + if let TraitTyParamBound(ref t, TraitBoundModifier::None) = *b { + this.insert(t.trait_ref.ref_id, NodeItem(i)); + } } } - } - ItemUse(ref view_path) => { - match view_path.node { - ViewPathList(_, ref paths) => { - for path in paths { - self.insert(path.node.id(), NodeItem(i)); + ItemUse(ref view_path) => { + match view_path.node { + ViewPathList(_, ref paths) => { + for path in paths { + this.insert(path.node.id(), NodeItem(i)); + } } + _ => () } - _ => () } + _ => {} } - _ => {} - } - intravisit::walk_item(self, i); - self.parent_node = parent_node; + intravisit::walk_item(this, i); + }); } fn visit_foreign_item(&mut self, foreign_item: &'ast ForeignItem) { - self.insert_def(foreign_item.id, - NodeForeignItem(foreign_item), - DefPathData::ValueNs(foreign_item.name)); + self.insert(foreign_item.id, NodeForeignItem(foreign_item)); - let parent_node = self.parent_node; - self.parent_node = foreign_item.id; - intravisit::walk_foreign_item(self, foreign_item); - self.parent_node = parent_node; + self.with_parent(foreign_item.id, |this| { + intravisit::walk_foreign_item(this, foreign_item); + }); } fn visit_generics(&mut self, generics: &'ast Generics) { for ty_param in generics.ty_params.iter() { - self.insert_def(ty_param.id, - NodeTyParam(ty_param), - DefPathData::TypeParam(ty_param.name)); + self.insert(ty_param.id, NodeTyParam(ty_param)); } intravisit::walk_generics(self, generics); } fn visit_trait_item(&mut self, ti: &'ast TraitItem) { - let def_data = match ti.node { - MethodTraitItem(..) | ConstTraitItem(..) 
=> DefPathData::ValueNs(ti.name), - TypeTraitItem(..) => DefPathData::TypeNs(ti.name), - }; - self.insert(ti.id, NodeTraitItem(ti)); - self.create_def(ti.id, def_data); - - let parent_node = self.parent_node; - self.parent_node = ti.id; - match ti.node { - ConstTraitItem(_, Some(ref expr)) => { - self.create_def(expr.id, DefPathData::Initializer); - } - _ => { } - } - - intravisit::walk_trait_item(self, ti); - - self.parent_node = parent_node; + self.with_parent(ti.id, |this| { + intravisit::walk_trait_item(this, ti); + }); } fn visit_impl_item(&mut self, ii: &'ast ImplItem) { - let def_data = match ii.node { - ImplItemKind::Method(..) | ImplItemKind::Const(..) => DefPathData::ValueNs(ii.name), - ImplItemKind::Type(..) => DefPathData::TypeNs(ii.name), - }; - - self.insert_def(ii.id, NodeImplItem(ii), def_data); - - let parent_node = self.parent_node; - self.parent_node = ii.id; + self.insert(ii.id, NodeImplItem(ii)); - match ii.node { - ImplItemKind::Const(_, ref expr) => { - self.create_def(expr.id, DefPathData::Initializer); - } - _ => { } - } - - intravisit::walk_impl_item(self, ii); - - self.parent_node = parent_node; + self.with_parent(ii.id, |this| { + intravisit::walk_impl_item(this, ii); + }); } fn visit_pat(&mut self, pat: &'ast Pat) { - let maybe_binding = match pat.node { - PatKind::Ident(_, id, _) => Some(id.node), - _ => None - }; - - if let Some(id) = maybe_binding { - self.insert_def(pat.id, NodeLocal(pat), DefPathData::Binding(id.name)); + let node = if let PatKind::Ident(..) = pat.node { + NodeLocal(pat) } else { - self.insert(pat.id, NodePat(pat)); - } + NodePat(pat) + }; + self.insert(pat.id, node); - let parent_node = self.parent_node; - self.parent_node = pat.id; - intravisit::walk_pat(self, pat); - self.parent_node = parent_node; + self.with_parent(pat.id, |this| { + intravisit::walk_pat(this, pat); + }); } fn visit_expr(&mut self, expr: &'ast Expr) { self.insert(expr.id, NodeExpr(expr)); - match expr.node { - ExprClosure(..) 
=> { self.create_def(expr.id, DefPathData::ClosureExpr); } - _ => { } - } - - let parent_node = self.parent_node; - self.parent_node = expr.id; - intravisit::walk_expr(self, expr); - self.parent_node = parent_node; + self.with_parent(expr.id, |this| { + intravisit::walk_expr(this, expr); + }); } fn visit_stmt(&mut self, stmt: &'ast Stmt) { let id = stmt.node.id(); self.insert(id, NodeStmt(stmt)); - let parent_node = self.parent_node; - self.parent_node = id; - intravisit::walk_stmt(self, stmt); - self.parent_node = parent_node; + + self.with_parent(id, |this| { + intravisit::walk_stmt(this, stmt); + }); } fn visit_fn(&mut self, fk: intravisit::FnKind<'ast>, fd: &'ast FnDecl, @@ -321,22 +202,12 @@ impl<'ast> Visitor<'ast> for NodeCollector<'ast> { fn visit_block(&mut self, block: &'ast Block) { self.insert(block.id, NodeBlock(block)); - let parent_node = self.parent_node; - self.parent_node = block.id; - intravisit::walk_block(self, block); - self.parent_node = parent_node; + self.with_parent(block.id, |this| { + intravisit::walk_block(this, block); + }); } fn visit_lifetime(&mut self, lifetime: &'ast Lifetime) { self.insert(lifetime.id, NodeLifetime(lifetime)); } - - fn visit_lifetime_def(&mut self, def: &'ast LifetimeDef) { - self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name)); - self.visit_lifetime(&def.lifetime); - } - - fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) { - self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.name)); - } } diff --git a/src/librustc/hir/map/def_collector.rs b/src/librustc/hir/map/def_collector.rs new file mode 100644 index 0000000000..e783d84dc1 --- /dev/null +++ b/src/librustc/hir/map/def_collector.rs @@ -0,0 +1,438 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use super::*; + +use hir; +use hir::intravisit; +use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex}; + +use middle::cstore::InlinedItem; + +use syntax::ast::*; +use syntax::visit; +use syntax::parse::token; + +/// Creates def ids for nodes in the HIR. +pub struct DefCollector<'ast> { + // If we are walking HIR (c.f., AST), we need to keep a reference to the + // crate. 
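// Both NodeCollector above and DefCollector below thread a "current parent"
// through the walk via the save/set/recurse/restore `with_parent` helper.
// A generic, self-contained sketch of that pattern (toy types, not rustc
// code):
struct TreeNode {
    id: u32,
    children: Vec<TreeNode>,
}

struct ParentCollector {
    current_parent: Option<u32>,
    parents: Vec<(u32, Option<u32>)>, // (node id, parent id)
}

impl ParentCollector {
    fn with_parent<F: FnOnce(&mut Self)>(&mut self, parent: u32, f: F) {
        let saved = self.current_parent;
        self.current_parent = Some(parent);
        f(self);
        self.current_parent = saved;
    }

    fn visit(&mut self, node: &TreeNode) {
        self.parents.push((node.id, self.current_parent));
        self.with_parent(node.id, |this| {
            for child in &node.children {
                this.visit(child);
            }
        });
    }
}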
+ hir_crate: Option<&'ast hir::Crate>, + pub definitions: Definitions, + parent_def: Option, +} + +impl<'ast> DefCollector<'ast> { + pub fn root() -> DefCollector<'ast> { + let mut collector = DefCollector { + hir_crate: None, + definitions: Definitions::new(), + parent_def: None, + }; + let root = collector.create_def_with_parent(None, CRATE_NODE_ID, DefPathData::CrateRoot); + assert_eq!(root, CRATE_DEF_INDEX); + collector.parent_def = Some(root); + + collector.create_def_with_parent(Some(CRATE_DEF_INDEX), DUMMY_NODE_ID, DefPathData::Misc); + + collector + } + + pub fn extend(parent_node: NodeId, + parent_def_path: DefPath, + parent_def_id: DefId, + definitions: Definitions) + -> DefCollector<'ast> { + let mut collector = DefCollector { + hir_crate: None, + parent_def: None, + definitions: definitions, + }; + + assert_eq!(parent_def_path.krate, parent_def_id.krate); + let root_path = Box::new(InlinedRootPath { + data: parent_def_path.data, + def_id: parent_def_id, + }); + + let def = collector.create_def(parent_node, DefPathData::InlinedRoot(root_path)); + collector.parent_def = Some(def); + + collector + } + + pub fn walk_item(&mut self, ii: &'ast InlinedItem, krate: &'ast hir::Crate) { + self.hir_crate = Some(krate); + ii.visit(self); + } + + fn parent_def(&self) -> Option { + self.parent_def + } + + fn create_def(&mut self, node_id: NodeId, data: DefPathData) -> DefIndex { + let parent_def = self.parent_def(); + debug!("create_def(node_id={:?}, data={:?}, parent_def={:?})", node_id, data, parent_def); + self.definitions.create_def_with_parent(parent_def, node_id, data) + } + + fn create_def_with_parent(&mut self, + parent: Option, + node_id: NodeId, + data: DefPathData) + -> DefIndex { + self.definitions.create_def_with_parent(parent, node_id, data) + } + + fn with_parent(&mut self, parent_def: DefIndex, f: F) { + let parent = self.parent_def; + self.parent_def = Some(parent_def); + f(self); + self.parent_def = parent; + } + + fn visit_ast_const_integer(&mut self, expr: &'ast Expr) { + // Find the node which will be used after lowering. + if let ExprKind::Paren(ref inner) = expr.node { + return self.visit_ast_const_integer(inner); + } + + // FIXME(eddyb) Closures should have separate + // function definition IDs and expression IDs. + if let ExprKind::Closure(..) = expr.node { + return; + } + + self.create_def(expr.id, DefPathData::Initializer); + } + + fn visit_hir_const_integer(&mut self, expr: &'ast hir::Expr) { + // FIXME(eddyb) Closures should have separate + // function definition IDs and expression IDs. + if let hir::ExprClosure(..) = expr.node { + return; + } + + self.create_def(expr.id, DefPathData::Initializer); + } +} + +impl<'ast> visit::Visitor<'ast> for DefCollector<'ast> { + fn visit_item(&mut self, i: &'ast Item) { + debug!("visit_item: {:?}", i); + + // Pick the def data. This need not be unique, but the more + // information we encapsulate into + let def_data = match i.node { + ItemKind::DefaultImpl(..) | ItemKind::Impl(..) => + DefPathData::Impl, + ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Trait(..) | + ItemKind::ExternCrate(..) | ItemKind::ForeignMod(..) | ItemKind::Ty(..) => + DefPathData::TypeNs(i.ident.name), + ItemKind::Mod(..) => DefPathData::Module(i.ident.name), + ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) => + DefPathData::ValueNs(i.ident.name), + ItemKind::Mac(..) => DefPathData::MacroDef(i.ident.name), + ItemKind::Use(..) 
=> DefPathData::Misc, + }; + let def = self.create_def(i.id, def_data); + + self.with_parent(def, |this| { + match i.node { + ItemKind::Enum(ref enum_definition, _) => { + for v in &enum_definition.variants { + let variant_def_index = + this.create_def(v.node.data.id(), + DefPathData::EnumVariant(v.node.name.name)); + this.with_parent(variant_def_index, |this| { + for (index, field) in v.node.data.fields().iter().enumerate() { + let name = field.ident.map(|ident| ident.name) + .unwrap_or_else(|| token::intern(&index.to_string())); + this.create_def(field.id, DefPathData::Field(name)); + } + + if let Some(ref expr) = v.node.disr_expr { + this.visit_ast_const_integer(expr); + } + }); + } + } + ItemKind::Struct(ref struct_def, _) => { + // If this is a tuple-like struct, register the constructor. + if !struct_def.is_struct() { + this.create_def(struct_def.id(), + DefPathData::StructCtor); + } + + for (index, field) in struct_def.fields().iter().enumerate() { + let name = field.ident.map(|ident| ident.name) + .unwrap_or(token::intern(&index.to_string())); + this.create_def(field.id, DefPathData::Field(name)); + } + } + _ => {} + } + visit::walk_item(this, i); + }); + } + + fn visit_foreign_item(&mut self, foreign_item: &'ast ForeignItem) { + let def = self.create_def(foreign_item.id, DefPathData::ValueNs(foreign_item.ident.name)); + + self.with_parent(def, |this| { + visit::walk_foreign_item(this, foreign_item); + }); + } + + fn visit_generics(&mut self, generics: &'ast Generics) { + for ty_param in generics.ty_params.iter() { + self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.ident.name)); + } + + visit::walk_generics(self, generics); + } + + fn visit_trait_item(&mut self, ti: &'ast TraitItem) { + let def_data = match ti.node { + TraitItemKind::Method(..) | TraitItemKind::Const(..) => + DefPathData::ValueNs(ti.ident.name), + TraitItemKind::Type(..) => DefPathData::TypeNs(ti.ident.name), + }; + + let def = self.create_def(ti.id, def_data); + self.with_parent(def, |this| { + if let TraitItemKind::Const(_, Some(ref expr)) = ti.node { + this.create_def(expr.id, DefPathData::Initializer); + } + + visit::walk_trait_item(this, ti); + }); + } + + fn visit_impl_item(&mut self, ii: &'ast ImplItem) { + let def_data = match ii.node { + ImplItemKind::Method(..) | ImplItemKind::Const(..) => + DefPathData::ValueNs(ii.ident.name), + ImplItemKind::Type(..) => DefPathData::TypeNs(ii.ident.name), + ImplItemKind::Macro(..) => DefPathData::MacroDef(ii.ident.name), + }; + + let def = self.create_def(ii.id, def_data); + self.with_parent(def, |this| { + if let ImplItemKind::Const(_, ref expr) = ii.node { + this.create_def(expr.id, DefPathData::Initializer); + } + + visit::walk_impl_item(this, ii); + }); + } + + fn visit_pat(&mut self, pat: &'ast Pat) { + let parent_def = self.parent_def; + + if let PatKind::Ident(_, id, _) = pat.node { + let def = self.create_def(pat.id, DefPathData::Binding(id.node.name)); + self.parent_def = Some(def); + } + + visit::walk_pat(self, pat); + self.parent_def = parent_def; + } + + fn visit_expr(&mut self, expr: &'ast Expr) { + let parent_def = self.parent_def; + + if let ExprKind::Repeat(_, ref count) = expr.node { + self.visit_ast_const_integer(count); + } + + if let ExprKind::Closure(..) 
= expr.node { + let def = self.create_def(expr.id, DefPathData::ClosureExpr); + self.parent_def = Some(def); + } + + visit::walk_expr(self, expr); + self.parent_def = parent_def; + } + + fn visit_ty(&mut self, ty: &'ast Ty) { + if let TyKind::FixedLengthVec(_, ref length) = ty.node { + self.visit_ast_const_integer(length); + } + visit::walk_ty(self, ty); + } + + fn visit_lifetime_def(&mut self, def: &'ast LifetimeDef) { + self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name)); + } + + fn visit_macro_def(&mut self, macro_def: &'ast MacroDef) { + self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.ident.name)); + } +} + +// We walk the HIR rather than the AST when reading items from metadata. +impl<'ast> intravisit::Visitor<'ast> for DefCollector<'ast> { + /// Because we want to track parent items and so forth, enable + /// deep walking so that we walk nested items in the context of + /// their outer items. + fn visit_nested_item(&mut self, item_id: hir::ItemId) { + debug!("visit_nested_item: {:?}", item_id); + let item = self.hir_crate.unwrap().item(item_id.id); + self.visit_item(item) + } + + fn visit_item(&mut self, i: &'ast hir::Item) { + debug!("visit_item: {:?}", i); + + // Pick the def data. This need not be unique, but the more + // information we encapsulate into + let def_data = match i.node { + hir::ItemDefaultImpl(..) | hir::ItemImpl(..) => + DefPathData::Impl, + hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemTrait(..) | + hir::ItemExternCrate(..) | hir::ItemMod(..) | hir::ItemForeignMod(..) | + hir::ItemTy(..) => + DefPathData::TypeNs(i.name), + hir::ItemStatic(..) | hir::ItemConst(..) | hir::ItemFn(..) => + DefPathData::ValueNs(i.name), + hir::ItemUse(..) => DefPathData::Misc, + }; + let def = self.create_def(i.id, def_data); + + self.with_parent(def, |this| { + match i.node { + hir::ItemEnum(ref enum_definition, _) => { + for v in &enum_definition.variants { + let variant_def_index = + this.create_def(v.node.data.id(), + DefPathData::EnumVariant(v.node.name)); + + this.with_parent(variant_def_index, |this| { + for field in v.node.data.fields() { + this.create_def(field.id, + DefPathData::Field(field.name)); + } + if let Some(ref expr) = v.node.disr_expr { + this.visit_hir_const_integer(expr); + } + }); + } + } + hir::ItemStruct(ref struct_def, _) => { + // If this is a tuple-like struct, register the constructor. + if !struct_def.is_struct() { + this.create_def(struct_def.id(), + DefPathData::StructCtor); + } + + for field in struct_def.fields() { + this.create_def(field.id, DefPathData::Field(field.name)); + } + } + _ => {} + } + intravisit::walk_item(this, i); + }); + } + + fn visit_foreign_item(&mut self, foreign_item: &'ast hir::ForeignItem) { + let def = self.create_def(foreign_item.id, DefPathData::ValueNs(foreign_item.name)); + + self.with_parent(def, |this| { + intravisit::walk_foreign_item(this, foreign_item); + }); + } + + fn visit_generics(&mut self, generics: &'ast hir::Generics) { + for ty_param in generics.ty_params.iter() { + self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.name)); + } + + intravisit::walk_generics(self, generics); + } + + fn visit_trait_item(&mut self, ti: &'ast hir::TraitItem) { + let def_data = match ti.node { + hir::MethodTraitItem(..) | hir::ConstTraitItem(..) => + DefPathData::ValueNs(ti.name), + hir::TypeTraitItem(..) 
=> DefPathData::TypeNs(ti.name), + }; + + let def = self.create_def(ti.id, def_data); + self.with_parent(def, |this| { + if let hir::ConstTraitItem(_, Some(ref expr)) = ti.node { + this.create_def(expr.id, DefPathData::Initializer); + } + + intravisit::walk_trait_item(this, ti); + }); + } + + fn visit_impl_item(&mut self, ii: &'ast hir::ImplItem) { + let def_data = match ii.node { + hir::ImplItemKind::Method(..) | hir::ImplItemKind::Const(..) => + DefPathData::ValueNs(ii.name), + hir::ImplItemKind::Type(..) => DefPathData::TypeNs(ii.name), + }; + + let def = self.create_def(ii.id, def_data); + self.with_parent(def, |this| { + if let hir::ImplItemKind::Const(_, ref expr) = ii.node { + this.create_def(expr.id, DefPathData::Initializer); + } + + intravisit::walk_impl_item(this, ii); + }); + } + + fn visit_pat(&mut self, pat: &'ast hir::Pat) { + let parent_def = self.parent_def; + + if let hir::PatKind::Ident(_, name, _) = pat.node { + let def = self.create_def(pat.id, DefPathData::Binding(name.node)); + self.parent_def = Some(def); + } + + intravisit::walk_pat(self, pat); + self.parent_def = parent_def; + } + + fn visit_expr(&mut self, expr: &'ast hir::Expr) { + let parent_def = self.parent_def; + + if let hir::ExprRepeat(_, ref count) = expr.node { + self.visit_hir_const_integer(count); + } + + if let hir::ExprClosure(..) = expr.node { + let def = self.create_def(expr.id, DefPathData::ClosureExpr); + self.parent_def = Some(def); + } + + intravisit::walk_expr(self, expr); + self.parent_def = parent_def; + } + + fn visit_ty(&mut self, ty: &'ast hir::Ty) { + if let hir::TyFixedLengthVec(_, ref length) = ty.node { + self.visit_hir_const_integer(length); + } + intravisit::walk_ty(self, ty); + } + + fn visit_lifetime_def(&mut self, def: &'ast hir::LifetimeDef) { + self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name)); + } + + fn visit_macro_def(&mut self, macro_def: &'ast hir::MacroDef) { + self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.name)); + } +} diff --git a/src/librustc/hir/map/definitions.rs b/src/librustc/hir/map/definitions.rs index 2e26fe5057..d66df3e4e8 100644 --- a/src/librustc/hir/map/definitions.rs +++ b/src/librustc/hir/map/definitions.rs @@ -15,6 +15,7 @@ use syntax::ast; use syntax::parse::token::InternedString; use util::nodemap::NodeMap; +/// The definition table containing node definitions #[derive(Clone)] pub struct Definitions { data: Vec, @@ -82,8 +83,10 @@ impl DefPath { let mut data = vec![]; let mut index = Some(start_index); loop { + debug!("DefPath::make: krate={:?} index={:?}", krate, index); let p = index.unwrap(); let key = get_key(p); + debug!("DefPath::make: key={:?}", key); match key.disambiguated_data.data { DefPathData::CrateRoot => { assert!(key.parent.is_none()); @@ -137,30 +140,47 @@ pub struct InlinedRootPath { pub enum DefPathData { // Root: these should only be used for the root nodes, because // they are treated specially by the `def_path` function. 
+ /// The crate root (marker) CrateRoot, + /// An inlined root InlinedRoot(Box), // Catch-all for random DefId things like DUMMY_NODE_ID Misc, // Different kinds of items and item-like things: + /// An impl Impl, - TypeNs(ast::Name), // something in the type NS - ValueNs(ast::Name), // something in the value NS + /// Something in the type NS + TypeNs(ast::Name), + /// Something in the value NS + ValueNs(ast::Name), + /// A module declaration + Module(ast::Name), + /// A macro rule MacroDef(ast::Name), + /// A closure expression ClosureExpr, // Subportions of items + /// A type parameter (generic parameter) TypeParam(ast::Name), + /// A lifetime definition LifetimeDef(ast::Name), + /// A variant of a enum EnumVariant(ast::Name), + /// A struct field Field(ast::Name), - StructCtor, // implicit ctor for a tuple-like struct - Initializer, // initializer for a const - Binding(ast::Name), // pattern binding + /// Implicit ctor for a tuple-like struct + StructCtor, + /// Initializer for a const + Initializer, + /// Pattern binding + Binding(ast::Name), } impl Definitions { + /// Create new empty definition map. pub fn new() -> Definitions { Definitions { data: vec![], @@ -169,6 +189,7 @@ impl Definitions { } } + /// Get the number of definitions. pub fn len(&self) -> usize { self.data.len() } @@ -177,6 +198,10 @@ impl Definitions { self.data[index.as_usize()].key.clone() } + pub fn def_index_for_def_key(&self, key: DefKey) -> Option { + self.key_map.get(&key).cloned() + } + /// Returns the path from the crate root to `index`. The root /// nodes are not included in the path (i.e., this will be an /// empty vector for the crate root). For an inlined item, this @@ -194,6 +219,10 @@ impl Definitions { self.opt_def_index(node).map(DefId::local) } + pub fn local_def_id(&self, node: ast::NodeId) -> DefId { + self.opt_local_def_id(node).unwrap() + } + pub fn as_local_node_id(&self, def_id: DefId) -> Option { if def_id.krate == LOCAL_CRATE { assert!(def_id.index.as_usize() < self.data.len()); @@ -203,37 +232,7 @@ impl Definitions { } } - pub fn retrace_path(&self, path: &DefPath) -> Option { - debug!("retrace_path(path={:?})", path); - - // we assume that we only want to retrace paths relative to - // the crate root - assert!(path.is_local()); - - let root_key = DefKey { - parent: None, - disambiguated_data: DisambiguatedDefPathData { - data: DefPathData::CrateRoot, - disambiguator: 0, - }, - }; - let root_id = self.key_map[&root_key]; - - debug!("retrace_path: root_id={:?}", root_id); - - let mut id = root_id; - for data in &path.data { - let key = DefKey { parent: Some(id), disambiguated_data: data.clone() }; - debug!("key = {:?}", key); - id = match self.key_map.get(&key) { - Some(&id) => id, - None => return None - }; - } - - Some(id) - } - + /// Add a definition with a parent definition. 
pub fn create_def_with_parent(&mut self, parent: Option, node_id: ast::NodeId, @@ -288,6 +287,7 @@ impl DefPathData { match *self { TypeNs(name) | ValueNs(name) | + Module(name) | MacroDef(name) | TypeParam(name) | LifetimeDef(name) | diff --git a/src/librustc/hir/map/mod.rs b/src/librustc/hir/map/mod.rs index 7b8ddee0e2..2f310806a7 100644 --- a/src/librustc/hir/map/mod.rs +++ b/src/librustc/hir/map/mod.rs @@ -11,6 +11,7 @@ pub use self::Node::*; use self::MapEntry::*; use self::collector::NodeCollector; +use self::def_collector::DefCollector; pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData, DisambiguatedDefPathData, InlinedRootPath}; @@ -18,12 +19,13 @@ use dep_graph::{DepGraph, DepNode}; use middle::cstore::InlinedItem; use middle::cstore::InlinedItem as II; -use hir::def_id::{CRATE_DEF_INDEX, DefId}; +use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex}; use syntax::abi::Abi; -use syntax::ast::{self, Name, NodeId, DUMMY_NODE_ID}; +use syntax::ast::{self, Name, NodeId, DUMMY_NODE_ID, }; use syntax::attr::ThinAttributesExt; use syntax::codemap::{Span, Spanned}; +use syntax::visit; use hir::*; use hir::fold::Folder; @@ -36,6 +38,7 @@ use std::mem; pub mod blocks; mod collector; +mod def_collector; pub mod definitions; #[derive(Copy, Clone, Debug)] @@ -157,10 +160,10 @@ pub struct Forest { } impl Forest { - pub fn new(krate: Crate, dep_graph: DepGraph) -> Forest { + pub fn new(krate: Crate, dep_graph: &DepGraph) -> Forest { Forest { krate: krate, - dep_graph: dep_graph, + dep_graph: dep_graph.clone(), inlined_items: TypedArena::new() } } @@ -282,9 +285,8 @@ impl<'ast> Map<'ast> { self.definitions.borrow().def_path(def_id.index) } - pub fn retrace_path(&self, path: &DefPath) -> Option { - self.definitions.borrow().retrace_path(path) - .map(DefId::local) + pub fn def_index_for_def_key(&self, def_key: DefKey) -> Option { + self.definitions.borrow().def_index_for_def_key(def_key) } pub fn local_def_id(&self, node: NodeId) -> DefId { @@ -559,9 +561,7 @@ impl<'ast> Map<'ast> { NodeVariant(v) => v.node.name, NodeLifetime(lt) => lt.name, NodeTyParam(tp) => tp.name, - NodeLocal(&Pat { node: PatKind::Ident(_,l,_), .. }) => { - l.node.name - }, + NodeLocal(&Pat { node: PatKind::Ident(_,l,_), .. 
}) => l.node, NodeStructCtor(_) => self.name(self.get_parent(id)), _ => bug!("no name for {}", self.node_to_string(id)) } @@ -780,12 +780,18 @@ impl Folder for IdAndSpanUpdater { } } -pub fn map_crate<'ast>(forest: &'ast mut Forest) -> Map<'ast> { - let (map, definitions) = { - let mut collector = NodeCollector::root(&forest.krate); - intravisit::walk_crate(&mut collector, &forest.krate); - (collector.map, collector.definitions) - }; +pub fn collect_definitions<'ast>(krate: &'ast ast::Crate) -> Definitions { + let mut def_collector = DefCollector::root(); + visit::walk_crate(&mut def_collector, krate); + def_collector.definitions +} + +pub fn map_crate<'ast>(forest: &'ast mut Forest, + definitions: Definitions) + -> Map<'ast> { + let mut collector = NodeCollector::root(&forest.krate); + intravisit::walk_crate(&mut collector, &forest.krate); + let map = collector.map; if log_enabled!(::log::DEBUG) { // This only makes sense for ordered stores; note the @@ -834,21 +840,24 @@ pub fn map_decoded_item<'ast, F: FoldOps>(map: &Map<'ast>, }; let ii = map.forest.inlined_items.alloc(ii); - let ii_parent_id = fld.new_id(DUMMY_NODE_ID); - let mut collector = - NodeCollector::extend( - map.krate(), - ii, - ii_parent_id, - parent_def_path, - parent_def_id, - mem::replace(&mut *map.map.borrow_mut(), vec![]), - mem::replace(&mut *map.definitions.borrow_mut(), Definitions::new())); - ii.visit(&mut collector); + let defs = mem::replace(&mut *map.definitions.borrow_mut(), Definitions::new()); + let mut def_collector = DefCollector::extend(ii_parent_id, + parent_def_path.clone(), + parent_def_id, + defs); + def_collector.walk_item(ii, map.krate()); + *map.definitions.borrow_mut() = def_collector.definitions; + + let mut collector = NodeCollector::extend(map.krate(), + ii, + ii_parent_id, + parent_def_path, + parent_def_id, + mem::replace(&mut *map.map.borrow_mut(), vec![])); + ii.visit(&mut collector); *map.map.borrow_mut() = collector.map; - *map.definitions.borrow_mut() = collector.definitions; ii } diff --git a/src/librustc/hir/mod.rs b/src/librustc/hir/mod.rs index 8e748875b9..39a6ec9f3a 100644 --- a/src/librustc/hir/mod.rs +++ b/src/librustc/hir/mod.rs @@ -15,7 +15,6 @@ pub use self::BinOp_::*; pub use self::BlockCheckMode::*; pub use self::CaptureClause::*; pub use self::Decl_::*; -pub use self::ExplicitSelf_::*; pub use self::Expr_::*; pub use self::FunctionRetTy::*; pub use self::ForeignItem_::*; @@ -30,25 +29,23 @@ pub use self::TyParamBound::*; pub use self::UnOp::*; pub use self::UnsafeSource::*; pub use self::ViewPath_::*; -pub use self::Visibility::*; +pub use self::Visibility::{Public, Inherited}; pub use self::PathParameters::*; use hir::def::Def; use hir::def_id::DefId; use util::nodemap::{NodeMap, FnvHashSet}; -use syntax::codemap::{self, Span, Spanned, DUMMY_SP, ExpnId}; +use syntax::codemap::{self, mk_sp, respan, Span, Spanned, ExpnId}; use syntax::abi::Abi; use syntax::ast::{Name, NodeId, DUMMY_NODE_ID, TokenTree, AsmDialect}; use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem}; use syntax::attr::{ThinAttributes, ThinAttributesExt}; -use syntax::parse::token::InternedString; +use syntax::parse::token::{keywords, InternedString}; use syntax::ptr::P; use std::collections::BTreeMap; use std::fmt; -use std::hash::{Hash, Hasher}; -use serialize::{Encodable, Decodable, Encoder, Decoder}; /// HIR doesn't commit to a concrete storage type and have its own alias for a vector. 
/// It can be `Vec`, `P<[T]>` or potentially `Box<[T]>`, or some other container with similar @@ -77,63 +74,6 @@ pub mod pat_util; pub mod print; pub mod svh; -/// Identifier in HIR -#[derive(Clone, Copy, Eq)] -pub struct Ident { - /// Hygienic name (renamed), should be used by default - pub name: Name, - /// Unhygienic name (original, not renamed), needed in few places in name resolution - pub unhygienic_name: Name, -} - -impl Ident { - /// Creates a HIR identifier with both `name` and `unhygienic_name` initialized with - /// the argument. Hygiene properties of the created identifier depend entirely on this - /// argument. If the argument is a plain interned string `intern("iter")`, then the result - /// is unhygienic and can interfere with other entities named "iter". If the argument is - /// a "fresh" name created with `gensym("iter")`, then the result is hygienic and can't - /// interfere with other entities having the same string as a name. - pub fn from_name(name: Name) -> Ident { - Ident { name: name, unhygienic_name: name } - } -} - -impl PartialEq for Ident { - fn eq(&self, other: &Ident) -> bool { - self.name == other.name - } -} - -impl Hash for Ident { - fn hash(&self, state: &mut H) { - self.name.hash(state) - } -} - -impl fmt::Debug for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Debug::fmt(&self.name, f) - } -} - -impl fmt::Display for Ident { - fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - fmt::Display::fmt(&self.name, f) - } -} - -impl Encodable for Ident { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { - self.name.encode(s) - } -} - -impl Decodable for Ident { - fn decode(d: &mut D) -> Result { - Ok(Ident::from_name(Name::decode(d)?)) - } -} - #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] pub struct Lifetime { pub id: NodeId, @@ -185,12 +125,12 @@ impl fmt::Display for Path { impl Path { /// Convert a span and an identifier to the corresponding /// 1-segment path. - pub fn from_ident(s: Span, ident: Ident) -> Path { + pub fn from_name(s: Span, name: Name) -> Path { Path { span: s, global: false, segments: hir_vec![PathSegment { - identifier: ident, + name: name, parameters: PathParameters::none() }], } @@ -202,15 +142,7 @@ impl Path { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct PathSegment { /// The identifier portion of this path segment. - /// - /// Hygiene properties of this identifier are worth noting. - /// Most path segments are not hygienic and they are not renamed during - /// lowering from AST to HIR (see comments to `fn lower_path`). However segments from - /// unqualified paths with one segment originating from `ExprPath` (local-variable-like paths) - /// can be hygienic, so they are renamed. You should not normally care about this peculiarity - /// and just use `identifier.name` unless you modify identifier resolution code - /// (`fn resolve_identifier` and other functions called by it in `rustc_resolve`). - pub identifier: Ident, + pub name: Name, /// Type/lifetime parameters attached to this path. They come in /// two flavors: `Path` and `Path(A,B) -> C`. Note that @@ -601,7 +533,7 @@ pub enum PatKind { /// which it is. The resolver determines this, and /// records this pattern's `NodeId` in an auxiliary /// set (of "PatIdents that refer to unit patterns or constants"). - Ident(BindingMode, Spanned, Option>), + Ident(BindingMode, Spanned, Option>), /// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`. 
/// The `bool` is `true` in the presence of a `..`. @@ -941,16 +873,18 @@ pub enum Expr_ { /// A while loop, with an optional label /// /// `'label: while expr { block }` - ExprWhile(P, P, Option), + ExprWhile(P, P, Option), /// Conditionless loop (can be exited with break, continue, or return) /// /// `'label: loop { block }` - ExprLoop(P, Option), + ExprLoop(P, Option), /// A `match` block, with a source that indicates whether or not it is /// the result of a desugaring, and if so, which kind. ExprMatch(P, HirVec, MatchSource), - /// A closure (for example, `move |a, b, c| {a + b + c}`) - ExprClosure(CaptureClause, P, P), + /// A closure (for example, `move |a, b, c| {a + b + c}`). + /// + /// The final span is the span of the argument block `|...|` + ExprClosure(CaptureClause, P, P, Span), /// A block (`{ ... }`) ExprBlock(P), @@ -979,9 +913,9 @@ pub enum Expr_ { /// A referencing operation (`&a` or `&mut a`) ExprAddrOf(Mutability, P), /// A `break`, with an optional label to break - ExprBreak(Option>), + ExprBreak(Option>), /// A `continue`, with an optional label - ExprAgain(Option>), + ExprAgain(Option>), /// A `return`, with an optional value to be returned ExprRet(Option>), @@ -1053,7 +987,6 @@ pub struct MethodSig { pub abi: Abi, pub decl: P, pub generics: Generics, - pub explicit_self: ExplicitSelf, } /// Represents an item declaration within a trait declaration, @@ -1194,25 +1127,41 @@ pub struct Arg { pub id: NodeId, } +/// Alternative representation for `Arg`s describing `self` parameter of methods. +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub enum SelfKind { + /// `self`, `mut self` + Value(Mutability), + /// `&'lt self`, `&'lt mut self` + Region(Option, Mutability), + /// `self: TYPE`, `mut self: TYPE` + Explicit(P, Mutability), +} + +pub type ExplicitSelf = Spanned; + impl Arg { - pub fn new_self(span: Span, mutability: Mutability, self_ident: Ident) -> Arg { - let path = Spanned { - span: span, - node: self_ident, - }; - Arg { - // HACK(eddyb) fake type for the self argument. 
- ty: P(Ty { - id: DUMMY_NODE_ID, - node: TyInfer, - span: DUMMY_SP, - }), - pat: P(Pat { - id: DUMMY_NODE_ID, - node: PatKind::Ident(BindByValue(mutability), path, None), - span: span, - }), - id: DUMMY_NODE_ID, + pub fn to_self(&self) -> Option { + if let PatKind::Ident(BindByValue(mutbl), name, _) = self.pat.node { + if name.node.unhygienize() == keywords::SelfValue.name() { + return match self.ty.node { + TyInfer => Some(respan(self.pat.span, SelfKind::Value(mutbl))), + TyRptr(lt, MutTy{ref ty, mutbl}) if ty.node == TyInfer => { + Some(respan(self.pat.span, SelfKind::Region(lt, mutbl))) + } + _ => Some(respan(mk_sp(self.pat.span.lo, self.ty.span.hi), + SelfKind::Explicit(self.ty.clone(), mutbl))) + } + } + } + None + } + + pub fn is_self(&self) -> bool { + if let PatKind::Ident(_, name, _) = self.pat.node { + name.node.unhygienize() == keywords::SelfValue.name() + } else { + false } } } @@ -1225,6 +1174,12 @@ pub struct FnDecl { pub variadic: bool, } +impl FnDecl { + pub fn has_self(&self) -> bool { + self.inputs.get(0).map(Arg::is_self).unwrap_or(false) + } +} + #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Unsafety { Unsafe, @@ -1306,21 +1261,6 @@ impl FunctionRetTy { } } -/// Represents the kind of 'self' associated with a method -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] -pub enum ExplicitSelf_ { - /// No self - SelfStatic, - /// `self` - SelfValue(Name), - /// `&'lt self`, `&'lt mut self` - SelfRegion(Option, Mutability, Name), - /// `self: TYPE` - SelfExplicit(P, Name), -} - -pub type ExplicitSelf = Spanned; - #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Mod { /// A span from the first token past `{` to the last token until `}`. @@ -1434,6 +1374,8 @@ pub struct PolyTraitRef { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Visibility { Public, + Crate, + Restricted { path: P, id: NodeId }, Inherited, } @@ -1635,8 +1577,14 @@ pub type FreevarMap = NodeMap>; pub type CaptureModeMap = NodeMap; +#[derive(Clone)] +pub struct TraitCandidate { + pub def_id: DefId, + pub import_id: Option, +} + // Trait method resolution -pub type TraitMap = NodeMap>; +pub type TraitMap = NodeMap>; // Map from the NodeId of a glob import to a list of items which are actually // imported. 
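The hir::map changes above split HIR map construction into two passes: `collect_definitions` first walks the AST with the new `DefCollector` to allocate def indices, and `map_crate` then walks the HIR with `NodeCollector` to build the node map from those definitions. Both collectors thread the current parent through a `with_parent` helper instead of saving and restoring it by hand in every visit method. The sketch below illustrates that parent-tracking pattern in isolation; `Collector`, `Definitions`, and `DefIndex` here are simplified stand-ins invented for the example, not the actual rustc types.

    // Minimal, self-contained sketch of the `with_parent` pattern used by the
    // collectors above; the types below are simplified stand-ins.
    #[derive(Debug, Clone, Copy, PartialEq)]
    struct DefIndex(usize);

    #[derive(Default)]
    struct Definitions {
        // One entry per created definition: (parent, description).
        defs: Vec<(Option<DefIndex>, String)>,
    }

    impl Definitions {
        fn create_def(&mut self, parent: Option<DefIndex>, what: &str) -> DefIndex {
            self.defs.push((parent, what.to_string()));
            DefIndex(self.defs.len() - 1)
        }
    }

    struct Collector {
        definitions: Definitions,
        parent_def: Option<DefIndex>,
    }

    impl Collector {
        fn create_def(&mut self, what: &str) -> DefIndex {
            let parent = self.parent_def;
            self.definitions.create_def(parent, what)
        }

        // Run `f` with `parent_def` temporarily set to `parent`, then restore
        // the previous parent; this is the pattern that replaces the repeated
        // "save parent / walk / restore parent" boilerplate removed above.
        fn with_parent<F: FnOnce(&mut Self)>(&mut self, parent: DefIndex, f: F) {
            let old = self.parent_def.replace(parent);
            f(self);
            self.parent_def = old;
        }
    }

    fn main() {
        let mut c = Collector { definitions: Definitions::default(), parent_def: None };
        let root = c.create_def("crate root");
        c.with_parent(root, |c| {
            let module = c.create_def("module `foo`");
            c.with_parent(module, |c| {
                c.create_def("fn `bar`");
            });
        });
        assert_eq!(c.parent_def, None); // the original parent is restored
        println!("{:?}", c.defs_debug());
    }

    impl Collector {
        // Small helper for the demo output.
        fn defs_debug(&self) -> &Vec<(Option<DefIndex>, String)> {
            &self.definitions.defs
        }
    }

Because `with_parent` always restores the previous parent, nested items can be visited reentrantly, which is what lets the patch delete every manual `parent_node` save/restore pair from the visitor methods.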
diff --git a/src/librustc/hir/pat_util.rs b/src/librustc/hir/pat_util.rs index 6cc5a29062..15f2310607 100644 --- a/src/librustc/hir/pat_util.rs +++ b/src/librustc/hir/pat_util.rs @@ -113,19 +113,6 @@ pub fn pat_is_binding_or_wild(dm: &DefMap, pat: &hir::Pat) -> bool { /// `match foo() { Some(a) => (), None => () }` pub fn pat_bindings(dm: &RefCell, pat: &hir::Pat, mut it: I) where I: FnMut(hir::BindingMode, ast::NodeId, Span, &Spanned), -{ - pat.walk(|p| { - match p.node { - PatKind::Ident(binding_mode, ref pth, _) if pat_is_binding(&dm.borrow(), p) => { - it(binding_mode, p.id, p.span, &respan(pth.span, pth.node.name)); - } - _ => {} - } - true - }); -} -pub fn pat_bindings_ident(dm: &RefCell, pat: &hir::Pat, mut it: I) where - I: FnMut(hir::BindingMode, ast::NodeId, Span, &Spanned), { pat.walk(|p| { match p.node { @@ -201,7 +188,7 @@ pub fn pat_contains_bindings_or_wild(dm: &DefMap, pat: &hir::Pat) -> bool { pub fn simple_name<'a>(pat: &'a hir::Pat) -> Option { match pat.node { PatKind::Ident(hir::BindByValue(_), ref path1, None) => { - Some(path1.node.name) + Some(path1.node) } _ => { None @@ -209,9 +196,8 @@ pub fn simple_name<'a>(pat: &'a hir::Pat) -> Option { } } -pub fn def_to_path(tcx: &TyCtxt, id: DefId) -> hir::Path { - let name = tcx.item_name(id); - hir::Path::from_ident(DUMMY_SP, hir::Ident::from_name(name)) +pub fn def_to_path<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> hir::Path { + hir::Path::from_name(DUMMY_SP, tcx.item_name(id)) } /// Return variants that are necessary to exist for the pattern to match. diff --git a/src/librustc/hir/print.rs b/src/librustc/hir/print.rs index e9ed0ed574..4455c7da3b 100644 --- a/src/librustc/hir/print.rs +++ b/src/librustc/hir/print.rs @@ -14,9 +14,8 @@ use syntax::abi::Abi; use syntax::ast; use syntax::codemap::{self, CodeMap, BytePos, Spanned}; use syntax::errors; -use syntax::parse::token::{self, BinOpToken}; +use syntax::parse::token::{self, keywords, BinOpToken}; use syntax::parse::lexer::comments; -use syntax::parse; use syntax::print::pp::{self, break_offset, word, space, hardbreak}; use syntax::print::pp::{Breaks, eof}; use syntax::print::pp::Breaks::{Consistent, Inconsistent}; @@ -24,7 +23,7 @@ use syntax::print::pprust::{self as ast_pp, PrintState}; use syntax::ptr::P; use hir; -use hir::{Crate, PatKind, RegionTyParamBound, TraitTyParamBound, TraitBoundModifier}; +use hir::{Crate, PatKind, RegionTyParamBound, SelfKind, TraitTyParamBound, TraitBoundModifier}; use std::io::{self, Write, Read}; @@ -282,7 +281,6 @@ pub fn fun_to_string(decl: &hir::FnDecl, unsafety: hir::Unsafety, constness: hir::Constness, name: ast::Name, - opt_explicit_self: Option<&hir::ExplicitSelf_>, generics: &hir::Generics) -> String { to_string(|s| { @@ -293,7 +291,6 @@ pub fn fun_to_string(decl: &hir::FnDecl, Abi::Rust, Some(name), generics, - opt_explicit_self, &hir::Inherited)?; s.end()?; // Close the head box s.end() // Close the outer box @@ -310,10 +307,6 @@ pub fn block_to_string(blk: &hir::Block) -> String { }) } -pub fn explicit_self_to_string(explicit_self: &hir::ExplicitSelf_) -> String { - to_string(|s| s.print_explicit_self(explicit_self, hir::MutImmutable).map(|_| {})) -} - pub fn variant_to_string(var: &hir::Variant) -> String { to_string(|s| s.print_variant(var)) } @@ -325,6 +318,8 @@ pub fn arg_to_string(arg: &hir::Arg) -> String { pub fn visibility_qualified(vis: &hir::Visibility, s: &str) -> String { match *vis { hir::Public => format!("pub {}", s), + hir::Visibility::Crate => format!("pub(crate) {}", s), + 
hir::Visibility::Restricted { ref path, .. } => format!("pub({}) {}", path, s), hir::Inherited => s.to_string(), } } @@ -525,7 +520,7 @@ impl<'a> State<'a> { predicates: hir::HirVec::new(), }, }; - self.print_ty_fn(f.abi, f.unsafety, &f.decl, None, &generics, None)?; + self.print_ty_fn(f.abi, f.unsafety, &f.decl, None, &generics)?; } hir::TyPath(None, ref path) => { self.print_path(path, false, 0)?; @@ -572,7 +567,6 @@ impl<'a> State<'a> { Abi::Rust, Some(item.name), generics, - None, &item.vis)?; self.end()?; // end head-ibox word(&mut self.s, ";")?; @@ -709,7 +703,6 @@ impl<'a> State<'a> { abi, Some(item.name), typarams, - None, &item.vis)?; word(&mut self.s, " ")?; self.print_block_with_attrs(&body, &item.attrs)?; @@ -898,6 +891,9 @@ impl<'a> State<'a> { pub fn print_visibility(&mut self, vis: &hir::Visibility) -> io::Result<()> { match *vis { hir::Public => self.word_nbsp("pub"), + hir::Visibility::Crate => self.word_nbsp("pub(crate)"), + hir::Visibility::Restricted { ref path, .. } => + self.word_nbsp(&format!("pub({})", path)), hir::Inherited => Ok(()), } } @@ -972,7 +968,6 @@ impl<'a> State<'a> { m.abi, Some(name), &m.generics, - Some(&m.explicit_self.node), vis) } @@ -1356,9 +1351,9 @@ impl<'a> State<'a> { hir::ExprIf(ref test, ref blk, ref elseopt) => { self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?; } - hir::ExprWhile(ref test, ref blk, opt_ident) => { - if let Some(ident) = opt_ident { - self.print_name(ident.name)?; + hir::ExprWhile(ref test, ref blk, opt_name) => { + if let Some(name) = opt_name { + self.print_name(name)?; self.word_space(":")?; } self.head("while")?; @@ -1366,9 +1361,9 @@ impl<'a> State<'a> { space(&mut self.s)?; self.print_block(&blk)?; } - hir::ExprLoop(ref blk, opt_ident) => { - if let Some(ident) = opt_ident { - self.print_name(ident.name)?; + hir::ExprLoop(ref blk, opt_name) => { + if let Some(name) = opt_name { + self.print_name(name)?; self.word_space(":")?; } self.head("loop")?; @@ -1387,7 +1382,7 @@ impl<'a> State<'a> { } self.bclose_(expr.span, indent_unit)?; } - hir::ExprClosure(capture_clause, ref decl, ref body) => { + hir::ExprClosure(capture_clause, ref decl, ref body, _fn_decl_span) => { self.print_capture_clause(capture_clause)?; self.print_fn_block_args(&decl)?; @@ -1460,19 +1455,19 @@ impl<'a> State<'a> { hir::ExprPath(Some(ref qself), ref path) => { self.print_qpath(path, qself, true)? } - hir::ExprBreak(opt_ident) => { + hir::ExprBreak(opt_name) => { word(&mut self.s, "break")?; space(&mut self.s)?; - if let Some(ident) = opt_ident { - self.print_name(ident.node.name)?; + if let Some(name) = opt_name { + self.print_name(name.node)?; space(&mut self.s)?; } } - hir::ExprAgain(opt_ident) => { + hir::ExprAgain(opt_name) => { word(&mut self.s, "continue")?; space(&mut self.s)?; - if let Some(ident) = opt_ident { - self.print_name(ident.node.name)?; + if let Some(name) = opt_name { + self.print_name(name.node)?; space(&mut self.s)? } } @@ -1620,7 +1615,7 @@ impl<'a> State<'a> { word(&mut self.s, "::")? 
} - self.print_name(segment.identifier.name)?; + self.print_name(segment.name)?; self.print_path_parameters(&segment.parameters, colons_before_params)?; } @@ -1644,7 +1639,7 @@ impl<'a> State<'a> { word(&mut self.s, ">")?; word(&mut self.s, "::")?; let item_segment = path.segments.last().unwrap(); - self.print_name(item_segment.identifier.name)?; + self.print_name(item_segment.name)?; self.print_path_parameters(&item_segment.parameters, colons_before_params) } @@ -1732,7 +1727,7 @@ impl<'a> State<'a> { self.word_nbsp("mut")?; } } - self.print_name(path1.node.name)?; + self.print_name(path1.node)?; match *sub { Some(ref p) => { word(&mut self.s, "@")?; @@ -1877,32 +1872,25 @@ impl<'a> State<'a> { self.end() // close enclosing cbox } - // Returns whether it printed anything - fn print_explicit_self(&mut self, - explicit_self: &hir::ExplicitSelf_, - mutbl: hir::Mutability) - -> io::Result { - self.print_mutability(mutbl)?; - match *explicit_self { - hir::SelfStatic => { - return Ok(false); - } - hir::SelfValue(_) => { - word(&mut self.s, "self")?; + fn print_explicit_self(&mut self, explicit_self: &hir::ExplicitSelf) -> io::Result<()> { + match explicit_self.node { + SelfKind::Value(m) => { + self.print_mutability(m)?; + word(&mut self.s, "self") } - hir::SelfRegion(ref lt, m, _) => { + SelfKind::Region(ref lt, m) => { word(&mut self.s, "&")?; self.print_opt_lifetime(lt)?; self.print_mutability(m)?; - word(&mut self.s, "self")?; + word(&mut self.s, "self") } - hir::SelfExplicit(ref typ, _) => { + SelfKind::Explicit(ref typ, m) => { + self.print_mutability(m)?; word(&mut self.s, "self")?; self.word_space(":")?; - self.print_type(&typ)?; + self.print_type(&typ) } } - return Ok(true); } pub fn print_fn(&mut self, @@ -1912,7 +1900,6 @@ impl<'a> State<'a> { abi: Abi, name: Option, generics: &hir::Generics, - opt_explicit_self: Option<&hir::ExplicitSelf_>, vis: &hir::Visibility) -> io::Result<()> { self.print_fn_header_info(unsafety, constness, abi, vis)?; @@ -1922,55 +1909,13 @@ impl<'a> State<'a> { self.print_name(name)?; } self.print_generics(generics)?; - self.print_fn_args_and_ret(decl, opt_explicit_self)?; + self.print_fn_args_and_ret(decl)?; self.print_where_clause(&generics.where_clause) } - pub fn print_fn_args(&mut self, - decl: &hir::FnDecl, - opt_explicit_self: Option<&hir::ExplicitSelf_>, - is_closure: bool) - -> io::Result<()> { - // It is unfortunate to duplicate the commasep logic, but we want the - // self type and the args all in the same box. - self.rbox(0, Inconsistent)?; - let mut first = true; - if let Some(explicit_self) = opt_explicit_self { - let m = match explicit_self { - &hir::SelfStatic => hir::MutImmutable, - _ => match decl.inputs[0].pat.node { - PatKind::Ident(hir::BindByValue(m), _, _) => m, - _ => hir::MutImmutable, - }, - }; - first = !self.print_explicit_self(explicit_self, m)?; - } - - // HACK(eddyb) ignore the separately printed self argument. - let args = if first { - &decl.inputs[..] - } else { - &decl.inputs[1..] 
- }; - - for arg in args { - if first { - first = false; - } else { - self.word_space(",")?; - } - self.print_arg(arg, is_closure)?; - } - - self.end() - } - - pub fn print_fn_args_and_ret(&mut self, - decl: &hir::FnDecl, - opt_explicit_self: Option<&hir::ExplicitSelf_>) - -> io::Result<()> { + pub fn print_fn_args_and_ret(&mut self, decl: &hir::FnDecl) -> io::Result<()> { self.popen()?; - self.print_fn_args(decl, opt_explicit_self, false)?; + self.commasep(Inconsistent, &decl.inputs, |s, arg| s.print_arg(arg, false))?; if decl.variadic { word(&mut self.s, ", ...")?; } @@ -1981,7 +1926,7 @@ impl<'a> State<'a> { pub fn print_fn_block_args(&mut self, decl: &hir::FnDecl) -> io::Result<()> { word(&mut self.s, "|")?; - self.print_fn_args(decl, None, true)?; + self.commasep(Inconsistent, &decl.inputs, |s, arg| s.print_arg(arg, true))?; word(&mut self.s, "|")?; if let hir::DefaultReturn(..) = decl.output { @@ -2150,7 +2095,7 @@ impl<'a> State<'a> { hir::ViewPathSimple(name, ref path) => { self.print_path(path, false, 0)?; - if path.segments.last().unwrap().identifier.name != name { + if path.segments.last().unwrap().name != name { space(&mut self.s)?; self.word_space("as")?; self.print_name(name)?; @@ -2203,19 +2148,21 @@ impl<'a> State<'a> { match input.ty.node { hir::TyInfer if is_closure => self.print_pat(&input.pat)?, _ => { - match input.pat.node { - PatKind::Ident(_, ref path1, _) if - path1.node.name == - parse::token::special_idents::invalid.name => { - // Do nothing. - } - _ => { + if let Some(eself) = input.to_self() { + self.print_explicit_self(&eself)?; + } else { + let invalid = if let PatKind::Ident(_, name, _) = input.pat.node { + name.node == keywords::Invalid.name() + } else { + false + }; + if !invalid { self.print_pat(&input.pat)?; word(&mut self.s, ":")?; space(&mut self.s)?; } + self.print_type(&input.ty)?; } - self.print_type(&input.ty)?; } } self.end() @@ -2247,8 +2194,7 @@ impl<'a> State<'a> { unsafety: hir::Unsafety, decl: &hir::FnDecl, name: Option, - generics: &hir::Generics, - opt_explicit_self: Option<&hir::ExplicitSelf_>) + generics: &hir::Generics) -> io::Result<()> { self.ibox(indent_unit)?; if !generics.lifetimes.is_empty() || !generics.ty_params.is_empty() { @@ -2269,7 +2215,6 @@ impl<'a> State<'a> { abi, name, &generics, - opt_explicit_self, &hir::Inherited)?; self.end() } diff --git a/src/librustc/hir/svh.rs b/src/librustc/hir/svh.rs index 08c3d70034..d4e797c9f2 100644 --- a/src/librustc/hir/svh.rs +++ b/src/librustc/hir/svh.rs @@ -10,78 +10,44 @@ //! Calculation and management of a Strict Version Hash for crates //! -//! # Today's ABI problem -//! -//! In today's implementation of rustc, it is incredibly difficult to achieve -//! forward binary compatibility without resorting to C-like interfaces. Within -//! rust code itself, abi details such as symbol names suffer from a variety of -//! unrelated factors to code changing such as the "def id drift" problem. This -//! ends up yielding confusing error messages about metadata mismatches and -//! such. -//! -//! The core of this problem is when an upstream dependency changes and -//! downstream dependents are not recompiled. This causes compile errors because -//! the upstream crate's metadata has changed but the downstream crates are -//! still referencing the older crate's metadata. -//! -//! This problem exists for many reasons, the primary of which is that rust does -//! not currently support forwards ABI compatibility (in place upgrades of a -//! crate). -//! -//! # SVH and how it alleviates the problem -//! 
-//! With all of this knowledge on hand, this module contains the implementation -//! of a notion of a "Strict Version Hash" for a crate. This is essentially a -//! hash of all contents of a crate which can somehow be exposed to downstream -//! crates. -//! -//! This hash is currently calculated by just hashing the AST, but this is -//! obviously wrong (doc changes should not result in an incompatible ABI). -//! Implementation-wise, this is required at this moment in time. -//! -//! By encoding this strict version hash into all crate's metadata, stale crates -//! can be detected immediately and error'd about by rustc itself. -//! -//! # Relevant links -//! -//! Original issue: https://github.com/rust-lang/rust/issues/10207 +//! The SVH is used for incremental compilation to track when HIR +//! nodes have changed between compilations, and also to detect +//! mismatches where we have two versions of the same crate that were +//! compiled from distinct sources. use std::fmt; +use std::hash::{Hash, Hasher}; -#[derive(Clone, PartialEq, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Debug)] pub struct Svh { - hash: String, + hash: u64, } impl Svh { /// Create a new `Svh` given the hash. If you actually want to /// compute the SVH from some HIR, you want the `calculate_svh` - /// function found in `librustc_trans`. - pub fn new(hash: String) -> Svh { - assert!(hash.len() == 16); + /// function found in `librustc_incremental`. + pub fn new(hash: u64) -> Svh { Svh { hash: hash } } - pub fn from_hash(hash: u64) -> Svh { - return Svh::new((0..64).step_by(4).map(|i| hex(hash >> i)).collect()); + pub fn as_u64(&self) -> u64 { + self.hash + } - fn hex(b: u64) -> char { - let b = (b & 0xf) as u8; - let b = match b { - 0 ... 9 => '0' as u8 + b, - _ => 'a' as u8 + b - 10, - }; - b as char - } + pub fn to_string(&self) -> String { + format!("{:016x}", self.hash) } +} - pub fn as_str<'a>(&'a self) -> &'a str { - &self.hash +impl Hash for Svh { + fn hash(&self, state: &mut H) where H: Hasher { + self.hash.to_le().hash(state); } } impl fmt::Display for Svh { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - f.pad(self.as_str()) + f.pad(&self.to_string()) } } diff --git a/src/librustc/infer/bivariate.rs b/src/librustc/infer/bivariate.rs index a9ea395e6b..96b14a6c32 100644 --- a/src/librustc/infer/bivariate.rs +++ b/src/librustc/infer/bivariate.rs @@ -25,35 +25,35 @@ //! In particular, it might be enough to say (A,B) are bivariant for //! all (A,B). 
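The svh.rs hunk above replaces the string-backed `Svh` with a plain `u64` that is only rendered as a 16-digit hex string on demand, and that is hashed in a fixed byte order. A minimal standalone sketch of that representation, mirroring the names used in the patch (an illustration only, not the rustc type itself):

    // Sketch of the new Svh representation: a u64 hash, formatted as 16 hex
    // digits when a string form is needed.
    use std::fmt;
    use std::hash::{Hash, Hasher};

    #[derive(Copy, Clone, PartialEq, Eq, Debug)]
    struct Svh {
        hash: u64,
    }

    impl Svh {
        fn new(hash: u64) -> Svh {
            Svh { hash: hash }
        }

        fn as_u64(&self) -> u64 {
            self.hash
        }

        fn to_string(&self) -> String {
            // 16 hex digits, zero-padded, matching the old String-based form.
            format!("{:016x}", self.hash)
        }
    }

    impl Hash for Svh {
        fn hash<H: Hasher>(&self, state: &mut H) {
            // Hash a fixed (little-endian) byte order for stability.
            self.hash.to_le().hash(state);
        }
    }

    impl fmt::Display for Svh {
        fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
            f.pad(&self.to_string())
        }
    }

    fn main() {
        let svh = Svh::new(0xdead_beef);
        assert_eq!(svh.to_string(), "00000000deadbeef");
        println!("{} ({})", svh, svh.as_u64());
    }

Storing the hash as a `u64` makes `Svh` `Copy` and cheap to compare, while `to_string` and `Display` keep the familiar 16-character hex form for metadata and error messages.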
-use super::combine::{self, CombineFields}; +use super::combine::CombineFields; use super::type_variable::{BiTo}; use ty::{self, Ty, TyCtxt}; use ty::TyVar; use ty::relate::{Relate, RelateResult, TypeRelation}; -pub struct Bivariate<'a, 'tcx: 'a> { - fields: CombineFields<'a, 'tcx> +pub struct Bivariate<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fields: CombineFields<'a, 'gcx, 'tcx> } -impl<'a, 'tcx> Bivariate<'a, 'tcx> { - pub fn new(fields: CombineFields<'a, 'tcx>) -> Bivariate<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Bivariate<'a, 'gcx, 'tcx> { + pub fn new(fields: CombineFields<'a, 'gcx, 'tcx>) -> Bivariate<'a, 'gcx, 'tcx> { Bivariate { fields: fields } } } -impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Bivariate<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> TypeRelation<'a, 'gcx, 'tcx> for Bivariate<'a, 'gcx, 'tcx> { fn tag(&self) -> &'static str { "Bivariate" } - fn tcx(&self) -> &'a TyCtxt<'tcx> { self.fields.tcx() } + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.fields.tcx() } fn a_is_expected(&self) -> bool { self.fields.a_is_expected } - fn relate_with_variance>(&mut self, - variance: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> + fn relate_with_variance>(&mut self, + variance: ty::Variance, + a: &T, + b: &T) + -> RelateResult<'tcx, T> { match variance { // If we have Foo and Foo is invariant w/r/t A, @@ -96,7 +96,7 @@ impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Bivariate<'a, 'tcx> { } _ => { - combine::super_combine_tys(self.fields.infcx, self, a, b) + self.fields.infcx.super_combine_tys(self, a, b) } } } @@ -107,7 +107,7 @@ impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Bivariate<'a, 'tcx> { fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) -> RelateResult<'tcx, ty::Binder> - where T: Relate<'a,'tcx> + where T: Relate<'tcx> { let a1 = self.tcx().erase_late_bound_regions(a); let b1 = self.tcx().erase_late_bound_regions(b); diff --git a/src/librustc/infer/combine.rs b/src/librustc/infer/combine.rs index 67669c54ac..e2f27074db 100644 --- a/src/librustc/infer/combine.rs +++ b/src/librustc/infer/combine.rs @@ -44,139 +44,141 @@ use super::type_variable::{RelationDir, BiTo, EqTo, SubtypeOf, SupertypeOf}; use ty::{IntType, UintType}; use ty::{self, Ty, TyCtxt}; use ty::error::TypeError; -use ty::fold::{TypeFolder, TypeFoldable}; -use ty::relate::{Relate, RelateResult, TypeRelation}; +use ty::fold::TypeFoldable; +use ty::relate::{RelateResult, TypeRelation}; use traits::PredicateObligations; use syntax::ast; use syntax::codemap::Span; #[derive(Clone)] -pub struct CombineFields<'a, 'tcx: 'a> { - pub infcx: &'a InferCtxt<'a, 'tcx>, +pub struct CombineFields<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + pub infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, pub a_is_expected: bool, pub trace: TypeTrace<'tcx>, pub cause: Option, pub obligations: PredicateObligations<'tcx>, } -pub fn super_combine_tys<'a,'tcx:'a,R>(infcx: &InferCtxt<'a, 'tcx>, - relation: &mut R, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>> - where R: TypeRelation<'a,'tcx> -{ - let a_is_expected = relation.a_is_expected(); - - match (&a.sty, &b.sty) { - // Relate integral variables to other types - (&ty::TyInfer(ty::IntVar(a_id)), &ty::TyInfer(ty::IntVar(b_id))) => { - infcx.int_unification_table - .borrow_mut() - .unify_var_var(a_id, b_id) - .map_err(|e| int_unification_error(a_is_expected, e))?; - Ok(a) - } - (&ty::TyInfer(ty::IntVar(v_id)), &ty::TyInt(v)) => { - unify_integral_variable(infcx, a_is_expected, v_id, IntType(v)) - } - (&ty::TyInt(v), &ty::TyInfer(ty::IntVar(v_id))) => { - unify_integral_variable(infcx, !a_is_expected, v_id, 
IntType(v)) - } - (&ty::TyInfer(ty::IntVar(v_id)), &ty::TyUint(v)) => { - unify_integral_variable(infcx, a_is_expected, v_id, UintType(v)) - } - (&ty::TyUint(v), &ty::TyInfer(ty::IntVar(v_id))) => { - unify_integral_variable(infcx, !a_is_expected, v_id, UintType(v)) - } +impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { + pub fn super_combine_tys(&self, + relation: &mut R, + a: Ty<'tcx>, + b: Ty<'tcx>) + -> RelateResult<'tcx, Ty<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx> + { + let a_is_expected = relation.a_is_expected(); + + match (&a.sty, &b.sty) { + // Relate integral variables to other types + (&ty::TyInfer(ty::IntVar(a_id)), &ty::TyInfer(ty::IntVar(b_id))) => { + self.int_unification_table + .borrow_mut() + .unify_var_var(a_id, b_id) + .map_err(|e| int_unification_error(a_is_expected, e))?; + Ok(a) + } + (&ty::TyInfer(ty::IntVar(v_id)), &ty::TyInt(v)) => { + self.unify_integral_variable(a_is_expected, v_id, IntType(v)) + } + (&ty::TyInt(v), &ty::TyInfer(ty::IntVar(v_id))) => { + self.unify_integral_variable(!a_is_expected, v_id, IntType(v)) + } + (&ty::TyInfer(ty::IntVar(v_id)), &ty::TyUint(v)) => { + self.unify_integral_variable(a_is_expected, v_id, UintType(v)) + } + (&ty::TyUint(v), &ty::TyInfer(ty::IntVar(v_id))) => { + self.unify_integral_variable(!a_is_expected, v_id, UintType(v)) + } - // Relate floating-point variables to other types - (&ty::TyInfer(ty::FloatVar(a_id)), &ty::TyInfer(ty::FloatVar(b_id))) => { - infcx.float_unification_table - .borrow_mut() - .unify_var_var(a_id, b_id) - .map_err(|e| float_unification_error(relation.a_is_expected(), e))?; - Ok(a) - } - (&ty::TyInfer(ty::FloatVar(v_id)), &ty::TyFloat(v)) => { - unify_float_variable(infcx, a_is_expected, v_id, v) - } - (&ty::TyFloat(v), &ty::TyInfer(ty::FloatVar(v_id))) => { - unify_float_variable(infcx, !a_is_expected, v_id, v) - } + // Relate floating-point variables to other types + (&ty::TyInfer(ty::FloatVar(a_id)), &ty::TyInfer(ty::FloatVar(b_id))) => { + self.float_unification_table + .borrow_mut() + .unify_var_var(a_id, b_id) + .map_err(|e| float_unification_error(relation.a_is_expected(), e))?; + Ok(a) + } + (&ty::TyInfer(ty::FloatVar(v_id)), &ty::TyFloat(v)) => { + self.unify_float_variable(a_is_expected, v_id, v) + } + (&ty::TyFloat(v), &ty::TyInfer(ty::FloatVar(v_id))) => { + self.unify_float_variable(!a_is_expected, v_id, v) + } - // All other cases of inference are errors - (&ty::TyInfer(_), _) | - (_, &ty::TyInfer(_)) => { - Err(TypeError::Sorts(ty::relate::expected_found(relation, &a, &b))) - } + // All other cases of inference are errors + (&ty::TyInfer(_), _) | + (_, &ty::TyInfer(_)) => { + Err(TypeError::Sorts(ty::relate::expected_found(relation, &a, &b))) + } - _ => { - ty::relate::super_relate_tys(relation, a, b) + _ => { + ty::relate::super_relate_tys(relation, a, b) + } } } -} -fn unify_integral_variable<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, - vid_is_expected: bool, - vid: ty::IntVid, - val: ty::IntVarValue) - -> RelateResult<'tcx, Ty<'tcx>> -{ - infcx.int_unification_table - .borrow_mut() - .unify_var_value(vid, val) - .map_err(|e| int_unification_error(vid_is_expected, e))?; - match val { - IntType(v) => Ok(infcx.tcx.mk_mach_int(v)), - UintType(v) => Ok(infcx.tcx.mk_mach_uint(v)), + fn unify_integral_variable(&self, + vid_is_expected: bool, + vid: ty::IntVid, + val: ty::IntVarValue) + -> RelateResult<'tcx, Ty<'tcx>> + { + self.int_unification_table + .borrow_mut() + .unify_var_value(vid, val) + .map_err(|e| int_unification_error(vid_is_expected, e))?; + match val { + IntType(v) => 
Ok(self.tcx.mk_mach_int(v)), + UintType(v) => Ok(self.tcx.mk_mach_uint(v)), + } } -} -fn unify_float_variable<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, - vid_is_expected: bool, - vid: ty::FloatVid, - val: ast::FloatTy) - -> RelateResult<'tcx, Ty<'tcx>> -{ - infcx.float_unification_table - .borrow_mut() - .unify_var_value(vid, val) - .map_err(|e| float_unification_error(vid_is_expected, e))?; - Ok(infcx.tcx.mk_mach_float(val)) + fn unify_float_variable(&self, + vid_is_expected: bool, + vid: ty::FloatVid, + val: ast::FloatTy) + -> RelateResult<'tcx, Ty<'tcx>> + { + self.float_unification_table + .borrow_mut() + .unify_var_value(vid, val) + .map_err(|e| float_unification_error(vid_is_expected, e))?; + Ok(self.tcx.mk_mach_float(val)) + } } -impl<'a, 'tcx> CombineFields<'a, 'tcx> { - pub fn tcx(&self) -> &'a TyCtxt<'tcx> { +impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> { + pub fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.infcx.tcx } - pub fn switch_expected(&self) -> CombineFields<'a, 'tcx> { + pub fn switch_expected(&self) -> CombineFields<'a, 'gcx, 'tcx> { CombineFields { a_is_expected: !self.a_is_expected, ..(*self).clone() } } - pub fn equate(&self) -> Equate<'a, 'tcx> { + pub fn equate(&self) -> Equate<'a, 'gcx, 'tcx> { Equate::new(self.clone()) } - pub fn bivariate(&self) -> Bivariate<'a, 'tcx> { + pub fn bivariate(&self) -> Bivariate<'a, 'gcx, 'tcx> { Bivariate::new(self.clone()) } - pub fn sub(&self) -> Sub<'a, 'tcx> { + pub fn sub(&self) -> Sub<'a, 'gcx, 'tcx> { Sub::new(self.clone()) } - pub fn lub(&self) -> Lub<'a, 'tcx> { + pub fn lub(&self) -> Lub<'a, 'gcx, 'tcx> { Lub::new(self.clone()) } - pub fn glb(&self) -> Glb<'a, 'tcx> { + pub fn glb(&self) -> Glb<'a, 'gcx, 'tcx> { Glb::new(self.clone()) } @@ -289,16 +291,16 @@ impl<'a, 'tcx> CombineFields<'a, 'tcx> { } } -struct Generalizer<'cx, 'tcx:'cx> { - infcx: &'cx InferCtxt<'cx, 'tcx>, +struct Generalizer<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { + infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>, span: Span, for_vid: ty::TyVid, make_region_vars: bool, cycle_detected: bool, } -impl<'cx, 'tcx> ty::fold::TypeFolder<'tcx> for Generalizer<'cx, 'tcx> { - fn tcx(&self) -> &TyCtxt<'tcx> { +impl<'cx, 'gcx, 'tcx> ty::fold::TypeFolder<'gcx, 'tcx> for Generalizer<'cx, 'gcx, 'tcx> { + fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx> { self.infcx.tcx } diff --git a/src/librustc/infer/equate.rs b/src/librustc/infer/equate.rs index 5540046c9e..408f22cf15 100644 --- a/src/librustc/infer/equate.rs +++ b/src/librustc/infer/equate.rs @@ -8,8 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use super::combine::{self, CombineFields}; -use super::higher_ranked::HigherRankedRelations; +use super::combine::CombineFields; use super::{Subtype}; use super::type_variable::{EqTo}; @@ -19,12 +18,12 @@ use ty::relate::{Relate, RelateResult, TypeRelation}; use traits::PredicateObligations; /// Ensures `a` is made equal to `b`. Returns `a` on success. 
-pub struct Equate<'a, 'tcx: 'a> { - fields: CombineFields<'a, 'tcx> +pub struct Equate<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fields: CombineFields<'a, 'gcx, 'tcx> } -impl<'a, 'tcx> Equate<'a, 'tcx> { - pub fn new(fields: CombineFields<'a, 'tcx>) -> Equate<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Equate<'a, 'gcx, 'tcx> { + pub fn new(fields: CombineFields<'a, 'gcx, 'tcx>) -> Equate<'a, 'gcx, 'tcx> { Equate { fields: fields } } @@ -33,18 +32,18 @@ impl<'a, 'tcx> Equate<'a, 'tcx> { } } -impl<'a, 'tcx> TypeRelation<'a,'tcx> for Equate<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> TypeRelation<'a, 'gcx, 'tcx> for Equate<'a, 'gcx, 'tcx> { fn tag(&self) -> &'static str { "Equate" } - fn tcx(&self) -> &'a TyCtxt<'tcx> { self.fields.tcx() } + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.fields.tcx() } fn a_is_expected(&self) -> bool { self.fields.a_is_expected } - fn relate_with_variance>(&mut self, - _: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> + fn relate_with_variance>(&mut self, + _: ty::Variance, + a: &T, + b: &T) + -> RelateResult<'tcx, T> { self.relate(a, b) } @@ -74,7 +73,7 @@ impl<'a, 'tcx> TypeRelation<'a,'tcx> for Equate<'a, 'tcx> { } _ => { - combine::super_combine_tys(self.fields.infcx, self, a, b)?; + self.fields.infcx.super_combine_tys(self, a, b)?; Ok(a) } } @@ -92,7 +91,7 @@ impl<'a, 'tcx> TypeRelation<'a,'tcx> for Equate<'a, 'tcx> { fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) -> RelateResult<'tcx, ty::Binder> - where T: Relate<'a, 'tcx> + where T: Relate<'tcx> { self.fields.higher_ranked_sub(a, b)?; self.fields.higher_ranked_sub(b, a) diff --git a/src/librustc/infer/error_reporting.rs b/src/librustc/infer/error_reporting.rs index a7553f4eb1..8afee54c4b 100644 --- a/src/librustc/infer/error_reporting.rs +++ b/src/librustc/infer/error_reporting.rs @@ -77,7 +77,6 @@ use hir::map as ast_map; use hir; use hir::print as pprust; -use middle::cstore::CrateStore; use hir::def::Def; use hir::def_id::DefId; use infer::{self, TypeOrigin}; @@ -91,13 +90,13 @@ use std::cell::{Cell, RefCell}; use std::char::from_u32; use std::fmt; use syntax::ast; -use syntax::errors::DiagnosticBuilder; +use syntax::errors::{DiagnosticBuilder, check_old_skool}; use syntax::codemap::{self, Pos, Span}; use syntax::parse::token; use syntax::ptr::P; -impl<'tcx> TyCtxt<'tcx> { - pub fn note_and_explain_region(&self, +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn note_and_explain_region(self, err: &mut DiagnosticBuilder, prefix: &str, region: ty::Region, @@ -113,8 +112,9 @@ impl<'tcx> TyCtxt<'tcx> { } } - fn explain_span(tcx: &TyCtxt, heading: &str, span: Span) - -> (String, Option) { + fn explain_span<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + heading: &str, span: Span) + -> (String, Option) { let lo = tcx.sess.codemap().lookup_char_pos_adj(span.lo); (format!("the {} at {}:{}", heading, lo.line, lo.col.to_usize()), Some(span)) @@ -158,7 +158,7 @@ impl<'tcx> TyCtxt<'tcx> { "scope of call-site for function" } region::CodeExtentData::ParameterScope { .. 
} => { - "scope of parameters for function" + "scope of function body" } region::CodeExtentData::DestructionScope(_) => { new_string = format!("destruction scope surrounding {}", tag); @@ -228,83 +228,9 @@ impl<'tcx> TyCtxt<'tcx> { } } -pub trait ErrorReporting<'tcx> { - fn report_region_errors(&self, - errors: &Vec>); - - fn process_errors(&self, errors: &Vec>) - -> Option>>; - - fn report_type_error(&self, - trace: TypeTrace<'tcx>, - terr: &TypeError<'tcx>) - -> DiagnosticBuilder<'tcx>; - - fn check_and_note_conflicting_crates(&self, - err: &mut DiagnosticBuilder, - terr: &TypeError<'tcx>, - sp: Span); - - fn report_and_explain_type_error(&self, - trace: TypeTrace<'tcx>, - terr: &TypeError<'tcx>) - -> DiagnosticBuilder<'tcx>; - - fn values_str(&self, values: &ValuePairs<'tcx>) -> Option; - - fn expected_found_str + TypeFoldable<'tcx>>( - &self, - exp_found: &ty::error::ExpectedFound) - -> Option; - - fn report_concrete_failure(&self, - origin: SubregionOrigin<'tcx>, - sub: Region, - sup: Region) - -> DiagnosticBuilder<'tcx>; - - fn report_generic_bound_failure(&self, - origin: SubregionOrigin<'tcx>, - kind: GenericKind<'tcx>, - sub: Region); - - fn report_sub_sup_conflict(&self, - var_origin: RegionVariableOrigin, - sub_origin: SubregionOrigin<'tcx>, - sub_region: Region, - sup_origin: SubregionOrigin<'tcx>, - sup_region: Region); - - fn report_processed_errors(&self, - origins: &[ProcessedErrorOrigin<'tcx>], - same_regions: &[SameRegions]); - - fn give_suggestion(&self, err: &mut DiagnosticBuilder, same_regions: &[SameRegions]); -} - -trait ErrorReportingHelpers<'tcx> { - fn report_inference_failure(&self, - var_origin: RegionVariableOrigin) - -> DiagnosticBuilder<'tcx>; - - fn note_region_origin(&self, - err: &mut DiagnosticBuilder, - origin: &SubregionOrigin<'tcx>); - - fn give_expl_lifetime_param(&self, - err: &mut DiagnosticBuilder, - decl: &hir::FnDecl, - unsafety: hir::Unsafety, - constness: hir::Constness, - name: ast::Name, - opt_explicit_self: Option<&hir::ExplicitSelf_>, - generics: &hir::Generics, - span: Span); -} - -impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { - fn report_region_errors(&self, - errors: &Vec>) { +impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { + pub fn report_region_errors(&self, + errors: &Vec>) { debug!("report_region_errors(): {} errors to start", errors.len()); // try to pre-process the errors, which will group some of them @@ -475,10 +401,10 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { } } - fn free_regions_from_same_fn(tcx: &TyCtxt, - sub: Region, - sup: Region) - -> Option { + fn free_regions_from_same_fn<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + sub: Region, + sup: Region) + -> Option { debug!("free_regions_from_same_fn(sub={:?}, sup={:?})", sub, sup); let (scope_id, fr1, fr2) = match (sub, sup) { (ReFree(fr1), ReFree(fr2)) => { @@ -536,7 +462,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { trace: TypeTrace<'tcx>, terr: &TypeError<'tcx>) -> DiagnosticBuilder<'tcx> { - let expected_found_str = match self.values_str(&trace.values) { + let (expected, found) = match self.values_str(&trace.values) { Some(v) => v, None => { return self.tcx.sess.diagnostic().struct_dummy(); /* derived error */ @@ -549,18 +475,17 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { false }; - let expected_found_str = if is_simple_error { - expected_found_str - } else { - format!("{} ({})", expected_found_str, terr) - }; - let mut err = struct_span_err!(self.tcx.sess, trace.origin.span(), E0308, - "{}: 
{}", - trace.origin, - expected_found_str); + "{}", + trace.origin); + + if !is_simple_error || check_old_skool() { + err.note_expected_found(&"type", &expected, &found); + } + + err.span_label(trace.origin.span(), &terr); self.check_and_note_conflicting_crates(&mut err, terr, trace.origin.span()); @@ -575,6 +500,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { }, _ => () } + err } @@ -620,10 +546,10 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { } } - fn report_and_explain_type_error(&self, - trace: TypeTrace<'tcx>, - terr: &TypeError<'tcx>) - -> DiagnosticBuilder<'tcx> { + pub fn report_and_explain_type_error(&self, + trace: TypeTrace<'tcx>, + terr: &TypeError<'tcx>) + -> DiagnosticBuilder<'tcx> { let span = trace.origin.span(); let mut err = self.report_type_error(trace, terr); self.tcx.note_and_explain_type_err(&mut err, terr, span); @@ -632,7 +558,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { /// Returns a string of the form "expected `{}`, found `{}`", or None if this is a derived /// error. - fn values_str(&self, values: &ValuePairs<'tcx>) -> Option { + fn values_str(&self, values: &ValuePairs<'tcx>) -> Option<(String, String)> { match *values { infer::Types(ref exp_found) => self.expected_found_str(exp_found), infer::TraitRefs(ref exp_found) => self.expected_found_str(exp_found), @@ -643,7 +569,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { fn expected_found_str + TypeFoldable<'tcx>>( &self, exp_found: &ty::error::ExpectedFound) - -> Option + -> Option<(String, String)> { let expected = exp_found.expected.resolve(self); if expected.references_error() { @@ -655,9 +581,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { return None; } - Some(format!("expected `{}`, found `{}`", - expected, - found)) + Some((format!("{}", expected), format!("{}", found))) } fn report_generic_bound_failure(&self, @@ -685,10 +609,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { E0309, "{} may not live long enough", labeled_user_string); - err.fileline_help(origin.span(), - &format!("consider adding an explicit lifetime bound `{}: {}`...", - bound_kind, - sub)); + err.help(&format!("consider adding an explicit lifetime bound `{}: {}`...", + bound_kind, + sub)); err } @@ -699,10 +622,9 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { E0310, "{} may not live long enough", labeled_user_string); - err.fileline_help(origin.span(), - &format!("consider adding an explicit lifetime \ - bound `{}: 'static`...", - bound_kind)); + err.help(&format!("consider adding an explicit lifetime \ + bound `{}: 'static`...", + bound_kind)); err } @@ -713,9 +635,8 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { E0311, "{} may not live long enough", labeled_user_string); - err.fileline_help(origin.span(), - &format!("consider adding an explicit lifetime bound for `{}`", - bound_kind)); + err.help(&format!("consider adding an explicit lifetime bound for `{}`", + bound_kind)); self.tcx.note_and_explain_region( &mut err, &format!("{} must be valid for ", labeled_user_string), @@ -741,7 +662,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { } infer::Reborrow(span) => { let mut err = struct_span_err!(self.tcx.sess, span, E0312, - "lifetime of reference outlines \ + "lifetime of reference outlives \ lifetime of borrowed content..."); self.tcx.note_and_explain_region(&mut err, "...the reference is valid for ", @@ -1055,8 +976,7 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for 
InferCtxt<'a, 'tcx> { ast_map::NodeItem(ref item) => { match item.node { hir::ItemFn(ref fn_decl, unsafety, constness, _, ref gen, _) => { - Some((fn_decl, gen, unsafety, constness, - item.name, None, item.span)) + Some((fn_decl, gen, unsafety, constness, item.name, item.span)) }, _ => None } @@ -1069,7 +989,6 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { sig.unsafety, sig.constness, item.name, - Some(&sig.explicit_self.node), item.span)) } _ => None, @@ -1083,7 +1002,6 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { sig.unsafety, sig.constness, item.name, - Some(&sig.explicit_self.node), item.span)) } _ => None @@ -1093,13 +1011,11 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> { }, None => None }; - let (fn_decl, generics, unsafety, constness, name, expl_self, span) + let (fn_decl, generics, unsafety, constness, name, span) = node_inner.expect("expect item fn"); - let rebuilder = Rebuilder::new(self.tcx, fn_decl, expl_self, - generics, same_regions, &life_giver); - let (fn_decl, expl_self, generics) = rebuilder.rebuild(); - self.give_expl_lifetime_param(err, &fn_decl, unsafety, constness, name, - expl_self.as_ref(), &generics, span); + let rebuilder = Rebuilder::new(self.tcx, fn_decl, generics, same_regions, &life_giver); + let (fn_decl, generics) = rebuilder.rebuild(); + self.give_expl_lifetime_param(err, &fn_decl, unsafety, constness, name, &generics, span); } } @@ -1114,10 +1030,9 @@ struct RebuildPathInfo<'a> { region_names: &'a HashSet } -struct Rebuilder<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, +struct Rebuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx>, fn_decl: &'a hir::FnDecl, - expl_self_opt: Option<&'a hir::ExplicitSelf_>, generics: &'a hir::Generics, same_regions: &'a [SameRegions], life_giver: &'a LifeGiver, @@ -1130,18 +1045,16 @@ enum FreshOrKept { Kept } -impl<'a, 'tcx> Rebuilder<'a, 'tcx> { - fn new(tcx: &'a TyCtxt<'tcx>, +impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> { + fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, fn_decl: &'a hir::FnDecl, - expl_self_opt: Option<&'a hir::ExplicitSelf_>, generics: &'a hir::Generics, same_regions: &'a [SameRegions], life_giver: &'a LifeGiver) - -> Rebuilder<'a, 'tcx> { + -> Rebuilder<'a, 'gcx, 'tcx> { Rebuilder { tcx: tcx, fn_decl: fn_decl, - expl_self_opt: expl_self_opt, generics: generics, same_regions: same_regions, life_giver: life_giver, @@ -1150,9 +1063,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { } } - fn rebuild(&self) - -> (hir::FnDecl, Option, hir::Generics) { - let mut expl_self_opt = self.expl_self_opt.cloned(); + fn rebuild(&self) -> (hir::FnDecl, hir::Generics) { let mut inputs = self.fn_decl.inputs.clone(); let mut output = self.fn_decl.output.clone(); let mut ty_params = self.generics.ty_params.clone(); @@ -1168,8 +1079,6 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { Kept => { kept_lifetimes.insert(lifetime.name); } _ => () } - expl_self_opt = self.rebuild_expl_self(expl_self_opt, lifetime, - &anon_nums, ®ion_names); inputs = self.rebuild_args_ty(&inputs[..], lifetime, &anon_nums, ®ion_names); output = self.rebuild_output(&output, lifetime, &anon_nums, ®ion_names); @@ -1189,7 +1098,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { output: output, variadic: self.fn_decl.variadic }; - (new_fn_decl, expl_self_opt, generics) + (new_fn_decl, generics) } fn pick_lifetime(&self, @@ -1329,34 +1238,6 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { }).collect() } - fn rebuild_expl_self(&self, - expl_self_opt: Option, - lifetime: hir::Lifetime, - anon_nums: &HashSet, - region_names: &HashSet) - 
-> Option { - match expl_self_opt { - Some(ref expl_self) => match *expl_self { - hir::SelfRegion(lt_opt, muta, id) => match lt_opt { - Some(lt) => if region_names.contains(<.name) { - return Some(hir::SelfRegion(Some(lifetime), muta, id)); - }, - None => { - let anon = self.cur_anon.get(); - self.inc_and_offset_cur_anon(1); - if anon_nums.contains(&anon) { - self.track_anon(anon); - return Some(hir::SelfRegion(Some(lifetime), muta, id)); - } - } - }, - _ => () - }, - None => () - } - expl_self_opt - } - fn rebuild_generics(&self, generics: &hir::Generics, add: &Vec, @@ -1633,7 +1514,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { } }; let new_seg = hir::PathSegment { - identifier: last_seg.identifier, + name: last_seg.name, parameters: new_parameters }; let mut new_segs = Vec::new(); @@ -1647,18 +1528,16 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> { } } -impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { fn give_expl_lifetime_param(&self, err: &mut DiagnosticBuilder, decl: &hir::FnDecl, unsafety: hir::Unsafety, constness: hir::Constness, name: ast::Name, - opt_explicit_self: Option<&hir::ExplicitSelf_>, generics: &hir::Generics, span: Span) { - let suggested_fn = pprust::fun_to_string(decl, unsafety, constness, name, - opt_explicit_self, generics); + let suggested_fn = pprust::fun_to_string(decl, unsafety, constness, name, generics); let msg = format!("consider using an explicit lifetime \ parameter as shown: {}", suggested_fn); err.span_help(span, &msg[..]); @@ -1752,11 +1631,11 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { }; match self.values_str(&trace.values) { - Some(values_str) => { + Some((expected, found)) => { err.span_note( trace.origin.span(), - &format!("...so that {} ({})", - desc, values_str)); + &format!("...so that {} (expected {}, found {})", + desc, expected, found)); } None => { // Really should avoid printing this error at @@ -1910,34 +1789,34 @@ impl<'a, 'tcx> ErrorReportingHelpers<'tcx> for InferCtxt<'a, 'tcx> { } pub trait Resolvable<'tcx> { - fn resolve<'a>(&self, infcx: &InferCtxt<'a, 'tcx>) -> Self; + fn resolve<'a, 'gcx>(&self, infcx: &InferCtxt<'a, 'gcx, 'tcx>) -> Self; } impl<'tcx> Resolvable<'tcx> for Ty<'tcx> { - fn resolve<'a>(&self, infcx: &InferCtxt<'a, 'tcx>) -> Ty<'tcx> { + fn resolve<'a, 'gcx>(&self, infcx: &InferCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { infcx.resolve_type_vars_if_possible(self) } } impl<'tcx> Resolvable<'tcx> for ty::TraitRef<'tcx> { - fn resolve<'a>(&self, infcx: &InferCtxt<'a, 'tcx>) - -> ty::TraitRef<'tcx> { + fn resolve<'a, 'gcx>(&self, infcx: &InferCtxt<'a, 'gcx, 'tcx>) + -> ty::TraitRef<'tcx> { infcx.resolve_type_vars_if_possible(self) } } impl<'tcx> Resolvable<'tcx> for ty::PolyTraitRef<'tcx> { - fn resolve<'a>(&self, - infcx: &InferCtxt<'a, 'tcx>) - -> ty::PolyTraitRef<'tcx> + fn resolve<'a, 'gcx>(&self, + infcx: &InferCtxt<'a, 'gcx, 'tcx>) + -> ty::PolyTraitRef<'tcx> { infcx.resolve_type_vars_if_possible(self) } } -fn lifetimes_in_scope(tcx: &TyCtxt, - scope_id: ast::NodeId) - -> Vec { +fn lifetimes_in_scope<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + scope_id: ast::NodeId) + -> Vec { let mut taken = Vec::new(); let parent = tcx.map.get_parent(scope_id); let method_id_opt = match tcx.map.find(parent) { diff --git a/src/librustc/infer/freshen.rs b/src/librustc/infer/freshen.rs index b0f1e9d890..5ded6dc736 100644 --- a/src/librustc/infer/freshen.rs +++ b/src/librustc/infer/freshen.rs @@ -37,14 +37,15 @@ use std::collections::hash_map::{self, Entry}; 
use super::InferCtxt; use super::unify_key::ToType; -pub struct TypeFreshener<'a, 'tcx:'a> { - infcx: &'a InferCtxt<'a, 'tcx>, +pub struct TypeFreshener<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, freshen_count: u32, freshen_map: hash_map::HashMap>, } -impl<'a, 'tcx> TypeFreshener<'a, 'tcx> { - pub fn new(infcx: &'a InferCtxt<'a, 'tcx>) -> TypeFreshener<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> TypeFreshener<'a, 'gcx, 'tcx> { + pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) + -> TypeFreshener<'a, 'gcx, 'tcx> { TypeFreshener { infcx: infcx, freshen_count: 0, @@ -77,8 +78,8 @@ impl<'a, 'tcx> TypeFreshener<'a, 'tcx> { } } -impl<'a, 'tcx> TypeFolder<'tcx> for TypeFreshener<'a, 'tcx> { - fn tcx<'b>(&'b self) -> &'b TyCtxt<'tcx> { +impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for TypeFreshener<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.infcx.tcx } diff --git a/src/librustc/infer/glb.rs b/src/librustc/infer/glb.rs index 37717c2b6b..b7085f0829 100644 --- a/src/librustc/infer/glb.rs +++ b/src/librustc/infer/glb.rs @@ -9,7 +9,6 @@ // except according to those terms. use super::combine::CombineFields; -use super::higher_ranked::HigherRankedRelations; use super::InferCtxt; use super::lattice::{self, LatticeDir}; use super::Subtype; @@ -19,12 +18,12 @@ use ty::relate::{Relate, RelateResult, TypeRelation}; use traits::PredicateObligations; /// "Greatest lower bound" (common subtype) -pub struct Glb<'a, 'tcx: 'a> { - fields: CombineFields<'a, 'tcx> +pub struct Glb<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fields: CombineFields<'a, 'gcx, 'tcx> } -impl<'a, 'tcx> Glb<'a, 'tcx> { - pub fn new(fields: CombineFields<'a, 'tcx>) -> Glb<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Glb<'a, 'gcx, 'tcx> { + pub fn new(fields: CombineFields<'a, 'gcx, 'tcx>) -> Glb<'a, 'gcx, 'tcx> { Glb { fields: fields } } @@ -33,18 +32,18 @@ impl<'a, 'tcx> Glb<'a, 'tcx> { } } -impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Glb<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> TypeRelation<'a, 'gcx, 'tcx> for Glb<'a, 'gcx, 'tcx> { fn tag(&self) -> &'static str { "Glb" } - fn tcx(&self) -> &'a TyCtxt<'tcx> { self.fields.tcx() } + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.fields.tcx() } fn a_is_expected(&self) -> bool { self.fields.a_is_expected } - fn relate_with_variance>(&mut self, - variance: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> + fn relate_with_variance>(&mut self, + variance: ty::Variance, + a: &T, + b: &T) + -> RelateResult<'tcx, T> { match variance { ty::Invariant => self.fields.equate().relate(a, b), @@ -70,14 +69,14 @@ impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Glb<'a, 'tcx> { fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) -> RelateResult<'tcx, ty::Binder> - where T: Relate<'a, 'tcx> + where T: Relate<'tcx> { self.fields.higher_ranked_glb(a, b) } } -impl<'a, 'tcx> LatticeDir<'a,'tcx> for Glb<'a, 'tcx> { - fn infcx(&self) -> &'a InferCtxt<'a,'tcx> { +impl<'a, 'gcx, 'tcx> LatticeDir<'a, 'gcx, 'tcx> for Glb<'a, 'gcx, 'tcx> { + fn infcx(&self) -> &'a InferCtxt<'a, 'gcx, 'tcx> { self.fields.infcx } diff --git a/src/librustc/infer/higher_ranked/mod.rs b/src/librustc/infer/higher_ranked/mod.rs index d89ef8ef6c..6814d50107 100644 --- a/src/librustc/infer/higher_ranked/mod.rs +++ b/src/librustc/infer/higher_ranked/mod.rs @@ -20,29 +20,10 @@ use ty::relate::{Relate, RelateResult, TypeRelation}; use syntax::codemap::Span; use util::nodemap::{FnvHashMap, FnvHashSet}; -pub trait HigherRankedRelations<'a,'tcx> { - fn higher_ranked_sub(&self, a: &Binder, b: &Binder) -> RelateResult<'tcx, Binder> - 
where T: Relate<'a,'tcx>; - - fn higher_ranked_lub(&self, a: &Binder, b: &Binder) -> RelateResult<'tcx, Binder> - where T: Relate<'a,'tcx>; - - fn higher_ranked_glb(&self, a: &Binder, b: &Binder) -> RelateResult<'tcx, Binder> - where T: Relate<'a,'tcx>; -} - -trait InferCtxtExt { - fn tainted_regions(&self, snapshot: &CombinedSnapshot, r: ty::Region) -> Vec; - - fn region_vars_confined_to_snapshot(&self, - snapshot: &CombinedSnapshot) - -> Vec; -} - -impl<'a,'tcx> HigherRankedRelations<'a,'tcx> for CombineFields<'a,'tcx> { - fn higher_ranked_sub(&self, a: &Binder, b: &Binder) - -> RelateResult<'tcx, Binder> - where T: Relate<'a,'tcx> +impl<'a, 'gcx, 'tcx> CombineFields<'a, 'gcx, 'tcx> { + pub fn higher_ranked_sub(&self, a: &Binder, b: &Binder) + -> RelateResult<'tcx, Binder> + where T: Relate<'tcx> { debug!("higher_ranked_sub(a={:?}, b={:?})", a, b); @@ -79,30 +60,17 @@ impl<'a,'tcx> HigherRankedRelations<'a,'tcx> for CombineFields<'a,'tcx> { // Presuming type comparison succeeds, we need to check // that the skolemized regions do not "leak". - match leak_check(self.infcx, &skol_map, snapshot) { - Ok(()) => { } - Err((skol_br, tainted_region)) => { - if self.a_is_expected { - debug!("Not as polymorphic!"); - return Err(TypeError::RegionsInsufficientlyPolymorphic(skol_br, - tainted_region)); - } else { - debug!("Overly polymorphic!"); - return Err(TypeError::RegionsOverlyPolymorphic(skol_br, - tainted_region)); - } - } - } + self.infcx.leak_check(!self.a_is_expected, &skol_map, snapshot)?; - debug!("higher_ranked_sub: OK result={:?}", - result); + debug!("higher_ranked_sub: OK result={:?}", result); Ok(ty::Binder(result)) }); } - fn higher_ranked_lub(&self, a: &Binder, b: &Binder) -> RelateResult<'tcx, Binder> - where T: Relate<'a,'tcx> + pub fn higher_ranked_lub(&self, a: &Binder, b: &Binder) + -> RelateResult<'tcx, Binder> + where T: Relate<'tcx> { // Start a snapshot so we can examine "all bindings that were // created as part of this type comparison". @@ -141,14 +109,14 @@ impl<'a,'tcx> HigherRankedRelations<'a,'tcx> for CombineFields<'a,'tcx> { Ok(ty::Binder(result1)) }); - fn generalize_region(infcx: &InferCtxt, - span: Span, - snapshot: &CombinedSnapshot, - debruijn: ty::DebruijnIndex, - new_vars: &[ty::RegionVid], - a_map: &FnvHashMap, - r0: ty::Region) - -> ty::Region { + fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + span: Span, + snapshot: &CombinedSnapshot, + debruijn: ty::DebruijnIndex, + new_vars: &[ty::RegionVid], + a_map: &FnvHashMap, + r0: ty::Region) + -> ty::Region { // Regions that pre-dated the LUB computation stay as they are. 
if !is_var_in_set(new_vars, r0) { assert!(!r0.is_bound()); @@ -190,8 +158,9 @@ impl<'a,'tcx> HigherRankedRelations<'a,'tcx> for CombineFields<'a,'tcx> { } } - fn higher_ranked_glb(&self, a: &Binder, b: &Binder) -> RelateResult<'tcx, Binder> - where T: Relate<'a,'tcx> + pub fn higher_ranked_glb(&self, a: &Binder, b: &Binder) + -> RelateResult<'tcx, Binder> + where T: Relate<'tcx> { debug!("higher_ranked_glb({:?}, {:?})", a, b); @@ -236,15 +205,15 @@ impl<'a,'tcx> HigherRankedRelations<'a,'tcx> for CombineFields<'a,'tcx> { Ok(ty::Binder(result1)) }); - fn generalize_region(infcx: &InferCtxt, - span: Span, - snapshot: &CombinedSnapshot, - debruijn: ty::DebruijnIndex, - new_vars: &[ty::RegionVid], - a_map: &FnvHashMap, - a_vars: &[ty::RegionVid], - b_vars: &[ty::RegionVid], - r0: ty::Region) -> ty::Region { + fn generalize_region<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + span: Span, + snapshot: &CombinedSnapshot, + debruijn: ty::DebruijnIndex, + new_vars: &[ty::RegionVid], + a_map: &FnvHashMap, + a_vars: &[ty::RegionVid], + b_vars: &[ty::RegionVid], + r0: ty::Region) -> ty::Region { if !is_var_in_set(new_vars, r0) { assert!(!r0.is_bound()); return r0; @@ -328,9 +297,9 @@ impl<'a,'tcx> HigherRankedRelations<'a,'tcx> for CombineFields<'a,'tcx> { } } -fn var_ids<'a, 'tcx>(fields: &CombineFields<'a, 'tcx>, - map: &FnvHashMap) - -> Vec { +fn var_ids<'a, 'gcx, 'tcx>(fields: &CombineFields<'a, 'gcx, 'tcx>, + map: &FnvHashMap) + -> Vec { map.iter() .map(|(_, r)| match *r { ty::ReVar(r) => { r } @@ -351,10 +320,10 @@ fn is_var_in_set(new_vars: &[ty::RegionVid], r: ty::Region) -> bool { } } -fn fold_regions_in<'tcx, T, F>(tcx: &TyCtxt<'tcx>, - unbound_value: &T, - mut fldr: F) - -> T +fn fold_regions_in<'a, 'gcx, 'tcx, T, F>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + unbound_value: &T, + mut fldr: F) + -> T where T: TypeFoldable<'tcx>, F: FnMut(ty::Region, ty::DebruijnIndex) -> ty::Region, { @@ -371,7 +340,7 @@ fn fold_regions_in<'tcx, T, F>(tcx: &TyCtxt<'tcx>, }) } -impl<'a,'tcx> InferCtxtExt for InferCtxt<'a,'tcx> { +impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { fn tainted_regions(&self, snapshot: &CombinedSnapshot, r: ty::Region) -> Vec { self.region_vars.tainted(&snapshot.region_vars_snapshot, r) } @@ -452,163 +421,170 @@ impl<'a,'tcx> InferCtxtExt for InferCtxt<'a,'tcx> { region_vars } -} -pub fn skolemize_late_bound_regions<'a,'tcx,T>(infcx: &InferCtxt<'a,'tcx>, - binder: &ty::Binder, - snapshot: &CombinedSnapshot) - -> (T, SkolemizationMap) - where T : TypeFoldable<'tcx> -{ - /*! - * Replace all regions bound by `binder` with skolemized regions and - * return a map indicating which bound-region was replaced with what - * skolemized region. This is the first step of checking subtyping - * when higher-ranked things are involved. See `README.md` for more - * details. - */ - - let (result, map) = infcx.tcx.replace_late_bound_regions(binder, |br| { - infcx.region_vars.new_skolemized(br, &snapshot.region_vars_snapshot) - }); - - debug!("skolemize_bound_regions(binder={:?}, result={:?}, map={:?})", - binder, - result, - map); - - (result, map) -} + pub fn skolemize_late_bound_regions(&self, + binder: &ty::Binder, + snapshot: &CombinedSnapshot) + -> (T, SkolemizationMap) + where T : TypeFoldable<'tcx> + { + /*! + * Replace all regions bound by `binder` with skolemized regions and + * return a map indicating which bound-region was replaced with what + * skolemized region. This is the first step of checking subtyping + * when higher-ranked things are involved. See `README.md` for more + * details. 
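The doc comment being moved here describes the first step of higher-ranked subtyping: every region bound by the binder is replaced with a fresh skolemized region, and the method returns the rewritten value together with a map recording which bound region became which skolemized one. A rough, self-contained illustration of that replace-and-record step over an invented `Region` type (not the rustc machinery):

```rust
use std::collections::HashMap;

// Invented stand-ins for ty::BoundRegion and a skolemized region.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct BoundVar(u32);
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Placeholder(u32);

#[derive(Clone, Debug)]
enum Region {
    Bound(BoundVar),
    Skolemized(Placeholder),
}

/// Replace every bound region with a fresh placeholder and return the
/// rewritten regions together with the bound-var -> placeholder map.
fn skolemize(regions: &[Region], next_id: &mut u32)
             -> (Vec<Region>, HashMap<BoundVar, Placeholder>) {
    let mut map = HashMap::new();
    let replaced = regions.iter().map(|r| match *r {
        Region::Bound(bv) => {
            let p = *map.entry(bv).or_insert_with(|| {
                let fresh = Placeholder(*next_id);
                *next_id += 1;
                fresh
            });
            Region::Skolemized(p)
        }
        ref other => other.clone(),
    }).collect();
    (replaced, map)
}

fn main() {
    let mut counter = 0;
    let under_binder = vec![
        Region::Bound(BoundVar(0)),
        Region::Bound(BoundVar(1)),
        Region::Bound(BoundVar(0)), // the same bound region maps to the same placeholder
    ];
    let (skolemized, map) = skolemize(&under_binder, &mut counter);
    println!("skolemized: {:?}", skolemized);
    println!("map: {:?}", map);
}
```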
+ */ -pub fn leak_check<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, - skol_map: &SkolemizationMap, - snapshot: &CombinedSnapshot) - -> Result<(),(ty::BoundRegion,ty::Region)> -{ - /*! - * Searches the region constriants created since `snapshot` was started - * and checks to determine whether any of the skolemized regions created - * in `skol_map` would "escape" -- meaning that they are related to - * other regions in some way. If so, the higher-ranked subtyping doesn't - * hold. See `README.md` for more details. - */ - - debug!("leak_check: skol_map={:?}", - skol_map); - - let new_vars = infcx.region_vars_confined_to_snapshot(snapshot); - for (&skol_br, &skol) in skol_map { - let tainted = infcx.tainted_regions(snapshot, skol); - for &tainted_region in &tainted { - // Each skolemized should only be relatable to itself - // or new variables: - match tainted_region { - ty::ReVar(vid) => { - if new_vars.iter().any(|&x| x == vid) { continue; } - } - _ => { - if tainted_region == skol { continue; } - } - }; + let (result, map) = self.tcx.replace_late_bound_regions(binder, |br| { + self.region_vars.new_skolemized(br, &snapshot.region_vars_snapshot) + }); - debug!("{:?} (which replaced {:?}) is tainted by {:?}", - skol, - skol_br, - tainted_region); + debug!("skolemize_bound_regions(binder={:?}, result={:?}, map={:?})", + binder, + result, + map); - // A is not as polymorphic as B: - return Err((skol_br, tainted_region)); - } + (result, map) } - Ok(()) -} -/// This code converts from skolemized regions back to late-bound -/// regions. It works by replacing each region in the taint set of a -/// skolemized region with a bound-region. The bound region will be bound -/// by the outer-most binder in `value`; the caller must ensure that there is -/// such a binder and it is the right place. -/// -/// This routine is only intended to be used when the leak-check has -/// passed; currently, it's used in the trait matching code to create -/// a set of nested obligations frmo an impl that matches against -/// something higher-ranked. More details can be found in -/// `librustc/middle/traits/README.md`. -/// -/// As a brief example, consider the obligation `for<'a> Fn(&'a int) -/// -> &'a int`, and the impl: -/// -/// impl Fn for SomethingOrOther -/// where A : Clone -/// { ... } -/// -/// Here we will have replaced `'a` with a skolemized region -/// `'0`. This means that our substitution will be `{A=>&'0 -/// int, R=>&'0 int}`. -/// -/// When we apply the substitution to the bounds, we will wind up with -/// `&'0 int : Clone` as a predicate. As a last step, we then go and -/// replace `'0` with a late-bound region `'a`. The depth is matched -/// to the depth of the predicate, in this case 1, so that the final -/// predicate is `for<'a> &'a int : Clone`. -pub fn plug_leaks<'a,'tcx,T>(infcx: &InferCtxt<'a,'tcx>, - skol_map: SkolemizationMap, - snapshot: &CombinedSnapshot, - value: &T) - -> T - where T : TypeFoldable<'tcx> -{ - debug_assert!(leak_check(infcx, &skol_map, snapshot).is_ok()); - - debug!("plug_leaks(skol_map={:?}, value={:?})", - skol_map, - value); - - // Compute a mapping from the "taint set" of each skolemized - // region back to the `ty::BoundRegion` that it originally - // represented. Because `leak_check` passed, we know that - // these taint sets are mutually disjoint. 
- let inv_skol_map: FnvHashMap = - skol_map - .into_iter() - .flat_map(|(skol_br, skol)| { - infcx.tainted_regions(snapshot, skol) - .into_iter() - .map(move |tainted_region| (tainted_region, skol_br)) - }) - .collect(); - - debug!("plug_leaks: inv_skol_map={:?}", - inv_skol_map); - - // Remove any instantiated type variables from `value`; those can hide - // references to regions from the `fold_regions` code below. - let value = infcx.resolve_type_vars_if_possible(value); - - // Map any skolemization byproducts back to a late-bound - // region. Put that late-bound region at whatever the outermost - // binder is that we encountered in `value`. The caller is - // responsible for ensuring that (a) `value` contains at least one - // binder and (b) that binder is the one we want to use. - let result = infcx.tcx.fold_regions(&value, &mut false, |r, current_depth| { - match inv_skol_map.get(&r) { - None => r, - Some(br) => { - // It is the responsibility of the caller to ensure - // that each skolemized region appears within a - // binder. In practice, this routine is only used by - // trait checking, and all of the skolemized regions - // appear inside predicates, which always have - // binders, so this assert is satisfied. - assert!(current_depth > 1); - - ty::ReLateBound(ty::DebruijnIndex::new(current_depth - 1), br.clone()) + pub fn leak_check(&self, + overly_polymorphic: bool, + skol_map: &SkolemizationMap, + snapshot: &CombinedSnapshot) + -> RelateResult<'tcx, ()> + { + /*! + * Searches the region constriants created since `snapshot` was started + * and checks to determine whether any of the skolemized regions created + * in `skol_map` would "escape" -- meaning that they are related to + * other regions in some way. If so, the higher-ranked subtyping doesn't + * hold. See `README.md` for more details. + */ + + debug!("leak_check: skol_map={:?}", + skol_map); + + let new_vars = self.region_vars_confined_to_snapshot(snapshot); + for (&skol_br, &skol) in skol_map { + let tainted = self.tainted_regions(snapshot, skol); + for &tainted_region in &tainted { + // Each skolemized should only be relatable to itself + // or new variables: + match tainted_region { + ty::ReVar(vid) => { + if new_vars.iter().any(|&x| x == vid) { continue; } + } + _ => { + if tainted_region == skol { continue; } + } + }; + + debug!("{:?} (which replaced {:?}) is tainted by {:?}", + skol, + skol_br, + tainted_region); + + if overly_polymorphic { + debug!("Overly polymorphic!"); + return Err(TypeError::RegionsOverlyPolymorphic(skol_br, + tainted_region)); + } else { + debug!("Not as polymorphic!"); + return Err(TypeError::RegionsInsufficientlyPolymorphic(skol_br, + tainted_region)); + } } } - }); + Ok(()) + } + + /// This code converts from skolemized regions back to late-bound + /// regions. It works by replacing each region in the taint set of a + /// skolemized region with a bound-region. The bound region will be bound + /// by the outer-most binder in `value`; the caller must ensure that there is + /// such a binder and it is the right place. + /// + /// This routine is only intended to be used when the leak-check has + /// passed; currently, it's used in the trait matching code to create + /// a set of nested obligations frmo an impl that matches against + /// something higher-ranked. More details can be found in + /// `librustc/middle/traits/README.md`. 
+ /// + /// As a brief example, consider the obligation `for<'a> Fn(&'a int) + /// -> &'a int`, and the impl: + /// + /// impl Fn for SomethingOrOther + /// where A : Clone + /// { ... } + /// + /// Here we will have replaced `'a` with a skolemized region + /// `'0`. This means that our substitution will be `{A=>&'0 + /// int, R=>&'0 int}`. + /// + /// When we apply the substitution to the bounds, we will wind up with + /// `&'0 int : Clone` as a predicate. As a last step, we then go and + /// replace `'0` with a late-bound region `'a`. The depth is matched + /// to the depth of the predicate, in this case 1, so that the final + /// predicate is `for<'a> &'a int : Clone`. + pub fn plug_leaks(&self, + skol_map: SkolemizationMap, + snapshot: &CombinedSnapshot, + value: &T) -> T + where T : TypeFoldable<'tcx> + { + debug_assert!(self.leak_check(false, &skol_map, snapshot).is_ok()); + + debug!("plug_leaks(skol_map={:?}, value={:?})", + skol_map, + value); + + // Compute a mapping from the "taint set" of each skolemized + // region back to the `ty::BoundRegion` that it originally + // represented. Because `leak_check` passed, we know that + // these taint sets are mutually disjoint. + let inv_skol_map: FnvHashMap = + skol_map + .into_iter() + .flat_map(|(skol_br, skol)| { + self.tainted_regions(snapshot, skol) + .into_iter() + .map(move |tainted_region| (tainted_region, skol_br)) + }) + .collect(); + + debug!("plug_leaks: inv_skol_map={:?}", + inv_skol_map); + + // Remove any instantiated type variables from `value`; those can hide + // references to regions from the `fold_regions` code below. + let value = self.resolve_type_vars_if_possible(value); + + // Map any skolemization byproducts back to a late-bound + // region. Put that late-bound region at whatever the outermost + // binder is that we encountered in `value`. The caller is + // responsible for ensuring that (a) `value` contains at least one + // binder and (b) that binder is the one we want to use. + let result = self.tcx.fold_regions(&value, &mut false, |r, current_depth| { + match inv_skol_map.get(&r) { + None => r, + Some(br) => { + // It is the responsibility of the caller to ensure + // that each skolemized region appears within a + // binder. In practice, this routine is only used by + // trait checking, and all of the skolemized regions + // appear inside predicates, which always have + // binders, so this assert is satisfied. + assert!(current_depth > 1); + + ty::ReLateBound(ty::DebruijnIndex::new(current_depth - 1), br.clone()) + } + } + }); - debug!("plug_leaks: result={:?}", - result); + debug!("plug_leaks: result={:?}", + result); - result + result + } } diff --git a/src/librustc/infer/lattice.rs b/src/librustc/infer/lattice.rs index 17ad32e742..1a2bc4b5cf 100644 --- a/src/librustc/infer/lattice.rs +++ b/src/librustc/infer/lattice.rs @@ -29,26 +29,25 @@ //! over a `LatticeValue`, which is a value defined with respect to //! a lattice. -use super::combine; use super::InferCtxt; use ty::TyVar; use ty::{self, Ty}; use ty::relate::{RelateResult, TypeRelation}; -pub trait LatticeDir<'f,'tcx> : TypeRelation<'f,'tcx> { - fn infcx(&self) -> &'f InferCtxt<'f, 'tcx>; +pub trait LatticeDir<'f, 'gcx: 'f+'tcx, 'tcx: 'f> : TypeRelation<'f, 'gcx, 'tcx> { + fn infcx(&self) -> &'f InferCtxt<'f, 'gcx, 'tcx>; // Relates the type `v` to `a` and `b` such that `v` represents // the LUB/GLB of `a` and `b` as appropriate. 
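`LatticeDir` is what lets `Lub` and `Glb` share one implementation: both are `TypeRelation`s, and the direction only decides whether the combined type is a least upper bound or a greatest lower bound of the two inputs. The distinction is easiest to see on a toy lattice; the sketch below uses an invented four-point type lattice, not the inference types:

```rust
/// A tiny four-point lattice: Never <= Int, Bool <= Any.
#[derive(Clone, Copy, PartialEq, Eq, Debug)]
enum Ty { Never, Int, Bool, Any }

/// Least upper bound: the smallest type that both inputs flow into.
fn lub(a: Ty, b: Ty) -> Ty {
    match (a, b) {
        (x, y) if x == y => x,
        (Ty::Never, x) | (x, Ty::Never) => x,
        (Ty::Any, _) | (_, Ty::Any) => Ty::Any,
        _ => Ty::Any, // Int vs Bool only meet at the top
    }
}

/// Greatest lower bound: the largest type that flows into both inputs.
fn glb(a: Ty, b: Ty) -> Ty {
    match (a, b) {
        (x, y) if x == y => x,
        (Ty::Any, x) | (x, Ty::Any) => x,
        (Ty::Never, _) | (_, Ty::Never) => Ty::Never,
        _ => Ty::Never, // Int vs Bool only meet at the bottom
    }
}

fn main() {
    assert_eq!(lub(Ty::Int, Ty::Never), Ty::Int);
    assert_eq!(lub(Ty::Int, Ty::Bool), Ty::Any);
    assert_eq!(glb(Ty::Int, Ty::Any), Ty::Int);
    assert_eq!(glb(Ty::Int, Ty::Bool), Ty::Never);
    println!("lattice ok");
}
```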
fn relate_bound(&self, v: Ty<'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, ()>; } -pub fn super_lattice_tys<'a,'tcx,L:LatticeDir<'a,'tcx>>(this: &mut L, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>> - where 'tcx: 'a +pub fn super_lattice_tys<'a, 'gcx, 'tcx, L>(this: &mut L, + a: Ty<'tcx>, + b: Ty<'tcx>) + -> RelateResult<'tcx, Ty<'tcx>> + where L: LatticeDir<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { debug!("{}.lattice_tys({:?}, {:?})", this.tag(), @@ -78,7 +77,7 @@ pub fn super_lattice_tys<'a,'tcx,L:LatticeDir<'a,'tcx>>(this: &mut L, } _ => { - combine::super_combine_tys(this.infcx(), this, a, b) + infcx.super_combine_tys(this, a, b) } } } diff --git a/src/librustc/infer/lub.rs b/src/librustc/infer/lub.rs index 32b2fe911e..bd46f3a26a 100644 --- a/src/librustc/infer/lub.rs +++ b/src/librustc/infer/lub.rs @@ -9,7 +9,6 @@ // except according to those terms. use super::combine::CombineFields; -use super::higher_ranked::HigherRankedRelations; use super::InferCtxt; use super::lattice::{self, LatticeDir}; use super::Subtype; @@ -19,12 +18,12 @@ use ty::relate::{Relate, RelateResult, TypeRelation}; use traits::PredicateObligations; /// "Least upper bound" (common supertype) -pub struct Lub<'a, 'tcx: 'a> { - fields: CombineFields<'a, 'tcx> +pub struct Lub<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fields: CombineFields<'a, 'gcx, 'tcx> } -impl<'a, 'tcx> Lub<'a, 'tcx> { - pub fn new(fields: CombineFields<'a, 'tcx>) -> Lub<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Lub<'a, 'gcx, 'tcx> { + pub fn new(fields: CombineFields<'a, 'gcx, 'tcx>) -> Lub<'a, 'gcx, 'tcx> { Lub { fields: fields } } @@ -33,18 +32,18 @@ impl<'a, 'tcx> Lub<'a, 'tcx> { } } -impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Lub<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> TypeRelation<'a, 'gcx, 'tcx> for Lub<'a, 'gcx, 'tcx> { fn tag(&self) -> &'static str { "Lub" } - fn tcx(&self) -> &'a TyCtxt<'tcx> { self.fields.tcx() } + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.fields.tcx() } fn a_is_expected(&self) -> bool { self.fields.a_is_expected } - fn relate_with_variance>(&mut self, - variance: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> + fn relate_with_variance>(&mut self, + variance: ty::Variance, + a: &T, + b: &T) + -> RelateResult<'tcx, T> { match variance { ty::Invariant => self.fields.equate().relate(a, b), @@ -70,14 +69,14 @@ impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Lub<'a, 'tcx> { fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) -> RelateResult<'tcx, ty::Binder> - where T: Relate<'a, 'tcx> + where T: Relate<'tcx> { self.fields.higher_ranked_lub(a, b) } } -impl<'a, 'tcx> LatticeDir<'a,'tcx> for Lub<'a, 'tcx> { - fn infcx(&self) -> &'a InferCtxt<'a,'tcx> { +impl<'a, 'gcx, 'tcx> LatticeDir<'a, 'gcx, 'tcx> for Lub<'a, 'gcx, 'tcx> { + fn infcx(&self) -> &'a InferCtxt<'a, 'gcx, 'tcx> { self.fields.infcx } diff --git a/src/librustc/infer/mod.rs b/src/librustc/infer/mod.rs index 4c1216aa86..41982ddc78 100644 --- a/src/librustc/infer/mod.rs +++ b/src/librustc/infer/mod.rs @@ -24,6 +24,7 @@ use middle::free_region::FreeRegionMap; use middle::mem_categorization as mc; use middle::mem_categorization::McResult; use middle::region::CodeExtent; +use mir::tcx::LvalueTy; use ty::subst; use ty::subst::Substs; use ty::subst::Subst; @@ -31,11 +32,11 @@ use ty::adjustment; use ty::{TyVid, IntVid, FloatVid}; use ty::{self, Ty, TyCtxt}; use ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric}; -use ty::fold::{TypeFolder, TypeFoldable}; +use ty::fold::TypeFoldable; use ty::relate::{Relate, RelateResult, TypeRelation}; use 
traits::{self, PredicateObligations, ProjectionMode}; use rustc_data_structures::unify::{self, UnificationTable}; -use std::cell::{RefCell, Ref}; +use std::cell::{Cell, RefCell, Ref, RefMut}; use std::fmt; use syntax::ast; use syntax::codemap; @@ -45,7 +46,6 @@ use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap}; use self::combine::CombineFields; use self::region_inference::{RegionVarBindings, RegionSnapshot}; -use self::error_reporting::ErrorReporting; use self::unify_key::ToType; pub mod bivariate; @@ -73,10 +73,36 @@ pub type Bound = Option; pub type UnitResult<'tcx> = RelateResult<'tcx, ()>; // "unify result" pub type FixupResult = Result; // "fixup result" -pub struct InferCtxt<'a, 'tcx: 'a> { - pub tcx: &'a TyCtxt<'tcx>, +/// A version of &ty::Tables which can be global or local. +/// Only the local version supports borrow_mut. +#[derive(Copy, Clone)] +pub enum InferTables<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + Global(&'a RefCell>), + Local(&'a RefCell>) +} + +impl<'a, 'gcx, 'tcx> InferTables<'a, 'gcx, 'tcx> { + pub fn borrow(self) -> Ref<'a, ty::Tables<'tcx>> { + match self { + InferTables::Global(tables) => tables.borrow(), + InferTables::Local(tables) => tables.borrow() + } + } + + pub fn borrow_mut(self) -> RefMut<'a, ty::Tables<'tcx>> { + match self { + InferTables::Global(_) => { + bug!("InferTables: infcx.tables.borrow_mut() outside of type-checking"); + } + InferTables::Local(tables) => tables.borrow_mut() + } + } +} + +pub struct InferCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + pub tcx: TyCtxt<'a, 'gcx, 'tcx>, - pub tables: &'a RefCell>, + pub tables: InferTables<'a, 'gcx, 'tcx>, // We instantiate UnificationTable with bounds because the // types that might instantiate a general type variable have an @@ -90,9 +116,16 @@ pub struct InferCtxt<'a, 'tcx: 'a> { float_unification_table: RefCell>, // For region variables. - region_vars: RegionVarBindings<'a, 'tcx>, + region_vars: RegionVarBindings<'a, 'gcx, 'tcx>, + + pub parameter_environment: ty::ParameterEnvironment<'gcx>, + + /// Caches the results of trait selection. This cache is used + /// for things that have to do with the parameters in scope. + pub selection_cache: traits::SelectionCache<'tcx>, - pub parameter_environment: ty::ParameterEnvironment<'a, 'tcx>, + /// Caches the results of trait evaluation. + pub evaluation_cache: traits::EvaluationCache<'tcx>, // the set of predicates on which errors have been reported, to // avoid reporting the same error twice. @@ -110,12 +143,31 @@ pub struct InferCtxt<'a, 'tcx: 'a> { // documentation for `ProjectionMode`. projection_mode: ProjectionMode, + // When an error occurs, we want to avoid reporting "derived" + // errors that are due to this original failure. Normally, we + // handle this with the `err_count_on_creation` count, which + // basically just tracks how many errors were reported when we + // started type-checking a fn and checks to see if any new errors + // have been reported since then. Not great, but it works. + // + // However, when errors originated in other passes -- notably + // resolve -- this heuristic breaks down. Therefore, we have this + // auxiliary flag that one can set whenever one creates a + // type-error that is due to an error in a prior pass. + // + // Don't read this flag directly, call `is_tainted_by_errors()` + // and `set_tainted_by_errors()`. + tainted_by_errors_flag: Cell, + + // Track how many errors were reported when this infcx is created. 
+ // If the number of errors increases, that's also a sign (line + // `tained_by_errors`) to avoid reporting certain kinds of errors. err_count_on_creation: usize, } /// A map returned by `skolemize_late_bound_regions()` indicating the skolemized /// region that each late-bound region was replaced with. -pub type SkolemizationMap = FnvHashMap; +pub type SkolemizationMap = FnvHashMap; /// Why did we require that the two types be related? /// @@ -347,148 +399,140 @@ pub enum FixupError { UnresolvedTy(TyVid) } -pub fn fixup_err_to_string(f: FixupError) -> String { - use self::FixupError::*; - - match f { - UnresolvedIntTy(_) => { - "cannot determine the type of this integer; add a suffix to \ - specify the type explicitly".to_string() - } - UnresolvedFloatTy(_) => { - "cannot determine the type of this number; add a suffix to specify \ - the type explicitly".to_string() - } - UnresolvedTy(_) => "unconstrained type".to_string(), - } -} +impl fmt::Display for FixupError { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + use self::FixupError::*; -pub fn new_infer_ctxt<'a, 'tcx>(tcx: &'a TyCtxt<'tcx>, - tables: &'a RefCell>, - param_env: Option>, - projection_mode: ProjectionMode) - -> InferCtxt<'a, 'tcx> { - InferCtxt { - tcx: tcx, - tables: tables, - type_variables: RefCell::new(type_variable::TypeVariableTable::new()), - int_unification_table: RefCell::new(UnificationTable::new()), - float_unification_table: RefCell::new(UnificationTable::new()), - region_vars: RegionVarBindings::new(tcx), - parameter_environment: param_env.unwrap_or(tcx.empty_parameter_environment()), - reported_trait_errors: RefCell::new(FnvHashSet()), - normalize: false, - projection_mode: projection_mode, - err_count_on_creation: tcx.sess.err_count() + match *self { + UnresolvedIntTy(_) => { + write!(f, "cannot determine the type of this integer; \ + add a suffix to specify the type explicitly") + } + UnresolvedFloatTy(_) => { + write!(f, "cannot determine the type of this number; \ + add a suffix to specify the type explicitly") + } + UnresolvedTy(_) => write!(f, "unconstrained type") + } } } -pub fn normalizing_infer_ctxt<'a, 'tcx>(tcx: &'a TyCtxt<'tcx>, - tables: &'a RefCell>, - projection_mode: ProjectionMode) - -> InferCtxt<'a, 'tcx> { - let mut infcx = new_infer_ctxt(tcx, tables, None, projection_mode); - infcx.normalize = true; - infcx -} - -pub fn mk_subty<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>, - a_is_expected: bool, - origin: TypeOrigin, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> InferResult<'tcx, ()> -{ - debug!("mk_subty({:?} <: {:?})", a, b); - cx.sub_types(a_is_expected, origin, a, b) -} - -pub fn can_mk_subty<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) - -> UnitResult<'tcx> -{ - debug!("can_mk_subty({:?} <: {:?})", a, b); - cx.probe(|_| { - let trace = TypeTrace { - origin: TypeOrigin::Misc(codemap::DUMMY_SP), - values: Types(expected_found(true, a, b)) - }; - cx.sub(true, trace, &a, &b).map(|_| ()) - }) -} - -pub fn can_mk_eqty<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>, a: Ty<'tcx>, b: Ty<'tcx>) - -> UnitResult<'tcx> -{ - cx.can_equate(&a, &b) +/// Helper type of a temporary returned by tcx.infer_ctxt(...). +/// Necessary because we can't write the following bound: +/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>). 
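`InferCtxtBuilder` exists because the closure passed to `enter` must accept an `InferCtxt` whose `'tcx` is tied to arenas owned by the builder itself, a bound that cannot be written directly, as the comment above notes. The shape of the pattern, reduced to a self-contained toy with invented names:

```rust
struct Arena {
    cells: Vec<String>, // storage owned by the builder; the context borrows from it
}

struct CtxtBuilder {
    arena: Arena,
}

struct Ctxt<'tcx> {
    interned: Vec<&'tcx str>, // data whose lifetime is tied to the builder's arena
}

impl CtxtBuilder {
    fn new() -> CtxtBuilder {
        CtxtBuilder { arena: Arena { cells: vec!["u8".to_string(), "bool".to_string()] } }
    }

    /// Run `f` with a context borrowing from the builder's arena. The caller
    /// never names the context's lifetime; `enter` picks it, so `f` has to be
    /// ready to accept any lifetime (the `for<'tcx>` bound).
    fn enter<F, R>(&mut self, f: F) -> R
        where F: for<'tcx> FnOnce(Ctxt<'tcx>) -> R
    {
        let cx = Ctxt {
            interned: self.arena.cells.iter().map(|s| s.as_str()).collect(),
        };
        f(cx)
    }
}

fn main() {
    let mut builder = CtxtBuilder::new();
    let n = builder.enter(|cx| cx.interned.len());
    println!("{} interned entries", n);
}
```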
+pub struct InferCtxtBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + global_tcx: TyCtxt<'a, 'gcx, 'gcx>, + arenas: ty::CtxtArenas<'tcx>, + tables: Option>>, + param_env: Option>, + projection_mode: ProjectionMode, + normalize: bool } -pub fn mk_subr<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>, - origin: SubregionOrigin<'tcx>, - a: ty::Region, - b: ty::Region) { - debug!("mk_subr({:?} <: {:?})", a, b); - let snapshot = cx.region_vars.start_snapshot(); - cx.region_vars.make_subregion(origin, a, b); - cx.region_vars.commit(snapshot); -} +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'gcx> { + pub fn infer_ctxt(self, + tables: Option>, + param_env: Option>, + projection_mode: ProjectionMode) + -> InferCtxtBuilder<'a, 'gcx, 'tcx> { + InferCtxtBuilder { + global_tcx: self, + arenas: ty::CtxtArenas::new(), + tables: tables.map(RefCell::new), + param_env: param_env, + projection_mode: projection_mode, + normalize: false + } + } -pub fn mk_eqty<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>, - a_is_expected: bool, - origin: TypeOrigin, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> InferResult<'tcx, ()> -{ - debug!("mk_eqty({:?} <: {:?})", a, b); - cx.eq_types(a_is_expected, origin, a, b) -} + pub fn normalizing_infer_ctxt(self, projection_mode: ProjectionMode) + -> InferCtxtBuilder<'a, 'gcx, 'tcx> { + InferCtxtBuilder { + global_tcx: self, + arenas: ty::CtxtArenas::new(), + tables: None, + param_env: None, + projection_mode: projection_mode, + normalize: false + } + } -pub fn mk_eq_trait_refs<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>, - a_is_expected: bool, - origin: TypeOrigin, - a: ty::TraitRef<'tcx>, - b: ty::TraitRef<'tcx>) - -> InferResult<'tcx, ()> -{ - debug!("mk_eq_trait_refs({:?} = {:?})", a, b); - cx.eq_trait_refs(a_is_expected, origin, a, b) + /// Fake InferCtxt with the global tcx. Used by pre-MIR borrowck + /// for MemCategorizationContext/ExprUseVisitor. + /// If any inference functionality is used, ICEs will occur. 
+ pub fn borrowck_fake_infer_ctxt(self, param_env: ty::ParameterEnvironment<'gcx>) + -> InferCtxt<'a, 'gcx, 'gcx> { + InferCtxt { + tcx: self, + tables: InferTables::Global(&self.tables), + type_variables: RefCell::new(type_variable::TypeVariableTable::new()), + int_unification_table: RefCell::new(UnificationTable::new()), + float_unification_table: RefCell::new(UnificationTable::new()), + region_vars: RegionVarBindings::new(self), + parameter_environment: param_env, + selection_cache: traits::SelectionCache::new(), + evaluation_cache: traits::EvaluationCache::new(), + reported_trait_errors: RefCell::new(FnvHashSet()), + normalize: false, + projection_mode: ProjectionMode::AnyFinal, + tainted_by_errors_flag: Cell::new(false), + err_count_on_creation: self.sess.err_count() + } + } } -pub fn mk_sub_poly_trait_refs<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>, - a_is_expected: bool, - origin: TypeOrigin, - a: ty::PolyTraitRef<'tcx>, - b: ty::PolyTraitRef<'tcx>) - -> InferResult<'tcx, ()> -{ - debug!("mk_sub_poly_trait_refs({:?} <: {:?})", a, b); - cx.sub_poly_trait_refs(a_is_expected, origin, a, b) +impl<'a, 'gcx, 'tcx> InferCtxtBuilder<'a, 'gcx, 'tcx> { + pub fn enter(&'tcx mut self, f: F) -> R + where F: for<'b> FnOnce(InferCtxt<'b, 'gcx, 'tcx>) -> R + { + let InferCtxtBuilder { + global_tcx, + ref arenas, + ref tables, + ref mut param_env, + projection_mode, + normalize + } = *self; + let tables = if let Some(ref tables) = *tables { + InferTables::Local(tables) + } else { + InferTables::Global(&global_tcx.tables) + }; + let param_env = param_env.take().unwrap_or_else(|| { + global_tcx.empty_parameter_environment() + }); + global_tcx.enter_local(arenas, |tcx| f(InferCtxt { + tcx: tcx, + tables: tables, + type_variables: RefCell::new(type_variable::TypeVariableTable::new()), + int_unification_table: RefCell::new(UnificationTable::new()), + float_unification_table: RefCell::new(UnificationTable::new()), + region_vars: RegionVarBindings::new(tcx), + parameter_environment: param_env, + selection_cache: traits::SelectionCache::new(), + evaluation_cache: traits::EvaluationCache::new(), + reported_trait_errors: RefCell::new(FnvHashSet()), + normalize: normalize, + projection_mode: projection_mode, + tainted_by_errors_flag: Cell::new(false), + err_count_on_creation: tcx.sess.err_count() + })) + } } -pub fn mk_eq_impl_headers<'a, 'tcx>(cx: &InferCtxt<'a, 'tcx>, - a_is_expected: bool, - origin: TypeOrigin, - a: &ty::ImplHeader<'tcx>, - b: &ty::ImplHeader<'tcx>) - -> InferResult<'tcx, ()> -{ - debug!("mk_eq_impl_header({:?} = {:?})", a, b); - match (a.trait_ref, b.trait_ref) { - (Some(a_ref), Some(b_ref)) => mk_eq_trait_refs(cx, a_is_expected, origin, a_ref, b_ref), - (None, None) => mk_eqty(cx, a_is_expected, origin, a.self_ty, b.self_ty), - _ => bug!("mk_eq_impl_headers given mismatched impl kinds"), +impl ExpectedFound { + fn new(a_is_expected: bool, a: T, b: T) -> Self { + if a_is_expected { + ExpectedFound {expected: a, found: b} + } else { + ExpectedFound {expected: b, found: a} + } } } -fn expected_found(a_is_expected: bool, - a: T, - b: T) - -> ExpectedFound -{ - if a_is_expected { - ExpectedFound {expected: a, found: b} - } else { - ExpectedFound {expected: b, found: a} +impl<'tcx, T> InferOk<'tcx, T> { + fn unit(self) -> InferOk<'tcx, ()> { + InferOk { value: (), obligations: self.obligations } } } @@ -500,92 +544,138 @@ pub struct CombinedSnapshot { region_vars_snapshot: RegionSnapshot, } -// NOTE: Callable from trans only! 
-pub fn normalize_associated_type<'tcx,T>(tcx: &TyCtxt<'tcx>, value: &T) -> T - where T : TypeFoldable<'tcx> -{ - debug!("normalize_associated_type(t={:?})", value); +/// Helper trait for shortening the lifetimes inside a +/// value for post-type-checking normalization. +pub trait TransNormalize<'gcx>: TypeFoldable<'gcx> { + fn trans_normalize<'a, 'tcx>(&self, infcx: &InferCtxt<'a, 'gcx, 'tcx>) -> Self; +} - let value = tcx.erase_regions(value); +macro_rules! items { ($($item:item)+) => ($($item)+) } +macro_rules! impl_trans_normalize { + ($lt_gcx:tt, $($ty:ty),+) => { + items!($(impl<$lt_gcx> TransNormalize<$lt_gcx> for $ty { + fn trans_normalize<'a, 'tcx>(&self, + infcx: &InferCtxt<'a, $lt_gcx, 'tcx>) + -> Self { + infcx.normalize_projections_in(self) + } + })+); + } +} - if !value.has_projection_types() { - return value; +impl_trans_normalize!('gcx, + Ty<'gcx>, + &'gcx Substs<'gcx>, + ty::FnSig<'gcx>, + ty::FnOutput<'gcx>, + &'gcx ty::BareFnTy<'gcx>, + ty::ClosureSubsts<'gcx>, + ty::PolyTraitRef<'gcx> +); + +impl<'gcx> TransNormalize<'gcx> for LvalueTy<'gcx> { + fn trans_normalize<'a, 'tcx>(&self, infcx: &InferCtxt<'a, 'gcx, 'tcx>) -> Self { + match *self { + LvalueTy::Ty { ty } => LvalueTy::Ty { ty: ty.trans_normalize(infcx) }, + LvalueTy::Downcast { adt_def, substs, variant_index } => { + LvalueTy::Downcast { + adt_def: adt_def, + substs: substs.trans_normalize(infcx), + variant_index: variant_index + } + } + } } +} - let infcx = new_infer_ctxt(tcx, &tcx.tables, None, ProjectionMode::Any); - let mut selcx = traits::SelectionContext::new(&infcx); - let cause = traits::ObligationCause::dummy(); - let traits::Normalized { value: result, obligations } = - traits::normalize(&mut selcx, cause, &value); +// NOTE: Callable from trans only! +impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { + pub fn normalize_associated_type(self, value: &T) -> T + where T: TransNormalize<'tcx> + { + debug!("normalize_associated_type(t={:?})", value); - debug!("normalize_associated_type: result={:?} obligations={:?}", - result, - obligations); + let value = self.erase_regions(value); - let mut fulfill_cx = traits::FulfillmentContext::new(); + if !value.has_projection_types() { + return value; + } - for obligation in obligations { - fulfill_cx.register_predicate_obligation(&infcx, obligation); + self.infer_ctxt(None, None, ProjectionMode::Any).enter(|infcx| { + value.trans_normalize(&infcx) + }) } - - drain_fulfillment_cx_or_panic(DUMMY_SP, &infcx, &mut fulfill_cx, &result) } -pub fn drain_fulfillment_cx_or_panic<'a,'tcx,T>(span: Span, - infcx: &InferCtxt<'a,'tcx>, - fulfill_cx: &mut traits::FulfillmentContext<'tcx>, - result: &T) - -> T - where T : TypeFoldable<'tcx> -{ - match drain_fulfillment_cx(infcx, fulfill_cx, result) { - Ok(v) => v, - Err(errors) => { - span_bug!( - span, - "Encountered errors `{:?}` fulfilling during trans", - errors); +impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { + fn normalize_projections_in(&self, value: &T) -> T::Lifted + where T: TypeFoldable<'tcx> + ty::Lift<'gcx> + { + let mut selcx = traits::SelectionContext::new(self); + let cause = traits::ObligationCause::dummy(); + let traits::Normalized { value: result, obligations } = + traits::normalize(&mut selcx, cause, value); + + debug!("normalize_projections_in: result={:?} obligations={:?}", + result, obligations); + + let mut fulfill_cx = traits::FulfillmentContext::new(); + + for obligation in obligations { + fulfill_cx.register_predicate_obligation(self, obligation); } + + self.drain_fulfillment_cx_or_panic(DUMMY_SP, &mut fulfill_cx, 
&result) } -} -/// Finishes processes any obligations that remain in the fulfillment -/// context, and then "freshens" and returns `result`. This is -/// primarily used during normalization and other cases where -/// processing the obligations in `fulfill_cx` may cause type -/// inference variables that appear in `result` to be unified, and -/// hence we need to process those obligations to get the complete -/// picture of the type. -pub fn drain_fulfillment_cx<'a,'tcx,T>(infcx: &InferCtxt<'a,'tcx>, - fulfill_cx: &mut traits::FulfillmentContext<'tcx>, - result: &T) - -> Result>> - where T : TypeFoldable<'tcx> -{ - debug!("drain_fulfillment_cx(result={:?})", - result); - - // In principle, we only need to do this so long as `result` - // contains unbound type parameters. It could be a slight - // optimization to stop iterating early. - match fulfill_cx.select_all_or_error(infcx) { - Ok(()) => { } - Err(errors) => { - return Err(errors); + pub fn drain_fulfillment_cx_or_panic(&self, + span: Span, + fulfill_cx: &mut traits::FulfillmentContext<'tcx>, + result: &T) + -> T::Lifted + where T: TypeFoldable<'tcx> + ty::Lift<'gcx> + { + let when = "resolving bounds after type-checking"; + let v = match self.drain_fulfillment_cx(fulfill_cx, result) { + Ok(v) => v, + Err(errors) => { + span_bug!(span, "Encountered errors `{:?}` {}", errors, when); + } + }; + + match self.tcx.lift_to_global(&v) { + Some(v) => v, + None => { + span_bug!(span, "Uninferred types/regions in `{:?}` {}", v, when); + } } } - let result = infcx.resolve_type_vars_if_possible(result); - Ok(infcx.tcx.erase_regions(&result)) -} + /// Finishes processes any obligations that remain in the fulfillment + /// context, and then "freshens" and returns `result`. This is + /// primarily used during normalization and other cases where + /// processing the obligations in `fulfill_cx` may cause type + /// inference variables that appear in `result` to be unified, and + /// hence we need to process those obligations to get the complete + /// picture of the type. + pub fn drain_fulfillment_cx(&self, + fulfill_cx: &mut traits::FulfillmentContext<'tcx>, + result: &T) + -> Result>> + where T : TypeFoldable<'tcx> + { + debug!("drain_fulfillment_cx(result={:?})", + result); -impl<'tcx, T> InferOk<'tcx, T> { - fn unit(self) -> InferOk<'tcx, ()> { - InferOk { value: (), obligations: self.obligations } + // In principle, we only need to do this so long as `result` + // contains unbound type parameters. It could be a slight + // optimization to stop iterating early. 
+ fulfill_cx.select_all_or_error(self)?; + + let result = self.resolve_type_vars_if_possible(result); + Ok(self.tcx.erase_regions(&result)) } -} -impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn projection_mode(&self) -> ProjectionMode { self.projection_mode } @@ -601,7 +691,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } } - pub fn freshener<'b>(&'b self) -> TypeFreshener<'b, 'tcx> { + pub fn freshener<'b>(&'b self) -> TypeFreshener<'b, 'gcx, 'tcx> { freshen::TypeFreshener::new(self) } @@ -672,8 +762,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { } fn combine_fields(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>) - -> CombineFields<'a, 'tcx> - { + -> CombineFields<'a, 'gcx, 'tcx> { CombineFields { infcx: self, a_is_expected: a_is_expected, @@ -685,7 +774,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn equate(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T) -> InferResult<'tcx, T> - where T: Relate<'a, 'tcx> + where T: Relate<'tcx> { let mut equate = self.combine_fields(a_is_expected, trace).equate(); let result = equate.relate(a, b); @@ -694,7 +783,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn sub(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T) -> InferResult<'tcx, T> - where T: Relate<'a, 'tcx> + where T: Relate<'tcx> { let mut sub = self.combine_fields(a_is_expected, trace).sub(); let result = sub.relate(a, b); @@ -703,7 +792,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn lub(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T) -> InferResult<'tcx, T> - where T: Relate<'a, 'tcx> + where T: Relate<'tcx> { let mut lub = self.combine_fields(a_is_expected, trace).lub(); let result = lub.relate(a, b); @@ -712,7 +801,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn glb(&'a self, a_is_expected: bool, trace: TypeTrace<'tcx>, a: &T, b: &T) -> InferResult<'tcx, T> - where T: Relate<'a, 'tcx> + where T: Relate<'tcx> { let mut glb = self.combine_fields(a_is_expected, trace).glb(); let result = glb.relate(a, b); @@ -794,6 +883,17 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { r } + // Execute `f` in a snapshot, and commit the bindings it creates + pub fn in_snapshot(&self, f: F) -> T where + F: FnOnce(&CombinedSnapshot) -> T + { + debug!("in_snapshot()"); + let snapshot = self.start_snapshot(); + let r = f(&snapshot); + self.commit_from(snapshot); + r + } + /// Execute `f` and commit only the region bindings if successful. /// The function f must be very careful not to leak any non-region /// variables that get created. 
@@ -861,6 +961,18 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { }) } + pub fn can_sub_types(&self, + a: Ty<'tcx>, + b: Ty<'tcx>) + -> UnitResult<'tcx> + { + self.probe(|_| { + let origin = TypeOrigin::Misc(codemap::DUMMY_SP); + let trace = TypeTrace::types(origin, true, a, b); + self.sub(true, trace, &a, &b).map(|_| ()) + }) + } + pub fn eq_types(&self, a_is_expected: bool, origin: TypeOrigin, @@ -881,18 +993,31 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { b: ty::TraitRef<'tcx>) -> InferResult<'tcx, ()> { - debug!("eq_trait_refs({:?} <: {:?})", - a, - b); + debug!("eq_trait_refs({:?} = {:?})", a, b); self.commit_if_ok(|_| { let trace = TypeTrace { origin: origin, - values: TraitRefs(expected_found(a_is_expected, a.clone(), b.clone())) + values: TraitRefs(ExpectedFound::new(a_is_expected, a, b)) }; self.equate(a_is_expected, trace, &a, &b).map(|ok| ok.unit()) }) } + pub fn eq_impl_headers(&self, + a_is_expected: bool, + origin: TypeOrigin, + a: &ty::ImplHeader<'tcx>, + b: &ty::ImplHeader<'tcx>) + -> InferResult<'tcx, ()> + { + debug!("eq_impl_header({:?} = {:?})", a, b); + match (a.trait_ref, b.trait_ref) { + (Some(a_ref), Some(b_ref)) => self.eq_trait_refs(a_is_expected, origin, a_ref, b_ref), + (None, None) => self.eq_types(a_is_expected, origin, a.self_ty, b.self_ty), + _ => bug!("mk_eq_impl_headers given mismatched impl kinds"), + } + } + pub fn sub_poly_trait_refs(&self, a_is_expected: bool, origin: TypeOrigin, @@ -900,52 +1025,22 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { b: ty::PolyTraitRef<'tcx>) -> InferResult<'tcx, ()> { - debug!("sub_poly_trait_refs({:?} <: {:?})", - a, - b); + debug!("sub_poly_trait_refs({:?} <: {:?})", a, b); self.commit_if_ok(|_| { let trace = TypeTrace { origin: origin, - values: PolyTraitRefs(expected_found(a_is_expected, a.clone(), b.clone())) + values: PolyTraitRefs(ExpectedFound::new(a_is_expected, a, b)) }; self.sub(a_is_expected, trace, &a, &b).map(|ok| ok.unit()) }) } - pub fn skolemize_late_bound_regions(&self, - value: &ty::Binder, - snapshot: &CombinedSnapshot) - -> (T, SkolemizationMap) - where T : TypeFoldable<'tcx> - { - /*! See `higher_ranked::skolemize_late_bound_regions` */ - - higher_ranked::skolemize_late_bound_regions(self, value, snapshot) - } - - pub fn leak_check(&self, - skol_map: &SkolemizationMap, - snapshot: &CombinedSnapshot) - -> UnitResult<'tcx> - { - /*! See `higher_ranked::leak_check` */ - - match higher_ranked::leak_check(self, skol_map, snapshot) { - Ok(()) => Ok(()), - Err((br, r)) => Err(TypeError::RegionsInsufficientlyPolymorphic(br, r)) - } - } - - pub fn plug_leaks(&self, - skol_map: SkolemizationMap, - snapshot: &CombinedSnapshot, - value: &T) - -> T - where T : TypeFoldable<'tcx> - { - /*! 
See `higher_ranked::plug_leaks` */ - - higher_ranked::plug_leaks(self, skol_map, snapshot, value) + pub fn sub_regions(&self, + origin: SubregionOrigin<'tcx>, + a: ty::Region, + b: ty::Region) { + debug!("sub_regions({:?} <: {:?})", a, b); + self.region_vars.make_subregion(origin, a, b); } pub fn equality_predicate(&self, @@ -957,8 +1052,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { let (ty::EquatePredicate(a, b), skol_map) = self.skolemize_late_bound_regions(predicate, snapshot); let origin = TypeOrigin::EquatePredicate(span); - let eqty_ok = mk_eqty(self, false, origin, a, b)?; - self.leak_check(&skol_map, snapshot).map(|_| eqty_ok.unit()) + let eqty_ok = self.eq_types(false, origin, a, b)?; + self.leak_check(false, &skol_map, snapshot).map(|_| eqty_ok.unit()) }) } @@ -971,8 +1066,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { let (ty::OutlivesPredicate(r_a, r_b), skol_map) = self.skolemize_late_bound_regions(predicate, snapshot); let origin = RelateRegionParamBound(span); - let () = mk_subr(self, origin, r_b, r_a); // `b : a` ==> `a <= b` - self.leak_check(&skol_map, snapshot) + self.sub_regions(origin, r_b, r_a); // `b : a` ==> `a <= b` + self.leak_check(false, &skol_map, snapshot) }) } @@ -1055,7 +1150,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { pub fn fresh_substs_for_generics(&self, span: Span, generics: &ty::Generics<'tcx>) - -> subst::Substs<'tcx> + -> &'tcx subst::Substs<'tcx> { let type_params = subst::VecPerParamSpace::empty(); @@ -1073,7 +1168,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { generics.types.get_slice(*space)); } - return substs; + self.tcx.mk_substs(substs) } /// Given a set of generics defined on a trait, returns a substitution mapping each output @@ -1128,15 +1223,36 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { .map(|method| resolve_ty(method.ty))) } - pub fn errors_since_creation(&self) -> bool { - self.tcx.sess.err_count() - self.err_count_on_creation != 0 + /// True if errors have been reported since this infcx was + /// created. This is sometimes used as a heuristic to skip + /// reporting errors that often occur as a result of earlier + /// errors, but where it's hard to be 100% sure (e.g., unresolved + /// inference variables, regionck errors). + pub fn is_tainted_by_errors(&self) -> bool { + debug!("is_tainted_by_errors(err_count={}, err_count_on_creation={}, \ + tainted_by_errors_flag={})", + self.tcx.sess.err_count(), + self.err_count_on_creation, + self.tainted_by_errors_flag.get()); + + if self.tcx.sess.err_count() > self.err_count_on_creation { + return true; // errors reported since this infcx was made + } + self.tainted_by_errors_flag.get() + } + + /// Set the "tainted by errors" flag to true. We call this when we + /// observe an error from a prior pass. + pub fn set_tainted_by_errors(&self) { + debug!("set_tainted_by_errors()"); + self.tainted_by_errors_flag.set(true) } pub fn node_type(&self, id: ast::NodeId) -> Ty<'tcx> { match self.tables.borrow().node_types.get(&id) { Some(&t) => t, // FIXME - None if self.errors_since_creation() => + None if self.is_tainted_by_errors() => self.tcx.types.err, None => { bug!("no type for node {}: {} in fcx", @@ -1158,7 +1274,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { free_regions: &FreeRegionMap, subject_node_id: ast::NodeId) { let errors = self.region_vars.resolve_regions(free_regions, subject_node_id); - if !self.errors_since_creation() { + if !self.is_tainted_by_errors() { // As a heuristic, just skip reporting region errors // altogether if other errors have been reported while // this infcx was in use. 
This is totally hokey but @@ -1444,8 +1560,8 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { self.region_vars.verify_generic_bound(origin, kind, a, bound); } - pub fn can_equate<'b,T>(&'b self, a: &T, b: &T) -> UnitResult<'tcx> - where T: Relate<'b,'tcx> + fmt::Debug + pub fn can_equate(&self, a: &T, b: &T) -> UnitResult<'tcx> + where T: Relate<'tcx> + fmt::Debug { debug!("can_equate({:?}, {:?})", a, b); self.probe(|_| { @@ -1453,12 +1569,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { // anyhow. We should make this typetrace stuff more // generic so we don't have to do anything quite this // terrible. - let e = self.tcx.types.err; - let trace = TypeTrace { - origin: TypeOrigin::Misc(codemap::DUMMY_SP), - values: Types(expected_found(true, e, e)) - }; - self.equate(true, trace, a, b) + self.equate(true, TypeTrace::dummy(self.tcx), a, b) }).map(|_| ()) } @@ -1472,24 +1583,25 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { self.resolve_type_vars_or_error(&ty) } - pub fn tables_are_tcx_tables(&self) -> bool { - let tables: &RefCell = &self.tables; - let tcx_tables: &RefCell = &self.tcx.tables; - tables as *const _ == tcx_tables as *const _ - } - pub fn type_moves_by_default(&self, ty: Ty<'tcx>, span: Span) -> bool { let ty = self.resolve_type_vars_if_possible(&ty); - if ty.needs_infer() || - (ty.has_closure_types() && !self.tables_are_tcx_tables()) { - // this can get called from typeck (by euv), and moves_by_default - // rightly refuses to work with inference variables, but - // moves_by_default has a cache, which we want to use in other - // cases. - !traits::type_known_to_meet_builtin_bound(self, ty, ty::BoundCopy, span) - } else { - ty.moves_by_default(&self.parameter_environment, span) + if let Some(ty) = self.tcx.lift_to_global(&ty) { + // Even if the type may have no inference variables, during + // type-checking closure types are in local tables only. + let local_closures = match self.tables { + InferTables::Local(_) => ty.has_closure_types(), + InferTables::Global(_) => false + }; + if !local_closures { + return ty.moves_by_default(self.tcx.global_tcx(), self.param_env(), span); + } } + + // this can get called from typeck (by euv), and moves_by_default + // rightly refuses to work with inference variables, but + // moves_by_default has a cache, which we want to use in other + // cases. + !traits::type_known_to_meet_builtin_bound(self, ty, ty::BoundCopy, span) } pub fn node_method_ty(&self, method_call: ty::MethodCall) @@ -1532,7 +1644,7 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { self.tables.borrow().upvar_capture_map.get(&upvar_id).cloned() } - pub fn param_env<'b>(&'b self) -> &'b ty::ParameterEnvironment<'b,'tcx> { + pub fn param_env(&self) -> &ty::ParameterEnvironment<'gcx> { &self.parameter_environment } @@ -1547,30 +1659,37 @@ impl<'a, 'tcx> InferCtxt<'a, 'tcx> { // during trans, we see closure ids from other traits. // That may require loading the closure data out of the // cstore. 
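The `is_tainted_by_errors`/`set_tainted_by_errors` pair added earlier in this hunk compares the session's current error count against the count recorded when the inference context was created, and additionally keeps an explicit flag. A stripped-down sketch of that heuristic, with toy `Session` and `InferCtxt` types standing in for the real ones:

use std::cell::Cell;

struct Session {
    err_count: Cell<usize>,
}

struct InferCtxt<'a> {
    sess: &'a Session,
    err_count_on_creation: usize,
    tainted_by_errors_flag: Cell<bool>,
}

impl<'a> InferCtxt<'a> {
    fn new(sess: &'a Session) -> InferCtxt<'a> {
        InferCtxt {
            sess: sess,
            err_count_on_creation: sess.err_count.get(),
            tainted_by_errors_flag: Cell::new(false),
        }
    }

    // Tainted if the session reported errors after this context was created,
    // or if a prior pass explicitly flagged it.
    fn is_tainted_by_errors(&self) -> bool {
        self.sess.err_count.get() > self.err_count_on_creation
            || self.tainted_by_errors_flag.get()
    }

    fn set_tainted_by_errors(&self) {
        self.tainted_by_errors_flag.set(true)
    }
}

fn main() {
    let sess = Session { err_count: Cell::new(0) };
    let infcx = InferCtxt::new(&sess);
    assert!(!infcx.is_tainted_by_errors());
    sess.err_count.set(1); // an earlier pass reports an error
    assert!(infcx.is_tainted_by_errors());
    infcx.set_tainted_by_errors();
    assert!(infcx.is_tainted_by_errors());
}
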
- Some(ty::Tables::closure_kind(&self.tables, self.tcx, def_id)) + Some(self.tcx.closure_kind(def_id)) } } pub fn closure_type(&self, def_id: DefId, - substs: &ty::ClosureSubsts<'tcx>) + substs: ty::ClosureSubsts<'tcx>) -> ty::ClosureTy<'tcx> { - let closure_ty = - ty::Tables::closure_type(self.tables, - self.tcx, - def_id, - substs); + if let InferTables::Local(tables) = self.tables { + if let Some(ty) = tables.borrow().closure_tys.get(&def_id) { + return ty.subst(self.tcx, substs.func_substs); + } + } + let closure_ty = self.tcx.closure_type(def_id, substs); if self.normalize { - normalize_associated_type(&self.tcx, &closure_ty) + let closure_ty = self.tcx.erase_regions(&closure_ty); + + if !closure_ty.has_projection_types() { + return closure_ty; + } + + self.normalize_projections_in(&closure_ty) } else { closure_ty } } } -impl<'tcx> TypeTrace<'tcx> { +impl<'a, 'gcx, 'tcx> TypeTrace<'tcx> { pub fn span(&self) -> Span { self.origin.span() } @@ -1582,11 +1701,11 @@ impl<'tcx> TypeTrace<'tcx> { -> TypeTrace<'tcx> { TypeTrace { origin: origin, - values: Types(expected_found(a_is_expected, a, b)) + values: Types(ExpectedFound::new(a_is_expected, a, b)) } } - pub fn dummy(tcx: &TyCtxt<'tcx>) -> TypeTrace<'tcx> { + pub fn dummy(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> TypeTrace<'tcx> { TypeTrace { origin: TypeOrigin::Misc(codemap::DUMMY_SP), values: Types(ExpectedFound { diff --git a/src/librustc/infer/region_inference/graphviz.rs b/src/librustc/infer/region_inference/graphviz.rs index e611c00569..c9037d6b12 100644 --- a/src/librustc/infer/region_inference/graphviz.rs +++ b/src/librustc/infer/region_inference/graphviz.rs @@ -53,8 +53,10 @@ graphs will be printed. \n\ "); } -pub fn maybe_print_constraints_for<'a, 'tcx>(region_vars: &RegionVarBindings<'a, 'tcx>, - subject_node: ast::NodeId) { +pub fn maybe_print_constraints_for<'a, 'gcx, 'tcx>( + region_vars: &RegionVarBindings<'a, 'gcx, 'tcx>, + subject_node: ast::NodeId) +{ let tcx = region_vars.tcx; if !region_vars.tcx.sess.opts.debugging_opts.print_region_graph { @@ -118,8 +120,8 @@ pub fn maybe_print_constraints_for<'a, 'tcx>(region_vars: &RegionVarBindings<'a, } } -struct ConstraintGraph<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, +struct ConstraintGraph<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx>, graph_name: String, map: &'a FnvHashMap>, node_ids: FnvHashMap, @@ -138,11 +140,11 @@ enum Edge { EnclScope(CodeExtent, CodeExtent), } -impl<'a, 'tcx> ConstraintGraph<'a, 'tcx> { - fn new(tcx: &'a TyCtxt<'tcx>, +impl<'a, 'gcx, 'tcx> ConstraintGraph<'a, 'gcx, 'tcx> { + fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, name: String, map: &'a ConstraintMap<'tcx>) - -> ConstraintGraph<'a, 'tcx> { + -> ConstraintGraph<'a, 'gcx, 'tcx> { let mut i = 0; let mut node_ids = FnvHashMap(); { @@ -173,7 +175,7 @@ impl<'a, 'tcx> ConstraintGraph<'a, 'tcx> { } } -impl<'a, 'tcx> dot::Labeller<'a> for ConstraintGraph<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> dot::Labeller<'a> for ConstraintGraph<'a, 'gcx, 'tcx> { type Node = Node; type Edge = Edge; fn graph_id(&self) -> dot::Id { @@ -226,7 +228,7 @@ fn edge_to_nodes(e: &Edge) -> (Node, Node) { } } -impl<'a, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'gcx, 'tcx> { type Node = Node; type Edge = Edge; fn nodes(&self) -> dot::Nodes { @@ -258,10 +260,10 @@ impl<'a, 'tcx> dot::GraphWalk<'a> for ConstraintGraph<'a, 'tcx> { pub type ConstraintMap<'tcx> = FnvHashMap>; -fn dump_region_constraints_to<'a, 'tcx: 'a>(tcx: &'a TyCtxt<'tcx>, - map: 
&ConstraintMap<'tcx>, - path: &str) - -> io::Result<()> { +fn dump_region_constraints_to<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + map: &ConstraintMap<'tcx>, + path: &str) + -> io::Result<()> { debug!("dump_region_constraints map (len: {}) path: {}", map.len(), path); diff --git a/src/librustc/infer/region_inference/mod.rs b/src/librustc/infer/region_inference/mod.rs index 2f610bf238..5312d03052 100644 --- a/src/librustc/infer/region_inference/mod.rs +++ b/src/librustc/infer/region_inference/mod.rs @@ -20,7 +20,7 @@ pub use self::VarValue::*; use super::{RegionVariableOrigin, SubregionOrigin, MiscVariable}; use super::unify_key; -use rustc_data_structures::graph::{self, Direction, NodeIndex}; +use rustc_data_structures::graph::{self, Direction, NodeIndex, OUTGOING}; use rustc_data_structures::unify::{self, UnificationTable}; use middle::free_region::FreeRegionMap; use ty::{self, Ty, TyCtxt}; @@ -190,8 +190,8 @@ impl SameRegions { pub type CombineMap = FnvHashMap; -pub struct RegionVarBindings<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, +pub struct RegionVarBindings<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx>, var_origins: RefCell>, // Constraints of the form `A <= B` introduced by the region @@ -253,8 +253,8 @@ pub struct RegionSnapshot { skolemization_count: u32, } -impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { - pub fn new(tcx: &'a TyCtxt<'tcx>) -> RegionVarBindings<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> { + pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> RegionVarBindings<'a, 'gcx, 'tcx> { RegionVarBindings { tcx: tcx, var_origins: RefCell::new(Vec::new()), @@ -600,7 +600,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { origin: SubregionOrigin<'tcx>, mut relate: F) -> Region - where F: FnMut(&RegionVarBindings<'a, 'tcx>, Region, Region) + where F: FnMut(&RegionVarBindings<'a, 'gcx, 'tcx>, Region, Region) { let vars = TwoRegions { a: a, b: b }; match self.combine_map(t).borrow().get(&vars) { @@ -816,7 +816,7 @@ struct RegionAndOrigin<'tcx> { type RegionGraph = graph::Graph<(), Constraint>; -impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> RegionVarBindings<'a, 'gcx, 'tcx> { fn infer_variable_values(&self, free_regions: &FreeRegionMap, errors: &mut Vec>, @@ -872,7 +872,7 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { let seeds: Vec<_> = givens.iter().cloned().collect(); for (fr, vid) in seeds { let seed_index = NodeIndex(vid.index as usize); - for succ_index in graph.depth_traverse(seed_index) { + for succ_index in graph.depth_traverse(seed_index, OUTGOING) { let succ_index = succ_index.0 as u32; if succ_index < self.num_vars() { let succ_vid = RegionVid { index: succ_index }; @@ -1240,20 +1240,17 @@ impl<'a, 'tcx> RegionVarBindings<'a, 'tcx> { orig_node_idx, node_idx); - // figure out the direction from which this node takes its - // values, and search for concrete regions etc in that direction - let dir = graph::INCOMING; process_edges(self, &mut state, graph, node_idx, dir); } let WalkState {result, dup_found, ..} = state; return (result, dup_found); - fn process_edges<'a, 'tcx>(this: &RegionVarBindings<'a, 'tcx>, - state: &mut WalkState<'tcx>, - graph: &RegionGraph, - source_vid: RegionVid, - dir: Direction) { + fn process_edges<'a, 'gcx, 'tcx>(this: &RegionVarBindings<'a, 'gcx, 'tcx>, + state: &mut WalkState<'tcx>, + graph: &RegionGraph, + source_vid: RegionVid, + dir: Direction) { debug!("process_edges(source_vid={:?}, dir={:?})", source_vid, dir); let source_node_index = NodeIndex(source_vid.index as usize); @@ -1362,8 
+1359,8 @@ impl<'tcx> fmt::Display for GenericKind<'tcx> { } } -impl<'tcx> GenericKind<'tcx> { - pub fn to_ty(&self, tcx: &TyCtxt<'tcx>) -> Ty<'tcx> { +impl<'a, 'gcx, 'tcx> GenericKind<'tcx> { + pub fn to_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { match *self { GenericKind::Param(ref p) => p.to_ty(tcx), GenericKind::Projection(ref p) => tcx.mk_projection(p.trait_ref.clone(), p.item_name), @@ -1371,7 +1368,7 @@ impl<'tcx> GenericKind<'tcx> { } } -impl VerifyBound { +impl<'a, 'gcx, 'tcx> VerifyBound { fn for_each_region(&self, f: &mut FnMut(ty::Region)) { match self { &VerifyBound::AnyRegion(ref rs) | @@ -1424,12 +1421,11 @@ impl VerifyBound { } } - fn is_met<'tcx>(&self, - tcx: &TyCtxt<'tcx>, - free_regions: &FreeRegionMap, - var_values: &Vec, - min: ty::Region) - -> bool { + fn is_met(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, + free_regions: &FreeRegionMap, + var_values: &Vec, + min: ty::Region) + -> bool { match self { &VerifyBound::AnyRegion(ref rs) => rs.iter() diff --git a/src/librustc/infer/resolve.rs b/src/librustc/infer/resolve.rs index 8651b52e3f..5f550b427e 100644 --- a/src/librustc/infer/resolve.rs +++ b/src/librustc/infer/resolve.rs @@ -10,6 +10,7 @@ use super::{InferCtxt, FixupError, FixupResult}; use ty::{self, Ty, TyCtxt, TypeFoldable}; +use ty::fold::TypeFolder; /////////////////////////////////////////////////////////////////////////// // OPPORTUNISTIC TYPE RESOLVER @@ -19,18 +20,18 @@ use ty::{self, Ty, TyCtxt, TypeFoldable}; /// been unified with (similar to `shallow_resolve`, but deep). This is /// useful for printing messages etc but also required at various /// points for correctness. -pub struct OpportunisticTypeResolver<'a, 'tcx:'a> { - infcx: &'a InferCtxt<'a, 'tcx>, +pub struct OpportunisticTypeResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, } -impl<'a, 'tcx> OpportunisticTypeResolver<'a, 'tcx> { - pub fn new(infcx: &'a InferCtxt<'a, 'tcx>) -> OpportunisticTypeResolver<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> OpportunisticTypeResolver<'a, 'gcx, 'tcx> { + pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self { OpportunisticTypeResolver { infcx: infcx } } } -impl<'a, 'tcx> ty::fold::TypeFolder<'tcx> for OpportunisticTypeResolver<'a, 'tcx> { - fn tcx(&self) -> &TyCtxt<'tcx> { +impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for OpportunisticTypeResolver<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.infcx.tcx } @@ -47,18 +48,18 @@ impl<'a, 'tcx> ty::fold::TypeFolder<'tcx> for OpportunisticTypeResolver<'a, 'tcx /// The opportunistic type and region resolver is similar to the /// opportunistic type resolver, but also opportunistly resolves /// regions. It is useful for canonicalization. 
-pub struct OpportunisticTypeAndRegionResolver<'a, 'tcx:'a> { - infcx: &'a InferCtxt<'a, 'tcx>, +pub struct OpportunisticTypeAndRegionResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, } -impl<'a, 'tcx> OpportunisticTypeAndRegionResolver<'a, 'tcx> { - pub fn new(infcx: &'a InferCtxt<'a, 'tcx>) -> Self { +impl<'a, 'gcx, 'tcx> OpportunisticTypeAndRegionResolver<'a, 'gcx, 'tcx> { + pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self { OpportunisticTypeAndRegionResolver { infcx: infcx } } } -impl<'a, 'tcx> ty::fold::TypeFolder<'tcx> for OpportunisticTypeAndRegionResolver<'a, 'tcx> { - fn tcx(&self) -> &TyCtxt<'tcx> { +impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for OpportunisticTypeAndRegionResolver<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.infcx.tcx } @@ -85,7 +86,8 @@ impl<'a, 'tcx> ty::fold::TypeFolder<'tcx> for OpportunisticTypeAndRegionResolver /// Full type resolution replaces all type and region variables with /// their concrete results. If any variable cannot be replaced (never unified, etc) /// then an `Err` result is returned. -pub fn fully_resolve<'a, 'tcx, T>(infcx: &InferCtxt<'a,'tcx>, value: &T) -> FixupResult +pub fn fully_resolve<'a, 'gcx, 'tcx, T>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + value: &T) -> FixupResult where T : TypeFoldable<'tcx> { let mut full_resolver = FullTypeResolver { infcx: infcx, err: None }; @@ -98,13 +100,13 @@ pub fn fully_resolve<'a, 'tcx, T>(infcx: &InferCtxt<'a,'tcx>, value: &T) -> Fixu // N.B. This type is not public because the protocol around checking the // `err` field is not enforcable otherwise. -struct FullTypeResolver<'a, 'tcx:'a> { - infcx: &'a InferCtxt<'a, 'tcx>, +struct FullTypeResolver<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, err: Option, } -impl<'a, 'tcx> ty::fold::TypeFolder<'tcx> for FullTypeResolver<'a, 'tcx> { - fn tcx(&self) -> &TyCtxt<'tcx> { +impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for FullTypeResolver<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.infcx.tcx } diff --git a/src/librustc/infer/sub.rs b/src/librustc/infer/sub.rs index ece8c0c696..680dd0d635 100644 --- a/src/librustc/infer/sub.rs +++ b/src/librustc/infer/sub.rs @@ -8,8 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use super::combine::{self, CombineFields}; -use super::higher_ranked::HigherRankedRelations; +use super::combine::CombineFields; use super::SubregionOrigin; use super::type_variable::{SubtypeOf, SupertypeOf}; @@ -20,12 +19,12 @@ use traits::PredicateObligations; use std::mem; /// Ensures `a` is made a subtype of `b`. Returns `a` on success. 
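The resolvers in this hunk come in two flavors: the opportunistic ones substitute whatever has already been unified and leave remaining variables in place, while `fully_resolve` returns an error if any variable is still unbound. A self-contained sketch of the two behaviors on a toy type (not the real `TypeFolder` API):

#[derive(Debug, PartialEq)]
enum Ty { Int, Var(usize), Ref(Box<Ty>) }

// Substitute known bindings, leave unresolved variables as-is.
fn opportunistic_resolve(ty: &Ty, bindings: &[Option<Ty>]) -> Ty {
    match ty {
        Ty::Var(v) => match &bindings[*v] {
            Some(t) => opportunistic_resolve(t, bindings), // resolve deeply
            None => Ty::Var(*v),                           // leave in place
        },
        Ty::Ref(inner) => Ty::Ref(Box::new(opportunistic_resolve(inner, bindings))),
        Ty::Int => Ty::Int,
    }
}

// Like the above, but an unresolved variable is an error.
fn fully_resolve(ty: &Ty, bindings: &[Option<Ty>]) -> Result<Ty, usize> {
    match ty {
        Ty::Var(v) => match &bindings[*v] {
            Some(t) => fully_resolve(t, bindings),
            None => Err(*v),
        },
        Ty::Ref(inner) => Ok(Ty::Ref(Box::new(fully_resolve(inner, bindings)?))),
        Ty::Int => Ok(Ty::Int),
    }
}

fn main() {
    let bindings = vec![Some(Ty::Int), None];
    let ty = Ty::Ref(Box::new(Ty::Var(0)));
    assert_eq!(opportunistic_resolve(&ty, &bindings), Ty::Ref(Box::new(Ty::Int)));
    assert!(fully_resolve(&Ty::Var(1), &bindings).is_err());
}
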
-pub struct Sub<'a, 'tcx: 'a> { - fields: CombineFields<'a, 'tcx>, +pub struct Sub<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fields: CombineFields<'a, 'gcx, 'tcx>, } -impl<'a, 'tcx> Sub<'a, 'tcx> { - pub fn new(f: CombineFields<'a, 'tcx>) -> Sub<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Sub<'a, 'gcx, 'tcx> { + pub fn new(f: CombineFields<'a, 'gcx, 'tcx>) -> Sub<'a, 'gcx, 'tcx> { Sub { fields: f } } @@ -34,9 +33,9 @@ impl<'a, 'tcx> Sub<'a, 'tcx> { } } -impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Sub<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> TypeRelation<'a, 'gcx, 'tcx> for Sub<'a, 'gcx, 'tcx> { fn tag(&self) -> &'static str { "Sub" } - fn tcx(&self) -> &'a TyCtxt<'tcx> { self.fields.infcx.tcx } + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.fields.infcx.tcx } fn a_is_expected(&self) -> bool { self.fields.a_is_expected } fn with_cause(&mut self, cause: Cause, f: F) -> R @@ -50,11 +49,11 @@ impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Sub<'a, 'tcx> { r } - fn relate_with_variance>(&mut self, - variance: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> + fn relate_with_variance>(&mut self, + variance: ty::Variance, + a: &T, + b: &T) + -> RelateResult<'tcx, T> { match variance { ty::Invariant => self.fields.equate().relate(a, b), @@ -91,11 +90,12 @@ impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Sub<'a, 'tcx> { } (&ty::TyError, _) | (_, &ty::TyError) => { + infcx.set_tainted_by_errors(); Ok(self.tcx().types.err) } _ => { - combine::super_combine_tys(self.fields.infcx, self, a, b)?; + self.fields.infcx.super_combine_tys(self, a, b)?; Ok(a) } } @@ -114,7 +114,7 @@ impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Sub<'a, 'tcx> { fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) -> RelateResult<'tcx, ty::Binder> - where T: Relate<'a,'tcx> + where T: Relate<'tcx> { self.fields.higher_ranked_sub(a, b) } diff --git a/src/librustc/infer/unify_key.rs b/src/librustc/infer/unify_key.rs index a9eb20b829..d7e3a53ff2 100644 --- a/src/librustc/infer/unify_key.rs +++ b/src/librustc/infer/unify_key.rs @@ -12,8 +12,8 @@ use syntax::ast; use ty::{self, IntVarValue, Ty, TyCtxt}; use rustc_data_structures::unify::{Combine, UnifyKey}; -pub trait ToType<'tcx> { - fn to_type(&self, tcx: &TyCtxt<'tcx>) -> Ty<'tcx>; +pub trait ToType { + fn to_type<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx>; } impl UnifyKey for ty::IntVid { @@ -50,8 +50,8 @@ impl UnifyKey for ty::RegionVid { fn tag(_: Option) -> &'static str { "RegionVid" } } -impl<'tcx> ToType<'tcx> for IntVarValue { - fn to_type(&self, tcx: &TyCtxt<'tcx>) -> Ty<'tcx> { +impl ToType for IntVarValue { + fn to_type<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { match *self { ty::IntType(i) => tcx.mk_mach_int(i), ty::UintType(i) => tcx.mk_mach_uint(i), @@ -68,8 +68,8 @@ impl UnifyKey for ty::FloatVid { fn tag(_: Option) -> &'static str { "FloatVid" } } -impl<'tcx> ToType<'tcx> for ast::FloatTy { - fn to_type(&self, tcx: &TyCtxt<'tcx>) -> Ty<'tcx> { +impl ToType for ast::FloatTy { + fn to_type<'a, 'gcx, 'tcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { tcx.mk_mach_float(*self) } } diff --git a/src/librustc/lib.rs b/src/librustc/lib.rs index d1bbbf08ac..e1fb701e64 100644 --- a/src/librustc/lib.rs +++ b/src/librustc/lib.rs @@ -37,7 +37,6 @@ #![feature(rustc_private)] #![feature(slice_patterns)] #![feature(staged_api)] -#![feature(step_by)] #![feature(question_mark)] #![cfg_attr(test, feature(test))] @@ -49,6 +48,7 @@ extern crate getopts; extern crate graphviz; extern crate libc; extern crate rbml; +extern crate rustc_llvm as llvm; extern crate 
rustc_back; extern crate rustc_data_structures; extern crate serialize; diff --git a/src/librustc/lint/builtin.rs b/src/librustc/lint/builtin.rs index 2564838c67..d7971cd2cf 100644 --- a/src/librustc/lint/builtin.rs +++ b/src/librustc/lint/builtin.rs @@ -167,6 +167,13 @@ declare_lint! { "transmute from function item type to pointer-sized type erroneously allowed" } +declare_lint! { + pub HR_LIFETIME_IN_ASSOC_TYPE, + Warn, + "binding for associated type references higher-ranked lifetime \ + that does not appear in the trait input types" +} + declare_lint! { pub OVERLAPPING_INHERENT_IMPLS, Warn, @@ -185,6 +192,18 @@ declare_lint! { "detects super or self keywords at the beginning of global path" } +declare_lint! { + pub UNSIZED_IN_TUPLE, + Warn, + "unsized types in the interior of a tuple were erroneously allowed" +} + +declare_lint! { + pub OBJECT_UNSAFE_FRAGMENT, + Warn, + "object-unsafe non-principal fragments in object types were erroneously allowed" +} + /// Does nothing as a lint pass, but registers some `Lint`s /// which are used by other parts of the compiler. #[derive(Copy, Clone)] @@ -220,7 +239,10 @@ impl LintPass for HardwiredLints { TRANSMUTE_FROM_FN_ITEM_TYPES, OVERLAPPING_INHERENT_IMPLS, RENAMED_AND_REMOVED_LINTS, - SUPER_OR_SELF_IN_GLOBAL_PATH + SUPER_OR_SELF_IN_GLOBAL_PATH, + UNSIZED_IN_TUPLE, + OBJECT_UNSAFE_FRAGMENT, + HR_LIFETIME_IN_ASSOC_TYPE ) } } diff --git a/src/librustc/lint/context.rs b/src/librustc/lint/context.rs index 1c6dd26588..0801f8f4ac 100644 --- a/src/librustc/lint/context.rs +++ b/src/librustc/lint/context.rs @@ -30,7 +30,7 @@ use middle::privacy::AccessLevels; use ty::TyCtxt; use session::{config, early_error, Session}; use lint::{Level, LevelSource, Lint, LintId, LintArray, LintPass}; -use lint::{EarlyLintPass, EarlyLintPassObject, LateLintPass, LateLintPassObject}; +use lint::{EarlyLintPassObject, LateLintPass, LateLintPassObject}; use lint::{Default, CommandLine, Node, Allow, Warn, Deny, Forbid}; use lint::builtin; use util::nodemap::FnvHashMap; @@ -297,7 +297,7 @@ impl LintStore { /// Context for lint checking after type checking. pub struct LateContext<'a, 'tcx: 'a> { /// Type context we're checking in. - pub tcx: &'a TyCtxt<'tcx>, + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, /// The crate being checked. pub krate: &'a hir::Crate, @@ -456,17 +456,13 @@ pub fn raw_struct_lint<'a>(sess: &'a Session, it will become a hard error in a future release!"); let citation = format!("for more information, see {}", future_incompatible.reference); - if let Some(sp) = span { - err.fileline_warn(sp, &explanation); - err.fileline_note(sp, &citation); - } else { - err.warn(&explanation); - err.note(&citation); - } + err.warn(&explanation); + err.note(&citation); } if let Some(span) = def { - err.span_note(span, "lint level defined here"); + let explanation = "lint level defined here"; + err.span_note(span, &explanation); } err @@ -542,7 +538,7 @@ pub trait LintContext: Sized { let mut err = self.lookup(lint, Some(span), msg); if self.current_level(lint) != Level::Allow { if note_span == span { - err.fileline_note(note_span, note); + err.note(note); } else { err.span_note(note_span, note); } @@ -656,7 +652,7 @@ impl<'a> EarlyContext<'a> { } impl<'a, 'tcx> LateContext<'a, 'tcx> { - fn new(tcx: &'a TyCtxt<'tcx>, + fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, krate: &'a hir::Crate, access_levels: &'a AccessLevels) -> LateContext<'a, 'tcx> { // We want to own the lint store, so move it out of the session. 
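The `declare_lint!` invocations above each register a named lint with a default level and a description. As a rough, self-contained analogue of what such a macro provides (not the actual rustc macro or its expansion), it can be sketched like this:

#[allow(dead_code)]
#[derive(Debug)]
enum Level { Allow, Warn, Deny, Forbid }

struct Lint {
    name: &'static str,
    default_level: Level,
    desc: &'static str,
}

macro_rules! declare_lint {
    ($name:ident, $level:ident, $desc:expr) => {
        static $name: Lint = Lint {
            name: stringify!($name),
            default_level: Level::$level,
            desc: $desc,
        };
    };
}

declare_lint!(UNSIZED_IN_TUPLE, Warn,
              "unsized types in the interior of a tuple were erroneously allowed");

fn main() {
    println!("{} (default: {:?}): {}",
             UNSIZED_IN_TUPLE.name, UNSIZED_IN_TUPLE.default_level, UNSIZED_IN_TUPLE.desc);
}
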
@@ -744,7 +740,8 @@ impl<'a, 'tcx, 'v> hir_visit::Visitor<'v> for LateContext<'a, 'tcx> { /// items in the context of the outer item, so enable /// deep-walking. fn visit_nested_item(&mut self, item: hir::ItemId) { - self.visit_item(self.tcx.map.expect_item(item.id)) + let tcx = self.tcx; + self.visit_item(tcx.map.expect_item(item.id)) } fn visit_item(&mut self, it: &hir::Item) { @@ -892,11 +889,6 @@ impl<'a, 'tcx, 'v> hir_visit::Visitor<'v> for LateContext<'a, 'tcx> { run_lints!(self, check_lifetime_def, late_passes, lt); } - fn visit_explicit_self(&mut self, es: &hir::ExplicitSelf) { - run_lints!(self, check_explicit_self, late_passes, es); - hir_visit::walk_explicit_self(self, es); - } - fn visit_path(&mut self, p: &hir::Path, id: ast::NodeId) { run_lints!(self, check_path, late_passes, p, id); hir_visit::walk_path(self, p); @@ -1223,7 +1215,8 @@ fn check_lint_name_cmdline(sess: &Session, lint_cx: &LintStore, /// Perform lint checking on a crate. /// /// Consumes the `lint_store` field of the `Session`. -pub fn check_crate(tcx: &TyCtxt, access_levels: &AccessLevels) { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + access_levels: &AccessLevels) { let _task = tcx.dep_graph.in_task(DepNode::LateLintCheck); let krate = tcx.map.krate(); diff --git a/src/librustc/middle/astconv_util.rs b/src/librustc/middle/astconv_util.rs index 2a27732b95..8f97a89e65 100644 --- a/src/librustc/middle/astconv_util.rs +++ b/src/librustc/middle/astconv_util.rs @@ -20,63 +20,64 @@ use ty::{Ty, TyCtxt}; use syntax::codemap::Span; use hir as ast; -pub fn prohibit_type_params(tcx: &TyCtxt, segments: &[ast::PathSegment]) { - for segment in segments { - for typ in segment.parameters.types() { - span_err!(tcx.sess, typ.span, E0109, - "type parameters are not allowed on this type"); - break; - } - for lifetime in segment.parameters.lifetimes() { - span_err!(tcx.sess, lifetime.span, E0110, - "lifetime parameters are not allowed on this type"); - break; - } - for binding in segment.parameters.bindings() { - prohibit_projection(tcx, binding.span); - break; +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn prohibit_type_params(self, segments: &[ast::PathSegment]) { + for segment in segments { + for typ in segment.parameters.types() { + span_err!(self.sess, typ.span, E0109, + "type parameters are not allowed on this type"); + break; + } + for lifetime in segment.parameters.lifetimes() { + span_err!(self.sess, lifetime.span, E0110, + "lifetime parameters are not allowed on this type"); + break; + } + for binding in segment.parameters.bindings() { + self.prohibit_projection(binding.span); + break; + } } } -} -pub fn prohibit_projection(tcx: &TyCtxt, span: Span) -{ - span_err!(tcx.sess, span, E0229, - "associated type bindings are not allowed here"); -} + pub fn prohibit_projection(self, span: Span) + { + span_err!(self.sess, span, E0229, + "associated type bindings are not allowed here"); + } -pub fn prim_ty_to_ty<'tcx>(tcx: &TyCtxt<'tcx>, - segments: &[ast::PathSegment], - nty: ast::PrimTy) - -> Ty<'tcx> { - prohibit_type_params(tcx, segments); - match nty { - ast::TyBool => tcx.types.bool, - ast::TyChar => tcx.types.char, - ast::TyInt(it) => tcx.mk_mach_int(it), - ast::TyUint(uit) => tcx.mk_mach_uint(uit), - ast::TyFloat(ft) => tcx.mk_mach_float(ft), - ast::TyStr => tcx.mk_str() + pub fn prim_ty_to_ty(self, + segments: &[ast::PathSegment], + nty: ast::PrimTy) + -> Ty<'tcx> { + self.prohibit_type_params(segments); + match nty { + ast::TyBool => self.types.bool, + ast::TyChar => self.types.char, + 
ast::TyInt(it) => self.mk_mach_int(it), + ast::TyUint(uit) => self.mk_mach_uint(uit), + ast::TyFloat(ft) => self.mk_mach_float(ft), + ast::TyStr => self.mk_str() + } } -} -/// If a type in the AST is a primitive type, return the ty::Ty corresponding -/// to it. -pub fn ast_ty_to_prim_ty<'tcx>(tcx: &TyCtxt<'tcx>, ast_ty: &ast::Ty) - -> Option> { - if let ast::TyPath(None, ref path) = ast_ty.node { - let def = match tcx.def_map.borrow().get(&ast_ty.id) { - None => { - span_bug!(ast_ty.span, "unbound path {:?}", path) + /// If a type in the AST is a primitive type, return the ty::Ty corresponding + /// to it. + pub fn ast_ty_to_prim_ty(self, ast_ty: &ast::Ty) -> Option> { + if let ast::TyPath(None, ref path) = ast_ty.node { + let def = match self.def_map.borrow().get(&ast_ty.id) { + None => { + span_bug!(ast_ty.span, "unbound path {:?}", path) + } + Some(d) => d.full_def() + }; + if let Def::PrimTy(nty) = def { + Some(self.prim_ty_to_ty(&path.segments, nty)) + } else { + None } - Some(d) => d.full_def() - }; - if let Def::PrimTy(nty) = def { - Some(prim_ty_to_ty(tcx, &path.segments, nty)) } else { None } - } else { - None } } diff --git a/src/librustc/middle/cstore.rs b/src/librustc/middle/cstore.rs index d3db0804c2..e5a8c1d1b4 100644 --- a/src/librustc/middle/cstore.rs +++ b/src/librustc/middle/cstore.rs @@ -22,18 +22,19 @@ // are *mostly* used as a part of that interface, but these should // probably get a better home if someone can find one. -use hir::svh::Svh; -use hir::map as hir_map; use hir::def::{self, Def}; +use hir::def_id::{DefId, DefIndex}; +use hir::map as hir_map; +use hir::map::definitions::DefKey; +use hir::svh::Svh; use middle::lang_items; use ty::{self, Ty, TyCtxt, VariantKind}; -use hir::def_id::{DefId, DefIndex}; use mir::repr::Mir; use mir::mir_map::MirMap; use session::Session; +use session::config::PanicStrategy; use session::search_paths::PathKind; use util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap}; -use std::any::Any; use std::cell::RefCell; use std::rc::Rc; use std::path::PathBuf; @@ -113,6 +114,7 @@ pub enum InlinedItemRef<'a> { /// LOCAL_CRATE in their DefId. pub const LOCAL_CRATE: ast::CrateNum = 0; +#[derive(Copy, Clone)] pub struct ChildItem { pub def: DefLike, pub name: ast::Name, @@ -148,67 +150,61 @@ pub struct ExternCrate { /// A store of Rust crates, through with their metadata /// can be accessed. -/// -/// The `: Any` bound is a temporary measure that allows access -/// to the backing `rustc_metadata::cstore::CStore` object. It -/// will be removed in the near future - if you need to access -/// internal APIs, please tell us. 
-pub trait CrateStore<'tcx> : Any { +pub trait CrateStore<'tcx> { // item info fn stability(&self, def: DefId) -> Option; fn deprecation(&self, def: DefId) -> Option; fn visibility(&self, def: DefId) -> ty::Visibility; - fn closure_kind(&self, tcx: &TyCtxt<'tcx>, def_id: DefId) - -> ty::ClosureKind; - fn closure_ty(&self, tcx: &TyCtxt<'tcx>, def_id: DefId) - -> ty::ClosureTy<'tcx>; + fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind; + fn closure_ty<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) + -> ty::ClosureTy<'tcx>; fn item_variances(&self, def: DefId) -> ty::ItemVariances; fn repr_attrs(&self, def: DefId) -> Vec; - fn item_type(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> ty::TypeScheme<'tcx>; + fn item_type<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::TypeScheme<'tcx>; fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap>; fn item_name(&self, def: DefId) -> ast::Name; - fn item_predicates(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> ty::GenericPredicates<'tcx>; - fn item_super_predicates(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> ty::GenericPredicates<'tcx>; + fn item_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::GenericPredicates<'tcx>; + fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::GenericPredicates<'tcx>; fn item_attrs(&self, def_id: DefId) -> Vec; fn item_symbol(&self, def: DefId) -> String; - fn trait_def(&self, tcx: &TyCtxt<'tcx>, def: DefId)-> ty::TraitDef<'tcx>; - fn adt_def(&self, tcx: &TyCtxt<'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx>; + fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)-> ty::TraitDef<'tcx>; + fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx>; fn method_arg_names(&self, did: DefId) -> Vec; fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec; // trait info fn implementations_of_trait(&self, def_id: DefId) -> Vec; - fn provided_trait_methods(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Vec>>; + fn provided_trait_methods<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Vec>>; fn trait_item_def_ids(&self, def: DefId) -> Vec; // impl info fn impl_items(&self, impl_def_id: DefId) -> Vec; - fn impl_trait_ref(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Option>; + fn impl_trait_ref<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option>; fn impl_polarity(&self, def: DefId) -> Option; fn custom_coerce_unsized_kind(&self, def: DefId) -> Option; - fn associated_consts(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Vec>>; + fn associated_consts<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Vec>>; fn impl_parent(&self, impl_def_id: DefId) -> Option; // trait/impl-item info - fn trait_of_item(&self, tcx: &TyCtxt<'tcx>, def_id: DefId) - -> Option; - fn impl_or_trait_item(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Option>; + fn trait_of_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) + -> Option; + fn impl_or_trait_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option>; // flags fn is_const_fn(&self, did: DefId) -> bool; fn is_defaulted_trait(&self, did: DefId) -> bool; fn is_impl(&self, did: DefId) -> bool; fn is_default_impl(&self, impl_did: DefId) -> bool; - fn is_extern_item(&self, tcx: &TyCtxt<'tcx>, did: DefId) -> bool; + fn is_extern_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId) -> bool; fn is_static_method(&self, did: DefId) -> bool; fn is_statically_included_foreign_item(&self, id: ast::NodeId) -> bool; fn 
is_typedef(&self, did: DefId) -> bool; @@ -221,6 +217,8 @@ pub trait CrateStore<'tcx> : Any { fn is_staged_api(&self, cnum: ast::CrateNum) -> bool; fn is_explicitly_linked(&self, cnum: ast::CrateNum) -> bool; fn is_allocator(&self, cnum: ast::CrateNum) -> bool; + fn is_panic_runtime(&self, cnum: ast::CrateNum) -> bool; + fn panic_strategy(&self, cnum: ast::CrateNum) -> PanicStrategy; fn extern_crate(&self, cnum: ast::CrateNum) -> Option; fn crate_attrs(&self, cnum: ast::CrateNum) -> Vec; /// The name of the crate as it is referred to in source code of the current @@ -237,6 +235,10 @@ pub trait CrateStore<'tcx> : Any { fn reachable_ids(&self, cnum: ast::CrateNum) -> Vec; // resolve + fn def_index_for_def_key(&self, + cnum: ast::CrateNum, + def: DefKey) + -> Option; fn def_key(&self, def: DefId) -> hir_map::DefKey; fn relative_def_path(&self, def: DefId) -> hir_map::DefPath; fn variant_kind(&self, def_id: DefId) -> Option; @@ -247,10 +249,10 @@ pub trait CrateStore<'tcx> : Any { fn crate_top_level_items(&self, cnum: ast::CrateNum) -> Vec; // misc. metadata - fn maybe_get_item_ast(&'tcx self, tcx: &TyCtxt<'tcx>, def: DefId) - -> FoundAst<'tcx>; - fn maybe_get_item_mir(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Option>; + fn maybe_get_item_ast<'a>(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> FoundAst<'tcx>; + fn maybe_get_item_mir<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option>; fn is_item_mir_available(&self, def: DefId) -> bool; // This is basically a 1-based range of ints, which is a little @@ -262,22 +264,21 @@ pub trait CrateStore<'tcx> : Any { // utility functions fn metadata_filename(&self) -> &str; fn metadata_section_name(&self, target: &Target) -> &str; - fn encode_type(&self, - tcx: &TyCtxt<'tcx>, - ty: Ty<'tcx>, - def_id_to_string: fn(&TyCtxt<'tcx>, DefId) -> String) - -> Vec; + fn encode_type<'a>(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty: Ty<'tcx>, + def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String) + -> Vec; fn used_crates(&self, prefer: LinkagePreference) -> Vec<(ast::CrateNum, Option)>; fn used_crate_source(&self, cnum: ast::CrateNum) -> CrateSource; fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option; - fn encode_metadata(&self, - tcx: &TyCtxt<'tcx>, - reexports: &def::ExportMap, - item_symbols: &RefCell>, - link_meta: &LinkMeta, - reachable: &NodeSet, - mir_map: &MirMap<'tcx>, - krate: &hir::Crate) -> Vec; + fn encode_metadata<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + reexports: &def::ExportMap, + item_symbols: &RefCell>, + link_meta: &LinkMeta, + reachable: &NodeSet, + mir_map: &MirMap<'tcx>, + krate: &hir::Crate) -> Vec; fn metadata_encoding_version(&self) -> &[u8]; } @@ -335,63 +336,69 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore { fn stability(&self, def: DefId) -> Option { bug!("stability") } fn deprecation(&self, def: DefId) -> Option { bug!("deprecation") } fn visibility(&self, def: DefId) -> ty::Visibility { bug!("visibility") } - fn closure_kind(&self, tcx: &TyCtxt<'tcx>, def_id: DefId) - -> ty::ClosureKind { bug!("closure_kind") } - fn closure_ty(&self, tcx: &TyCtxt<'tcx>, def_id: DefId) - -> ty::ClosureTy<'tcx> { bug!("closure_ty") } + fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind { bug!("closure_kind") } + fn closure_ty<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) + -> ty::ClosureTy<'tcx> { bug!("closure_ty") } fn item_variances(&self, def: DefId) -> ty::ItemVariances { bug!("item_variances") } fn repr_attrs(&self, def: DefId) -> Vec { bug!("repr_attrs") } - fn 
item_type(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> ty::TypeScheme<'tcx> { bug!("item_type") } + fn item_type<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::TypeScheme<'tcx> { bug!("item_type") } fn visible_parent_map<'a>(&'a self) -> ::std::cell::RefMut<'a, DefIdMap> { bug!("visible_parent_map") } fn item_name(&self, def: DefId) -> ast::Name { bug!("item_name") } - fn item_predicates(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> ty::GenericPredicates<'tcx> { bug!("item_predicates") } - fn item_super_predicates(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> ty::GenericPredicates<'tcx> { bug!("item_super_predicates") } + fn item_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::GenericPredicates<'tcx> { bug!("item_predicates") } + fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::GenericPredicates<'tcx> { bug!("item_super_predicates") } fn item_attrs(&self, def_id: DefId) -> Vec { bug!("item_attrs") } fn item_symbol(&self, def: DefId) -> String { bug!("item_symbol") } - fn trait_def(&self, tcx: &TyCtxt<'tcx>, def: DefId)-> ty::TraitDef<'tcx> + fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId)-> ty::TraitDef<'tcx> { bug!("trait_def") } - fn adt_def(&self, tcx: &TyCtxt<'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx> + fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx> { bug!("adt_def") } fn method_arg_names(&self, did: DefId) -> Vec { bug!("method_arg_names") } fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec { vec![] } // trait info fn implementations_of_trait(&self, def_id: DefId) -> Vec { vec![] } - fn provided_trait_methods(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Vec>> { bug!("provided_trait_methods") } + fn provided_trait_methods<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Vec>> { bug!("provided_trait_methods") } fn trait_item_def_ids(&self, def: DefId) -> Vec { bug!("trait_item_def_ids") } + fn def_index_for_def_key(&self, + cnum: ast::CrateNum, + def: DefKey) + -> Option { + None + } // impl info fn impl_items(&self, impl_def_id: DefId) -> Vec { bug!("impl_items") } - fn impl_trait_ref(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Option> { bug!("impl_trait_ref") } + fn impl_trait_ref<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option> { bug!("impl_trait_ref") } fn impl_polarity(&self, def: DefId) -> Option { bug!("impl_polarity") } fn custom_coerce_unsized_kind(&self, def: DefId) -> Option { bug!("custom_coerce_unsized_kind") } - fn associated_consts(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Vec>> { bug!("associated_consts") } + fn associated_consts<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Vec>> { bug!("associated_consts") } fn impl_parent(&self, def: DefId) -> Option { bug!("impl_parent") } // trait/impl-item info - fn trait_of_item(&self, tcx: &TyCtxt<'tcx>, def_id: DefId) - -> Option { bug!("trait_of_item") } - fn impl_or_trait_item(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Option> { bug!("impl_or_trait_item") } + fn trait_of_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) + -> Option { bug!("trait_of_item") } + fn impl_or_trait_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option> { bug!("impl_or_trait_item") } // flags fn is_const_fn(&self, did: DefId) -> bool { bug!("is_const_fn") } fn is_defaulted_trait(&self, did: DefId) -> bool { bug!("is_defaulted_trait") } fn is_impl(&self, did: DefId) -> bool { bug!("is_impl") } fn is_default_impl(&self, impl_did: DefId) -> bool { 
bug!("is_default_impl") } - fn is_extern_item(&self, tcx: &TyCtxt<'tcx>, did: DefId) -> bool { bug!("is_extern_item") } + fn is_extern_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId) -> bool + { bug!("is_extern_item") } fn is_static_method(&self, did: DefId) -> bool { bug!("is_static_method") } fn is_statically_included_foreign_item(&self, id: ast::NodeId) -> bool { false } fn is_typedef(&self, did: DefId) -> bool { bug!("is_typedef") } @@ -407,6 +414,10 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore { fn is_staged_api(&self, cnum: ast::CrateNum) -> bool { bug!("is_staged_api") } fn is_explicitly_linked(&self, cnum: ast::CrateNum) -> bool { bug!("is_explicitly_linked") } fn is_allocator(&self, cnum: ast::CrateNum) -> bool { bug!("is_allocator") } + fn is_panic_runtime(&self, cnum: ast::CrateNum) -> bool { bug!("is_panic_runtime") } + fn panic_strategy(&self, cnum: ast::CrateNum) -> PanicStrategy { + bug!("panic_strategy") + } fn extern_crate(&self, cnum: ast::CrateNum) -> Option { bug!("extern_crate") } fn crate_attrs(&self, cnum: ast::CrateNum) -> Vec { bug!("crate_attrs") } @@ -440,10 +451,10 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore { { bug!("crate_top_level_items") } // misc. metadata - fn maybe_get_item_ast(&'tcx self, tcx: &TyCtxt<'tcx>, def: DefId) - -> FoundAst<'tcx> { bug!("maybe_get_item_ast") } - fn maybe_get_item_mir(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Option> { bug!("maybe_get_item_mir") } + fn maybe_get_item_ast<'a>(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> FoundAst<'tcx> { bug!("maybe_get_item_ast") } + fn maybe_get_item_mir<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option> { bug!("maybe_get_item_mir") } fn is_item_mir_available(&self, def: DefId) -> bool { bug!("is_item_mir_available") } @@ -457,25 +468,24 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore { // utility functions fn metadata_filename(&self) -> &str { bug!("metadata_filename") } fn metadata_section_name(&self, target: &Target) -> &str { bug!("metadata_section_name") } - fn encode_type(&self, - tcx: &TyCtxt<'tcx>, - ty: Ty<'tcx>, - def_id_to_string: fn(&TyCtxt<'tcx>, DefId) -> String) - -> Vec { + fn encode_type<'a>(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty: Ty<'tcx>, + def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String) + -> Vec { bug!("encode_type") } fn used_crates(&self, prefer: LinkagePreference) -> Vec<(ast::CrateNum, Option)> { vec![] } fn used_crate_source(&self, cnum: ast::CrateNum) -> CrateSource { bug!("used_crate_source") } fn extern_mod_stmt_cnum(&self, emod_id: ast::NodeId) -> Option { None } - fn encode_metadata(&self, - tcx: &TyCtxt<'tcx>, - reexports: &def::ExportMap, - item_symbols: &RefCell>, - link_meta: &LinkMeta, - reachable: &NodeSet, - mir_map: &MirMap<'tcx>, - krate: &hir::Crate) -> Vec { vec![] } + fn encode_metadata<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + reexports: &def::ExportMap, + item_symbols: &RefCell>, + link_meta: &LinkMeta, + reachable: &NodeSet, + mir_map: &MirMap<'tcx>, + krate: &hir::Crate) -> Vec { vec![] } fn metadata_encoding_version(&self) -> &[u8] { bug!("metadata_encoding_version") } } @@ -502,7 +512,7 @@ pub mod tls { use hir::def_id::DefId; pub trait EncodingContext<'tcx> { - fn tcx<'a>(&'a self) -> &'a TyCtxt<'tcx>; + fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx>; fn encode_ty(&self, encoder: &mut OpaqueEncoder, t: Ty<'tcx>); fn encode_substs(&self, encoder: &mut OpaqueEncoder, substs: &Substs<'tcx>); } @@ -569,7 +579,7 @@ pub mod tls { } pub trait DecodingContext<'tcx> { - fn 
tcx<'a>(&'a self) -> &'a TyCtxt<'tcx>; + fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx>; fn decode_ty(&self, decoder: &mut OpaqueDecoder) -> ty::Ty<'tcx>; fn decode_substs(&self, decoder: &mut OpaqueDecoder) -> Substs<'tcx>; fn translate_def_id(&self, def_id: DefId) -> DefId; diff --git a/src/librustc/middle/dataflow.rs b/src/librustc/middle/dataflow.rs index 1aaaa4bcd7..41b27a48b2 100644 --- a/src/librustc/middle/dataflow.rs +++ b/src/librustc/middle/dataflow.rs @@ -37,7 +37,7 @@ pub enum EntryOrExit { #[derive(Clone)] pub struct DataFlowContext<'a, 'tcx: 'a, O> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, /// a name for the analysis using this dataflow instance analysis_name: &'static str, @@ -222,7 +222,7 @@ pub enum KillFrom { } impl<'a, 'tcx, O:DataFlowOperator> DataFlowContext<'a, 'tcx, O> { - pub fn new(tcx: &'a TyCtxt<'tcx>, + pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, analysis_name: &'static str, decl: Option<&hir::FnDecl>, cfg: &cfg::CFG, diff --git a/src/librustc/middle/dead.rs b/src/librustc/middle/dead.rs index dcdc02c9df..cc6b83fccf 100644 --- a/src/librustc/middle/dead.rs +++ b/src/librustc/middle/dead.rs @@ -25,13 +25,14 @@ use lint; use std::collections::HashSet; use syntax::{ast, codemap}; -use syntax::attr::{self, AttrMetaMethods}; +use syntax::attr; // Any local node that may call something in its body block should be // explored. For example, if it's a live NodeItem that is a // function, then we should explore its block to check for codes that // may need to be marked as live. -fn should_explore(tcx: &TyCtxt, node_id: ast::NodeId) -> bool { +fn should_explore<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + node_id: ast::NodeId) -> bool { match tcx.map.find(node_id) { Some(ast_map::NodeItem(..)) | Some(ast_map::NodeImplItem(..)) | @@ -45,7 +46,7 @@ fn should_explore(tcx: &TyCtxt, node_id: ast::NodeId) -> bool { struct MarkSymbolVisitor<'a, 'tcx: 'a> { worklist: Vec, - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, live_symbols: Box>, struct_has_extern_repr: bool, ignore_non_const_paths: bool, @@ -54,7 +55,7 @@ struct MarkSymbolVisitor<'a, 'tcx: 'a> { } impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> { - fn new(tcx: &'a TyCtxt<'tcx>, + fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, worklist: Vec) -> MarkSymbolVisitor<'a, 'tcx> { MarkSymbolVisitor { worklist: worklist, @@ -362,9 +363,10 @@ impl<'v> Visitor<'v> for LifeSeeder { } } -fn create_and_seed_worklist(tcx: &TyCtxt, - access_levels: &privacy::AccessLevels, - krate: &hir::Crate) -> Vec { +fn create_and_seed_worklist<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + access_levels: &privacy::AccessLevels, + krate: &hir::Crate) + -> Vec { let mut worklist = Vec::new(); for (id, _) in &access_levels.map { worklist.push(*id); @@ -385,10 +387,10 @@ fn create_and_seed_worklist(tcx: &TyCtxt, return life_seeder.worklist; } -fn find_live(tcx: &TyCtxt, - access_levels: &privacy::AccessLevels, - krate: &hir::Crate) - -> Box> { +fn find_live<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + access_levels: &privacy::AccessLevels, + krate: &hir::Crate) + -> Box> { let worklist = create_and_seed_worklist(tcx, access_levels, krate); let mut symbol_visitor = MarkSymbolVisitor::new(tcx, worklist); symbol_visitor.mark_live_symbols(); @@ -405,7 +407,7 @@ fn get_struct_ctor_id(item: &hir::Item) -> Option { } struct DeadVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, live_symbols: Box>, } @@ -504,7 +506,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for DeadVisitor<'a, 'tcx> { /// an error. 
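The dead-code hunk above seeds a worklist from the externally reachable items (`create_and_seed_worklist`, driven by `access_levels`) and then transitively marks everything those items mention (`MarkSymbolVisitor`); whatever is never reached gets reported. A minimal, self-contained sketch of that marking loop, with made-up item ids standing in for HIR nodes:

use std::collections::HashSet;

// `references(id)` stands in for "everything item `id` mentions".
fn mark_live(seeds: &[u32], references: &dyn Fn(u32) -> Vec<u32>) -> HashSet<u32> {
    let mut worklist: Vec<u32> = seeds.to_vec();
    let mut live = HashSet::new();
    while let Some(id) = worklist.pop() {
        if !live.insert(id) {
            continue; // already marked live
        }
        worklist.extend(references(id));
    }
    live
}

fn main() {
    // Item 0 is externally reachable and uses item 1; item 2 is never
    // referenced, so a dead-code pass would report it.
    let references = |id: u32| -> Vec<u32> {
        match id {
            0 => vec![1],
            _ => vec![],
        }
    };
    let live = mark_live(&[0], &references);
    assert!(live.contains(&1));
    assert!(!live.contains(&2));
}
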
We could do this also by checking the parents, but /// this is how the code is setup and it seems harmless enough. fn visit_nested_item(&mut self, item: hir::ItemId) { - self.visit_item(self.tcx.map.expect_item(item.id)) + let tcx = self.tcx; + self.visit_item(tcx.map.expect_item(item.id)) } fn visit_item(&mut self, item: &hir::Item) { @@ -582,7 +585,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for DeadVisitor<'a, 'tcx> { } } -pub fn check_crate(tcx: &TyCtxt, access_levels: &privacy::AccessLevels) { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + access_levels: &privacy::AccessLevels) { let _task = tcx.dep_graph.in_task(DepNode::DeadCheck); let krate = tcx.map.krate(); let live_symbols = find_live(tcx, access_levels, krate); diff --git a/src/librustc/middle/dependency_format.rs b/src/librustc/middle/dependency_format.rs index aac6f1edc0..0b398fd0d4 100644 --- a/src/librustc/middle/dependency_format.rs +++ b/src/librustc/middle/dependency_format.rs @@ -64,8 +64,7 @@ use syntax::ast; use session; -use session::config; -use middle::cstore::CrateStore; +use session::config::{self, PanicStrategy}; use middle::cstore::LinkagePreference::{self, RequireStatic, RequireDynamic}; use util::nodemap::FnvHashMap; @@ -116,9 +115,10 @@ fn calculate_type(sess: &session::Session, // got long ago), so don't bother with anything. config::CrateTypeRlib => return Vec::new(), - // Staticlibs must have all static dependencies. If any fail to be - // found, we generate some nice pretty errors. - config::CrateTypeStaticlib => { + // Staticlibs and cdylibs must have all static dependencies. If any fail + // to be found, we generate some nice pretty errors. + config::CrateTypeStaticlib | + config::CrateTypeCdylib => { match attempt_static(sess) { Some(v) => return v, None => {} @@ -194,10 +194,15 @@ fn calculate_type(sess: &session::Session, } // We've gotten this far because we're emitting some form of a final - // artifact which means that we're going to need an allocator of some form. - // No allocator may have been required or linked so far, so activate one - // here if one isn't set. - activate_allocator(sess, &mut ret); + // artifact which means that we may need to inject dependencies of some + // form. + // + // Things like allocators and panic runtimes may not have been activated + // quite yet, so do so here. + activate_injected_dep(sess.injected_allocator.get(), &mut ret, + &|cnum| sess.cstore.is_allocator(cnum)); + activate_injected_dep(sess.injected_panic_runtime.get(), &mut ret, + &|cnum| sess.cstore.is_panic_runtime(cnum)); // When dylib B links to dylib A, then when using B we must also link to A. // It could be the case, however, that the rlib for A is present (hence we @@ -271,40 +276,42 @@ fn attempt_static(sess: &session::Session) -> Option { } }).collect::>(); - // Our allocator may not have been activated as it's not flagged with - // explicitly_linked, so flag it here if necessary. - activate_allocator(sess, &mut ret); + // Our allocator/panic runtime may not have been linked above if it wasn't + // explicitly linked, which is the case for any injected dependency. Handle + // that here and activate them. + activate_injected_dep(sess.injected_allocator.get(), &mut ret, + &|cnum| sess.cstore.is_allocator(cnum)); + activate_injected_dep(sess.injected_panic_runtime.get(), &mut ret, + &|cnum| sess.cstore.is_panic_runtime(cnum)); Some(ret) } // Given a list of how to link upstream dependencies so far, ensure that an -// allocator is activated. 
This will not do anything if one was transitively -// included already (e.g. via a dylib or explicitly so). +// injected dependency is activated. This will not do anything if one was +// transitively included already (e.g. via a dylib or explicitly so). // -// If an allocator was not found then we're guaranteed the metadata::creader -// module has injected an allocator dependency (not listed as a required -// dependency) in the session's `injected_allocator` field. If this field is not -// set then this compilation doesn't actually need an allocator and we can also -// skip this step entirely. -fn activate_allocator(sess: &session::Session, list: &mut DependencyList) { - let mut allocator_found = false; +// If an injected dependency was not found then we're guaranteed the +// metadata::creader module has injected that dependency (not listed as +// a required dependency) in one of the session's field. If this field is not +// set then this compilation doesn't actually need the dependency and we can +// also skip this step entirely. +fn activate_injected_dep(injected: Option, + list: &mut DependencyList, + replaces_injected: &Fn(ast::CrateNum) -> bool) { for (i, slot) in list.iter().enumerate() { let cnum = (i + 1) as ast::CrateNum; - if !sess.cstore.is_allocator(cnum) { + if !replaces_injected(cnum) { continue } - if let Linkage::NotLinked = *slot { - continue + if *slot != Linkage::NotLinked { + return } - allocator_found = true; } - if !allocator_found { - if let Some(injected_allocator) = sess.injected_allocator.get() { - let idx = injected_allocator as usize - 1; - assert_eq!(list[idx], Linkage::NotLinked); - list[idx] = Linkage::Static; - } + if let Some(injected) = injected { + let idx = injected as usize - 1; + assert_eq!(list[idx], Linkage::NotLinked); + list[idx] = Linkage::Static; } } @@ -315,21 +322,75 @@ fn verify_ok(sess: &session::Session, list: &[Linkage]) { return } let mut allocator = None; + let mut panic_runtime = None; for (i, linkage) in list.iter().enumerate() { - let cnum = (i + 1) as ast::CrateNum; - if !sess.cstore.is_allocator(cnum) { - continue - } if let Linkage::NotLinked = *linkage { continue } - if let Some(prev_alloc) = allocator { - let prev_name = sess.cstore.crate_name(prev_alloc); - let cur_name = sess.cstore.crate_name(cnum); - sess.err(&format!("cannot link together two \ - allocators: {} and {}", - prev_name, cur_name)); + let cnum = (i + 1) as ast::CrateNum; + if sess.cstore.is_allocator(cnum) { + if let Some(prev) = allocator { + let prev_name = sess.cstore.crate_name(prev); + let cur_name = sess.cstore.crate_name(cnum); + sess.err(&format!("cannot link together two \ + allocators: {} and {}", + prev_name, cur_name)); + } + allocator = Some(cnum); + } + + if sess.cstore.is_panic_runtime(cnum) { + if let Some((prev, _)) = panic_runtime { + let prev_name = sess.cstore.crate_name(prev); + let cur_name = sess.cstore.crate_name(cnum); + sess.err(&format!("cannot link together two \ + panic runtimes: {} and {}", + prev_name, cur_name)); + } + panic_runtime = Some((cnum, sess.cstore.panic_strategy(cnum))); + } + } + + // If we found a panic runtime, then we know by this point that it's the + // only one, but we perform validation here that all the panic strategy + // compilation modes for the whole DAG are valid. + if let Some((cnum, found_strategy)) = panic_runtime { + let desired_strategy = sess.opts.cg.panic.clone(); + + // First up, validate that our selected panic runtime is indeed exactly + // our same strategy. 
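`activate_injected_dep` above generalizes the old allocator-only logic: given the dependency list built so far, it checks whether any already-linked crate fills the injected role and, if none does, links the injected crate statically. A self-contained sketch with simplified types (plain `usize` crate numbers instead of `ast::CrateNum`):

#[derive(Clone, Copy, PartialEq, Debug)]
enum Linkage { NotLinked, Static, Dynamic }

// If an already-linked crate fills the role (allocator or panic runtime),
// leave the list alone; otherwise force the injected crate's slot to Static.
// Crate numbers are 1-based, as in the hunk above.
fn activate_injected_dep(injected: Option<usize>,
                         list: &mut [Linkage],
                         replaces_injected: &dyn Fn(usize) -> bool) {
    for (i, slot) in list.iter().enumerate() {
        let cnum = i + 1;
        if !replaces_injected(cnum) {
            continue;
        }
        if *slot != Linkage::NotLinked {
            return; // something already linked provides it
        }
    }
    if let Some(injected) = injected {
        let idx = injected - 1;
        assert_eq!(list[idx], Linkage::NotLinked);
        list[idx] = Linkage::Static;
    }
}

fn main() {
    // Crate 2 is the injected panic runtime and nothing else provides one.
    let mut list = [Linkage::Dynamic, Linkage::NotLinked, Linkage::Static];
    activate_injected_dep(Some(2), &mut list, &|cnum: usize| cnum == 2);
    assert_eq!(list[1], Linkage::Static);
}
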
+ if found_strategy != desired_strategy { + sess.err(&format!("the linked panic runtime `{}` is \ + not compiled with this crate's \ + panic strategy `{}`", + sess.cstore.crate_name(cnum), + desired_strategy.desc())); + } + + // Next up, verify that all other crates are compatible with this panic + // strategy. If the dep isn't linked, we ignore it, and if our strategy + // is abort then it's compatible with everything. Otherwise all crates' + // panic strategy must match our own. + for (i, linkage) in list.iter().enumerate() { + if let Linkage::NotLinked = *linkage { + continue + } + if desired_strategy == PanicStrategy::Abort { + continue + } + let cnum = (i + 1) as ast::CrateNum; + let found_strategy = sess.cstore.panic_strategy(cnum); + if desired_strategy == found_strategy { + continue + } + + sess.err(&format!("the crate `{}` is compiled with the \ + panic strategy `{}` which is \ + incompatible with this crate's \ + strategy of `{}`", + sess.cstore.crate_name(cnum), + found_strategy.desc(), + desired_strategy.desc())); } - allocator = Some(cnum); } } diff --git a/src/librustc/middle/effect.rs b/src/librustc/middle/effect.rs index ac7a1b8aa0..b62368c2a9 100644 --- a/src/librustc/middle/effect.rs +++ b/src/librustc/middle/effect.rs @@ -51,7 +51,7 @@ fn type_is_unsafe_function(ty: Ty) -> bool { } struct EffectCheckVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, /// Whether we're in an unsafe context. unsafe_context: UnsafeContext, @@ -183,7 +183,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EffectCheckVisitor<'a, 'tcx> { } } -pub fn check_crate(tcx: &TyCtxt) { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let _task = tcx.dep_graph.in_task(DepNode::EffectCheck); let mut visitor = EffectCheckVisitor { diff --git a/src/librustc/middle/expr_use_visitor.rs b/src/librustc/middle/expr_use_visitor.rs index c4d6f10067..4cee8c5d89 100644 --- a/src/librustc/middle/expr_use_visitor.rs +++ b/src/librustc/middle/expr_use_visitor.rs @@ -22,7 +22,7 @@ use self::OverloadedCallType::*; use hir::pat_util; use hir::def::Def; use hir::def_id::{DefId}; -use infer; +use infer::InferCtxt; use middle::mem_categorization as mc; use ty::{self, TyCtxt, adjustment}; @@ -209,8 +209,7 @@ enum OverloadedCallType { } impl OverloadedCallType { - fn from_trait_id(tcx: &TyCtxt, trait_id: DefId) - -> OverloadedCallType { + fn from_trait_id(tcx: TyCtxt, trait_id: DefId) -> OverloadedCallType { for &(maybe_function_trait, overloaded_call_type) in &[ (tcx.lang_items.fn_once_trait(), FnOnceOverloadedCall), (tcx.lang_items.fn_mut_trait(), FnMutOverloadedCall), @@ -227,8 +226,7 @@ impl OverloadedCallType { bug!("overloaded call didn't map to known function trait") } - fn from_method_id(tcx: &TyCtxt, method_id: DefId) - -> OverloadedCallType { + fn from_method_id(tcx: TyCtxt, method_id: DefId) -> OverloadedCallType { let method = tcx.impl_or_trait_item(method_id); OverloadedCallType::from_trait_id(tcx, method.container().id()) } @@ -241,10 +239,9 @@ impl OverloadedCallType { // mem_categorization, it requires a TYPER, which is a type that // supplies types from the tree. After type checking is complete, you // can just use the tcx as the typer. 
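
In the struct and impl changes that follow, ExprUseVisitor no longer keeps a separate `typer` handle next to the categorization context; it stores only the MemCategorizationContext and reaches the inference context through `self.mc.infcx`, so there is a single source of truth. A rough, hypothetical sketch of that shape (the types below are simplified stand-ins, not the compiler's definitions):

    struct InferCtxt { label: &'static str }

    struct MemCategorizationContext<'a> { infcx: &'a InferCtxt }

    struct ExprUseVisitor<'a> { mc: MemCategorizationContext<'a> }

    impl<'a> ExprUseVisitor<'a> {
        fn new(infcx: &'a InferCtxt) -> Self {
            // Only the categorization context is stored; it carries the infcx.
            ExprUseVisitor { mc: MemCategorizationContext { infcx: infcx } }
        }

        // Former `self.typer.foo()` calls become `self.mc.infcx.foo()`.
        fn infcx(&self) -> &'a InferCtxt { self.mc.infcx }
    }

    fn main() {
        let infcx = InferCtxt { label: "shared inference context" };
        let visitor = ExprUseVisitor::new(&infcx);
        println!("{}", visitor.infcx().label);
    }
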
-pub struct ExprUseVisitor<'d, 't, 'a: 't, 'tcx:'a+'d> { - typer: &'t infer::InferCtxt<'a, 'tcx>, - mc: mc::MemCategorizationContext<'t, 'a, 'tcx>, - delegate: &'d mut Delegate<'tcx>, +pub struct ExprUseVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>, + delegate: &'a mut Delegate<'tcx>, } // If the TYPER results in an error, it's because the type check @@ -272,14 +269,14 @@ enum PassArgs { ByRef, } -impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { - pub fn new(delegate: &'d mut (Delegate<'tcx>+'d), - typer: &'t infer::InferCtxt<'a, 'tcx>) - -> ExprUseVisitor<'d,'t,'a,'tcx> where 'tcx:'a+'d +impl<'a, 'gcx, 'tcx> ExprUseVisitor<'a, 'gcx, 'tcx> { + pub fn new(delegate: &'a mut (Delegate<'tcx>+'a), + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self { - let mc: mc::MemCategorizationContext<'t, 'a, 'tcx> = - mc::MemCategorizationContext::new(typer); - ExprUseVisitor { typer: typer, mc: mc, delegate: delegate } + ExprUseVisitor { + mc: mc::MemCategorizationContext::new(infcx), + delegate: delegate + } } pub fn walk_fn(&mut self, @@ -293,7 +290,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { decl: &hir::FnDecl, body: &hir::Block) { for arg in &decl.inputs { - let arg_ty = return_if_err!(self.typer.node_ty(arg.pat.id)); + let arg_ty = return_if_err!(self.mc.infcx.node_ty(arg.pat.id)); let fn_body_scope = self.tcx().region_maps.node_extent(body.id); let arg_cmt = self.mc.cat_rvalue( @@ -306,8 +303,8 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { } } - fn tcx(&self) -> &'t TyCtxt<'tcx> { - self.typer.tcx + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { + self.mc.infcx.tcx } fn delegate_consume(&mut self, @@ -317,7 +314,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { debug!("delegate_consume(consume_id={}, cmt={:?})", consume_id, cmt); - let mode = copy_or_move(self.typer, &cmt, DirectRefMove); + let mode = copy_or_move(self.mc.infcx, &cmt, DirectRefMove); self.delegate.consume(consume_id, consume_span, cmt, mode); } @@ -442,7 +439,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { hir::ExprAddrOf(m, ref base) => { // &base // make sure that the thing we are pointing out stays valid // for the lifetime `scope_r` of the resulting ptr: - let expr_ty = return_if_err!(self.typer.node_ty(expr.id)); + let expr_ty = return_if_err!(self.mc.infcx.node_ty(expr.id)); if let ty::TyRef(&r, _) = expr_ty.sty { let bk = ty::BorrowKind::from_mutbl(m); self.borrow_expr(&base, r, bk, AddrOf); @@ -537,8 +534,8 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { self.consume_expr(&count); } - hir::ExprClosure(..) 
=> { - self.walk_captures(expr) + hir::ExprClosure(_, _, _, fn_decl_span) => { + self.walk_captures(expr, fn_decl_span) } hir::ExprBox(ref base) => { @@ -548,7 +545,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { } fn walk_callee(&mut self, call: &hir::Expr, callee: &hir::Expr) { - let callee_ty = return_if_err!(self.typer.expr_ty_adjusted(callee)); + let callee_ty = return_if_err!(self.mc.infcx.expr_ty_adjusted(callee)); debug!("walk_callee: callee={:?} callee_ty={:?}", callee, callee_ty); let call_scope = self.tcx().region_maps.node_extent(call.id); @@ -559,7 +556,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { ty::TyError => { } _ => { let overloaded_call_type = - match self.typer.node_method_id(ty::MethodCall::expr(call.id)) { + match self.mc.infcx.node_method_id(ty::MethodCall::expr(call.id)) { Some(method_id) => { OverloadedCallType::from_method_id(self.tcx(), method_id) } @@ -615,7 +612,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { match local.init { None => { let delegate = &mut self.delegate; - pat_util::pat_bindings(&self.typer.tcx.def_map, &local.pat, + pat_util::pat_bindings(&self.mc.infcx.tcx.def_map, &local.pat, |_, id, span, _| { delegate.decl_without_init(id, span); }) @@ -707,9 +704,9 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { // consumed or borrowed as part of the automatic adjustment // process. fn walk_adjustment(&mut self, expr: &hir::Expr) { - let typer = self.typer; + let infcx = self.mc.infcx; //NOTE(@jroesch): mixed RefCell borrow causes crash - let adj = typer.adjustments().get(&expr.id).map(|x| x.clone()); + let adj = infcx.adjustments().get(&expr.id).map(|x| x.clone()); if let Some(adjustment) = adj { match adjustment { adjustment::AdjustReifyFnPointer | @@ -739,7 +736,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { for i in 0..autoderefs { let deref_id = ty::MethodCall::autoderef(expr.id, i as u32); - match self.typer.node_method_ty(deref_id) { + match self.mc.infcx.node_method_ty(deref_id) { None => {} Some(method_ty) => { let cmt = return_if_err!(self.mc.cat_expr_autoderefd(expr, i)); @@ -865,7 +862,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { pass_args: PassArgs) -> bool { - if !self.typer.is_method_call(expr.id) { + if !self.mc.infcx.is_method_call(expr.id) { return false; } @@ -941,7 +938,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { PatKind::Ident(hir::BindByRef(_), _, _) => mode.lub(BorrowingMatch), PatKind::Ident(hir::BindByValue(_), _, _) => { - match copy_or_move(self.typer, &cmt_pat, PatBindingMove) { + match copy_or_move(self.mc.infcx, &cmt_pat, PatBindingMove) { Copy => mode.lub(CopyingMatch), Move(_) => mode.lub(MovingMatch), } @@ -967,7 +964,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { pat); let mc = &self.mc; - let typer = self.typer; + let infcx = self.mc.infcx; let def_map = &self.tcx().def_map; let delegate = &mut self.delegate; return_if_err!(mc.cat_pattern(cmt_discr.clone(), pat, |mc, cmt_pat, pat| { @@ -978,7 +975,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { match_mode); // pat_ty: the type of the binding being produced. - let pat_ty = return_if_err!(typer.node_ty(pat.id)); + let pat_ty = return_if_err!(infcx.node_ty(pat.id)); // Each match binding is effectively an assignment to the // binding being produced. 
@@ -1000,7 +997,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { } } PatKind::Ident(hir::BindByValue(_), _, _) => { - let mode = copy_or_move(typer, &cmt_pat, PatBindingMove); + let mode = copy_or_move(infcx, &cmt_pat, PatBindingMove); debug!("walk_pat binding consuming pat"); delegate.consume_pat(pat, cmt_pat, mode); } @@ -1057,7 +1054,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { // the leaves of the pattern tree structure. return_if_err!(mc.cat_pattern(cmt_discr, pat, |mc, cmt_pat, pat| { let def_map = def_map.borrow(); - let tcx = typer.tcx; + let tcx = infcx.tcx; match pat.node { PatKind::TupleStruct(..) | PatKind::Path(..) | PatKind::QPath(..) | @@ -1142,7 +1139,7 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { })); } - fn walk_captures(&mut self, closure_expr: &hir::Expr) { + fn walk_captures(&mut self, closure_expr: &hir::Expr, fn_decl_span: Span) { debug!("walk_captures({:?})", closure_expr); self.tcx().with_freevars(closure_expr.id, |freevars| { @@ -1150,18 +1147,18 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { let id_var = freevar.def.var_id(); let upvar_id = ty::UpvarId { var_id: id_var, closure_expr_id: closure_expr.id }; - let upvar_capture = self.typer.upvar_capture(upvar_id).unwrap(); + let upvar_capture = self.mc.infcx.upvar_capture(upvar_id).unwrap(); let cmt_var = return_if_err!(self.cat_captured_var(closure_expr.id, - closure_expr.span, + fn_decl_span, freevar.def)); match upvar_capture { ty::UpvarCapture::ByValue => { - let mode = copy_or_move(self.typer, &cmt_var, CaptureMove); + let mode = copy_or_move(self.mc.infcx, &cmt_var, CaptureMove); self.delegate.consume(closure_expr.id, freevar.span, cmt_var, mode); } ty::UpvarCapture::ByRef(upvar_borrow) => { self.delegate.borrow(closure_expr.id, - closure_expr.span, + fn_decl_span, cmt_var, upvar_borrow.region, upvar_borrow.kind, @@ -1180,17 +1177,17 @@ impl<'d,'t,'a,'tcx> ExprUseVisitor<'d,'t,'a,'tcx> { // Create the cmt for the variable being borrowed, from the // caller's perspective let var_id = upvar_def.var_id(); - let var_ty = self.typer.node_ty(var_id)?; + let var_ty = self.mc.infcx.node_ty(var_id)?; self.mc.cat_def(closure_id, closure_span, var_ty, upvar_def) } } -fn copy_or_move<'a, 'tcx>(typer: &infer::InferCtxt<'a, 'tcx>, - cmt: &mc::cmt<'tcx>, - move_reason: MoveReason) - -> ConsumeMode +fn copy_or_move<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + cmt: &mc::cmt<'tcx>, + move_reason: MoveReason) + -> ConsumeMode { - if typer.type_moves_by_default(cmt.ty, cmt.span) { + if infcx.type_moves_by_default(cmt.ty, cmt.span) { Move(move_reason) } else { Copy diff --git a/src/librustc/middle/free_region.rs b/src/librustc/middle/free_region.rs index ae0540696c..e4ce897671 100644 --- a/src/librustc/middle/free_region.rs +++ b/src/librustc/middle/free_region.rs @@ -48,17 +48,18 @@ impl FreeRegionMap { } } - pub fn relate_free_regions_from_predicates<'tcx>(&mut self, - _tcx: &TyCtxt<'tcx>, - predicates: &[ty::Predicate<'tcx>]) { + pub fn relate_free_regions_from_predicates(&mut self, + predicates: &[ty::Predicate]) { debug!("relate_free_regions_from_predicates(predicates={:?})", predicates); for predicate in predicates { match *predicate { ty::Predicate::Projection(..) | ty::Predicate::Trait(..) | + ty::Predicate::Rfc1592(..) | ty::Predicate::Equate(..) | ty::Predicate::WellFormed(..) | ty::Predicate::ObjectSafe(..) | + ty::Predicate::ClosureKind(..) | ty::Predicate::TypeOutlives(..) 
=> { // No region bounds here } @@ -120,7 +121,7 @@ impl FreeRegionMap { /// Determines whether one region is a subregion of another. This is intended to run *after /// inference* and sadly the logic is somewhat duplicated with the code in infer.rs. pub fn is_subregion_of(&self, - tcx: &TyCtxt, + tcx: TyCtxt, sub_region: ty::Region, super_region: ty::Region) -> bool { diff --git a/src/librustc/middle/intrinsicck.rs b/src/librustc/middle/intrinsicck.rs index e84be7e456..07e69d85ff 100644 --- a/src/librustc/middle/intrinsicck.rs +++ b/src/librustc/middle/intrinsicck.rs @@ -11,7 +11,7 @@ use dep_graph::DepNode; use hir::def::Def; use hir::def_id::DefId; -use infer::{InferCtxt, new_infer_ctxt}; +use infer::InferCtxt; use traits::ProjectionMode; use ty::{self, Ty, TyCtxt}; use ty::layout::{LayoutError, Pointer, SizeSkeleton}; @@ -22,7 +22,7 @@ use syntax::codemap::Span; use hir::intravisit::{self, Visitor, FnKind}; use hir; -pub fn check_crate(tcx: &TyCtxt) { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut visitor = ItemVisitor { tcx: tcx }; @@ -30,27 +30,26 @@ pub fn check_crate(tcx: &TyCtxt) { } struct ItemVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx> + tcx: TyCtxt<'a, 'tcx, 'tcx> } impl<'a, 'tcx> ItemVisitor<'a, 'tcx> { fn visit_const(&mut self, item_id: ast::NodeId, expr: &hir::Expr) { let param_env = ty::ParameterEnvironment::for_item(self.tcx, item_id); - let infcx = new_infer_ctxt(self.tcx, &self.tcx.tables, - Some(param_env), - ProjectionMode::Any); - let mut visitor = ExprVisitor { - infcx: &infcx - }; - visitor.visit_expr(expr); + self.tcx.infer_ctxt(None, Some(param_env), ProjectionMode::Any).enter(|infcx| { + let mut visitor = ExprVisitor { + infcx: &infcx + }; + visitor.visit_expr(expr); + }); } } -struct ExprVisitor<'a, 'tcx: 'a> { - infcx: &'a InferCtxt<'a, 'tcx> +struct ExprVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + infcx: &'a InferCtxt<'a, 'gcx, 'tcx> } -impl<'a, 'tcx> ExprVisitor<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> ExprVisitor<'a, 'gcx, 'tcx> { fn def_id_is_transmute(&self, def_id: DefId) -> bool { let intrinsic = match self.infcx.tcx.lookup_item_type(def_id).ty.sty { ty::TyFnDef(_, _, ref bfty) => bfty.abi == RustIntrinsic, @@ -59,7 +58,7 @@ impl<'a, 'tcx> ExprVisitor<'a, 'tcx> { intrinsic && self.infcx.tcx.item_name(def_id).as_str() == "transmute" } - fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>, id: ast::NodeId) { + fn check_transmute(&self, span: Span, from: Ty<'gcx>, to: Ty<'gcx>, id: ast::NodeId) { let sk_from = SizeSkeleton::compute(from, self.infcx); let sk_to = SizeSkeleton::compute(to, self.infcx); @@ -85,7 +84,7 @@ impl<'a, 'tcx> ExprVisitor<'a, 'tcx> { } // Try to display a sensible error with as much information as possible. - let skeleton_string = |ty: Ty<'tcx>, sk| { + let skeleton_string = |ty: Ty<'gcx>, sk| { match sk { Ok(SizeSkeleton::Known(size)) => { format!("{} bits", size.bits()) @@ -115,12 +114,12 @@ impl<'a, 'tcx> ExprVisitor<'a, 'tcx> { impl<'a, 'tcx, 'v> Visitor<'v> for ItemVisitor<'a, 'tcx> { // const, static and N in [T; N]. 
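
Both visit_const above and visit_expr just below (like the liveness and expression-use hunks elsewhere in this patch) switch from constructing an inference context directly to the closure-scoped tcx.infer_ctxt(...).enter(|infcx| ...) form. A very rough sketch of that shape, using hypothetical stand-in types (Tcx, InferCtxtBuilder, InferCtxt) rather than rustc's real ones:

    struct InferCtxt { tables_len: usize }

    struct InferCtxtBuilder;

    impl InferCtxtBuilder {
        // All work with the inference context happens inside the closure;
        // the context and its tables are dropped when `enter` returns.
        fn enter<F, R>(self, f: F) -> R
            where F: FnOnce(&InferCtxt) -> R
        {
            let infcx = InferCtxt { tables_len: 0 };
            f(&infcx)
        }
    }

    struct Tcx;

    impl Tcx {
        fn infer_ctxt(&self) -> InferCtxtBuilder { InferCtxtBuilder }
    }

    fn main() {
        let tcx = Tcx;
        let result = tcx.infer_ctxt().enter(|infcx| infcx.tables_len + 1);
        println!("{}", result);
    }

The builder/enter split keeps the inference tables from outliving the scope that created them, which is what lets visitors borrow the context freely inside the closure.
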
fn visit_expr(&mut self, expr: &hir::Expr) { - let infcx = new_infer_ctxt(self.tcx, &self.tcx.tables, - None, ProjectionMode::Any); - let mut visitor = ExprVisitor { - infcx: &infcx - }; - visitor.visit_expr(expr); + self.tcx.infer_ctxt(None, None, ProjectionMode::Any).enter(|infcx| { + let mut visitor = ExprVisitor { + infcx: &infcx + }; + visitor.visit_expr(expr); + }); } fn visit_trait_item(&mut self, item: &hir::TraitItem) { @@ -141,25 +140,20 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ItemVisitor<'a, 'tcx> { fn visit_fn(&mut self, fk: FnKind<'v>, fd: &'v hir::FnDecl, b: &'v hir::Block, s: Span, id: ast::NodeId) { - match fk { - FnKind::ItemFn(..) | FnKind::Method(..) => { - let param_env = ty::ParameterEnvironment::for_item(self.tcx, id); - let infcx = new_infer_ctxt(self.tcx, &self.tcx.tables, - Some(param_env), - ProjectionMode::Any); - let mut visitor = ExprVisitor { - infcx: &infcx - }; - visitor.visit_fn(fk, fd, b, s, id); - } - FnKind::Closure(..) => { - span_bug!(s, "intrinsicck: closure outside of function") - } + if let FnKind::Closure(..) = fk { + span_bug!(s, "intrinsicck: closure outside of function") } + let param_env = ty::ParameterEnvironment::for_item(self.tcx, id); + self.tcx.infer_ctxt(None, Some(param_env), ProjectionMode::Any).enter(|infcx| { + let mut visitor = ExprVisitor { + infcx: &infcx + }; + visitor.visit_fn(fk, fd, b, s, id); + }); } } -impl<'a, 'tcx, 'v> Visitor<'v> for ExprVisitor<'a, 'tcx> { +impl<'a, 'gcx, 'tcx, 'v> Visitor<'v> for ExprVisitor<'a, 'gcx, 'tcx> { fn visit_expr(&mut self, expr: &hir::Expr) { if let hir::ExprPath(..) = expr.node { match self.infcx.tcx.resolve_expr(expr) { diff --git a/src/librustc/middle/lang_items.rs b/src/librustc/middle/lang_items.rs index 86531ced8d..853477ac97 100644 --- a/src/librustc/middle/lang_items.rs +++ b/src/librustc/middle/lang_items.rs @@ -24,7 +24,6 @@ pub use self::LangItem::*; use dep_graph::DepNode; use hir::map as hir_map; use session::Session; -use middle::cstore::CrateStore; use hir::def_id::DefId; use ty; use middle::weak_lang_items; @@ -189,13 +188,19 @@ impl<'a, 'tcx> LanguageItemCollector<'a, 'tcx> { match self.items.items[item_index] { Some(original_def_id) if original_def_id != item_def_id => { let cstore = &self.session.cstore; - let span = self.ast_map.span_if_local(item_def_id) - .expect("we should have found local duplicate earlier"); - let mut err = struct_span_err!(self.session, - span, - E0152, - "duplicate lang item found: `{}`.", - LanguageItems::item_name(item_index)); + let name = LanguageItems::item_name(item_index); + let mut err = match self.ast_map.span_if_local(item_def_id) { + Some(span) => struct_span_err!( + self.session, + span, + E0152, + "duplicate lang item found: `{}`.", + name), + None => self.session.struct_err(&format!( + "duplicate lang item in crate `{}`: `{}`.", + cstore.crate_name(item_def_id.krate), + name)), + }; if let Some(span) = self.ast_map.span_if_local(original_def_id) { span_note!(&mut err, span, "first defined here."); diff --git a/src/librustc/middle/liveness.rs b/src/librustc/middle/liveness.rs index 35991ae56c..be8caeb436 100644 --- a/src/librustc/middle/liveness.rs +++ b/src/librustc/middle/liveness.rs @@ -114,7 +114,6 @@ use hir::def::*; use hir::pat_util; use ty::{self, TyCtxt, ParameterEnvironment}; use traits::{self, ProjectionMode}; -use infer; use ty::subst::Subst; use lint; use util::nodemap::NodeMap; @@ -125,7 +124,7 @@ use std::io; use std::rc::Rc; use syntax::ast::{self, NodeId}; use syntax::codemap::{BytePos, original_sp, Span}; -use 
syntax::parse::token::special_idents; +use syntax::parse::token::keywords; use syntax::ptr::P; use hir::Expr; @@ -169,8 +168,8 @@ enum LiveNodeKind { ExitNode } -fn live_node_kind_to_string(lnk: LiveNodeKind, cx: &TyCtxt) -> String { - let cm = cx.sess.codemap(); +fn live_node_kind_to_string(lnk: LiveNodeKind, tcx: TyCtxt) -> String { + let cm = tcx.sess.codemap(); match lnk { FreeVarNode(s) => { format!("Free var node [{}]", cm.span_to_string(s)) @@ -195,7 +194,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for IrMaps<'a, 'tcx> { fn visit_arm(&mut self, a: &hir::Arm) { visit_arm(self, a); } } -pub fn check_crate(tcx: &TyCtxt) { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let _task = tcx.dep_graph.in_task(DepNode::Liveness); tcx.map.krate().visit_all_items(&mut IrMaps::new(tcx)); tcx.sess.abort_if_errors(); @@ -263,7 +262,7 @@ enum VarKind { } struct IrMaps<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, num_live_nodes: usize, num_vars: usize, @@ -275,7 +274,7 @@ struct IrMaps<'a, 'tcx: 'a> { } impl<'a, 'tcx> IrMaps<'a, 'tcx> { - fn new(tcx: &'a TyCtxt<'tcx>) -> IrMaps<'a, 'tcx> { + fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> IrMaps<'a, 'tcx> { IrMaps { tcx: tcx, num_live_nodes: 0, @@ -948,7 +947,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { self.propagate_through_expr(&e, succ) } - hir::ExprClosure(_, _, ref blk) => { + hir::ExprClosure(_, _, ref blk, _) => { debug!("{} is an ExprClosure", expr_to_string(expr)); @@ -1051,7 +1050,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { hir::ExprBreak(opt_label) => { // Find which label this break jumps to - let sc = self.find_loop_scope(opt_label.map(|l| l.node.name), expr.id, expr.span); + let sc = self.find_loop_scope(opt_label.map(|l| l.node), expr.id, expr.span); // Now that we know the label we're going to, // look it up in the break loop nodes table @@ -1064,7 +1063,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { hir::ExprAgain(opt_label) => { // Find which label this expr continues to - let sc = self.find_loop_scope(opt_label.map(|l| l.node.name), expr.id, expr.span); + let sc = self.find_loop_scope(opt_label.map(|l| l.node), expr.id, expr.span); // Now that we know the label we're going to, // look it up in the continue loop nodes table @@ -1462,7 +1461,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { fn fn_ret(&self, id: NodeId) -> ty::PolyFnOutput<'tcx> { let fn_ty = self.ir.tcx.node_id_to_type(id); match fn_ty.sty { - ty::TyClosure(closure_def_id, ref substs) => + ty::TyClosure(closure_def_id, substs) => self.ir.tcx.closure_type(closure_def_id, substs).sig.output(), _ => fn_ty.fn_ret() } @@ -1486,20 +1485,16 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { ty::FnConverging(t_ret) if self.live_on_entry(entry_ln, self.s.no_ret_var).is_some() => { - let param_env = ParameterEnvironment::for_item(&self.ir.tcx, id); - let t_ret_subst = t_ret.subst(&self.ir.tcx, ¶m_env.free_substs); - let infcx = infer::new_infer_ctxt(&self.ir.tcx, - &self.ir.tcx.tables, - Some(param_env), - ProjectionMode::Any); - let cause = traits::ObligationCause::dummy(); - let norm = traits::fully_normalize(&infcx, - cause, - &t_ret_subst); - - if norm.unwrap().is_nil() { - // for nil return types, it is ok to not return a value expl. 
- } else { + let param_env = ParameterEnvironment::for_item(self.ir.tcx, id); + let t_ret_subst = t_ret.subst(self.ir.tcx, ¶m_env.free_substs); + let is_nil = self.ir.tcx.infer_ctxt(None, Some(param_env), + ProjectionMode::Any).enter(|infcx| { + let cause = traits::ObligationCause::dummy(); + traits::fully_normalize(&infcx, cause, &t_ret_subst).unwrap().is_nil() + }); + + // for nil return types, it is ok to not return a value expl. + if !is_nil { let ends_with_stmt = match body.expr { None if !body.stmts.is_empty() => match body.stmts.last().unwrap().node { @@ -1578,7 +1573,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> { let var = self.variable(p_id, sp); // Ignore unused self. let name = path1.node; - if name != special_idents::self_.name { + if name != keywords::SelfValue.name() { if !self.warn_about_unused(sp, p_id, entry_ln, var) { if self.live_on_entry(entry_ln, var).is_none() { self.report_dead_assign(p_id, sp, var, true); diff --git a/src/librustc/middle/mem_categorization.rs b/src/librustc/middle/mem_categorization.rs index 2f77552c38..3999b02425 100644 --- a/src/librustc/middle/mem_categorization.rs +++ b/src/librustc/middle/mem_categorization.rs @@ -73,7 +73,7 @@ use self::Aliasability::*; use hir::def_id::DefId; use hir::map as ast_map; -use infer; +use infer::InferCtxt; use middle::const_qualif::ConstQualif; use hir::def::Def; use ty::adjustment; @@ -256,8 +256,8 @@ impl ast_node for hir::Pat { } #[derive(Copy, Clone)] -pub struct MemCategorizationContext<'t, 'a: 't, 'tcx : 'a> { - pub typer: &'t infer::InferCtxt<'a, 'tcx>, +pub struct MemCategorizationContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + pub infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, } pub type McResult = Result; @@ -302,7 +302,7 @@ impl MutabilityCategory { ret } - fn from_local(tcx: &TyCtxt, id: ast::NodeId) -> MutabilityCategory { + fn from_local(tcx: TyCtxt, id: ast::NodeId) -> MutabilityCategory { let ret = match tcx.map.get(id) { ast_map::NodeLocal(p) => match p.node { PatKind::Ident(bind_mode, _, _) => { @@ -358,17 +358,18 @@ impl MutabilityCategory { } } -impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { - pub fn new(typer: &'t infer::InferCtxt<'a, 'tcx>) -> MemCategorizationContext<'t, 'a, 'tcx> { - MemCategorizationContext { typer: typer } +impl<'a, 'gcx, 'tcx> MemCategorizationContext<'a, 'gcx, 'tcx> { + pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) + -> MemCategorizationContext<'a, 'gcx, 'tcx> { + MemCategorizationContext { infcx: infcx } } - fn tcx(&self) -> &'a TyCtxt<'tcx> { - self.typer.tcx + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { + self.infcx.tcx } fn expr_ty(&self, expr: &hir::Expr) -> McResult> { - match self.typer.node_ty(expr.id) { + match self.infcx.node_ty(expr.id) { Ok(t) => Ok(t), Err(()) => { debug!("expr_ty({:?}) yielded Err", expr); @@ -381,16 +382,16 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { let unadjusted_ty = self.expr_ty(expr)?; Ok(unadjusted_ty.adjust( self.tcx(), expr.span, expr.id, - self.typer.adjustments().get(&expr.id), - |method_call| self.typer.node_method_ty(method_call))) + self.infcx.adjustments().get(&expr.id), + |method_call| self.infcx.node_method_ty(method_call))) } fn node_ty(&self, id: ast::NodeId) -> McResult> { - self.typer.node_ty(id) + self.infcx.node_ty(id) } fn pat_ty(&self, pat: &hir::Pat) -> McResult> { - let base_ty = self.typer.node_ty(pat.id)?; + let base_ty = self.infcx.node_ty(pat.id)?; // FIXME (Issue #18207): This code detects whether we are // looking at a `ref x`, and if so, figures out what the type // *being borrowed* is. 
But ideally we would put in a more @@ -413,7 +414,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { } pub fn cat_expr(&self, expr: &hir::Expr) -> McResult> { - match self.typer.adjustments().get(&expr.id) { + match self.infcx.adjustments().get(&expr.id) { None => { // No adjustments. self.cat_expr_unadjusted(expr) @@ -485,7 +486,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { hir::ExprIndex(ref base, _) => { let method_call = ty::MethodCall::expr(expr.id()); let context = InteriorOffsetKind::Index; - match self.typer.node_method_ty(method_call) { + match self.infcx.node_method_ty(method_call) { Some(method_ty) => { // If this is an index implemented by a method call, then it // will include an implicit deref of the result. @@ -578,7 +579,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { let ty = self.node_ty(fn_node_id)?; match ty.sty { ty::TyClosure(closure_id, _) => { - match self.typer.closure_kind(closure_id) { + match self.infcx.closure_kind(closure_id) { Some(kind) => { self.cat_upvar(id, span, var_id, fn_node_id, kind) } @@ -687,7 +688,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { // for that. let upvar_id = ty::UpvarId { var_id: var_id, closure_expr_id: fn_node_id }; - let upvar_capture = self.typer.upvar_capture(upvar_id).unwrap(); + let upvar_capture = self.infcx.upvar_capture(upvar_id).unwrap(); let cmt_result = match upvar_capture { ty::UpvarCapture::ByValue => { cmt_result @@ -728,7 +729,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { }; match fn_expr.node { - hir::ExprClosure(_, _, ref body) => body.id, + hir::ExprClosure(_, _, ref body, _) => body.id, _ => bug!() } }; @@ -785,7 +786,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { /// Returns the lifetime of a temporary created by expr with id `id`. /// This could be `'static` if `id` is part of a constant expression. pub fn temporary_scope(&self, id: ast::NodeId) -> ty::Region { - match self.typer.temporary_scope(id) { + match self.infcx.temporary_scope(id) { Some(scope) => ty::ReScope(scope), None => ty::ReStatic } @@ -882,7 +883,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { expr_id: node.id(), autoderef: deref_cnt as u32 }; - let method_ty = self.typer.node_method_ty(method_call); + let method_ty = self.infcx.node_method_ty(method_call); debug!("cat_deref: method_call={:?} method_ty={:?}", method_call, method_ty.map(|ty| ty)); @@ -977,7 +978,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { //! 
- `base_cmt`: the cmt of `elt` let method_call = ty::MethodCall::expr(elt.id()); - let method_ty = self.typer.node_method_ty(method_call); + let method_ty = self.infcx.node_method_ty(method_call); let element_ty = match method_ty { Some(method_ty) => { @@ -1071,9 +1072,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { slice_pat: &hir::Pat) -> McResult<(cmt<'tcx>, hir::Mutability, ty::Region)> { let slice_ty = self.node_ty(slice_pat.id)?; - let (slice_mutbl, slice_r) = vec_slice_info(self.tcx(), - slice_pat, - slice_ty); + let (slice_mutbl, slice_r) = vec_slice_info(slice_pat, slice_ty); let context = InteriorOffsetKind::Pattern; let cmt_vec = self.deref_vec(slice_pat, vec_cmt, context)?; let cmt_slice = self.cat_index(slice_pat, cmt_vec, context)?; @@ -1082,14 +1081,12 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { /// In a pattern like [a, b, ..c], normally `c` has slice type, but if you have [a, b, /// ..ref c], then the type of `ref c` will be `&&[]`, so to extract the slice details we /// have to recurse through rptrs. - fn vec_slice_info(tcx: &TyCtxt, - pat: &hir::Pat, - slice_ty: Ty) + fn vec_slice_info(pat: &hir::Pat, slice_ty: Ty) -> (hir::Mutability, ty::Region) { match slice_ty.sty { ty::TyRef(r, ref mt) => match mt.ty.sty { ty::TySlice(_) => (mt.mutbl, *r), - _ => vec_slice_info(tcx, pat, mt.ty), + _ => vec_slice_info(pat, mt.ty), }, _ => { @@ -1137,7 +1134,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { } pub fn cat_pattern(&self, cmt: cmt<'tcx>, pat: &hir::Pat, mut op: F) -> McResult<()> - where F: FnMut(&MemCategorizationContext<'t, 'a, 'tcx>, cmt<'tcx>, &hir::Pat), + where F: FnMut(&MemCategorizationContext<'a, 'gcx, 'tcx>, cmt<'tcx>, &hir::Pat), { self.cat_pattern_(cmt, pat, &mut op) } @@ -1145,7 +1142,7 @@ impl<'t, 'a,'tcx> MemCategorizationContext<'t, 'a, 'tcx> { // FIXME(#19596) This is a workaround, but there should be a better way to do this fn cat_pattern_(&self, cmt: cmt<'tcx>, pat: &hir::Pat, op: &mut F) -> McResult<()> - where F : FnMut(&MemCategorizationContext<'t, 'a, 'tcx>, cmt<'tcx>, &hir::Pat), + where F : FnMut(&MemCategorizationContext<'a, 'gcx, 'tcx>, cmt<'tcx>, &hir::Pat), { // Here, `cmt` is the categorization for the value being // matched and pat is the pattern it is being matched against. @@ -1389,8 +1386,7 @@ impl<'tcx> cmt_<'tcx> { } /// Returns `FreelyAliasable(_)` if this lvalue represents a freely aliasable pointer type. - pub fn freely_aliasable(&self, ctxt: &TyCtxt<'tcx>) - -> Aliasability { + pub fn freely_aliasable(&self) -> Aliasability { // Maybe non-obvious: copied upvars can only be considered // non-aliasable in once closures, since any other kind can be // aliased and eventually recused. 
@@ -1403,11 +1399,11 @@ impl<'tcx> cmt_<'tcx> { Categorization::Downcast(ref b, _) | Categorization::Interior(ref b, _) => { // Aliasability depends on base cmt - b.freely_aliasable(ctxt) + b.freely_aliasable() } Categorization::Deref(ref b, _, Unique) => { - let sub = b.freely_aliasable(ctxt); + let sub = b.freely_aliasable(); if b.mutbl.is_mutable() { // Aliasability depends on base cmt alone sub @@ -1464,7 +1460,7 @@ impl<'tcx> cmt_<'tcx> { } - pub fn descriptive_string(&self, tcx: &TyCtxt) -> String { + pub fn descriptive_string(&self, tcx: TyCtxt) -> String { match self.cat { Categorization::StaticItem => { "static item".to_string() diff --git a/src/librustc/middle/privacy.rs b/src/librustc/middle/privacy.rs index c1dc727449..478f662d09 100644 --- a/src/librustc/middle/privacy.rs +++ b/src/librustc/middle/privacy.rs @@ -15,10 +15,11 @@ use util::nodemap::{DefIdSet, FnvHashMap}; use std::hash::Hash; +use std::fmt; use syntax::ast::NodeId; // Accessibility levels, sorted in ascending order -#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord)] +#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)] pub enum AccessLevel { // Exported items + items participating in various kinds of public interfaces, // but not directly nameable. For example, if function `fn f() -> T {...}` is @@ -56,6 +57,12 @@ impl Default for AccessLevels { } } +impl fmt::Debug for AccessLevels { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + fmt::Debug::fmt(&self.map, f) + } +} + /// A set containing all exported definitions from external crates. /// The set does not contain any entries from local crates. pub type ExternalExports = DefIdSet; diff --git a/src/librustc/middle/reachable.rs b/src/librustc/middle/reachable.rs index 63bccc2d02..55d75ace08 100644 --- a/src/librustc/middle/reachable.rs +++ b/src/librustc/middle/reachable.rs @@ -55,9 +55,10 @@ fn item_might_be_inlined(item: &hir::Item) -> bool { } } -fn method_might_be_inlined(tcx: &TyCtxt, sig: &hir::MethodSig, - impl_item: &hir::ImplItem, - impl_src: DefId) -> bool { +fn method_might_be_inlined<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + sig: &hir::MethodSig, + impl_item: &hir::ImplItem, + impl_src: DefId) -> bool { if attr::requests_inline(&impl_item.attrs) || generics_require_inlining(&sig.generics) { return true @@ -77,7 +78,7 @@ fn method_might_be_inlined(tcx: &TyCtxt, sig: &hir::MethodSig, // Information needed while computing reachability. struct ReachableContext<'a, 'tcx: 'a> { // The type context. - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, // The set of items which must be exported in the linkage sense. reachable_symbols: NodeSet, // A worklist of item IDs. Each item ID in this worklist will be inlined @@ -142,9 +143,9 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ReachableContext<'a, 'tcx> { impl<'a, 'tcx> ReachableContext<'a, 'tcx> { // Creates a new reachability computation context. 
- fn new(tcx: &'a TyCtxt<'tcx>) -> ReachableContext<'a, 'tcx> { + fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ReachableContext<'a, 'tcx> { let any_library = tcx.sess.crate_types.borrow().iter().any(|ty| { - *ty != config::CrateTypeExecutable + *ty == config::CrateTypeRlib || *ty == config::CrateTypeDylib }); ReachableContext { tcx: tcx, @@ -344,9 +345,9 @@ impl<'a, 'v> Visitor<'v> for CollectPrivateImplItemsVisitor<'a> { } } -pub fn find_reachable(tcx: &TyCtxt, - access_levels: &privacy::AccessLevels) - -> NodeSet { +pub fn find_reachable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + access_levels: &privacy::AccessLevels) + -> NodeSet { let _task = tcx.dep_graph.in_task(DepNode::Reachability); let mut reachable_context = ReachableContext::new(tcx); diff --git a/src/librustc/middle/region.rs b/src/librustc/middle/region.rs index de34991725..56b4036b7d 100644 --- a/src/librustc/middle/region.rs +++ b/src/librustc/middle/region.rs @@ -389,7 +389,7 @@ impl RegionMaps { // but this isn't the only place } let idx = CodeExtent(self.code_extents.borrow().len() as u32); - info!("CodeExtent({}) = {:?} [parent={}]", idx.0, e, parent.0); + debug!("CodeExtent({}) = {:?} [parent={}]", idx.0, e, parent.0); self.code_extents.borrow_mut().push(e); self.scope_map.borrow_mut().push(parent); *v.insert(idx) diff --git a/src/librustc/middle/resolve_lifetime.rs b/src/librustc/middle/resolve_lifetime.rs index 585b65b9f5..2200d72c88 100644 --- a/src/librustc/middle/resolve_lifetime.rs +++ b/src/librustc/middle/resolve_lifetime.rs @@ -29,7 +29,7 @@ use std::fmt; use std::mem::replace; use syntax::ast; use syntax::codemap::Span; -use syntax::parse::token::special_idents; +use syntax::parse::token::keywords; use util::nodemap::NodeMap; use hir; @@ -193,7 +193,12 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> { }) } FnKind::Closure(_) => { - self.add_scope_and_walk_fn(fk, fd, b, s, fn_id) + // Closures have their own set of labels, save labels just + // like for foreign items above. 
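
As the new comment says, the closure case in the lines that follow snapshots labels_in_fn, walks the closure, and restores the saved vector afterwards, so labels defined inside a closure neither leak out of it nor collide with the enclosing function's labels. A tiny, self-contained sketch of that save/restore idiom (LabelScope and walk_closure are hypothetical names):

    use std::mem::replace;

    struct LabelScope { labels_in_fn: Vec<String> }

    impl LabelScope {
        fn walk_closure(&mut self) {
            // Give the closure a fresh label scope.
            let saved = replace(&mut self.labels_in_fn, vec![]);
            self.labels_in_fn.push("'inner".to_string());
            // ... walk the closure body with only `'inner` visible ...
            self.labels_in_fn = saved; // restore the outer function's labels
        }
    }

    fn main() {
        let mut scope = LabelScope { labels_in_fn: vec!["'outer".to_string()] };
        scope.walk_closure();
        assert_eq!(scope.labels_in_fn, vec!["'outer".to_string()]);
    }
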
+ let saved = replace(&mut self.labels_in_fn, vec![]); + let result = self.add_scope_and_walk_fn(fk, fd, b, s, fn_id); + replace(&mut self.labels_in_fn, saved); + result } } } @@ -245,7 +250,7 @@ impl<'a, 'v> Visitor<'v> for LifetimeContext<'a> { } fn visit_lifetime(&mut self, lifetime_ref: &hir::Lifetime) { - if lifetime_ref.name == special_idents::static_lifetime.name { + if lifetime_ref.name == keywords::StaticLifetime.name() { self.insert_lifetime(lifetime_ref, DefStaticRegion); return; } @@ -428,7 +433,7 @@ fn extract_labels<'v, 'a>(ctxt: &mut LifetimeContext<'a>, b: &'v hir::Block) { fn expression_label(ex: &hir::Expr) -> Option { match ex.node { hir::ExprWhile(_, _, Some(label)) | - hir::ExprLoop(_, Some(label)) => Some(label.unhygienic_name), + hir::ExprLoop(_, Some(label)) => Some(label.unhygienize()), _ => None, } } @@ -478,7 +483,6 @@ impl<'a> LifetimeContext<'a> { FnKind::Method(_, sig, _, _) => { intravisit::walk_fn_decl(self, fd); self.visit_generics(&sig.generics); - self.visit_explicit_self(&sig.explicit_self); } FnKind::Closure(_) => { intravisit::walk_fn_decl(self, fd); @@ -672,9 +676,8 @@ impl<'a> LifetimeContext<'a> { for i in 0..lifetimes.len() { let lifetime_i = &lifetimes[i]; - let special_idents = [special_idents::static_lifetime]; for lifetime in lifetimes { - if special_idents.iter().any(|&i| i.name == lifetime.lifetime.name) { + if lifetime.lifetime.name == keywords::StaticLifetime.name() { span_err!(self.sess, lifetime.lifetime.span, E0262, "invalid lifetime parameter name: `{}`", lifetime.lifetime.name); } diff --git a/src/librustc/middle/stability.rs b/src/librustc/middle/stability.rs index 6c7c7f160f..c2db6de037 100644 --- a/src/librustc/middle/stability.rs +++ b/src/librustc/middle/stability.rs @@ -17,7 +17,7 @@ use dep_graph::DepNode; use hir::map as hir_map; use session::Session; use lint; -use middle::cstore::{CrateStore, LOCAL_CRATE}; +use middle::cstore::LOCAL_CRATE; use hir::def::Def; use hir::def_id::{CRATE_DEF_INDEX, DefId}; use ty::{self, TyCtxt}; @@ -72,7 +72,7 @@ pub struct Index<'tcx> { // A private tree-walker for producing an Index. struct Annotator<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, index: &'a mut Index<'tcx>, parent_stab: Option<&'tcx Stability>, parent_depr: Option, @@ -203,7 +203,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Annotator<'a, 'tcx> { /// nested items in the context of the outer item, so enable /// deep-walking. fn visit_nested_item(&mut self, item: hir::ItemId) { - self.visit_item(self.tcx.map.expect_item(item.id)) + let tcx = self.tcx; + self.visit_item(tcx.map.expect_item(item.id)) } fn visit_item(&mut self, i: &Item) { @@ -277,9 +278,9 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Annotator<'a, 'tcx> { } } -impl<'tcx> Index<'tcx> { +impl<'a, 'tcx> Index<'tcx> { /// Construct the stability index for a crate being compiled. - pub fn build(&mut self, tcx: &TyCtxt<'tcx>, access_levels: &AccessLevels) { + pub fn build(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, access_levels: &AccessLevels) { let _task = tcx.dep_graph.in_task(DepNode::StabilityIndex); let krate = tcx.map.krate(); let mut annotator = Annotator { @@ -319,8 +320,8 @@ impl<'tcx> Index<'tcx> { /// Cross-references the feature names of unstable APIs with enabled /// features and possibly prints errors. Returns a list of all /// features used. 
-pub fn check_unstable_api_usage(tcx: &TyCtxt) - -> FnvHashMap { +pub fn check_unstable_api_usage<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> FnvHashMap { let _task = tcx.dep_graph.in_task(DepNode::StabilityCheck); let ref active_lib_features = tcx.sess.features.borrow().declared_lib_features; @@ -339,7 +340,7 @@ pub fn check_unstable_api_usage(tcx: &TyCtxt) } struct Checker<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, active_features: FnvHashSet, used_features: FnvHashMap, // Within a block where feature gate checking can be skipped. @@ -411,7 +412,8 @@ impl<'a, 'v, 'tcx> Visitor<'v> for Checker<'a, 'tcx> { /// nested items in the context of the outer item, so enable /// deep-walking. fn visit_nested_item(&mut self, item: hir::ItemId) { - self.visit_item(self.tcx.map.expect_item(item.id)) + let tcx = self.tcx; + self.visit_item(tcx.map.expect_item(item.id)) } fn visit_item(&mut self, item: &hir::Item) { @@ -466,8 +468,12 @@ impl<'a, 'v, 'tcx> Visitor<'v> for Checker<'a, 'tcx> { } /// Helper for discovering nodes to check for stability -pub fn check_item(tcx: &TyCtxt, item: &hir::Item, warn_about_defns: bool, - cb: &mut FnMut(DefId, Span, &Option<&Stability>, &Option)) { +pub fn check_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + item: &hir::Item, + warn_about_defns: bool, + cb: &mut FnMut(DefId, Span, + &Option<&Stability>, + &Option)) { match item.node { hir::ItemExternCrate(_) => { // compiler-generated `extern crate` items have a dummy span. @@ -503,8 +509,10 @@ pub fn check_item(tcx: &TyCtxt, item: &hir::Item, warn_about_defns: bool, } /// Helper for discovering nodes to check for stability -pub fn check_expr(tcx: &TyCtxt, e: &hir::Expr, - cb: &mut FnMut(DefId, Span, &Option<&Stability>, &Option)) { +pub fn check_expr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, e: &hir::Expr, + cb: &mut FnMut(DefId, Span, + &Option<&Stability>, + &Option)) { let span; let id = match e.node { hir::ExprMethodCall(i, _, _) => { @@ -564,8 +572,11 @@ pub fn check_expr(tcx: &TyCtxt, e: &hir::Expr, maybe_do_stability_check(tcx, id, span, cb); } -pub fn check_path(tcx: &TyCtxt, path: &hir::Path, id: ast::NodeId, - cb: &mut FnMut(DefId, Span, &Option<&Stability>, &Option)) { +pub fn check_path<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + path: &hir::Path, id: ast::NodeId, + cb: &mut FnMut(DefId, Span, + &Option<&Stability>, + &Option)) { match tcx.def_map.borrow().get(&id).map(|d| d.full_def()) { Some(Def::PrimTy(..)) => {} Some(Def::SelfTy(..)) => {} @@ -576,8 +587,11 @@ pub fn check_path(tcx: &TyCtxt, path: &hir::Path, id: ast::NodeId, } } -pub fn check_path_list_item(tcx: &TyCtxt, item: &hir::PathListItem, - cb: &mut FnMut(DefId, Span, &Option<&Stability>, &Option)) { +pub fn check_path_list_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + item: &hir::PathListItem, + cb: &mut FnMut(DefId, Span, + &Option<&Stability>, + &Option)) { match tcx.def_map.borrow().get(&item.node.id()).map(|d| d.full_def()) { Some(Def::PrimTy(..)) => {} Some(def) => { @@ -587,8 +601,10 @@ pub fn check_path_list_item(tcx: &TyCtxt, item: &hir::PathListItem, } } -pub fn check_pat(tcx: &TyCtxt, pat: &hir::Pat, - cb: &mut FnMut(DefId, Span, &Option<&Stability>, &Option)) { +pub fn check_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, pat: &hir::Pat, + cb: &mut FnMut(DefId, Span, + &Option<&Stability>, + &Option)) { debug!("check_pat(pat = {:?})", pat); if is_internal(tcx, pat.span) { return; } @@ -616,29 +632,31 @@ pub fn check_pat(tcx: &TyCtxt, pat: &hir::Pat, } } -fn maybe_do_stability_check(tcx: &TyCtxt, id: DefId, span: 
Span, - cb: &mut FnMut(DefId, Span, - &Option<&Stability>, &Option)) { +fn maybe_do_stability_check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: DefId, span: Span, + cb: &mut FnMut(DefId, Span, + &Option<&Stability>, + &Option)) { if is_internal(tcx, span) { debug!("maybe_do_stability_check: \ skipping span={:?} since it is internal", span); return; } let (stability, deprecation) = if is_staged_api(tcx, id) { - (lookup_stability(tcx, id), None) + (tcx.lookup_stability(id), None) } else { - (None, lookup_deprecation(tcx, id)) + (None, tcx.lookup_deprecation(id)) }; debug!("maybe_do_stability_check: \ inspecting id={:?} span={:?} of stability={:?}", id, span, stability); cb(id, span, &stability, &deprecation); } -fn is_internal(tcx: &TyCtxt, span: Span) -> bool { +fn is_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, span: Span) -> bool { tcx.sess.codemap().span_allows_unstable(span) } -fn is_staged_api(tcx: &TyCtxt, id: DefId) -> bool { +fn is_staged_api<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefId) -> bool { match tcx.trait_item_of_item(id) { Some(ty::MethodTraitItemId(trait_method_id)) if trait_method_id != id => { @@ -651,43 +669,45 @@ fn is_staged_api(tcx: &TyCtxt, id: DefId) -> bool { } } -/// Lookup the stability for a node, loading external crate -/// metadata as necessary. -pub fn lookup_stability<'tcx>(tcx: &TyCtxt<'tcx>, id: DefId) -> Option<&'tcx Stability> { - if let Some(st) = tcx.stability.borrow().stab_map.get(&id) { - return *st; +impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { + /// Lookup the stability for a node, loading external crate + /// metadata as necessary. + pub fn lookup_stability(self, id: DefId) -> Option<&'tcx Stability> { + if let Some(st) = self.stability.borrow().stab_map.get(&id) { + return *st; + } + + let st = self.lookup_stability_uncached(id); + self.stability.borrow_mut().stab_map.insert(id, st); + st } - let st = lookup_stability_uncached(tcx, id); - tcx.stability.borrow_mut().stab_map.insert(id, st); - st -} + pub fn lookup_deprecation(self, id: DefId) -> Option { + if let Some(depr) = self.stability.borrow().depr_map.get(&id) { + return depr.clone(); + } -pub fn lookup_deprecation<'tcx>(tcx: &TyCtxt<'tcx>, id: DefId) -> Option { - if let Some(depr) = tcx.stability.borrow().depr_map.get(&id) { - return depr.clone(); + let depr = self.lookup_deprecation_uncached(id); + self.stability.borrow_mut().depr_map.insert(id, depr.clone()); + depr } - let depr = lookup_deprecation_uncached(tcx, id); - tcx.stability.borrow_mut().depr_map.insert(id, depr.clone()); - depr -} - -fn lookup_stability_uncached<'tcx>(tcx: &TyCtxt<'tcx>, id: DefId) -> Option<&'tcx Stability> { - debug!("lookup(id={:?})", id); - if id.is_local() { - None // The stability cache is filled partially lazily - } else { - tcx.sess.cstore.stability(id).map(|st| tcx.intern_stability(st)) + fn lookup_stability_uncached(self, id: DefId) -> Option<&'tcx Stability> { + debug!("lookup(id={:?})", id); + if id.is_local() { + None // The stability cache is filled partially lazily + } else { + self.sess.cstore.stability(id).map(|st| self.intern_stability(st)) + } } -} -fn lookup_deprecation_uncached<'tcx>(tcx: &TyCtxt<'tcx>, id: DefId) -> Option { - debug!("lookup(id={:?})", id); - if id.is_local() { - None // The stability cache is filled partially lazily - } else { - tcx.sess.cstore.deprecation(id) + fn lookup_deprecation_uncached(self, id: DefId) -> Option { + debug!("lookup(id={:?})", id); + if id.is_local() { + None // The stability cache is filled partially lazily + } else { + 
self.sess.cstore.deprecation(id) + } } } diff --git a/src/librustc/middle/weak_lang_items.rs b/src/librustc/middle/weak_lang_items.rs index 74bb41785c..3258876849 100644 --- a/src/librustc/middle/weak_lang_items.rs +++ b/src/librustc/middle/weak_lang_items.rs @@ -10,9 +10,8 @@ //! Validity checking for weak lang items -use session::config; +use session::config::{self, PanicStrategy}; use session::Session; -use middle::cstore::CrateStore; use middle::lang_items; use syntax::ast; @@ -71,12 +70,15 @@ fn verify(sess: &Session, items: &lang_items::LanguageItems) { let needs_check = sess.crate_types.borrow().iter().any(|kind| { match *kind { config::CrateTypeDylib | + config::CrateTypeCdylib | config::CrateTypeExecutable | config::CrateTypeStaticlib => true, config::CrateTypeRlib => false, } }); - if !needs_check { return } + if !needs_check { + return + } let mut missing = HashSet::new(); for cnum in sess.cstore.crates() { @@ -85,8 +87,19 @@ fn verify(sess: &Session, items: &lang_items::LanguageItems) { } } + // If we're not compiling with unwinding, we won't actually need these + // symbols. Other panic runtimes ensure that the relevant symbols are + // available to link things together, but they're never exercised. + let mut whitelisted = HashSet::new(); + if sess.opts.cg.panic != PanicStrategy::Unwind { + whitelisted.insert(lang_items::EhPersonalityLangItem); + whitelisted.insert(lang_items::EhUnwindResumeLangItem); + } + $( - if missing.contains(&lang_items::$item) && items.$name().is_none() { + if missing.contains(&lang_items::$item) && + !whitelisted.contains(&lang_items::$item) && + items.$name().is_none() { sess.err(&format!("language item required, but not found: `{}`", stringify!($name))); diff --git a/src/librustc/mir/repr.rs b/src/librustc/mir/repr.rs index 28437fa133..458cb28144 100644 --- a/src/librustc/mir/repr.rs +++ b/src/librustc/mir/repr.rs @@ -36,6 +36,11 @@ pub struct Mir<'tcx> { /// used (eventually) for debuginfo. Indexed by a `ScopeId`. pub scopes: Vec, + /// Rvalues promoted from this function, such as borrows of constants. + /// Each of them is the Mir of a constant with the fn's type parameters + /// in scope, but no vars or args and a separate set of temps. + pub promoted: Vec>, + /// Return type of the function. pub return_ty: FnOutput<'tcx>, @@ -52,6 +57,10 @@ pub struct Mir<'tcx> { /// through the resulting reference. pub temp_decls: Vec>, + /// Names and capture modes of all the closure upvars, assuming + /// the first argument is either the closure or a reference to it. + pub upvar_decls: Vec, + /// A span representing this MIR, for error reporting pub span: Span, } @@ -59,9 +68,6 @@ pub struct Mir<'tcx> { /// where execution begins pub const START_BLOCK: BasicBlock = BasicBlock(0); -/// where execution ends, on normal return -pub const END_BLOCK: BasicBlock = BasicBlock(1); - impl<'tcx> Mir<'tcx> { pub fn all_basic_blocks(&self) -> Vec { (0..self.basic_blocks.len()) @@ -197,7 +203,20 @@ pub struct ArgDecl<'tcx> { /// If true, this argument is a tuple after monomorphization, /// and has to be collected from multiple actual arguments. - pub spread: bool + pub spread: bool, + + /// Either keywords::Invalid or the name of a single-binding + /// pattern associated with this argument. Useful for debuginfo. + pub debug_name: Name +} + +/// A closure capture, with its name and mode. +#[derive(Clone, Debug, RustcEncodable, RustcDecodable)] +pub struct UpvarDecl { + pub debug_name: Name, + + /// If true, the capture is behind a reference. 
+ pub by_ref: bool } /////////////////////////////////////////////////////////////////////////// @@ -305,8 +324,7 @@ pub enum TerminatorKind<'tcx> { Resume, /// Indicates a normal return. The ReturnPointer lvalue should - /// have been filled in by now. This should only occur in the - /// `END_BLOCK`. + /// have been filled in by now. This should occur at most once. Return, /// Drop the Lvalue @@ -698,6 +716,7 @@ impl ScopeId { #[derive(Clone, Debug, RustcEncodable, RustcDecodable)] pub struct ScopeData { + pub span: Span, pub parent_scope: Option, } @@ -797,7 +816,7 @@ pub enum AggregateKind<'tcx> { Vec, Tuple, Adt(AdtDef<'tcx>, usize, &'tcx Substs<'tcx>), - Closure(DefId, &'tcx ClosureSubsts<'tcx>), + Closure(DefId, ClosureSubsts<'tcx>), } #[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)] @@ -973,6 +992,10 @@ pub enum Literal<'tcx> { Value { value: ConstVal, }, + Promoted { + // Index into the `promoted` vector of `Mir`. + index: usize + }, } impl<'tcx> Debug for Constant<'tcx> { @@ -993,6 +1016,9 @@ impl<'tcx> Debug for Literal<'tcx> { write!(fmt, "const ")?; fmt_const_val(fmt, value) } + Promoted { index } => { + write!(fmt, "promoted{}", index) + } } } } diff --git a/src/librustc/mir/tcx.rs b/src/librustc/mir/tcx.rs index d710417bf2..a1c0d92f60 100644 --- a/src/librustc/mir/tcx.rs +++ b/src/librustc/mir/tcx.rs @@ -30,12 +30,12 @@ pub enum LvalueTy<'tcx> { variant_index: usize }, } -impl<'tcx> LvalueTy<'tcx> { +impl<'a, 'gcx, 'tcx> LvalueTy<'tcx> { pub fn from_ty(ty: Ty<'tcx>) -> LvalueTy<'tcx> { LvalueTy::Ty { ty: ty } } - pub fn to_ty(&self, tcx: &TyCtxt<'tcx>) -> Ty<'tcx> { + pub fn to_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { match *self { LvalueTy::Ty { ty } => ty, @@ -44,8 +44,7 @@ impl<'tcx> LvalueTy<'tcx> { } } - pub fn projection_ty(self, - tcx: &TyCtxt<'tcx>, + pub fn projection_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, elem: &LvalueElem<'tcx>) -> LvalueTy<'tcx> { @@ -79,14 +78,13 @@ impl<'tcx> LvalueTy<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for LvalueTy<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { match *self { LvalueTy::Ty { ty } => LvalueTy::Ty { ty: ty.fold_with(folder) }, LvalueTy::Downcast { adt_def, substs, variant_index } => { - let substs = substs.fold_with(folder); LvalueTy::Downcast { adt_def: adt_def, - substs: folder.tcx().mk_substs(substs), + substs: substs.fold_with(folder), variant_index: variant_index } } @@ -101,9 +99,8 @@ impl<'tcx> TypeFoldable<'tcx> for LvalueTy<'tcx> { } } -impl<'tcx> Mir<'tcx> { - pub fn operand_ty(&self, - tcx: &TyCtxt<'tcx>, +impl<'a, 'gcx, 'tcx> Mir<'tcx> { + pub fn operand_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, operand: &Operand<'tcx>) -> Ty<'tcx> { @@ -113,8 +110,7 @@ impl<'tcx> Mir<'tcx> { } } - pub fn binop_ty(&self, - tcx: &TyCtxt<'tcx>, + pub fn binop_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, op: BinOp, lhs_ty: Ty<'tcx>, rhs_ty: Ty<'tcx>) @@ -138,8 +134,7 @@ impl<'tcx> Mir<'tcx> { } } - pub fn lvalue_ty(&self, - tcx: &TyCtxt<'tcx>, + pub fn lvalue_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, lvalue: &Lvalue<'tcx>) -> LvalueTy<'tcx> { @@ -159,8 +154,7 @@ impl<'tcx> Mir<'tcx> { } } - pub fn rvalue_ty(&self, - tcx: &TyCtxt<'tcx>, + pub fn rvalue_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, rvalue: &Rvalue<'tcx>) -> Option> { @@ -211,11 +205,10 @@ impl<'tcx> Mir<'tcx> { )) } AggregateKind::Adt(def, _, substs) => { - Some(def.type_scheme(tcx).ty.subst(tcx, substs)) + 
Some(tcx.lookup_item_type(def.did).ty.subst(tcx, substs)) } AggregateKind::Closure(did, substs) => { - Some(tcx.mk_closure_from_closure_substs( - did, Box::new(substs.clone()))) + Some(tcx.mk_closure_from_closure_substs(did, substs)) } } } diff --git a/src/librustc/mir/transform.rs b/src/librustc/mir/transform.rs index 410e3f9d06..79c44b2b85 100644 --- a/src/librustc/mir/transform.rs +++ b/src/librustc/mir/transform.rs @@ -8,31 +8,103 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use dep_graph::DepNode; +use hir; +use hir::map::DefPathData; +use hir::def_id::DefId; use mir::mir_map::MirMap; use mir::repr::Mir; use ty::TyCtxt; use syntax::ast::NodeId; +/// Where a specific Mir comes from. +#[derive(Copy, Clone)] +pub enum MirSource { + /// Functions and methods. + Fn(NodeId), + + /// Constants and associated constants. + Const(NodeId), + + /// Initializer of a `static` item. + Static(NodeId, hir::Mutability), + + /// Promoted rvalues within a function. + Promoted(NodeId, usize) +} + +impl<'a, 'tcx> MirSource { + pub fn from_node(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: NodeId) -> MirSource { + use hir::*; + + // Handle constants in enum discriminants, types, and repeat expressions. + let def_id = tcx.map.local_def_id(id); + let def_key = tcx.def_key(def_id); + if def_key.disambiguated_data.data == DefPathData::Initializer { + return MirSource::Const(id); + } + + match tcx.map.get(id) { + map::NodeItem(&Item { node: ItemConst(..), .. }) | + map::NodeTraitItem(&TraitItem { node: ConstTraitItem(..), .. }) | + map::NodeImplItem(&ImplItem { node: ImplItemKind::Const(..), .. }) => { + MirSource::Const(id) + } + map::NodeItem(&Item { node: ItemStatic(_, m, _), .. }) => { + MirSource::Static(id, m) + } + // Default to function if it's not a constant or static. + _ => MirSource::Fn(id) + } + } + + pub fn item_id(&self) -> NodeId { + match *self { + MirSource::Fn(id) | + MirSource::Const(id) | + MirSource::Static(id, _) | + MirSource::Promoted(id, _) => id + } + } +} + /// Various information about pass. pub trait Pass { // fn name() for printouts of various sorts? // fn should_run(Session) to check if pass should run? + fn dep_node(&self, def_id: DefId) -> DepNode { + DepNode::MirPass(def_id) + } } /// A pass which inspects the whole MirMap. pub trait MirMapPass<'tcx>: Pass { - fn run_pass(&mut self, cx: &TyCtxt<'tcx>, map: &mut MirMap<'tcx>); + fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, map: &mut MirMap<'tcx>); } /// A pass which inspects Mir of functions in isolation. 
pub trait MirPass<'tcx>: Pass { - fn run_pass(&mut self, cx: &TyCtxt<'tcx>, id: NodeId, mir: &mut Mir<'tcx>); + fn run_pass_on_promoted<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + item_id: NodeId, index: usize, + mir: &mut Mir<'tcx>) { + self.run_pass(tcx, MirSource::Promoted(item_id, index), mir); + } + fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + src: MirSource, mir: &mut Mir<'tcx>); } impl<'tcx, T: MirPass<'tcx>> MirMapPass<'tcx> for T { - fn run_pass(&mut self, tcx: &TyCtxt<'tcx>, map: &mut MirMap<'tcx>) { + fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, map: &mut MirMap<'tcx>) { for (&id, mir) in &mut map.map { - MirPass::run_pass(self, tcx, id, mir); + let def_id = tcx.map.local_def_id(id); + let _task = tcx.dep_graph.in_task(self.dep_node(def_id)); + + let src = MirSource::from_node(tcx, id); + MirPass::run_pass(self, tcx, src, mir); + + for (i, mir) in mir.promoted.iter_mut().enumerate() { + self.run_pass_on_promoted(tcx, id, i, mir); + } } } } @@ -43,7 +115,7 @@ pub struct Passes { plugin_passes: Vec MirMapPass<'tcx>>> } -impl Passes { +impl<'a, 'tcx> Passes { pub fn new() -> Passes { let passes = Passes { passes: Vec::new(), @@ -52,17 +124,17 @@ impl Passes { passes } - pub fn run_passes<'tcx>(&mut self, pcx: &TyCtxt<'tcx>, map: &mut MirMap<'tcx>) { + pub fn run_passes(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, map: &mut MirMap<'tcx>) { for pass in &mut self.plugin_passes { - pass.run_pass(pcx, map); + pass.run_pass(tcx, map); } for pass in &mut self.passes { - pass.run_pass(pcx, map); + pass.run_pass(tcx, map); } } /// Pushes a built-in pass. - pub fn push_pass(&mut self, pass: Box MirMapPass<'a>>) { + pub fn push_pass(&mut self, pass: Box MirMapPass<'b>>) { self.passes.push(pass); } } diff --git a/src/librustc/mir/visit.rs b/src/librustc/mir/visit.rs index 450d25b606..8846065135 100644 --- a/src/librustc/mir/visit.rs +++ b/src/librustc/mir/visit.rs @@ -197,7 +197,7 @@ macro_rules! make_mir_visitor { } fn visit_closure_substs(&mut self, - substs: & $($mutability)* &'tcx ClosureSubsts<'tcx>) { + substs: & $($mutability)* ClosureSubsts<'tcx>) { self.super_closure_substs(substs); } @@ -244,10 +244,12 @@ macro_rules! make_mir_visitor { let Mir { ref $($mutability)* basic_blocks, ref $($mutability)* scopes, + promoted: _, // Visited by passes separately. ref $($mutability)* return_ty, ref $($mutability)* var_decls, ref $($mutability)* arg_decls, ref $($mutability)* temp_decls, + upvar_decls: _, ref $($mutability)* span, } = *mir; @@ -298,9 +300,11 @@ macro_rules! make_mir_visitor { fn super_scope_data(&mut self, scope_data: & $($mutability)* ScopeData) { let ScopeData { + ref $($mutability)* span, ref $($mutability)* parent_scope, } = *scope_data; + self.visit_span(span); if let Some(ref $($mutability)* parent_scope) = *parent_scope { self.visit_scope_id(parent_scope); } @@ -597,7 +601,8 @@ macro_rules! make_mir_visitor { arg_decl: & $($mutability)* ArgDecl<'tcx>) { let ArgDecl { ref $($mutability)* ty, - spread: _ + spread: _, + debug_name: _ } = *arg_decl; self.visit_ty(ty); @@ -645,10 +650,11 @@ macro_rules! make_mir_visitor { ref $($mutability)* substs } => { self.visit_def_id(def_id); self.visit_substs(substs); - }, + } Literal::Value { ref $($mutability)* value } => { self.visit_const_val(value); } + Literal::Promoted { index: _ } => {} } } @@ -675,7 +681,7 @@ macro_rules! 
make_mir_visitor { } fn super_closure_substs(&mut self, - _substs: & $($mutability)* &'tcx ClosureSubsts<'tcx>) { + _substs: & $($mutability)* ClosureSubsts<'tcx>) { } fn super_const_val(&mut self, _substs: & $($mutability)* ConstVal) { diff --git a/src/librustc/session/config.rs b/src/librustc/session/config.rs index 574c927bd7..da5555dbd6 100644 --- a/src/librustc/session/config.rs +++ b/src/librustc/session/config.rs @@ -28,7 +28,6 @@ use syntax::attr; use syntax::attr::AttrMetaMethods; use syntax::errors::{ColorConfig, Handler}; use syntax::parse; -use syntax::parse::lexer::Reader; use syntax::parse::token::InternedString; use syntax::feature_gate::UnstableFeatures; @@ -49,7 +48,9 @@ pub enum OptLevel { No, // -O0 Less, // -O1 Default, // -O2 - Aggressive // -O3 + Aggressive, // -O3 + Size, // -Os + SizeMin, // -Oz } #[derive(Clone, Copy, PartialEq)] @@ -299,6 +300,7 @@ pub enum CrateType { CrateTypeDylib, CrateTypeRlib, CrateTypeStaticlib, + CrateTypeCdylib, } #[derive(Clone)] @@ -316,6 +318,21 @@ impl Passes { } } +#[derive(Clone, PartialEq)] +pub enum PanicStrategy { + Unwind, + Abort, +} + +impl PanicStrategy { + pub fn desc(&self) -> &str { + match *self { + PanicStrategy::Unwind => "unwind", + PanicStrategy::Abort => "abort", + } + } +} + /// Declare a macro that will define all CodegenOptions/DebuggingOptions fields and parsers all /// at once. The goal of this macro is to define an interface that can be /// programmatically used by the option parser in order to initialize the struct @@ -401,11 +418,13 @@ macro_rules! options { Some("a space-separated list of passes, or `all`"); pub const parse_opt_uint: Option<&'static str> = Some("a number"); + pub const parse_panic_strategy: Option<&'static str> = + Some("either `panic` or `abort`"); } #[allow(dead_code)] mod $mod_set { - use super::{$struct_name, Passes, SomePasses, AllPasses}; + use super::{$struct_name, Passes, SomePasses, AllPasses, PanicStrategy}; $( pub fn $opt(cg: &mut $struct_name, v: Option<&str>) -> bool { @@ -509,6 +528,15 @@ macro_rules! options { } } } + + fn parse_panic_strategy(slot: &mut PanicStrategy, v: Option<&str>) -> bool { + match v { + Some("unwind") => *slot = PanicStrategy::Unwind, + Some("abort") => *slot = PanicStrategy::Abort, + _ => return false + } + true + } } ) } @@ -568,12 +596,14 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options, debuginfo: Option = (None, parse_opt_uint, "debug info emission level, 0 = no debug info, 1 = line tables only, \ 2 = full debug info with variable and type information"), - opt_level: Option = (None, parse_opt_uint, - "optimize with possible levels 0-3"), + opt_level: Option = (None, parse_opt_string, + "optimize with possible levels 0-3, s, or z"), debug_assertions: Option = (None, parse_opt_bool, "explicitly enable the cfg(debug_assertions) directive"), inline_threshold: Option = (None, parse_opt_uint, "set the inlining threshold for"), + panic: PanicStrategy = (PanicStrategy::Unwind, parse_panic_strategy, + "panic strategy to compile crate with"), } @@ -619,7 +649,9 @@ options! 
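// Illustrative sketch (not part of the patch): the value parsing behind the new
// `-C panic=...` codegen option, mirrored as a standalone function. Note that the
// parser accepts the spellings "unwind" and "abort" (the option's one-line value
// description in the hunk above says `panic`, but that spelling is rejected here).
#[derive(Clone, Copy, Debug, PartialEq)]
enum PanicStrategy {
    Unwind,
    Abort,
}

fn parse_panic_strategy(slot: &mut PanicStrategy, v: Option<&str>) -> bool {
    match v {
        Some("unwind") => *slot = PanicStrategy::Unwind,
        Some("abort") => *slot = PanicStrategy::Abort,
        _ => return false,
    }
    true
}

fn main() {
    let mut strategy = PanicStrategy::Unwind; // the default used by the option definition
    assert!(parse_panic_strategy(&mut strategy, Some("abort")));
    assert_eq!(strategy, PanicStrategy::Abort);
    assert!(!parse_panic_strategy(&mut strategy, Some("panic"))); // unknown value: rejected
}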
{DebuggingOptions, DebuggingSetter, basic_debugging_options, ls: bool = (false, parse_bool, "list the symbols defined by a library crate"), save_analysis: bool = (false, parse_bool, - "write syntax and type analysis information in addition to normal output"), + "write syntax and type analysis (in JSON format) information in addition to normal output"), + save_analysis_csv: bool = (false, parse_bool, + "write syntax and type analysis (in CSV format) information in addition to normal output"), print_move_fragments: bool = (false, parse_bool, "print out move-fragment data for every fn"), flowgraph_print_loans: bool = (false, parse_bool, @@ -692,6 +724,7 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig { let os = &sess.target.target.target_os; let env = &sess.target.target.target_env; let vendor = &sess.target.target.target_vendor; + let max_atomic_width = sess.target.target.options.max_atomic_width; let fam = if let Some(ref fam) = sess.target.target.options.target_family { intern(fam) @@ -718,6 +751,15 @@ pub fn default_configuration(sess: &Session) -> ast::CrateConfig { if sess.target.target.options.has_elf_tls { ret.push(attr::mk_word_item(InternedString::new("target_thread_local"))); } + for &i in &[8, 16, 32, 64, 128] { + if i <= max_atomic_width { + let s = i.to_string(); + ret.push(mk(InternedString::new("target_has_atomic"), intern(&s))); + if &s == wordsz { + ret.push(mk(InternedString::new("target_has_atomic"), intern("ptr"))); + } + } + } if sess.opts.debug_assertions { ret.push(attr::mk_word_item(InternedString::new("debug_assertions"))); } @@ -889,10 +931,11 @@ pub fn rustc_short_optgroups() -> Vec { vec![ opt::flag_s("h", "help", "Display this message"), opt::multi_s("", "cfg", "Configure the compilation environment", "SPEC"), - opt::multi_s("L", "", "Add a directory to the library search path", - "[KIND=]PATH"), + opt::multi_s("L", "", "Add a directory to the library search path. The + optional KIND can be one of dependency, crate, native, + framework or all (the default).", "[KIND=]PATH"), opt::multi_s("l", "", "Link the generated crate(s) to the specified native - library NAME. The optional KIND can be one of, + library NAME. The optional KIND can be one of static, dylib, or framework. 
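// Illustrative sketch (not part of the patch): which `target_has_atomic` values the
// loop in default_configuration above ends up emitting, given a target's
// max_atomic_width and pointer width. The real code pushes attribute meta items into
// the crate config; this stand-in just collects the cfg strings.
fn target_has_atomic_cfgs(max_atomic_width: u64, pointer_width: u64) -> Vec<String> {
    let mut cfgs = Vec::new();
    for &i in &[8u64, 16, 32, 64, 128] {
        if i <= max_atomic_width {
            cfgs.push(format!("target_has_atomic=\"{}\"", i));
            // The word-sized width additionally enables `target_has_atomic = "ptr"`.
            if i == pointer_width {
                cfgs.push("target_has_atomic=\"ptr\"".to_string());
            }
        }
    }
    cfgs
}

fn main() {
    // A 64-bit target with max_atomic_width = 64 gets widths 8, 16, 32, 64 plus "ptr".
    let cfgs = target_has_atomic_cfgs(64, 64);
    assert!(cfgs.contains(&"target_has_atomic=\"ptr\"".to_string()));
    assert_eq!(cfgs.len(), 5);
}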
If omitted, dylib is assumed.", "[KIND=]NAME"), opt::multi_s("", "crate-type", "Comma separated list of types of crates @@ -1123,13 +1166,20 @@ pub fn build_session_options(matches: &getopts::Matches) -> Options { } OptLevel::Default } else { - match cg.opt_level { - None => OptLevel::No, - Some(0) => OptLevel::No, - Some(1) => OptLevel::Less, - Some(2) => OptLevel::Default, - Some(3) => OptLevel::Aggressive, - Some(arg) => { + match (cg.opt_level.as_ref().map(String::as_ref), + nightly_options::is_nightly_build()) { + (None, _) => OptLevel::No, + (Some("0"), _) => OptLevel::No, + (Some("1"), _) => OptLevel::Less, + (Some("2"), _) => OptLevel::Default, + (Some("3"), _) => OptLevel::Aggressive, + (Some("s"), true) => OptLevel::Size, + (Some("z"), true) => OptLevel::SizeMin, + (Some("s"), false) | (Some("z"), false) => { + early_error(error_format, &format!("the optimizations s or z are only \ + accepted on the nightly compiler")); + }, + (Some(arg), _) => { early_error(error_format, &format!("optimization level needs to be \ between 0-3 (instead was `{}`)", arg)); @@ -1277,6 +1327,7 @@ pub fn parse_crate_types_from_list(list_list: Vec) -> Result CrateTypeRlib, "staticlib" => CrateTypeStaticlib, "dylib" => CrateTypeDylib, + "cdylib" => CrateTypeCdylib, "bin" => CrateTypeExecutable, _ => { return Err(format!("unknown crate type: `{}`", @@ -1302,7 +1353,7 @@ pub mod nightly_options { is_nightly_build() && matches.opt_strs("Z").iter().any(|x| *x == "unstable-options") } - fn is_nightly_build() -> bool { + pub fn is_nightly_build() -> bool { match get_unstable_features_setting() { UnstableFeatures::Allow | UnstableFeatures::Cheat => true, _ => false, @@ -1344,7 +1395,7 @@ pub mod nightly_options { early_error(ErrorOutputType::default(), &msg); } OptionStability::UnstableButNotReally => { - let msg = format!("the option `{}` is is unstable and should \ + let msg = format!("the option `{}` is unstable and should \ only be used on the nightly compiler, but \ it is currently accepted for backwards \ compatibility; this will soon change, \ @@ -1364,13 +1415,15 @@ impl fmt::Display for CrateType { CrateTypeExecutable => "bin".fmt(f), CrateTypeDylib => "dylib".fmt(f), CrateTypeRlib => "rlib".fmt(f), - CrateTypeStaticlib => "staticlib".fmt(f) + CrateTypeStaticlib => "staticlib".fmt(f), + CrateTypeCdylib => "cdylib".fmt(f), } } } #[cfg(test)] mod tests { + use dep_graph::DepGraph; use middle::cstore::DummyCrateStore; use session::config::{build_configuration, build_session_options}; use session::build_session; @@ -1390,6 +1443,7 @@ mod tests { // When the user supplies --test we should implicitly supply --cfg test #[test] fn test_switch_implies_cfg_test() { + let dep_graph = DepGraph::new(false); let matches = &match getopts(&["--test".to_string()], &optgroups()) { Ok(m) => m, @@ -1397,7 +1451,7 @@ mod tests { }; let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(matches); - let sess = build_session(sessopts, None, registry, Rc::new(DummyCrateStore)); + let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); let cfg = build_configuration(&sess); assert!((attr::contains_name(&cfg[..], "test"))); } @@ -1406,6 +1460,7 @@ mod tests { // another --cfg test #[test] fn test_switch_implies_cfg_test_unless_cfg_test() { + let dep_graph = DepGraph::new(false); let matches = &match getopts(&["--test".to_string(), "--cfg=test".to_string()], &optgroups()) { @@ -1416,7 +1471,7 @@ mod tests { }; let registry = 
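// Illustrative sketch (not part of the patch): the reworked -C opt-level handling above
// as a standalone function, including the nightly-only gating of the new `s` / `z`
// levels. `OptLevel` mirrors the enum in this file; error reporting is reduced to
// returning the message instead of calling early_error.
#[derive(Debug, PartialEq)]
enum OptLevel {
    No,         // -O0
    Less,       // -O1
    Default,    // -O2
    Aggressive, // -O3
    Size,       // -Os
    SizeMin,    // -Oz
}

fn parse_opt_level(arg: Option<&str>, is_nightly_build: bool) -> Result<OptLevel, String> {
    match (arg, is_nightly_build) {
        (None, _) | (Some("0"), _) => Ok(OptLevel::No),
        (Some("1"), _) => Ok(OptLevel::Less),
        (Some("2"), _) => Ok(OptLevel::Default),
        (Some("3"), _) => Ok(OptLevel::Aggressive),
        (Some("s"), true) => Ok(OptLevel::Size),
        (Some("z"), true) => Ok(OptLevel::SizeMin),
        (Some("s"), false) | (Some("z"), false) => {
            Err("the optimizations s or z are only accepted on the nightly compiler".to_string())
        }
        (Some(other), _) => {
            Err(format!("optimization level needs to be between 0-3 (instead was `{}`)", other))
        }
    }
}

fn main() {
    assert_eq!(parse_opt_level(None, false), Ok(OptLevel::No));
    assert_eq!(parse_opt_level(Some("s"), true), Ok(OptLevel::Size));
    assert!(parse_opt_level(Some("z"), false).is_err());
}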
diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(matches); - let sess = build_session(sessopts, None, registry, + let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); let cfg = build_configuration(&sess); let mut test_items = cfg.iter().filter(|m| m.name() == "test"); @@ -1426,13 +1481,14 @@ mod tests { #[test] fn test_can_print_warnings() { + let dep_graph = DepGraph::new(false); { let matches = getopts(&[ "-Awarnings".to_string() ], &optgroups()).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); - let sess = build_session(sessopts, None, registry, + let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); assert!(!sess.diagnostic().can_emit_warnings); } @@ -1444,7 +1500,7 @@ mod tests { ], &optgroups()).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); - let sess = build_session(sessopts, None, registry, + let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); assert!(sess.diagnostic().can_emit_warnings); } @@ -1455,7 +1511,7 @@ mod tests { ], &optgroups()).unwrap(); let registry = diagnostics::registry::Registry::new(&[]); let sessopts = build_session_options(&matches); - let sess = build_session(sessopts, None, registry, + let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore)); assert!(sess.diagnostic().can_emit_warnings); } diff --git a/src/librustc/session/filesearch.rs b/src/librustc/session/filesearch.rs index e54acf3fdc..a3eea324fd 100644 --- a/src/librustc/session/filesearch.rs +++ b/src/librustc/session/filesearch.rs @@ -15,7 +15,6 @@ pub use self::FileMatch::*; use std::collections::HashSet; use std::env; use std::fs; -use std::io::prelude::*; use std::path::{Path, PathBuf}; use session::search_paths::{SearchPaths, PathKind}; diff --git a/src/librustc/session/mod.rs b/src/librustc/session/mod.rs index 815e60a8e0..907241d174 100644 --- a/src/librustc/session/mod.rs +++ b/src/librustc/session/mod.rs @@ -8,10 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use dep_graph::DepGraph; use lint; use middle::cstore::CrateStore; use middle::dependency_format; use session::search_paths::PathKind; +use session::config::PanicStrategy; use ty::tls; use util::nodemap::{NodeMap, FnvHashMap}; use mir::transform as mir_pass; @@ -30,13 +32,16 @@ use syntax::{ast, codemap}; use syntax::feature_gate::AttributeType; use rustc_back::target::Target; +use llvm; use std::path::{Path, PathBuf}; use std::cell::{Cell, RefCell}; use std::collections::{HashMap, HashSet}; use std::env; +use std::ffi::CString; use std::rc::Rc; use std::fmt; +use libc::c_int; pub mod config; pub mod filesearch; @@ -45,6 +50,7 @@ pub mod search_paths; // Represents the data associated with a compilation // session for a single crate. pub struct Session { + pub dep_graph: DepGraph, pub target: config::Config, pub host: Target, pub opts: config::Options, @@ -79,9 +85,11 @@ pub struct Session { /// operations such as auto-dereference and monomorphization. pub recursion_limit: Cell, - /// The metadata::creader module may inject an allocator dependency if it - /// didn't already find one, and this tracks what was injected. 
+ /// The metadata::creader module may inject an allocator/panic_runtime + /// dependency if it didn't already find one, and this tracks what was + /// injected. pub injected_allocator: Cell>, + pub injected_panic_runtime: Cell>, /// Names of all bang-style macros and syntax extensions /// available in this crate @@ -292,7 +300,8 @@ impl Session { self.opts.cg.lto } pub fn no_landing_pads(&self) -> bool { - self.opts.debugging_opts.no_landing_pads + self.opts.debugging_opts.no_landing_pads || + self.opts.cg.panic == PanicStrategy::Abort } pub fn unstable_options(&self) -> bool { self.opts.debugging_opts.unstable_options @@ -401,10 +410,26 @@ fn split_msg_into_multilines(msg: &str) -> Option { } pub fn build_session(sopts: config::Options, + dep_graph: &DepGraph, local_crate_source_file: Option, registry: diagnostics::registry::Registry, cstore: Rc CrateStore<'a>>) -> Session { + build_session_with_codemap(sopts, + dep_graph, + local_crate_source_file, + registry, + cstore, + Rc::new(codemap::CodeMap::new())) +} + +pub fn build_session_with_codemap(sopts: config::Options, + dep_graph: &DepGraph, + local_crate_source_file: Option, + registry: diagnostics::registry::Registry, + cstore: Rc CrateStore<'a>>, + codemap: Rc) + -> Session { // FIXME: This is not general enough to make the warning lint completely override // normal diagnostic warnings, since the warning lint can also be denied and changed // later via the source code. @@ -416,7 +441,6 @@ pub fn build_session(sopts: config::Options, .unwrap_or(true); let treat_err_as_bug = sopts.treat_err_as_bug; - let codemap = Rc::new(codemap::CodeMap::new()); let emitter: Box = match sopts.error_format { config::ErrorOutputType::HumanReadable(color_config) => { Box::new(EmitterWriter::stderr(color_config, Some(registry), codemap.clone())) @@ -431,10 +455,16 @@ pub fn build_session(sopts: config::Options, treat_err_as_bug, emitter); - build_session_(sopts, local_crate_source_file, diagnostic_handler, codemap, cstore) + build_session_(sopts, + dep_graph, + local_crate_source_file, + diagnostic_handler, + codemap, + cstore) } pub fn build_session_(sopts: config::Options, + dep_graph: &DepGraph, local_crate_source_file: Option, span_diagnostic: errors::Handler, codemap: Rc, @@ -463,6 +493,7 @@ pub fn build_session_(sopts: config::Options, ); let sess = Session { + dep_graph: dep_graph.clone(), target: target_cfg, host: host, opts: sopts, @@ -487,13 +518,65 @@ pub fn build_session_(sopts: config::Options, recursion_limit: Cell::new(64), next_node_id: Cell::new(1), injected_allocator: Cell::new(None), + injected_panic_runtime: Cell::new(None), available_macros: RefCell::new(HashSet::new()), imported_macro_spans: RefCell::new(HashMap::new()), }; + init_llvm(&sess); + sess } +fn init_llvm(sess: &Session) { + unsafe { + // Before we touch LLVM, make sure that multithreading is enabled. + use std::sync::Once; + static INIT: Once = Once::new(); + static mut POISONED: bool = false; + INIT.call_once(|| { + if llvm::LLVMStartMultithreaded() != 1 { + // use an extra bool to make sure that all future usage of LLVM + // cannot proceed despite the Once not running more than once. 
+ POISONED = true; + } + + configure_llvm(sess); + }); + + if POISONED { + bug!("couldn't enable multi-threaded LLVM"); + } + } +} + +unsafe fn configure_llvm(sess: &Session) { + let mut llvm_c_strs = Vec::new(); + let mut llvm_args = Vec::new(); + + { + let mut add = |arg: &str| { + let s = CString::new(arg).unwrap(); + llvm_args.push(s.as_ptr()); + llvm_c_strs.push(s); + }; + add("rustc"); // fake program name + if sess.time_llvm_passes() { add("-time-passes"); } + if sess.print_llvm_passes() { add("-debug-pass=Structure"); } + + for arg in &sess.opts.cg.llvm_args { + add(&(*arg)); + } + } + + llvm::LLVMInitializePasses(); + + llvm::initialize_available_targets(); + + llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int, + llvm_args.as_ptr()); +} + pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! { let mut emitter: Box = match output { config::ErrorOutputType::HumanReadable(color_config) => { @@ -501,7 +584,7 @@ pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! { } config::ErrorOutputType::Json => Box::new(JsonEmitter::basic()), }; - emitter.emit(None, msg, None, errors::Level::Fatal); + emitter.emit(&MultiSpan::new(), msg, None, errors::Level::Fatal); panic!(errors::FatalError); } @@ -512,7 +595,7 @@ pub fn early_warn(output: config::ErrorOutputType, msg: &str) { } config::ErrorOutputType::Json => Box::new(JsonEmitter::basic()), }; - emitter.emit(None, msg, None, errors::Level::Warning); + emitter.emit(&MultiSpan::new(), msg, None, errors::Level::Warning); } // Err(0) means compilation was stopped, but no errors were found. @@ -545,9 +628,9 @@ pub fn span_bug_fmt>(file: &'static str, } fn opt_span_bug_fmt>(file: &'static str, - line: u32, - span: Option, - args: fmt::Arguments) -> ! { + line: u32, + span: Option, + args: fmt::Arguments) -> ! { tls::with_opt(move |tcx| { let msg = format!("{}:{}: {}", file, line, args); match (tcx, span) { diff --git a/src/librustc/traits/coherence.rs b/src/librustc/traits/coherence.rs index 6710d2a653..414b9fa70c 100644 --- a/src/librustc/traits/coherence.rs +++ b/src/librustc/traits/coherence.rs @@ -16,7 +16,7 @@ use middle::cstore::LOCAL_CRATE; use hir::def_id::DefId; use ty::subst::TypeSpace; use ty::{self, Ty, TyCtxt}; -use infer::{self, InferCtxt, TypeOrigin}; +use infer::{InferCtxt, TypeOrigin}; use syntax::codemap::DUMMY_SP; #[derive(Copy, Clone)] @@ -24,10 +24,10 @@ struct InferIsLocal(bool); /// If there are types that satisfy both impls, returns a suitably-freshened /// `ImplHeader` with those types substituted -pub fn overlapping_impls<'cx, 'tcx>(infcx: &InferCtxt<'cx, 'tcx>, - impl1_def_id: DefId, - impl2_def_id: DefId) - -> Option> +pub fn overlapping_impls<'cx, 'gcx, 'tcx>(infcx: &InferCtxt<'cx, 'gcx, 'tcx>, + impl1_def_id: DefId, + impl2_def_id: DefId) + -> Option> { debug!("impl_can_satisfy(\ impl1_def_id={:?}, \ @@ -41,10 +41,10 @@ pub fn overlapping_impls<'cx, 'tcx>(infcx: &InferCtxt<'cx, 'tcx>, /// Can both impl `a` and impl `b` be satisfied by a common type (including /// `where` clauses)? If so, returns an `ImplHeader` that unifies the two impls. 
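// Illustrative sketch (not part of the patch): the Once + poison-flag pattern that
// init_llvm above uses so LLVM is configured exactly once per process. LLVM itself is
// replaced by a hypothetical `try_enable_multithreading` so the example is standalone,
// and an AtomicBool stands in for the `static mut POISONED` used inside the unsafe block.
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::Once;

static INIT: Once = Once::new();
static POISONED: AtomicBool = AtomicBool::new(false);

fn try_enable_multithreading() -> bool {
    true // pretend the backend switched to multi-threaded mode
}

fn init_backend() {
    INIT.call_once(|| {
        if !try_enable_multithreading() {
            // Remember the failure: the Once body never runs again, but every later
            // caller must still be refused.
            POISONED.store(true, Ordering::SeqCst);
        }
        // ...configure flags, initialize passes, register available targets...
    });

    if POISONED.load(Ordering::SeqCst) {
        panic!("couldn't enable multi-threaded backend");
    }
}

fn main() {
    init_backend();
    init_backend(); // later calls are cheap no-ops once initialization has happened
}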
-fn overlap<'cx, 'tcx>(selcx: &mut SelectionContext<'cx, 'tcx>, - a_def_id: DefId, - b_def_id: DefId) - -> Option> +fn overlap<'cx, 'gcx, 'tcx>(selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, + a_def_id: DefId, + b_def_id: DefId) + -> Option> { debug!("overlap(a_def_id={:?}, b_def_id={:?})", a_def_id, @@ -57,11 +57,10 @@ fn overlap<'cx, 'tcx>(selcx: &mut SelectionContext<'cx, 'tcx>, debug!("overlap: b_impl_header={:?}", b_impl_header); // Do `a` and `b` unify? If not, no overlap. - if let Err(_) = infer::mk_eq_impl_headers(selcx.infcx(), - true, - TypeOrigin::Misc(DUMMY_SP), - &a_impl_header, - &b_impl_header) { + if let Err(_) = selcx.infcx().eq_impl_headers(true, + TypeOrigin::Misc(DUMMY_SP), + &a_impl_header, + &b_impl_header) { return None; } @@ -87,7 +86,8 @@ fn overlap<'cx, 'tcx>(selcx: &mut SelectionContext<'cx, 'tcx>, Some(selcx.infcx().resolve_type_vars_if_possible(&a_impl_header)) } -pub fn trait_ref_is_knowable<'tcx>(tcx: &TyCtxt<'tcx>, trait_ref: &ty::TraitRef<'tcx>) -> bool +pub fn trait_ref_is_knowable<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + trait_ref: &ty::TraitRef<'tcx>) -> bool { debug!("trait_ref_is_knowable(trait_ref={:?})", trait_ref); @@ -129,9 +129,9 @@ pub enum OrphanCheckErr<'tcx> { /// /// 1. All type parameters in `Self` must be "covered" by some local type constructor. /// 2. Some local type must appear in `Self`. -pub fn orphan_check<'tcx>(tcx: &TyCtxt<'tcx>, - impl_def_id: DefId) - -> Result<(), OrphanCheckErr<'tcx>> +pub fn orphan_check<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + impl_def_id: DefId) + -> Result<(), OrphanCheckErr<'tcx>> { debug!("orphan_check({:?})", impl_def_id); @@ -150,7 +150,7 @@ pub fn orphan_check<'tcx>(tcx: &TyCtxt<'tcx>, orphan_check_trait_ref(tcx, &trait_ref, InferIsLocal(false)) } -fn orphan_check_trait_ref<'tcx>(tcx: &TyCtxt<'tcx>, +fn orphan_check_trait_ref<'tcx>(tcx: TyCtxt, trait_ref: &ty::TraitRef<'tcx>, infer_is_local: InferIsLocal) -> Result<(), OrphanCheckErr<'tcx>> @@ -198,11 +198,8 @@ fn orphan_check_trait_ref<'tcx>(tcx: &TyCtxt<'tcx>, return Err(OrphanCheckErr::NoLocalInputType); } -fn uncovered_tys<'tcx>(tcx: &TyCtxt<'tcx>, - ty: Ty<'tcx>, - infer_is_local: InferIsLocal) - -> Vec> -{ +fn uncovered_tys<'tcx>(tcx: TyCtxt, ty: Ty<'tcx>, infer_is_local: InferIsLocal) + -> Vec> { if ty_is_local_constructor(tcx, ty, infer_is_local) { vec![] } else if fundamental_ty(tcx, ty) { @@ -214,7 +211,7 @@ fn uncovered_tys<'tcx>(tcx: &TyCtxt<'tcx>, } } -fn is_type_parameter<'tcx>(ty: Ty<'tcx>) -> bool { +fn is_type_parameter(ty: Ty) -> bool { match ty.sty { // FIXME(#20590) straighten story about projection types ty::TyProjection(..) | ty::TyParam(..) => true, @@ -222,14 +219,12 @@ fn is_type_parameter<'tcx>(ty: Ty<'tcx>) -> bool { } } -fn ty_is_local<'tcx>(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>, infer_is_local: InferIsLocal) -> bool -{ +fn ty_is_local(tcx: TyCtxt, ty: Ty, infer_is_local: InferIsLocal) -> bool { ty_is_local_constructor(tcx, ty, infer_is_local) || fundamental_ty(tcx, ty) && ty.walk_shallow().any(|t| ty_is_local(tcx, t, infer_is_local)) } -fn fundamental_ty<'tcx>(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool -{ +fn fundamental_ty(tcx: TyCtxt, ty: Ty) -> bool { match ty.sty { ty::TyBox(..) | ty::TyRef(..) 
=> true, @@ -242,11 +237,7 @@ fn fundamental_ty<'tcx>(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool } } -fn ty_is_local_constructor<'tcx>(tcx: &TyCtxt<'tcx>, - ty: Ty<'tcx>, - infer_is_local: InferIsLocal) - -> bool -{ +fn ty_is_local_constructor(tcx: TyCtxt, ty: Ty, infer_is_local: InferIsLocal)-> bool { debug!("ty_is_local_constructor({:?})", ty); match ty.sty { diff --git a/src/librustc/traits/error_reporting.rs b/src/librustc/traits/error_reporting.rs index 286733c7c2..9a69958fea 100644 --- a/src/librustc/traits/error_reporting.rs +++ b/src/librustc/traits/error_reporting.rs @@ -22,19 +22,20 @@ use super::{ SelectionError, ObjectSafetyViolation, MethodViolationCode, - object_safety_violations, }; use fmt_macros::{Parser, Piece, Position}; use hir::def_id::DefId; -use infer::InferCtxt; -use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt}; +use infer::{InferCtxt}; +use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable}; use ty::fast_reject; -use ty::fold::{TypeFoldable, TypeFolder}; +use ty::fold::TypeFolder; +use ty::subst::{self, Subst, TypeSpace}; use util::nodemap::{FnvHashMap, FnvHashSet}; use std::cmp; use std::fmt; +use syntax::ast; use syntax::attr::{AttributeMethods, AttrMetaMethods}; use syntax::codemap::Span; use syntax::errors::DiagnosticBuilder; @@ -42,845 +43,908 @@ use syntax::errors::DiagnosticBuilder; #[derive(Debug, PartialEq, Eq, Hash)] pub struct TraitErrorKey<'tcx> { span: Span, + warning_node_id: Option, predicate: ty::Predicate<'tcx> } -impl<'tcx> TraitErrorKey<'tcx> { - fn from_error<'a>(infcx: &InferCtxt<'a, 'tcx>, - e: &FulfillmentError<'tcx>) -> Self { +impl<'a, 'gcx, 'tcx> TraitErrorKey<'tcx> { + fn from_error(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + e: &FulfillmentError<'tcx>, + warning_node_id: Option) -> Self { let predicate = infcx.resolve_type_vars_if_possible(&e.obligation.predicate); TraitErrorKey { span: e.obligation.cause.span, - predicate: infcx.tcx.erase_regions(&predicate) + predicate: infcx.tcx.erase_regions(&predicate), + warning_node_id: warning_node_id } } } -pub fn report_fulfillment_errors<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - errors: &Vec>) { - for error in errors { - report_fulfillment_error(infcx, error); +impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { + pub fn report_fulfillment_errors(&self, errors: &Vec>) { + for error in errors { + self.report_fulfillment_error(error, None); + } } -} -fn report_fulfillment_error<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - error: &FulfillmentError<'tcx>) { - let error_key = TraitErrorKey::from_error(infcx, error); - debug!("report_fulfillment_errors({:?}) - key={:?}", - error, error_key); - if !infcx.reported_trait_errors.borrow_mut().insert(error_key) { - debug!("report_fulfillment_errors: skipping duplicate"); - return; + pub fn report_fulfillment_errors_as_warnings(&self, + errors: &Vec>, + node_id: ast::NodeId) { + for error in errors { + self.report_fulfillment_error(error, Some(node_id)); + } } - match error.code { - FulfillmentErrorCode::CodeSelectionError(ref e) => { - report_selection_error(infcx, &error.obligation, e); + + fn report_fulfillment_error(&self, + error: &FulfillmentError<'tcx>, + warning_node_id: Option) { + let error_key = TraitErrorKey::from_error(self, error, warning_node_id); + debug!("report_fulfillment_errors({:?}) - key={:?}", + error, error_key); + if !self.reported_trait_errors.borrow_mut().insert(error_key) { + debug!("report_fulfillment_errors: skipping duplicate"); + return; + } + match error.code { + FulfillmentErrorCode::CodeSelectionError(ref e) => { + 
self.report_selection_error(&error.obligation, e, warning_node_id); + } + FulfillmentErrorCode::CodeProjectionError(ref e) => { + self.report_projection_error(&error.obligation, e, warning_node_id); + } + FulfillmentErrorCode::CodeAmbiguity => { + self.maybe_report_ambiguity(&error.obligation); + } } - FulfillmentErrorCode::CodeProjectionError(ref e) => { - report_projection_error(infcx, &error.obligation, e); + } + + fn report_projection_error(&self, + obligation: &PredicateObligation<'tcx>, + error: &MismatchedProjectionTypes<'tcx>, + warning_node_id: Option) + { + let predicate = + self.resolve_type_vars_if_possible(&obligation.predicate); + + if !predicate.references_error() { + if let Some(warning_node_id) = warning_node_id { + self.tcx.sess.add_lint( + ::lint::builtin::UNSIZED_IN_TUPLE, + warning_node_id, + obligation.cause.span, + format!("type mismatch resolving `{}`: {}", + predicate, + error.err)); + } else { + let mut err = struct_span_err!(self.tcx.sess, obligation.cause.span, E0271, + "type mismatch resolving `{}`: {}", + predicate, + error.err); + self.note_obligation_cause(&mut err, obligation); + err.emit(); + } } - FulfillmentErrorCode::CodeAmbiguity => { - maybe_report_ambiguity(infcx, &error.obligation); + } + + fn impl_substs(&self, + did: DefId, + obligation: PredicateObligation<'tcx>) + -> subst::Substs<'tcx> { + let tcx = self.tcx; + + let ity = tcx.lookup_item_type(did); + let (tps, rps, _) = + (ity.generics.types.get_slice(TypeSpace), + ity.generics.regions.get_slice(TypeSpace), + ity.ty); + + let rps = self.region_vars_for_defs(obligation.cause.span, rps); + let mut substs = subst::Substs::new( + subst::VecPerParamSpace::empty(), + subst::VecPerParamSpace::new(rps, Vec::new(), Vec::new())); + self.type_vars_for_defs(obligation.cause.span, + TypeSpace, + &mut substs, + tps); + substs + } + + fn fuzzy_match_tys(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool { + /// returns the fuzzy category of a given type, or None + /// if the type can be equated to any type. + fn type_category<'tcx>(t: Ty<'tcx>) -> Option { + match t.sty { + ty::TyBool => Some(0), + ty::TyChar => Some(1), + ty::TyStr => Some(2), + ty::TyInt(..) | ty::TyUint(..) | + ty::TyInfer(ty::IntVar(..)) => Some(3), + ty::TyFloat(..) | ty::TyInfer(ty::FloatVar(..)) => Some(4), + ty::TyEnum(..) => Some(5), + ty::TyStruct(..) => Some(6), + ty::TyBox(..) | ty::TyRef(..) | ty::TyRawPtr(..) => Some(7), + ty::TyArray(..) | ty::TySlice(..) => Some(8), + ty::TyFnDef(..) | ty::TyFnPtr(..) => Some(9), + ty::TyTrait(..) => Some(10), + ty::TyClosure(..) => Some(11), + ty::TyTuple(..) => Some(12), + ty::TyProjection(..) => Some(13), + ty::TyParam(..) => Some(14), + ty::TyInfer(..) 
| ty::TyError => None + } + } + + match (type_category(a), type_category(b)) { + (Some(cat_a), Some(cat_b)) => match (&a.sty, &b.sty) { + (&ty::TyStruct(def_a, _), &ty::TyStruct(def_b, _)) | + (&ty::TyEnum(def_a, _), &ty::TyEnum(def_b, _)) => + def_a == def_b, + _ => cat_a == cat_b + }, + // infer and error can be equated to all types + _ => true } } -} -pub fn report_projection_error<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - obligation: &PredicateObligation<'tcx>, - error: &MismatchedProjectionTypes<'tcx>) -{ - let predicate = - infcx.resolve_type_vars_if_possible(&obligation.predicate); - - if !predicate.references_error() { - let mut err = struct_span_err!(infcx.tcx.sess, obligation.cause.span, E0271, - "type mismatch resolving `{}`: {}", - predicate, - error.err); - note_obligation_cause(infcx, &mut err, obligation); - err.emit(); + fn impl_similar_to(&self, + trait_ref: ty::PolyTraitRef<'tcx>, + obligation: &PredicateObligation<'tcx>) + -> Option + { + let tcx = self.tcx; + + let trait_ref = tcx.erase_late_bound_regions(&trait_ref); + let trait_self_ty = trait_ref.self_ty(); + + let mut self_match_impls = vec![]; + let mut fuzzy_match_impls = vec![]; + + self.tcx.lookup_trait_def(trait_ref.def_id) + .for_each_relevant_impl(self.tcx, trait_self_ty, |def_id| { + let impl_trait_ref = tcx + .impl_trait_ref(def_id) + .unwrap() + .subst(tcx, &self.impl_substs(def_id, obligation.clone())); + + let impl_self_ty = impl_trait_ref.self_ty(); + + if let Ok(..) = self.can_equate(&trait_self_ty, &impl_self_ty) { + self_match_impls.push(def_id); + + if trait_ref.substs.types.get_slice(TypeSpace).iter() + .zip(impl_trait_ref.substs.types.get_slice(TypeSpace)) + .all(|(u,v)| self.fuzzy_match_tys(u, v)) + { + fuzzy_match_impls.push(def_id); + } + } + }); + + let impl_def_id = if self_match_impls.len() == 1 { + self_match_impls[0] + } else if fuzzy_match_impls.len() == 1 { + fuzzy_match_impls[0] + } else { + return None + }; + + if tcx.has_attr(impl_def_id, "rustc_on_unimplemented") { + Some(impl_def_id) + } else { + None + } } -} -fn on_unimplemented_note<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>, - span: Span) -> Option { - let trait_ref = trait_ref.skip_binder(); - let def_id = trait_ref.def_id; - let mut report = None; - for item in infcx.tcx.get_attrs(def_id).iter() { - if item.check_name("rustc_on_unimplemented") { - let err_sp = item.meta().span.substitute_dummy(span); - let def = infcx.tcx.lookup_trait_def(def_id); - let trait_str = def.trait_ref.to_string(); - if let Some(ref istring) = item.value_str() { - let mut generic_map = def.generics.types.iter_enumerated() - .map(|(param, i, gen)| { - (gen.name.as_str().to_string(), - trait_ref.substs.types.get(param, i) - .to_string()) - }).collect::>(); - generic_map.insert("Self".to_string(), - trait_ref.self_ty().to_string()); - let parser = Parser::new(&istring); - let mut errored = false; - let err: String = parser.filter_map(|p| { - match p { - Piece::String(s) => Some(s), - Piece::NextArgument(a) => match a.position { - Position::ArgumentNamed(s) => match generic_map.get(s) { - Some(val) => Some(val), - None => { - span_err!(infcx.tcx.sess, err_sp, E0272, - "the #[rustc_on_unimplemented] \ - attribute on \ - trait definition for {} refers to \ - non-existent type parameter {}", - trait_str, s); + fn on_unimplemented_note(&self, + trait_ref: ty::PolyTraitRef<'tcx>, + obligation: &PredicateObligation<'tcx>) -> Option { + let def_id = self.impl_similar_to(trait_ref, obligation) + .unwrap_or(trait_ref.def_id()); + 
let trait_ref = trait_ref.skip_binder(); + + let span = obligation.cause.span; + let mut report = None; + for item in self.tcx.get_attrs(def_id).iter() { + if item.check_name("rustc_on_unimplemented") { + let err_sp = item.meta().span.substitute_dummy(span); + let def = self.tcx.lookup_trait_def(trait_ref.def_id); + let trait_str = def.trait_ref.to_string(); + if let Some(ref istring) = item.value_str() { + let mut generic_map = def.generics.types.iter_enumerated() + .map(|(param, i, gen)| { + (gen.name.as_str().to_string(), + trait_ref.substs.types.get(param, i) + .to_string()) + }).collect::>(); + generic_map.insert("Self".to_string(), + trait_ref.self_ty().to_string()); + let parser = Parser::new(&istring); + let mut errored = false; + let err: String = parser.filter_map(|p| { + match p { + Piece::String(s) => Some(s), + Piece::NextArgument(a) => match a.position { + Position::ArgumentNamed(s) => match generic_map.get(s) { + Some(val) => Some(val), + None => { + span_err!(self.tcx.sess, err_sp, E0272, + "the #[rustc_on_unimplemented] \ + attribute on \ + trait definition for {} refers to \ + non-existent type parameter {}", + trait_str, s); + errored = true; + None + } + }, + _ => { + span_err!(self.tcx.sess, err_sp, E0273, + "the #[rustc_on_unimplemented] attribute \ + on trait definition for {} must have \ + named format arguments, eg \ + `#[rustc_on_unimplemented = \ + \"foo {{T}}\"]`", trait_str); errored = true; None } - }, - _ => { - span_err!(infcx.tcx.sess, err_sp, E0273, - "the #[rustc_on_unimplemented] \ - attribute on \ - trait definition for {} must have named \ - format arguments, \ - eg `#[rustc_on_unimplemented = \ - \"foo {{T}}\"]`", - trait_str); - errored = true; - None } } + }).collect(); + // Report only if the format string checks out + if !errored { + report = Some(err); } - }).collect(); - // Report only if the format string checks out - if !errored { - report = Some(err); + } else { + span_err!(self.tcx.sess, err_sp, E0274, + "the #[rustc_on_unimplemented] attribute on \ + trait definition for {} must have a value, \ + eg `#[rustc_on_unimplemented = \"foo\"]`", + trait_str); } - } else { - span_err!(infcx.tcx.sess, err_sp, E0274, - "the #[rustc_on_unimplemented] attribute on \ - trait definition for {} must have a value, \ - eg `#[rustc_on_unimplemented = \"foo\"]`", - trait_str); + break; } - break; } + report } - report -} -fn find_similar_impl_candidates<'a, 'tcx>( - infcx: &InferCtxt<'a, 'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>) - -> Vec> -{ - let simp = fast_reject::simplify_type(infcx.tcx, - trait_ref.skip_binder().self_ty(), - true); - let mut impl_candidates = Vec::new(); - let trait_def = infcx.tcx.lookup_trait_def(trait_ref.def_id()); - - match simp { - Some(simp) => trait_def.for_each_impl(infcx.tcx, |def_id| { - let imp = infcx.tcx.impl_trait_ref(def_id).unwrap(); - let imp_simp = fast_reject::simplify_type(infcx.tcx, - imp.self_ty(), - true); - if let Some(imp_simp) = imp_simp { - if simp != imp_simp { - return; + fn find_similar_impl_candidates(&self, + trait_ref: ty::PolyTraitRef<'tcx>) + -> Vec> + { + let simp = fast_reject::simplify_type(self.tcx, + trait_ref.skip_binder().self_ty(), + true); + let mut impl_candidates = Vec::new(); + let trait_def = self.tcx.lookup_trait_def(trait_ref.def_id()); + + match simp { + Some(simp) => trait_def.for_each_impl(self.tcx, |def_id| { + let imp = self.tcx.impl_trait_ref(def_id).unwrap(); + let imp_simp = fast_reject::simplify_type(self.tcx, + imp.self_ty(), + true); + if let Some(imp_simp) = imp_simp { + if 
simp != imp_simp { + return; + } } - } - impl_candidates.push(imp); - }), - None => trait_def.for_each_impl(infcx.tcx, |def_id| { - impl_candidates.push( - infcx.tcx.impl_trait_ref(def_id).unwrap()); - }) - }; - impl_candidates -} + impl_candidates.push(imp); + }), + None => trait_def.for_each_impl(self.tcx, |def_id| { + impl_candidates.push( + self.tcx.impl_trait_ref(def_id).unwrap()); + }) + }; + impl_candidates + } -fn report_similar_impl_candidates(span: Span, - err: &mut DiagnosticBuilder, - impl_candidates: &[ty::TraitRef]) -{ - err.fileline_help(span, &format!("the following implementations were found:")); + fn report_similar_impl_candidates(&self, + trait_ref: ty::PolyTraitRef<'tcx>, + err: &mut DiagnosticBuilder) + { + let simp = fast_reject::simplify_type(self.tcx, + trait_ref.skip_binder().self_ty(), + true); + let mut impl_candidates = Vec::new(); + let trait_def = self.tcx.lookup_trait_def(trait_ref.def_id()); + + match simp { + Some(simp) => trait_def.for_each_impl(self.tcx, |def_id| { + let imp = self.tcx.impl_trait_ref(def_id).unwrap(); + let imp_simp = fast_reject::simplify_type(self.tcx, + imp.self_ty(), + true); + if let Some(imp_simp) = imp_simp { + if simp != imp_simp { + return; + } + } + impl_candidates.push(imp); + }), + None => trait_def.for_each_impl(self.tcx, |def_id| { + impl_candidates.push( + self.tcx.impl_trait_ref(def_id).unwrap()); + }) + }; - let end = cmp::min(4, impl_candidates.len()); - for candidate in &impl_candidates[0..end] { - err.fileline_help(span, &format!(" {:?}", candidate)); - } - if impl_candidates.len() > 4 { - err.fileline_help(span, &format!("and {} others", impl_candidates.len()-4)); - } -} + if impl_candidates.is_empty() { + return; + } + + err.help(&format!("the following implementations were found:")); -/// Reports that an overflow has occurred and halts compilation. We -/// halt compilation unconditionally because it is important that -/// overflows never be masked -- they basically represent computations -/// whose result could not be truly determined and thus we can't say -/// if the program type checks or not -- and they are unusual -/// occurrences in any case. -pub fn report_overflow_error<'a, 'tcx, T>(infcx: &InferCtxt<'a, 'tcx>, - obligation: &Obligation<'tcx, T>, - suggest_increasing_limit: bool) - -> ! - where T: fmt::Display + TypeFoldable<'tcx> -{ - let predicate = - infcx.resolve_type_vars_if_possible(&obligation.predicate); - let mut err = struct_span_err!(infcx.tcx.sess, obligation.cause.span, E0275, - "overflow evaluating the requirement `{}`", - predicate); - - if suggest_increasing_limit { - suggest_new_overflow_limit(infcx.tcx, &mut err, obligation.cause.span); + let end = cmp::min(4, impl_candidates.len()); + for candidate in &impl_candidates[0..end] { + err.help(&format!(" {:?}", candidate)); + } + if impl_candidates.len() > 4 { + err.help(&format!("and {} others", impl_candidates.len()-4)); + } } - note_obligation_cause(infcx, &mut err, obligation); + /// Reports that an overflow has occurred and halts compilation. We + /// halt compilation unconditionally because it is important that + /// overflows never be masked -- they basically represent computations + /// whose result could not be truly determined and thus we can't say + /// if the program type checks or not -- and they are unusual + /// occurrences in any case. + pub fn report_overflow_error(&self, + obligation: &Obligation<'tcx, T>, + suggest_increasing_limit: bool) -> ! 
+ where T: fmt::Display + TypeFoldable<'tcx> + { + let predicate = + self.resolve_type_vars_if_possible(&obligation.predicate); + let mut err = struct_span_err!(self.tcx.sess, obligation.cause.span, E0275, + "overflow evaluating the requirement `{}`", + predicate); - err.emit(); - infcx.tcx.sess.abort_if_errors(); - bug!(); -} + if suggest_increasing_limit { + self.suggest_new_overflow_limit(&mut err); + } -/// Reports that a cycle was detected which led to overflow and halts -/// compilation. This is equivalent to `report_overflow_error` except -/// that we can give a more helpful error message (and, in particular, -/// we do not suggest increasing the overflow limit, which is not -/// going to help). -pub fn report_overflow_error_cycle<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - cycle: &Vec>) - -> ! -{ - assert!(cycle.len() > 1); + self.note_obligation_cause(&mut err, obligation); - debug!("report_overflow_error_cycle(cycle length = {})", cycle.len()); + err.emit(); + self.tcx.sess.abort_if_errors(); + bug!(); + } - let cycle = infcx.resolve_type_vars_if_possible(cycle); + /// Reports that a cycle was detected which led to overflow and halts + /// compilation. This is equivalent to `report_overflow_error` except + /// that we can give a more helpful error message (and, in particular, + /// we do not suggest increasing the overflow limit, which is not + /// going to help). + pub fn report_overflow_error_cycle(&self, cycle: &[PredicateObligation<'tcx>]) -> ! { + let cycle = self.resolve_type_vars_if_possible(&cycle.to_owned()); + assert!(cycle.len() > 0); - debug!("report_overflow_error_cycle: cycle={:?}", cycle); + debug!("report_overflow_error_cycle: cycle={:?}", cycle); - assert_eq!(&cycle[0].predicate, &cycle.last().unwrap().predicate); + self.report_overflow_error(&cycle[0], false); + } - try_report_overflow_error_type_of_infinite_size(infcx, &cycle); - report_overflow_error(infcx, &cycle[0], false); -} + pub fn report_selection_error(&self, + obligation: &PredicateObligation<'tcx>, + error: &SelectionError<'tcx>, + warning_node_id: Option) + { + let span = obligation.cause.span; + let mut err = match *error { + SelectionError::Unimplemented => { + if let ObligationCauseCode::CompareImplMethodObligation = obligation.cause.code { + span_err!( + self.tcx.sess, span, E0276, + "the requirement `{}` appears on the impl \ + method but not on the corresponding trait method", + obligation.predicate); + return; + } else { + match obligation.predicate { + ty::Predicate::Trait(ref trait_predicate) => { + let trait_predicate = + self.resolve_type_vars_if_possible(trait_predicate); + + if self.tcx.sess.has_errors() && trait_predicate.references_error() { + return; + } else { + let trait_ref = trait_predicate.to_poly_trait_ref(); + + if let Some(warning_node_id) = warning_node_id { + self.tcx.sess.add_lint( + ::lint::builtin::UNSIZED_IN_TUPLE, + warning_node_id, + obligation.cause.span, + format!("the trait bound `{}` is not satisfied", + trait_ref.to_predicate())); + return; + } -/// If a cycle results from evaluated whether something is Sized, that -/// is a particular special case that always results from a struct or -/// enum definition that lacks indirection (e.g., `struct Foo { x: Foo -/// }`). We wish to report a targeted error for this case. 
-pub fn try_report_overflow_error_type_of_infinite_size<'a, 'tcx>( - infcx: &InferCtxt<'a, 'tcx>, - cycle: &[PredicateObligation<'tcx>]) -{ - let sized_trait = match infcx.tcx.lang_items.sized_trait() { - Some(v) => v, - None => return, - }; - let top_is_sized = { - match cycle[0].predicate { - ty::Predicate::Trait(ref data) => data.def_id() == sized_trait, - _ => false, - } - }; - if !top_is_sized { - return; - } + let mut err = struct_span_err!( + self.tcx.sess, span, E0277, + "the trait bound `{}` is not satisfied", + trait_ref.to_predicate()); + + // Try to report a help message + + if !trait_ref.has_infer_types() && + self.predicate_can_apply(trait_ref) { + // If a where-clause may be useful, remind the + // user that they can add it. + // + // don't display an on-unimplemented note, as + // these notes will often be of the form + // "the type `T` can't be frobnicated" + // which is somewhat confusing. + err.help(&format!("consider adding a `where {}` bound", + trait_ref.to_predicate())); + } else if let Some(s) = self.on_unimplemented_note(trait_ref, + obligation) { + // If it has a custom "#[rustc_on_unimplemented]" + // error message, let's display it! + err.note(&s); + } else { + // If we can't show anything useful, try to find + // similar impls. + let impl_candidates = + self.find_similar_impl_candidates(trait_ref); + if impl_candidates.len() > 0 { + self.report_similar_impl_candidates(trait_ref, &mut err); + } + } + err + } + } - // The only way to have a type of infinite size is to have, - // somewhere, a struct/enum type involved. Identify all such types - // and report the cycle to the user. - - let struct_enum_tys: Vec<_> = - cycle.iter() - .flat_map(|obligation| match obligation.predicate { - ty::Predicate::Trait(ref data) => { - assert_eq!(data.def_id(), sized_trait); - let self_ty = data.skip_binder().trait_ref.self_ty(); // (*) - // (*) ok to skip binder because this is just - // error reporting and regions don't really - // matter - match self_ty.sty { - ty::TyEnum(..) | ty::TyStruct(..) => Some(self_ty), - _ => None, - } - } - _ => { - span_bug!(obligation.cause.span, - "Sized cycle involving non-trait-ref: {:?}", - obligation.predicate); - } - }) - .collect(); - - assert!(!struct_enum_tys.is_empty()); - - // This is a bit tricky. We want to pick a "main type" in the - // listing that is local to the current crate, so we can give a - // good span to the user. But it might not be the first one in our - // cycle list. So find the first one that is local and then - // rotate. - let (main_index, main_def_id) = - struct_enum_tys.iter() - .enumerate() - .filter_map(|(index, ty)| match ty.sty { - ty::TyEnum(adt_def, _) | ty::TyStruct(adt_def, _) - if adt_def.did.is_local() => - Some((index, adt_def.did)), - _ => - None, - }) - .next() - .unwrap(); // should always be SOME local type involved! - - // Rotate so that the "main" type is at index 0. 
- let struct_enum_tys: Vec<_> = - struct_enum_tys.iter() - .cloned() - .skip(main_index) - .chain(struct_enum_tys.iter().cloned().take(main_index)) - .collect(); - - let tcx = infcx.tcx; - let mut err = recursive_type_with_infinite_size_error(tcx, main_def_id); - let len = struct_enum_tys.len(); - if len > 2 { - let span = tcx.map.span_if_local(main_def_id).unwrap(); - err.fileline_note(span, - &format!("type `{}` is embedded within `{}`...", - struct_enum_tys[0], - struct_enum_tys[1])); - for &next_ty in &struct_enum_tys[1..len-1] { - err.fileline_note(span, - &format!("...which in turn is embedded within `{}`...", next_ty)); - } - err.fileline_note(span, - &format!("...which in turn is embedded within `{}`, \ - completing the cycle.", - struct_enum_tys[len-1])); - } - err.emit(); - infcx.tcx.sess.abort_if_errors(); - bug!(); -} + ty::Predicate::Equate(ref predicate) => { + let predicate = self.resolve_type_vars_if_possible(predicate); + let err = self.equality_predicate(span, + &predicate).err().unwrap(); + struct_span_err!(self.tcx.sess, span, E0278, + "the requirement `{}` is not satisfied (`{}`)", + predicate, err) + } -pub fn recursive_type_with_infinite_size_error<'tcx>(tcx: &TyCtxt<'tcx>, - type_def_id: DefId) - -> DiagnosticBuilder<'tcx> -{ - assert!(type_def_id.is_local()); - let span = tcx.map.span_if_local(type_def_id).unwrap(); - let mut err = struct_span_err!(tcx.sess, span, E0072, "recursive type `{}` has infinite size", - tcx.item_path_str(type_def_id)); - err.fileline_help(span, &format!("insert indirection (e.g., a `Box`, `Rc`, or `&`) \ - at some point to make `{}` representable", - tcx.item_path_str(type_def_id))); - err -} + ty::Predicate::RegionOutlives(ref predicate) => { + let predicate = self.resolve_type_vars_if_possible(predicate); + let err = self.region_outlives_predicate(span, + &predicate).err().unwrap(); + struct_span_err!(self.tcx.sess, span, E0279, + "the requirement `{}` is not satisfied (`{}`)", + predicate, err) + } -pub fn report_selection_error<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - obligation: &PredicateObligation<'tcx>, - error: &SelectionError<'tcx>) -{ - match *error { - SelectionError::Unimplemented => { - if let ObligationCauseCode::CompareImplMethodObligation = obligation.cause.code { - span_err!( - infcx.tcx.sess, obligation.cause.span, E0276, - "the requirement `{}` appears on the impl \ - method but not on the corresponding trait method", - obligation.predicate); - } else { - match obligation.predicate { - ty::Predicate::Trait(ref trait_predicate) => { - let trait_predicate = - infcx.resolve_type_vars_if_possible(trait_predicate); + ty::Predicate::Projection(..) | ty::Predicate::TypeOutlives(..) => { + let predicate = + self.resolve_type_vars_if_possible(&obligation.predicate); + struct_span_err!(self.tcx.sess, span, E0280, + "the requirement `{}` is not satisfied", + predicate) + } - if !infcx.tcx.sess.has_errors() || !trait_predicate.references_error() { - let trait_ref = trait_predicate.to_poly_trait_ref(); - let mut err = struct_span_err!( - infcx.tcx.sess, obligation.cause.span, E0277, - "the trait bound `{}` is not satisfied", - trait_ref.to_predicate()); - - // Try to report a help message - - if !trait_ref.has_infer_types() && - predicate_can_apply(infcx, trait_ref) - { - // If a where-clause may be useful, remind the - // user that they can add it. - // - // don't display an on-unimplemented note, as - // these notes will often be of the form - // "the type `T` can't be frobnicated" - // which is somewhat confusing. 
- err.fileline_help(obligation.cause.span, &format!( - "consider adding a `where {}` bound", - trait_ref.to_predicate() - )); - } else if let Some(s) = on_unimplemented_note(infcx, trait_ref, - obligation.cause.span) { - // Otherwise, if there is an on-unimplemented note, - // display it. - err.fileline_note(obligation.cause.span, &s); + ty::Predicate::ObjectSafe(trait_def_id) => { + let violations = self.tcx.object_safety_violations(trait_def_id); + let err = self.tcx.report_object_safety_error(span, + trait_def_id, + warning_node_id, + violations); + if let Some(err) = err { + err } else { - // If we can't show anything useful, try to find - // similar impls. - - let impl_candidates = - find_similar_impl_candidates(infcx, trait_ref); - if impl_candidates.len() > 0 { - report_similar_impl_candidates(obligation.cause.span, - &mut err, &impl_candidates); - } + return; } - note_obligation_cause(infcx, &mut err, obligation); - err.emit(); } - }, - ty::Predicate::Equate(ref predicate) => { - let predicate = infcx.resolve_type_vars_if_possible(predicate); - let err = infcx.equality_predicate(obligation.cause.span, - &predicate).err().unwrap(); - let mut err = struct_span_err!( - infcx.tcx.sess, obligation.cause.span, E0278, - "the requirement `{}` is not satisfied (`{}`)", - predicate, - err); - note_obligation_cause(infcx, &mut err, obligation); - err.emit(); - } - ty::Predicate::RegionOutlives(ref predicate) => { - let predicate = infcx.resolve_type_vars_if_possible(predicate); - let err = infcx.region_outlives_predicate(obligation.cause.span, - &predicate).err().unwrap(); - let mut err = struct_span_err!( - infcx.tcx.sess, obligation.cause.span, E0279, - "the requirement `{}` is not satisfied (`{}`)", - predicate, - err); - note_obligation_cause(infcx, &mut err, obligation); - err.emit(); - } - - ty::Predicate::Projection(..) | ty::Predicate::TypeOutlives(..) => { - let predicate = - infcx.resolve_type_vars_if_possible(&obligation.predicate); - let mut err = struct_span_err!( - infcx.tcx.sess, obligation.cause.span, E0280, - "the requirement `{}` is not satisfied", - predicate); - note_obligation_cause(infcx, &mut err, obligation); - err.emit(); - } + ty::Predicate::ClosureKind(closure_def_id, kind) => { + let found_kind = self.closure_kind(closure_def_id).unwrap(); + let closure_span = self.tcx.map.span_if_local(closure_def_id).unwrap(); + let mut err = struct_span_err!( + self.tcx.sess, closure_span, E0525, + "expected a closure that implements the `{}` trait, \ + but this closure only implements `{}`", + kind, + found_kind); + err.span_note( + obligation.cause.span, + &format!("the requirement to implement \ + `{}` derives from here", kind)); + err.emit(); + return; + } - ty::Predicate::ObjectSafe(trait_def_id) => { - let violations = object_safety_violations( - infcx.tcx, trait_def_id); - let mut err = report_object_safety_error(infcx.tcx, - obligation.cause.span, - trait_def_id, - violations); - note_obligation_cause(infcx, &mut err, obligation); - err.emit(); - } + ty::Predicate::WellFormed(ty) => { + // WF predicates cannot themselves make + // errors. They can only block due to + // ambiguity; otherwise, they always + // degenerate into other obligations + // (which may fail). + span_bug!(span, "WF predicate not satisfied for {:?}", ty); + } - ty::Predicate::WellFormed(ty) => { - // WF predicates cannot themselves make - // errors. They can only block due to - // ambiguity; otherwise, they always - // degenerate into other obligations - // (which may fail). 
- span_bug!( - obligation.cause.span, - "WF predicate not satisfied for {:?}", - ty); + ty::Predicate::Rfc1592(ref data) => { + span_bug!( + obligation.cause.span, + "RFC1592 predicate not satisfied for {:?}", + data); + } } } } - } - OutputTypeParameterMismatch(ref expected_trait_ref, ref actual_trait_ref, ref e) => { - let expected_trait_ref = infcx.resolve_type_vars_if_possible(&*expected_trait_ref); - let actual_trait_ref = infcx.resolve_type_vars_if_possible(&*actual_trait_ref); - if !actual_trait_ref.self_ty().references_error() { - let mut err = struct_span_err!( - infcx.tcx.sess, obligation.cause.span, E0281, + OutputTypeParameterMismatch(ref expected_trait_ref, ref actual_trait_ref, ref e) => { + let expected_trait_ref = self.resolve_type_vars_if_possible(&*expected_trait_ref); + let actual_trait_ref = self.resolve_type_vars_if_possible(&*actual_trait_ref); + if actual_trait_ref.self_ty().references_error() { + return; + } + struct_span_err!(self.tcx.sess, span, E0281, "type mismatch: the type `{}` implements the trait `{}`, \ but the trait `{}` is required ({})", expected_trait_ref.self_ty(), expected_trait_ref, actual_trait_ref, - e); - note_obligation_cause(infcx, &mut err, obligation); - err.emit(); + e) } - } - TraitNotObjectSafe(did) => { - let violations = object_safety_violations(infcx.tcx, did); - let mut err = report_object_safety_error(infcx.tcx, obligation.cause.span, did, - violations); - note_obligation_cause(infcx, &mut err, obligation); - err.emit(); - } + TraitNotObjectSafe(did) => { + let violations = self.tcx.object_safety_violations(did); + let err = self.tcx.report_object_safety_error(span, did, + warning_node_id, + violations); + if let Some(err) = err { + err + } else { + return; + } + } + }; + self.note_obligation_cause(&mut err, obligation); + err.emit(); } } -pub fn report_object_safety_error<'tcx>(tcx: &TyCtxt<'tcx>, - span: Span, - trait_def_id: DefId, - violations: Vec) - -> DiagnosticBuilder<'tcx> -{ - let mut err = struct_span_err!( - tcx.sess, span, E0038, - "the trait `{}` cannot be made into an object", - tcx.item_path_str(trait_def_id)); - - let mut reported_violations = FnvHashSet(); - for violation in violations { - if !reported_violations.insert(violation.clone()) { - continue; - } - match violation { - ObjectSafetyViolation::SizedSelf => { - err.fileline_note( - span, - "the trait cannot require that `Self : Sized`"); - } +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn recursive_type_with_infinite_size_error(self, + type_def_id: DefId) + -> DiagnosticBuilder<'tcx> + { + assert!(type_def_id.is_local()); + let span = self.map.span_if_local(type_def_id).unwrap(); + let mut err = struct_span_err!(self.sess, span, E0072, + "recursive type `{}` has infinite size", + self.item_path_str(type_def_id)); + err.help(&format!("insert indirection (e.g., a `Box`, `Rc`, or `&`) \ + at some point to make `{}` representable", + self.item_path_str(type_def_id))); + err + } - ObjectSafetyViolation::SupertraitSelf => { - err.fileline_note( - span, - "the trait cannot use `Self` as a type parameter \ - in the supertrait listing"); + pub fn report_object_safety_error(self, + span: Span, + trait_def_id: DefId, + warning_node_id: Option, + violations: Vec) + -> Option> + { + let mut err = match warning_node_id { + Some(_) => None, + None => { + Some(struct_span_err!( + self.sess, span, E0038, + "the trait `{}` cannot be made into an object", + self.item_path_str(trait_def_id))) } + }; - ObjectSafetyViolation::Method(method, - 
MethodViolationCode::StaticMethod) => { - err.fileline_note( - span, - &format!("method `{}` has no receiver", - method.name)); + let mut reported_violations = FnvHashSet(); + for violation in violations { + if !reported_violations.insert(violation.clone()) { + continue; } + let buf; + let note = match violation { + ObjectSafetyViolation::SizedSelf => { + "the trait cannot require that `Self : Sized`" + } - ObjectSafetyViolation::Method(method, - MethodViolationCode::ReferencesSelf) => { - err.fileline_note( - span, - &format!("method `{}` references the `Self` type \ - in its arguments or return type", - method.name)); - } + ObjectSafetyViolation::SupertraitSelf => { + "the trait cannot use `Self` as a type parameter \ + in the supertrait listing" + } + + ObjectSafetyViolation::Method(method, + MethodViolationCode::StaticMethod) => { + buf = format!("method `{}` has no receiver", + method.name); + &buf + } + + ObjectSafetyViolation::Method(method, + MethodViolationCode::ReferencesSelf) => { + buf = format!("method `{}` references the `Self` type \ + in its arguments or return type", + method.name); + &buf + } - ObjectSafetyViolation::Method(method, - MethodViolationCode::Generic) => { - err.fileline_note( - span, - &format!("method `{}` has generic type parameters", - method.name)); + ObjectSafetyViolation::Method(method, + MethodViolationCode::Generic) => { + buf = format!("method `{}` has generic type parameters", + method.name); + &buf + } + }; + match (warning_node_id, &mut err) { + (Some(node_id), &mut None) => { + self.sess.add_lint( + ::lint::builtin::OBJECT_UNSAFE_FRAGMENT, + node_id, + span, + note.to_string()); + } + (None, &mut Some(ref mut err)) => { + err.note(note); + } + _ => unreachable!() } } + err } - err } -pub fn maybe_report_ambiguity<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - obligation: &PredicateObligation<'tcx>) { - // Unable to successfully determine, probably means - // insufficient type information, but could mean - // ambiguous impls. The latter *ought* to be a - // coherence violation, so we don't report it here. - - let predicate = infcx.resolve_type_vars_if_possible(&obligation.predicate); - - debug!("maybe_report_ambiguity(predicate={:?}, obligation={:?})", - predicate, - obligation); - - match predicate { - ty::Predicate::Trait(ref data) => { - let trait_ref = data.to_poly_trait_ref(); - let self_ty = trait_ref.self_ty(); - let all_types = &trait_ref.substs().types; - if all_types.references_error() { - } else { - // Typically, this ambiguity should only happen if - // there are unresolved type inference variables - // (otherwise it would suggest a coherence - // failure). But given #21974 that is not necessarily - // the case -- we can have multiple where clauses that - // are only distinguished by a region, which results - // in an ambiguity even when all types are fully - // known, since we don't dispatch based on region - // relationships. - - // This is kind of a hack: it frequently happens that some earlier - // error prevents types from being fully inferred, and then we get - // a bunch of uninteresting errors saying something like " doesn't implement Sized". It may even be true that we - // could just skip over all checks where the self-ty is an - // inference variable, but I was afraid that there might be an - // inference variable created, registered as an obligation, and - // then never forced by writeback, and hence by skipping here we'd - // be ignoring the fact that we don't KNOW the type works - // out. 
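// Illustrative sketch (not part of the patch): the "hard error or forward-compatibility
// lint" split that report_object_safety_error above now implements via its
// warning_node_id parameter, modelled with plain collections instead of the session's
// lint machinery and DiagnosticBuilder.
type NodeId = u32;

#[derive(Default)]
struct Diagnostics {
    errors: Vec<String>,
    lints: Vec<(NodeId, String)>,
}

impl Diagnostics {
    fn report_violation(&mut self, warning_node_id: Option<NodeId>, note: &str) {
        match warning_node_id {
            // With a node id the violation only feeds an OBJECT_UNSAFE_FRAGMENT-style lint.
            Some(id) => self.lints.push((id, note.to_string())),
            // Without one it stays a regular object-safety error.
            None => self.errors.push(note.to_string()),
        }
    }
}

fn main() {
    let mut diags = Diagnostics::default();
    diags.report_violation(None, "the trait cannot require that `Self : Sized`");
    diags.report_violation(Some(7), "method `next` has no receiver");
    assert_eq!(diags.errors.len(), 1);
    assert_eq!(diags.lints.len(), 1);
}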
Though even that would probably be harmless, given that - // we're only talking about builtin traits, which are known to be - // inhabited. But in any case I just threw in this check for - // has_errors() to be sure that compilation isn't happening - // anyway. In that case, why inundate the user. - if !infcx.tcx.sess.has_errors() { - if - infcx.tcx.lang_items.sized_trait() - .map_or(false, |sized_id| sized_id == trait_ref.def_id()) - { - need_type_info(infcx, obligation.cause.span, self_ty); - } else { - let mut err = struct_span_err!(infcx.tcx.sess, obligation.cause.span, E0283, - "type annotations required: \ - cannot resolve `{}`", - predicate); - note_obligation_cause(infcx, &mut err, obligation); - err.emit(); +impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { + fn maybe_report_ambiguity(&self, obligation: &PredicateObligation<'tcx>) { + // Unable to successfully determine, probably means + // insufficient type information, but could mean + // ambiguous impls. The latter *ought* to be a + // coherence violation, so we don't report it here. + + let predicate = self.resolve_type_vars_if_possible(&obligation.predicate); + + debug!("maybe_report_ambiguity(predicate={:?}, obligation={:?})", + predicate, + obligation); + + // Ambiguity errors are often caused as fallout from earlier + // errors. So just ignore them if this infcx is tainted. + if self.is_tainted_by_errors() { + return; + } + + match predicate { + ty::Predicate::Trait(ref data) => { + let trait_ref = data.to_poly_trait_ref(); + let self_ty = trait_ref.self_ty(); + let all_types = &trait_ref.substs().types; + if all_types.references_error() { + } else { + // Typically, this ambiguity should only happen if + // there are unresolved type inference variables + // (otherwise it would suggest a coherence + // failure). But given #21974 that is not necessarily + // the case -- we can have multiple where clauses that + // are only distinguished by a region, which results + // in an ambiguity even when all types are fully + // known, since we don't dispatch based on region + // relationships. + + // This is kind of a hack: it frequently happens that some earlier + // error prevents types from being fully inferred, and then we get + // a bunch of uninteresting errors saying something like " doesn't implement Sized". It may even be true that we + // could just skip over all checks where the self-ty is an + // inference variable, but I was afraid that there might be an + // inference variable created, registered as an obligation, and + // then never forced by writeback, and hence by skipping here we'd + // be ignoring the fact that we don't KNOW the type works + // out. Though even that would probably be harmless, given that + // we're only talking about builtin traits, which are known to be + // inhabited. But in any case I just threw in this check for + // has_errors() to be sure that compilation isn't happening + // anyway. In that case, why inundate the user. + if !self.tcx.sess.has_errors() { + if + self.tcx.lang_items.sized_trait() + .map_or(false, |sized_id| sized_id == trait_ref.def_id()) + { + self.need_type_info(obligation.cause.span, self_ty); + } else { + let mut err = struct_span_err!(self.tcx.sess, + obligation.cause.span, E0283, + "type annotations required: \ + cannot resolve `{}`", + predicate); + self.note_obligation_cause(&mut err, obligation); + err.emit(); + } } } } - } - ty::Predicate::WellFormed(ty) => { - // Same hacky approach as above to avoid deluging user - // with error messages. 
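Editor's note: the E0283 path above fires when a trait obligation stays ambiguous because of unresolved inference variables. A standalone sketch (hypothetical user code) of the usual trigger and its fixes:

```rust
use std::str::FromStr;

// With no annotation, `_: FromStr` cannot be resolved to a single impl, so
// the compiler asks for type annotations instead of guessing:
//
//     let n = "42".parse().unwrap();   // ambiguous: which `FromStr` impl?
//
fn main() {
    let a: u32 = "42".parse().unwrap();      // annotate the binding, or
    let b = "42".parse::<u32>().unwrap();    // use a turbofish, or
    let c = u32::from_str("42").unwrap();    // name the impl directly
    println!("{} {} {}", a, b, c);
}
```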
- if !ty.references_error() && !infcx.tcx.sess.has_errors() { - need_type_info(infcx, obligation.cause.span, ty); + ty::Predicate::WellFormed(ty) => { + // Same hacky approach as above to avoid deluging user + // with error messages. + if !ty.references_error() && !self.tcx.sess.has_errors() { + self.need_type_info(obligation.cause.span, ty); + } } - } - _ => { - if !infcx.tcx.sess.has_errors() { - let mut err = struct_span_err!(infcx.tcx.sess, obligation.cause.span, E0284, - "type annotations required: cannot resolve `{}`", - predicate); - note_obligation_cause(infcx, &mut err, obligation); - err.emit(); + _ => { + if !self.tcx.sess.has_errors() { + let mut err = struct_span_err!(self.tcx.sess, + obligation.cause.span, E0284, + "type annotations required: \ + cannot resolve `{}`", + predicate); + self.note_obligation_cause(&mut err, obligation); + err.emit(); + } } } } -} -/// Returns whether the trait predicate may apply for *some* assignment -/// to the type parameters. -fn predicate_can_apply<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - pred: ty::PolyTraitRef<'tcx>) - -> bool -{ - struct ParamToVarFolder<'a, 'tcx: 'a> { - infcx: &'a InferCtxt<'a, 'tcx>, - var_map: FnvHashMap, Ty<'tcx>> - } + /// Returns whether the trait predicate may apply for *some* assignment + /// to the type parameters. + fn predicate_can_apply(&self, pred: ty::PolyTraitRef<'tcx>) -> bool { + struct ParamToVarFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, + var_map: FnvHashMap, Ty<'tcx>> + } - impl<'a, 'tcx> TypeFolder<'tcx> for ParamToVarFolder<'a, 'tcx> - { - fn tcx(&self) -> &TyCtxt<'tcx> { self.infcx.tcx } + impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for ParamToVarFolder<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.infcx.tcx } - fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { - if let ty::TyParam(..) = ty.sty { - let infcx = self.infcx; - self.var_map.entry(ty).or_insert_with(|| infcx.next_ty_var()) - } else { - ty.super_fold_with(self) + fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { + if let ty::TyParam(..) 
= ty.sty { + let infcx = self.infcx; + self.var_map.entry(ty).or_insert_with(|| infcx.next_ty_var()) + } else { + ty.super_fold_with(self) + } } } - } - infcx.probe(|_| { - let mut selcx = SelectionContext::new(infcx); + self.probe(|_| { + let mut selcx = SelectionContext::new(self); - let cleaned_pred = pred.fold_with(&mut ParamToVarFolder { - infcx: infcx, - var_map: FnvHashMap() - }); + let cleaned_pred = pred.fold_with(&mut ParamToVarFolder { + infcx: self, + var_map: FnvHashMap() + }); - let cleaned_pred = super::project::normalize( - &mut selcx, - ObligationCause::dummy(), - &cleaned_pred - ).value; + let cleaned_pred = super::project::normalize( + &mut selcx, + ObligationCause::dummy(), + &cleaned_pred + ).value; - let obligation = Obligation::new( - ObligationCause::dummy(), - cleaned_pred.to_predicate() - ); + let obligation = Obligation::new( + ObligationCause::dummy(), + cleaned_pred.to_predicate() + ); - selcx.evaluate_obligation(&obligation) - }) -} + selcx.evaluate_obligation(&obligation) + }) + } -fn need_type_info<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - span: Span, - ty: Ty<'tcx>) -{ - span_err!(infcx.tcx.sess, span, E0282, - "unable to infer enough type information about `{}`; \ - type annotations or generic parameter binding required", - ty); -} + fn need_type_info(&self, span: Span, ty: Ty<'tcx>) { + span_err!(self.tcx.sess, span, E0282, + "unable to infer enough type information about `{}`; \ + type annotations or generic parameter binding required", + ty); + } -fn note_obligation_cause<'a, 'tcx, T>(infcx: &InferCtxt<'a, 'tcx>, - err: &mut DiagnosticBuilder, - obligation: &Obligation<'tcx, T>) - where T: fmt::Display -{ - note_obligation_cause_code(infcx, - err, - &obligation.predicate, - obligation.cause.span, - &obligation.cause.code); -} + fn note_obligation_cause(&self, + err: &mut DiagnosticBuilder, + obligation: &Obligation<'tcx, T>) + where T: fmt::Display + { + self.note_obligation_cause_code(err, + &obligation.predicate, + &obligation.cause.code); + } -fn note_obligation_cause_code<'a, 'tcx, T>(infcx: &InferCtxt<'a, 'tcx>, - err: &mut DiagnosticBuilder, - predicate: &T, - cause_span: Span, - cause_code: &ObligationCauseCode<'tcx>) - where T: fmt::Display -{ - let tcx = infcx.tcx; - match *cause_code { - ObligationCauseCode::MiscObligation => { } - ObligationCauseCode::SliceOrArrayElem => { - err.fileline_note( - cause_span, - "slice and array elements must have `Sized` type"); - } - ObligationCauseCode::ProjectionWf(data) => { - err.fileline_note( - cause_span, - &format!("required so that the projection `{}` is well-formed", - data)); - } - ObligationCauseCode::ReferenceOutlivesReferent(ref_ty) => { - err.fileline_note( - cause_span, - &format!("required so that reference `{}` does not outlive its referent", - ref_ty)); - } - ObligationCauseCode::ItemObligation(item_def_id) => { - let item_name = tcx.item_path_str(item_def_id); - err.fileline_note( - cause_span, - &format!("required by `{}`", item_name)); - } - ObligationCauseCode::ObjectCastObligation(object_ty) => { - err.fileline_note( - cause_span, - &format!( - "required for the cast to the object type `{}`", - infcx.ty_to_string(object_ty))); - } - ObligationCauseCode::RepeatVec => { - err.fileline_note( - cause_span, - "the `Copy` trait is required because the \ - repeated element will be copied"); - } - ObligationCauseCode::VariableType(_) => { - err.fileline_note( - cause_span, - "all local variables must have a statically known size"); - } - ObligationCauseCode::ReturnType => { - err.fileline_note( 
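Editor's note: `need_type_info` above backs E0282, the plain "unable to infer enough type information" case (as opposed to the ambiguous-impl E0283 case). A standalone sketch (hypothetical user code):

```rust
// Without a use that pins the element type, `Vec::new()` leaves `_`
// uninferred and E0282 asks for an annotation or generic parameter binding:
//
//     let v = Vec::new();   // error[E0282]
//
fn main() {
    let mut v: Vec<i32> = Vec::new();   // annotate the binding, or
    v.push(1);
    let w = Vec::<String>::new();       // bind the generic parameter directly
    println!("{} {}", v.len(), w.len());
}
```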
- cause_span, - "the return type of a function must have a \ - statically known size"); - } - ObligationCauseCode::AssignmentLhsSized => { - err.fileline_note( - cause_span, - "the left-hand-side of an assignment must have a statically known size"); - } - ObligationCauseCode::StructInitializerSized => { - err.fileline_note( - cause_span, - "structs must have a statically known size to be initialized"); - } - ObligationCauseCode::ClosureCapture(var_id, _, builtin_bound) => { - let def_id = tcx.lang_items.from_builtin_kind(builtin_bound).unwrap(); - let trait_name = tcx.item_path_str(def_id); - let name = tcx.local_var_name_str(var_id); - err.fileline_note( - cause_span, - &format!("the closure that captures `{}` requires that all captured variables \ - implement the trait `{}`", - name, - trait_name)); - } - ObligationCauseCode::FieldSized => { - err.fileline_note( - cause_span, - "only the last field of a struct or enum variant \ - may have a dynamically sized type"); - } - ObligationCauseCode::SharedStatic => { - err.fileline_note( - cause_span, - "shared static variables must have a type that implements `Sync`"); - } - ObligationCauseCode::BuiltinDerivedObligation(ref data) => { - let parent_trait_ref = infcx.resolve_type_vars_if_possible(&data.parent_trait_ref); - err.fileline_note( - cause_span, - &format!("required because it appears within the type `{}`", - parent_trait_ref.0.self_ty())); - let parent_predicate = parent_trait_ref.to_predicate(); - note_obligation_cause_code(infcx, - err, - &parent_predicate, - cause_span, - &data.parent_code); - } - ObligationCauseCode::ImplDerivedObligation(ref data) => { - let parent_trait_ref = infcx.resolve_type_vars_if_possible(&data.parent_trait_ref); - err.fileline_note( - cause_span, - &format!("required because of the requirements on the impl of `{}` for `{}`", - parent_trait_ref, - parent_trait_ref.0.self_ty())); - let parent_predicate = parent_trait_ref.to_predicate(); - note_obligation_cause_code(infcx, - err, - &parent_predicate, - cause_span, - &data.parent_code); - } - ObligationCauseCode::CompareImplMethodObligation => { - err.fileline_note( - cause_span, - &format!("the requirement `{}` appears on the impl method \ - but not on the corresponding trait method", - predicate)); + fn note_obligation_cause_code(&self, + err: &mut DiagnosticBuilder, + predicate: &T, + cause_code: &ObligationCauseCode<'tcx>) + where T: fmt::Display + { + let tcx = self.tcx; + match *cause_code { + ObligationCauseCode::MiscObligation => { } + ObligationCauseCode::SliceOrArrayElem => { + err.note("slice and array elements must have `Sized` type"); + } + ObligationCauseCode::TupleElem => { + err.note("tuple elements must have `Sized` type"); + } + ObligationCauseCode::ProjectionWf(data) => { + err.note(&format!("required so that the projection `{}` is well-formed", + data)); + } + ObligationCauseCode::ReferenceOutlivesReferent(ref_ty) => { + err.note(&format!("required so that reference `{}` does not outlive its referent", + ref_ty)); + } + ObligationCauseCode::ItemObligation(item_def_id) => { + let item_name = tcx.item_path_str(item_def_id); + err.note(&format!("required by `{}`", item_name)); + } + ObligationCauseCode::ObjectCastObligation(object_ty) => { + err.note(&format!("required for the cast to the object type `{}`", + self.ty_to_string(object_ty))); + } + ObligationCauseCode::RepeatVec => { + err.note("the `Copy` trait is required because the \ + repeated element will be copied"); + } + ObligationCauseCode::VariableType(_) => { + err.note("all local 
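Editor's note: several of the notes converted to `err.note` in this hunk describe `Sized` obligations; the `FieldSized` one in particular is the rule that only the last field of a struct or enum variant may be dynamically sized. A standalone sketch (hypothetical user code):

```rust
// Only the last field may be unsized, because every earlier field needs a
// statically known offset.
struct Record<T: ?Sized> {
    len: usize,
    data: T,   // fine as the last field; `Record<[u8]>` is itself unsized
}

// struct Bad { data: [u8], len: usize }   // rejected: the DST field is not last

fn total_len(r: &Record<[u8]>) -> usize {
    r.len + r.data.len()
}

fn main() {
    let sized: Record<[u8; 3]> = Record { len: 3, data: [1, 2, 3] };
    let as_slice: &Record<[u8]> = &sized;   // unsizing coercion on the last field
    println!("{}", total_len(as_slice));
}
```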
variables must have a statically known size"); + } + ObligationCauseCode::ReturnType => { + err.note("the return type of a function must have a \ + statically known size"); + } + ObligationCauseCode::AssignmentLhsSized => { + err.note("the left-hand-side of an assignment must have a statically known size"); + } + ObligationCauseCode::StructInitializerSized => { + err.note("structs must have a statically known size to be initialized"); + } + ObligationCauseCode::ClosureCapture(var_id, _, builtin_bound) => { + let def_id = tcx.lang_items.from_builtin_kind(builtin_bound).unwrap(); + let trait_name = tcx.item_path_str(def_id); + let name = tcx.local_var_name_str(var_id); + err.note( + &format!("the closure that captures `{}` requires that all captured variables \ + implement the trait `{}`", + name, + trait_name)); + } + ObligationCauseCode::FieldSized => { + err.note("only the last field of a struct or enum variant \ + may have a dynamically sized type"); + } + ObligationCauseCode::SharedStatic => { + err.note("shared static variables must have a type that implements `Sync`"); + } + ObligationCauseCode::BuiltinDerivedObligation(ref data) => { + let parent_trait_ref = self.resolve_type_vars_if_possible(&data.parent_trait_ref); + err.note(&format!("required because it appears within the type `{}`", + parent_trait_ref.0.self_ty())); + let parent_predicate = parent_trait_ref.to_predicate(); + self.note_obligation_cause_code(err, + &parent_predicate, + &data.parent_code); + } + ObligationCauseCode::ImplDerivedObligation(ref data) => { + let parent_trait_ref = self.resolve_type_vars_if_possible(&data.parent_trait_ref); + err.note( + &format!("required because of the requirements on the impl of `{}` for `{}`", + parent_trait_ref, + parent_trait_ref.0.self_ty())); + let parent_predicate = parent_trait_ref.to_predicate(); + self.note_obligation_cause_code(err, + &parent_predicate, + &data.parent_code); + } + ObligationCauseCode::CompareImplMethodObligation => { + err.note( + &format!("the requirement `{}` appears on the impl method \ + but not on the corresponding trait method", + predicate)); + } } } -} -fn suggest_new_overflow_limit(tcx: &TyCtxt, err:&mut DiagnosticBuilder, span: Span) { - let current_limit = tcx.sess.recursion_limit.get(); - let suggested_limit = current_limit * 2; - err.fileline_note( - span, - &format!( - "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", - suggested_limit)); + fn suggest_new_overflow_limit(&self, err: &mut DiagnosticBuilder) { + let current_limit = self.tcx.sess.recursion_limit.get(); + let suggested_limit = current_limit * 2; + err.note(&format!( + "consider adding a `#![recursion_limit=\"{}\"]` attribute to your crate", + suggested_limit)); + } } diff --git a/src/librustc/traits/fulfill.rs b/src/librustc/traits/fulfill.rs index 11e8dae871..d9d0367bdc 100644 --- a/src/librustc/traits/fulfill.rs +++ b/src/librustc/traits/fulfill.rs @@ -10,37 +10,37 @@ use dep_graph::DepGraph; use infer::{InferCtxt, InferOk}; -use ty::{self, Ty, TyCtxt, TypeFoldable, ToPolyTraitRef}; -use rustc_data_structures::obligation_forest::{Backtrace, ObligationForest, Error}; -use std::iter; +use ty::{self, Ty, TypeFoldable, ToPolyTraitRef, TyCtxt}; +use rustc_data_structures::obligation_forest::{ObligationForest, Error}; +use rustc_data_structures::obligation_forest::{ForestObligation, ObligationProcessor}; +use std::marker::PhantomData; +use std::mem; use syntax::ast; use util::common::ErrorReported; -use util::nodemap::{FnvHashMap, FnvHashSet, NodeMap}; +use 
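Editor's note: `suggest_new_overflow_limit` above doubles the current recursion limit when printing its hint. The attribute it recommends is crate-level; a standalone sketch (the value shown is illustrative, not taken from this patch):

```rust
// Raising the trait-resolution / macro recursion limit for the whole crate.
// The diagnostic above suggests twice the current limit, whatever that
// happens to be for the failing crate.
#![recursion_limit = "256"]

fn main() {
    println!("compiled with an increased recursion limit");
}
```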
util::nodemap::{FnvHashSet, NodeMap}; use super::CodeAmbiguity; use super::CodeProjectionError; use super::CodeSelectionError; -use super::is_object_safe; use super::FulfillmentError; use super::FulfillmentErrorCode; use super::ObligationCause; use super::PredicateObligation; use super::project; -use super::report_overflow_error_cycle; use super::select::SelectionContext; use super::Unimplemented; -use super::util::predicate_for_builtin_bound; + +impl<'tcx> ForestObligation for PendingPredicateObligation<'tcx> { + type Predicate = ty::Predicate<'tcx>; + + fn as_predicate(&self) -> &Self::Predicate { &self.obligation.predicate } +} pub struct GlobalFulfilledPredicates<'tcx> { set: FnvHashSet>, dep_graph: DepGraph, } -#[derive(Debug)] -pub struct LocalFulfilledPredicates<'tcx> { - set: FnvHashSet> -} - /// The fulfillment context is used to drive trait resolution. It /// consists of a list of obligations that must be (eventually) /// satisfied. The job is to track which are satisfied, which yielded @@ -52,23 +52,12 @@ pub struct LocalFulfilledPredicates<'tcx> { /// method `select_all_or_error` can be used to report any remaining /// ambiguous cases as errors. pub struct FulfillmentContext<'tcx> { - // a simple cache that aims to cache *exact duplicate obligations* - // and avoid adding them twice. This serves a different purpose - // than the `SelectionCache`: it avoids duplicate errors and - // permits recursive obligations, which are often generated from - // traits like `Send` et al. - // - // Note that because of type inference, a predicate can still - // occur twice in the predicates list, for example when 2 - // initially-distinct type variables are unified after being - // inserted. Deduplicating the predicate set on selection had a - // significant performance cost the last time I checked. - duplicate_set: LocalFulfilledPredicates<'tcx>, - // A list of all obligations that have been registered with this // fulfillment context. - predicates: ObligationForest, - LocalFulfilledPredicates<'tcx>>, + predicates: ObligationForest>, + + // A list of new obligations due to RFC1592. + rfc1592_obligations: Vec>, // A set of constraints that regionck must validate. Each // constraint has the form `T:'a`, meaning "some type `T` must @@ -110,12 +99,12 @@ pub struct PendingPredicateObligation<'tcx> { pub stalled_on: Vec>, } -impl<'tcx> FulfillmentContext<'tcx> { +impl<'a, 'gcx, 'tcx> FulfillmentContext<'tcx> { /// Creates a new fulfillment context. pub fn new() -> FulfillmentContext<'tcx> { FulfillmentContext { - duplicate_set: LocalFulfilledPredicates::new(), predicates: ObligationForest::new(), + rfc1592_obligations: Vec::new(), region_obligations: NodeMap(), } } @@ -127,11 +116,11 @@ impl<'tcx> FulfillmentContext<'tcx> { /// `SomeTrait` or a where clause that lets us unify `$0` with /// something concrete. If this fails, we'll unify `$0` with /// `projection_ty` again. 
- pub fn normalize_projection_type<'a>(&mut self, - infcx: &InferCtxt<'a,'tcx>, - projection_ty: ty::ProjectionTy<'tcx>, - cause: ObligationCause<'tcx>) - -> Ty<'tcx> + pub fn normalize_projection_type(&mut self, + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + projection_ty: ty::ProjectionTy<'tcx>, + cause: ObligationCause<'tcx>) + -> Ty<'tcx> { debug!("normalize_projection_type(projection_ty={:?})", projection_ty); @@ -152,13 +141,13 @@ impl<'tcx> FulfillmentContext<'tcx> { normalized.value } - pub fn register_builtin_bound<'a>(&mut self, - infcx: &InferCtxt<'a,'tcx>, - ty: Ty<'tcx>, - builtin_bound: ty::BuiltinBound, - cause: ObligationCause<'tcx>) + pub fn register_builtin_bound(&mut self, + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + ty: Ty<'tcx>, + builtin_bound: ty::BuiltinBound, + cause: ObligationCause<'tcx>) { - match predicate_for_builtin_bound(infcx.tcx, cause, builtin_bound, 0, ty) { + match infcx.tcx.predicate_for_builtin_bound(cause, builtin_bound, 0, ty) { Ok(predicate) => { self.register_predicate_obligation(infcx, predicate); } @@ -166,35 +155,38 @@ impl<'tcx> FulfillmentContext<'tcx> { } } - pub fn register_region_obligation<'a>(&mut self, - t_a: Ty<'tcx>, - r_b: ty::Region, - cause: ObligationCause<'tcx>) + pub fn register_region_obligation(&mut self, + t_a: Ty<'tcx>, + r_b: ty::Region, + cause: ObligationCause<'tcx>) { register_region_obligation(t_a, r_b, cause, &mut self.region_obligations); } - pub fn register_predicate_obligation<'a>(&mut self, - infcx: &InferCtxt<'a,'tcx>, - obligation: PredicateObligation<'tcx>) + pub fn register_predicate_obligation(&mut self, + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + obligation: PredicateObligation<'tcx>) { // this helps to reduce duplicate errors, as well as making // debug output much nicer to read and so on. 
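Editor's note: `normalize_projection_type` above resolves an associated-type projection, falling back to a fresh type variable plus a deferred obligation when the answer is not yet known. In user-facing terms (standalone sketch, hypothetical code), this is what lets a projection such as `<Vec<u8> as IntoIterator>::Item` stand for `u8`:

```rust
// The return type is written as a projection; trait selection normalizes it
// to the concrete associated type at each call site.
fn first<I: IntoIterator>(iter: I) -> Option<<I as IntoIterator>::Item> {
    iter.into_iter().next()
}

fn main() {
    let v: Vec<u8> = vec![10, 20];
    let x: Option<u8> = first(v);   // `<Vec<u8> as IntoIterator>::Item` is `u8`
    println!("{:?}", x);
}
```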
let obligation = infcx.resolve_type_vars_if_possible(&obligation); - assert!(!obligation.has_escaping_regions()); - - if self.is_duplicate_or_add(infcx.tcx, &obligation.predicate) { - debug!("register_predicate_obligation({:?}) -- already seen, skip", obligation); - return; + if infcx.tcx.fulfilled_predicates.borrow().check_duplicate(&obligation.predicate) + { + return } - debug!("register_predicate_obligation({:?})", obligation); - let obligation = PendingPredicateObligation { + self.predicates.register_obligation(PendingPredicateObligation { obligation: obligation, stalled_on: vec![] - }; - self.predicates.push_tree(obligation, LocalFulfilledPredicates::new()); + }); + } + + pub fn register_rfc1592_obligation(&mut self, + _infcx: &InferCtxt<'a, 'gcx, 'tcx>, + obligation: PredicateObligation<'tcx>) + { + self.rfc1592_obligations.push(obligation); } pub fn region_obligations(&self, @@ -207,11 +199,27 @@ impl<'tcx> FulfillmentContext<'tcx> { } } - pub fn select_all_or_error<'a>(&mut self, - infcx: &InferCtxt<'a,'tcx>) - -> Result<(),Vec>> + pub fn select_rfc1592_obligations(&mut self, + infcx: &InferCtxt<'a, 'gcx, 'tcx>) + -> Result<(),Vec>> + { + while !self.rfc1592_obligations.is_empty() { + for obligation in mem::replace(&mut self.rfc1592_obligations, Vec::new()) { + self.register_predicate_obligation(infcx, obligation); + } + + self.select_all_or_error(infcx)?; + } + + Ok(()) + } + + pub fn select_all_or_error(&mut self, + infcx: &InferCtxt<'a, 'gcx, 'tcx>) + -> Result<(),Vec>> { self.select_where_possible(infcx)?; + let errors: Vec<_> = self.predicates.to_errors(CodeAmbiguity) .into_iter() @@ -224,9 +232,9 @@ impl<'tcx> FulfillmentContext<'tcx> { } } - pub fn select_where_possible<'a>(&mut self, - infcx: &InferCtxt<'a,'tcx>) - -> Result<(),Vec>> + pub fn select_where_possible(&mut self, + infcx: &InferCtxt<'a, 'gcx, 'tcx>) + -> Result<(),Vec>> { let mut selcx = SelectionContext::new(infcx); self.select(&mut selcx) @@ -236,39 +244,10 @@ impl<'tcx> FulfillmentContext<'tcx> { self.predicates.pending_obligations() } - fn is_duplicate_or_add(&mut self, - tcx: &TyCtxt<'tcx>, - predicate: &ty::Predicate<'tcx>) - -> bool { - // For "global" predicates -- that is, predicates that don't - // involve type parameters, inference variables, or regions - // other than 'static -- we can check the cache in the tcx, - // which allows us to leverage work from other threads. Note - // that we don't add anything to this cache yet (unlike the - // local cache). This is because the tcx cache maintains the - // invariant that it only contains things that have been - // proven, and we have not yet proven that `predicate` holds. - if tcx.fulfilled_predicates.borrow().check_duplicate(predicate) { - return true; - } - - // If `predicate` is not global, or not present in the tcx - // cache, we can still check for it in our local cache and add - // it if not present. Note that if we find this predicate in - // the local cache we can stop immediately, without reporting - // any errors, even though we don't know yet if it is - // true. This is because, while we don't yet know if the - // predicate holds, we know that this same fulfillment context - // already is in the process of finding out. - self.duplicate_set.is_duplicate_or_add(predicate) - } - /// Attempts to select obligations using `selcx`. If `only_new_obligations` is true, then it /// only attempts to select obligations that haven't been seen before. 
- fn select<'a>(&mut self, - selcx: &mut SelectionContext<'a, 'tcx>) - -> Result<(),Vec>> - { + fn select(&mut self, selcx: &mut SelectionContext<'a, 'gcx, 'tcx>) + -> Result<(),Vec>> { debug!("select(obligation-forest-size={})", self.predicates.len()); let mut errors = Vec::new(); @@ -277,22 +256,18 @@ impl<'tcx> FulfillmentContext<'tcx> { debug!("select: starting another iteration"); // Process pending obligations. - let outcome = { - let region_obligations = &mut self.region_obligations; - self.predicates.process_obligations( - |obligation, tree, backtrace| process_predicate(selcx, - tree, - obligation, - backtrace, - region_obligations)) - }; - + let outcome = self.predicates.process_obligations(&mut FulfillProcessor { + selcx: selcx, + region_obligations: &mut self.region_obligations, + rfc1592_obligations: &mut self.rfc1592_obligations + }); debug!("select: outcome={:?}", outcome); // these are obligations that were proven to be true. for pending_obligation in outcome.completed { let predicate = &pending_obligation.obligation.predicate; - selcx.tcx().fulfilled_predicates.borrow_mut().add_if_global(predicate); + selcx.tcx().fulfilled_predicates.borrow_mut() + .add_if_global(selcx.tcx(), predicate); } errors.extend( @@ -316,180 +291,46 @@ impl<'tcx> FulfillmentContext<'tcx> { } } -/// Like `process_predicate1`, but wrap result into a pending predicate. -fn process_predicate<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, - tree_cache: &mut LocalFulfilledPredicates<'tcx>, - pending_obligation: &mut PendingPredicateObligation<'tcx>, - backtrace: Backtrace>, - region_obligations: &mut NodeMap>>) - -> Result>>, - FulfillmentErrorCode<'tcx>> -{ - match process_predicate1(selcx, pending_obligation, region_obligations) { - Ok(Some(v)) => process_child_obligations(selcx, - tree_cache, - &pending_obligation.obligation, - backtrace, - v), - Ok(None) => Ok(None), - Err(e) => Err(e) - } -} - -fn process_child_obligations<'a,'tcx>( - selcx: &mut SelectionContext<'a,'tcx>, - tree_cache: &mut LocalFulfilledPredicates<'tcx>, - pending_obligation: &PredicateObligation<'tcx>, - backtrace: Backtrace>, - child_obligations: Vec>) - -> Result>>, - FulfillmentErrorCode<'tcx>> -{ - // FIXME(#30977) The code below is designed to detect (and - // permit) DAGs, while still ensuring that the reasoning - // is acyclic. However, it does a few things - // suboptimally. For example, it refreshes type variables - // a lot, probably more than needed, but also less than - // you might want. - // - // - more than needed: I want to be very sure we don't - // accidentally treat a cycle as a DAG, so I am - // refreshing type variables as we walk the ancestors; - // but we are going to repeat this a lot, which is - // sort of silly, and it would be nicer to refresh - // them *in place* so that later predicate processing - // can benefit from the same work; - // - less than you might want: we only add items in the cache here, - // but maybe we learn more about type variables and could add them into - // the cache later on. - - let tcx = selcx.tcx(); - - let mut ancestor_set = AncestorSet::new(&backtrace); - - let pending_predicate_obligations: Vec<_> = - child_obligations - .into_iter() - .filter_map(|obligation| { - // Probably silly, but remove any inference - // variables. This is actually crucial to the ancestor - // check marked (*) below, but it's not clear that it - // makes sense to ALWAYS do it. 
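Editor's note: the rewritten `select` loop keeps processing pending obligations until a pass makes no further progress, recording proven global predicates and collecting errors; whatever is left is reported as ambiguous. A toy analogue of that worklist shape (not the compiler's API; all names here are hypothetical):

```rust
use std::collections::VecDeque;

enum Outcome {
    Proved(Vec<u32>), // proven; may spawn child obligations
    Stalled,          // not enough information yet
    Error,            // definitely unsatisfiable
}

// Stand-in for selecting a single obligation.
fn process(ob: u32) -> Outcome {
    match ob {
        0..=2 => Outcome::Proved(vec![]),
        3 => Outcome::Proved(vec![1, 2]),
        4 => Outcome::Stalled,
        _ => Outcome::Error,
    }
}

fn main() {
    let mut pending: VecDeque<u32> = vec![3, 4, 7].into_iter().collect();
    let mut errors = Vec::new();
    loop {
        let mut progressed = false;
        let mut next = VecDeque::new();
        while let Some(ob) = pending.pop_front() {
            match process(ob) {
                Outcome::Proved(children) => { progressed = true; next.extend(children); }
                Outcome::Stalled => next.push_back(ob),
                Outcome::Error => { progressed = true; errors.push(ob); }
            }
        }
        pending = next;
        // Stop once a whole pass made no progress; what remains is ambiguous.
        if !progressed || pending.is_empty() { break; }
    }
    println!("errors: {:?}, still ambiguous: {:?}", errors, pending);
}
```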
- let obligation = selcx.infcx().resolve_type_vars_if_possible(&obligation); - - // Screen out obligations that we know globally - // are true. - if tcx.fulfilled_predicates.borrow().check_duplicate(&obligation.predicate) { - return None; - } - - // Check whether this obligation appears - // somewhere else in the tree. If not, we have to - // process it for sure. - if !tree_cache.is_duplicate_or_add(&obligation.predicate) { - return Some(PendingPredicateObligation { - obligation: obligation, - stalled_on: vec![] - }); - } - - debug!("process_child_obligations: duplicate={:?}", - obligation.predicate); - - // OK, the obligation appears elsewhere in the tree. - // This is either a fatal error or else something we can - // ignore. If the obligation appears in our *ancestors* - // (rather than some more distant relative), that - // indicates a cycle. Cycles are either considered - // resolved (if this is a coinductive case) or a fatal - // error. - if let Some(index) = ancestor_set.has(selcx.infcx(), &obligation.predicate) { - // ~~~ (*) see above - debug!("process_child_obligations: cycle index = {}", index); - - let backtrace = backtrace.clone(); - let cycle: Vec<_> = - iter::once(&obligation) - .chain(Some(pending_obligation)) - .chain(backtrace.take(index + 1).map(|p| &p.obligation)) - .cloned() - .collect(); - if coinductive_match(selcx, &cycle) { - debug!("process_child_obligations: coinductive match"); - None - } else { - report_overflow_error_cycle(selcx.infcx(), &cycle); - } - } else { - // Not a cycle. Just ignore this obligation then, - // we're already in the process of proving it. - debug!("process_child_obligations: not a cycle"); - None - } - }) - .collect(); - - Ok(Some(pending_predicate_obligations)) +struct FulfillProcessor<'a, 'b: 'a, 'gcx: 'tcx, 'tcx: 'b> { + selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, + region_obligations: &'a mut NodeMap>>, + rfc1592_obligations: &'a mut Vec> } -struct AncestorSet<'b, 'tcx: 'b> { - populated: bool, - cache: FnvHashMap, usize>, - backtrace: Backtrace<'b, PendingPredicateObligation<'tcx>>, -} +impl<'a, 'b, 'gcx, 'tcx> ObligationProcessor for FulfillProcessor<'a, 'b, 'gcx, 'tcx> { + type Obligation = PendingPredicateObligation<'tcx>; + type Error = FulfillmentErrorCode<'tcx>; -impl<'b, 'tcx> AncestorSet<'b, 'tcx> { - fn new(backtrace: &Backtrace<'b, PendingPredicateObligation<'tcx>>) -> Self { - AncestorSet { - populated: false, - cache: FnvHashMap(), - backtrace: backtrace.clone(), - } + fn process_obligation(&mut self, + obligation: &mut Self::Obligation) + -> Result>, Self::Error> + { + process_predicate(self.selcx, + obligation, + self.region_obligations, + self.rfc1592_obligations) + .map(|os| os.map(|os| os.into_iter().map(|o| PendingPredicateObligation { + obligation: o, + stalled_on: vec![] + }).collect())) } - /// Checks whether any of the ancestors in the backtrace are equal - /// to `predicate` (`predicate` is assumed to be fully - /// type-resolved). Returns `None` if not; otherwise, returns - /// `Some` with the index within the backtrace. - fn has<'a>(&mut self, - infcx: &InferCtxt<'a, 'tcx>, - predicate: &ty::Predicate<'tcx>) - -> Option { - // the first time, we have to populate the cache - if !self.populated { - let backtrace = self.backtrace.clone(); - for (index, ancestor) in backtrace.enumerate() { - // Ugh. This just feels ridiculously - // inefficient. 
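Editor's note: `process_backedge` above accepts a cycle when `coinductive_match` holds and reports an overflow cycle otherwise. Coinduction is what makes defaulted (auto) traits work on recursive types; a standalone sketch (hypothetical user code):

```rust
// Proving `List: Send` needs `Option<Box<List>>: Send`, which needs
// `List: Send` again. For defaulted traits the cycle is accepted
// coinductively instead of being reported as overflow.
struct List {
    value: u32,
    next: Option<Box<List>>,
}

fn assert_send<T: Send>(_: &T) {}

fn main() {
    let list = List {
        value: 1,
        next: Some(Box::new(List { value: 2, next: None })),
    };
    assert_send(&list);
    println!("{}", list.value);
}
```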
But we need to compare - // predicates without being concerned about - // the vagaries of type inference, so for now - // just ensure that they are always - // up-to-date. (I suppose we could just use a - // snapshot and check if they are unifiable?) - let resolved_predicate = - infcx.resolve_type_vars_if_possible( - &ancestor.obligation.predicate); - - // Though we try to avoid it, it can happen that a - // cycle already exists in the predecessors. This - // happens if the type variables were not fully known - // at the time that the ancestors were pushed. We'll - // just ignore such cycles for now, on the premise - // that they will repeat themselves and we'll deal - // with them properly then. - self.cache.entry(resolved_predicate) - .or_insert(index); - } - self.populated = true; + fn process_backedge<'c, I>(&mut self, cycle: I, + _marker: PhantomData<&'c PendingPredicateObligation<'tcx>>) + where I: Clone + Iterator>, + { + if coinductive_match(self.selcx, cycle.clone()) { + debug!("process_child_obligations: coinductive match"); + } else { + let cycle : Vec<_> = cycle.map(|c| c.obligation.clone()).collect(); + self.selcx.infcx().report_overflow_error_cycle(&cycle); } - - self.cache.get(predicate).cloned() } } /// Return the set of type variables contained in a trait ref -fn trait_ref_type_vars<'a, 'tcx>(selcx: &mut SelectionContext<'a, 'tcx>, - t: ty::PolyTraitRef<'tcx>) -> Vec> +fn trait_ref_type_vars<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, + t: ty::PolyTraitRef<'tcx>) -> Vec> { t.skip_binder() // ok b/c this check doesn't care about regions .input_types() @@ -505,11 +346,13 @@ fn trait_ref_type_vars<'a, 'tcx>(selcx: &mut SelectionContext<'a, 'tcx>, /// - `Ok(Some(v))` if the predicate is true, presuming that `v` are also true /// - `Ok(None)` if we don't have enough info to be sure /// - `Err` if the predicate does not hold -fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, - pending_obligation: &mut PendingPredicateObligation<'tcx>, - region_obligations: &mut NodeMap>>) - -> Result>>, - FulfillmentErrorCode<'tcx>> +fn process_predicate<'a, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, + pending_obligation: &mut PendingPredicateObligation<'tcx>, + region_obligations: &mut NodeMap>>, + rfc1592_obligations: &mut Vec>) + -> Result>>, + FulfillmentErrorCode<'tcx>> { // if we were stalled on some unresolved variables, first check // whether any of them have been resolved; if not, don't bother @@ -542,12 +385,12 @@ fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, let trait_obligation = obligation.with(data.clone()); match selcx.select(&trait_obligation) { Ok(Some(vtable)) => { - info!("selecting trait `{:?}` at depth {} yielded Ok(Some)", + debug!("selecting trait `{:?}` at depth {} yielded Ok(Some)", data, obligation.recursion_depth); Ok(Some(vtable.nested_obligations())) } Ok(None) => { - info!("selecting trait `{:?}` at depth {} yielded Ok(None)", + debug!("selecting trait `{:?}` at depth {} yielded Ok(None)", data, obligation.recursion_depth); // This is a bit subtle: for the most part, the @@ -645,13 +488,28 @@ fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, } ty::Predicate::ObjectSafe(trait_def_id) => { - if !is_object_safe(selcx.tcx(), trait_def_id) { + if !selcx.tcx().is_object_safe(trait_def_id) { Err(CodeSelectionError(Unimplemented)) } else { Ok(Some(Vec::new())) } } + ty::Predicate::ClosureKind(closure_def_id, kind) => { + match selcx.infcx().closure_kind(closure_def_id) { + 
Some(closure_kind) => { + if closure_kind.extends(kind) { + Ok(Some(vec![])) + } else { + Err(CodeSelectionError(Unimplemented)) + } + } + None => { + Ok(None) + } + } + } + ty::Predicate::WellFormed(ty) => { match ty::wf::obligations(selcx.infcx(), obligation.cause.body_id, ty, obligation.cause.span) { @@ -662,6 +520,14 @@ fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, s => Ok(s) } } + + ty::Predicate::Rfc1592(ref inner) => { + rfc1592_obligations.push(PredicateObligation { + predicate: ty::Predicate::clone(inner), + ..obligation.clone() + }); + Ok(Some(vec![])) + } } } @@ -672,27 +538,24 @@ fn process_predicate1<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, /// - it also appears in the backtrace at some position `X`; and, /// - all the predicates at positions `X..` between `X` an the top are /// also defaulted traits. -fn coinductive_match<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, - cycle: &[PredicateObligation<'tcx>]) - -> bool +fn coinductive_match<'a,'c,'gcx,'tcx,I>(selcx: &mut SelectionContext<'a,'gcx,'tcx>, + cycle: I) -> bool + where I: Iterator>, + 'tcx: 'c { - let len = cycle.len(); - - assert_eq!(cycle[0].predicate, cycle[len - 1].predicate); - - cycle[0..len-1] - .iter() + let mut cycle = cycle; + cycle .all(|bt_obligation| { - let result = coinductive_obligation(selcx, bt_obligation); + let result = coinductive_obligation(selcx, &bt_obligation.obligation); debug!("coinductive_match: bt_obligation={:?} coinductive={}", bt_obligation, result); result }) } -fn coinductive_obligation<'a, 'tcx>(selcx: &SelectionContext<'a, 'tcx>, - obligation: &PredicateObligation<'tcx>) - -> bool { +fn coinductive_obligation<'a,'gcx,'tcx>(selcx: &SelectionContext<'a,'gcx,'tcx>, + obligation: &PredicateObligation<'tcx>) + -> bool { match obligation.predicate { ty::Predicate::Trait(ref data) => { selcx.tcx().trait_has_default_impl(data.def_id()) @@ -721,27 +584,8 @@ fn register_region_obligation<'tcx>(t_a: Ty<'tcx>, } -impl<'tcx> LocalFulfilledPredicates<'tcx> { - pub fn new() -> LocalFulfilledPredicates<'tcx> { - LocalFulfilledPredicates { - set: FnvHashSet() - } - } - - fn is_duplicate_or_add(&mut self, key: &ty::Predicate<'tcx>) -> bool { - // For a `LocalFulfilledPredicates`, if we find a match, we - // don't need to add a read edge to the dep-graph. This is - // because it means that the predicate has already been - // considered by this `FulfillmentContext`, and hence the - // containing task will already have an edge. 
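Editor's note: the `ClosureKind` arm above checks whether the closure's inferred kind `extends` the kind the obligation demands: an `Fn` closure also satisfies `FnMut` and `FnOnce`, and an `FnMut` closure also satisfies `FnOnce`, but not the other way around. A standalone sketch (hypothetical user code):

```rust
fn call_once<F: FnOnce() -> u32>(f: F) -> u32 { f() }
fn call_mut<F: FnMut() -> u32>(mut f: F) -> u32 { f() }

fn main() {
    let x = 10;
    println!("{}", call_mut(|| x + 1));   // an `Fn` closure also satisfies `FnMut`
    println!("{}", call_once(|| x + 2));  // ... and `FnOnce`

    let s = String::from("owned");
    let consume = move || { drop(s); 0 }; // genuinely `FnOnce`: it consumes `s`
    println!("{}", call_once(consume));
    // `call_mut(consume)` would be rejected: `FnOnce` does not extend `FnMut`.
}
```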
(Here we are - // assuming each `FulfillmentContext` only gets used from one - // task; but to do otherwise makes no sense) - !self.set.insert(key.clone()) - } -} - -impl<'tcx> GlobalFulfilledPredicates<'tcx> { - pub fn new(dep_graph: DepGraph) -> GlobalFulfilledPredicates<'tcx> { +impl<'a, 'gcx, 'tcx> GlobalFulfilledPredicates<'gcx> { + pub fn new(dep_graph: DepGraph) -> GlobalFulfilledPredicates<'gcx> { GlobalFulfilledPredicates { set: FnvHashSet(), dep_graph: dep_graph, @@ -766,24 +610,23 @@ impl<'tcx> GlobalFulfilledPredicates<'tcx> { self.dep_graph.read(data.dep_node()); debug!("check_duplicate: global predicate `{:?}` already proved elsewhere", data); - info!("check_duplicate_trait hit: `{:?}`", data); - true } else { false } } - fn add_if_global(&mut self, key: &ty::Predicate<'tcx>) { + fn add_if_global(&mut self, tcx: TyCtxt<'a, 'gcx, 'tcx>, key: &ty::Predicate<'tcx>) { if let ty::Predicate::Trait(ref data) = *key { // We only add things to the global predicate registry // after the current task has proved them, and hence // already has the required read edges, so we don't need // to add any more edges here. if data.is_global() { - if self.set.insert(data.clone()) { - debug!("add_if_global: global predicate `{:?}` added", data); - info!("check_duplicate_trait entry: `{:?}`", data); + if let Some(data) = tcx.lift_to_global(data) { + if self.set.insert(data.clone()) { + debug!("add_if_global: global predicate `{:?}` added", data); + } } } } diff --git a/src/librustc/traits/mod.rs b/src/librustc/traits/mod.rs index a160465e2e..c5db2a8a78 100644 --- a/src/librustc/traits/mod.rs +++ b/src/librustc/traits/mod.rs @@ -18,46 +18,33 @@ pub use self::ObligationCauseCode::*; use hir::def_id::DefId; use middle::free_region::FreeRegionMap; use ty::subst; -use ty::{self, Ty, TypeFoldable}; -use infer::{self, fixup_err_to_string, InferCtxt}; +use ty::{self, Ty, TyCtxt, TypeFoldable}; +use infer::InferCtxt; use std::rc::Rc; use syntax::ast; use syntax::codemap::{Span, DUMMY_SP}; pub use self::error_reporting::TraitErrorKey; -pub use self::error_reporting::recursive_type_with_infinite_size_error; -pub use self::error_reporting::report_fulfillment_errors; -pub use self::error_reporting::report_overflow_error; -pub use self::error_reporting::report_overflow_error_cycle; -pub use self::error_reporting::report_selection_error; -pub use self::error_reporting::report_object_safety_error; pub use self::coherence::orphan_check; pub use self::coherence::overlapping_impls; pub use self::coherence::OrphanCheckErr; pub use self::fulfill::{FulfillmentContext, GlobalFulfilledPredicates, RegionObligation}; pub use self::project::{MismatchedProjectionTypes, ProjectionMode}; pub use self::project::{normalize, Normalized}; -pub use self::object_safety::is_object_safe; -pub use self::object_safety::astconv_object_safety_violations; -pub use self::object_safety::object_safety_violations; pub use self::object_safety::ObjectSafetyViolation; pub use self::object_safety::MethodViolationCode; -pub use self::object_safety::is_vtable_safe_method; pub use self::select::{EvaluationCache, SelectionContext, SelectionCache}; pub use self::select::{MethodMatchResult, MethodMatched, MethodAmbiguous, MethodDidNotMatch}; pub use self::select::{MethodMatchedData}; // intentionally don't export variants -pub use self::specialize::{Overlap, specialization_graph, specializes, translate_substs}; +pub use self::specialize::{OverlapError, specialization_graph, specializes, translate_substs}; +pub use self::specialize::{SpecializesCache}; pub 
use self::util::elaborate_predicates; -pub use self::util::get_vtable_index_of_object_method; -pub use self::util::trait_ref_for_builtin_bound; -pub use self::util::predicate_for_trait_def; pub use self::util::supertraits; pub use self::util::Supertraits; pub use self::util::supertrait_def_ids; pub use self::util::SupertraitDefIds; pub use self::util::transitive_bounds; -pub use self::util::upcast; mod coherence; mod error_reporting; @@ -106,9 +93,12 @@ pub enum ObligationCauseCode<'tcx> { /// Not well classified or should be obvious from span. MiscObligation, - /// This is the trait reference from the given projection + /// A slice or array is WF only if `T: Sized` SliceOrArrayElem, + /// A tuple is WF only if its middle elements are Sized + TupleElem, + /// This is the trait reference from the given projection ProjectionWf(ty::ProjectionTy<'tcx>), @@ -250,7 +240,7 @@ pub enum Vtable<'tcx, N> { VtableParam(Vec), /// Virtual calls through an object - VtableObject(VtableObjectData<'tcx>), + VtableObject(VtableObjectData<'tcx, N>), /// Successful resolution for a builtin trait. VtableBuiltin(VtableBuiltinData), @@ -261,7 +251,7 @@ pub enum Vtable<'tcx, N> { VtableClosure(VtableClosureData<'tcx, N>), /// Same as above, but for a fn pointer type with the given signature. - VtableFnPointer(ty::Ty<'tcx>), + VtableFnPointer(VtableFnPointerData<'tcx, N>), } /// Identifies a particular impl in the source, along with a set of @@ -304,14 +294,22 @@ pub struct VtableBuiltinData { /// A vtable for some object-safe trait `Foo` automatically derived /// for the object type `Foo`. #[derive(PartialEq,Eq,Clone)] -pub struct VtableObjectData<'tcx> { +pub struct VtableObjectData<'tcx, N> { /// `Foo` upcast to the obligation trait. This will be some supertrait of `Foo`. pub upcast_trait_ref: ty::PolyTraitRef<'tcx>, /// The vtable is formed by concatenating together the method lists of /// the base object trait and all supertraits; this is the start of /// `upcast_trait_ref`'s methods in that vtable. - pub vtable_base: usize + pub vtable_base: usize, + + pub nested: Vec, +} + +#[derive(Clone, PartialEq, Eq)] +pub struct VtableFnPointerData<'tcx, N> { + pub fn_ty: ty::Ty<'tcx>, + pub nested: Vec } /// Creates predicate obligations from the generic bounds. @@ -327,11 +325,11 @@ pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>, /// `bound` or is not known to meet bound (note that this is /// conservative towards *no impl*, which is the opposite of the /// `evaluate` methods). -pub fn type_known_to_meet_builtin_bound<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, - ty: Ty<'tcx>, - bound: ty::BuiltinBound, - span: Span) - -> bool +pub fn type_known_to_meet_builtin_bound<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + ty: Ty<'tcx>, + bound: ty::BuiltinBound, + span: Span) + -> bool { debug!("type_known_to_meet_builtin_bound(ty={:?}, bound={:?})", ty, @@ -339,7 +337,7 @@ pub fn type_known_to_meet_builtin_bound<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, let cause = ObligationCause::misc(span, ast::DUMMY_NODE_ID); let obligation = - util::predicate_for_builtin_bound(infcx.tcx, cause, bound, 0, ty); + infcx.tcx.predicate_for_builtin_bound(cause, bound, 0, ty); let obligation = match obligation { Ok(o) => o, Err(..) => return false @@ -389,9 +387,10 @@ pub fn type_known_to_meet_builtin_bound<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, // FIXME: this is gonna need to be removed ... /// Normalizes the parameter environment, reporting errors if they occur. 
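Editor's note: `type_known_to_meet_builtin_bound` above answers whether a type is known to satisfy a builtin bound such as `Send`, `Sync`, `Copy`, or `Sized`. The user-level analogue (standalone sketch, hypothetical helper functions) is simply using the type where the bound is required:

```rust
fn assert_send<T: Send>() {}
fn assert_sync<T: Sync>() {}

fn main() {
    assert_send::<Vec<u8>>();             // Vec<u8> is Send
    assert_sync::<i32>();                 // i32 is Sync
    // assert_send::<std::rc::Rc<u8>>();  // would not compile: Rc<u8> is not Send
    println!("builtin bounds satisfied");
}
```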
-pub fn normalize_param_env_or_error<'a,'tcx>(unnormalized_env: ty::ParameterEnvironment<'a,'tcx>, - cause: ObligationCause<'tcx>) - -> ty::ParameterEnvironment<'a,'tcx> +pub fn normalize_param_env_or_error<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + unnormalized_env: ty::ParameterEnvironment<'tcx>, + cause: ObligationCause<'tcx>) + -> ty::ParameterEnvironment<'tcx> { // I'm not wild about reporting errors here; I'd prefer to // have the errors get reported at a defined place (e.g., @@ -408,7 +407,6 @@ pub fn normalize_param_env_or_error<'a,'tcx>(unnormalized_env: ty::ParameterEnvi // and errors will get reported then; so after typeck we // can be sure that no errors should occur. - let tcx = unnormalized_env.tcx; let span = cause.span; let body_id = cause.body_id; @@ -433,51 +431,54 @@ pub fn normalize_param_env_or_error<'a,'tcx>(unnormalized_env: ty::ParameterEnvi let elaborated_env = unnormalized_env.with_caller_bounds(predicates); - let infcx = infer::new_infer_ctxt(tcx, - &tcx.tables, - Some(elaborated_env), - ProjectionMode::AnyFinal); - let predicates = match fully_normalize(&infcx, - cause, - &infcx.parameter_environment.caller_bounds) { - Ok(predicates) => predicates, - Err(errors) => { - report_fulfillment_errors(&infcx, &errors); - return infcx.parameter_environment; // an unnormalized env is better than nothing - } - }; - - debug!("normalize_param_env_or_error: normalized predicates={:?}", - predicates); + tcx.infer_ctxt(None, Some(elaborated_env), ProjectionMode::AnyFinal).enter(|infcx| { + let predicates = match fully_normalize(&infcx, cause, + &infcx.parameter_environment.caller_bounds) { + Ok(predicates) => predicates, + Err(errors) => { + infcx.report_fulfillment_errors(&errors); + // An unnormalized env is better than nothing. + return infcx.parameter_environment; + } + }; + + debug!("normalize_param_env_or_error: normalized predicates={:?}", + predicates); + + let free_regions = FreeRegionMap::new(); + infcx.resolve_regions_and_report_errors(&free_regions, body_id); + let predicates = match infcx.fully_resolve(&predicates) { + Ok(predicates) => predicates, + Err(fixup_err) => { + // If we encounter a fixup error, it means that some type + // variable wound up unconstrained. I actually don't know + // if this can happen, and I certainly don't expect it to + // happen often, but if it did happen it probably + // represents a legitimate failure due to some kind of + // unconstrained variable, and it seems better not to ICE, + // all things considered. + tcx.sess.span_err(span, &fixup_err.to_string()); + // An unnormalized env is better than nothing. + return infcx.parameter_environment; + } + }; - let free_regions = FreeRegionMap::new(); - infcx.resolve_regions_and_report_errors(&free_regions, body_id); - let predicates = match infcx.fully_resolve(&predicates) { - Ok(predicates) => predicates, - Err(fixup_err) => { - // If we encounter a fixup error, it means that some type - // variable wound up unconstrained. I actually don't know - // if this can happen, and I certainly don't expect it to - // happen often, but if it did happen it probably - // represents a legitimate failure due to some kind of - // unconstrained variable, and it seems better not to ICE, - // all things considered. 
- let err_msg = fixup_err_to_string(fixup_err); - tcx.sess.span_err(span, &err_msg); - return infcx.parameter_environment; // an unnormalized env is better than nothing - } - }; + let predicates = match tcx.lift_to_global(&predicates) { + Some(predicates) => predicates, + None => return infcx.parameter_environment + }; - debug!("normalize_param_env_or_error: resolved predicates={:?}", - predicates); + debug!("normalize_param_env_or_error: resolved predicates={:?}", + predicates); - infcx.parameter_environment.with_caller_bounds(predicates) + infcx.parameter_environment.with_caller_bounds(predicates) + }) } -pub fn fully_normalize<'a,'tcx,T>(infcx: &InferCtxt<'a,'tcx>, - cause: ObligationCause<'tcx>, - value: &T) - -> Result>> +pub fn fully_normalize<'a, 'gcx, 'tcx, T>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + cause: ObligationCause<'tcx>, + value: &T) + -> Result>> where T : TypeFoldable<'tcx> { debug!("fully_normalize(value={:?})", value); @@ -577,7 +578,20 @@ impl<'tcx, N> Vtable<'tcx, N> { VtableBuiltin(i) => i.nested, VtableDefaultImpl(d) => d.nested, VtableClosure(c) => c.nested, - VtableObject(_) | VtableFnPointer(..) => vec![] + VtableObject(d) => d.nested, + VtableFnPointer(d) => d.nested, + } + } + + fn nested_obligations_mut(&mut self) -> &mut Vec { + match self { + &mut VtableImpl(ref mut i) => &mut i.nested, + &mut VtableParam(ref mut n) => n, + &mut VtableBuiltin(ref mut i) => &mut i.nested, + &mut VtableDefaultImpl(ref mut d) => &mut d.nested, + &mut VtableClosure(ref mut c) => &mut c.nested, + &mut VtableObject(ref mut d) => &mut d.nested, + &mut VtableFnPointer(ref mut d) => &mut d.nested, } } @@ -586,18 +600,25 @@ impl<'tcx, N> Vtable<'tcx, N> { VtableImpl(i) => VtableImpl(VtableImplData { impl_def_id: i.impl_def_id, substs: i.substs, - nested: i.nested.into_iter().map(f).collect() + nested: i.nested.into_iter().map(f).collect(), }), VtableParam(n) => VtableParam(n.into_iter().map(f).collect()), VtableBuiltin(i) => VtableBuiltin(VtableBuiltinData { - nested: i.nested.into_iter().map(f).collect() + nested: i.nested.into_iter().map(f).collect(), + }), + VtableObject(o) => VtableObject(VtableObjectData { + upcast_trait_ref: o.upcast_trait_ref, + vtable_base: o.vtable_base, + nested: o.nested.into_iter().map(f).collect(), }), - VtableObject(o) => VtableObject(o), VtableDefaultImpl(d) => VtableDefaultImpl(VtableDefaultImplData { trait_def_id: d.trait_def_id, - nested: d.nested.into_iter().map(f).collect() + nested: d.nested.into_iter().map(f).collect(), + }), + VtableFnPointer(p) => VtableFnPointer(VtableFnPointerData { + fn_ty: p.fn_ty, + nested: p.nested.into_iter().map(f).collect(), }), - VtableFnPointer(f) => VtableFnPointer(f), VtableClosure(c) => VtableClosure(VtableClosureData { closure_def_id: c.closure_def_id, substs: c.substs, diff --git a/src/librustc/traits/object_safety.rs b/src/librustc/traits/object_safety.rs index 9af2cfbd04..ffa1530a14 100644 --- a/src/librustc/traits/object_safety.rs +++ b/src/librustc/traits/object_safety.rs @@ -53,327 +53,326 @@ pub enum MethodViolationCode { Generic, } -pub fn is_object_safe<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId) - -> bool -{ - // Because we query yes/no results frequently, we keep a cache: - let def = tcx.lookup_trait_def(trait_def_id); +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn is_object_safe(self, trait_def_id: DefId) -> bool { + // Because we query yes/no results frequently, we keep a cache: + let def = self.lookup_trait_def(trait_def_id); - let result = def.object_safety().unwrap_or_else(|| { - let 
result = object_safety_violations(tcx, trait_def_id).is_empty(); + let result = def.object_safety().unwrap_or_else(|| { + let result = self.object_safety_violations(trait_def_id).is_empty(); - // Record just a yes/no result in the cache; this is what is - // queried most frequently. Note that this may overwrite a - // previous result, but always with the same thing. - def.set_object_safety(result); + // Record just a yes/no result in the cache; this is what is + // queried most frequently. Note that this may overwrite a + // previous result, but always with the same thing. + def.set_object_safety(result); - result - }); - - debug!("is_object_safe({:?}) = {}", trait_def_id, result); + result + }); - result -} + debug!("is_object_safe({:?}) = {}", trait_def_id, result); -/// Returns the object safety violations that affect -/// astconv - currently, Self in supertraits. This is needed -/// because `object_safety_violations` can't be used during -/// type collection. -pub fn astconv_object_safety_violations<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId) - -> Vec> -{ - let mut violations = vec![]; - - if supertraits_reference_self(tcx, trait_def_id) { - violations.push(ObjectSafetyViolation::SupertraitSelf); + result } - debug!("object_safety_violations_for_trait(trait_def_id={:?}) = {:?}", - trait_def_id, - violations); - - violations -} + /// Returns the object safety violations that affect + /// astconv - currently, Self in supertraits. This is needed + /// because `object_safety_violations` can't be used during + /// type collection. + pub fn astconv_object_safety_violations(self, trait_def_id: DefId) + -> Vec> + { + let mut violations = vec![]; -pub fn object_safety_violations<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId) - -> Vec> -{ - traits::supertrait_def_ids(tcx, trait_def_id) - .flat_map(|def_id| object_safety_violations_for_trait(tcx, def_id)) - .collect() -} + if self.supertraits_reference_self(trait_def_id) { + violations.push(ObjectSafetyViolation::SupertraitSelf); + } -fn object_safety_violations_for_trait<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId) - -> Vec> -{ - // Check methods for violations. - let mut violations: Vec<_> = - tcx.trait_items(trait_def_id).iter() - .filter_map(|item| { - match *item { - ty::MethodTraitItem(ref m) => { - object_safety_violation_for_method(tcx, trait_def_id, &m) - .map(|code| ObjectSafetyViolation::Method(m.clone(), code)) - } - _ => None, - } - }) - .collect(); + debug!("astconv_object_safety_violations(trait_def_id={:?}) = {:?}", + trait_def_id, + violations); - // Check the trait itself. - if trait_has_sized_self(tcx, trait_def_id) { - violations.push(ObjectSafetyViolation::SizedSelf); + violations } - if supertraits_reference_self(tcx, trait_def_id) { - violations.push(ObjectSafetyViolation::SupertraitSelf); + + pub fn object_safety_violations(self, trait_def_id: DefId) + -> Vec> + { + traits::supertrait_def_ids(self, trait_def_id) + .flat_map(|def_id| self.object_safety_violations_for_trait(def_id)) + .collect() } - debug!("object_safety_violations_for_trait(trait_def_id={:?}) = {:?}", - trait_def_id, - violations); + fn object_safety_violations_for_trait(self, trait_def_id: DefId) + -> Vec> + { + // Check methods for violations. 
+ let mut violations: Vec<_> = + self.trait_items(trait_def_id).iter() + .filter_map(|item| { + match *item { + ty::MethodTraitItem(ref m) => { + self.object_safety_violation_for_method(trait_def_id, &m) + .map(|code| ObjectSafetyViolation::Method(m.clone(), code)) + } + _ => None, + } + }) + .collect(); - violations -} + // Check the trait itself. + if self.trait_has_sized_self(trait_def_id) { + violations.push(ObjectSafetyViolation::SizedSelf); + } + if self.supertraits_reference_self(trait_def_id) { + violations.push(ObjectSafetyViolation::SupertraitSelf); + } -pub fn supertraits_reference_self<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId) - -> bool -{ - let trait_def = tcx.lookup_trait_def(trait_def_id); - let trait_ref = trait_def.trait_ref.clone(); - let trait_ref = trait_ref.to_poly_trait_ref(); - let predicates = tcx.lookup_super_predicates(trait_def_id); - predicates - .predicates - .into_iter() - .map(|predicate| predicate.subst_supertrait(tcx, &trait_ref)) - .any(|predicate| { - match predicate { - ty::Predicate::Trait(ref data) => { - // In the case of a trait predicate, we can skip the "self" type. - data.0.trait_ref.substs.types.get_slice(TypeSpace) - .iter() - .cloned() - .any(|t| t.has_self_ty()) - } - ty::Predicate::Projection(..) | - ty::Predicate::WellFormed(..) | - ty::Predicate::ObjectSafe(..) | - ty::Predicate::TypeOutlives(..) | - ty::Predicate::RegionOutlives(..) | - ty::Predicate::Equate(..) => { - false - } - } - }) -} + debug!("object_safety_violations_for_trait(trait_def_id={:?}) = {:?}", + trait_def_id, + violations); -fn trait_has_sized_self<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId) - -> bool -{ - let trait_def = tcx.lookup_trait_def(trait_def_id); - let trait_predicates = tcx.lookup_predicates(trait_def_id); - generics_require_sized_self(tcx, &trait_def.generics, &trait_predicates) -} + violations + } -fn generics_require_sized_self<'tcx>(tcx: &TyCtxt<'tcx>, - generics: &ty::Generics<'tcx>, - predicates: &ty::GenericPredicates<'tcx>) - -> bool -{ - let sized_def_id = match tcx.lang_items.sized_trait() { - Some(def_id) => def_id, - None => { return false; /* No Sized trait, can't require it! */ } - }; - - // Search for a predicate like `Self : Sized` amongst the trait bounds. - let free_substs = tcx.construct_free_substs(generics, - tcx.region_maps.node_extent(ast::DUMMY_NODE_ID)); - let predicates = predicates.instantiate(tcx, &free_substs).predicates.into_vec(); - elaborate_predicates(tcx, predicates) - .any(|predicate| { - match predicate { - ty::Predicate::Trait(ref trait_pred) if trait_pred.def_id() == sized_def_id => { - trait_pred.0.self_ty().is_self() - } - ty::Predicate::Projection(..) | - ty::Predicate::Trait(..) | - ty::Predicate::Equate(..) | - ty::Predicate::RegionOutlives(..) | - ty::Predicate::WellFormed(..) | - ty::Predicate::ObjectSafe(..) | - ty::Predicate::TypeOutlives(..) => { - false + fn supertraits_reference_self(self, trait_def_id: DefId) -> bool { + let trait_def = self.lookup_trait_def(trait_def_id); + let trait_ref = trait_def.trait_ref.clone(); + let trait_ref = trait_ref.to_poly_trait_ref(); + let predicates = self.lookup_super_predicates(trait_def_id); + predicates + .predicates + .into_iter() + .map(|predicate| predicate.subst_supertrait(self, &trait_ref)) + .any(|predicate| { + match predicate { + ty::Predicate::Trait(ref data) => { + // In the case of a trait predicate, we can skip the "self" type. 
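Editor's note: `supertraits_reference_self` and `generics_require_sized_self`, moved onto `TyCtxt` in this hunk, detect the two trait-level violations. A standalone sketch (hypothetical user code) of each shape:

```rust
// SupertraitSelf: the supertrait listing uses `Self` as a type parameter,
// so the trait cannot be made into an object.
trait Comparable: PartialEq<Self> {
    fn label(&self) -> &'static str;
}

// SizedSelf: the trait requires `Self: Sized`, which a trait object can
// never satisfy.
trait Consumable: Sized {
    fn consume(self);
}

#[derive(PartialEq)]
struct Token;

impl Comparable for Token {
    fn label(&self) -> &'static str { "token" }
}

impl Consumable for Token {
    fn consume(self) {}
}

fn main() {
    // Both traits are fine for static dispatch; only the `&Comparable` or
    // `&Consumable` object types would be rejected with E0038.
    println!("{}", Token.label());
    Token.consume();
}
```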
+ data.0.trait_ref.substs.types.get_slice(TypeSpace) + .iter() + .cloned() + .any(|t| t.has_self_ty()) + } + ty::Predicate::Projection(..) | + ty::Predicate::WellFormed(..) | + ty::Predicate::ObjectSafe(..) | + ty::Predicate::TypeOutlives(..) | + ty::Predicate::RegionOutlives(..) | + ty::Predicate::ClosureKind(..) | + ty::Predicate::Rfc1592(..) | + ty::Predicate::Equate(..) => { + false + } } - } - }) -} - -/// Returns `Some(_)` if this method makes the containing trait not object safe. -fn object_safety_violation_for_method<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId, - method: &ty::Method<'tcx>) - -> Option -{ - // Any method that has a `Self : Sized` requisite is otherwise - // exempt from the regulations. - if generics_require_sized_self(tcx, &method.generics, &method.predicates) { - return None; + }) } - virtual_call_violation_for_method(tcx, trait_def_id, method) -} + fn trait_has_sized_self(self, trait_def_id: DefId) -> bool { + let trait_def = self.lookup_trait_def(trait_def_id); + let trait_predicates = self.lookup_predicates(trait_def_id); + self.generics_require_sized_self(&trait_def.generics, &trait_predicates) + } -/// We say a method is *vtable safe* if it can be invoked on a trait -/// object. Note that object-safe traits can have some -/// non-vtable-safe methods, so long as they require `Self:Sized` or -/// otherwise ensure that they cannot be used when `Self=Trait`. -pub fn is_vtable_safe_method<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId, - method: &ty::Method<'tcx>) + fn generics_require_sized_self(self, + generics: &ty::Generics<'gcx>, + predicates: &ty::GenericPredicates<'gcx>) -> bool -{ - virtual_call_violation_for_method(tcx, trait_def_id, method).is_none() -} + { + let sized_def_id = match self.lang_items.sized_trait() { + Some(def_id) => def_id, + None => { return false; /* No Sized trait, can't require it! */ } + }; + + // Search for a predicate like `Self : Sized` amongst the trait bounds. + let free_substs = self.construct_free_substs(generics, + self.region_maps.node_extent(ast::DUMMY_NODE_ID)); + let predicates = predicates.instantiate(self, &free_substs).predicates.into_vec(); + elaborate_predicates(self, predicates) + .any(|predicate| { + match predicate { + ty::Predicate::Trait(ref trait_pred) if trait_pred.def_id() == sized_def_id => { + trait_pred.0.self_ty().is_self() + } + ty::Predicate::Projection(..) | + ty::Predicate::Trait(..) | + ty::Predicate::Rfc1592(..) | + ty::Predicate::Equate(..) | + ty::Predicate::RegionOutlives(..) | + ty::Predicate::WellFormed(..) | + ty::Predicate::ObjectSafe(..) | + ty::Predicate::ClosureKind(..) | + ty::Predicate::TypeOutlives(..) => { + false + } + } + }) + } -/// Returns `Some(_)` if this method cannot be called on a trait -/// object; this does not necessarily imply that the enclosing trait -/// is not object safe, because the method might have a where clause -/// `Self:Sized`. -fn virtual_call_violation_for_method<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId, - method: &ty::Method<'tcx>) - -> Option -{ - // The method's first parameter must be something that derefs (or - // autorefs) to `&self`. For now, we only accept `self`, `&self` - // and `Box`. - match method.explicit_self { - ty::ExplicitSelfCategory::Static => { - return Some(MethodViolationCode::StaticMethod); + /// Returns `Some(_)` if this method makes the containing trait not object safe. 
+ fn object_safety_violation_for_method(self, + trait_def_id: DefId, + method: &ty::Method<'gcx>) + -> Option + { + // Any method that has a `Self : Sized` requisite is otherwise + // exempt from the regulations. + if self.generics_require_sized_self(&method.generics, &method.predicates) { + return None; } - ty::ExplicitSelfCategory::ByValue | - ty::ExplicitSelfCategory::ByReference(..) | - ty::ExplicitSelfCategory::ByBox => { - } + self.virtual_call_violation_for_method(trait_def_id, method) } - // The `Self` type is erased, so it should not appear in list of - // arguments or return type apart from the receiver. - let ref sig = method.fty.sig; - for &input_ty in &sig.0.inputs[1..] { - if contains_illegal_self_type_reference(tcx, trait_def_id, input_ty) { - return Some(MethodViolationCode::ReferencesSelf); + /// We say a method is *vtable safe* if it can be invoked on a trait + /// object. Note that object-safe traits can have some + /// non-vtable-safe methods, so long as they require `Self:Sized` or + /// otherwise ensure that they cannot be used when `Self=Trait`. + pub fn is_vtable_safe_method(self, + trait_def_id: DefId, + method: &ty::Method<'gcx>) + -> bool + { + // Any method that has a `Self : Sized` requisite can't be called. + if self.generics_require_sized_self(&method.generics, &method.predicates) { + return false; } - } - if let ty::FnConverging(result_type) = sig.0.output { - if contains_illegal_self_type_reference(tcx, trait_def_id, result_type) { - return Some(MethodViolationCode::ReferencesSelf); - } - } - // We can't monomorphize things like `fn foo(...)`. - if !method.generics.types.is_empty_in(subst::FnSpace) { - return Some(MethodViolationCode::Generic); + self.virtual_call_violation_for_method(trait_def_id, method).is_none() } - None -} + /// Returns `Some(_)` if this method cannot be called on a trait + /// object; this does not necessarily imply that the enclosing trait + /// is not object safe, because the method might have a where clause + /// `Self:Sized`. + fn virtual_call_violation_for_method(self, + trait_def_id: DefId, + method: &ty::Method<'tcx>) + -> Option + { + // The method's first parameter must be something that derefs (or + // autorefs) to `&self`. For now, we only accept `self`, `&self` + // and `Box`. + match method.explicit_self { + ty::ExplicitSelfCategory::Static => { + return Some(MethodViolationCode::StaticMethod); + } -fn contains_illegal_self_type_reference<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId, - ty: Ty<'tcx>) - -> bool -{ - // This is somewhat subtle. In general, we want to forbid - // references to `Self` in the argument and return types, - // since the value of `Self` is erased. However, there is one - // exception: it is ok to reference `Self` in order to access - // an associated type of the current trait, since we retain - // the value of those associated types in the object type - // itself. - // - // ```rust - // trait SuperTrait { - // type X; - // } - // - // trait Trait : SuperTrait { - // type Y; - // fn foo(&self, x: Self) // bad - // fn foo(&self) -> Self // bad - // fn foo(&self) -> Option // bad - // fn foo(&self) -> Self::Y // OK, desugars to next example - // fn foo(&self) -> ::Y // OK - // fn foo(&self) -> Self::X // OK, desugars to next example - // fn foo(&self) -> ::X // OK - // } - // ``` - // - // However, it is not as simple as allowing `Self` in a projected - // type, because there are illegal ways to use `Self` as well: - // - // ```rust - // trait Trait : SuperTrait { - // ... 
- // fn foo(&self) -> ::X; - // } - // ``` - // - // Here we will not have the type of `X` recorded in the - // object type, and we cannot resolve `Self as SomeOtherTrait` - // without knowing what `Self` is. - - let mut supertraits: Option>> = None; - let mut error = false; - ty.maybe_walk(|ty| { - match ty.sty { - ty::TyParam(ref param_ty) => { - if param_ty.space == SelfSpace { - error = true; - } + ty::ExplicitSelfCategory::ByValue | + ty::ExplicitSelfCategory::ByReference(..) | + ty::ExplicitSelfCategory::ByBox => { + } + } - false // no contained types to walk + // The `Self` type is erased, so it should not appear in list of + // arguments or return type apart from the receiver. + let ref sig = method.fty.sig; + for &input_ty in &sig.0.inputs[1..] { + if self.contains_illegal_self_type_reference(trait_def_id, input_ty) { + return Some(MethodViolationCode::ReferencesSelf); + } + } + if let ty::FnConverging(result_type) = sig.0.output { + if self.contains_illegal_self_type_reference(trait_def_id, result_type) { + return Some(MethodViolationCode::ReferencesSelf); } + } - ty::TyProjection(ref data) => { - // This is a projected type `::X`. + // We can't monomorphize things like `fn foo(...)`. + if !method.generics.types.is_empty_in(subst::FnSpace) { + return Some(MethodViolationCode::Generic); + } - // Compute supertraits of current trait lazily. - if supertraits.is_none() { - let trait_def = tcx.lookup_trait_def(trait_def_id); - let trait_ref = ty::Binder(trait_def.trait_ref.clone()); - supertraits = Some(traits::supertraits(tcx, trait_ref).collect()); + None + } + + fn contains_illegal_self_type_reference(self, + trait_def_id: DefId, + ty: Ty<'tcx>) + -> bool + { + // This is somewhat subtle. In general, we want to forbid + // references to `Self` in the argument and return types, + // since the value of `Self` is erased. However, there is one + // exception: it is ok to reference `Self` in order to access + // an associated type of the current trait, since we retain + // the value of those associated types in the object type + // itself. + // + // ```rust + // trait SuperTrait { + // type X; + // } + // + // trait Trait : SuperTrait { + // type Y; + // fn foo(&self, x: Self) // bad + // fn foo(&self) -> Self // bad + // fn foo(&self) -> Option // bad + // fn foo(&self) -> Self::Y // OK, desugars to next example + // fn foo(&self) -> ::Y // OK + // fn foo(&self) -> Self::X // OK, desugars to next example + // fn foo(&self) -> ::X // OK + // } + // ``` + // + // However, it is not as simple as allowing `Self` in a projected + // type, because there are illegal ways to use `Self` as well: + // + // ```rust + // trait Trait : SuperTrait { + // ... + // fn foo(&self) -> ::X; + // } + // ``` + // + // Here we will not have the type of `X` recorded in the + // object type, and we cannot resolve `Self as SomeOtherTrait` + // without knowing what `Self` is. + + let mut supertraits: Option>> = None; + let mut error = false; + ty.maybe_walk(|ty| { + match ty.sty { + ty::TyParam(ref param_ty) => { + if param_ty.space == SelfSpace { + error = true; + } + + false // no contained types to walk } - // Determine whether the trait reference `Foo as - // SomeTrait` is in fact a supertrait of the - // current trait. In that case, this type is - // legal, because the type `X` will be specified - // in the object type. Note that we can just use - // direct equality here because all of these types - // are part of the formal parameter listing, and - // hence there should be no inference variables. 
- let projection_trait_ref = ty::Binder(data.trait_ref.clone()); - let is_supertrait_of_current_trait = - supertraits.as_ref().unwrap().contains(&projection_trait_ref); - - if is_supertrait_of_current_trait { - false // do not walk contained types, do not report error, do collect $200 - } else { - true // DO walk contained types, POSSIBLY reporting an error + ty::TyProjection(ref data) => { + // This is a projected type `::X`. + + // Compute supertraits of current trait lazily. + if supertraits.is_none() { + let trait_def = self.lookup_trait_def(trait_def_id); + let trait_ref = ty::Binder(trait_def.trait_ref.clone()); + supertraits = Some(traits::supertraits(self, trait_ref).collect()); + } + + // Determine whether the trait reference `Foo as + // SomeTrait` is in fact a supertrait of the + // current trait. In that case, this type is + // legal, because the type `X` will be specified + // in the object type. Note that we can just use + // direct equality here because all of these types + // are part of the formal parameter listing, and + // hence there should be no inference variables. + let projection_trait_ref = ty::Binder(data.trait_ref.clone()); + let is_supertrait_of_current_trait = + supertraits.as_ref().unwrap().contains(&projection_trait_ref); + + if is_supertrait_of_current_trait { + false // do not walk contained types, do not report error, do collect $200 + } else { + true // DO walk contained types, POSSIBLY reporting an error + } } - } - _ => true, // walk contained types, if any - } - }); + _ => true, // walk contained types, if any + } + }); - error + error + } } diff --git a/src/librustc/traits/project.rs b/src/librustc/traits/project.rs index a0d6f5f912..5c7095beb7 100644 --- a/src/librustc/traits/project.rs +++ b/src/librustc/traits/project.rs @@ -11,7 +11,6 @@ //! Code for projecting associated types out of trait references. use super::elaborate_predicates; -use super::report_overflow_error; use super::specialization_graph; use super::translate_substs; use super::Obligation; @@ -20,6 +19,7 @@ use super::PredicateObligation; use super::SelectionContext; use super::SelectionError; use super::VtableClosureData; +use super::VtableFnPointerData; use super::VtableImplData; use super::util; @@ -40,7 +40,7 @@ use std::rc::Rc; pub enum ProjectionMode { /// FIXME (#32205) /// At coherence-checking time, we're still constructing the - /// specialization graph, and thus we only project project + /// specialization graph, and thus we only project /// non-`default` associated types that are defined directly in /// the applicable impl. (This behavior should be improved over /// time, to allow for successful projections modulo cycles @@ -152,14 +152,8 @@ enum ProjectionTyCandidate<'tcx> { // from the definition of `Trait` when you have something like <::B as Trait2>::C TraitDef(ty::PolyProjectionPredicate<'tcx>), - // defined in an impl - Impl(VtableImplData<'tcx, PredicateObligation<'tcx>>), - - // closure return type - Closure(VtableClosureData<'tcx, PredicateObligation<'tcx>>), - - // fn pointer return type - FnPointer(Ty<'tcx>), + // from a "impl" (or a "pseudo-impl" returned by select) + Select, } struct ProjectionTyCandidateSet<'tcx> { @@ -172,8 +166,8 @@ struct ProjectionTyCandidateSet<'tcx> { /// for<...> ::U == V /// /// If successful, this may result in additional obligations. 
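In user code, constraints of the form `<T as Trait>::U == V` arise from associated-type bindings in bounds; a minimal sketch with hypothetical names (not part of this patch's sources):

trait Parser {
    type Output;
    fn parse(&self, input: &str) -> Self::Output;
}

struct IntParser;

impl Parser for IntParser {
    type Output = i64;
    fn parse(&self, input: &str) -> i64 {
        input.trim().parse().unwrap_or(0)
    }
}

// The `P: Parser<Output = i64>` bound desugars into the trait predicate
// `P: Parser` plus the projection predicate `<P as Parser>::Output == i64`,
// which is the kind of obligation poly_project_and_unify_type discharges.
fn parse_int<P: Parser<Output = i64>>(p: &P, s: &str) -> i64 {
    p.parse(s)
}

fn main() {
    assert_eq!(parse_int(&IntParser, " 42 "), 42);
}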
-pub fn poly_project_and_unify_type<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +pub fn poly_project_and_unify_type<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &PolyProjectionObligation<'tcx>) -> Result>>, MismatchedProjectionTypes<'tcx>> { @@ -188,7 +182,7 @@ pub fn poly_project_and_unify_type<'cx,'tcx>( let skol_obligation = obligation.with(skol_predicate); match project_and_unify_type(selcx, &skol_obligation) { Ok(result) => { - match infcx.leak_check(&skol_map, snapshot) { + match infcx.leak_check(false, &skol_map, snapshot) { Ok(()) => Ok(infcx.plug_leaks(skol_map, snapshot, &result)), Err(e) => Err(MismatchedProjectionTypes { err: e }), } @@ -205,8 +199,8 @@ pub fn poly_project_and_unify_type<'cx,'tcx>( /// ::U == V /// /// If successful, this may result in additional obligations. -fn project_and_unify_type<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn project_and_unify_type<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionObligation<'tcx>) -> Result>>, MismatchedProjectionTypes<'tcx>> { @@ -219,10 +213,7 @@ fn project_and_unify_type<'cx,'tcx>( obligation.cause.clone(), obligation.recursion_depth) { Some(n) => n, - None => { - consider_unification_despite_ambiguity(selcx, obligation); - return Ok(None); - } + None => return Ok(None), }; debug!("project_and_unify_type: normalized_ty={:?} obligations={:?}", @@ -231,7 +222,7 @@ fn project_and_unify_type<'cx,'tcx>( let infcx = selcx.infcx(); let origin = TypeOrigin::RelateOutputImplTypes(obligation.cause.span); - match infer::mk_eqty(infcx, true, origin, normalized_ty, obligation.predicate.ty) { + match infcx.eq_types(true, origin, normalized_ty, obligation.predicate.ty) { Ok(InferOk { obligations: inferred_obligations, .. }) => { // FIXME(#32730) propagate obligations assert!(inferred_obligations.is_empty()); @@ -241,77 +232,27 @@ fn project_and_unify_type<'cx,'tcx>( } } -fn consider_unification_despite_ambiguity<'cx,'tcx>(selcx: &mut SelectionContext<'cx,'tcx>, - obligation: &ProjectionObligation<'tcx>) { - debug!("consider_unification_despite_ambiguity(obligation={:?})", - obligation); - - let def_id = obligation.predicate.projection_ty.trait_ref.def_id; - match selcx.tcx().lang_items.fn_trait_kind(def_id) { - Some(_) => { } - None => { return; } - } - - let infcx = selcx.infcx(); - let self_ty = obligation.predicate.projection_ty.trait_ref.self_ty(); - let self_ty = infcx.shallow_resolve(self_ty); - debug!("consider_unification_despite_ambiguity: self_ty.sty={:?}", - self_ty.sty); - match self_ty.sty { - ty::TyClosure(closure_def_id, ref substs) => { - let closure_typer = selcx.closure_typer(); - let closure_type = closure_typer.closure_type(closure_def_id, substs); - let ty::Binder((_, ret_type)) = - util::closure_trait_ref_and_return_type(infcx.tcx, - def_id, - self_ty, - &closure_type.sig, - util::TupleArgumentsFlag::No); - // We don't have to normalize the return type here - this is only - // reached for TyClosure: Fn inputs where the closure kind is - // still unknown, which should only occur in typeck where the - // closure type is already normalized. 
- let (ret_type, _) = - infcx.replace_late_bound_regions_with_fresh_var( - obligation.cause.span, - infer::AssocTypeProjection(obligation.predicate.projection_ty.item_name), - &ty::Binder(ret_type)); - - debug!("consider_unification_despite_ambiguity: ret_type={:?}", - ret_type); - let origin = TypeOrigin::RelateOutputImplTypes(obligation.cause.span); - let obligation_ty = obligation.predicate.ty; - match infer::mk_eqty(infcx, true, origin, obligation_ty, ret_type) { - Ok(InferOk { obligations, .. }) => { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); - } - Err(_) => { /* ignore errors */ } - } - } - _ => { } - } -} - /// Normalizes any associated type projections in `value`, replacing /// them with a fully resolved type where possible. The return value /// combines the normalized result and any additional obligations that /// were incurred as result. -pub fn normalize<'a,'b,'tcx,T>(selcx: &'a mut SelectionContext<'b,'tcx>, - cause: ObligationCause<'tcx>, - value: &T) - -> Normalized<'tcx, T> +pub fn normalize<'a, 'b, 'gcx, 'tcx, T>(selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, + cause: ObligationCause<'tcx>, + value: &T) + -> Normalized<'tcx, T> where T : TypeFoldable<'tcx> { normalize_with_depth(selcx, cause, 0, value) } /// As `normalize`, but with a custom depth. -pub fn normalize_with_depth<'a,'b,'tcx,T>(selcx: &'a mut SelectionContext<'b,'tcx>, - cause: ObligationCause<'tcx>, - depth: usize, - value: &T) - -> Normalized<'tcx, T> +pub fn normalize_with_depth<'a, 'b, 'gcx, 'tcx, T>( + selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, + cause: ObligationCause<'tcx>, + depth: usize, + value: &T) + -> Normalized<'tcx, T> + where T : TypeFoldable<'tcx> { let mut normalizer = AssociatedTypeNormalizer::new(selcx, cause, depth); @@ -323,18 +264,18 @@ pub fn normalize_with_depth<'a,'b,'tcx,T>(selcx: &'a mut SelectionContext<'b,'tc } } -struct AssociatedTypeNormalizer<'a,'b:'a,'tcx:'b> { - selcx: &'a mut SelectionContext<'b,'tcx>, +struct AssociatedTypeNormalizer<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> { + selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, cause: ObligationCause<'tcx>, obligations: Vec>, depth: usize, } -impl<'a,'b,'tcx> AssociatedTypeNormalizer<'a,'b,'tcx> { - fn new(selcx: &'a mut SelectionContext<'b,'tcx>, +impl<'a, 'b, 'gcx, 'tcx> AssociatedTypeNormalizer<'a, 'b, 'gcx, 'tcx> { + fn new(selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, cause: ObligationCause<'tcx>, depth: usize) - -> AssociatedTypeNormalizer<'a,'b,'tcx> + -> AssociatedTypeNormalizer<'a, 'b, 'gcx, 'tcx> { AssociatedTypeNormalizer { selcx: selcx, @@ -355,8 +296,8 @@ impl<'a,'b,'tcx> AssociatedTypeNormalizer<'a,'b,'tcx> { } } -impl<'a,'b,'tcx> TypeFolder<'tcx> for AssociatedTypeNormalizer<'a,'b,'tcx> { - fn tcx(&self) -> &TyCtxt<'tcx> { +impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssociatedTypeNormalizer<'a, 'b, 'gcx, 'tcx> { + fn tcx<'c>(&'c self) -> TyCtxt<'c, 'gcx, 'tcx> { self.selcx.tcx() } @@ -424,8 +365,8 @@ impl<'tcx,T> Normalized<'tcx,T> { /// there are unresolved type variables in the projection, we will /// substitute a fresh type variable `$X` and generate a new /// obligation `::Item == $X` for later. -pub fn normalize_projection_type<'a,'b,'tcx>( - selcx: &'a mut SelectionContext<'b,'tcx>, +pub fn normalize_projection_type<'a, 'b, 'gcx, 'tcx>( + selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, projection_ty: ty::ProjectionTy<'tcx>, cause: ObligationCause<'tcx>, depth: usize) @@ -455,8 +396,8 @@ pub fn normalize_projection_type<'a,'b,'tcx>( /// as Trait>::Item`. 
The result is always a type (and possibly /// additional obligations). Returns `None` in the case of ambiguity, /// which indicates that there are unbound type variables. -fn opt_normalize_projection_type<'a,'b,'tcx>( - selcx: &'a mut SelectionContext<'b,'tcx>, +fn opt_normalize_projection_type<'a, 'b, 'gcx, 'tcx>( + selcx: &'a mut SelectionContext<'b, 'gcx, 'tcx>, projection_ty: ty::ProjectionTy<'tcx>, cause: ObligationCause<'tcx>, depth: usize) @@ -544,11 +485,11 @@ fn opt_normalize_projection_type<'a,'b,'tcx>( /// an error for this obligation, but we legitimately should not, /// because it contains `[type error]`. Yuck! (See issue #29857 for /// one case where this arose.) -fn normalize_to_error<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, - projection_ty: ty::ProjectionTy<'tcx>, - cause: ObligationCause<'tcx>, - depth: usize) - -> NormalizedTy<'tcx> +fn normalize_to_error<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, + projection_ty: ty::ProjectionTy<'tcx>, + cause: ObligationCause<'tcx>, + depth: usize) + -> NormalizedTy<'tcx> { let trait_ref = projection_ty.trait_ref.to_poly_trait_ref(); let trait_obligation = Obligation { cause: cause, @@ -567,8 +508,8 @@ enum ProjectedTy<'tcx> { } /// Compute the result of a projection type (if we can). -fn project_type<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn project_type<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>) -> Result, ProjectionTyError<'tcx>> { @@ -578,7 +519,7 @@ fn project_type<'cx,'tcx>( let recursion_limit = selcx.tcx().sess.recursion_limit.get(); if obligation.recursion_depth >= recursion_limit { debug!("project: overflow!"); - report_overflow_error(selcx.infcx(), &obligation, true); + selcx.infcx().report_overflow_error(&obligation, true); } let obligation_trait_ref = @@ -652,10 +593,8 @@ fn project_type<'cx,'tcx>( debug!("retaining param-env candidates only from {:?}", candidates.vec); candidates.vec.retain(|c| match *c { ProjectionTyCandidate::ParamEnv(..) => true, - ProjectionTyCandidate::Impl(..) | - ProjectionTyCandidate::Closure(..) | ProjectionTyCandidate::TraitDef(..) | - ProjectionTyCandidate::FnPointer(..) => false, + ProjectionTyCandidate::Select => false, }); debug!("resulting candidate set: {:?}", candidates.vec); if candidates.vec.len() != 1 { @@ -665,78 +604,12 @@ fn project_type<'cx,'tcx>( assert!(candidates.vec.len() <= 1); - let possible_candidate = candidates.vec.pop().and_then(|candidate| { - // In Any (i.e. trans) mode, all projections succeed; - // otherwise, we need to be sensitive to `default` and - // specialization. - if !selcx.projection_mode().is_any() { - if let ProjectionTyCandidate::Impl(ref impl_data) = candidate { - if let Some(node_item) = assoc_ty_def(selcx, - impl_data.impl_def_id, - obligation.predicate.item_name) { - if node_item.node.is_from_trait() { - if node_item.item.ty.is_some() { - // If the associated type has a default from the - // trait, that should be considered `default` and - // hence not projected. - // - // Note, however, that we allow a projection from - // the trait specifically in the case that the trait - // does *not* give a default. This is purely to - // avoid spurious errors: the situation can only - // arise when *no* impl in the specialization chain - // has provided a definition for the type. When we - // confirm the candidate, we'll turn the projection - // into a TyError, since the actual error will be - // reported in `check_impl_items_against_trait`. 
- return None; - } - } else if node_item.item.defaultness.is_default() { - return None; - } - } else { - // Normally this situation could only arise througha - // compiler bug, but at coherence-checking time we only look - // at the topmost impl (we don't even consider the trait - // itself) for the definition -- so we can fail to find a - // definition of the type even if it exists. - - // For now, we just unconditionally ICE, because otherwise, - // examples like the following will succeed: - // - // ``` - // trait Assoc { - // type Output; - // } - // - // impl Assoc for T { - // default type Output = bool; - // } - // - // impl Assoc for u8 {} - // impl Assoc for u16 {} - // - // trait Foo {} - // impl Foo for ::Output {} - // impl Foo for ::Output {} - // return None; - // } - // ``` - // - // The essential problem here is that the projection fails, - // leaving two unnormalized types, which appear not to unify - // -- so the overlap check succeeds, when it should fail. - bug!("Tried to project an inherited associated type during \ - coherence checking, which is currently not supported."); - } - } - } - Some(candidate) - }); - - match possible_candidate { + match candidates.vec.pop() { Some(candidate) => { - let (ty, obligations) = confirm_candidate(selcx, obligation, candidate); + let (ty, obligations) = confirm_candidate(selcx, + obligation, + &obligation_trait_ref, + candidate); Ok(ProjectedTy::Progress(ty, obligations)) } None => { @@ -750,8 +623,8 @@ fn project_type<'cx,'tcx>( /// The first thing we have to do is scan through the parameter /// environment to see whether there are any projection predicates /// there that can answer this question. -fn assemble_candidates_from_param_env<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn assemble_candidates_from_param_env<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, candidate_set: &mut ProjectionTyCandidateSet<'tcx>) @@ -776,8 +649,8 @@ fn assemble_candidates_from_param_env<'cx,'tcx>( /// ``` /// /// Here, for example, we could conclude that the result is `i32`. 
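A small sketch of the trait-definition candidate described above, where a bound written on an associated type in the trait itself answers the projection (hypothetical names, assuming the usual elaboration of item bounds):

trait Backend {
    type Handle;
}

trait Config {
    // The trait definition pins down `<Self::Store as Backend>::Handle`.
    type Store: Backend<Handle = u32>;
}

// Nothing is known about `C` beyond `C: Config`, yet the bound declared on
// `Config::Store` lets the projection normalize to `u32`.
fn handle_of<C: Config>(_: &C) -> <C::Store as Backend>::Handle {
    0u32
}

struct Mem;
impl Backend for Mem { type Handle = u32; }

struct App;
impl Config for App { type Store = Mem; }

fn main() {
    assert_eq!(handle_of(&App), 0);
}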
-fn assemble_candidates_from_trait_def<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn assemble_candidates_from_trait_def<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, candidate_set: &mut ProjectionTyCandidateSet<'tcx>) @@ -808,8 +681,8 @@ fn assemble_candidates_from_trait_def<'cx,'tcx>( bounds) } -fn assemble_candidates_from_predicates<'cx,'tcx,I>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn assemble_candidates_from_predicates<'cx, 'gcx, 'tcx, I>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, candidate_set: &mut ProjectionTyCandidateSet<'tcx>, @@ -855,40 +728,8 @@ fn assemble_candidates_from_predicates<'cx,'tcx,I>( } } -fn assemble_candidates_from_object_type<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, - obligation: &ProjectionTyObligation<'tcx>, - obligation_trait_ref: &ty::TraitRef<'tcx>, - candidate_set: &mut ProjectionTyCandidateSet<'tcx>) -{ - let self_ty = obligation_trait_ref.self_ty(); - let object_ty = selcx.infcx().shallow_resolve(self_ty); - debug!("assemble_candidates_from_object_type(object_ty={:?})", - object_ty); - let data = match object_ty.sty { - ty::TyTrait(ref data) => data, - _ => { - span_bug!( - obligation.cause.span, - "assemble_candidates_from_object_type called with non-object: {:?}", - object_ty); - } - }; - let projection_bounds = data.projection_bounds_with_self_ty(selcx.tcx(), object_ty); - let env_predicates = projection_bounds.iter() - .map(|p| p.to_predicate()) - .collect(); - let env_predicates = elaborate_predicates(selcx.tcx(), env_predicates); - assemble_candidates_from_predicates(selcx, - obligation, - obligation_trait_ref, - candidate_set, - ProjectionTyCandidate::ParamEnv, - env_predicates) -} - -fn assemble_candidates_from_impls<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn assemble_candidates_from_impls<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, obligation_trait_ref: &ty::TraitRef<'tcx>, candidate_set: &mut ProjectionTyCandidateSet<'tcx>) @@ -898,82 +739,183 @@ fn assemble_candidates_from_impls<'cx,'tcx>( // start out by selecting the predicate `T as TraitRef<...>`: let poly_trait_ref = obligation_trait_ref.to_poly_trait_ref(); let trait_obligation = obligation.with(poly_trait_ref.to_poly_trait_predicate()); - let vtable = match selcx.select(&trait_obligation) { - Ok(Some(vtable)) => vtable, - Ok(None) => { - candidate_set.ambiguous = true; - return Ok(()); - } - Err(e) => { - debug!("assemble_candidates_from_impls: selection error {:?}", - e); - return Err(e); - } - }; + selcx.infcx().probe(|_| { + let vtable = match selcx.select(&trait_obligation) { + Ok(Some(vtable)) => vtable, + Ok(None) => { + candidate_set.ambiguous = true; + return Ok(()); + } + Err(e) => { + debug!("assemble_candidates_from_impls: selection error {:?}", + e); + return Err(e); + } + }; - match vtable { - super::VtableImpl(data) => { - debug!("assemble_candidates_from_impls: impl candidate {:?}", - data); + match vtable { + super::VtableClosure(_) | + super::VtableFnPointer(_) | + super::VtableObject(_) => { + debug!("assemble_candidates_from_impls: vtable={:?}", + vtable); - candidate_set.vec.push( - ProjectionTyCandidate::Impl(data)); - } - super::VtableObject(_) => { - assemble_candidates_from_object_type( - selcx, obligation, obligation_trait_ref, candidate_set); - } - 
super::VtableClosure(data) => { - candidate_set.vec.push( - ProjectionTyCandidate::Closure(data)); - } - super::VtableFnPointer(fn_type) => { - candidate_set.vec.push( - ProjectionTyCandidate::FnPointer(fn_type)); - } - super::VtableParam(..) => { - // This case tell us nothing about the value of an - // associated type. Consider: - // - // ``` - // trait SomeTrait { type Foo; } - // fn foo(...) { } - // ``` - // - // If the user writes `::Foo`, then the `T - // : SomeTrait` binding does not help us decide what the - // type `Foo` is (at least, not more specifically than - // what we already knew). - // - // But wait, you say! What about an example like this: - // - // ``` - // fn bar>(...) { ... } - // ``` - // - // Doesn't the `T : Sometrait` predicate help - // resolve `T::Foo`? And of course it does, but in fact - // that single predicate is desugared into two predicates - // in the compiler: a trait predicate (`T : SomeTrait`) and a - // projection. And the projection where clause is handled - // in `assemble_candidates_from_param_env`. - } - super::VtableDefaultImpl(..) | - super::VtableBuiltin(..) => { - // These traits have no associated types. - span_bug!( - obligation.cause.span, - "Cannot project an associated type from `{:?}`", - vtable); + candidate_set.vec.push(ProjectionTyCandidate::Select); + } + super::VtableImpl(ref impl_data) if !selcx.projection_mode().is_any() => { + // We have to be careful when projecting out of an + // impl because of specialization. If we are not in + // trans (i.e., projection mode is not "any"), and the + // impl's type is declared as default, then we disable + // projection (even if the trait ref is fully + // monomorphic). In the case where trait ref is not + // fully monomorphic (i.e., includes type parameters), + // this is because those type parameters may + // ultimately be bound to types from other crates that + // may have specialized impls we can't see. In the + // case where the trait ref IS fully monomorphic, this + // is a policy decision that we made in the RFC in + // order to preserve flexibility for the crate that + // defined the specializable impl to specialize later + // for existing types. + // + // In either case, we handle this by not adding a + // candidate for an impl if it contains a `default` + // type. + let opt_node_item = assoc_ty_def(selcx, + impl_data.impl_def_id, + obligation.predicate.item_name); + let new_candidate = if let Some(node_item) = opt_node_item { + if node_item.node.is_from_trait() { + if node_item.item.ty.is_some() { + // The impl inherited a `type Foo = + // Bar` given in the trait, which is + // implicitly default. No candidate. + None + } else { + // The impl did not specify `type` and neither + // did the trait: + // + // ```rust + // trait Foo { type T; } + // impl Foo for Bar { } + // ``` + // + // This is an error, but it will be + // reported in `check_impl_items_against_trait`. + // We accept it here but will flag it as + // an error when we confirm the candidate + // (which will ultimately lead to `normalize_to_error` + // being invoked). + Some(ProjectionTyCandidate::Select) + } + } else if node_item.item.defaultness.is_default() { + // The impl specified `default type Foo = + // Bar`. No candidate. + None + } else { + // The impl specified `type Foo = Bar` + // with no default. Add a candidate. + Some(ProjectionTyCandidate::Select) + } + } else { + // This is saying that neither the trait nor + // the impl contain a definition for this + // associated type. 
Normally this situation + // could only arise through a compiler bug -- + // if the user wrote a bad item name, it + // should have failed in astconv. **However**, + // at coherence-checking time, we only look at + // the topmost impl (we don't even consider + // the trait itself) for the definition -- and + // so in that case it may be that the trait + // *DOES* have a declaration, but we don't see + // it, and we end up in this branch. + // + // This is kind of tricky to handle actually. + // For now, we just unconditionally ICE, + // because otherwise, examples like the + // following will succeed: + // + // ``` + // trait Assoc { + // type Output; + // } + // + // impl Assoc for T { + // default type Output = bool; + // } + // + // impl Assoc for u8 {} + // impl Assoc for u16 {} + // + // trait Foo {} + // impl Foo for ::Output {} + // impl Foo for ::Output {} + // return None; + // } + // ``` + // + // The essential problem here is that the + // projection fails, leaving two unnormalized + // types, which appear not to unify -- so the + // overlap check succeeds, when it should + // fail. + bug!("Tried to project an inherited associated type during \ + coherence checking, which is currently not supported."); + }; + candidate_set.vec.extend(new_candidate); + } + super::VtableImpl(_) => { + // In trans mode, we can just project out of impls, no prob. + assert!(selcx.projection_mode().is_any()); + candidate_set.vec.push(ProjectionTyCandidate::Select); + } + super::VtableParam(..) => { + // This case tell us nothing about the value of an + // associated type. Consider: + // + // ``` + // trait SomeTrait { type Foo; } + // fn foo(...) { } + // ``` + // + // If the user writes `::Foo`, then the `T + // : SomeTrait` binding does not help us decide what the + // type `Foo` is (at least, not more specifically than + // what we already knew). + // + // But wait, you say! What about an example like this: + // + // ``` + // fn bar>(...) { ... } + // ``` + // + // Doesn't the `T : Sometrait` predicate help + // resolve `T::Foo`? And of course it does, but in fact + // that single predicate is desugared into two predicates + // in the compiler: a trait predicate (`T : SomeTrait`) and a + // projection. And the projection where clause is handled + // in `assemble_candidates_from_param_env`. + } + super::VtableDefaultImpl(..) | + super::VtableBuiltin(..) => { + // These traits have no associated types. 
+ span_bug!( + obligation.cause.span, + "Cannot project an associated type from `{:?}`", + vtable); + } } - } - Ok(()) + Ok(()) + }) } -fn confirm_candidate<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn confirm_candidate<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, + obligation_trait_ref: &ty::TraitRef<'tcx>, candidate: ProjectionTyCandidate<'tcx>) -> (Ty<'tcx>, Vec>) { @@ -987,39 +929,140 @@ fn confirm_candidate<'cx,'tcx>( confirm_param_env_candidate(selcx, obligation, poly_projection) } - ProjectionTyCandidate::Impl(impl_vtable) => { - confirm_impl_candidate(selcx, obligation, impl_vtable) + ProjectionTyCandidate::Select => { + confirm_select_candidate(selcx, obligation, obligation_trait_ref) } + } +} - ProjectionTyCandidate::Closure(closure_vtable) => { - confirm_closure_candidate(selcx, obligation, closure_vtable) +fn confirm_select_candidate<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, + obligation: &ProjectionTyObligation<'tcx>, + obligation_trait_ref: &ty::TraitRef<'tcx>) + -> (Ty<'tcx>, Vec>) +{ + let poly_trait_ref = obligation_trait_ref.to_poly_trait_ref(); + let trait_obligation = obligation.with(poly_trait_ref.to_poly_trait_predicate()); + let vtable = match selcx.select(&trait_obligation) { + Ok(Some(vtable)) => vtable, + _ => { + span_bug!( + obligation.cause.span, + "Failed to select `{:?}`", + trait_obligation); } + }; - ProjectionTyCandidate::FnPointer(fn_type) => { - confirm_fn_pointer_candidate(selcx, obligation, fn_type) - } + match vtable { + super::VtableImpl(data) => + confirm_impl_candidate(selcx, obligation, data), + super::VtableClosure(data) => + confirm_closure_candidate(selcx, obligation, data), + super::VtableFnPointer(data) => + confirm_fn_pointer_candidate(selcx, obligation, data), + super::VtableObject(_) => + confirm_object_candidate(selcx, obligation, obligation_trait_ref), + super::VtableDefaultImpl(..) | + super::VtableParam(..) | + super::VtableBuiltin(..) 
=> + // we don't create Select candidates with this kind of resolution + span_bug!( + obligation.cause.span, + "Cannot project an associated type from `{:?}`", + vtable), } } -fn confirm_fn_pointer_candidate<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn confirm_object_candidate<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, + obligation: &ProjectionTyObligation<'tcx>, + obligation_trait_ref: &ty::TraitRef<'tcx>) + -> (Ty<'tcx>, Vec>) +{ + let self_ty = obligation_trait_ref.self_ty(); + let object_ty = selcx.infcx().shallow_resolve(self_ty); + debug!("confirm_object_candidate(object_ty={:?})", + object_ty); + let data = match object_ty.sty { + ty::TyTrait(ref data) => data, + _ => { + span_bug!( + obligation.cause.span, + "confirm_object_candidate called with non-object: {:?}", + object_ty); + } + }; + let projection_bounds = data.projection_bounds_with_self_ty(selcx.tcx(), object_ty); + let env_predicates = projection_bounds.iter() + .map(|p| p.to_predicate()) + .collect(); + let env_predicate = { + let env_predicates = elaborate_predicates(selcx.tcx(), env_predicates); + + // select only those projections that are actually projecting an + // item with the correct name + let env_predicates = env_predicates.filter_map(|p| match p { + ty::Predicate::Projection(data) => + if data.item_name() == obligation.predicate.item_name { + Some(data) + } else { + None + }, + _ => None + }); + + // select those with a relevant trait-ref + let mut env_predicates = env_predicates.filter(|data| { + let origin = TypeOrigin::RelateOutputImplTypes(obligation.cause.span); + let data_poly_trait_ref = data.to_poly_trait_ref(); + let obligation_poly_trait_ref = obligation_trait_ref.to_poly_trait_ref(); + selcx.infcx().probe(|_| { + selcx.infcx().sub_poly_trait_refs(false, + origin, + data_poly_trait_ref, + obligation_poly_trait_ref).is_ok() + }) + }); + + // select the first matching one; there really ought to be one or + // else the object type is not WF, since an object type should + // include all of its projections explicitly + match env_predicates.next() { + Some(env_predicate) => env_predicate, + None => { + debug!("confirm_object_candidate: no env-predicate \ + found in object type `{:?}`; ill-formed", + object_ty); + return (selcx.tcx().types.err, vec!()); + } + } + }; + + confirm_param_env_candidate(selcx, obligation, env_predicate) +} + +fn confirm_fn_pointer_candidate<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, - fn_type: Ty<'tcx>) + fn_pointer_vtable: VtableFnPointerData<'tcx, PredicateObligation<'tcx>>) -> (Ty<'tcx>, Vec>) { - let fn_type = selcx.infcx().shallow_resolve(fn_type); + // FIXME(#32730) propagate obligations (fn pointer vtable nested obligations ONLY come from + // unification in inference) + assert!(fn_pointer_vtable.nested.is_empty()); + let fn_type = selcx.infcx().shallow_resolve(fn_pointer_vtable.fn_ty); let sig = fn_type.fn_sig(); confirm_callable_candidate(selcx, obligation, sig, util::TupleArgumentsFlag::Yes) } -fn confirm_closure_candidate<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn confirm_closure_candidate<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, vtable: VtableClosureData<'tcx, PredicateObligation<'tcx>>) -> (Ty<'tcx>, Vec>) { let closure_typer = selcx.closure_typer(); - let closure_type = closure_typer.closure_type(vtable.closure_def_id, &vtable.substs); + let closure_type = 
closure_typer.closure_type(vtable.closure_def_id, vtable.substs); let Normalized { value: closure_type, mut obligations @@ -1035,8 +1078,8 @@ fn confirm_closure_candidate<'cx,'tcx>( (ty, obligations) } -fn confirm_callable_candidate<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn confirm_callable_candidate<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, fn_sig: &ty::PolyFnSig<'tcx>, flag: util::TupleArgumentsFlag) @@ -1053,11 +1096,10 @@ fn confirm_callable_candidate<'cx,'tcx>( // Note: we unwrap the binder here but re-create it below (1) let ty::Binder((trait_ref, ret_type)) = - util::closure_trait_ref_and_return_type(tcx, - fn_once_def_id, - obligation.predicate.trait_ref.self_ty(), - fn_sig, - flag); + tcx.closure_trait_ref_and_return_type(fn_once_def_id, + obligation.predicate.trait_ref.self_ty(), + fn_sig, + flag); let predicate = ty::Binder(ty::ProjectionPredicate { // (1) recreate binder here projection_ty: ty::ProjectionTy { @@ -1070,8 +1112,8 @@ fn confirm_callable_candidate<'cx,'tcx>( confirm_param_env_candidate(selcx, obligation, predicate) } -fn confirm_param_env_candidate<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn confirm_param_env_candidate<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, poly_projection: ty::PolyProjectionPredicate<'tcx>) -> (Ty<'tcx>, Vec>) @@ -1109,8 +1151,8 @@ fn confirm_param_env_candidate<'cx,'tcx>( (projection.ty, vec!()) } -fn confirm_impl_candidate<'cx,'tcx>( - selcx: &mut SelectionContext<'cx,'tcx>, +fn confirm_impl_candidate<'cx, 'gcx, 'tcx>( + selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>, obligation: &ProjectionTyObligation<'tcx>, impl_vtable: VtableImplData<'tcx, PredicateObligation<'tcx>>) -> (Ty<'tcx>, Vec>) @@ -1149,10 +1191,11 @@ fn confirm_impl_candidate<'cx,'tcx>( /// /// Based on the "projection mode", this lookup may in fact only examine the /// topmost impl. See the comments for `ProjectionMode` for more details. 
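For the `Fn`-family confirmations above, the projected `Output` type is read straight off the callable's signature; a minimal sketch in stable surface syntax (hypothetical names):

// `F: FnOnce(A) -> R` is sugar for `F: FnOnce<(A,)>` together with the
// projection `<F as FnOnce<(A,)>>::Output == R`, which is what the closure
// and fn-pointer confirmation paths extract from the signature.
fn apply<F, A, R>(f: F, a: A) -> R
where
    F: FnOnce(A) -> R,
{
    f(a)
}

fn is_even(x: u32) -> bool {
    x % 2 == 0
}

fn main() {
    // Closure case: `Output` normalizes to `bool` from the closure signature.
    let _via_closure: bool = apply(|x: u32| x % 2 == 0, 4);
    // Fn-pointer case: the same projection, taken from the fn pointer type.
    let _via_fn_ptr: bool = apply(is_even as fn(u32) -> bool, 5);
}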
-fn assoc_ty_def<'cx, 'tcx>(selcx: &SelectionContext<'cx, 'tcx>, - impl_def_id: DefId, - assoc_ty_name: ast::Name) - -> Option>>> +fn assoc_ty_def<'cx, 'gcx, 'tcx>( + selcx: &SelectionContext<'cx, 'gcx, 'tcx>, + impl_def_id: DefId, + assoc_ty_name: ast::Name) + -> Option>>> { let trait_def_id = selcx.tcx().impl_trait_ref(impl_def_id).unwrap().def_id; diff --git a/src/librustc/traits/select.rs b/src/librustc/traits/select.rs index 8d7df6e44a..5307749b87 100644 --- a/src/librustc/traits/select.rs +++ b/src/librustc/traits/select.rs @@ -13,7 +13,6 @@ pub use self::MethodMatchResult::*; pub use self::MethodMatchedData::*; use self::SelectionCandidate::*; -use self::BuiltinBoundConditions::*; use self::EvaluationResult::*; use super::coherence; @@ -21,7 +20,6 @@ use super::DerivedObligationCause; use super::project; use super::project::{normalize_with_depth, Normalized}; use super::{PredicateObligation, TraitObligation, ObligationCause}; -use super::report_overflow_error; use super::{ObligationCauseCode, BuiltinDerivedObligation, ImplDerivedObligation}; use super::{SelectionError, Unimplemented, OutputTypeParameterMismatch}; use super::{ObjectCastObligation, Obligation}; @@ -32,8 +30,7 @@ use super::SelectionResult; use super::{VtableBuiltin, VtableImpl, VtableParam, VtableClosure, VtableFnPointer, VtableObject, VtableDefaultImpl}; use super::{VtableImplData, VtableObjectData, VtableBuiltinData, - VtableClosureData, VtableDefaultImplData}; -use super::object_safety; + VtableClosureData, VtableDefaultImplData, VtableFnPointerData}; use super::util; use hir::def_id::DefId; @@ -45,23 +42,33 @@ use traits; use ty::fast_reject; use ty::relate::TypeRelation; +use rustc_data_structures::snapshot_vec::{SnapshotVecDelegate, SnapshotVec}; use std::cell::RefCell; use std::fmt; +use std::marker::PhantomData; use std::rc::Rc; use syntax::abi::Abi; use hir; -use util::common::ErrorReported; use util::nodemap::FnvHashMap; -pub struct SelectionContext<'cx, 'tcx:'cx> { - infcx: &'cx InferCtxt<'cx, 'tcx>, +struct InferredObligationsSnapshotVecDelegate<'tcx> { + phantom: PhantomData<&'tcx i32>, +} +impl<'tcx> SnapshotVecDelegate for InferredObligationsSnapshotVecDelegate<'tcx> { + type Value = PredicateObligation<'tcx>; + type Undo = (); + fn reverse(_: &mut Vec, _: Self::Undo) {} +} + +pub struct SelectionContext<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { + infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>, /// Freshener used specifically for skolemizing entries on the /// obligation stack. This ensures that all entries on the stack /// at one time will have the same set of skolemized entries, /// which is important for checking for trait bounds that /// recursively require themselves. - freshener: TypeFreshener<'cx, 'tcx>, + freshener: TypeFreshener<'cx, 'gcx, 'tcx>, /// If true, indicates that the evaluation should be conservative /// and consider the possibility of types outside this crate. @@ -78,6 +85,8 @@ pub struct SelectionContext<'cx, 'tcx:'cx> { /// there is no type that the user could *actually name* that /// would satisfy it. This avoids crippling inference, basically. intercrate: bool, + + inferred_obligations: SnapshotVec>, } // A stack that walks back up the stack frame. @@ -188,7 +197,7 @@ pub enum MethodMatchedData { /// parameter environment. 
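The `inferred_obligations` field introduced above is managed with the snapshot discipline of the probe/commit_if_ok wrappers defined further down. The following is a simplified stand-in for that idea only; it is not the rustc_data_structures::snapshot_vec API:

struct SimpleSnapshotVec<T> {
    data: Vec<T>,
}

struct Snapshot {
    len: usize,
}

impl<T> SimpleSnapshotVec<T> {
    fn new() -> Self {
        SimpleSnapshotVec { data: Vec::new() }
    }
    fn push(&mut self, value: T) {
        self.data.push(value);
    }
    fn start_snapshot(&self) -> Snapshot {
        Snapshot { len: self.data.len() }
    }
    // Probe / error path: drop everything recorded since the snapshot.
    fn rollback_to(&mut self, snapshot: Snapshot) {
        self.data.truncate(snapshot.len);
    }
    // Success path of commit_if_ok: keep the recorded entries.
    fn commit(&mut self, _snapshot: Snapshot) {}
}

fn main() {
    let mut obligations = SimpleSnapshotVec::new();
    obligations.push("T: Clone");

    // A probe's speculative obligations are rolled back...
    let snap = obligations.start_snapshot();
    obligations.push("T: Copy");
    obligations.rollback_to(snap);
    assert_eq!(obligations.data.len(), 1);

    // ...while a successful commit keeps them.
    let snap = obligations.start_snapshot();
    obligations.push("T: Copy");
    obligations.commit(snap);
    assert_eq!(obligations.data.len(), 2);
}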
#[derive(PartialEq,Eq,Debug,Clone)] enum SelectionCandidate<'tcx> { - BuiltinCandidate(ty::BuiltinBound), + BuiltinCandidate { has_nested: bool }, ParamCandidate(ty::PolyTraitRef<'tcx>), ImplCandidate(DefId), DefaultImplCandidate(DefId), @@ -198,9 +207,10 @@ enum SelectionCandidate<'tcx> { /// we found an applicable bound in the trait definition. ProjectionCandidate, - /// Implementation of a `Fn`-family trait by one of the - /// anonymous types generated for a `||` expression. - ClosureCandidate(/* closure */ DefId, &'tcx ty::ClosureSubsts<'tcx>), + /// Implementation of a `Fn`-family trait by one of the anonymous types + /// generated for a `||` expression. The ty::ClosureKind informs the + /// confirmation step what ClosureKind obligation to emit. + ClosureCandidate(/* closure */ DefId, ty::ClosureSubsts<'tcx>, ty::ClosureKind), /// Implementation of a `Fn`-family trait by one of the anonymous /// types generated for a fn pointer type (e.g., `fn(int)->int`) @@ -213,6 +223,38 @@ enum SelectionCandidate<'tcx> { BuiltinUnsizeCandidate, } +impl<'a, 'tcx> ty::Lift<'tcx> for SelectionCandidate<'a> { + type Lifted = SelectionCandidate<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + Some(match *self { + BuiltinCandidate { has_nested } => { + BuiltinCandidate { + has_nested: has_nested + } + } + ImplCandidate(def_id) => ImplCandidate(def_id), + DefaultImplCandidate(def_id) => DefaultImplCandidate(def_id), + DefaultImplObjectCandidate(def_id) => { + DefaultImplObjectCandidate(def_id) + } + ProjectionCandidate => ProjectionCandidate, + FnPointerCandidate => FnPointerCandidate, + ObjectCandidate => ObjectCandidate, + BuiltinObjectCandidate => BuiltinObjectCandidate, + BuiltinUnsizeCandidate => BuiltinUnsizeCandidate, + + ParamCandidate(ref trait_ref) => { + return tcx.lift(trait_ref).map(ParamCandidate); + } + ClosureCandidate(def_id, ref substs, kind) => { + return tcx.lift(substs).map(|substs| { + ClosureCandidate(def_id, substs, kind) + }); + } + }) + } +} + struct SelectionCandidateSet<'tcx> { // a list of candidates that definitely apply to the current // obligation (meaning: types unify). @@ -231,10 +273,18 @@ struct EvaluatedCandidate<'tcx> { evaluation: EvaluationResult, } -enum BuiltinBoundConditions<'tcx> { - If(ty::Binder>>), - ParameterBuiltin, - AmbiguousBuiltin +/// When does the builtin impl for `T: Trait` apply? +enum BuiltinImplConditions<'tcx> { + /// The impl is conditional on T1,T2,.. : Trait + Where(ty::Binder>>), + /// There is no built-in impl. There may be some other + /// candidate (a where-clause or user-defined impl). + None, + /// There is *no* impl for this, builtin or not. Ignore + /// all where-clauses. + Never, + /// It is unknown whether there is an impl. 
+ Ambiguous } #[derive(Copy, Clone, Debug, PartialOrd, Ord, PartialEq, Eq)] @@ -257,36 +307,38 @@ pub struct EvaluationCache<'tcx> { hashmap: RefCell, EvaluationResult>> } -impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { - pub fn new(infcx: &'cx InferCtxt<'cx, 'tcx>) -> SelectionContext<'cx, 'tcx> { +impl<'cx, 'gcx, 'tcx> SelectionContext<'cx, 'gcx, 'tcx> { + pub fn new(infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>) -> SelectionContext<'cx, 'gcx, 'tcx> { SelectionContext { infcx: infcx, freshener: infcx.freshener(), intercrate: false, + inferred_obligations: SnapshotVec::new(), } } - pub fn intercrate(infcx: &'cx InferCtxt<'cx, 'tcx>) -> SelectionContext<'cx, 'tcx> { + pub fn intercrate(infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>) -> SelectionContext<'cx, 'gcx, 'tcx> { SelectionContext { infcx: infcx, freshener: infcx.freshener(), intercrate: true, + inferred_obligations: SnapshotVec::new(), } } - pub fn infcx(&self) -> &'cx InferCtxt<'cx, 'tcx> { + pub fn infcx(&self) -> &'cx InferCtxt<'cx, 'gcx, 'tcx> { self.infcx } - pub fn tcx(&self) -> &'cx TyCtxt<'tcx> { + pub fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> { self.infcx.tcx } - pub fn param_env(&self) -> &'cx ty::ParameterEnvironment<'cx, 'tcx> { + pub fn param_env(&self) -> &'cx ty::ParameterEnvironment<'tcx> { self.infcx.param_env() } - pub fn closure_typer(&self) -> &'cx InferCtxt<'cx, 'tcx> { + pub fn closure_typer(&self) -> &'cx InferCtxt<'cx, 'gcx, 'tcx> { self.infcx } @@ -294,6 +346,46 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.infcx.projection_mode() } + /// Wraps the inference context's in_snapshot s.t. snapshot handling is only from the selection + /// context's self. + fn in_snapshot(&mut self, f: F) -> R + where F: FnOnce(&mut Self, &infer::CombinedSnapshot) -> R + { + // The irrefutable nature of the operation means we don't need to snapshot the + // inferred_obligations vector. + self.infcx.in_snapshot(|snapshot| f(self, snapshot)) + } + + /// Wraps a probe s.t. obligations collected during it are ignored and old obligations are + /// retained. + fn probe(&mut self, f: F) -> R + where F: FnOnce(&mut Self, &infer::CombinedSnapshot) -> R + { + let inferred_obligations_snapshot = self.inferred_obligations.start_snapshot(); + let result = self.infcx.probe(|snapshot| f(self, snapshot)); + self.inferred_obligations.rollback_to(inferred_obligations_snapshot); + result + } + + /// Wraps a commit_if_ok s.t. obligations collected during it are not returned in selection if + /// the transaction fails and s.t. old obligations are retained. + fn commit_if_ok(&mut self, f: F) -> Result where + F: FnOnce(&mut Self, &infer::CombinedSnapshot) -> Result + { + let inferred_obligations_snapshot = self.inferred_obligations.start_snapshot(); + match self.infcx.commit_if_ok(|snapshot| f(self, snapshot)) { + Ok(ok) => { + self.inferred_obligations.commit(inferred_obligations_snapshot); + Ok(ok) + }, + Err(err) => { + self.inferred_obligations.rollback_to(inferred_obligations_snapshot); + Err(err) + } + } + } + + /////////////////////////////////////////////////////////////////////////// // Selection // @@ -321,72 +413,16 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let stack = self.push_stack(TraitObligationStackList::empty(), obligation); match self.candidate_from_obligation(&stack)? 
{ - None => { - self.consider_unification_despite_ambiguity(obligation); - Ok(None) - } - Some(candidate) => Ok(Some(self.confirm_candidate(obligation, candidate)?)), - } - } - - /// In the particular case of unboxed closure obligations, we can - /// sometimes do some amount of unification for the - /// argument/return types even though we can't yet fully match obligation. - /// The particular case we are interesting in is an obligation of the form: - /// - /// C : FnFoo - /// - /// where `C` is an unboxed closure type and `FnFoo` is one of the - /// `Fn` traits. Because we know that users cannot write impls for closure types - /// themselves, the only way that `C : FnFoo` can fail to match is under two - /// conditions: - /// - /// 1. The closure kind for `C` is not yet known, because inference isn't complete. - /// 2. The closure kind for `C` *is* known, but doesn't match what is needed. - /// For example, `C` may be a `FnOnce` closure, but a `Fn` closure is needed. - /// - /// In either case, we always know what argument types are - /// expected by `C`, no matter what kind of `Fn` trait it - /// eventually matches. So we can go ahead and unify the argument - /// types, even though the end result is ambiguous. - /// - /// Note that this is safe *even if* the trait would never be - /// matched (case 2 above). After all, in that case, an error will - /// result, so it kind of doesn't matter what we do --- unifying - /// the argument types can only be helpful to the user, because - /// once they patch up the kind of closure that is expected, the - /// argment types won't really change. - fn consider_unification_despite_ambiguity(&mut self, obligation: &TraitObligation<'tcx>) { - // Is this a `C : FnFoo(...)` trait reference for some trait binding `FnFoo`? - match self.tcx().lang_items.fn_trait_kind(obligation.predicate.0.def_id()) { - Some(_) => { } - None => { return; } - } - - // Is the self-type a closure type? We ignore bindings here - // because if it is a closure type, it must be a closure type from - // within this current fn, and hence none of the higher-ranked - // lifetimes can appear inside the self-type. - let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder()); - let (closure_def_id, substs) = match self_ty.sty { - ty::TyClosure(id, ref substs) => (id, substs), - _ => { return; } - }; - assert!(!substs.has_escaping_regions()); - - // It is OK to call the unnormalized variant here - this is only - // reached for TyClosure: Fn inputs where the closure kind is - // still unknown, which should only occur in typeck where the - // closure type is already normalized. - let closure_trait_ref = self.closure_trait_ref_unnormalized(obligation, - closure_def_id, - substs); - - match self.confirm_poly_trait_refs(obligation.cause.clone(), - obligation.predicate.to_poly_trait_ref(), - closure_trait_ref) { - Ok(()) => { } - Err(_) => { /* Silently ignore errors. 
*/ } + None => Ok(None), + Some(candidate) => { + let mut candidate = self.confirm_candidate(obligation, candidate)?; + // FIXME(#32730) remove this assertion once inferred obligations are propagated + // from inference + assert!(self.inferred_obligations.len() == 0); + let inferred_obligations = (*self.inferred_obligations).into_iter().cloned(); + candidate.nested_obligations_mut().extend(inferred_obligations); + Ok(Some(candidate)) + }, } } @@ -408,8 +444,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { debug!("evaluate_obligation({:?})", obligation); - self.infcx.probe(|_| { - self.evaluate_predicate_recursively(TraitObligationStackList::empty(), obligation) + self.probe(|this, _| { + this.evaluate_predicate_recursively(TraitObligationStackList::empty(), obligation) .may_apply() }) } @@ -424,8 +460,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { debug!("evaluate_obligation_conservatively({:?})", obligation); - self.infcx.probe(|_| { - self.evaluate_predicate_recursively(TraitObligationStackList::empty(), obligation) + self.probe(|this, _| { + this.evaluate_predicate_recursively(TraitObligationStackList::empty(), obligation) == EvaluatedToOk }) } @@ -475,6 +511,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } match obligation.predicate { + ty::Predicate::Rfc1592(..) => EvaluatedToOk, + ty::Predicate::Trait(ref t) => { assert!(!t.has_escaping_regions()); let obligation = obligation.with(t.clone()); @@ -485,8 +523,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // does this code ever run? match self.infcx.equality_predicate(obligation.cause.span, p) { Ok(InferOk { obligations, .. }) => { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); + self.inferred_obligations.extend(obligations); EvaluatedToOk }, Err(_) => EvaluatedToErr @@ -510,7 +547,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } ty::Predicate::ObjectSafe(trait_def_id) => { - if object_safety::is_object_safe(self.tcx(), trait_def_id) { + if self.tcx().is_object_safe(trait_def_id) { EvaluatedToOk } else { EvaluatedToErr @@ -532,6 +569,21 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } } } + + ty::Predicate::ClosureKind(closure_def_id, kind) => { + match self.infcx.closure_kind(closure_def_id) { + Some(closure_kind) => { + if closure_kind.extends(kind) { + EvaluatedToOk + } else { + EvaluatedToErr + } + } + None => { + EvaluatedToAmbig + } + } + } } } @@ -653,11 +705,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { { debug!("evaluate_candidate: depth={} candidate={:?}", stack.obligation.recursion_depth, candidate); - let result = self.infcx.probe(|_| { + let result = self.probe(|this, _| { let candidate = (*candidate).clone(); - match self.confirm_candidate(stack.obligation, candidate) { + match this.confirm_candidate(stack.obligation, candidate) { Ok(selection) => { - self.evaluate_predicates_recursively( + this.evaluate_predicates_recursively( stack.list(), selection.nested_obligations().iter()) } @@ -669,23 +721,16 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { result } - fn pick_evaluation_cache(&self) -> &EvaluationCache<'tcx> { - // see comment in `pick_candidate_cache` - if self.intercrate || - !self.param_env().caller_bounds.is_empty() - { - &self.param_env().evaluation_cache - } else - { - &self.tcx().evaluation_cache - } - } - fn check_evaluation_cache(&self, trait_ref: ty::PolyTraitRef<'tcx>) -> Option { - let cache = self.pick_evaluation_cache(); - cache.hashmap.borrow().get(&trait_ref).cloned() + if self.can_use_global_caches() { + let cache = 
self.tcx().evaluation_cache.hashmap.borrow(); + if let Some(cached) = cache.get(&trait_ref) { + return Some(cached.clone()); + } + } + self.infcx.evaluation_cache.hashmap.borrow().get(&trait_ref).cloned() } fn insert_evaluation_cache(&mut self, @@ -703,8 +748,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { return; } - let cache = self.pick_evaluation_cache(); - cache.hashmap.borrow_mut().insert(trait_ref, result); + if self.can_use_global_caches() { + let mut cache = self.tcx().evaluation_cache.hashmap.borrow_mut(); + if let Some(trait_ref) = self.tcx().lift_to_global(&trait_ref) { + cache.insert(trait_ref, result); + return; + } + } + + self.infcx.evaluation_cache.hashmap.borrow_mut().insert(trait_ref, result); } /////////////////////////////////////////////////////////////////////////// @@ -723,7 +775,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // not update) the cache. let recursion_limit = self.infcx.tcx.sess.recursion_limit.get(); if stack.obligation.recursion_depth >= recursion_limit { - report_overflow_error(self.infcx(), &stack.obligation, true); + self.infcx().report_overflow_error(&stack.obligation, true); } // Check the cache. Note that we skolemize the trait-ref @@ -913,7 +965,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { coherence::trait_ref_is_knowable(self.tcx(), trait_ref) } - fn pick_candidate_cache(&self) -> &SelectionCache<'tcx> { + /// Returns true if the global caches can be used. + /// Do note that if the type itself is not in the + /// global tcx, the local caches will be used. + fn can_use_global_caches(&self) -> bool { // If there are any where-clauses in scope, then we always use // a cache local to this particular scope. Otherwise, we // switch to a global cache. We used to try and draw @@ -922,7 +977,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // rule seems to be pretty clearly safe and also still retains // a very high hit rate (~95% when compiling rustc). if !self.param_env().caller_bounds.is_empty() { - return &self.param_env().selection_cache; + return false; } // Avoid using the master cache during coherence and just rely @@ -933,29 +988,43 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // it's not worth going to more trouble to increase the // hit-rate I don't think. if self.intercrate { - return &self.param_env().selection_cache; + return false; } // Otherwise, we can use the global cache. 
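The caching policy described above (global cache unless caller bounds or intercrate mode force a per-context cache) is applied twice below, once for the evaluation cache and once for the selection cache. The following is a self-contained sketch of the lookup/insert pattern using plain `HashMap`s rather than rustc's interned types; `can_use_global` and `liftable` are stand-ins for `can_use_global_caches()` and a successful `lift_to_global`, so this is illustrative only, not the compiler's actual data structures:

use std::cell::RefCell;
use std::collections::HashMap;
use std::hash::Hash;

// Hypothetical two-level cache mirroring the global-tcx / local-infcx split:
// the shared table is consulted first, and entries land there only when the
// caller may use it and the value can be lifted out of the local arena;
// otherwise they stay in the per-context table.
struct TwoLevelCache<K, V> {
    global: RefCell<HashMap<K, V>>,
    local: RefCell<HashMap<K, V>>,
}

impl<K: Hash + Eq, V: Clone> TwoLevelCache<K, V> {
    fn new() -> Self {
        TwoLevelCache {
            global: RefCell::new(HashMap::new()),
            local: RefCell::new(HashMap::new()),
        }
    }

    fn get(&self, key: &K, can_use_global: bool) -> Option<V> {
        if can_use_global {
            if let Some(v) = self.global.borrow().get(key) {
                return Some(v.clone());
            }
        }
        self.local.borrow().get(key).cloned()
    }

    fn insert(&self, key: K, value: V, can_use_global: bool, liftable: bool) {
        if can_use_global && liftable {
            self.global.borrow_mut().insert(key, value);
        } else {
            self.local.borrow_mut().insert(key, value);
        }
    }
}

fn main() {
    let cache = TwoLevelCache::new();
    cache.insert("Vec<u32>: Sized", true, /*can_use_global*/ true, /*liftable*/ true);
    cache.insert("T: Copy", false, /*can_use_global*/ false, /*liftable*/ false);
    assert_eq!(cache.get(&"Vec<u32>: Sized", true), Some(true));
    assert_eq!(cache.get(&"T: Copy", true), Some(false)); // falls back to the local table
}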
- &self.tcx().selection_cache + true } fn check_candidate_cache(&mut self, cache_fresh_trait_pred: &ty::PolyTraitPredicate<'tcx>) -> Option>> { - let cache = self.pick_candidate_cache(); - let hashmap = cache.hashmap.borrow(); - hashmap.get(&cache_fresh_trait_pred.0.trait_ref).cloned() + let trait_ref = &cache_fresh_trait_pred.0.trait_ref; + if self.can_use_global_caches() { + let cache = self.tcx().selection_cache.hashmap.borrow(); + if let Some(cached) = cache.get(&trait_ref) { + return Some(cached.clone()); + } + } + self.infcx.selection_cache.hashmap.borrow().get(trait_ref).cloned() } fn insert_candidate_cache(&mut self, cache_fresh_trait_pred: ty::PolyTraitPredicate<'tcx>, candidate: SelectionResult<'tcx, SelectionCandidate<'tcx>>) { - let cache = self.pick_candidate_cache(); - let mut hashmap = cache.hashmap.borrow_mut(); - hashmap.insert(cache_fresh_trait_pred.0.trait_ref.clone(), candidate); + let trait_ref = cache_fresh_trait_pred.0.trait_ref; + if self.can_use_global_caches() { + let mut cache = self.tcx().selection_cache.hashmap.borrow_mut(); + if let Some(trait_ref) = self.tcx().lift_to_global(&trait_ref) { + if let Some(candidate) = self.tcx().lift_to_global(&candidate) { + cache.insert(trait_ref, candidate); + return; + } + } + } + + self.infcx.selection_cache.hashmap.borrow_mut().insert(trait_ref, candidate); } fn should_update_candidate_cache(&mut self, @@ -1041,15 +1110,14 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.assemble_candidates_from_impls(obligation, &mut candidates)?; // For other types, we'll use the builtin rules. - self.assemble_builtin_bound_candidates(ty::BoundCopy, - obligation, - &mut candidates)?; + let copy_conditions = self.copy_conditions(obligation); + self.assemble_builtin_bound_candidates(copy_conditions, &mut candidates)?; } - Some(bound @ ty::BoundSized) => { + Some(ty::BoundSized) => { // Sized is never implementable by end-users, it is // always automatically computed. - self.assemble_builtin_bound_candidates(bound, - obligation, + let sized_conditions = self.sized_conditions(obligation); + self.assemble_builtin_bound_candidates(sized_conditions, &mut candidates)?; } @@ -1101,8 +1169,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { debug!("assemble_candidates_for_projected_tys: trait_def_id={:?}", trait_def_id); - let result = self.infcx.probe(|snapshot| { - self.match_projection_obligation_against_bounds_from_trait(obligation, + let result = self.probe(|this, snapshot| { + this.match_projection_obligation_against_bounds_from_trait(obligation, snapshot) }); @@ -1150,12 +1218,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { util::elaborate_predicates(self.tcx(), bounds.predicates.into_vec()) .filter_to_traits() .find( - |bound| self.infcx.probe( - |_| self.match_projection(obligation, - bound.clone(), - skol_trait_predicate.trait_ref.clone(), - &skol_map, - snapshot))); + |bound| self.probe( + |this, _| this.match_projection(obligation, + bound.clone(), + skol_trait_predicate.trait_ref.clone(), + &skol_map, + snapshot))); debug!("match_projection_obligation_against_bounds_from_trait: \ matching_bound={:?}", @@ -1190,13 +1258,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { trait_bound.clone(), ty::Binder(skol_trait_ref.clone())) { Ok(InferOk { obligations, .. 
}) => { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); + self.inferred_obligations.extend(obligations); } Err(_) => { return false; } } - self.infcx.leak_check(skol_map, snapshot).is_ok() + self.infcx.leak_check(false, skol_map, snapshot).is_ok() } /// Given an obligation like ``, search the obligations that the caller @@ -1233,10 +1300,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { where_clause_trait_ref: ty::PolyTraitRef<'tcx>) -> EvaluationResult { - self.infcx().probe(move |_| { - match self.match_where_clause_trait_ref(stack.obligation, where_clause_trait_ref) { + self.probe(move |this, _| { + match this.match_where_clause_trait_ref(stack.obligation, where_clause_trait_ref) { Ok(obligations) => { - self.evaluate_predicates_recursively(stack.list(), obligations.iter()) + this.evaluate_predicates_recursively(stack.list(), obligations.iter()) } Err(()) => EvaluatedToErr } @@ -1264,7 +1331,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // type/region parameters let self_ty = *obligation.self_ty().skip_binder(); let (closure_def_id, substs) = match self_ty.sty { - ty::TyClosure(id, ref substs) => (id, substs), + ty::TyClosure(id, substs) => (id, substs), ty::TyInfer(ty::TyVar(_)) => { debug!("assemble_unboxed_closure_candidates: ambiguous self-type"); candidates.ambiguous = true; @@ -1282,12 +1349,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { Some(closure_kind) => { debug!("assemble_unboxed_candidates: closure_kind = {:?}", closure_kind); if closure_kind.extends(kind) { - candidates.vec.push(ClosureCandidate(closure_def_id, substs)); + candidates.vec.push(ClosureCandidate(closure_def_id, substs, kind)); } } None => { debug!("assemble_unboxed_candidates: closure_kind not yet known"); - candidates.ambiguous = true; + candidates.vec.push(ClosureCandidate(closure_def_id, substs, kind)); } } @@ -1355,8 +1422,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { self.tcx(), obligation.predicate.0.trait_ref.self_ty(), |impl_def_id| { - self.infcx.probe(|snapshot| { - if let Ok(_) = self.match_impl(impl_def_id, obligation, snapshot) { + self.probe(|this, snapshot| { + if let Ok(_) = this.match_impl(impl_def_id, obligation, snapshot) { candidates.vec.push(ImplCandidate(impl_def_id)); } }); @@ -1438,36 +1505,36 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // these cases wind up being considered ambiguous due to a // (spurious) ambiguity introduced here. 
let predicate_trait_ref = obligation.predicate.to_poly_trait_ref(); - if !object_safety::is_object_safe(self.tcx(), predicate_trait_ref.def_id()) { + if !self.tcx().is_object_safe(predicate_trait_ref.def_id()) { return; } - self.infcx.commit_if_ok(|snapshot| { + self.probe(|this, snapshot| { let (self_ty, _) = - self.infcx().skolemize_late_bound_regions(&obligation.self_ty(), snapshot); + this.infcx().skolemize_late_bound_regions(&obligation.self_ty(), snapshot); let poly_trait_ref = match self_ty.sty { ty::TyTrait(ref data) => { - match self.tcx().lang_items.to_builtin_kind(obligation.predicate.def_id()) { + match this.tcx().lang_items.to_builtin_kind(obligation.predicate.def_id()) { Some(bound @ ty::BoundSend) | Some(bound @ ty::BoundSync) => { if data.bounds.builtin_bounds.contains(&bound) { debug!("assemble_candidates_from_object_ty: matched builtin bound, \ pushing candidate"); candidates.vec.push(BuiltinObjectCandidate); - return Ok(()); + return; } } _ => {} } - data.principal_trait_ref_with_self_ty(self.tcx(), self_ty) + data.principal_trait_ref_with_self_ty(this.tcx(), self_ty) } ty::TyInfer(ty::TyVar(_)) => { debug!("assemble_candidates_from_object_ty: ambiguous"); candidates.ambiguous = true; // could wind up being an object type - return Ok(()); + return; } _ => { - return Ok(()); + return; } }; @@ -1480,11 +1547,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // For example, we may be trying to upcast `Foo` to `Bar`, // but `Foo` is declared as `trait Foo : Bar`. let upcast_trait_refs = - util::supertraits(self.tcx(), poly_trait_ref) + util::supertraits(this.tcx(), poly_trait_ref) .filter(|upcast_trait_ref| { - self.infcx.probe(|_| { + this.probe(|this, _| { let upcast_trait_ref = upcast_trait_ref.clone(); - self.match_poly_trait_ref(obligation, upcast_trait_ref).is_ok() + this.match_poly_trait_ref(obligation, upcast_trait_ref).is_ok() }) }) .count(); @@ -1495,9 +1562,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } else if upcast_trait_refs == 1 { candidates.vec.push(ObjectCandidate); } - - Ok::<(),()>(()) - }).unwrap(); + }) } /// Search for unsizing that might apply to `obligation`. @@ -1618,7 +1683,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { BuiltinObjectCandidate | BuiltinUnsizeCandidate | DefaultImplObjectCandidate(..) | - BuiltinCandidate(..) => { + BuiltinCandidate { .. } => { // We have a where-clause so don't go around looking // for impls. true @@ -1637,7 +1702,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // i.e. EvaluatedToOk: if other.evaluation == EvaluatedToOk { if let ImplCandidate(victim_def) = victim.candidate { - return traits::specializes(self.tcx(), other_def, victim_def); + let tcx = self.tcx().global_tcx(); + return traits::specializes(tcx, other_def, victim_def); } } @@ -1656,229 +1722,131 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // those will hopefully change to library-defined traits in the // future. + // HACK: if this returns an error, selection exits without considering + // other impls. 
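The object-candidate assembly above handles obligations such as `Foo : Bar` where the self type is a trait object and `Bar` is a supertrait of its principal trait; the `vtable_base` computed from the non-matching supertraits records how far into the object's vtable `Bar`'s methods live. A hypothetical user-level example, written against the trait-object syntax of this era (`Foo`, `Bar`, and `Thing` are made-up names):

trait Bar { fn bar(&self) -> u32; }
trait Foo: Bar { fn foo(&self) -> u32; }

struct Thing;
impl Bar for Thing { fn bar(&self) -> u32 { 1 } }
impl Foo for Thing { fn foo(&self) -> u32 { 2 } }

// `T = Foo` (the object type) satisfies `T: Bar` through the supertrait;
// selecting that object candidate is what requires knowing the vtable offset
// at which `Bar`'s methods sit inside `Foo`'s vtable.
fn call_bar<T: Bar + ?Sized>(t: &T) -> u32 { t.bar() }

fn main() {
    let obj: Box<Foo> = Box::new(Thing);
    assert_eq!(obj.foo(), 2);
    assert_eq!(call_bar(&*obj), 1);
}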
fn assemble_builtin_bound_candidates<'o>(&mut self, - bound: ty::BuiltinBound, - obligation: &TraitObligation<'tcx>, + conditions: BuiltinImplConditions<'tcx>, candidates: &mut SelectionCandidateSet<'tcx>) -> Result<(),SelectionError<'tcx>> { - match self.builtin_bound(bound, obligation) { - Ok(If(..)) => { - debug!("builtin_bound: bound={:?}", - bound); - candidates.vec.push(BuiltinCandidate(bound)); + match conditions { + BuiltinImplConditions::Where(nested) => { + debug!("builtin_bound: nested={:?}", nested); + candidates.vec.push(BuiltinCandidate { + has_nested: nested.skip_binder().len() > 0 + }); Ok(()) } - Ok(ParameterBuiltin) => { Ok(()) } - Ok(AmbiguousBuiltin) => { + BuiltinImplConditions::None => { Ok(()) } + BuiltinImplConditions::Ambiguous => { debug!("assemble_builtin_bound_candidates: ambiguous builtin"); Ok(candidates.ambiguous = true) } - Err(e) => { Err(e) } + BuiltinImplConditions::Never => { Err(Unimplemented) } } } - fn builtin_bound(&mut self, - bound: ty::BuiltinBound, - obligation: &TraitObligation<'tcx>) - -> Result,SelectionError<'tcx>> + fn sized_conditions(&mut self, obligation: &TraitObligation<'tcx>) + -> BuiltinImplConditions<'tcx> { - // Note: these tests operate on types that may contain bound - // regions. To be proper, we ought to skolemize here, but we - // forego the skolemization and defer it until the - // confirmation step. + use self::BuiltinImplConditions::{Ambiguous, None, Never, Where}; - let self_ty = self.infcx.shallow_resolve(obligation.predicate.0.self_ty()); - return match self_ty.sty { - ty::TyInfer(ty::IntVar(_)) | - ty::TyInfer(ty::FloatVar(_)) | - ty::TyUint(_) | - ty::TyInt(_) | - ty::TyBool | - ty::TyFloat(_) | - ty::TyFnDef(..) | - ty::TyFnPtr(_) | - ty::TyChar => { + // NOTE: binder moved to (*) + let self_ty = self.infcx.shallow_resolve( + obligation.predicate.skip_binder().self_ty()); + + match self_ty.sty { + ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) | + ty::TyUint(_) | ty::TyInt(_) | ty::TyBool | ty::TyFloat(_) | + ty::TyFnDef(..) | ty::TyFnPtr(_) | ty::TyRawPtr(..) | + ty::TyChar | ty::TyBox(_) | ty::TyRef(..) | + ty::TyArray(..) | ty::TyClosure(..) | + ty::TyError => { // safe for everything - ok_if(Vec::new()) + Where(ty::Binder(Vec::new())) } - ty::TyBox(_) => { // Box - match bound { - ty::BoundCopy => Err(Unimplemented), + ty::TyStr | ty::TySlice(_) | ty::TyTrait(..) => Never, - ty::BoundSized => ok_if(Vec::new()), - - ty::BoundSync | ty::BoundSend => { - bug!("Send/Sync shouldn't occur in builtin_bounds()"); - } - } + ty::TyTuple(tys) => { + // FIXME(#33242) we only need to constrain the last field + Where(ty::Binder(tys.to_vec())) } - ty::TyRawPtr(..) => { // *const T, *mut T - match bound { - ty::BoundCopy | ty::BoundSized => ok_if(Vec::new()), - - ty::BoundSync | ty::BoundSend => { - bug!("Send/Sync shouldn't occur in builtin_bounds()"); - } - } + ty::TyStruct(def, substs) | ty::TyEnum(def, substs) => { + let sized_crit = def.sized_constraint(self.tcx()); + // (*) binder moved here + Where(ty::Binder(match sized_crit.sty { + ty::TyTuple(tys) => tys.to_vec().subst(self.tcx(), substs), + ty::TyBool => vec![], + _ => vec![sized_crit.subst(self.tcx(), substs)] + })) } - ty::TyTrait(ref data) => { - match bound { - ty::BoundSized => Err(Unimplemented), - ty::BoundCopy => { - if data.bounds.builtin_bounds.contains(&bound) { - ok_if(Vec::new()) - } else { - // Recursively check all supertraits to find out if any further - // bounds are required and thus we must fulfill. 
- let principal = - data.principal_trait_ref_with_self_ty(self.tcx(), - self.tcx().types.err); - let copy_def_id = obligation.predicate.def_id(); - for tr in util::supertraits(self.tcx(), principal) { - if tr.def_id() == copy_def_id { - return ok_if(Vec::new()) - } - } + ty::TyProjection(_) | ty::TyParam(_) => None, + ty::TyInfer(ty::TyVar(_)) => Ambiguous, - Err(Unimplemented) - } - } - ty::BoundSync | ty::BoundSend => { - bug!("Send/Sync shouldn't occur in builtin_bounds()"); - } - } + ty::TyInfer(ty::FreshTy(_)) + | ty::TyInfer(ty::FreshIntTy(_)) + | ty::TyInfer(ty::FreshFloatTy(_)) => { + bug!("asked to assemble builtin bounds of unexpected type: {:?}", + self_ty); } + } + } - ty::TyRef(_, ty::TypeAndMut { ty: _, mutbl }) => { - // &mut T or &T - match bound { - ty::BoundCopy => { - match mutbl { - // &mut T is affine and hence never `Copy` - hir::MutMutable => Err(Unimplemented), - - // &T is always copyable - hir::MutImmutable => ok_if(Vec::new()), - } - } - - ty::BoundSized => ok_if(Vec::new()), + fn copy_conditions(&mut self, obligation: &TraitObligation<'tcx>) + -> BuiltinImplConditions<'tcx> + { + // NOTE: binder moved to (*) + let self_ty = self.infcx.shallow_resolve( + obligation.predicate.skip_binder().self_ty()); - ty::BoundSync | ty::BoundSend => { - bug!("Send/Sync shouldn't occur in builtin_bounds()"); - } - } - } + use self::BuiltinImplConditions::{Ambiguous, None, Never, Where}; - ty::TyArray(element_ty, _) => { - // [T; n] - match bound { - ty::BoundCopy => ok_if(vec![element_ty]), - ty::BoundSized => ok_if(Vec::new()), - ty::BoundSync | ty::BoundSend => { - bug!("Send/Sync shouldn't occur in builtin_bounds()"); - } - } + match self_ty.sty { + ty::TyInfer(ty::IntVar(_)) | ty::TyInfer(ty::FloatVar(_)) | + ty::TyUint(_) | ty::TyInt(_) | ty::TyBool | ty::TyFloat(_) | + ty::TyFnDef(..) | ty::TyFnPtr(_) | ty::TyChar | + ty::TyRawPtr(..) | ty::TyError | + ty::TyRef(_, ty::TypeAndMut { ty: _, mutbl: hir::MutImmutable }) => { + Where(ty::Binder(Vec::new())) } - ty::TyStr | ty::TySlice(_) => { - match bound { - ty::BoundSync | ty::BoundSend => { - bug!("Send/Sync shouldn't occur in builtin_bounds()"); - } - - ty::BoundCopy | ty::BoundSized => Err(Unimplemented), - } + ty::TyBox(_) | ty::TyTrait(..) | ty::TyStr | ty::TySlice(..) | + ty::TyClosure(..) | + ty::TyRef(_, ty::TypeAndMut { ty: _, mutbl: hir::MutMutable }) => { + Never } - // (T1, ..., Tn) -- meets any bound that all of T1...Tn meet - ty::TyTuple(ref tys) => ok_if(tys.clone()), - - ty::TyClosure(_, ref substs) => { - // FIXME -- This case is tricky. In the case of by-ref - // closures particularly, we need the results of - // inference to decide how to reflect the type of each - // upvar (the upvar may have type `T`, but the runtime - // type could be `&mut`, `&`, or just `T`). For now, - // though, we'll do this unsoundly and assume that all - // captures are by value. Really what we ought to do - // is reserve judgement and then intertwine this - // analysis with closure inference. - - // Unboxed closures shouldn't be - // implicitly copyable - if bound == ty::BoundCopy { - return Ok(ParameterBuiltin); - } - - // Upvars are always local variables or references to - // local variables, and local variables cannot be - // unsized, so the closure struct as a whole must be - // Sized. 
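The `sized_conditions` helper introduced above reduces `Struct<..> : Sized` to the struct's `sized_constraint`, essentially its possibly-unsized tail field. A hypothetical user-level sketch of that rule (the names `Wrapper` and `require_sized` are illustrative only, not from the patch):

// A struct whose last field may be unsized: `Wrapper<T>: Sized` iff `T: Sized`.
struct Wrapper<T: ?Sized> {
    len: usize,
    tail: T,
}

fn require_sized<T>(_: &T) {}

fn main() {
    let w = Wrapper { len: 3, tail: [1u8, 2, 3] };
    require_sized(&w); // `[u8; 3]: Sized`, so `Wrapper<[u8; 3]>: Sized`
    let unsized_view: &Wrapper<[u8]> = &w; // unsizing coercion of the tail field
    // require_sized(unsized_view); // error: `Wrapper<[u8]>` does not have a constant size
    let _ = unsized_view.len;
}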
- if bound == ty::BoundSized { - return ok_if(Vec::new()); - } - - ok_if(substs.upvar_tys.clone()) + ty::TyArray(element_ty, _) => { + // (*) binder moved here + Where(ty::Binder(vec![element_ty])) } - ty::TyStruct(def, substs) | ty::TyEnum(def, substs) => { - let types: Vec = def.all_fields().map(|f| { - f.ty(self.tcx(), substs) - }).collect(); - nominal(bound, types) + ty::TyTuple(tys) => { + // (*) binder moved here + Where(ty::Binder(tys.to_vec())) } - ty::TyProjection(_) | ty::TyParam(_) => { - // Note: A type parameter is only considered to meet a - // particular bound if there is a where clause telling - // us that it does, and that case is handled by - // `assemble_candidates_from_caller_bounds()`. - Ok(ParameterBuiltin) + ty::TyStruct(..) | ty::TyEnum(..) | ty::TyProjection(..) | ty::TyParam(..) => { + // Fallback to whatever user-defined impls exist in this case. + None } ty::TyInfer(ty::TyVar(_)) => { // Unbound type variable. Might or might not have // applicable impls and so forth, depending on what // those type variables wind up being bound to. - debug!("assemble_builtin_bound_candidates: ambiguous builtin"); - Ok(AmbiguousBuiltin) + Ambiguous } - ty::TyError => ok_if(Vec::new()), - ty::TyInfer(ty::FreshTy(_)) | ty::TyInfer(ty::FreshIntTy(_)) | ty::TyInfer(ty::FreshFloatTy(_)) => { bug!("asked to assemble builtin bounds of unexpected type: {:?}", self_ty); } - }; - - fn ok_if<'tcx>(v: Vec>) - -> Result, SelectionError<'tcx>> { - Ok(If(ty::Binder(v))) - } - - fn nominal<'cx, 'tcx>(bound: ty::BuiltinBound, - types: Vec>) - -> Result, SelectionError<'tcx>> - { - // First check for markers and other nonsense. - match bound { - // Fallback to whatever user-defined impls exist in this case. - ty::BoundCopy => Ok(ParameterBuiltin), - - // Sized if all the component types are sized. - ty::BoundSized => ok_if(types), - - // Shouldn't be coming through here. - ty::BoundSend | ty::BoundSync => bug!(), - } } } @@ -1935,7 +1903,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { ty::TyTuple(ref tys) => { // (T1, ..., Tn) -- meets any bound that all of T1...Tn meet - tys.clone() + tys.to_vec() } ty::TyClosure(_, ref substs) => { @@ -1947,7 +1915,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // OIBIT interact? That is, there is no way to say // "make me invariant with respect to this TYPE, but // do not act as though I can reach it" - substs.upvar_tys.clone() + substs.upvar_tys.to_vec() } // for `PhantomData`, we pass `T` @@ -1964,20 +1932,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } fn collect_predicates_for_types(&mut self, - obligation: &TraitObligation<'tcx>, + cause: ObligationCause<'tcx>, + recursion_depth: usize, trait_def_id: DefId, types: ty::Binder>>) -> Vec> { - let derived_cause = match self.tcx().lang_items.to_builtin_kind(trait_def_id) { - Some(_) => { - self.derived_cause(obligation, BuiltinDerivedObligation) - }, - None => { - self.derived_cause(obligation, ImplDerivedObligation) - } - }; - // Because the types were potentially derived from // higher-ranked obligations they may reference late-bound // regions. For example, `for<'a> Foo<&'a int> : Copy` would @@ -1992,40 +1952,28 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // 2. Produce something like `&'0 int : Copy` // 3. 
Re-bind the regions back to `for<'a> &'a int : Copy` - // Move the binder into the individual types - let bound_types: Vec>> = - types.skip_binder() - .iter() - .map(|&nested_ty| ty::Binder(nested_ty)) - .collect(); + types.skip_binder().into_iter().flat_map(|ty| { // binder moved -\ + let ty: ty::Binder> = ty::Binder(ty); // <----------/ - // For each type, produce a vector of resulting obligations - let obligations: Result>, _> = bound_types.iter().map(|nested_ty| { - self.infcx.commit_if_ok(|snapshot| { + self.in_snapshot(|this, snapshot| { let (skol_ty, skol_map) = - self.infcx().skolemize_late_bound_regions(nested_ty, snapshot); + this.infcx().skolemize_late_bound_regions(&ty, snapshot); let Normalized { value: normalized_ty, mut obligations } = - project::normalize_with_depth(self, - obligation.cause.clone(), - obligation.recursion_depth + 1, + project::normalize_with_depth(this, + cause.clone(), + recursion_depth, &skol_ty); let skol_obligation = - util::predicate_for_trait_def(self.tcx(), - derived_cause.clone(), + this.tcx().predicate_for_trait_def( + cause.clone(), trait_def_id, - obligation.recursion_depth + 1, + recursion_depth, normalized_ty, vec![]); obligations.push(skol_obligation); - Ok(self.infcx().plug_leaks(skol_map, snapshot, &obligations)) + this.infcx().plug_leaks(skol_map, snapshot, &obligations) }) - }).collect(); - - // Flatten those vectors (couldn't do it above due `collect`) - match obligations { - Ok(obligations) => obligations.into_iter().flat_map(|o| o).collect(), - Err(ErrorReported) => Vec::new(), - } + }).collect() } /////////////////////////////////////////////////////////////////////////// @@ -2045,9 +1993,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { candidate); match candidate { - BuiltinCandidate(builtin_bound) => { + BuiltinCandidate { has_nested } => { Ok(VtableBuiltin( - self.confirm_builtin_candidate(obligation, builtin_bound)?)) + self.confirm_builtin_candidate(obligation, has_nested))) } ParamCandidate(param) => { @@ -2066,14 +2014,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } ImplCandidate(impl_def_id) => { - let vtable_impl = - self.confirm_impl_candidate(obligation, impl_def_id)?; - Ok(VtableImpl(vtable_impl)) + Ok(VtableImpl(self.confirm_impl_candidate(obligation, impl_def_id))) } - ClosureCandidate(closure_def_id, substs) => { + ClosureCandidate(closure_def_id, substs, kind) => { let vtable_closure = - self.confirm_closure_candidate(obligation, closure_def_id, substs)?; + self.confirm_closure_candidate(obligation, closure_def_id, substs, kind)?; Ok(VtableClosure(vtable_closure)) } @@ -2092,9 +2038,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { } FnPointerCandidate => { - let fn_type = + let data = self.confirm_fn_pointer_candidate(obligation)?; - Ok(VtableFnPointer(fn_type)) + Ok(VtableFnPointer(data)) } ProjectionCandidate => { @@ -2112,14 +2058,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { fn confirm_projection_candidate(&mut self, obligation: &TraitObligation<'tcx>) { - let _: Result<(),()> = - self.infcx.commit_if_ok(|snapshot| { - let result = - self.match_projection_obligation_against_bounds_from_trait(obligation, - snapshot); - assert!(result); - Ok(()) - }); + self.in_snapshot(|this, snapshot| { + let result = + this.match_projection_obligation_against_bounds_from_trait(obligation, + snapshot); + assert!(result); + }) } fn confirm_param_candidate(&mut self, @@ -2147,45 +2091,40 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { fn confirm_builtin_candidate(&mut self, obligation: &TraitObligation<'tcx>, - bound: 
ty::BuiltinBound) - -> Result>, - SelectionError<'tcx>> + has_nested: bool) + -> VtableBuiltinData> { - debug!("confirm_builtin_candidate({:?})", - obligation); - - match self.builtin_bound(bound, obligation)? { - If(nested) => Ok(self.vtable_builtin_data(obligation, bound, nested)), - AmbiguousBuiltin | ParameterBuiltin => { - span_bug!( - obligation.cause.span, - "builtin bound for {:?} was ambig", - obligation); - } - } - } - - fn vtable_builtin_data(&mut self, - obligation: &TraitObligation<'tcx>, - bound: ty::BuiltinBound, - nested: ty::Binder>>) - -> VtableBuiltinData> - { - debug!("vtable_builtin_data(obligation={:?}, bound={:?}, nested={:?})", - obligation, bound, nested); + debug!("confirm_builtin_candidate({:?}, {:?})", + obligation, has_nested); + + let obligations = if has_nested { + let trait_def = obligation.predicate.def_id(); + let conditions = match trait_def { + _ if Some(trait_def) == self.tcx().lang_items.sized_trait() => { + self.sized_conditions(obligation) + } + _ if Some(trait_def) == self.tcx().lang_items.copy_trait() => { + self.copy_conditions(obligation) + } + _ => bug!("unexpected builtin trait {:?}", trait_def) + }; + let nested = match conditions { + BuiltinImplConditions::Where(nested) => nested, + _ => bug!("obligation {:?} had matched a builtin impl but now doesn't", + obligation) + }; - let trait_def = match self.tcx().lang_items.from_builtin_kind(bound) { - Ok(def_id) => def_id, - Err(_) => { - bug!("builtin trait definition not found"); - } + let cause = self.derived_cause(obligation, BuiltinDerivedObligation); + self.collect_predicates_for_types(cause, + obligation.recursion_depth+1, + trait_def, + nested) + } else { + vec![] }; - let obligations = self.collect_predicates_for_types(obligation, trait_def, nested); - - debug!("vtable_builtin_data: obligations={:?}", + debug!("confirm_builtin_candidate: obligations={:?}", obligations); - VtableBuiltinData { nested: obligations } } @@ -2253,28 +2192,31 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { nested: ty::Binder>>) -> VtableDefaultImplData> { - debug!("vtable_default_impl_data: nested={:?}", nested); + debug!("vtable_default_impl: nested={:?}", nested); - let mut obligations = self.collect_predicates_for_types(obligation, - trait_def_id, - nested); + let cause = self.derived_cause(obligation, BuiltinDerivedObligation); + let mut obligations = self.collect_predicates_for_types( + cause, + obligation.recursion_depth+1, + trait_def_id, + nested); - let trait_obligations: Result,()> = self.infcx.commit_if_ok(|snapshot| { + let trait_obligations = self.in_snapshot(|this, snapshot| { let poly_trait_ref = obligation.predicate.to_poly_trait_ref(); let (trait_ref, skol_map) = - self.infcx().skolemize_late_bound_regions(&poly_trait_ref, snapshot); - Ok(self.impl_or_trait_obligations(obligation.cause.clone(), - obligation.recursion_depth + 1, - trait_def_id, - &trait_ref.substs, - skol_map, - snapshot)) + this.infcx().skolemize_late_bound_regions(&poly_trait_ref, snapshot); + let cause = this.derived_cause(obligation, ImplDerivedObligation); + this.impl_or_trait_obligations(cause, + obligation.recursion_depth + 1, + trait_def_id, + &trait_ref.substs, + skol_map, + snapshot) }); - // no Errors in that code above - obligations.append(&mut trait_obligations.unwrap()); + obligations.extend(trait_obligations); - debug!("vtable_default_impl_data: obligations={:?}", obligations); + debug!("vtable_default_impl: obligations={:?}", obligations); VtableDefaultImplData { trait_def_id: trait_def_id, @@ -2285,8 +2227,7 @@ 
impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { fn confirm_impl_candidate(&mut self, obligation: &TraitObligation<'tcx>, impl_def_id: DefId) - -> Result>, - SelectionError<'tcx>> + -> VtableImplData<'tcx, PredicateObligation<'tcx>> { debug!("confirm_impl_candidate({:?},{:?})", obligation, @@ -2294,19 +2235,21 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // First, create the substitutions by matching the impl again, // this time not in a probe. - self.infcx.commit_if_ok(|snapshot| { + self.in_snapshot(|this, snapshot| { let (substs, skol_map) = - self.rematch_impl(impl_def_id, obligation, + this.rematch_impl(impl_def_id, obligation, snapshot); debug!("confirm_impl_candidate substs={:?}", substs); - Ok(self.vtable_impl(impl_def_id, substs, obligation.cause.clone(), - obligation.recursion_depth + 1, skol_map, snapshot)) + let cause = this.derived_cause(obligation, ImplDerivedObligation); + this.vtable_impl(impl_def_id, substs, cause, + obligation.recursion_depth + 1, + skol_map, snapshot) }) } fn vtable_impl(&mut self, impl_def_id: DefId, - mut substs: Normalized<'tcx, Substs<'tcx>>, + mut substs: Normalized<'tcx, &'tcx Substs<'tcx>>, cause: ObligationCause<'tcx>, recursion_depth: usize, skol_map: infer::SkolemizationMap, @@ -2339,13 +2282,13 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { impl_obligations.append(&mut substs.obligations); VtableImplData { impl_def_id: impl_def_id, - substs: self.tcx().mk_substs(substs.value), + substs: substs.value, nested: impl_obligations } } fn confirm_object_candidate(&mut self, obligation: &TraitObligation<'tcx>) - -> VtableObjectData<'tcx> + -> VtableObjectData<'tcx, PredicateObligation<'tcx>> { debug!("confirm_object_candidate({:?})", obligation); @@ -2369,6 +2312,8 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let vtable_base; { + let tcx = self.tcx(); + // We want to find the first supertrait in the list of // supertraits that we can unify with, and do that // unification. We know that there is exactly one in the list @@ -2376,11 +2321,11 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // reported an ambiguity. (When we do find a match, also // record it for later.) let nonmatching = - util::supertraits(self.tcx(), poly_trait_ref) + util::supertraits(tcx, poly_trait_ref) .take_while(|&t| { match - self.infcx.commit_if_ok( - |_| self.match_poly_trait_ref(obligation, t)) + self.commit_if_ok( + |this, _| this.match_poly_trait_ref(obligation, t)) { Ok(_) => { upcast_trait_ref = Some(t); false } Err(_) => { true } @@ -2392,7 +2337,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // entries, so that we can compute the offset for the selected // trait. 
vtable_base = - nonmatching.map(|t| util::count_own_vtable_entries(self.tcx(), t)) + nonmatching.map(|t| tcx.count_own_vtable_entries(t)) .sum(); } @@ -2400,12 +2345,12 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { VtableObjectData { upcast_trait_ref: upcast_trait_ref.unwrap(), vtable_base: vtable_base, + nested: vec![] } } - fn confirm_fn_pointer_candidate(&mut self, - obligation: &TraitObligation<'tcx>) - -> Result,SelectionError<'tcx>> + fn confirm_fn_pointer_candidate(&mut self, obligation: &TraitObligation<'tcx>) + -> Result>, SelectionError<'tcx>> { debug!("confirm_fn_pointer_candidate({:?})", obligation); @@ -2414,23 +2359,23 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let self_ty = self.infcx.shallow_resolve(*obligation.self_ty().skip_binder()); let sig = self_ty.fn_sig(); let trait_ref = - util::closure_trait_ref_and_return_type(self.tcx(), - obligation.predicate.def_id(), - self_ty, - sig, - util::TupleArgumentsFlag::Yes) + self.tcx().closure_trait_ref_and_return_type(obligation.predicate.def_id(), + self_ty, + sig, + util::TupleArgumentsFlag::Yes) .map_bound(|(trait_ref, _)| trait_ref); self.confirm_poly_trait_refs(obligation.cause.clone(), obligation.predicate.to_poly_trait_ref(), trait_ref)?; - Ok(self_ty) + Ok(VtableFnPointerData { fn_ty: self_ty, nested: vec![] }) } fn confirm_closure_candidate(&mut self, obligation: &TraitObligation<'tcx>, closure_def_id: DefId, - substs: &ty::ClosureSubsts<'tcx>) + substs: ty::ClosureSubsts<'tcx>, + kind: ty::ClosureKind) -> Result>, SelectionError<'tcx>> { @@ -2441,7 +2386,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let Normalized { value: trait_ref, - obligations + mut obligations } = self.closure_trait_ref(obligation, closure_def_id, substs); debug!("confirm_closure_candidate(closure_def_id={:?}, trait_ref={:?}, obligations={:?})", @@ -2453,6 +2398,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { obligation.predicate.to_poly_trait_ref(), trait_ref)?; + obligations.push(Obligation::new( + obligation.cause.clone(), + ty::Predicate::ClosureKind(closure_def_id, kind))); + Ok(VtableClosureData { closure_def_id: closure_def_id, substs: substs.clone(), @@ -2498,8 +2447,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { origin, expected_trait_ref.clone(), obligation_trait_ref.clone()) - // FIXME(#32730) propagate obligations - .map(|InferOk { obligations, .. }| assert!(obligations.is_empty())) + .map(|InferOk { obligations, .. }| self.inferred_obligations.extend(obligations)) .map_err(|e| OutputTypeParameterMismatch(expected_trait_ref, obligation_trait_ref, e)) } @@ -2534,8 +2482,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let InferOk { obligations, .. } = self.infcx.sub_types(false, origin, new_trait, target) .map_err(|_| Unimplemented)?; - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); + self.inferred_obligations.extend(obligations); // Register one obligation for 'a: 'b. let cause = ObligationCause::new(obligation.cause.span, @@ -2550,9 +2497,16 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // T -> Trait. 
(_, &ty::TyTrait(ref data)) => { - let object_did = data.principal_def_id(); - if !object_safety::is_object_safe(tcx, object_did) { - return Err(TraitNotObjectSafe(object_did)); + let mut object_dids = Some(data.principal_def_id()).into_iter(); + // FIXME(#33243) +// data.bounds.builtin_bounds.iter().flat_map(|bound| { +// tcx.lang_items.from_builtin_kind(bound).ok() +// }) +// .chain(Some(data.principal_def_id())); + if let Some(did) = object_dids.find(|did| { + !tcx.is_object_safe(*did) + }) { + return Err(TraitNotObjectSafe(did)) } let cause = ObligationCause::new(obligation.cause.span, @@ -2576,7 +2530,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { // object type is Foo+Send, this would create an obligation // for the Send check.) for bound in &builtin_bounds { - if let Ok(tr) = util::trait_ref_for_builtin_bound(tcx, bound, source) { + if let Ok(tr) = tcx.trait_ref_for_builtin_bound(bound, source) { push(tr.to_predicate()); } else { return Err(Unimplemented); @@ -2601,8 +2555,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let InferOk { obligations, .. } = self.infcx.sub_types(false, origin, a, b) .map_err(|_| Unimplemented)?; - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); + self.inferred_obligations.extend(obligations); } // Struct -> Struct. @@ -2661,11 +2614,10 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { let InferOk { obligations, .. } = self.infcx.sub_types(false, origin, new_struct, target) .map_err(|_| Unimplemented)?; - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); + self.inferred_obligations.extend(obligations); // Construct the nested Field: Unsize> predicate. - nested.push(util::predicate_for_trait_def(tcx, + nested.push(tcx.predicate_for_trait_def( obligation.cause.clone(), obligation.predicate.def_id(), obligation.recursion_depth + 1, @@ -2693,7 +2645,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { impl_def_id: DefId, obligation: &TraitObligation<'tcx>, snapshot: &infer::CombinedSnapshot) - -> (Normalized<'tcx, Substs<'tcx>>, infer::SkolemizationMap) + -> (Normalized<'tcx, &'tcx Substs<'tcx>>, infer::SkolemizationMap) { match self.match_impl(impl_def_id, obligation, snapshot) { Ok((substs, skol_map)) => (substs, skol_map), @@ -2709,7 +2661,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { impl_def_id: DefId, obligation: &TraitObligation<'tcx>, snapshot: &infer::CombinedSnapshot) - -> Result<(Normalized<'tcx, Substs<'tcx>>, + -> Result<(Normalized<'tcx, &'tcx Substs<'tcx>>, infer::SkolemizationMap), ()> { let impl_trait_ref = self.tcx().impl_trait_ref(impl_def_id).unwrap(); @@ -2756,10 +2708,9 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { debug!("match_impl: failed eq_trait_refs due to `{}`", e); () })?; - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); + self.inferred_obligations.extend(obligations); - if let Err(e) = self.infcx.leak_check(&skol_map, snapshot) { + if let Err(e) = self.infcx.leak_check(false, &skol_map, snapshot) { debug!("match_impl: failed leak check due to `{}`", e); return Err(()); } @@ -2810,7 +2761,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { /// Returns `Ok` if `poly_trait_ref` being true implies that the /// obligation is satisfied. 
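The unsizing confirmation above notes that coercing to an object type such as `Foo+Send` registers an additional builtin-bound obligation for the `Send` check on top of the principal-trait obligation. A hypothetical sketch in the trait-object syntax of this era (`Job`, `Light`, and `NotSend` are made-up names):

use std::rc::Rc;

trait Job { fn run(&self); }

struct Light;
impl Job for Light { fn run(&self) {} }

struct NotSend(Rc<u8>);
impl Job for NotSend { fn run(&self) {} }

fn main() {
    // T -> Trait unsizing: coercing to `Box<Job + Send>` requires both
    // `Light: Job` and the builtin bound `Light: Send`, as described above.
    let _ok: Box<Job + Send> = Box::new(Light);
    // let _err: Box<Job + Send> = Box::new(NotSend(Rc::new(0))); // error: `Rc<u8>` is not `Send`
}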
- fn match_poly_trait_ref(&self, + fn match_poly_trait_ref(&mut self, obligation: &TraitObligation<'tcx>, poly_trait_ref: ty::PolyTraitRef<'tcx>) -> Result<(),()> @@ -2824,8 +2775,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { origin, poly_trait_ref, obligation.predicate.to_poly_trait_ref()) - // FIXME(#32730) propagate obligations - .map(|InferOk { obligations, .. }| assert!(obligations.is_empty())) + .map(|InferOk { obligations, .. }| self.inferred_obligations.extend(obligations)) .map_err(|_| ()) } @@ -2859,16 +2809,15 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { fn closure_trait_ref_unnormalized(&mut self, obligation: &TraitObligation<'tcx>, closure_def_id: DefId, - substs: &ty::ClosureSubsts<'tcx>) + substs: ty::ClosureSubsts<'tcx>) -> ty::PolyTraitRef<'tcx> { let closure_type = self.infcx.closure_type(closure_def_id, substs); let ty::Binder((trait_ref, _)) = - util::closure_trait_ref_and_return_type(self.tcx(), - obligation.predicate.def_id(), - obligation.predicate.0.self_ty(), // (1) - &closure_type.sig, - util::TupleArgumentsFlag::No); + self.tcx().closure_trait_ref_and_return_type(obligation.predicate.def_id(), + obligation.predicate.0.self_ty(), // (1) + &closure_type.sig, + util::TupleArgumentsFlag::No); // (1) Feels icky to skip the binder here, but OTOH we know // that the self-type is an unboxed closure type and hence is // in fact unparameterized (or at least does not reference any @@ -2881,7 +2830,7 @@ impl<'cx, 'tcx> SelectionContext<'cx, 'tcx> { fn closure_trait_ref(&mut self, obligation: &TraitObligation<'tcx>, closure_def_id: DefId, - substs: &ty::ClosureSubsts<'tcx>) + substs: ty::ClosureSubsts<'tcx>) -> Normalized<'tcx, ty::PolyTraitRef<'tcx>> { let trait_ref = self.closure_trait_ref_unnormalized( diff --git a/src/librustc/traits/specialize/mod.rs b/src/librustc/traits/specialize/mod.rs index 63647515a9..b2d14dab9a 100644 --- a/src/librustc/traits/specialize/mod.rs +++ b/src/librustc/traits/specialize/mod.rs @@ -20,9 +20,9 @@ use super::{SelectionContext, FulfillmentContext}; use super::util::{fresh_type_vars_for_impl, impl_trait_ref_and_oblig}; -use middle::cstore::CrateStore; +use rustc_data_structures::fnv::FnvHashMap; use hir::def_id::DefId; -use infer::{self, InferCtxt, TypeOrigin}; +use infer::{InferCtxt, TypeOrigin}; use middle::region; use ty::subst::{Subst, Substs}; use traits::{self, ProjectionMode, ObligationCause, Normalized}; @@ -32,10 +32,10 @@ use syntax::codemap::DUMMY_SP; pub mod specialization_graph; /// Information pertinent to an overlapping impl error. -pub struct Overlap<'a, 'tcx: 'a> { - pub in_context: InferCtxt<'a, 'tcx>, +pub struct OverlapError { pub with_impl: DefId, - pub on_trait_ref: ty::TraitRef<'tcx>, + pub trait_desc: String, + pub self_desc: Option } /// Given a subst for the requested impl, translate it to a subst @@ -73,11 +73,11 @@ pub struct Overlap<'a, 'tcx: 'a> { /// through associated type projection. We deal with such cases by using /// *fulfillment* to relate the two impls, requiring that all projections are /// resolved. 
-pub fn translate_substs<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - source_impl: DefId, - source_substs: &'tcx Substs<'tcx>, - target_node: specialization_graph::Node) - -> &'tcx Substs<'tcx> { +pub fn translate_substs<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + source_impl: DefId, + source_substs: &'tcx Substs<'tcx>, + target_node: specialization_graph::Node) + -> &'tcx Substs<'tcx> { let source_trait_ref = infcx.tcx .impl_trait_ref(source_impl) .unwrap() @@ -97,7 +97,7 @@ pub fn translate_substs<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, specializaiton failed to hold") }) } - specialization_graph::Node::Trait(..) => source_trait_ref.substs.clone(), + specialization_graph::Node::Trait(..) => source_trait_ref.substs, }; // directly inherent the method generics, since those do not vary across impls @@ -109,7 +109,13 @@ pub fn translate_substs<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, /// Specialization is determined by the sets of types to which the impls apply; /// impl1 specializes impl2 if it applies to a subset of the types impl2 applies /// to. -pub fn specializes(tcx: &TyCtxt, impl1_def_id: DefId, impl2_def_id: DefId) -> bool { +pub fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + impl1_def_id: DefId, + impl2_def_id: DefId) -> bool { + if let Some(r) = tcx.specializes_cache.borrow().check(impl1_def_id, impl2_def_id) { + return r; + } + // The feature gate should prevent introducing new specializations, but not // taking advantage of upstream ones. if !tcx.sess.features.borrow().specialization && @@ -134,8 +140,6 @@ pub fn specializes(tcx: &TyCtxt, impl1_def_id: DefId, impl2_def_id: DefId) -> bo return false; } - let mut infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables, ProjectionMode::Topmost); - // create a parameter environment corresponding to a (skolemized) instantiation of impl1 let scheme = tcx.lookup_item_type(impl1_def_id); let predicates = tcx.lookup_predicates(impl1_def_id); @@ -147,18 +151,31 @@ pub fn specializes(tcx: &TyCtxt, impl1_def_id: DefId, impl2_def_id: DefId) -> bo .unwrap() .subst(tcx, &penv.free_substs); - // Normalize the trait reference, adding any obligations that arise into the impl1 assumptions - let Normalized { value: impl1_trait_ref, obligations: normalization_obligations } = { - let selcx = &mut SelectionContext::new(&infcx); - traits::normalize(selcx, ObligationCause::dummy(), &impl1_trait_ref) - }; - penv.caller_bounds.extend(normalization_obligations.into_iter().map(|o| o.predicate)); + let result = tcx.normalizing_infer_ctxt(ProjectionMode::Topmost).enter(|mut infcx| { + // Normalize the trait reference, adding any obligations + // that arise into the impl1 assumptions. + let Normalized { value: impl1_trait_ref, obligations: normalization_obligations } = { + let selcx = &mut SelectionContext::new(&infcx); + traits::normalize(selcx, ObligationCause::dummy(), &impl1_trait_ref) + }; + penv.caller_bounds.extend(normalization_obligations.into_iter().map(|o| { + match tcx.lift_to_global(&o.predicate) { + Some(predicate) => predicate, + None => { + bug!("specializes: obligation `{:?}` has inference types/regions", o); + } + } + })); + + // Install the parameter environment, taking the predicates of impl1 as assumptions: + infcx.parameter_environment = penv; - // Install the parameter environment, taking the predicates of impl1 as assumptions: - infcx.parameter_environment = penv; + // Attempt to prove that impl2 applies, given all of the above. 
+ fulfill_implication(&infcx, impl1_trait_ref, impl2_def_id).is_ok() + }); - // Attempt to prove that impl2 applies, given all of the above. - fulfill_implication(&infcx, impl1_trait_ref, impl2_def_id).is_ok() + tcx.specializes_cache.borrow_mut().insert(impl1_def_id, impl2_def_id, result); + result } /// Attempt to fulfill all obligations of `target_impl` after unification with @@ -166,10 +183,10 @@ pub fn specializes(tcx: &TyCtxt, impl1_def_id: DefId, impl2_def_id: DefId) -> bo /// generics of `target_impl`, including both those needed to unify with /// `source_trait_ref` and those whose identity is determined via a where /// clause in the impl. -fn fulfill_implication<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - source_trait_ref: ty::TraitRef<'tcx>, - target_impl: DefId) - -> Result, ()> { +fn fulfill_implication<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + source_trait_ref: ty::TraitRef<'tcx>, + target_impl: DefId) + -> Result<&'tcx Substs<'tcx>, ()> { infcx.commit_if_ok(|_| { let selcx = &mut SelectionContext::new(&infcx); let target_substs = fresh_type_vars_for_impl(&infcx, DUMMY_SP, target_impl); @@ -178,11 +195,10 @@ fn fulfill_implication<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, &target_substs); // do the impls unify? If not, no specialization. - if let Err(_) = infer::mk_eq_trait_refs(&infcx, - true, - TypeOrigin::Misc(DUMMY_SP), - source_trait_ref, - target_trait_ref) { + if let Err(_) = infcx.eq_trait_refs(true, + TypeOrigin::Misc(DUMMY_SP), + source_trait_ref, + target_trait_ref) { debug!("fulfill_implication: {:?} does not unify with {:?}", source_trait_ref, target_trait_ref); @@ -197,7 +213,7 @@ fn fulfill_implication<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, fulfill_cx.register_predicate_obligation(&infcx, oblig); } - if let Err(errors) = infer::drain_fulfillment_cx(&infcx, &mut fulfill_cx, &()) { + if let Err(errors) = infcx.drain_fulfillment_cx(&mut fulfill_cx, &()) { // no dice! debug!("fulfill_implication: for impls on {:?} and {:?}, could not fulfill: {:?} given \ {:?}", @@ -217,3 +233,23 @@ fn fulfill_implication<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, } }) } + +pub struct SpecializesCache { + map: FnvHashMap<(DefId, DefId), bool> +} + +impl SpecializesCache { + pub fn new() -> Self { + SpecializesCache { + map: FnvHashMap() + } + } + + pub fn check(&self, a: DefId, b: DefId) -> Option { + self.map.get(&(a, b)).cloned() + } + + pub fn insert(&mut self, a: DefId, b: DefId, result: bool) { + self.map.insert((a, b), result); + } +} diff --git a/src/librustc/traits/specialize/specialization_graph.rs b/src/librustc/traits/specialize/specialization_graph.rs index 3c65e368db..ae7deb48f8 100644 --- a/src/librustc/traits/specialize/specialization_graph.rs +++ b/src/librustc/traits/specialize/specialization_graph.rs @@ -11,11 +11,9 @@ use std::cell; use std::rc::Rc; -use super::{Overlap, specializes}; +use super::{OverlapError, specializes}; -use middle::cstore::CrateStore; use hir::def_id::DefId; -use infer; use traits::{self, ProjectionMode}; use ty::{self, TyCtxt, ImplOrTraitItem, TraitDef, TypeFoldable}; use ty::fast_reject::{self, SimplifiedType}; @@ -67,7 +65,7 @@ struct Children { } /// The result of attempting to insert an impl into a group of children. -enum InsertResult<'a, 'tcx: 'a> { +enum Inserted { /// The impl was inserted as a new child in this group of children. BecameNewSibling, @@ -76,13 +74,9 @@ enum InsertResult<'a, 'tcx: 'a> { /// The impl is a specialization of an existing child. 
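`specializes` above asks whether `impl1` applies to a subset of the types `impl2` applies to, and the new `SpecializesCache` memoizes the answer per `(DefId, DefId)` pair. At the user level the relation looks like the following hypothetical sketch, which needs a nightly toolchain of this era with the `specialization` feature gate enabled:

#![feature(specialization)]

trait Describe {
    fn describe(&self) -> String;
}

// The blanket impl applies to every `T`; marking the item `default` allows
// more specific impls to override it.
impl<T> Describe for T {
    default fn describe(&self) -> String {
        "something".to_string()
    }
}

// This impl applies only to `String`, a strict subset of all `T`, so it
// specializes the blanket impl (the `le && !ge` case in the graph code).
impl Describe for String {
    fn describe(&self) -> String {
        format!("the string {:?}", self)
    }
}

fn main() {
    println!("{}", 42u32.describe());
    println!("{}", "hi".to_string().describe());
}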
ShouldRecurseOn(DefId), - - /// The impl has an unresolvable overlap with an existing child (neither - /// specializes the other). - Overlapped(Overlap<'a, 'tcx>), } -impl Children { +impl<'a, 'gcx, 'tcx> Children { fn new() -> Children { Children { nonblanket_impls: FnvHashMap(), @@ -91,7 +85,9 @@ impl Children { } /// Insert an impl into this set of children without comparing to any existing impls - fn insert_blindly(&mut self, tcx: &TyCtxt, impl_def_id: DefId) { + fn insert_blindly(&mut self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + impl_def_id: DefId) { let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); if let Some(sty) = fast_reject::simplify_type(tcx, trait_ref.self_ty(), false) { self.nonblanket_impls.entry(sty).or_insert(vec![]).push(impl_def_id) @@ -102,11 +98,11 @@ impl Children { /// Attempt to insert an impl into this set of children, while comparing for /// specialiation relationships. - fn insert<'a, 'tcx>(&mut self, - tcx: &'a TyCtxt<'tcx>, - impl_def_id: DefId, - simplified_self: Option) - -> InsertResult<'a, 'tcx> + fn insert(&mut self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + impl_def_id: DefId, + simplified_self: Option) + -> Result { for slot in match simplified_self { Some(sty) => self.filtered_mut(sty), @@ -114,56 +110,78 @@ impl Children { } { let possible_sibling = *slot; - let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, ProjectionMode::Topmost); - let overlap = traits::overlapping_impls(&infcx, possible_sibling, impl_def_id); - - if let Some(impl_header) = overlap { - let le = specializes(tcx, impl_def_id, possible_sibling); - let ge = specializes(tcx, possible_sibling, impl_def_id); + let tcx = tcx.global_tcx(); + let (le, ge) = tcx.infer_ctxt(None, None, + ProjectionMode::Topmost).enter(|infcx| { + let overlap = traits::overlapping_impls(&infcx, + possible_sibling, + impl_def_id); + if let Some(impl_header) = overlap { + let le = specializes(tcx, impl_def_id, possible_sibling); + let ge = specializes(tcx, possible_sibling, impl_def_id); + + if le == ge { + // overlap, but no specialization; error out + let trait_ref = impl_header.trait_ref.unwrap(); + Err(OverlapError { + with_impl: possible_sibling, + trait_desc: trait_ref.to_string(), + self_desc: trait_ref.substs.self_ty().and_then(|ty| { + // only report the Self type if it has at least + // some outer concrete shell; otherwise, it's + // not adding much information. 
+ if ty.has_concrete_skeleton() { + Some(ty.to_string()) + } else { + None + } + }) + }) + } else { + Ok((le, ge)) + } + } else { + Ok((false, false)) + } + })?; - if le && !ge { - debug!("descending as child of TraitRef {:?}", - tcx.impl_trait_ref(possible_sibling).unwrap()); + if le && !ge { + debug!("descending as child of TraitRef {:?}", + tcx.impl_trait_ref(possible_sibling).unwrap()); - // the impl specializes possible_sibling - return InsertResult::ShouldRecurseOn(possible_sibling); - } else if ge && !le { - debug!("placing as parent of TraitRef {:?}", - tcx.impl_trait_ref(possible_sibling).unwrap()); + // the impl specializes possible_sibling + return Ok(Inserted::ShouldRecurseOn(possible_sibling)); + } else if ge && !le { + debug!("placing as parent of TraitRef {:?}", + tcx.impl_trait_ref(possible_sibling).unwrap()); // possible_sibling specializes the impl *slot = impl_def_id; - return InsertResult::Replaced(possible_sibling); - } else { - // overlap, but no specialization; error out - return InsertResult::Overlapped(Overlap { - with_impl: possible_sibling, - on_trait_ref: impl_header.trait_ref.unwrap(), - in_context: infcx, - }); - } + return Ok(Inserted::Replaced(possible_sibling)); + } else { + // no overlap (error bailed already via ?) } } // no overlap with any potential siblings, so add as a new sibling debug!("placing as new sibling"); self.insert_blindly(tcx, impl_def_id); - InsertResult::BecameNewSibling + Ok(Inserted::BecameNewSibling) } - fn iter_mut<'a>(&'a mut self) -> Box + 'a> { + fn iter_mut(&'a mut self) -> Box + 'a> { let nonblanket = self.nonblanket_impls.iter_mut().flat_map(|(_, v)| v.iter_mut()); Box::new(self.blanket_impls.iter_mut().chain(nonblanket)) } - fn filtered_mut<'a>(&'a mut self, sty: SimplifiedType) - -> Box + 'a> { + fn filtered_mut(&'a mut self, sty: SimplifiedType) + -> Box + 'a> { let nonblanket = self.nonblanket_impls.entry(sty).or_insert(vec![]).iter_mut(); Box::new(self.blanket_impls.iter_mut().chain(nonblanket)) } } -impl Graph { +impl<'a, 'gcx, 'tcx> Graph { pub fn new() -> Graph { Graph { parent: Default::default(), @@ -174,10 +192,10 @@ impl Graph { /// Insert a local impl into the specialization graph. If an existing impl /// conflicts with it (has overlap, but neither specializes the other), /// information about the area of overlap is returned in the `Err`. - pub fn insert<'a, 'tcx>(&mut self, - tcx: &'a TyCtxt<'tcx>, - impl_def_id: DefId) - -> Result<(), Overlap<'a, 'tcx>> { + pub fn insert(&mut self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + impl_def_id: DefId) + -> Result<(), OverlapError> { assert!(impl_def_id.is_local()); let trait_ref = tcx.impl_trait_ref(impl_def_id).unwrap(); @@ -206,10 +224,10 @@ impl Graph { // Descend the specialization tree, where `parent` is the current parent node loop { - use self::InsertResult::*; + use self::Inserted::*; let insert_result = self.children.entry(parent).or_insert(Children::new()) - .insert(tcx, impl_def_id, simplified); + .insert(tcx, impl_def_id, simplified)?; match insert_result { BecameNewSibling => { @@ -225,9 +243,6 @@ impl Graph { ShouldRecurseOn(new_parent) => { parent = new_parent; } - Overlapped(error) => { - return Err(error); - } } } @@ -236,7 +251,10 @@ impl Graph { } /// Insert cached metadata mapping from a child impl back to its parent. 
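The `le == ge` branch above is the case where two impls overlap but neither specializes the other, which is reported as an `OverlapError`. A hypothetical user-level trigger, with the conflicting impl commented out so the sketch itself still compiles:

trait Greet { fn hi(&self); }

// The two impls below overlap on `Vec<String>` (a `String` is both `Clone`
// and `Default`), and neither applies to a subset of the other's types, so
// inserting the second one into the specialization graph would report an
// `OverlapError` (surfaced to the user as E0119).
impl<T: Clone> Greet for Vec<T> { fn hi(&self) {} }
// impl<T: Default> Greet for Vec<T> { fn hi(&self) {} } // error[E0119]: conflicting implementations

fn main() {
    vec![String::from("x")].hi();
}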
- pub fn record_impl_from_cstore(&mut self, tcx: &TyCtxt, parent: DefId, child: DefId) { + pub fn record_impl_from_cstore(&mut self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + parent: DefId, + child: DefId) { if self.parent.insert(child, parent).is_some() { bug!("When recording an impl from the crate store, information about its parent \ was already present."); @@ -261,7 +279,7 @@ pub enum Node { Trait(DefId), } -impl Node { +impl<'a, 'gcx, 'tcx> Node { pub fn is_from_trait(&self) -> bool { match *self { Node::Trait(..) => true, @@ -270,11 +288,11 @@ impl Node { } /// Iterate over the items defined directly by the given (impl or trait) node. - pub fn items<'a, 'tcx>(&self, tcx: &'a TyCtxt<'tcx>) -> NodeItems<'a, 'tcx> { + pub fn items(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> NodeItems<'a, 'gcx> { match *self { Node::Impl(impl_def_id) => { NodeItems::Impl { - tcx: tcx, + tcx: tcx.global_tcx(), items: cell::Ref::map(tcx.impl_items.borrow(), |impl_items| &impl_items[&impl_def_id]), idx: 0, @@ -300,7 +318,7 @@ impl Node { /// An iterator over the items defined within a trait or impl. pub enum NodeItems<'a, 'tcx: 'a> { Impl { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, items: cell::Ref<'a, Vec>, idx: usize, }, @@ -409,10 +427,10 @@ impl<'a, 'tcx> Iterator for ConstDefs<'a, 'tcx> { } } -impl<'a, 'tcx> Ancestors<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Ancestors<'a, 'tcx> { /// Search the items from the given ancestors, returning each type definition /// with the given name. - pub fn type_defs(self, tcx: &'a TyCtxt<'tcx>, name: Name) -> TypeDefs<'a, 'tcx> { + pub fn type_defs(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, name: Name) -> TypeDefs<'a, 'gcx> { let iter = self.flat_map(move |node| { node.items(tcx) .filter_map(move |item| { @@ -433,7 +451,7 @@ impl<'a, 'tcx> Ancestors<'a, 'tcx> { /// Search the items from the given ancestors, returning each fn definition /// with the given name. - pub fn fn_defs(self, tcx: &'a TyCtxt<'tcx>, name: Name) -> FnDefs<'a, 'tcx> { + pub fn fn_defs(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, name: Name) -> FnDefs<'a, 'gcx> { let iter = self.flat_map(move |node| { node.items(tcx) .filter_map(move |item| { @@ -454,7 +472,7 @@ impl<'a, 'tcx> Ancestors<'a, 'tcx> { /// Search the items from the given ancestors, returning each const /// definition with the given name. 
- pub fn const_defs(self, tcx: &'a TyCtxt<'tcx>, name: Name) -> ConstDefs<'a, 'tcx> { + pub fn const_defs(self, tcx: TyCtxt<'a, 'gcx, 'tcx>, name: Name) -> ConstDefs<'a, 'gcx> { let iter = self.flat_map(move |node| { node.items(tcx) .filter_map(move |item| { diff --git a/src/librustc/traits/structural_impls.rs b/src/librustc/traits/structural_impls.rs index 367e5f32ba..e210d2da94 100644 --- a/src/librustc/traits/structural_impls.rs +++ b/src/librustc/traits/structural_impls.rs @@ -10,6 +10,7 @@ use traits; use traits::project::Normalized; +use ty::{Lift, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; use std::fmt; @@ -98,11 +99,20 @@ impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableDefaultImplData { } } -impl<'tcx> fmt::Debug for traits::VtableObjectData<'tcx> { +impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableObjectData<'tcx, N> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "VtableObject(upcast={:?}, vtable_base={})", + write!(f, "VtableObject(upcast={:?}, vtable_base={}, nested={:?})", self.upcast_trait_ref, - self.vtable_base) + self.vtable_base, + self.nested) + } +} + +impl<'tcx, N: fmt::Debug> fmt::Debug for traits::VtableFnPointerData<'tcx, N> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + write!(f, "VtableFnPointer(fn_ty={:?}, nested={:?})", + self.fn_ty, + self.nested) } } @@ -130,9 +140,93 @@ impl<'tcx> fmt::Debug for traits::MismatchedProjectionTypes<'tcx> { } } +/////////////////////////////////////////////////////////////////////////// +// Lift implementations + +impl<'a, 'tcx> Lift<'tcx> for traits::SelectionError<'a> { + type Lifted = traits::SelectionError<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + match *self { + super::Unimplemented => Some(super::Unimplemented), + super::OutputTypeParameterMismatch(a, b, ref err) => { + tcx.lift(&(a, b)).and_then(|(a, b)| { + tcx.lift(err).map(|err| { + super::OutputTypeParameterMismatch(a, b, err) + }) + }) + } + super::TraitNotObjectSafe(def_id) => { + Some(super::TraitNotObjectSafe(def_id)) + } + } + } +} + +// For trans only. 
+impl<'a, 'tcx> Lift<'tcx> for traits::Vtable<'a, ()> { + type Lifted = traits::Vtable<'tcx, ()>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + match self.clone() { + traits::VtableImpl(traits::VtableImplData { + impl_def_id, + substs, + nested + }) => { + tcx.lift(&substs).map(|substs| { + traits::VtableImpl(traits::VtableImplData { + impl_def_id: impl_def_id, + substs: substs, + nested: nested + }) + }) + } + traits::VtableDefaultImpl(t) => Some(traits::VtableDefaultImpl(t)), + traits::VtableClosure(traits::VtableClosureData { + closure_def_id, + substs, + nested + }) => { + tcx.lift(&substs).map(|substs| { + traits::VtableClosure(traits::VtableClosureData { + closure_def_id: closure_def_id, + substs: substs, + nested: nested + }) + }) + } + traits::VtableFnPointer(traits::VtableFnPointerData { fn_ty, nested }) => { + tcx.lift(&fn_ty).map(|fn_ty| { + traits::VtableFnPointer(traits::VtableFnPointerData { + fn_ty: fn_ty, + nested: nested, + }) + }) + } + traits::VtableParam(n) => Some(traits::VtableParam(n)), + traits::VtableBuiltin(d) => Some(traits::VtableBuiltin(d)), + traits::VtableObject(traits::VtableObjectData { + upcast_trait_ref, + vtable_base, + nested + }) => { + tcx.lift(&upcast_trait_ref).map(|trait_ref| { + traits::VtableObject(traits::VtableObjectData { + upcast_trait_ref: trait_ref, + vtable_base: vtable_base, + nested: nested + }) + }) + } + } + } +} + +/////////////////////////////////////////////////////////////////////////// +// TypeFoldable implementations. + impl<'tcx, O: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::Obligation<'tcx, O> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { traits::Obligation { cause: self.cause.clone(), recursion_depth: self.recursion_depth, @@ -146,11 +240,10 @@ impl<'tcx, O: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::Obligation<'tcx } impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableImplData<'tcx, N> { - fn super_fold_with>(&self, folder: &mut F) -> Self { - let substs = self.substs.fold_with(folder); + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { traits::VtableImplData { impl_def_id: self.impl_def_id, - substs: folder.tcx().mk_substs(substs), + substs: self.substs.fold_with(folder), nested: self.nested.fold_with(folder), } } @@ -161,7 +254,7 @@ impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableImplData< } impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableClosureData<'tcx, N> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { traits::VtableClosureData { closure_def_id: self.closure_def_id, substs: self.substs.fold_with(folder), @@ -175,7 +268,7 @@ impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableClosureDa } impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableDefaultImplData { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { traits::VtableDefaultImplData { trait_def_id: self.trait_def_id, nested: self.nested.fold_with(folder), @@ -188,7 +281,7 @@ impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableDefaultIm } impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableBuiltinData { - fn super_fold_with>(&self, folder: &mut 
F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { traits::VtableBuiltinData { nested: self.nested.fold_with(folder), } @@ -199,21 +292,35 @@ impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableBuiltinDa } } -impl<'tcx> TypeFoldable<'tcx> for traits::VtableObjectData<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { +impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableObjectData<'tcx, N> { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { traits::VtableObjectData { upcast_trait_ref: self.upcast_trait_ref.fold_with(folder), - vtable_base: self.vtable_base + vtable_base: self.vtable_base, + nested: self.nested.fold_with(folder), + } + } + + fn super_visit_with>(&self, visitor: &mut V) -> bool { + self.upcast_trait_ref.visit_with(visitor) || self.nested.visit_with(visitor) + } +} + +impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::VtableFnPointerData<'tcx, N> { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { + traits::VtableFnPointerData { + fn_ty: self.fn_ty.fold_with(folder), + nested: self.nested.fold_with(folder), } } fn super_visit_with>(&self, visitor: &mut V) -> bool { - self.upcast_trait_ref.visit_with(visitor) + self.fn_ty.visit_with(visitor) || self.nested.visit_with(visitor) } } impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::Vtable<'tcx, N> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { match *self { traits::VtableImpl(ref v) => traits::VtableImpl(v.fold_with(folder)), traits::VtableDefaultImpl(ref t) => traits::VtableDefaultImpl(t.fold_with(folder)), @@ -243,7 +350,7 @@ impl<'tcx, N: TypeFoldable<'tcx>> TypeFoldable<'tcx> for traits::Vtable<'tcx, N> } impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Normalized<'tcx, T> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { Normalized { value: self.value.fold_with(folder), obligations: self.obligations.fold_with(folder), diff --git a/src/librustc/traits/util.rs b/src/librustc/traits/util.rs index 7668b8bf20..f8149565aa 100644 --- a/src/librustc/traits/util.rs +++ b/src/librustc/traits/util.rs @@ -18,13 +18,47 @@ use util::nodemap::FnvHashSet; use super::{Obligation, ObligationCause, PredicateObligation, SelectionContext, Normalized}; -struct PredicateSet<'a,'tcx:'a> { - tcx: &'a TyCtxt<'tcx>, +fn anonymize_predicate<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + pred: &ty::Predicate<'tcx>) + -> ty::Predicate<'tcx> { + match *pred { + ty::Predicate::Trait(ref data) => + ty::Predicate::Trait(tcx.anonymize_late_bound_regions(data)), + + ty::Predicate::Rfc1592(ref data) => + ty::Predicate::Rfc1592(Box::new(anonymize_predicate(tcx, data))), + + ty::Predicate::Equate(ref data) => + ty::Predicate::Equate(tcx.anonymize_late_bound_regions(data)), + + ty::Predicate::RegionOutlives(ref data) => + ty::Predicate::RegionOutlives(tcx.anonymize_late_bound_regions(data)), + + ty::Predicate::TypeOutlives(ref data) => + ty::Predicate::TypeOutlives(tcx.anonymize_late_bound_regions(data)), + + ty::Predicate::Projection(ref data) => + ty::Predicate::Projection(tcx.anonymize_late_bound_regions(data)), + + ty::Predicate::WellFormed(data) => + ty::Predicate::WellFormed(data), + + ty::Predicate::ObjectSafe(data) => + 
ty::Predicate::ObjectSafe(data), + + ty::Predicate::ClosureKind(closure_def_id, kind) => + ty::Predicate::ClosureKind(closure_def_id, kind) + } +} + + +struct PredicateSet<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx>, set: FnvHashSet>, } -impl<'a,'tcx> PredicateSet<'a,'tcx> { - fn new(tcx: &'a TyCtxt<'tcx>) -> PredicateSet<'a,'tcx> { +impl<'a, 'gcx, 'tcx> PredicateSet<'a, 'gcx, 'tcx> { + fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> PredicateSet<'a, 'gcx, 'tcx> { PredicateSet { tcx: tcx, set: FnvHashSet() } } @@ -39,29 +73,7 @@ impl<'a,'tcx> PredicateSet<'a,'tcx> { // // to be considered equivalent. So normalize all late-bound // regions before we throw things into the underlying set. - let normalized_pred = match *pred { - ty::Predicate::Trait(ref data) => - ty::Predicate::Trait(self.tcx.anonymize_late_bound_regions(data)), - - ty::Predicate::Equate(ref data) => - ty::Predicate::Equate(self.tcx.anonymize_late_bound_regions(data)), - - ty::Predicate::RegionOutlives(ref data) => - ty::Predicate::RegionOutlives(self.tcx.anonymize_late_bound_regions(data)), - - ty::Predicate::TypeOutlives(ref data) => - ty::Predicate::TypeOutlives(self.tcx.anonymize_late_bound_regions(data)), - - ty::Predicate::Projection(ref data) => - ty::Predicate::Projection(self.tcx.anonymize_late_bound_regions(data)), - - ty::Predicate::WellFormed(data) => - ty::Predicate::WellFormed(data), - - ty::Predicate::ObjectSafe(data) => - ty::Predicate::ObjectSafe(data), - }; - self.set.insert(normalized_pred) + self.set.insert(anonymize_predicate(self.tcx, pred)) } } @@ -76,24 +88,23 @@ impl<'a,'tcx> PredicateSet<'a,'tcx> { /// that `T : PartialOrd` holds as well. Similarly, if we have `trait /// Foo : 'static`, and we know that `T : Foo`, then we know that `T : /// 'static`. -pub struct Elaborator<'cx, 'tcx:'cx> { - tcx: &'cx TyCtxt<'tcx>, +pub struct Elaborator<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { stack: Vec>, - visited: PredicateSet<'cx,'tcx>, + visited: PredicateSet<'a, 'gcx, 'tcx>, } -pub fn elaborate_trait_ref<'cx, 'tcx>( - tcx: &'cx TyCtxt<'tcx>, +pub fn elaborate_trait_ref<'cx, 'gcx, 'tcx>( + tcx: TyCtxt<'cx, 'gcx, 'tcx>, trait_ref: ty::PolyTraitRef<'tcx>) - -> Elaborator<'cx, 'tcx> + -> Elaborator<'cx, 'gcx, 'tcx> { elaborate_predicates(tcx, vec![trait_ref.to_predicate()]) } -pub fn elaborate_trait_refs<'cx, 'tcx>( - tcx: &'cx TyCtxt<'tcx>, +pub fn elaborate_trait_refs<'cx, 'gcx, 'tcx>( + tcx: TyCtxt<'cx, 'gcx, 'tcx>, trait_refs: &[ty::PolyTraitRef<'tcx>]) - -> Elaborator<'cx, 'tcx> + -> Elaborator<'cx, 'gcx, 'tcx> { let predicates = trait_refs.iter() .map(|trait_ref| trait_ref.to_predicate()) @@ -101,31 +112,32 @@ pub fn elaborate_trait_refs<'cx, 'tcx>( elaborate_predicates(tcx, predicates) } -pub fn elaborate_predicates<'cx, 'tcx>( - tcx: &'cx TyCtxt<'tcx>, +pub fn elaborate_predicates<'cx, 'gcx, 'tcx>( + tcx: TyCtxt<'cx, 'gcx, 'tcx>, mut predicates: Vec>) - -> Elaborator<'cx, 'tcx> + -> Elaborator<'cx, 'gcx, 'tcx> { let mut visited = PredicateSet::new(tcx); predicates.retain(|pred| visited.insert(pred)); - Elaborator { tcx: tcx, stack: predicates, visited: visited } + Elaborator { stack: predicates, visited: visited } } -impl<'cx, 'tcx> Elaborator<'cx, 'tcx> { - pub fn filter_to_traits(self) -> FilterToTraits> { +impl<'cx, 'gcx, 'tcx> Elaborator<'cx, 'gcx, 'tcx> { + pub fn filter_to_traits(self) -> FilterToTraits { FilterToTraits::new(self) } fn push(&mut self, predicate: &ty::Predicate<'tcx>) { + let tcx = self.visited.tcx; match *predicate { ty::Predicate::Trait(ref data) => { // Predicates declared on the 
trait. - let predicates = self.tcx.lookup_super_predicates(data.def_id()); + let predicates = tcx.lookup_super_predicates(data.def_id()); let mut predicates: Vec<_> = predicates.predicates .iter() - .map(|p| p.subst_supertrait(self.tcx, &data.to_poly_trait_ref())) + .map(|p| p.subst_supertrait(tcx, &data.to_poly_trait_ref())) .collect(); debug!("super_predicates: data={:?} predicates={:?}", @@ -140,6 +152,9 @@ impl<'cx, 'tcx> Elaborator<'cx, 'tcx> { self.stack.extend(predicates); } + ty::Predicate::Rfc1592(..) => { + // Nothing to elaborate. + } ty::Predicate::WellFormed(..) => { // Currently, we do not elaborate WF predicates, // although we easily could. @@ -156,6 +171,9 @@ impl<'cx, 'tcx> Elaborator<'cx, 'tcx> { ty::Predicate::Projection(..) => { // Nothing to elaborate in a projection predicate. } + ty::Predicate::ClosureKind(..) => { + // Nothing to elaborate when waiting for a closure's kind to be inferred. + } ty::Predicate::RegionOutlives(..) | ty::Predicate::TypeOutlives(..) => { // Currently, we do not "elaborate" predicates like @@ -182,7 +200,7 @@ impl<'cx, 'tcx> Elaborator<'cx, 'tcx> { } } -impl<'cx, 'tcx> Iterator for Elaborator<'cx, 'tcx> { +impl<'cx, 'gcx, 'tcx> Iterator for Elaborator<'cx, 'gcx, 'tcx> { type Item = ty::Predicate<'tcx>; fn next(&mut self) -> Option> { @@ -203,18 +221,18 @@ impl<'cx, 'tcx> Iterator for Elaborator<'cx, 'tcx> { // Supertrait iterator /////////////////////////////////////////////////////////////////////////// -pub type Supertraits<'cx, 'tcx> = FilterToTraits>; +pub type Supertraits<'cx, 'gcx, 'tcx> = FilterToTraits>; -pub fn supertraits<'cx, 'tcx>(tcx: &'cx TyCtxt<'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>) - -> Supertraits<'cx, 'tcx> +pub fn supertraits<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>, + trait_ref: ty::PolyTraitRef<'tcx>) + -> Supertraits<'cx, 'gcx, 'tcx> { elaborate_trait_ref(tcx, trait_ref).filter_to_traits() } -pub fn transitive_bounds<'cx, 'tcx>(tcx: &'cx TyCtxt<'tcx>, - bounds: &[ty::PolyTraitRef<'tcx>]) - -> Supertraits<'cx, 'tcx> +pub fn transitive_bounds<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>, + bounds: &[ty::PolyTraitRef<'tcx>]) + -> Supertraits<'cx, 'gcx, 'tcx> { elaborate_trait_refs(tcx, bounds).filter_to_traits() } @@ -222,15 +240,15 @@ pub fn transitive_bounds<'cx, 'tcx>(tcx: &'cx TyCtxt<'tcx>, /////////////////////////////////////////////////////////////////////////// // Iterator over def-ids of supertraits -pub struct SupertraitDefIds<'cx, 'tcx:'cx> { - tcx: &'cx TyCtxt<'tcx>, +pub struct SupertraitDefIds<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx>, stack: Vec, visited: FnvHashSet, } -pub fn supertrait_def_ids<'cx, 'tcx>(tcx: &'cx TyCtxt<'tcx>, - trait_def_id: DefId) - -> SupertraitDefIds<'cx, 'tcx> +pub fn supertrait_def_ids<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>, + trait_def_id: DefId) + -> SupertraitDefIds<'cx, 'gcx, 'tcx> { SupertraitDefIds { tcx: tcx, @@ -239,7 +257,7 @@ pub fn supertrait_def_ids<'cx, 'tcx>(tcx: &'cx TyCtxt<'tcx>, } } -impl<'cx, 'tcx> Iterator for SupertraitDefIds<'cx, 'tcx> { +impl<'cx, 'gcx, 'tcx> Iterator for SupertraitDefIds<'cx, 'gcx, 'tcx> { type Item = DefId; fn next(&mut self) -> Option { @@ -302,11 +320,11 @@ impl<'tcx,I:Iterator>> Iterator for FilterToTraits { /// Instantiate all bound parameters of the impl with the given substs, /// returning the resulting trait ref and all obligations that arise. /// The obligations are closed under normalization. 
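The `Elaborator` and `SupertraitDefIds` iterators updated above are both worklist traversals: pop a predicate or def-id, push anything it implies, and deduplicate with a visited set so cyclic or diamond-shaped supertrait graphs terminate. A self-contained sketch of that traversal shape, using plain strings instead of predicates:

use std::collections::{HashMap, HashSet};

// Toy supertrait elaboration: walk the supertrait graph with an explicit
// stack, deduplicating with a visited set, as Elaborator does above.
fn elaborate(start: &'static str,
             supers: &HashMap<&'static str, Vec<&'static str>>) -> Vec<&'static str> {
    let mut visited: HashSet<&'static str> = HashSet::new();
    let mut stack = vec![start];
    let mut out = Vec::new();
    while let Some(tr) = stack.pop() {
        if !visited.insert(tr) {
            continue; // already elaborated
        }
        out.push(tr);
        if let Some(parents) = supers.get(tr) {
            stack.extend(parents.iter().copied());
        }
    }
    out
}

fn main() {
    let mut supers = HashMap::new();
    supers.insert("Ord", vec!["Eq", "PartialOrd"]);
    supers.insert("PartialOrd", vec!["PartialEq"]);
    supers.insert("Eq", vec!["PartialEq"]);
    let reachable = elaborate("Ord", &supers);
    assert!(reachable.contains(&"PartialEq")); // reached transitively
}

The signature updates continue below with `impl_trait_ref_and_oblig`.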
-pub fn impl_trait_ref_and_oblig<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, - impl_def_id: DefId, - impl_substs: &Substs<'tcx>) - -> (ty::TraitRef<'tcx>, - Vec>) +pub fn impl_trait_ref_and_oblig<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>, + impl_def_id: DefId, + impl_substs: &Substs<'tcx>) + -> (ty::TraitRef<'tcx>, + Vec>) { let impl_trait_ref = selcx.tcx().impl_trait_ref(impl_def_id).unwrap(); @@ -335,10 +353,10 @@ pub fn impl_trait_ref_and_oblig<'a,'tcx>(selcx: &mut SelectionContext<'a,'tcx>, // declared on the impl declaration e.g., `impl for Box<[(A,B)]>` // would return ($0, $1) where $0 and $1 are freshly instantiated type // variables. -pub fn fresh_type_vars_for_impl<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - span: Span, - impl_def_id: DefId) - -> Substs<'tcx> +pub fn fresh_type_vars_for_impl<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + span: Span, + impl_def_id: DefId) + -> &'tcx Substs<'tcx> { let tcx = infcx.tcx; let impl_generics = tcx.lookup_item_type(impl_def_id).generics; @@ -361,26 +379,6 @@ pub fn predicates_for_generics<'tcx>(cause: ObligationCause<'tcx>, }).collect() } -pub fn trait_ref_for_builtin_bound<'tcx>( - tcx: &TyCtxt<'tcx>, - builtin_bound: ty::BuiltinBound, - param_ty: Ty<'tcx>) - -> Result, ErrorReported> -{ - match tcx.lang_items.from_builtin_kind(builtin_bound) { - Ok(def_id) => { - Ok(ty::TraitRef { - def_id: def_id, - substs: tcx.mk_substs(Substs::empty().with_self_ty(param_ty)) - }) - } - Err(e) => { - tcx.sess.err(&e); - Err(ErrorReported) - } - } -} - pub fn predicate_for_trait_ref<'tcx>( cause: ObligationCause<'tcx>, trait_ref: ty::TraitRef<'tcx>, @@ -394,115 +392,131 @@ pub fn predicate_for_trait_ref<'tcx>( } } -pub fn predicate_for_trait_def<'tcx>( - tcx: &TyCtxt<'tcx>, - cause: ObligationCause<'tcx>, - trait_def_id: DefId, - recursion_depth: usize, - param_ty: Ty<'tcx>, - ty_params: Vec>) - -> PredicateObligation<'tcx> -{ - let trait_ref = ty::TraitRef { - def_id: trait_def_id, - substs: tcx.mk_substs(Substs::new_trait(ty_params, vec![], param_ty)) - }; - predicate_for_trait_ref(cause, trait_ref, recursion_depth) -} - -pub fn predicate_for_builtin_bound<'tcx>( - tcx: &TyCtxt<'tcx>, - cause: ObligationCause<'tcx>, - builtin_bound: ty::BuiltinBound, - recursion_depth: usize, - param_ty: Ty<'tcx>) - -> Result, ErrorReported> -{ - let trait_ref = trait_ref_for_builtin_bound(tcx, builtin_bound, param_ty)?; - Ok(predicate_for_trait_ref(cause, trait_ref, recursion_depth)) -} +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn trait_ref_for_builtin_bound(self, + builtin_bound: ty::BuiltinBound, + param_ty: Ty<'tcx>) + -> Result, ErrorReported> + { + match self.lang_items.from_builtin_kind(builtin_bound) { + Ok(def_id) => { + Ok(ty::TraitRef { + def_id: def_id, + substs: self.mk_substs(Substs::empty().with_self_ty(param_ty)) + }) + } + Err(e) => { + self.sess.err(&e); + Err(ErrorReported) + } + } + } -/// Cast a trait reference into a reference to one of its super -/// traits; returns `None` if `target_trait_def_id` is not a -/// supertrait. 
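The remainder of this file moves former free functions (`upcast`, `predicate_for_trait_def`, `count_own_vtable_entries`, ...) into an `impl TyCtxt` block, so the context is received as `self` instead of a leading `tcx` parameter. A minimal sketch of that free-function-to-method refactor, with illustrative names only:

#[derive(Copy, Clone)]
struct Ctxt<'a> {
    names: &'a [&'static str],
}

// Before: a free function threading the context as its first argument.
fn contains_name(cx: Ctxt, name: &str) -> bool {
    cx.names.iter().any(|n| *n == name)
}

// After: the same logic as an inherent method taking `self` by value,
// which is how the removed `upcast` below reappears as `upcast_choices`.
impl<'a> Ctxt<'a> {
    fn contains_name(self, name: &str) -> bool {
        self.names.iter().any(|n| *n == name)
    }
}

fn main() {
    let cx = Ctxt { names: &["Send", "Sync"][..] };
    assert_eq!(contains_name(cx, "Send"), cx.contains_name("Send"));
}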
-pub fn upcast<'tcx>(tcx: &TyCtxt<'tcx>, - source_trait_ref: ty::PolyTraitRef<'tcx>, - target_trait_def_id: DefId) - -> Vec> -{ - if source_trait_ref.def_id() == target_trait_def_id { - return vec![source_trait_ref]; // shorcut the most common case + pub fn predicate_for_trait_def(self, + cause: ObligationCause<'tcx>, + trait_def_id: DefId, + recursion_depth: usize, + param_ty: Ty<'tcx>, + ty_params: Vec>) + -> PredicateObligation<'tcx> + { + let trait_ref = ty::TraitRef { + def_id: trait_def_id, + substs: self.mk_substs(Substs::new_trait(ty_params, vec![], param_ty)) + }; + predicate_for_trait_ref(cause, trait_ref, recursion_depth) } - supertraits(tcx, source_trait_ref) - .filter(|r| r.def_id() == target_trait_def_id) - .collect() -} + pub fn predicate_for_builtin_bound(self, + cause: ObligationCause<'tcx>, + builtin_bound: ty::BuiltinBound, + recursion_depth: usize, + param_ty: Ty<'tcx>) + -> Result, ErrorReported> + { + let trait_ref = self.trait_ref_for_builtin_bound(builtin_bound, param_ty)?; + Ok(predicate_for_trait_ref(cause, trait_ref, recursion_depth)) + } -/// Given a trait `trait_ref`, returns the number of vtable entries -/// that come from `trait_ref`, excluding its supertraits. Used in -/// computing the vtable base for an upcast trait of a trait object. -pub fn count_own_vtable_entries<'tcx>(tcx: &TyCtxt<'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>) - -> usize { - let mut entries = 0; - // Count number of methods and add them to the total offset. - // Skip over associated types and constants. - for trait_item in &tcx.trait_items(trait_ref.def_id())[..] { - if let ty::MethodTraitItem(_) = *trait_item { - entries += 1; + /// Cast a trait reference into a reference to one of its super + /// traits; returns `None` if `target_trait_def_id` is not a + /// supertrait. + pub fn upcast_choices(self, + source_trait_ref: ty::PolyTraitRef<'tcx>, + target_trait_def_id: DefId) + -> Vec> + { + if source_trait_ref.def_id() == target_trait_def_id { + return vec![source_trait_ref]; // shorcut the most common case } + + supertraits(self, source_trait_ref) + .filter(|r| r.def_id() == target_trait_def_id) + .collect() } - entries -} -/// Given an upcast trait object described by `object`, returns the -/// index of the method `method_def_id` (which should be part of -/// `object.upcast_trait_ref`) within the vtable for `object`. -pub fn get_vtable_index_of_object_method<'tcx>(tcx: &TyCtxt<'tcx>, - object: &super::VtableObjectData<'tcx>, - method_def_id: DefId) -> usize { - // Count number of methods preceding the one we are selecting and - // add them to the total offset. - // Skip over associated types and constants. - let mut entries = object.vtable_base; - for trait_item in &tcx.trait_items(object.upcast_trait_ref.def_id())[..] { - if trait_item.def_id() == method_def_id { - // The item with the ID we were given really ought to be a method. - assert!(match *trait_item { - ty::MethodTraitItem(_) => true, - _ => false - }); - - return entries; + /// Given a trait `trait_ref`, returns the number of vtable entries + /// that come from `trait_ref`, excluding its supertraits. Used in + /// computing the vtable base for an upcast trait of a trait object. + pub fn count_own_vtable_entries(self, trait_ref: ty::PolyTraitRef<'tcx>) -> usize { + let mut entries = 0; + // Count number of methods and add them to the total offset. + // Skip over associated types and constants. + for trait_item in &self.trait_items(trait_ref.def_id())[..] 
{ + if let ty::MethodTraitItem(_) = *trait_item { + entries += 1; + } } - if let ty::MethodTraitItem(_) = *trait_item { - entries += 1; + entries + } + + /// Given an upcast trait object described by `object`, returns the + /// index of the method `method_def_id` (which should be part of + /// `object.upcast_trait_ref`) within the vtable for `object`. + pub fn get_vtable_index_of_object_method(self, + object: &super::VtableObjectData<'tcx, N>, + method_def_id: DefId) -> usize { + // Count number of methods preceding the one we are selecting and + // add them to the total offset. + // Skip over associated types and constants. + let mut entries = object.vtable_base; + for trait_item in &self.trait_items(object.upcast_trait_ref.def_id())[..] { + if trait_item.def_id() == method_def_id { + // The item with the ID we were given really ought to be a method. + assert!(match *trait_item { + ty::MethodTraitItem(_) => true, + _ => false + }); + + return entries; + } + if let ty::MethodTraitItem(_) = *trait_item { + entries += 1; + } } + + bug!("get_vtable_index_of_object_method: {:?} was not found", + method_def_id); } - bug!("get_vtable_index_of_object_method: {:?} was not found", - method_def_id); + pub fn closure_trait_ref_and_return_type(self, + fn_trait_def_id: DefId, + self_ty: Ty<'tcx>, + sig: &ty::PolyFnSig<'tcx>, + tuple_arguments: TupleArgumentsFlag) + -> ty::Binder<(ty::TraitRef<'tcx>, Ty<'tcx>)> + { + let arguments_tuple = match tuple_arguments { + TupleArgumentsFlag::No => sig.0.inputs[0], + TupleArgumentsFlag::Yes => self.mk_tup(sig.0.inputs.to_vec()), + }; + let trait_substs = Substs::new_trait(vec![arguments_tuple], vec![], self_ty); + let trait_ref = ty::TraitRef { + def_id: fn_trait_def_id, + substs: self.mk_substs(trait_substs), + }; + ty::Binder((trait_ref, sig.0.output.unwrap_or(self.mk_nil()))) + } } pub enum TupleArgumentsFlag { Yes, No } - -pub fn closure_trait_ref_and_return_type<'tcx>( - tcx: &TyCtxt<'tcx>, - fn_trait_def_id: DefId, - self_ty: Ty<'tcx>, - sig: &ty::PolyFnSig<'tcx>, - tuple_arguments: TupleArgumentsFlag) - -> ty::Binder<(ty::TraitRef<'tcx>, Ty<'tcx>)> -{ - let arguments_tuple = match tuple_arguments { - TupleArgumentsFlag::No => sig.0.inputs[0], - TupleArgumentsFlag::Yes => tcx.mk_tup(sig.0.inputs.to_vec()), - }; - let trait_substs = Substs::new_trait(vec![arguments_tuple], vec![], self_ty); - let trait_ref = ty::TraitRef { - def_id: fn_trait_def_id, - substs: tcx.mk_substs(trait_substs), - }; - ty::Binder((trait_ref, sig.0.output.unwrap_or(tcx.mk_nil()))) -} diff --git a/src/librustc/ty/_match.rs b/src/librustc/ty/_match.rs index d0ccc3e0fd..39dba57c47 100644 --- a/src/librustc/ty/_match.rs +++ b/src/librustc/ty/_match.rs @@ -28,26 +28,26 @@ use ty::relate::{self, Relate, TypeRelation, RelateResult}; /// Like subtyping, matching is really a binary relation, so the only /// important thing about the result is Ok/Err. Also, matching never /// affects any type variables or unification state. 
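The vtable bookkeeping moved into `TyCtxt` above follows a simple rule: a method's slot is the object's `vtable_base` plus the number of methods that precede it, while associated types and consts occupy no slot. A toy version of that counting, with invented item names and without any rustc types:

enum TraitItem {
    Method(&'static str),
    AssocType(&'static str),
}

// Toy `get_vtable_index_of_object_method`: count preceding method slots.
fn vtable_index(vtable_base: usize, items: &[TraitItem], wanted: &str) -> Option<usize> {
    let mut entries = vtable_base;
    for item in items {
        if let TraitItem::Method(name) = *item {
            if name == wanted {
                return Some(entries);
            }
            entries += 1; // only methods consume a vtable slot
        }
    }
    None
}

fn main() {
    let items = [
        TraitItem::AssocType("Output"),
        TraitItem::Method("len"),
        TraitItem::Method("get"),
    ];
    assert_eq!(vtable_index(3, &items, "get"), Some(4)); // base 3 + one preceding method
}

The `Match` relation below then picks up the same `'gcx`/`'tcx` split.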
-pub struct Match<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx> +pub struct Match<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx> } -impl<'a, 'tcx> Match<'a, 'tcx> { - pub fn new(tcx: &'a TyCtxt<'tcx>) -> Match<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Match<'a, 'gcx, 'tcx> { + pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Match<'a, 'gcx, 'tcx> { Match { tcx: tcx } } } -impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Match<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> TypeRelation<'a, 'gcx, 'tcx> for Match<'a, 'gcx, 'tcx> { fn tag(&self) -> &'static str { "Match" } - fn tcx(&self) -> &'a TyCtxt<'tcx> { self.tcx } + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.tcx } fn a_is_expected(&self) -> bool { true } // irrelevant - fn relate_with_variance>(&mut self, - _: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T> + fn relate_with_variance>(&mut self, + _: ty::Variance, + a: &T, + b: &T) + -> RelateResult<'tcx, T> { self.relate(a, b) } @@ -89,7 +89,7 @@ impl<'a, 'tcx> TypeRelation<'a, 'tcx> for Match<'a, 'tcx> { fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) -> RelateResult<'tcx, ty::Binder> - where T: Relate<'a,'tcx> + where T: Relate<'tcx> { Ok(ty::Binder(self.relate(a.skip_binder(), b.skip_binder())?)) } diff --git a/src/librustc/ty/adjustment.rs b/src/librustc/ty/adjustment.rs index 41008823c8..71e4903134 100644 --- a/src/librustc/ty/adjustment.rs +++ b/src/librustc/ty/adjustment.rs @@ -136,9 +136,10 @@ pub enum CustomCoerceUnsized { Struct(usize) } -impl<'tcx> ty::TyS<'tcx> { +impl<'a, 'gcx, 'tcx> ty::TyS<'tcx> { /// See `expr_ty_adjusted` - pub fn adjust(&'tcx self, cx: &TyCtxt<'tcx>, + pub fn adjust(&'tcx self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, span: Span, expr_id: ast::NodeId, adjustment: Option<&AutoAdjustment<'tcx>>, @@ -155,9 +156,7 @@ impl<'tcx> ty::TyS<'tcx> { match *adjustment { AdjustReifyFnPointer => { match self.sty { - ty::TyFnDef(_, _, b) => { - cx.mk_ty(ty::TyFnPtr(b)) - } + ty::TyFnDef(_, _, f) => tcx.mk_fn_ptr(f), _ => { bug!("AdjustReifyFnPointer adjustment on non-fn-item: {:?}", self); @@ -167,7 +166,7 @@ impl<'tcx> ty::TyS<'tcx> { AdjustUnsafeFnPointer => { match self.sty { - ty::TyFnPtr(b) => cx.safe_to_unsafe_fn_ty(b), + ty::TyFnPtr(b) => tcx.safe_to_unsafe_fn_ty(b), ref b => { bug!("AdjustUnsafeFnPointer adjustment on non-fn-ptr: {:?}", b); @@ -177,7 +176,7 @@ impl<'tcx> ty::TyS<'tcx> { AdjustMutToConstPointer => { match self.sty { - ty::TyRawPtr(mt) => cx.mk_ptr(ty::TypeAndMut { + ty::TyRawPtr(mt) => tcx.mk_ptr(ty::TypeAndMut { ty: mt.ty, mutbl: hir::MutImmutable }), @@ -194,7 +193,7 @@ impl<'tcx> ty::TyS<'tcx> { if !adjusted_ty.references_error() { for i in 0..adj.autoderefs { adjusted_ty = - adjusted_ty.adjust_for_autoderef(cx, + adjusted_ty.adjust_for_autoderef(tcx, expr_id, span, i as u32, @@ -205,7 +204,7 @@ impl<'tcx> ty::TyS<'tcx> { if let Some(target) = adj.unsize { target } else { - adjusted_ty.adjust_for_autoref(cx, adj.autoref) + adjusted_ty.adjust_for_autoref(tcx, adj.autoref) } } } @@ -215,7 +214,7 @@ impl<'tcx> ty::TyS<'tcx> { } pub fn adjust_for_autoderef(&'tcx self, - cx: &TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'gcx, 'tcx>, expr_id: ast::NodeId, expr_span: Span, autoderef: u32, // how many autoderefs so far? @@ -228,7 +227,7 @@ impl<'tcx> ty::TyS<'tcx> { if let Some(method_ty) = method_type(method_call) { // Method calls always have all late-bound regions // fully instantiated. 
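The `adjust` logic above threads a type through an adjustment: a kind-specific rewrite (reify, unsafe-cast, mut-to-const) or a derefence chain followed by an optional autoref. A simplified, self-contained sketch of that pipeline over a toy type enum (not rustc's `Ty`):

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Ref(Box<Ty>),
    RawPtr(Box<Ty>),
    Int,
}

// Toy `adjust_for_autoderef` / `adjust_for_autoref`: peel one layer per
// autoderef step, then optionally re-borrow the result.
fn adjust(mut ty: Ty, autoderefs: usize, autoref: bool) -> Ty {
    for _ in 0..autoderefs {
        ty = match ty {
            Ty::Ref(inner) | Ty::RawPtr(inner) => *inner,
            other => other, // nothing to deref; leave unchanged
        };
    }
    if autoref { Ty::Ref(Box::new(ty)) } else { ty }
}

fn main() {
    let t = Ty::Ref(Box::new(Ty::Ref(Box::new(Ty::Int))));
    assert_eq!(adjust(t, 2, true), Ty::Ref(Box::new(Ty::Int)));
}

The remaining hunk continues inside `adjust_for_autoderef` below.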
- let fn_ret = cx.no_late_bound_regions(&method_ty.fn_ret()).unwrap(); + let fn_ret = tcx.no_late_bound_regions(&method_ty.fn_ret()).unwrap(); adjusted_ty = fn_ret.unwrap(); } match adjusted_ty.builtin_deref(true, NoPreference) { @@ -243,16 +242,16 @@ impl<'tcx> ty::TyS<'tcx> { } } - pub fn adjust_for_autoref(&'tcx self, cx: &TyCtxt<'tcx>, + pub fn adjust_for_autoref(&'tcx self, tcx: TyCtxt<'a, 'gcx, 'tcx>, autoref: Option>) -> Ty<'tcx> { match autoref { None => self, Some(AutoPtr(r, m)) => { - cx.mk_ref(r, TypeAndMut { ty: self, mutbl: m }) + tcx.mk_ref(r, TypeAndMut { ty: self, mutbl: m }) } Some(AutoUnsafe(m)) => { - cx.mk_ptr(TypeAndMut { ty: self, mutbl: m }) + tcx.mk_ptr(TypeAndMut { ty: self, mutbl: m }) } } } diff --git a/src/librustc/ty/contents.rs b/src/librustc/ty/contents.rs index f5dedb1324..33b33092b2 100644 --- a/src/librustc/ty/contents.rs +++ b/src/librustc/ty/contents.rs @@ -89,7 +89,7 @@ impl TypeContents { self.intersects(TC::InteriorUnsafe) } - pub fn needs_drop(&self, _: &TyCtxt) -> bool { + pub fn needs_drop(&self, _: TyCtxt) -> bool { self.intersects(TC::NeedsDrop) } @@ -139,15 +139,15 @@ impl fmt::Debug for TypeContents { } } -impl<'tcx> ty::TyS<'tcx> { - pub fn type_contents(&'tcx self, cx: &TyCtxt<'tcx>) -> TypeContents { - return cx.tc_cache.memoize(self, || tc_ty(cx, self, &mut FnvHashMap())); +impl<'a, 'tcx> ty::TyS<'tcx> { + pub fn type_contents(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> TypeContents { + return tcx.tc_cache.memoize(self, || tc_ty(tcx, self, &mut FnvHashMap())); - fn tc_ty<'tcx>(cx: &TyCtxt<'tcx>, - ty: Ty<'tcx>, - cache: &mut FnvHashMap, TypeContents>) -> TypeContents + fn tc_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty: Ty<'tcx>, + cache: &mut FnvHashMap, TypeContents>) -> TypeContents { - // Subtle: Note that we are *not* using cx.tc_cache here but rather a + // Subtle: Note that we are *not* using tcx.tc_cache here but rather a // private cache for this walk. This is needed in the case of cyclic // types like: // @@ -163,7 +163,7 @@ impl<'tcx> ty::TyS<'tcx> { // The problem is, as we are doing the computation, we will also // compute an *intermediate* contents for, e.g., Option of // TC::None. This is ok during the computation of List itself, but if - // we stored this intermediate value into cx.tc_cache, then later + // we stored this intermediate value into tcx.tc_cache, then later // requests for the contents of Option would also yield TC::None // which is incorrect. This value was computed based on the crutch // value for the type contents of list. The correct value is @@ -172,7 +172,7 @@ impl<'tcx> ty::TyS<'tcx> { Some(tc) => { return *tc; } None => {} } - match cx.tc_cache.borrow().get(&ty) { // Must check both caches! + match tcx.tc_cache.borrow().get(&ty) { // Must check both caches! 
Some(tc) => { return *tc; } None => {} } @@ -192,7 +192,7 @@ impl<'tcx> ty::TyS<'tcx> { } ty::TyBox(typ) => { - tc_ty(cx, typ, cache).owned_pointer() + tc_ty(tcx, typ, cache).owned_pointer() } ty::TyTrait(_) => { @@ -208,28 +208,28 @@ impl<'tcx> ty::TyS<'tcx> { } ty::TyArray(ty, _) => { - tc_ty(cx, ty, cache) + tc_ty(tcx, ty, cache) } ty::TySlice(ty) => { - tc_ty(cx, ty, cache) + tc_ty(tcx, ty, cache) } ty::TyStr => TC::None, ty::TyClosure(_, ref substs) => { - TypeContents::union(&substs.upvar_tys, |ty| tc_ty(cx, &ty, cache)) + TypeContents::union(&substs.upvar_tys, |ty| tc_ty(tcx, &ty, cache)) } ty::TyTuple(ref tys) => { TypeContents::union(&tys[..], - |ty| tc_ty(cx, *ty, cache)) + |ty| tc_ty(tcx, *ty, cache)) } ty::TyStruct(def, substs) | ty::TyEnum(def, substs) => { let mut res = TypeContents::union(&def.variants, |v| { TypeContents::union(&v.fields, |f| { - tc_ty(cx, f.ty(cx, substs), cache) + tc_ty(tcx, f.ty(tcx, substs), cache) }) }); @@ -237,7 +237,7 @@ impl<'tcx> ty::TyS<'tcx> { res = res | TC::OwnsDtor; } - apply_lang_items(cx, def.did, res) + apply_lang_items(tcx, def.did, res) } ty::TyProjection(..) | @@ -255,9 +255,10 @@ impl<'tcx> ty::TyS<'tcx> { result } - fn apply_lang_items(cx: &TyCtxt, did: DefId, tc: TypeContents) - -> TypeContents { - if Some(did) == cx.lang_items.unsafe_cell_type() { + fn apply_lang_items<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId, tc: TypeContents) + -> TypeContents { + if Some(did) == tcx.lang_items.unsafe_cell_type() { tc | TC::InteriorUnsafe } else { tc diff --git a/src/librustc/ty/context.rs b/src/librustc/ty/context.rs index 8b07a97b1a..45aa6f881e 100644 --- a/src/librustc/ty/context.rs +++ b/src/librustc/ty/context.rs @@ -11,18 +11,19 @@ //! type context book-keeping use dep_graph::{DepGraph, DepTrackingMap}; -use hir::map as ast_map; use session::Session; use lint; use middle; -use middle::cstore::{CrateStore, LOCAL_CRATE}; +use middle::cstore::LOCAL_CRATE; use hir::def::DefMap; -use hir::def_id::DefId; +use hir::def_id::{DefId, DefIndex}; +use hir::map as ast_map; +use hir::map::{DefKey, DefPath, DefPathData, DisambiguatedDefPathData}; use middle::free_region::FreeRegionMap; use middle::region::RegionMaps; use middle::resolve_lifetime; use middle::stability; -use ty::subst::{self, Subst, Substs}; +use ty::subst::{self, Substs}; use traits; use ty::{self, TraitRef, Ty, TypeAndMut}; use ty::{TyS, TypeVariants}; @@ -35,16 +36,18 @@ use ty::layout::{Layout, TargetDataLayout}; use ty::maps; use util::common::MemoizationMap; use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet}; -use util::nodemap::FnvHashMap; +use util::nodemap::{FnvHashMap, FnvHashSet}; use arena::TypedArena; use std::borrow::Borrow; use std::cell::{Cell, RefCell, Ref}; use std::hash::{Hash, Hasher}; +use std::mem; +use std::ops::Deref; use std::rc::Rc; use syntax::ast::{self, Name, NodeId}; use syntax::attr; -use syntax::parse::token::{self, special_idents}; +use syntax::parse::token::{self, keywords}; use hir; @@ -52,6 +55,7 @@ use hir; pub struct CtxtArenas<'tcx> { // internings type_: TypedArena>, + type_list: TypedArena>>, substs: TypedArena>, bare_fn: TypedArena>, region: TypedArena, @@ -67,6 +71,7 @@ impl<'tcx> CtxtArenas<'tcx> { pub fn new() -> CtxtArenas<'tcx> { CtxtArenas { type_: TypedArena::new(), + type_list: TypedArena::new(), substs: TypedArena::new(), bare_fn: TypedArena::new(), region: TypedArena::new(), @@ -79,6 +84,97 @@ impl<'tcx> CtxtArenas<'tcx> { } } +pub struct CtxtInterners<'tcx> { + /// The arenas that types etc are allocated from. 
+ arenas: &'tcx CtxtArenas<'tcx>, + + /// Specifically use a speedy hash algorithm for these hash sets, + /// they're accessed quite often. + type_: RefCell>>>, + type_list: RefCell]>>>, + substs: RefCell>>>, + bare_fn: RefCell>>>, + region: RefCell>>, + stability: RefCell>, + layout: RefCell>, +} + +impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> { + fn new(arenas: &'tcx CtxtArenas<'tcx>) -> CtxtInterners<'tcx> { + CtxtInterners { + arenas: arenas, + type_: RefCell::new(FnvHashSet()), + type_list: RefCell::new(FnvHashSet()), + substs: RefCell::new(FnvHashSet()), + bare_fn: RefCell::new(FnvHashSet()), + region: RefCell::new(FnvHashSet()), + stability: RefCell::new(FnvHashSet()), + layout: RefCell::new(FnvHashSet()) + } + } + + /// Intern a type. global_interners is Some only if this is + /// a local interner and global_interners is its counterpart. + fn intern_ty(&self, st: TypeVariants<'tcx>, + global_interners: Option<&CtxtInterners<'gcx>>) + -> Ty<'tcx> { + let ty = { + let mut interner = self.type_.borrow_mut(); + let global_interner = global_interners.map(|interners| { + interners.type_.borrow_mut() + }); + if let Some(&Interned(ty)) = interner.get(&st) { + return ty; + } + if let Some(ref interner) = global_interner { + if let Some(&Interned(ty)) = interner.get(&st) { + return ty; + } + } + + let flags = super::flags::FlagComputation::for_sty(&st); + let ty_struct = TyS { + sty: st, + flags: Cell::new(flags.flags), + region_depth: flags.depth, + }; + + // HACK(eddyb) Depend on flags being accurate to + // determine that all contents are in the global tcx. + // See comments on Lift for why we can't use that. + if !flags.flags.intersects(ty::TypeFlags::KEEP_IN_LOCAL_TCX) { + if let Some(interner) = global_interners { + let ty_struct: TyS<'gcx> = unsafe { + mem::transmute(ty_struct) + }; + let ty: Ty<'gcx> = interner.arenas.type_.alloc(ty_struct); + global_interner.unwrap().insert(Interned(ty)); + return ty; + } + } else { + // Make sure we don't end up with inference + // types/regions in the global tcx. + if global_interners.is_none() { + drop(interner); + bug!("Attempted to intern `{:?}` which contains \ + inference types/regions in the global type context", + &ty_struct); + } + } + + // Don't be &mut TyS. + let ty: Ty<'tcx> = self.arenas.type_.alloc(ty_struct); + interner.insert(Interned(ty)); + ty + }; + + debug!("Interned type: {:?} Pointer: {:?}", + ty, ty as *const TyS); + ty + } + +} + pub struct CommonTypes<'tcx> { pub bool: Ty<'tcx>, pub char: Ty<'tcx>, @@ -138,7 +234,7 @@ pub struct Tables<'tcx> { pub fru_field_types: NodeMap>> } -impl<'tcx> Tables<'tcx> { +impl<'a, 'gcx, 'tcx> Tables<'tcx> { pub fn empty() -> Tables<'tcx> { Tables { node_types: FnvHashMap(), @@ -152,48 +248,11 @@ impl<'tcx> Tables<'tcx> { fru_field_types: NodeMap() } } - - pub fn closure_kind(this: &RefCell, - tcx: &TyCtxt<'tcx>, - def_id: DefId) - -> ty::ClosureKind { - // If this is a local def-id, it should be inserted into the - // tables by typeck; else, it will be retreived from - // the external crate metadata. - if let Some(&kind) = this.borrow().closure_kinds.get(&def_id) { - return kind; - } - - let kind = tcx.sess.cstore.closure_kind(tcx, def_id); - this.borrow_mut().closure_kinds.insert(def_id, kind); - kind - } - - pub fn closure_type(this: &RefCell, - tcx: &TyCtxt<'tcx>, - def_id: DefId, - substs: &ClosureSubsts<'tcx>) - -> ty::ClosureTy<'tcx> - { - // If this is a local def-id, it should be inserted into the - // tables by typeck; else, it will be retreived from - // the external crate metadata. 
- if let Some(ty) = this.borrow().closure_tys.get(&def_id) { - return ty.subst(tcx, &substs.func_substs); - } - - let ty = tcx.sess.cstore.closure_ty(tcx, def_id); - this.borrow_mut().closure_tys.insert(def_id, ty.clone()); - ty.subst(tcx, &substs.func_substs) - } } impl<'tcx> CommonTypes<'tcx> { - fn new(arena: &'tcx TypedArena>, - interner: &RefCell, Ty<'tcx>>>) - -> CommonTypes<'tcx> - { - let mk = |sty| TyCtxt::intern_ty(arena, interner, sty); + fn new(interners: &CtxtInterners<'tcx>) -> CommonTypes<'tcx> { + let mk = |sty| interners.intern_ty(sty, None); CommonTypes { bool: mk(TyBool), char: mk(TyChar), @@ -217,22 +276,23 @@ impl<'tcx> CommonTypes<'tcx> { /// The data structure to keep track of all the information that typechecker /// generates so that so that it can be reused and doesn't have to be redone /// later on. -pub struct TyCtxt<'tcx> { - /// The arenas that types etc are allocated from. - arenas: &'tcx CtxtArenas<'tcx>, +#[derive(Copy, Clone)] +pub struct TyCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + gcx: &'a GlobalCtxt<'gcx>, + interners: &'a CtxtInterners<'tcx> +} - /// Specifically use a speedy hash algorithm for this hash map, it's used - /// quite often. - // FIXME(eddyb) use a FnvHashSet> when equivalent keys can - // queried from a HashSet. - interner: RefCell, Ty<'tcx>>>, +impl<'a, 'gcx, 'tcx> Deref for TyCtxt<'a, 'gcx, 'tcx> { + type Target = &'a GlobalCtxt<'gcx>; + fn deref(&self) -> &Self::Target { + &self.gcx + } +} + +pub struct GlobalCtxt<'tcx> { + global_interners: CtxtInterners<'tcx>, - // FIXME as above, use a hashset if equivalent elements can be queried. - substs_interner: RefCell, &'tcx Substs<'tcx>>>, - bare_fn_interner: RefCell, &'tcx BareFnTy<'tcx>>>, - region_interner: RefCell>, - stability_interner: RefCell>, - layout_interner: RefCell>, + pub specializes_cache: RefCell, pub dep_graph: DepGraph, @@ -289,6 +349,8 @@ pub struct TyCtxt<'tcx> { // scratch every time. pub freevars: RefCell, + pub maybe_unused_trait_imports: NodeSet, + // Records the type of every item. pub tcache: RefCell>>, @@ -298,11 +360,6 @@ pub struct TyCtxt<'tcx> { // Cache for the type-contents routine. FIXME -- track deps? pub tc_cache: RefCell, ty::contents::TypeContents>>, - // Cache for various types within a method body and so forth. - // - // FIXME this should be made local to typeck, but it is currently used by one lint - pub ast_ty_to_ty_cache: RefCell>>, - // FIXME no dep tracking, but we should be able to remove this pub ty_param_defs: RefCell>>, @@ -338,6 +395,10 @@ pub struct TyCtxt<'tcx> { /// about. pub used_mut_nodes: RefCell, + /// Set of trait imports actually used in the method resolution. + /// This is used for warning unused imports. + pub used_trait_imports: RefCell, + /// The set of external nominal types whose implementations have been read. /// This is used for lazy resolution of methods. pub populated_external_types: RefCell, @@ -426,8 +487,18 @@ pub struct TyCtxt<'tcx> { pub layout_cache: RefCell, &'tcx Layout>>, } -impl<'tcx> TyCtxt<'tcx> { - pub fn crate_name(&self, cnum: ast::CrateNum) -> token::InternedString { +impl<'tcx> GlobalCtxt<'tcx> { + /// Get the global TyCtxt. 
+ pub fn global_tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> { + TyCtxt { + gcx: self, + interners: &self.global_interners + } + } +} + +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn crate_name(self, cnum: ast::CrateNum) -> token::InternedString { if cnum == LOCAL_CRATE { self.crate_name.clone() } else { @@ -435,7 +506,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn crate_disambiguator(&self, cnum: ast::CrateNum) -> token::InternedString { + pub fn crate_disambiguator(self, cnum: ast::CrateNum) -> token::InternedString { if cnum == LOCAL_CRATE { self.sess.crate_disambiguator.get().as_str() } else { @@ -443,14 +514,57 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn type_parameter_def(&self, + /// Given a def-key `key` and a crate `krate`, finds the def-index + /// that `krate` assigned to `key`. This `DefIndex` will always be + /// relative to `krate`. + /// + /// Returns `None` if there is no `DefIndex` with that key. + pub fn def_index_for_def_key(self, krate: ast::CrateNum, key: DefKey) + -> Option { + if krate == LOCAL_CRATE { + self.map.def_index_for_def_key(key) + } else { + self.sess.cstore.def_index_for_def_key(krate, key) + } + } + + pub fn retrace_path(self, path: &DefPath) -> Option { + debug!("retrace_path(path={:?})", path); + + let root_key = DefKey { + parent: None, + disambiguated_data: DisambiguatedDefPathData { + data: DefPathData::CrateRoot, + disambiguator: 0, + }, + }; + + let root_index = self.def_index_for_def_key(path.krate, root_key) + .expect("no root key?"); + + debug!("retrace_path: root_index={:?}", root_index); + + let mut index = root_index; + for data in &path.data { + let key = DefKey { parent: Some(index), disambiguated_data: data.clone() }; + debug!("retrace_path: key={:?}", key); + match self.def_index_for_def_key(path.krate, key) { + Some(i) => index = i, + None => return None, + } + } + + Some(DefId { krate: path.krate, index: index }) + } + + pub fn type_parameter_def(self, node_id: NodeId) -> ty::TypeParameterDef<'tcx> { self.ty_param_defs.borrow().get(&node_id).unwrap().clone() } - pub fn node_types(&self) -> Ref>> { + pub fn node_types(self) -> Ref<'a, NodeMap>> { fn projection<'a, 'tcx>(tables: &'a Tables<'tcx>) -> &'a NodeMap> { &tables.node_types } @@ -458,32 +572,32 @@ impl<'tcx> TyCtxt<'tcx> { Ref::map(self.tables.borrow(), projection) } - pub fn node_type_insert(&self, id: NodeId, ty: Ty<'tcx>) { + pub fn node_type_insert(self, id: NodeId, ty: Ty<'gcx>) { self.tables.borrow_mut().node_types.insert(id, ty); } - pub fn intern_trait_def(&self, def: ty::TraitDef<'tcx>) - -> &'tcx ty::TraitDef<'tcx> { + pub fn intern_trait_def(self, def: ty::TraitDef<'gcx>) + -> &'gcx ty::TraitDef<'gcx> { let did = def.trait_ref.def_id; - let interned = self.arenas.trait_defs.alloc(def); + let interned = self.global_interners.arenas.trait_defs.alloc(def); if let Some(prev) = self.trait_defs.borrow_mut().insert(did, interned) { bug!("Tried to overwrite interned TraitDef: {:?}", prev) } interned } - pub fn alloc_trait_def(&self, def: ty::TraitDef<'tcx>) - -> &'tcx ty::TraitDef<'tcx> { - self.arenas.trait_defs.alloc(def) + pub fn alloc_trait_def(self, def: ty::TraitDef<'gcx>) + -> &'gcx ty::TraitDef<'gcx> { + self.global_interners.arenas.trait_defs.alloc(def) } - pub fn intern_adt_def(&self, + pub fn intern_adt_def(self, did: DefId, kind: ty::AdtKind, - variants: Vec>) - -> ty::AdtDefMaster<'tcx> { + variants: Vec>) + -> ty::AdtDefMaster<'gcx> { let def = ty::AdtDefData::new(self, did, kind, variants); - let interned = self.arenas.adt_defs.alloc(def); + let interned = 
self.global_interners.arenas.adt_defs.alloc(def); // this will need a transmute when reverse-variance is removed if let Some(prev) = self.adt_defs.borrow_mut().insert(did, interned) { bug!("Tried to overwrite interned AdtDef: {:?}", prev) @@ -491,78 +605,86 @@ impl<'tcx> TyCtxt<'tcx> { interned } - pub fn intern_stability(&self, stab: attr::Stability) -> &'tcx attr::Stability { - if let Some(st) = self.stability_interner.borrow().get(&stab) { + pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability { + if let Some(st) = self.global_interners.stability.borrow().get(&stab) { return st; } - let interned = self.arenas.stability.alloc(stab); - if let Some(prev) = self.stability_interner + let interned = self.global_interners.arenas.stability.alloc(stab); + if let Some(prev) = self.global_interners.stability .borrow_mut() - .insert(interned, interned) { + .replace(interned) { bug!("Tried to overwrite interned Stability: {:?}", prev) } interned } - pub fn intern_layout(&self, layout: Layout) -> &'tcx Layout { - if let Some(layout) = self.layout_interner.borrow().get(&layout) { + pub fn intern_layout(self, layout: Layout) -> &'gcx Layout { + if let Some(layout) = self.global_interners.layout.borrow().get(&layout) { return layout; } - let interned = self.arenas.layout.alloc(layout); - if let Some(prev) = self.layout_interner + let interned = self.global_interners.arenas.layout.alloc(layout); + if let Some(prev) = self.global_interners.layout .borrow_mut() - .insert(interned, interned) { + .replace(interned) { bug!("Tried to overwrite interned Layout: {:?}", prev) } interned } - pub fn store_free_region_map(&self, id: NodeId, map: FreeRegionMap) { + pub fn store_free_region_map(self, id: NodeId, map: FreeRegionMap) { if self.free_region_maps.borrow_mut().insert(id, map).is_some() { bug!("Tried to overwrite interned FreeRegionMap for NodeId {:?}", id) } } - pub fn free_region_map(&self, id: NodeId) -> FreeRegionMap { + pub fn free_region_map(self, id: NodeId) -> FreeRegionMap { self.free_region_maps.borrow()[&id].clone() } - pub fn lift>(&self, value: &T) -> Option { + pub fn lift>(self, value: &T) -> Option { value.lift_to_tcx(self) } - /// Create a type context and call the closure with a `&TyCtxt` reference + /// Like lift, but only tries in the global tcx. + pub fn lift_to_global>(self, value: &T) -> Option { + value.lift_to_tcx(self.global_tcx()) + } + + /// Returns true if self is the same as self.global_tcx(). + fn is_global(self) -> bool { + let local = self.interners as *const _; + let global = &self.global_interners as *const _; + local as usize == global as usize + } + + /// Create a type context and call the closure with a `TyCtxt` reference /// to the context. The closure enforces that the type context and any interned /// value (types, substs, etc.) can only be used while `ty::tls` has a valid /// reference to the context, to allow formatting values that need it. 
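The contract described in that comment, that interned values are only usable while the closure runs, is the standard "build the context locally and hand out a short-lived handle through a closure" pattern. A minimal standalone sketch of it, with invented `Arena`/`Ctxt` types rather than rustc's:

struct Arena {
    types: Vec<String>,
}

struct Ctxt<'t> {
    arena: &'t Arena,
}

// Everything reachable through the handle borrows the locally created arena,
// so nothing interned in it can outlive the call.
fn create_and_enter<F, R>(f: F) -> R
    where F: for<'t> FnOnce(Ctxt<'t>) -> R
{
    let arena = Arena { types: vec!["bool".to_string(), "char".to_string()] };
    f(Ctxt { arena: &arena })
}

fn main() {
    let count = create_and_enter(|cx| cx.arena.types.len());
    assert_eq!(count, 2);
}

The updated `create_and_enter` signature follows.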
pub fn create_and_enter(s: &'tcx Session, - arenas: &'tcx CtxtArenas<'tcx>, - def_map: RefCell, - named_region_map: resolve_lifetime::NamedRegionMap, - map: ast_map::Map<'tcx>, - freevars: FreevarMap, - region_maps: RegionMaps, - lang_items: middle::lang_items::LanguageItems, - stability: stability::Index<'tcx>, + arenas: &'tcx CtxtArenas<'tcx>, + def_map: RefCell, + named_region_map: resolve_lifetime::NamedRegionMap, + map: ast_map::Map<'tcx>, + freevars: FreevarMap, + maybe_unused_trait_imports: NodeSet, + region_maps: RegionMaps, + lang_items: middle::lang_items::LanguageItems, + stability: stability::Index<'tcx>, crate_name: &str, - f: F) -> R - where F: FnOnce(&TyCtxt<'tcx>) -> R + f: F) -> R + where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R { let data_layout = TargetDataLayout::parse(s); - let interner = RefCell::new(FnvHashMap()); - let common_types = CommonTypes::new(&arenas.type_, &interner); + let interners = CtxtInterners::new(arenas); + let common_types = CommonTypes::new(&interners); let dep_graph = map.dep_graph.clone(); let fulfilled_predicates = traits::GlobalFulfilledPredicates::new(dep_graph.clone()); - tls::enter(TyCtxt { - arenas: arenas, - interner: interner, - substs_interner: RefCell::new(FnvHashMap()), - bare_fn_interner: RefCell::new(FnvHashMap()), - region_interner: RefCell::new(FnvHashMap()), - stability_interner: RefCell::new(FnvHashMap()), - layout_interner: RefCell::new(FnvHashMap()), + tls::enter_global(GlobalCtxt { + specializes_cache: RefCell::new(traits::SpecializesCache::new()), + global_interners: interners, dep_graph: dep_graph.clone(), types: common_types, named_region_map: named_region_map, @@ -581,10 +703,10 @@ impl<'tcx> TyCtxt<'tcx> { fulfilled_predicates: RefCell::new(fulfilled_predicates), map: map, freevars: RefCell::new(freevars), + maybe_unused_trait_imports: maybe_unused_trait_imports, tcache: RefCell::new(DepTrackingMap::new(dep_graph.clone())), rcache: RefCell::new(FnvHashMap()), tc_cache: RefCell::new(FnvHashMap()), - ast_ty_to_ty_cache: RefCell::new(NodeMap()), impl_or_trait_items: RefCell::new(DepTrackingMap::new(dep_graph.clone())), trait_item_def_ids: RefCell::new(DepTrackingMap::new(dep_graph.clone())), trait_items_cache: RefCell::new(DepTrackingMap::new(dep_graph.clone())), @@ -595,6 +717,7 @@ impl<'tcx> TyCtxt<'tcx> { impl_items: RefCell::new(DepTrackingMap::new(dep_graph.clone())), used_unsafe: RefCell::new(NodeSet()), used_mut_nodes: RefCell::new(NodeSet()), + used_trait_imports: RefCell::new(NodeSet()), populated_external_types: RefCell::new(DefIdSet()), populated_external_primitive_impls: RefCell::new(DefIdSet()), extern_const_statics: RefCell::new(DefIdMap()), @@ -615,11 +738,27 @@ impl<'tcx> TyCtxt<'tcx> { } } +impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> { + /// Call the closure with a local `TyCtxt` using the given arenas. + pub fn enter_local(&self, arenas: &'tcx CtxtArenas<'tcx>, f: F) -> R + where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R + { + let interners = CtxtInterners::new(arenas); + tls::enter(self, &interners, f) + } +} + /// A trait implemented for all X<'a> types which can be safely and /// efficiently converted to X<'tcx> as long as they are part of the /// provided TyCtxt<'tcx>. /// This can be done, for example, for Ty<'tcx> or &'tcx Substs<'tcx> /// by looking them up in their respective interners. +/// +/// However, this is still not the best implementation as it does +/// need to compare the components, even for interned values. 
+/// It would be more efficient if TypedArena provided a way to +/// determine whether the address is in the allocated range. +/// /// None is returned if the value or one of the components is not part /// of the provided context. /// For Ty, None can be returned if either the type interner doesn't @@ -628,48 +767,112 @@ impl<'tcx> TyCtxt<'tcx> { /// e.g. `()` or `u8`, was interned in a different context. pub trait Lift<'tcx> { type Lifted; - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option; + fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option; } impl<'a, 'tcx> Lift<'tcx> for Ty<'a> { type Lifted = Ty<'tcx>; - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option> { - if let Some(&ty) = tcx.interner.borrow().get(&self.sty) { + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option> { + if let Some(&Interned(ty)) = tcx.interners.type_.borrow().get(&self.sty) { if *self as *const _ == ty as *const _ { return Some(ty); } } - None + // Also try in the global tcx if we're not that. + if !tcx.is_global() { + self.lift_to_tcx(tcx.global_tcx()) + } else { + None + } } } impl<'a, 'tcx> Lift<'tcx> for &'a Substs<'a> { type Lifted = &'tcx Substs<'tcx>; - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option<&'tcx Substs<'tcx>> { - if let Some(&substs) = tcx.substs_interner.borrow().get(*self) { + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Substs<'tcx>> { + if let Some(&Interned(substs)) = tcx.interners.substs.borrow().get(*self) { if *self as *const _ == substs as *const _ { return Some(substs); } } - None + // Also try in the global tcx if we're not that. + if !tcx.is_global() { + self.lift_to_tcx(tcx.global_tcx()) + } else { + None + } + } +} + +impl<'a, 'tcx> Lift<'tcx> for &'a Region { + type Lifted = &'tcx Region; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Region> { + if let Some(&Interned(region)) = tcx.interners.region.borrow().get(*self) { + if *self as *const _ == region as *const _ { + return Some(region); + } + } + // Also try in the global tcx if we're not that. + if !tcx.is_global() { + self.lift_to_tcx(tcx.global_tcx()) + } else { + None + } + } +} + +impl<'a, 'tcx> Lift<'tcx> for &'a [Ty<'a>] { + type Lifted = &'tcx [Ty<'tcx>]; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx [Ty<'tcx>]> { + if let Some(&Interned(list)) = tcx.interners.type_list.borrow().get(*self) { + if *self as *const _ == list as *const _ { + return Some(list); + } + } + // Also try in the global tcx if we're not that. + if !tcx.is_global() { + self.lift_to_tcx(tcx.global_tcx()) + } else { + None + } + } +} + +impl<'a, 'tcx> Lift<'tcx> for &'a BareFnTy<'a> { + type Lifted = &'tcx BareFnTy<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) + -> Option<&'tcx BareFnTy<'tcx>> { + if let Some(&Interned(fty)) = tcx.interners.bare_fn.borrow().get(*self) { + if *self as *const _ == fty as *const _ { + return Some(fty); + } + } + // Also try in the global tcx if we're not that. + if !tcx.is_global() { + self.lift_to_tcx(tcx.global_tcx()) + } else { + None + } } } pub mod tls { - use ty::TyCtxt; + use super::{CtxtInterners, GlobalCtxt, TyCtxt}; use std::cell::Cell; use std::fmt; use syntax::codemap; - /// Marker type used for the scoped TLS slot. + /// Marker types used for the scoped TLS slot. /// The type context cannot be used directly because the scoped TLS /// in libstd doesn't allow types generic over lifetimes. 
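Because scoped TLS in libstd cannot hold a type that is generic over lifetimes, the real code below stores raw pointers and rebuilds the handle inside `with`. A self-contained sketch of that scoped-TLS pattern, using a single pointer and toy names (the real version stores a `(GlobalCtxt, CtxtInterners)` pair):

use std::cell::Cell;

struct Ctxt {
    crate_name: &'static str,
}

thread_local! {
    // A raw pointer, since the slot cannot be generic over a lifetime;
    // `enter` guarantees it is only read while the context is alive.
    static TLS_CTXT: Cell<Option<*const Ctxt>> = Cell::new(None);
}

fn enter<F, R>(cx: &Ctxt, f: F) -> R
    where F: FnOnce() -> R
{
    TLS_CTXT.with(|tls| {
        let prev = tls.get();
        tls.set(Some(cx as *const Ctxt));
        let ret = f();
        tls.set(prev); // restore the previous context on the way out
        ret
    })
}

fn with<F, R>(f: F) -> R
    where F: FnOnce(&Ctxt) -> R
{
    TLS_CTXT.with(|tls| {
        let ptr = tls.get().expect("not inside enter()");
        f(unsafe { &*ptr })
    })
}

fn main() {
    let cx = Ctxt { crate_name: "core" };
    let name = enter(&cx, || with(|cx| cx.crate_name));
    assert_eq!(name, "core");
}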
- struct ThreadLocalTyCx; + enum ThreadLocalGlobalCtxt {} + enum ThreadLocalInterners {} thread_local! { - static TLS_TCX: Cell> = Cell::new(None) + static TLS_TCX: Cell> = Cell::new(None) } fn span_debug(span: codemap::Span, f: &mut fmt::Formatter) -> fmt::Result { @@ -678,31 +881,54 @@ pub mod tls { }) } - pub fn enter<'tcx, F: FnOnce(&TyCtxt<'tcx>) -> R, R>(tcx: TyCtxt<'tcx>, f: F) -> R { + pub fn enter_global<'gcx, F, R>(gcx: GlobalCtxt<'gcx>, f: F) -> R + where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'gcx>) -> R + { codemap::SPAN_DEBUG.with(|span_dbg| { let original_span_debug = span_dbg.get(); span_dbg.set(span_debug); - let tls_ptr = &tcx as *const _ as *const ThreadLocalTyCx; - let result = TLS_TCX.with(|tls| { - let prev = tls.get(); - tls.set(Some(tls_ptr)); - let ret = f(&tcx); - tls.set(prev); - ret - }); + let result = enter(&gcx, &gcx.global_interners, f); span_dbg.set(original_span_debug); result }) } - pub fn with R, R>(f: F) -> R { + pub fn enter<'a, 'gcx: 'tcx, 'tcx, F, R>(gcx: &'a GlobalCtxt<'gcx>, + interners: &'a CtxtInterners<'tcx>, + f: F) -> R + where F: FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R + { + let gcx_ptr = gcx as *const _ as *const ThreadLocalGlobalCtxt; + let interners_ptr = interners as *const _ as *const ThreadLocalInterners; + TLS_TCX.with(|tls| { + let prev = tls.get(); + tls.set(Some((gcx_ptr, interners_ptr))); + let ret = f(TyCtxt { + gcx: gcx, + interners: interners + }); + tls.set(prev); + ret + }) + } + + pub fn with(f: F) -> R + where F: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R + { TLS_TCX.with(|tcx| { - let tcx = tcx.get().unwrap(); - f(unsafe { &*(tcx as *const TyCtxt) }) + let (gcx, interners) = tcx.get().unwrap(); + let gcx = unsafe { &*(gcx as *const GlobalCtxt) }; + let interners = unsafe { &*(interners as *const CtxtInterners) }; + f(TyCtxt { + gcx: gcx, + interners: interners + }) }) } - pub fn with_opt) -> R, R>(f: F) -> R { + pub fn with_opt(f: F) -> R + where F: for<'a, 'gcx, 'tcx> FnOnce(Option>) -> R + { if TLS_TCX.with(|tcx| tcx.get().is_some()) { with(|v| f(Some(v))) } else { @@ -718,6 +944,8 @@ macro_rules! sty_debug_print { #[allow(non_snake_case)] mod inner { use ty::{self, TyCtxt}; + use ty::context::Interned; + #[derive(Copy, Clone)] struct DebugStat { total: usize, @@ -726,7 +954,7 @@ macro_rules! sty_debug_print { both_infer: usize, } - pub fn go(tcx: &TyCtxt) { + pub fn go(tcx: TyCtxt) { let mut total = DebugStat { total: 0, region_infer: 0, ty_infer: 0, both_infer: 0, @@ -734,7 +962,7 @@ macro_rules! sty_debug_print { $(let mut $variant = total;)* - for (_, t) in tcx.interner.borrow().iter() { + for &Interned(t) in tcx.interners.type_.borrow().iter() { let variant = match t.sty { ty::TyBool | ty::TyChar | ty::TyInt(..) | ty::TyUint(..) | ty::TyFloat(..) | ty::TyStr => continue, @@ -773,131 +1001,172 @@ macro_rules! 
sty_debug_print { }} } -impl<'tcx> TyCtxt<'tcx> { - pub fn print_debug_stats(&self) { +impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> { + pub fn print_debug_stats(self) { sty_debug_print!( self, TyEnum, TyBox, TyArray, TySlice, TyRawPtr, TyRef, TyFnDef, TyFnPtr, TyTrait, TyStruct, TyClosure, TyTuple, TyParam, TyInfer, TyProjection); - println!("Substs interner: #{}", self.substs_interner.borrow().len()); - println!("BareFnTy interner: #{}", self.bare_fn_interner.borrow().len()); - println!("Region interner: #{}", self.region_interner.borrow().len()); - println!("Stability interner: #{}", self.stability_interner.borrow().len()); - println!("Layout interner: #{}", self.layout_interner.borrow().len()); + println!("Substs interner: #{}", self.interners.substs.borrow().len()); + println!("BareFnTy interner: #{}", self.interners.bare_fn.borrow().len()); + println!("Region interner: #{}", self.interners.region.borrow().len()); + println!("Stability interner: #{}", self.interners.stability.borrow().len()); + println!("Layout interner: #{}", self.interners.layout.borrow().len()); } } -/// An entry in the type interner. -pub struct InternedTy<'tcx> { - ty: Ty<'tcx> -} +/// An entry in an interner. +struct Interned<'tcx, T: 'tcx+?Sized>(&'tcx T); -// NB: An InternedTy compares and hashes as a sty. -impl<'tcx> PartialEq for InternedTy<'tcx> { - fn eq(&self, other: &InternedTy<'tcx>) -> bool { - self.ty.sty == other.ty.sty +// NB: An Interned compares and hashes as a sty. +impl<'tcx> PartialEq for Interned<'tcx, TyS<'tcx>> { + fn eq(&self, other: &Interned<'tcx, TyS<'tcx>>) -> bool { + self.0.sty == other.0.sty } } -impl<'tcx> Eq for InternedTy<'tcx> {} +impl<'tcx> Eq for Interned<'tcx, TyS<'tcx>> {} -impl<'tcx> Hash for InternedTy<'tcx> { +impl<'tcx> Hash for Interned<'tcx, TyS<'tcx>> { fn hash(&self, s: &mut H) { - self.ty.sty.hash(s) + self.0.sty.hash(s) } } -impl<'tcx> Borrow> for InternedTy<'tcx> { - fn borrow<'a>(&'a self) -> &'a TypeVariants<'tcx> { - &self.ty.sty +impl<'tcx: 'lcx, 'lcx> Borrow> for Interned<'tcx, TyS<'tcx>> { + fn borrow<'a>(&'a self) -> &'a TypeVariants<'lcx> { + &self.0.sty } } -fn bound_list_is_sorted(bounds: &[ty::PolyProjectionPredicate]) -> bool { - bounds.is_empty() || - bounds[1..].iter().enumerate().all( - |(index, bound)| bounds[index].sort_key() <= bound.sort_key()) +impl<'tcx: 'lcx, 'lcx> Borrow<[Ty<'lcx>]> for Interned<'tcx, [Ty<'tcx>]> { + fn borrow<'a>(&'a self) -> &'a [Ty<'lcx>] { + self.0 + } } -impl<'tcx> TyCtxt<'tcx> { - // Type constructors - pub fn mk_substs(&self, substs: Substs<'tcx>) -> &'tcx Substs<'tcx> { - if let Some(substs) = self.substs_interner.borrow().get(&substs) { - return *substs; - } +impl<'tcx: 'lcx, 'lcx> Borrow> for Interned<'tcx, Substs<'tcx>> { + fn borrow<'a>(&'a self) -> &'a Substs<'lcx> { + self.0 + } +} - let substs = self.arenas.substs.alloc(substs); - self.substs_interner.borrow_mut().insert(substs, substs); - substs +impl<'tcx: 'lcx, 'lcx> Borrow> for Interned<'tcx, BareFnTy<'tcx>> { + fn borrow<'a>(&'a self) -> &'a BareFnTy<'lcx> { + self.0 } +} - /// Create an unsafe fn ty based on a safe fn ty. 
- pub fn safe_to_unsafe_fn_ty(&self, bare_fn: &BareFnTy<'tcx>) -> Ty<'tcx> { - assert_eq!(bare_fn.unsafety, hir::Unsafety::Normal); - self.mk_fn_ptr(ty::BareFnTy { - unsafety: hir::Unsafety::Unsafe, - abi: bare_fn.abi, - sig: bare_fn.sig.clone() - }) +impl<'tcx> Borrow for Interned<'tcx, Region> { + fn borrow<'a>(&'a self) -> &'a Region { + self.0 } +} - pub fn mk_bare_fn(&self, bare_fn: BareFnTy<'tcx>) -> &'tcx BareFnTy<'tcx> { - if let Some(bare_fn) = self.bare_fn_interner.borrow().get(&bare_fn) { - return *bare_fn; +macro_rules! items { ($($item:item)+) => ($($item)+) } +macro_rules! impl_interners { + ($lt_tcx:tt, $($name:ident: $method:ident($alloc:ty, $needs_infer:expr)-> $ty:ty),+) => { + items!($(impl<$lt_tcx> PartialEq for Interned<$lt_tcx, $ty> { + fn eq(&self, other: &Self) -> bool { + self.0 == other.0 + } } - let bare_fn = self.arenas.bare_fn.alloc(bare_fn); - self.bare_fn_interner.borrow_mut().insert(bare_fn, bare_fn); - bare_fn - } + impl<$lt_tcx> Eq for Interned<$lt_tcx, $ty> {} - pub fn mk_region(&self, region: Region) -> &'tcx Region { - if let Some(region) = self.region_interner.borrow().get(®ion) { - return *region; + impl<$lt_tcx> Hash for Interned<$lt_tcx, $ty> { + fn hash(&self, s: &mut H) { + self.0.hash(s) + } } - let region = self.arenas.region.alloc(region); - self.region_interner.borrow_mut().insert(region, region); - region - } + impl<'a, 'gcx, $lt_tcx> TyCtxt<'a, 'gcx, $lt_tcx> { + pub fn $method(self, v: $alloc) -> &$lt_tcx $ty { + if let Some(i) = self.interners.$name.borrow().get::<$ty>(&v) { + return i.0; + } + if !self.is_global() { + if let Some(i) = self.global_interners.$name.borrow().get::<$ty>(&v) { + return i.0; + } + } - fn intern_ty(type_arena: &'tcx TypedArena>, - interner: &RefCell, Ty<'tcx>>>, - st: TypeVariants<'tcx>) - -> Ty<'tcx> { - let ty: Ty /* don't be &mut TyS */ = { - let mut interner = interner.borrow_mut(); - match interner.get(&st) { - Some(ty) => return *ty, - _ => () + // HACK(eddyb) Depend on flags being accurate to + // determine that all contents are in the global tcx. + // See comments on Lift for why we can't use that. + if !($needs_infer)(&v) { + if !self.is_global() { + let v = unsafe { + mem::transmute(v) + }; + let i = self.global_interners.arenas.$name.alloc(v); + self.global_interners.$name.borrow_mut().insert(Interned(i)); + return i; + } + } else { + // Make sure we don't end up with inference + // types/regions in the global tcx. 
+ if self.is_global() { + bug!("Attempted to intern `{:?}` which contains \ + inference types/regions in the global type context", + v); + } + } + + let i = self.interners.arenas.$name.alloc(v); + self.interners.$name.borrow_mut().insert(Interned(i)); + i } + })+); + } +} - let flags = super::flags::FlagComputation::for_sty(&st); +fn keep_local<'tcx, T: ty::TypeFoldable<'tcx>>(x: &T) -> bool { + x.has_type_flags(ty::TypeFlags::KEEP_IN_LOCAL_TCX) +} - let ty = match () { - () => type_arena.alloc(TyS { sty: st, - flags: Cell::new(flags.flags), - region_depth: flags.depth, }), - }; +impl_interners!('tcx, + type_list: mk_type_list(Vec>, keep_local) -> [Ty<'tcx>], + substs: mk_substs(Substs<'tcx>, |substs: &Substs| { + keep_local(&substs.types) || keep_local(&substs.regions) + }) -> Substs<'tcx>, + bare_fn: mk_bare_fn(BareFnTy<'tcx>, |fty: &BareFnTy| { + keep_local(&fty.sig) + }) -> BareFnTy<'tcx>, + region: mk_region(Region, keep_local) -> Region +); - interner.insert(InternedTy { ty: ty }, ty); - ty - }; +fn bound_list_is_sorted(bounds: &[ty::PolyProjectionPredicate]) -> bool { + bounds.is_empty() || + bounds[1..].iter().enumerate().all( + |(index, bound)| bounds[index].sort_key() <= bound.sort_key()) +} - debug!("Interned type: {:?} Pointer: {:?}", - ty, ty as *const TyS); - ty +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + /// Create an unsafe fn ty based on a safe fn ty. + pub fn safe_to_unsafe_fn_ty(self, bare_fn: &BareFnTy<'tcx>) -> Ty<'tcx> { + assert_eq!(bare_fn.unsafety, hir::Unsafety::Normal); + self.mk_fn_ptr(self.mk_bare_fn(ty::BareFnTy { + unsafety: hir::Unsafety::Unsafe, + abi: bare_fn.abi, + sig: bare_fn.sig.clone() + })) } - // Interns a type/name combination, stores the resulting box in cx.interner, + // Interns a type/name combination, stores the resulting box in cx.interners, // and returns the box as cast to an unsafe ptr (see comments for Ty above). 
- pub fn mk_ty(&self, st: TypeVariants<'tcx>) -> Ty<'tcx> { - TyCtxt::intern_ty(&self.arenas.type_, &self.interner, st) + pub fn mk_ty(self, st: TypeVariants<'tcx>) -> Ty<'tcx> { + let global_interners = if !self.is_global() { + Some(&self.global_interners) + } else { + None + }; + self.interners.intern_ty(st, global_interners) } - pub fn mk_mach_int(&self, tm: ast::IntTy) -> Ty<'tcx> { + pub fn mk_mach_int(self, tm: ast::IntTy) -> Ty<'tcx> { match tm { ast::IntTy::Is => self.types.isize, ast::IntTy::I8 => self.types.i8, @@ -907,7 +1176,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn mk_mach_uint(&self, tm: ast::UintTy) -> Ty<'tcx> { + pub fn mk_mach_uint(self, tm: ast::UintTy) -> Ty<'tcx> { match tm { ast::UintTy::Us => self.types.usize, ast::UintTy::U8 => self.types.u8, @@ -917,89 +1186,89 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn mk_mach_float(&self, tm: ast::FloatTy) -> Ty<'tcx> { + pub fn mk_mach_float(self, tm: ast::FloatTy) -> Ty<'tcx> { match tm { ast::FloatTy::F32 => self.types.f32, ast::FloatTy::F64 => self.types.f64, } } - pub fn mk_str(&self) -> Ty<'tcx> { + pub fn mk_str(self) -> Ty<'tcx> { self.mk_ty(TyStr) } - pub fn mk_static_str(&self) -> Ty<'tcx> { + pub fn mk_static_str(self) -> Ty<'tcx> { self.mk_imm_ref(self.mk_region(ty::ReStatic), self.mk_str()) } - pub fn mk_enum(&self, def: AdtDef<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> { + pub fn mk_enum(self, def: AdtDef<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> { // take a copy of substs so that we own the vectors inside self.mk_ty(TyEnum(def, substs)) } - pub fn mk_box(&self, ty: Ty<'tcx>) -> Ty<'tcx> { + pub fn mk_box(self, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ty(TyBox(ty)) } - pub fn mk_ptr(&self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> { + pub fn mk_ptr(self, tm: TypeAndMut<'tcx>) -> Ty<'tcx> { self.mk_ty(TyRawPtr(tm)) } - pub fn mk_ref(&self, r: &'tcx Region, tm: TypeAndMut<'tcx>) -> Ty<'tcx> { + pub fn mk_ref(self, r: &'tcx Region, tm: TypeAndMut<'tcx>) -> Ty<'tcx> { self.mk_ty(TyRef(r, tm)) } - pub fn mk_mut_ref(&self, r: &'tcx Region, ty: Ty<'tcx>) -> Ty<'tcx> { + pub fn mk_mut_ref(self, r: &'tcx Region, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutMutable}) } - pub fn mk_imm_ref(&self, r: &'tcx Region, ty: Ty<'tcx>) -> Ty<'tcx> { + pub fn mk_imm_ref(self, r: &'tcx Region, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ref(r, TypeAndMut {ty: ty, mutbl: hir::MutImmutable}) } - pub fn mk_mut_ptr(&self, ty: Ty<'tcx>) -> Ty<'tcx> { + pub fn mk_mut_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutMutable}) } - pub fn mk_imm_ptr(&self, ty: Ty<'tcx>) -> Ty<'tcx> { + pub fn mk_imm_ptr(self, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ptr(TypeAndMut {ty: ty, mutbl: hir::MutImmutable}) } - pub fn mk_nil_ptr(&self) -> Ty<'tcx> { + pub fn mk_nil_ptr(self) -> Ty<'tcx> { self.mk_imm_ptr(self.mk_nil()) } - pub fn mk_array(&self, ty: Ty<'tcx>, n: usize) -> Ty<'tcx> { + pub fn mk_array(self, ty: Ty<'tcx>, n: usize) -> Ty<'tcx> { self.mk_ty(TyArray(ty, n)) } - pub fn mk_slice(&self, ty: Ty<'tcx>) -> Ty<'tcx> { + pub fn mk_slice(self, ty: Ty<'tcx>) -> Ty<'tcx> { self.mk_ty(TySlice(ty)) } - pub fn mk_tup(&self, ts: Vec>) -> Ty<'tcx> { - self.mk_ty(TyTuple(ts)) + pub fn mk_tup(self, ts: Vec>) -> Ty<'tcx> { + self.mk_ty(TyTuple(self.mk_type_list(ts))) } - pub fn mk_nil(&self) -> Ty<'tcx> { + pub fn mk_nil(self) -> Ty<'tcx> { self.mk_tup(Vec::new()) } - pub fn mk_bool(&self) -> Ty<'tcx> { + pub fn mk_bool(self) -> Ty<'tcx> { self.mk_ty(TyBool) } - pub fn mk_fn_def(&self, def_id: DefId, + pub 
fn mk_fn_def(self, def_id: DefId, substs: &'tcx Substs<'tcx>, - fty: BareFnTy<'tcx>) -> Ty<'tcx> { - self.mk_ty(TyFnDef(def_id, substs, self.mk_bare_fn(fty))) + fty: &'tcx BareFnTy<'tcx>) -> Ty<'tcx> { + self.mk_ty(TyFnDef(def_id, substs, fty)) } - pub fn mk_fn_ptr(&self, fty: BareFnTy<'tcx>) -> Ty<'tcx> { - self.mk_ty(TyFnPtr(self.mk_bare_fn(fty))) + pub fn mk_fn_ptr(self, fty: &'tcx BareFnTy<'tcx>) -> Ty<'tcx> { + self.mk_ty(TyFnPtr(fty)) } - pub fn mk_trait(&self, + pub fn mk_trait(self, principal: ty::PolyTraitRef<'tcx>, bounds: ExistentialBounds<'tcx>) -> Ty<'tcx> @@ -1013,7 +1282,7 @@ impl<'tcx> TyCtxt<'tcx> { self.mk_ty(TyTrait(inner)) } - pub fn mk_projection(&self, + pub fn mk_projection(self, trait_ref: TraitRef<'tcx>, item_name: Name) -> Ty<'tcx> { @@ -1022,61 +1291,61 @@ impl<'tcx> TyCtxt<'tcx> { self.mk_ty(TyProjection(inner)) } - pub fn mk_struct(&self, def: AdtDef<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> { + pub fn mk_struct(self, def: AdtDef<'tcx>, substs: &'tcx Substs<'tcx>) -> Ty<'tcx> { // take a copy of substs so that we own the vectors inside self.mk_ty(TyStruct(def, substs)) } - pub fn mk_closure(&self, + pub fn mk_closure(self, closure_id: DefId, substs: &'tcx Substs<'tcx>, tys: Vec>) -> Ty<'tcx> { - self.mk_closure_from_closure_substs(closure_id, Box::new(ClosureSubsts { + self.mk_closure_from_closure_substs(closure_id, ClosureSubsts { func_substs: substs, - upvar_tys: tys - })) + upvar_tys: self.mk_type_list(tys) + }) } - pub fn mk_closure_from_closure_substs(&self, + pub fn mk_closure_from_closure_substs(self, closure_id: DefId, - closure_substs: Box>) + closure_substs: ClosureSubsts<'tcx>) -> Ty<'tcx> { self.mk_ty(TyClosure(closure_id, closure_substs)) } - pub fn mk_var(&self, v: TyVid) -> Ty<'tcx> { + pub fn mk_var(self, v: TyVid) -> Ty<'tcx> { self.mk_infer(TyVar(v)) } - pub fn mk_int_var(&self, v: IntVid) -> Ty<'tcx> { + pub fn mk_int_var(self, v: IntVid) -> Ty<'tcx> { self.mk_infer(IntVar(v)) } - pub fn mk_float_var(&self, v: FloatVid) -> Ty<'tcx> { + pub fn mk_float_var(self, v: FloatVid) -> Ty<'tcx> { self.mk_infer(FloatVar(v)) } - pub fn mk_infer(&self, it: InferTy) -> Ty<'tcx> { + pub fn mk_infer(self, it: InferTy) -> Ty<'tcx> { self.mk_ty(TyInfer(it)) } - pub fn mk_param(&self, + pub fn mk_param(self, space: subst::ParamSpace, index: u32, name: Name) -> Ty<'tcx> { self.mk_ty(TyParam(ParamTy { space: space, idx: index, name: name })) } - pub fn mk_self_type(&self) -> Ty<'tcx> { - self.mk_param(subst::SelfSpace, 0, special_idents::type_self.name) + pub fn mk_self_type(self) -> Ty<'tcx> { + self.mk_param(subst::SelfSpace, 0, keywords::SelfType.name()) } - pub fn mk_param_from_def(&self, def: &ty::TypeParameterDef) -> Ty<'tcx> { + pub fn mk_param_from_def(self, def: &ty::TypeParameterDef) -> Ty<'tcx> { self.mk_param(def.space, def.index, def.name) } - pub fn trait_items(&self, trait_did: DefId) -> Rc>> { + pub fn trait_items(self, trait_did: DefId) -> Rc>> { self.trait_items_cache.memoize(trait_did, || { let def_ids = self.trait_item_def_ids(trait_did); Rc::new(def_ids.iter() @@ -1086,7 +1355,7 @@ impl<'tcx> TyCtxt<'tcx> { } /// Obtain the representation annotation for a struct definition. 
- pub fn lookup_repr_hints(&self, did: DefId) -> Rc> { + pub fn lookup_repr_hints(self, did: DefId) -> Rc> { self.repr_hint_cache.memoize(did, || { Rc::new(if did.is_local() { self.get_attrs(did).iter().flat_map(|meta| { diff --git a/src/librustc/ty/error.rs b/src/librustc/ty/error.rs index 73bafc5e85..b5190f3133 100644 --- a/src/librustc/ty/error.rs +++ b/src/librustc/ty/error.rs @@ -210,14 +210,14 @@ impl<'tcx> fmt::Display for TypeError<'tcx> { } } -impl<'tcx> ty::TyS<'tcx> { - fn sort_string(&self, cx: &TyCtxt) -> String { +impl<'a, 'gcx, 'lcx, 'tcx> ty::TyS<'tcx> { + fn sort_string(&self, tcx: TyCtxt<'a, 'gcx, 'lcx>) -> String { match self.sty { ty::TyBool | ty::TyChar | ty::TyInt(_) | ty::TyUint(_) | ty::TyFloat(_) | ty::TyStr => self.to_string(), ty::TyTuple(ref tys) if tys.is_empty() => self.to_string(), - ty::TyEnum(def, _) => format!("enum `{}`", cx.item_path_str(def.did)), + ty::TyEnum(def, _) => format!("enum `{}`", tcx.item_path_str(def.did)), ty::TyBox(_) => "box".to_string(), ty::TyArray(_, n) => format!("array of {} elements", n), ty::TySlice(_) => "slice".to_string(), @@ -226,10 +226,10 @@ impl<'tcx> ty::TyS<'tcx> { ty::TyFnDef(..) => format!("fn item"), ty::TyFnPtr(_) => "fn pointer".to_string(), ty::TyTrait(ref inner) => { - format!("trait {}", cx.item_path_str(inner.principal_def_id())) + format!("trait {}", tcx.item_path_str(inner.principal_def_id())) } ty::TyStruct(def, _) => { - format!("struct `{}`", cx.item_path_str(def.did)) + format!("struct `{}`", tcx.item_path_str(def.did)) } ty::TyClosure(..) => "closure".to_string(), ty::TyTuple(_) => "tuple".to_string(), @@ -252,8 +252,8 @@ impl<'tcx> ty::TyS<'tcx> { } } -impl<'tcx> TyCtxt<'tcx> { - pub fn note_and_explain_type_err(&self, +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn note_and_explain_type_err(self, db: &mut DiagnosticBuilder, err: &TypeError<'tcx>, sp: Span) { diff --git a/src/librustc/ty/fast_reject.rs b/src/librustc/ty/fast_reject.rs index 29647253ad..25f3552b56 100644 --- a/src/librustc/ty/fast_reject.rs +++ b/src/librustc/ty/fast_reject.rs @@ -43,10 +43,10 @@ pub enum SimplifiedType { /// then we can't say much about whether two types would unify. Put another way, /// `can_simplify_params` should be true if type parameters appear free in `ty` and `false` if they /// are to be considered bound. -pub fn simplify_type(tcx: &TyCtxt, - ty: Ty, - can_simplify_params: bool) - -> Option +pub fn simplify_type<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + ty: Ty, + can_simplify_params: bool) + -> Option { match ty.sty { ty::TyBool => Some(BoolSimplifiedType), diff --git a/src/librustc/ty/flags.rs b/src/librustc/ty/flags.rs index f3dcccedc5..a1da3017fc 100644 --- a/src/librustc/ty/flags.rs +++ b/src/librustc/ty/flags.rs @@ -90,9 +90,15 @@ impl FlagComputation { self.add_tys(&substs.upvar_tys); } - &ty::TyInfer(_) => { + &ty::TyInfer(infer) => { self.add_flags(TypeFlags::HAS_LOCAL_NAMES); // it might, right? - self.add_flags(TypeFlags::HAS_TY_INFER) + self.add_flags(TypeFlags::HAS_TY_INFER); + match infer { + ty::FreshTy(_) | + ty::FreshIntTy(_) | + ty::FreshFloatTy(_) => {} + _ => self.add_flags(TypeFlags::KEEP_IN_LOCAL_TCX) + } } &ty::TyEnum(_, substs) | &ty::TyStruct(_, substs) => { @@ -171,7 +177,10 @@ impl FlagComputation { fn add_region(&mut self, r: ty::Region) { match r { ty::ReVar(..) | - ty::ReSkolemized(..) => { self.add_flags(TypeFlags::HAS_RE_INFER); } + ty::ReSkolemized(..) 
=> { + self.add_flags(TypeFlags::HAS_RE_INFER); + self.add_flags(TypeFlags::KEEP_IN_LOCAL_TCX); + } ty::ReLateBound(debruijn, _) => { self.add_depth(debruijn.depth); } ty::ReEarlyBound(..) => { self.add_flags(TypeFlags::HAS_RE_EARLY_BOUND); } ty::ReStatic => {} diff --git a/src/librustc/ty/fold.rs b/src/librustc/ty/fold.rs index 54223e16e1..4a14185b6e 100644 --- a/src/librustc/ty/fold.rs +++ b/src/librustc/ty/fold.rs @@ -50,8 +50,8 @@ use util::nodemap::{FnvHashMap, FnvHashSet}; /// The TypeFoldable trait is implemented for every type that can be folded. /// Basically, every type that has a corresponding method in TypeFolder. pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { - fn super_fold_with>(&self, folder: &mut F) -> Self; - fn fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self; + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { self.super_fold_with(folder) } @@ -113,8 +113,8 @@ pub trait TypeFoldable<'tcx>: fmt::Debug + Clone { /// default implementation that does an "identity" fold. Within each /// identity fold, it should invoke `foo.fold_with(self)` to fold each /// sub-item. -pub trait TypeFolder<'tcx> : Sized { - fn tcx<'a>(&'a self) -> &'a TyCtxt<'tcx>; +pub trait TypeFolder<'gcx: 'tcx, 'tcx> : Sized { + fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx>; fn fold_binder(&mut self, t: &Binder) -> Binder where T : TypeFoldable<'tcx> @@ -139,8 +139,8 @@ pub trait TypeFolder<'tcx> : Sized { } fn fold_substs(&mut self, - substs: &subst::Substs<'tcx>) - -> subst::Substs<'tcx> { + substs: &'tcx subst::Substs<'tcx>) + -> &'tcx subst::Substs<'tcx> { substs.super_fold_with(self) } @@ -157,8 +157,8 @@ pub trait TypeFolder<'tcx> : Sized { } fn fold_bare_fn_ty(&mut self, - fty: &ty::BareFnTy<'tcx>) - -> ty::BareFnTy<'tcx> + fty: &'tcx ty::BareFnTy<'tcx>) + -> &'tcx ty::BareFnTy<'tcx> { fty.super_fold_with(self) } @@ -201,15 +201,17 @@ pub trait TypeVisitor<'tcx> : Sized { /////////////////////////////////////////////////////////////////////////// // Some sample folders -pub struct BottomUpFolder<'a, 'tcx: 'a, F> where F: FnMut(Ty<'tcx>) -> Ty<'tcx> { - pub tcx: &'a TyCtxt<'tcx>, +pub struct BottomUpFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a, F> + where F: FnMut(Ty<'tcx>) -> Ty<'tcx> +{ + pub tcx: TyCtxt<'a, 'gcx, 'tcx>, pub fldop: F, } -impl<'a, 'tcx, F> TypeFolder<'tcx> for BottomUpFolder<'a, 'tcx, F> where - F: FnMut(Ty<'tcx>) -> Ty<'tcx>, +impl<'a, 'gcx, 'tcx, F> TypeFolder<'gcx, 'tcx> for BottomUpFolder<'a, 'gcx, 'tcx, F> + where F: FnMut(Ty<'tcx>) -> Ty<'tcx>, { - fn tcx(&self) -> &TyCtxt<'tcx> { self.tcx } + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { let t1 = ty.super_fold_with(self); @@ -220,10 +222,10 @@ impl<'a, 'tcx, F> TypeFolder<'tcx> for BottomUpFolder<'a, 'tcx, F> where /////////////////////////////////////////////////////////////////////////// // Region folder -impl<'tcx> TyCtxt<'tcx> { +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Collects the free and escaping regions in `value` into `region_set`. Returns /// whether any late-bound regions were skipped - pub fn collect_regions(&self, + pub fn collect_regions(self, value: &T, region_set: &mut FnvHashSet) -> bool @@ -238,7 +240,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Folds the escaping and free regions in `value` using `f`, and /// sets `skipped_regions` to true if any late-bound region was found /// and skipped. 
- pub fn fold_regions(&self, + pub fn fold_regions(self, value: &T, skipped_regions: &mut bool, mut f: F) @@ -259,17 +261,17 @@ impl<'tcx> TyCtxt<'tcx> { /// visited by this folder; only regions that occur free will be /// visited by `fld_r`. -pub struct RegionFolder<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, +pub struct RegionFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx>, skipped_regions: &'a mut bool, current_depth: u32, fld_r: &'a mut (FnMut(ty::Region, u32) -> ty::Region + 'a), } -impl<'a, 'tcx> RegionFolder<'a, 'tcx> { - pub fn new(tcx: &'a TyCtxt<'tcx>, +impl<'a, 'gcx, 'tcx> RegionFolder<'a, 'gcx, 'tcx> { + pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, skipped_regions: &'a mut bool, - fld_r: &'a mut F) -> RegionFolder<'a, 'tcx> + fld_r: &'a mut F) -> RegionFolder<'a, 'gcx, 'tcx> where F : FnMut(ty::Region, u32) -> ty::Region { RegionFolder { @@ -281,9 +283,8 @@ impl<'a, 'tcx> RegionFolder<'a, 'tcx> { } } -impl<'a, 'tcx> TypeFolder<'tcx> for RegionFolder<'a, 'tcx> -{ - fn tcx(&self) -> &TyCtxt<'tcx> { self.tcx } +impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionFolder<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } fn fold_binder>(&mut self, t: &ty::Binder) -> ty::Binder { self.current_depth += 1; @@ -314,22 +315,21 @@ impl<'a, 'tcx> TypeFolder<'tcx> for RegionFolder<'a, 'tcx> // Replaces the escaping regions in a type. -struct RegionReplacer<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, +struct RegionReplacer<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx>, current_depth: u32, fld_r: &'a mut (FnMut(ty::BoundRegion) -> ty::Region + 'a), map: FnvHashMap } -impl<'tcx> TyCtxt<'tcx> { - pub fn replace_late_bound_regions(&self, +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn replace_late_bound_regions(self, value: &Binder, mut f: F) -> (T, FnvHashMap) where F : FnMut(ty::BoundRegion) -> ty::Region, T : TypeFoldable<'tcx>, { - debug!("replace_late_bound_regions({:?})", value); let mut replacer = RegionReplacer::new(self, &mut f); let result = value.skip_binder().fold_with(&mut replacer); (result, replacer.map) @@ -338,7 +338,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Replace any late-bound regions bound in `value` with free variants attached to scope-id /// `scope_id`. - pub fn liberate_late_bound_regions(&self, + pub fn liberate_late_bound_regions(self, all_outlive_scope: region::CodeExtent, value: &Binder) -> T @@ -351,7 +351,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Flattens two binding levels into one. So `for<'a> for<'b> Foo` /// becomes `for<'a,'b> Foo`. - pub fn flatten_late_bound_regions(&self, bound2_value: &Binder>) + pub fn flatten_late_bound_regions(self, bound2_value: &Binder>) -> Binder where T: TypeFoldable<'tcx> { @@ -372,7 +372,7 @@ impl<'tcx> TyCtxt<'tcx> { Binder(value) } - pub fn no_late_bound_regions(&self, value: &Binder) -> Option + pub fn no_late_bound_regions(self, value: &Binder) -> Option where T : TypeFoldable<'tcx> { if value.0.has_escaping_regions() { @@ -382,9 +382,38 @@ impl<'tcx> TyCtxt<'tcx> { } } + /// Returns a set of all late-bound regions that are constrained + /// by `value`, meaning that if we instantiate those LBR with + /// variables and equate `value` with something else, those + /// variables will also be equated. + pub fn collect_constrained_late_bound_regions(&self, value: &Binder) + -> FnvHashSet + where T : TypeFoldable<'tcx> + { + self.collect_late_bound_regions(value, true) + } + + /// Returns a set of all late-bound regions that appear in `value` anywhere. 
+ pub fn collect_referenced_late_bound_regions(&self, value: &Binder) + -> FnvHashSet + where T : TypeFoldable<'tcx> + { + self.collect_late_bound_regions(value, false) + } + + fn collect_late_bound_regions(&self, value: &Binder, just_constraint: bool) + -> FnvHashSet + where T : TypeFoldable<'tcx> + { + let mut collector = LateBoundRegionsCollector::new(just_constraint); + let result = value.skip_binder().visit_with(&mut collector); + assert!(!result); // should never have stopped early + collector.regions + } + /// Replace any late-bound regions bound in `value` with `'static`. Useful in trans but also /// method lookup and a few other places where precise region relationships are not required. - pub fn erase_late_bound_regions(&self, value: &Binder) -> T + pub fn erase_late_bound_regions(self, value: &Binder) -> T where T : TypeFoldable<'tcx> { self.replace_late_bound_regions(value, |_| ty::ReStatic).0 @@ -398,7 +427,7 @@ impl<'tcx> TyCtxt<'tcx> { /// `FnSig`s or `TraitRef`s which are equivalent up to region naming will become /// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and /// `for<'a, 'b> fn(&'b isize, &'a isize)` will become identical after anonymization. - pub fn anonymize_late_bound_regions(&self, sig: &Binder) -> Binder + pub fn anonymize_late_bound_regions(self, sig: &Binder) -> Binder where T : TypeFoldable<'tcx>, { let mut counter = 0; @@ -409,8 +438,9 @@ impl<'tcx> TyCtxt<'tcx> { } } -impl<'a, 'tcx> RegionReplacer<'a, 'tcx> { - fn new(tcx: &'a TyCtxt<'tcx>, fld_r: &'a mut F) -> RegionReplacer<'a, 'tcx> +impl<'a, 'gcx, 'tcx> RegionReplacer<'a, 'gcx, 'tcx> { + fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, fld_r: &'a mut F) + -> RegionReplacer<'a, 'gcx, 'tcx> where F : FnMut(ty::BoundRegion) -> ty::Region { RegionReplacer { @@ -422,9 +452,8 @@ impl<'a, 'tcx> RegionReplacer<'a, 'tcx> { } } -impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx> -{ - fn tcx(&self) -> &TyCtxt<'tcx> { self.tcx } +impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionReplacer<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } fn fold_binder>(&mut self, t: &ty::Binder) -> ty::Binder { self.current_depth += 1; @@ -444,8 +473,6 @@ impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx> fn fold_region(&mut self, r: ty::Region) -> ty::Region { match r { ty::ReLateBound(debruijn, br) if debruijn.depth == self.current_depth => { - debug!("RegionReplacer.fold_region({:?}) folding region (current_depth={})", - r, self.current_depth); let fld_r = &mut self.fld_r; let region = *self.map.entry(br).or_insert_with(|| fld_r(br)); if let ty::ReLateBound(debruijn1, br) = region { @@ -466,11 +493,11 @@ impl<'a, 'tcx> TypeFolder<'tcx> for RegionReplacer<'a, 'tcx> /////////////////////////////////////////////////////////////////////////// // Region eraser -impl<'tcx> TyCtxt<'tcx> { +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Returns an equivalent value with all free regions removed (note /// that late-bound regions remain, because they are important for /// subtyping, but they are anonymized and normalized as well).. 
- pub fn erase_regions(&self, value: &T) -> T + pub fn erase_regions(self, value: &T) -> T where T : TypeFoldable<'tcx> { let value1 = value.fold_with(&mut RegionEraser(self)); @@ -478,10 +505,10 @@ impl<'tcx> TyCtxt<'tcx> { value, value1); return value1; - struct RegionEraser<'a, 'tcx: 'a>(&'a TyCtxt<'tcx>); + struct RegionEraser<'a, 'gcx: 'a+'tcx, 'tcx: 'a>(TyCtxt<'a, 'gcx, 'tcx>); - impl<'a, 'tcx> TypeFolder<'tcx> for RegionEraser<'a, 'tcx> { - fn tcx(&self) -> &TyCtxt<'tcx> { self.0 } + impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for RegionEraser<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.0 } fn fold_ty(&mut self, ty: Ty<'tcx>) -> Ty<'tcx> { match self.tcx().normalized_cache.borrow().get(&ty).cloned() { @@ -489,9 +516,15 @@ impl<'tcx> TyCtxt<'tcx> { Some(u) => return u } - let t_norm = ty.super_fold_with(self); - self.tcx().normalized_cache.borrow_mut().insert(ty, t_norm); - return t_norm; + // FIXME(eddyb) should local contexts have a cache too? + if let Some(ty_lifted) = self.tcx().lift_to_global(&ty) { + let tcx = self.tcx().global_tcx(); + let t_norm = ty_lifted.super_fold_with(&mut RegionEraser(tcx)); + tcx.normalized_cache.borrow_mut().insert(ty_lifted, t_norm); + t_norm + } else { + ty.super_fold_with(self) + } } fn fold_binder(&mut self, t: &ty::Binder) -> ty::Binder @@ -515,13 +548,6 @@ impl<'tcx> TyCtxt<'tcx> { _ => ty::ReStatic } } - - fn fold_substs(&mut self, - substs: &subst::Substs<'tcx>) - -> subst::Substs<'tcx> { - subst::Substs { regions: substs.regions.fold_with(self), - types: substs.types.fold_with(self) } - } } } } @@ -546,8 +572,10 @@ pub fn shift_region(region: ty::Region, amount: u32) -> ty::Region { } } -pub fn shift_regions<'tcx, T:TypeFoldable<'tcx>>(tcx: &TyCtxt<'tcx>, - amount: u32, value: &T) -> T { +pub fn shift_regions<'a, 'gcx, 'tcx, T>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + amount: u32, value: &T) -> T + where T: TypeFoldable<'tcx> +{ debug!("shift_regions(value={:?}, amount={})", value, amount); @@ -626,3 +654,54 @@ impl<'tcx> TypeVisitor<'tcx> for HasTypeFlagsVisitor { false } } + +/// Collects all the late-bound regions it finds into a hash set. +struct LateBoundRegionsCollector { + current_depth: u32, + regions: FnvHashSet, + just_constrained: bool, +} + +impl LateBoundRegionsCollector { + fn new(just_constrained: bool) -> Self { + LateBoundRegionsCollector { + current_depth: 1, + regions: FnvHashSet(), + just_constrained: just_constrained, + } + } +} + +impl<'tcx> TypeVisitor<'tcx> for LateBoundRegionsCollector { + fn visit_binder>(&mut self, t: &Binder) -> bool { + self.current_depth += 1; + let result = t.super_visit_with(self); + self.current_depth -= 1; + result + } + + fn visit_ty(&mut self, t: Ty<'tcx>) -> bool { + // if we are only looking for "constrained" region, we have to + // ignore the inputs to a projection, as they may not appear + // in the normalized form + if self.just_constrained { + match t.sty { + ty::TyProjection(..) 
=> { return false; } + _ => { } + } + } + + t.super_visit_with(self) + } + + fn visit_region(&mut self, r: ty::Region) -> bool { + match r { + ty::ReLateBound(debruijn, br) if debruijn.depth == self.current_depth => { + self.regions.insert(br); + } + _ => { } + } + false + } +} + diff --git a/src/librustc/ty/item_path.rs b/src/librustc/ty/item_path.rs index 5c1e19aee7..5246c6739d 100644 --- a/src/librustc/ty/item_path.rs +++ b/src/librustc/ty/item_path.rs @@ -14,24 +14,24 @@ use hir::def_id::{DefId, CRATE_DEF_INDEX}; use ty::{self, Ty, TyCtxt}; use syntax::ast; -impl<'tcx> TyCtxt<'tcx> { +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { /// Returns a string identifying this def-id. This string is /// suitable for user output. It is relative to the current crate /// root. - pub fn item_path_str(&self, def_id: DefId) -> String { + pub fn item_path_str(self, def_id: DefId) -> String { let mut buffer = LocalPathBuffer::new(RootMode::Local); self.push_item_path(&mut buffer, def_id); buffer.into_string() } /// Returns a string identifying this local node-id. - pub fn node_path_str(&self, id: ast::NodeId) -> String { + pub fn node_path_str(self, id: ast::NodeId) -> String { self.item_path_str(self.map.local_def_id(id)) } /// Returns a string identifying this def-id. This string is /// suitable for user output. It always begins with a crate identifier. - pub fn absolute_item_path_str(&self, def_id: DefId) -> String { + pub fn absolute_item_path_str(self, def_id: DefId) -> String { let mut buffer = LocalPathBuffer::new(RootMode::Absolute); self.push_item_path(&mut buffer, def_id); buffer.into_string() @@ -40,7 +40,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Returns the "path" to a particular crate. This can proceed in /// various ways, depending on the `root_mode` of the `buffer`. /// (See `RootMode` enum for more details.) - pub fn push_krate_path(&self, buffer: &mut T, cnum: ast::CrateNum) + pub fn push_krate_path(self, buffer: &mut T, cnum: ast::CrateNum) where T: ItemPathBuffer { match *buffer.root_mode() { @@ -83,7 +83,7 @@ impl<'tcx> TyCtxt<'tcx> { /// If possible, this pushes a global path resolving to `external_def_id` that is visible /// from at least one local module and returns true. If the crate defining `external_def_id` is /// declared with an `extern crate`, the path is guarenteed to use the `extern crate`. - pub fn try_push_visible_item_path(&self, buffer: &mut T, external_def_id: DefId) -> bool + pub fn try_push_visible_item_path(self, buffer: &mut T, external_def_id: DefId) -> bool where T: ItemPathBuffer { let visible_parent_map = self.sess.cstore.visible_parent_map(); @@ -116,7 +116,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn push_item_path(&self, buffer: &mut T, def_id: DefId) + pub fn push_item_path(self, buffer: &mut T, def_id: DefId) where T: ItemPathBuffer { match *buffer.root_mode() { @@ -147,6 +147,7 @@ impl<'tcx> TyCtxt<'tcx> { data @ DefPathData::Misc | data @ DefPathData::TypeNs(..) | data @ DefPathData::ValueNs(..) | + data @ DefPathData::Module(..) | data @ DefPathData::TypeParam(..) | data @ DefPathData::LifetimeDef(..) | data @ DefPathData::EnumVariant(..) | @@ -163,7 +164,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - fn push_impl_path(&self, + fn push_impl_path(self, buffer: &mut T, impl_def_id: DefId) where T: ItemPathBuffer @@ -189,7 +190,7 @@ impl<'tcx> TyCtxt<'tcx> { // the impl is either in the same module as the self-type or // as the trait. 
let self_ty = self.lookup_item_type(impl_def_id).ty; - let in_self_mod = match self.characteristic_def_id_of_type(self_ty) { + let in_self_mod = match characteristic_def_id_of_type(self_ty) { None => false, Some(ty_def_id) => self.parent_def_id(ty_def_id) == Some(parent_def_id), }; @@ -252,7 +253,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - fn push_impl_path_fallback(&self, + fn push_impl_path_fallback(self, buffer: &mut T, impl_def_id: DefId) where T: ItemPathBuffer @@ -268,38 +269,6 @@ impl<'tcx> TyCtxt<'tcx> { buffer.push(&format!("", span_str)); } - /// As a heuristic, when we see an impl, if we see that the - /// 'self-type' is a type defined in the same module as the impl, - /// we can omit including the path to the impl itself. This - /// function tries to find a "characteristic def-id" for a - /// type. It's just a heuristic so it makes some questionable - /// decisions and we may want to adjust it later. - fn characteristic_def_id_of_type(&self, ty: Ty<'tcx>) -> Option { - match ty.sty { - ty::TyStruct(adt_def, _) | - ty::TyEnum(adt_def, _) => - Some(adt_def.did), - - ty::TyTrait(ref data) => - Some(data.principal_def_id()), - - ty::TyBox(subty) => - self.characteristic_def_id_of_type(subty), - - ty::TyRawPtr(mt) | - ty::TyRef(_, mt) => - self.characteristic_def_id_of_type(mt.ty), - - ty::TyTuple(ref tys) => - tys.iter() - .filter_map(|ty| self.characteristic_def_id_of_type(ty)) - .next(), - - _ => - None - } - } - /// Returns the def-id of `def_id`'s parent in the def tree. If /// this returns `None`, then `def_id` represents a crate root or /// inlined root. @@ -309,6 +278,47 @@ impl<'tcx> TyCtxt<'tcx> { } } +/// As a heuristic, when we see an impl, if we see that the +/// 'self-type' is a type defined in the same module as the impl, +/// we can omit including the path to the impl itself. This +/// function tries to find a "characteristic def-id" for a +/// type. It's just a heuristic so it makes some questionable +/// decisions and we may want to adjust it later. +pub fn characteristic_def_id_of_type(ty: Ty) -> Option { + match ty.sty { + ty::TyStruct(adt_def, _) | + ty::TyEnum(adt_def, _) => Some(adt_def.did), + + ty::TyTrait(ref data) => Some(data.principal_def_id()), + + ty::TyArray(subty, _) | + ty::TySlice(subty) | + ty::TyBox(subty) => characteristic_def_id_of_type(subty), + + ty::TyRawPtr(mt) | + ty::TyRef(_, mt) => characteristic_def_id_of_type(mt.ty), + + ty::TyTuple(ref tys) => tys.iter() + .filter_map(|ty| characteristic_def_id_of_type(ty)) + .next(), + + ty::TyFnDef(def_id, _, _) | + ty::TyClosure(def_id, _) => Some(def_id), + + ty::TyBool | + ty::TyChar | + ty::TyInt(_) | + ty::TyUint(_) | + ty::TyStr | + ty::TyFnPtr(_) | + ty::TyProjection(_) | + ty::TyParam(_) | + ty::TyInfer(_) | + ty::TyError | + ty::TyFloat(_) => None, + } +} + /// Unifying Trait for different kinds of item paths we might /// construct. The basic interface is that components get pushed: the /// instance can also customize how we handle the root of a crate. diff --git a/src/librustc/ty/layout.rs b/src/librustc/ty/layout.rs index 3ea691b4dc..82a3b0b8db 100644 --- a/src/librustc/ty/layout.rs +++ b/src/librustc/ty/layout.rs @@ -12,7 +12,7 @@ pub use self::Integer::*; pub use self::Layout::*; pub use self::Primitive::*; -use infer::{InferCtxt, drain_fulfillment_cx_or_panic}; +use infer::InferCtxt; use session::Session; use traits; use ty::{self, Ty, TyCtxt, TypeFoldable}; @@ -364,7 +364,7 @@ impl Integer { /// signed discriminant range and #[repr] attribute. 
/// N.B.: u64 values above i64::MAX will be treated as signed, but /// that shouldn't affect anything, other than maybe debuginfo. - pub fn repr_discr(tcx: &TyCtxt, hint: attr::ReprAttr, min: i64, max: i64) + pub fn repr_discr(tcx: TyCtxt, hint: attr::ReprAttr, min: i64, max: i64) -> (Integer, bool) { // Theoretically, negative values could be larger in unsigned representation // than the unsigned representation of the signed minimum. However, if there @@ -466,7 +466,7 @@ pub struct Struct { pub offset_after_field: Vec } -impl Struct { +impl<'a, 'gcx, 'tcx> Struct { pub fn new(dl: &TargetDataLayout, packed: bool) -> Struct { Struct { align: if packed { dl.i8_align } else { dl.aggregate_align }, @@ -477,11 +477,11 @@ impl Struct { } /// Extend the Struct with more fields. - pub fn extend<'a, 'tcx, I>(&mut self, dl: &TargetDataLayout, - fields: I, - scapegoat: Ty<'tcx>) - -> Result<(), LayoutError<'tcx>> - where I: Iterator>> { + pub fn extend(&mut self, dl: &TargetDataLayout, + fields: I, + scapegoat: Ty<'gcx>) + -> Result<(), LayoutError<'gcx>> + where I: Iterator>> { self.offset_after_field.reserve(fields.size_hint().0); for field in fields { @@ -527,9 +527,9 @@ impl Struct { } /// Determine whether a structure would be zero-sized, given its fields. - pub fn would_be_zero_sized<'a, 'tcx, I>(dl: &TargetDataLayout, fields: I) - -> Result> - where I: Iterator>> { + pub fn would_be_zero_sized(dl: &TargetDataLayout, fields: I) + -> Result> + where I: Iterator>> { for field in fields { let field = field?; if field.is_unsized() || field.size(dl).bytes() > 0 { @@ -542,10 +542,10 @@ impl Struct { /// Find the path leading to a non-zero leaf field, starting from /// the given type and recursing through aggregates. // FIXME(eddyb) track value ranges and traverse already optimized enums. - pub fn non_zero_field_in_type<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - ty: Ty<'tcx>) - -> Result, LayoutError<'tcx>> { - let tcx = infcx.tcx; + pub fn non_zero_field_in_type(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + ty: Ty<'gcx>) + -> Result, LayoutError<'gcx>> { + let tcx = infcx.tcx.global_tcx(); match (ty.layout(infcx)?, &ty.sty) { (&Scalar { non_zero: true, .. }, _) => Ok(Some(vec![])), (&FatPointer { non_zero: true, .. }, _) => { @@ -581,9 +581,9 @@ impl Struct { // Perhaps one of the upvars of this closure is non-zero // Let's recurse and find out! - (_, &ty::TyClosure(_, box ty::ClosureSubsts { upvar_tys: ref tys, .. })) | + (_, &ty::TyClosure(_, ty::ClosureSubsts { upvar_tys: tys, .. })) | // Can we use one of the fields in this tuple? - (_, &ty::TyTuple(ref tys)) => { + (_, &ty::TyTuple(tys)) => { Struct::non_zero_field_path(infcx, tys.iter().cloned()) } @@ -600,10 +600,10 @@ impl Struct { /// Find the path leading to a non-zero leaf field, starting from /// the given set of fields and recursing through aggregates. - pub fn non_zero_field_path<'a, 'tcx, I>(infcx: &InferCtxt<'a, 'tcx>, - fields: I) - -> Result, LayoutError<'tcx>> - where I: Iterator> { + pub fn non_zero_field_path(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + fields: I) + -> Result, LayoutError<'gcx>> + where I: Iterator> { for (i, ty) in fields.enumerate() { if let Some(mut path) = Struct::non_zero_field_in_type(infcx, ty)? { path.push(i as u32); @@ -736,9 +736,9 @@ impl<'tcx> fmt::Display for LayoutError<'tcx> { } /// Helper function for normalizing associated types in an inference context. 
-fn normalize_associated_type<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, - ty: Ty<'tcx>) - -> Ty<'tcx> { +fn normalize_associated_type<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + ty: Ty<'gcx>) + -> Ty<'gcx> { if !ty.has_projection_types() { return ty; } @@ -754,14 +754,14 @@ fn normalize_associated_type<'a, 'tcx>(infcx: &InferCtxt<'a, 'tcx>, fulfill_cx.register_predicate_obligation(infcx, obligation); } - drain_fulfillment_cx_or_panic(DUMMY_SP, infcx, &mut fulfill_cx, &result) + infcx.drain_fulfillment_cx_or_panic(DUMMY_SP, &mut fulfill_cx, &result) } -impl Layout { - pub fn compute_uncached<'a, 'tcx>(ty: Ty<'tcx>, - infcx: &InferCtxt<'a, 'tcx>) - -> Result> { - let tcx = infcx.tcx; +impl<'a, 'gcx, 'tcx> Layout { + pub fn compute_uncached(ty: Ty<'gcx>, + infcx: &InferCtxt<'a, 'gcx, 'tcx>) + -> Result> { + let tcx = infcx.tcx.global_tcx(); let dl = &tcx.data_layout; assert!(!ty.has_infer_types()); @@ -790,7 +790,7 @@ impl Layout { ty::TyRef(_, ty::TypeAndMut { ty: pointee, .. }) | ty::TyRawPtr(ty::TypeAndMut { ty: pointee, .. }) => { let non_zero = !ty.is_unsafe_ptr(); - if pointee.is_sized(&infcx.parameter_environment, DUMMY_SP) { + if pointee.is_sized(tcx, &infcx.parameter_environment, DUMMY_SP) { Scalar { value: Pointer, non_zero: non_zero } } else { let unsized_part = tcx.struct_tail(pointee); @@ -844,8 +844,8 @@ impl Layout { } // Tuples. - ty::TyClosure(_, box ty::ClosureSubsts { upvar_tys: ref tys, .. }) | - ty::TyTuple(ref tys) => { + ty::TyClosure(_, ty::ClosureSubsts { upvar_tys: tys, .. }) | + ty::TyTuple(tys) => { let mut st = Struct::new(dl, false); st.extend(dl, tys.iter().map(|ty| ty.layout(infcx)), ty)?; Univariant { variant: st, non_zero: false } @@ -883,7 +883,7 @@ impl Layout { // the unsized field. Several other pieces of code assume that the unsized // field is definitely the last one. if def.dtor_kind().has_drop_flag() && - ty.is_sized(&infcx.parameter_environment, DUMMY_SP) { + ty.is_sized(tcx, &infcx.parameter_environment, DUMMY_SP) { st.extend(dl, Some(Ok(&Scalar { value: Int(I8), non_zero: false @@ -1220,10 +1220,10 @@ pub enum SizeSkeleton<'tcx> { } } -impl<'tcx> SizeSkeleton<'tcx> { - pub fn compute<'a>(ty: Ty<'tcx>, infcx: &InferCtxt<'a, 'tcx>) - -> Result, LayoutError<'tcx>> { - let tcx = infcx.tcx; +impl<'a, 'gcx, 'tcx> SizeSkeleton<'gcx> { + pub fn compute(ty: Ty<'gcx>, infcx: &InferCtxt<'a, 'gcx, 'tcx>) + -> Result, LayoutError<'gcx>> { + let tcx = infcx.tcx.global_tcx(); assert!(!ty.has_infer_types()); // First try computing a static layout. 
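Note (illustrative sketch, not part of the patch): the hunks above and below all convert context methods from `&self` to `self` because `TyCtxt<'a, 'gcx, 'tcx>` becomes a small Copy handle wrapping a `&GlobalCtxt` plus a `&CtxtInterners`. The sketch below models that pattern in isolation; the names `MiniGlobal`, `MiniInterners`, and `MiniCtxt` are invented for the example and do not exist in rustc.

use std::cell::RefCell;

struct MiniGlobal {
    crate_name: String,
}

struct MiniInterners {
    strings: RefCell<Vec<String>>,
}

// The handle only holds two references, so it is Copy and cheap to pass by value.
#[derive(Clone, Copy)]
struct MiniCtxt<'a> {
    gcx: &'a MiniGlobal,
    interners: &'a MiniInterners,
}

impl<'a> MiniCtxt<'a> {
    // Interning through interior mutability: `self` is consumed, but that only
    // copies two pointers.
    fn intern_str(self, s: &str) -> usize {
        let mut v = self.interners.strings.borrow_mut();
        if let Some(i) = v.iter().position(|x| x.as_str() == s) {
            return i;
        }
        v.push(s.to_owned());
        v.len() - 1
    }

    fn describe(self) -> String {
        format!("ctxt for crate `{}`", self.gcx.crate_name)
    }
}

fn main() {
    let gcx = MiniGlobal { crate_name: "demo".to_owned() };
    let interners = MiniInterners { strings: RefCell::new(Vec::new()) };
    let tcx = MiniCtxt { gcx: &gcx, interners: &interners };
    assert_eq!(tcx.intern_str("i32"), tcx.intern_str("i32")); // interned once
    println!("{}", tcx.describe());
}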
diff --git a/src/librustc/ty/mod.rs b/src/librustc/ty/mod.rs index 76e18565d6..dfb4ec7392 100644 --- a/src/librustc/ty/mod.rs +++ b/src/librustc/ty/mod.rs @@ -21,14 +21,13 @@ pub use self::fold::TypeFoldable; use dep_graph::{self, DepNode}; use hir::map as ast_map; use middle; -use middle::cstore::{self, CrateStore, LOCAL_CRATE}; +use middle::cstore::{self, LOCAL_CRATE}; use hir::def::{self, Def, ExportMap}; use hir::def_id::DefId; use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem}; -use middle::region::{CodeExtent}; +use middle::region::{CodeExtent, ROOT_CODE_EXTENT}; use traits; use ty; -use ty::fold::TypeFolder; use ty::subst::{Subst, Substs, VecPerParamSpace}; use ty::walk::TypeWalker; use util::common::MemoizationMap; @@ -36,7 +35,7 @@ use util::nodemap::NodeSet; use util::nodemap::FnvHashMap; use serialize::{Encodable, Encoder, Decodable, Decoder}; -use std::borrow::{Borrow, Cow}; +use std::borrow::Cow; use std::cell::Cell; use std::hash::{Hash, Hasher}; use std::iter; @@ -109,6 +108,7 @@ pub type Disr = ConstInt; /// The complete set of all analyses described in this module. This is /// produced by the driver and fed to trans and later passes. +#[derive(Clone)] pub struct CrateAnalysis<'a> { pub export_map: ExportMap, pub access_levels: middle::privacy::AccessLevels, @@ -165,10 +165,10 @@ pub struct ImplHeader<'tcx> { pub predicates: Vec>, } -impl<'tcx> ImplHeader<'tcx> { - pub fn with_fresh_ty_vars<'a>(selcx: &mut traits::SelectionContext<'a, 'tcx>, - impl_def_id: DefId) - -> ImplHeader<'tcx> +impl<'a, 'gcx, 'tcx> ImplHeader<'tcx> { + pub fn with_fresh_ty_vars(selcx: &mut traits::SelectionContext<'a, 'gcx, 'tcx>, + impl_def_id: DefId) + -> ImplHeader<'tcx> { let tcx = selcx.tcx(); let impl_generics = tcx.lookup_item_type(impl_def_id).generics; @@ -284,16 +284,43 @@ pub enum Visibility { PrivateExternal, } +pub trait NodeIdTree { + fn is_descendant_of(&self, node: NodeId, ancestor: NodeId) -> bool; +} + +impl<'a> NodeIdTree for ast_map::Map<'a> { + fn is_descendant_of(&self, node: NodeId, ancestor: NodeId) -> bool { + let mut node_ancestor = node; + while node_ancestor != ancestor { + let node_ancestor_parent = self.get_module_parent(node_ancestor); + if node_ancestor_parent == node_ancestor { + return false; + } + node_ancestor = node_ancestor_parent; + } + true + } +} + impl Visibility { - pub fn from_hir(visibility: &hir::Visibility, id: NodeId, tcx: &TyCtxt) -> Self { + pub fn from_hir(visibility: &hir::Visibility, id: NodeId, tcx: TyCtxt) -> Self { match *visibility { hir::Public => Visibility::Public, + hir::Visibility::Crate => Visibility::Restricted(ast::CRATE_NODE_ID), + hir::Visibility::Restricted { id, .. } => match tcx.def_map.borrow().get(&id) { + Some(resolution) => Visibility::Restricted({ + tcx.map.as_local_node_id(resolution.base_def.def_id()).unwrap() + }), + // If there is no resolution, `resolve` will have already reported an error, so + // assume that the visibility is public to avoid reporting more privacy errors. + None => Visibility::Public, + }, hir::Inherited => Visibility::Restricted(tcx.map.get_module_parent(id)), } } /// Returns true if an item with this visibility is accessible from the given block. - pub fn is_accessible_from(self, block: NodeId, map: &ast_map::Map) -> bool { + pub fn is_accessible_from(self, block: NodeId, tree: &T) -> bool { let restriction = match self { // Public items are visible everywhere. 
Visibility::Public => return true, @@ -303,24 +330,18 @@ impl Visibility { Visibility::Restricted(module) => module, }; - let mut block_ancestor = block; - loop { - if block_ancestor == restriction { return true } - let block_ancestor_parent = map.get_module_parent(block_ancestor); - if block_ancestor_parent == block_ancestor { return false } - block_ancestor = block_ancestor_parent; - } + tree.is_descendant_of(block, restriction) } /// Returns true if this visibility is at least as accessible as the given visibility - pub fn is_at_least(self, vis: Visibility, map: &ast_map::Map) -> bool { + pub fn is_at_least(self, vis: Visibility, tree: &T) -> bool { let vis_restriction = match vis { Visibility::Public => return self == Visibility::Public, Visibility::PrivateExternal => return true, Visibility::Restricted(module) => module, }; - self.is_accessible_from(vis_restriction, map) + self.is_accessible_from(vis_restriction, tree) } } @@ -329,7 +350,7 @@ pub struct Method<'tcx> { pub name: Name, pub generics: Generics<'tcx>, pub predicates: GenericPredicates<'tcx>, - pub fty: BareFnTy<'tcx>, + pub fty: &'tcx BareFnTy<'tcx>, pub explicit_self: ExplicitSelfCategory, pub vis: Visibility, pub defaultness: hir::Defaultness, @@ -341,7 +362,7 @@ impl<'tcx> Method<'tcx> { pub fn new(name: Name, generics: ty::Generics<'tcx>, predicates: GenericPredicates<'tcx>, - fty: BareFnTy<'tcx>, + fty: &'tcx BareFnTy<'tcx>, explicit_self: ExplicitSelfCategory, vis: Visibility, defaultness: hir::Defaultness, @@ -503,6 +524,10 @@ bitflags! { // that are local to a particular fn const HAS_LOCAL_NAMES = 1 << 9, + // Present if the type belongs in a local type context. + // Only set for TyInfer other than Fresh. + const KEEP_IN_LOCAL_TCX = 1 << 10, + const NEEDS_SUBST = TypeFlags::HAS_PARAMS.bits | TypeFlags::HAS_SELF.bits | TypeFlags::HAS_RE_EARLY_BOUND.bits, @@ -519,7 +544,8 @@ bitflags! { TypeFlags::HAS_TY_ERR.bits | TypeFlags::HAS_PROJECTION.bits | TypeFlags::HAS_TY_CLOSURE.bits | - TypeFlags::HAS_LOCAL_NAMES.bits, + TypeFlags::HAS_LOCAL_NAMES.bits | + TypeFlags::KEEP_IN_LOCAL_TCX.bits, // Caches for type_is_sized, type_moves_by_default const SIZEDNESS_CACHED = 1 << 16, @@ -752,27 +778,28 @@ pub struct GenericPredicates<'tcx> { pub predicates: VecPerParamSpace>, } -impl<'tcx> GenericPredicates<'tcx> { +impl<'a, 'gcx, 'tcx> GenericPredicates<'tcx> { pub fn empty() -> GenericPredicates<'tcx> { GenericPredicates { predicates: VecPerParamSpace::empty(), } } - pub fn instantiate(&self, tcx: &TyCtxt<'tcx>, substs: &Substs<'tcx>) + pub fn instantiate(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &Substs<'tcx>) -> InstantiatedPredicates<'tcx> { InstantiatedPredicates { predicates: self.predicates.subst(tcx, substs), } } - pub fn instantiate_supertrait(&self, - tcx: &TyCtxt<'tcx>, + pub fn instantiate_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, poly_trait_ref: &ty::PolyTraitRef<'tcx>) -> InstantiatedPredicates<'tcx> { InstantiatedPredicates { - predicates: self.predicates.map(|pred| pred.subst_supertrait(tcx, poly_trait_ref)) + predicates: self.predicates.map(|pred| { + pred.subst_supertrait(tcx, poly_trait_ref) + }) } } } @@ -784,6 +811,9 @@ pub enum Predicate<'tcx> { /// would be the parameters in the `TypeSpace`. Trait(PolyTraitPredicate<'tcx>), + /// A predicate created by RFC1592 + Rfc1592(Box>), + /// where `T1 == T2`. Equate(PolyEquatePredicate<'tcx>), @@ -802,16 +832,20 @@ pub enum Predicate<'tcx> { /// trait must be object-safe ObjectSafe(DefId), + + /// No direct syntax. 
May be thought of as `where T : FnFoo<...>` for some 'TypeSpace' + /// substitutions `...` and T being a closure type. Satisfied (or refuted) once we know the + /// closure's kind. + ClosureKind(DefId, ClosureKind), } -impl<'tcx> Predicate<'tcx> { +impl<'a, 'gcx, 'tcx> Predicate<'tcx> { /// Performs a substitution suitable for going from a /// poly-trait-ref to supertraits that must hold if that /// poly-trait-ref holds. This is slightly different from a normal /// substitution in terms of what happens with bound regions. See /// lengthy comment below for details. - pub fn subst_supertrait(&self, - tcx: &TyCtxt<'tcx>, + pub fn subst_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, trait_ref: &ty::PolyTraitRef<'tcx>) -> ty::Predicate<'tcx> { @@ -879,6 +913,8 @@ impl<'tcx> Predicate<'tcx> { match *self { Predicate::Trait(ty::Binder(ref data)) => Predicate::Trait(ty::Binder(data.subst(tcx, substs))), + Predicate::Rfc1592(ref pi) => + Predicate::Rfc1592(Box::new(pi.subst_supertrait(tcx, trait_ref))), Predicate::Equate(ty::Binder(ref data)) => Predicate::Equate(ty::Binder(data.subst(tcx, substs))), Predicate::RegionOutlives(ty::Binder(ref data)) => @@ -891,6 +927,8 @@ impl<'tcx> Predicate<'tcx> { Predicate::WellFormed(data.subst(tcx, substs)), Predicate::ObjectSafe(trait_def_id) => Predicate::ObjectSafe(trait_def_id), + Predicate::ClosureKind(closure_def_id, kind) => + Predicate::ClosureKind(closure_def_id, kind), } } } @@ -1056,6 +1094,9 @@ impl<'tcx> Predicate<'tcx> { ty::Predicate::Trait(ref data) => { data.0.trait_ref.substs.types.as_slice().to_vec() } + ty::Predicate::Rfc1592(ref data) => { + return data.walk_tys() + } ty::Predicate::Equate(ty::Binder(ref data)) => { vec![data.0, data.1] } @@ -1078,6 +1119,9 @@ impl<'tcx> Predicate<'tcx> { ty::Predicate::ObjectSafe(_trait_def_id) => { vec![] } + ty::Predicate::ClosureKind(_closure_def_id, _kind) => { + vec![] + } }; // The only reason to collect into a vector here is that I was @@ -1093,11 +1137,13 @@ impl<'tcx> Predicate<'tcx> { Predicate::Trait(ref t) => { Some(t.to_poly_trait_ref()) } + Predicate::Rfc1592(..) | Predicate::Projection(..) | Predicate::Equate(..) | Predicate::RegionOutlives(..) | Predicate::WellFormed(..) | Predicate::ObjectSafe(..) | + Predicate::ClosureKind(..) | Predicate::TypeOutlives(..) => { None } @@ -1170,11 +1216,9 @@ impl<'tcx> TraitRef<'tcx> { /// future I hope to refine the representation of types so as to make /// more distinctions clearer. #[derive(Clone)] -pub struct ParameterEnvironment<'a, 'tcx:'a> { - pub tcx: &'a TyCtxt<'tcx>, - +pub struct ParameterEnvironment<'tcx> { /// See `construct_free_substs` for details. - pub free_substs: Substs<'tcx>, + pub free_substs: &'tcx Substs<'tcx>, /// Each type parameter has an implicit region bound that /// indicates it must outlive at least the function body (the user @@ -1187,13 +1231,6 @@ pub struct ParameterEnvironment<'a, 'tcx:'a> { /// into Obligations, and elaborated and normalized. pub caller_bounds: Vec>, - /// Caches the results of trait selection. This cache is used - /// for things that have to do with the parameters in scope. - pub selection_cache: traits::SelectionCache<'tcx>, - - /// Caches the results of trait evaluation. - pub evaluation_cache: traits::EvaluationCache<'tcx>, - /// Scope that is attached to free regions for this scope. This /// is usually the id of the fn body, but for more abstract scopes /// like structs we often use the node-id of the struct. 
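Note (illustrative sketch, not part of the patch): earlier in this mod.rs diff, `Visibility::is_accessible_from` is rewritten to delegate to the new `NodeIdTree::is_descendant_of` impl, which walks module parents upward. The sketch below shows that ancestor walk over a plain parent map; the map stands in for `ast_map::Map::get_module_parent`, and a node whose parent is itself is treated as a root.

use std::collections::HashMap;

fn is_descendant_of(parents: &HashMap<u32, u32>, node: u32, ancestor: u32) -> bool {
    let mut cur = node;
    while cur != ancestor {
        let parent = *parents.get(&cur).unwrap_or(&cur);
        if parent == cur {
            // reached a root without meeting `ancestor`
            return false;
        }
        cur = parent;
    }
    true
}

fn main() {
    // crate root (0) -> module a (1) -> module b (2)
    let parents: HashMap<u32, u32> = [(0, 0), (1, 0), (2, 1)].iter().cloned().collect();
    assert!(is_descendant_of(&parents, 2, 0));  // b is inside the crate root
    assert!(!is_descendant_of(&parents, 1, 2)); // a is not inside b
    println!("ok");
}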
@@ -1204,59 +1241,48 @@ pub struct ParameterEnvironment<'a, 'tcx:'a> { pub free_id_outlive: CodeExtent, } -impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> { +impl<'a, 'tcx> ParameterEnvironment<'tcx> { pub fn with_caller_bounds(&self, caller_bounds: Vec>) - -> ParameterEnvironment<'a,'tcx> + -> ParameterEnvironment<'tcx> { ParameterEnvironment { - tcx: self.tcx, - free_substs: self.free_substs.clone(), + free_substs: self.free_substs, implicit_region_bound: self.implicit_region_bound, caller_bounds: caller_bounds, - selection_cache: traits::SelectionCache::new(), - evaluation_cache: traits::EvaluationCache::new(), free_id_outlive: self.free_id_outlive, } } /// Construct a parameter environment given an item, impl item, or trait item - pub fn for_item(cx: &'a TyCtxt<'tcx>, id: NodeId) -> ParameterEnvironment<'a, 'tcx> { - match cx.map.find(id) { + pub fn for_item(tcx: TyCtxt<'a, 'tcx, 'tcx>, id: NodeId) + -> ParameterEnvironment<'tcx> { + match tcx.map.find(id) { Some(ast_map::NodeImplItem(ref impl_item)) => { match impl_item.node { - hir::ImplItemKind::Type(_) => { + hir::ImplItemKind::Type(_) | hir::ImplItemKind::Const(_, _) => { // associated types don't have their own entry (for some reason), // so for now just grab environment for the impl - let impl_id = cx.map.get_parent(id); - let impl_def_id = cx.map.local_def_id(impl_id); - let scheme = cx.lookup_item_type(impl_def_id); - let predicates = cx.lookup_predicates(impl_def_id); - cx.construct_parameter_environment(impl_item.span, - &scheme.generics, - &predicates, - cx.region_maps.item_extent(id)) - } - hir::ImplItemKind::Const(_, _) => { - let def_id = cx.map.local_def_id(id); - let scheme = cx.lookup_item_type(def_id); - let predicates = cx.lookup_predicates(def_id); - cx.construct_parameter_environment(impl_item.span, - &scheme.generics, - &predicates, - cx.region_maps.item_extent(id)) + let impl_id = tcx.map.get_parent(id); + let impl_def_id = tcx.map.local_def_id(impl_id); + let scheme = tcx.lookup_item_type(impl_def_id); + let predicates = tcx.lookup_predicates(impl_def_id); + tcx.construct_parameter_environment(impl_item.span, + &scheme.generics, + &predicates, + tcx.region_maps.item_extent(id)) } hir::ImplItemKind::Method(_, ref body) => { - let method_def_id = cx.map.local_def_id(id); - match cx.impl_or_trait_item(method_def_id) { + let method_def_id = tcx.map.local_def_id(id); + match tcx.impl_or_trait_item(method_def_id) { MethodTraitItem(ref method_ty) => { let method_generics = &method_ty.generics; let method_bounds = &method_ty.predicates; - cx.construct_parameter_environment( + tcx.construct_parameter_environment( impl_item.span, method_generics, method_bounds, - cx.region_maps.call_site_extent(id, body.id)) + tcx.region_maps.call_site_extent(id, body.id)) } _ => { bug!("ParameterEnvironment::for_item(): \ @@ -1268,44 +1294,35 @@ impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> { } Some(ast_map::NodeTraitItem(trait_item)) => { match trait_item.node { - hir::TypeTraitItem(..) => { + hir::TypeTraitItem(..) | hir::ConstTraitItem(..) => { // associated types don't have their own entry (for some reason), // so for now just grab environment for the trait - let trait_id = cx.map.get_parent(id); - let trait_def_id = cx.map.local_def_id(trait_id); - let trait_def = cx.lookup_trait_def(trait_def_id); - let predicates = cx.lookup_predicates(trait_def_id); - cx.construct_parameter_environment(trait_item.span, - &trait_def.generics, - &predicates, - cx.region_maps.item_extent(id)) - } - hir::ConstTraitItem(..) 
=> { - let def_id = cx.map.local_def_id(id); - let scheme = cx.lookup_item_type(def_id); - let predicates = cx.lookup_predicates(def_id); - cx.construct_parameter_environment(trait_item.span, - &scheme.generics, - &predicates, - cx.region_maps.item_extent(id)) + let trait_id = tcx.map.get_parent(id); + let trait_def_id = tcx.map.local_def_id(trait_id); + let trait_def = tcx.lookup_trait_def(trait_def_id); + let predicates = tcx.lookup_predicates(trait_def_id); + tcx.construct_parameter_environment(trait_item.span, + &trait_def.generics, + &predicates, + tcx.region_maps.item_extent(id)) } hir::MethodTraitItem(_, ref body) => { // Use call-site for extent (unless this is a // trait method with no default; then fallback // to the method id). - let method_def_id = cx.map.local_def_id(id); - match cx.impl_or_trait_item(method_def_id) { + let method_def_id = tcx.map.local_def_id(id); + match tcx.impl_or_trait_item(method_def_id) { MethodTraitItem(ref method_ty) => { let method_generics = &method_ty.generics; let method_bounds = &method_ty.predicates; let extent = if let Some(ref body) = *body { // default impl: use call_site extent as free_id_outlive bound. - cx.region_maps.call_site_extent(id, body.id) + tcx.region_maps.call_site_extent(id, body.id) } else { // no default impl: use item extent as free_id_outlive bound. - cx.region_maps.item_extent(id) + tcx.region_maps.item_extent(id) }; - cx.construct_parameter_environment( + tcx.construct_parameter_environment( trait_item.span, method_generics, method_bounds, @@ -1324,37 +1341,38 @@ impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> { match item.node { hir::ItemFn(_, _, _, _, _, ref body) => { // We assume this is a function. - let fn_def_id = cx.map.local_def_id(id); - let fn_scheme = cx.lookup_item_type(fn_def_id); - let fn_predicates = cx.lookup_predicates(fn_def_id); - - cx.construct_parameter_environment(item.span, - &fn_scheme.generics, - &fn_predicates, - cx.region_maps.call_site_extent(id, - body.id)) + let fn_def_id = tcx.map.local_def_id(id); + let fn_scheme = tcx.lookup_item_type(fn_def_id); + let fn_predicates = tcx.lookup_predicates(fn_def_id); + + tcx.construct_parameter_environment( + item.span, + &fn_scheme.generics, + &fn_predicates, + tcx.region_maps.call_site_extent(id, body.id)) } hir::ItemEnum(..) | hir::ItemStruct(..) | + hir::ItemTy(..) | hir::ItemImpl(..) | hir::ItemConst(..) | hir::ItemStatic(..) => { - let def_id = cx.map.local_def_id(id); - let scheme = cx.lookup_item_type(def_id); - let predicates = cx.lookup_predicates(def_id); - cx.construct_parameter_environment(item.span, - &scheme.generics, - &predicates, - cx.region_maps.item_extent(id)) + let def_id = tcx.map.local_def_id(id); + let scheme = tcx.lookup_item_type(def_id); + let predicates = tcx.lookup_predicates(def_id); + tcx.construct_parameter_environment(item.span, + &scheme.generics, + &predicates, + tcx.region_maps.item_extent(id)) } hir::ItemTrait(..) 
=> { - let def_id = cx.map.local_def_id(id); - let trait_def = cx.lookup_trait_def(def_id); - let predicates = cx.lookup_predicates(def_id); - cx.construct_parameter_environment(item.span, - &trait_def.generics, - &predicates, - cx.region_maps.item_extent(id)) + let def_id = tcx.map.local_def_id(id); + let trait_def = tcx.lookup_trait_def(def_id); + let predicates = tcx.lookup_predicates(def_id); + tcx.construct_parameter_environment(item.span, + &trait_def.generics, + &predicates, + tcx.region_maps.item_extent(id)) } _ => { span_bug!(item.span, @@ -1366,12 +1384,21 @@ impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> { } Some(ast_map::NodeExpr(..)) => { // This is a convenience to allow closures to work. - ParameterEnvironment::for_item(cx, cx.map.get_parent(id)) + ParameterEnvironment::for_item(tcx, tcx.map.get_parent(id)) + } + Some(ast_map::NodeForeignItem(item)) => { + let def_id = tcx.map.local_def_id(id); + let scheme = tcx.lookup_item_type(def_id); + let predicates = tcx.lookup_predicates(def_id); + tcx.construct_parameter_environment(item.span, + &scheme.generics, + &predicates, + ROOT_CODE_EXTENT) } _ => { bug!("ParameterEnvironment::from_item(): \ `{}` is not an item", - cx.map.node_to_string(id)) + tcx.map.node_to_string(id)) } } } @@ -1467,6 +1494,7 @@ pub struct AdtDefData<'tcx, 'container: 'tcx> { pub variants: Vec>, destructor: Cell>, flags: Cell, + sized_constraint: ivar::TyIVar<'tcx, 'container>, } impl<'tcx, 'container> PartialEq for AdtDefData<'tcx, 'container> { @@ -1518,11 +1546,11 @@ impl VariantKind { } } -impl<'tcx, 'container> AdtDefData<'tcx, 'container> { - fn new(tcx: &TyCtxt<'tcx>, +impl<'a, 'gcx, 'tcx, 'container> AdtDefData<'gcx, 'container> { + fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>, did: DefId, kind: AdtKind, - variants: Vec>) -> Self { + variants: Vec>) -> Self { let mut flags = AdtFlags::NO_ADT_FLAGS; let attrs = tcx.get_attrs(did); if attr::contains_name(&attrs, "fundamental") { @@ -1544,11 +1572,12 @@ impl<'tcx, 'container> AdtDefData<'tcx, 'container> { did: did, variants: variants, flags: Cell::new(flags), - destructor: Cell::new(None) + destructor: Cell::new(None), + sized_constraint: ivar::TyIVar::new(), } } - fn calculate_dtorck(&'tcx self, tcx: &TyCtxt<'tcx>) { + fn calculate_dtorck(&'gcx self, tcx: TyCtxt) { if tcx.is_adt_dtorck(self) { self.flags.set(self.flags.get() | AdtFlags::IS_DTORCK); } @@ -1569,7 +1598,7 @@ impl<'tcx, 'container> AdtDefData<'tcx, 'container> { /// true, this type being safe for destruction requires it to be /// alive; Otherwise, only the contents are required to be. #[inline] - pub fn is_dtorck(&'tcx self, tcx: &TyCtxt<'tcx>) -> bool { + pub fn is_dtorck(&'gcx self, tcx: TyCtxt) -> bool { if !self.flags.get().intersects(AdtFlags::IS_DTORCK_VALID) { self.calculate_dtorck(tcx) } @@ -1604,18 +1633,18 @@ impl<'tcx, 'container> AdtDefData<'tcx, 'container> { /// Asserts this is a struct and returns the struct's unique /// variant. 
- pub fn struct_variant(&self) -> &VariantDefData<'tcx, 'container> { + pub fn struct_variant(&self) -> &VariantDefData<'gcx, 'container> { assert_eq!(self.adt_kind(), AdtKind::Struct); &self.variants[0] } #[inline] - pub fn type_scheme(&self, tcx: &TyCtxt<'tcx>) -> TypeScheme<'tcx> { + pub fn type_scheme(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> TypeScheme<'gcx> { tcx.lookup_item_type(self.did) } #[inline] - pub fn predicates(&self, tcx: &TyCtxt<'tcx>) -> GenericPredicates<'tcx> { + pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> GenericPredicates<'gcx> { tcx.lookup_predicates(self.did) } @@ -1624,10 +1653,10 @@ impl<'tcx, 'container> AdtDefData<'tcx, 'container> { #[inline] pub fn all_fields(&self) -> iter::FlatMap< - slice::Iter>, - slice::Iter>, - for<'s> fn(&'s VariantDefData<'tcx, 'container>) - -> slice::Iter<'s, FieldDefData<'tcx, 'container>> + slice::Iter>, + slice::Iter>, + for<'s> fn(&'s VariantDefData<'gcx, 'container>) + -> slice::Iter<'s, FieldDefData<'gcx, 'container>> > { self.variants.iter().flat_map(VariantDefData::fields_iter) } @@ -1647,7 +1676,7 @@ impl<'tcx, 'container> AdtDefData<'tcx, 'container> { self.variants.iter().all(|v| v.fields.is_empty()) } - pub fn variant_with_id(&self, vid: DefId) -> &VariantDefData<'tcx, 'container> { + pub fn variant_with_id(&self, vid: DefId) -> &VariantDefData<'gcx, 'container> { self.variants .iter() .find(|v| v.did == vid) @@ -1661,7 +1690,7 @@ impl<'tcx, 'container> AdtDefData<'tcx, 'container> { .expect("variant_index_with_id: unknown variant") } - pub fn variant_of_def(&self, def: Def) -> &VariantDefData<'tcx, 'container> { + pub fn variant_of_def(&self, def: Def) -> &VariantDefData<'gcx, 'container> { match def { Def::Variant(_, vid) => self.variant_with_id(vid), Def::Struct(..) | Def::TyAlias(..) => self.struct_variant(), @@ -1687,6 +1716,188 @@ impl<'tcx, 'container> AdtDefData<'tcx, 'container> { } } +impl<'a, 'gcx, 'tcx, 'container> AdtDefData<'tcx, 'container> { + /// Returns a simpler type such that `Self: Sized` if and only + /// if that type is Sized, or `TyErr` if this type is recursive. + /// + /// HACK: instead of returning a list of types, this function can + /// return a tuple. In that case, the result is Sized only if + /// all elements of the tuple are Sized. + /// + /// This is generally the `struct_tail` if this is a struct, or a + /// tuple of them if this is an enum. + /// + /// Oddly enough, checking that the sized-constraint is Sized is + /// actually more expressive than checking all members: + /// the Sized trait is inductive, so an associated type that references + /// Self would prevent its containing ADT from being Sized. + /// + /// Due to normalization being eager, this applies even if + /// the associated type is behind a pointer, e.g. issue #31299. + pub fn sized_constraint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { + let dep_node = DepNode::SizedConstraint(self.did); + match self.sized_constraint.get(dep_node) { + None => { + let global_tcx = tcx.global_tcx(); + let this = global_tcx.lookup_adt_def_master(self.did); + this.calculate_sized_constraint_inner(global_tcx, &mut Vec::new()); + self.sized_constraint(tcx) + } + Some(ty) => ty + } + } +} + +impl<'a, 'tcx> AdtDefData<'tcx, 'tcx> { + /// Calculates the Sized-constraint. + /// + /// As the Sized-constraint of enums can be a *set* of types, + /// the Sized-constraint may need to be a set also. Because introducing + /// a new type of IVar is currently a complex affair, the Sized-constraint + /// may be a tuple. 
+ /// + /// In fact, there are only a few options for the constraint: + /// - `bool`, if the type is always Sized + /// - an obviously-unsized type + /// - a type parameter or projection whose Sizedness can't be known + /// - a tuple of type parameters or projections, if there are multiple + /// such. + /// - a TyError, if a type contained itself. The representability + /// check should catch this case. + fn calculate_sized_constraint_inner(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + stack: &mut Vec>) + { + + let dep_node = DepNode::SizedConstraint(self.did); + + if self.sized_constraint.get(dep_node).is_some() { + return; + } + + if stack.contains(&self) { + debug!("calculate_sized_constraint: {:?} is recursive", self); + // This should be reported as an error by `check_representable`. + // + // Consider the type as Sized in the meanwhile to avoid + // further errors. + self.sized_constraint.fulfill(dep_node, tcx.types.err); + return; + } + + stack.push(self); + + let tys : Vec<_> = + self.variants.iter().flat_map(|v| { + v.fields.last() + }).flat_map(|f| { + self.sized_constraint_for_ty(tcx, stack, f.unsubst_ty()) + }).collect(); + + let self_ = stack.pop().unwrap(); + assert_eq!(self_, self); + + let ty = match tys.len() { + _ if tys.references_error() => tcx.types.err, + 0 => tcx.types.bool, + 1 => tys[0], + _ => tcx.mk_tup(tys) + }; + + match self.sized_constraint.get(dep_node) { + Some(old_ty) => { + debug!("calculate_sized_constraint: {:?} recurred", self); + assert_eq!(old_ty, tcx.types.err) + } + None => { + debug!("calculate_sized_constraint: {:?} => {:?}", self, ty); + self.sized_constraint.fulfill(dep_node, ty) + } + } + } + + fn sized_constraint_for_ty( + &'tcx self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + stack: &mut Vec>, + ty: Ty<'tcx> + ) -> Vec> { + let result = match ty.sty { + TyBool | TyChar | TyInt(..) | TyUint(..) | TyFloat(..) | + TyBox(..) | TyRawPtr(..) | TyRef(..) | TyFnDef(..) | TyFnPtr(_) | + TyArray(..) | TyClosure(..) => { + vec![] + } + + TyStr | TyTrait(..) | TySlice(_) | TyError => { + // these are never sized - return the target type + vec![ty] + } + + TyTuple(ref tys) => { + // FIXME(#33242) we only need to constrain the last field + tys.iter().flat_map(|ty| { + self.sized_constraint_for_ty(tcx, stack, ty) + }).collect() + } + + TyEnum(adt, substs) | TyStruct(adt, substs) => { + // recursive case + let adt = tcx.lookup_adt_def_master(adt.did); + adt.calculate_sized_constraint_inner(tcx, stack); + let adt_ty = + adt.sized_constraint + .unwrap(DepNode::SizedConstraint(adt.did)) + .subst(tcx, substs); + debug!("sized_constraint_for_ty({:?}) intermediate = {:?}", + ty, adt_ty); + if let ty::TyTuple(ref tys) = adt_ty.sty { + tys.iter().flat_map(|ty| { + self.sized_constraint_for_ty(tcx, stack, ty) + }).collect() + } else { + self.sized_constraint_for_ty(tcx, stack, adt_ty) + } + } + + TyProjection(..) => { + // must calculate explicitly. + // FIXME: consider special-casing always-Sized projections + vec![ty] + } + + TyParam(..) => { + // perf hack: if there is a `T: Sized` bound, then + // we know that `T` is Sized and do not need to check + // it on the impl. 
+ + let sized_trait = match tcx.lang_items.sized_trait() { + Some(x) => x, + _ => return vec![ty] + }; + let sized_predicate = Binder(TraitRef { + def_id: sized_trait, + substs: tcx.mk_substs(Substs::new_trait( + vec![], vec![], ty + )) + }).to_predicate(); + let predicates = tcx.lookup_predicates(self.did).predicates; + if predicates.into_iter().any(|p| p == sized_predicate) { + vec![] + } else { + vec![ty] + } + } + + TyInfer(..) => { + bug!("unexpected type `{:?}` in sized_constraint_for_ty", + ty) + } + }; + debug!("sized_constraint_for_ty({:?}) = {:?}", ty, result); + result + } +} + impl<'tcx, 'container> VariantDefData<'tcx, 'container> { #[inline] fn fields_iter(&self) -> slice::Iter> { @@ -1721,7 +1932,7 @@ impl<'tcx, 'container> VariantDefData<'tcx, 'container> { } } -impl<'tcx, 'container> FieldDefData<'tcx, 'container> { +impl<'a, 'gcx, 'tcx, 'container> FieldDefData<'tcx, 'container> { pub fn new(did: DefId, name: Name, vis: Visibility) -> Self { @@ -1733,7 +1944,7 @@ impl<'tcx, 'container> FieldDefData<'tcx, 'container> { } } - pub fn ty(&self, tcx: &TyCtxt<'tcx>, subst: &Substs<'tcx>) -> Ty<'tcx> { + pub fn ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, subst: &Substs<'tcx>) -> Ty<'tcx> { self.unsubst_ty().subst(tcx, subst) } @@ -1750,10 +1961,10 @@ impl<'tcx, 'container> FieldDefData<'tcx, 'container> { /// item into the monotype of an item reference. #[derive(Clone)] pub struct ItemSubsts<'tcx> { - pub substs: Substs<'tcx>, + pub substs: &'tcx Substs<'tcx>, } -#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Debug, RustcEncodable, RustcDecodable)] +#[derive(Clone, Copy, PartialOrd, Ord, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] pub enum ClosureKind { // Warning: Ordering is significant here! The ordering is chosen // because the trait Fn is a subtrait of FnMut and so in turn, and @@ -1763,20 +1974,20 @@ pub enum ClosureKind { FnOnce, } -impl ClosureKind { - pub fn trait_did(&self, cx: &TyCtxt) -> DefId { +impl<'a, 'tcx> ClosureKind { + pub fn trait_did(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> DefId { let result = match *self { - ClosureKind::Fn => cx.lang_items.require(FnTraitLangItem), + ClosureKind::Fn => tcx.lang_items.require(FnTraitLangItem), ClosureKind::FnMut => { - cx.lang_items.require(FnMutTraitLangItem) + tcx.lang_items.require(FnMutTraitLangItem) } ClosureKind::FnOnce => { - cx.lang_items.require(FnOnceTraitLangItem) + tcx.lang_items.require(FnOnceTraitLangItem) } }; match result { Ok(trait_did) => trait_did, - Err(err) => cx.sess.fatal(&err[..]), + Err(err) => tcx.sess.fatal(&err[..]), } } @@ -1835,10 +2046,6 @@ impl<'tcx> TyS<'tcx> { } impl<'tcx> ItemSubsts<'tcx> { - pub fn empty() -> ItemSubsts<'tcx> { - ItemSubsts { substs: Substs::empty() } - } - pub fn is_noop(&self) -> bool { self.substs.is_noop() } @@ -1860,7 +2067,7 @@ impl LvaluePreference { } /// Helper for looking things up in the various maps that are populated during -/// typeck::collect (e.g., `cx.impl_or_trait_items`, `cx.tcache`, etc). All of +/// typeck::collect (e.g., `tcx.impl_or_trait_items`, `tcx.tcache`, etc). All of /// these share the pattern that if the id is local, it should have been loaded /// into the map by the `typeck::collect` phase. 
If the def-id is external, /// then we have to go consult the crate loading code (and cache the result for @@ -1914,8 +2121,8 @@ impl BorrowKind { } } -impl<'tcx> TyCtxt<'tcx> { - pub fn node_id_to_type(&self, id: NodeId) -> Ty<'tcx> { +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn node_id_to_type(self, id: NodeId) -> Ty<'gcx> { match self.node_id_to_type_opt(id) { Some(ty) => ty, None => bug!("node_id_to_type: no type for node `{}`", @@ -1923,23 +2130,25 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn node_id_to_type_opt(&self, id: NodeId) -> Option> { + pub fn node_id_to_type_opt(self, id: NodeId) -> Option> { self.tables.borrow().node_types.get(&id).cloned() } - pub fn node_id_item_substs(&self, id: NodeId) -> ItemSubsts<'tcx> { + pub fn node_id_item_substs(self, id: NodeId) -> ItemSubsts<'gcx> { match self.tables.borrow().item_substs.get(&id) { - None => ItemSubsts::empty(), + None => ItemSubsts { + substs: self.global_tcx().mk_substs(Substs::empty()) + }, Some(ts) => ts.clone(), } } // Returns the type of a pattern as a monotype. Like @expr_ty, this function // doesn't provide type parameter substitutions. - pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> { + pub fn pat_ty(self, pat: &hir::Pat) -> Ty<'gcx> { self.node_id_to_type(pat.id) } - pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option> { + pub fn pat_ty_opt(self, pat: &hir::Pat) -> Option> { self.node_id_to_type_opt(pat.id) } @@ -1953,11 +2162,11 @@ impl<'tcx> TyCtxt<'tcx> { // NB (2): This type doesn't provide type parameter substitutions; e.g. if you // ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize" // instead of "fn(ty) -> T with T = isize". - pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> { + pub fn expr_ty(self, expr: &hir::Expr) -> Ty<'gcx> { self.node_id_to_type(expr.id) } - pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option> { + pub fn expr_ty_opt(self, expr: &hir::Expr) -> Option> { self.node_id_to_type_opt(expr.id) } @@ -1970,17 +2179,17 @@ impl<'tcx> TyCtxt<'tcx> { /// hard to do, I just hate that code so much I didn't want to touch it /// unless it was to fix it properly, which seemed a distraction from the /// thread at hand! 
-nmatsakis - pub fn expr_ty_adjusted(&self, expr: &hir::Expr) -> Ty<'tcx> { + pub fn expr_ty_adjusted(self, expr: &hir::Expr) -> Ty<'gcx> { self.expr_ty(expr) - .adjust(self, expr.span, expr.id, + .adjust(self.global_tcx(), expr.span, expr.id, self.tables.borrow().adjustments.get(&expr.id), |method_call| { self.tables.borrow().method_map.get(&method_call).map(|method| method.ty) }) } - pub fn expr_ty_adjusted_opt(&self, expr: &hir::Expr) -> Option> { - self.expr_ty_opt(expr).map(|t| t.adjust(self, + pub fn expr_ty_adjusted_opt(self, expr: &hir::Expr) -> Option> { + self.expr_ty_opt(expr).map(|t| t.adjust(self.global_tcx(), expr.span, expr.id, self.tables.borrow().adjustments.get(&expr.id), @@ -1989,7 +2198,7 @@ impl<'tcx> TyCtxt<'tcx> { })) } - pub fn expr_span(&self, id: NodeId) -> Span { + pub fn expr_span(self, id: NodeId) -> Span { match self.map.find(id) { Some(ast_map::NodeExpr(e)) => { e.span @@ -2003,11 +2212,11 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn local_var_name_str(&self, id: NodeId) -> InternedString { + pub fn local_var_name_str(self, id: NodeId) -> InternedString { match self.map.find(id) { Some(ast_map::NodeLocal(pat)) => { match pat.node { - PatKind::Ident(_, ref path1, _) => path1.node.name.as_str(), + PatKind::Ident(_, ref path1, _) => path1.node.as_str(), _ => { bug!("Variable id {} maps to {:?}, not local", id, pat); }, @@ -2017,7 +2226,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn resolve_expr(&self, expr: &hir::Expr) -> Def { + pub fn resolve_expr(self, expr: &hir::Expr) -> Def { match self.def_map.borrow().get(&expr.id) { Some(def) => def.full_def(), None => { @@ -2026,7 +2235,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn expr_is_lval(&self, expr: &hir::Expr) -> bool { + pub fn expr_is_lval(self, expr: &hir::Expr) -> bool { match expr.node { hir::ExprPath(..) 
=> { // We can't use resolve_expr here, as this needs to run on broken @@ -2088,7 +2297,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn provided_trait_methods(&self, id: DefId) -> Vec>> { + pub fn provided_trait_methods(self, id: DefId) -> Vec>> { if let Some(id) = self.map.as_local_node_id(id) { if let ItemTrait(_, _, _, ref ms) = self.map.expect_item(id).node { ms.iter().filter_map(|ti| { @@ -2109,11 +2318,11 @@ impl<'tcx> TyCtxt<'tcx> { bug!("provided_trait_methods: `{:?}` is not a trait", id) } } else { - self.sess.cstore.provided_trait_methods(self, id) + self.sess.cstore.provided_trait_methods(self.global_tcx(), id) } } - pub fn associated_consts(&self, id: DefId) -> Vec>> { + pub fn associated_consts(self, id: DefId) -> Vec>> { if let Some(id) = self.map.as_local_node_id(id) { match self.map.expect_item(id).node { ItemTrait(_, _, _, ref tis) => { @@ -2153,11 +2362,11 @@ impl<'tcx> TyCtxt<'tcx> { } } } else { - self.sess.cstore.associated_consts(self, id) + self.sess.cstore.associated_consts(self.global_tcx(), id) } } - pub fn trait_impl_polarity(&self, id: DefId) -> Option { + pub fn trait_impl_polarity(self, id: DefId) -> Option { if let Some(id) = self.map.as_local_node_id(id) { match self.map.find(id) { Some(ast_map::NodeItem(item)) => { @@ -2173,7 +2382,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn custom_coerce_unsized_kind(&self, did: DefId) -> adjustment::CustomCoerceUnsized { + pub fn custom_coerce_unsized_kind(self, did: DefId) -> adjustment::CustomCoerceUnsized { self.custom_coerce_unsized_kinds.memoize(did, || { let (kind, src) = if did.krate != LOCAL_CRATE { (self.sess.cstore.custom_coerce_unsized_kind(did), "external") @@ -2192,14 +2401,14 @@ impl<'tcx> TyCtxt<'tcx> { }) } - pub fn impl_or_trait_item(&self, id: DefId) -> ImplOrTraitItem<'tcx> { + pub fn impl_or_trait_item(self, id: DefId) -> ImplOrTraitItem<'gcx> { lookup_locally_or_in_crate_store( "impl_or_trait_items", id, &self.impl_or_trait_items, - || self.sess.cstore.impl_or_trait_item(self, id) + || self.sess.cstore.impl_or_trait_item(self.global_tcx(), id) .expect("missing ImplOrTraitItem in metadata")) } - pub fn trait_item_def_ids(&self, id: DefId) -> Rc> { + pub fn trait_item_def_ids(self, id: DefId) -> Rc> { lookup_locally_or_in_crate_store( "trait_item_def_ids", id, &self.trait_item_def_ids, || Rc::new(self.sess.cstore.trait_item_def_ids(id))) @@ -2207,14 +2416,14 @@ impl<'tcx> TyCtxt<'tcx> { /// Returns the trait-ref corresponding to a given impl, or None if it is /// an inherent impl. - pub fn impl_trait_ref(&self, id: DefId) -> Option> { + pub fn impl_trait_ref(self, id: DefId) -> Option> { lookup_locally_or_in_crate_store( "impl_trait_refs", id, &self.impl_trait_refs, - || self.sess.cstore.impl_trait_ref(self, id)) + || self.sess.cstore.impl_trait_ref(self.global_tcx(), id)) } /// Returns whether this DefId refers to an impl - pub fn is_impl(&self, id: DefId) -> bool { + pub fn is_impl(self, id: DefId) -> bool { if let Some(id) = self.map.as_local_node_id(id) { if let Some(ast_map::NodeItem( &hir::Item { node: hir::ItemImpl(..), .. 
})) = self.map.find(id) { @@ -2227,11 +2436,11 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn trait_ref_to_def_id(&self, tr: &hir::TraitRef) -> DefId { + pub fn trait_ref_to_def_id(self, tr: &hir::TraitRef) -> DefId { self.def_map.borrow().get(&tr.ref_id).expect("no def-map entry for trait").def_id() } - pub fn def_key(&self, id: DefId) -> ast_map::DefKey { + pub fn def_key(self, id: DefId) -> ast_map::DefKey { if id.is_local() { self.map.def_key(id) } else { @@ -2242,7 +2451,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Returns the `DefPath` of an item. Note that if `id` is not /// local to this crate -- or is inlined into this crate -- the /// result will be a non-local `DefPath`. - pub fn def_path(&self, id: DefId) -> ast_map::DefPath { + pub fn def_path(self, id: DefId) -> ast_map::DefPath { if id.is_local() { self.map.def_path(id) } else { @@ -2250,7 +2459,7 @@ impl<'tcx> TyCtxt<'tcx> { } } - pub fn item_name(&self, id: DefId) -> ast::Name { + pub fn item_name(self, id: DefId) -> ast::Name { if let Some(id) = self.map.as_local_node_id(id) { self.map.name(id) } else { @@ -2259,55 +2468,55 @@ impl<'tcx> TyCtxt<'tcx> { } // Register a given item type - pub fn register_item_type(&self, did: DefId, ty: TypeScheme<'tcx>) { + pub fn register_item_type(self, did: DefId, ty: TypeScheme<'gcx>) { self.tcache.borrow_mut().insert(did, ty); } // If the given item is in an external crate, looks up its type and adds it to // the type cache. Returns the type parameters and type. - pub fn lookup_item_type(&self, did: DefId) -> TypeScheme<'tcx> { + pub fn lookup_item_type(self, did: DefId) -> TypeScheme<'gcx> { lookup_locally_or_in_crate_store( "tcache", did, &self.tcache, - || self.sess.cstore.item_type(self, did)) + || self.sess.cstore.item_type(self.global_tcx(), did)) } /// Given the did of a trait, returns its canonical trait ref. - pub fn lookup_trait_def(&self, did: DefId) -> &'tcx TraitDef<'tcx> { + pub fn lookup_trait_def(self, did: DefId) -> &'gcx TraitDef<'gcx> { lookup_locally_or_in_crate_store( "trait_defs", did, &self.trait_defs, - || self.alloc_trait_def(self.sess.cstore.trait_def(self, did)) + || self.alloc_trait_def(self.sess.cstore.trait_def(self.global_tcx(), did)) ) } /// Given the did of an ADT, return a master reference to its /// definition. Unless you are planning on fulfilling the ADT's fields, /// use lookup_adt_def instead. - pub fn lookup_adt_def_master(&self, did: DefId) -> AdtDefMaster<'tcx> { + pub fn lookup_adt_def_master(self, did: DefId) -> AdtDefMaster<'gcx> { lookup_locally_or_in_crate_store( "adt_defs", did, &self.adt_defs, - || self.sess.cstore.adt_def(self, did) + || self.sess.cstore.adt_def(self.global_tcx(), did) ) } /// Given the did of an ADT, return a reference to its definition. - pub fn lookup_adt_def(&self, did: DefId) -> AdtDef<'tcx> { + pub fn lookup_adt_def(self, did: DefId) -> AdtDef<'gcx> { // when reverse-variance goes away, a transmute:: // would be needed here. self.lookup_adt_def_master(did) } /// Given the did of an item, returns its full set of predicates. - pub fn lookup_predicates(&self, did: DefId) -> GenericPredicates<'tcx> { + pub fn lookup_predicates(self, did: DefId) -> GenericPredicates<'gcx> { lookup_locally_or_in_crate_store( "predicates", did, &self.predicates, - || self.sess.cstore.item_predicates(self, did)) + || self.sess.cstore.item_predicates(self.global_tcx(), did)) } /// Given the did of a trait, returns its superpredicates. 
- pub fn lookup_super_predicates(&self, did: DefId) -> GenericPredicates<'tcx> { + pub fn lookup_super_predicates(self, did: DefId) -> GenericPredicates<'gcx> { lookup_locally_or_in_crate_store( "super_predicates", did, &self.super_predicates, - || self.sess.cstore.item_super_predicates(self, did)) + || self.sess.cstore.item_super_predicates(self.global_tcx(), did)) } /// If `type_needs_drop` returns true, then `ty` is definitely @@ -2316,14 +2525,15 @@ impl<'tcx> TyCtxt<'tcx> { /// /// (Note that this implies that if `ty` has a destructor attached, /// then `type_needs_drop` will definitely return `true` for `ty`.) - pub fn type_needs_drop_given_env<'a>(&self, - ty: Ty<'tcx>, - param_env: &ty::ParameterEnvironment<'a,'tcx>) -> bool { + pub fn type_needs_drop_given_env(self, + ty: Ty<'gcx>, + param_env: &ty::ParameterEnvironment<'gcx>) -> bool { // Issue #22536: We first query type_moves_by_default. It sees a // normalized version of the type, and therefore will definitely // know whether the type implements Copy (and thus needs no // cleanup/drop/zeroing) ... - let implements_copy = !ty.moves_by_default(param_env, DUMMY_SP); + let tcx = self.global_tcx(); + let implements_copy = !ty.moves_by_default(tcx, param_env, DUMMY_SP); if implements_copy { return false; } @@ -2338,13 +2548,13 @@ impl<'tcx> TyCtxt<'tcx> { // bound attached (see above), it is sound to treat it as having a // destructor (e.g. zero its memory on move). - let contents = ty.type_contents(self); + let contents = ty.type_contents(tcx); debug!("type_needs_drop ty={:?} contents={:?}", ty, contents); - contents.needs_drop(self) + contents.needs_drop(tcx) } /// Get the attributes of a definition. - pub fn get_attrs(&self, did: DefId) -> Cow<'tcx, [ast::Attribute]> { + pub fn get_attrs(self, did: DefId) -> Cow<'gcx, [ast::Attribute]> { if let Some(id) = self.map.as_local_node_id(did) { Cow::Borrowed(self.map.attrs(id)) } else { @@ -2353,28 +2563,28 @@ impl<'tcx> TyCtxt<'tcx> { } /// Determine whether an item is annotated with an attribute - pub fn has_attr(&self, did: DefId, attr: &str) -> bool { + pub fn has_attr(self, did: DefId, attr: &str) -> bool { self.get_attrs(did).iter().any(|item| item.check_name(attr)) } /// Determine whether an item is annotated with `#[repr(packed)]` - pub fn lookup_packed(&self, did: DefId) -> bool { + pub fn lookup_packed(self, did: DefId) -> bool { self.lookup_repr_hints(did).contains(&attr::ReprPacked) } /// Determine whether an item is annotated with `#[simd]` - pub fn lookup_simd(&self, did: DefId) -> bool { + pub fn lookup_simd(self, did: DefId) -> bool { self.has_attr(did, "simd") || self.lookup_repr_hints(did).contains(&attr::ReprSimd) } - pub fn item_variances(&self, item_id: DefId) -> Rc { + pub fn item_variances(self, item_id: DefId) -> Rc { lookup_locally_or_in_crate_store( "item_variance_map", item_id, &self.item_variance_map, || Rc::new(self.sess.cstore.item_variances(item_id))) } - pub fn trait_has_default_impl(&self, trait_def_id: DefId) -> bool { + pub fn trait_has_default_impl(self, trait_def_id: DefId) -> bool { self.populate_implementations_for_trait_if_necessary(trait_def_id); let def = self.lookup_trait_def(trait_def_id); @@ -2382,13 +2592,13 @@ impl<'tcx> TyCtxt<'tcx> { } /// Records a trait-to-implementation mapping. 
- pub fn record_trait_has_default_impl(&self, trait_def_id: DefId) { + pub fn record_trait_has_default_impl(self, trait_def_id: DefId) { let def = self.lookup_trait_def(trait_def_id); def.flags.set(def.flags.get() | TraitFlags::HAS_DEFAULT_IMPL) } /// Load primitive inherent implementations if necessary - pub fn populate_implementations_for_primitive_if_necessary(&self, + pub fn populate_implementations_for_primitive_if_necessary(self, primitive_def_id: DefId) { if primitive_def_id.is_local() { return @@ -2414,7 +2624,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Populates the type context with all the inherent implementations for /// the given type if necessary. - pub fn populate_inherent_implementations_for_type_if_necessary(&self, + pub fn populate_inherent_implementations_for_type_if_necessary(self, type_id: DefId) { if type_id.is_local() { return @@ -2444,7 +2654,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Populates the type context with all the implementations for the given /// trait if necessary. - pub fn populate_implementations_for_trait_if_necessary(&self, trait_id: DefId) { + pub fn populate_implementations_for_trait_if_necessary(self, trait_id: DefId) { if trait_id.is_local() { return } @@ -2491,29 +2701,48 @@ impl<'tcx> TyCtxt<'tcx> { def.flags.set(def.flags.get() | TraitFlags::IMPLS_VALID); } - pub fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind { - Tables::closure_kind(&self.tables, self, def_id) + pub fn closure_kind(self, def_id: DefId) -> ty::ClosureKind { + // If this is a local def-id, it should be inserted into the + // tables by typeck; else, it will be retreived from + // the external crate metadata. + if let Some(&kind) = self.tables.borrow().closure_kinds.get(&def_id) { + return kind; + } + + let kind = self.sess.cstore.closure_kind(def_id); + self.tables.borrow_mut().closure_kinds.insert(def_id, kind); + kind } - pub fn closure_type(&self, + pub fn closure_type(self, def_id: DefId, - substs: &ClosureSubsts<'tcx>) + substs: ClosureSubsts<'tcx>) -> ty::ClosureTy<'tcx> { - Tables::closure_type(&self.tables, self, def_id, substs) + // If this is a local def-id, it should be inserted into the + // tables by typeck; else, it will be retreived from + // the external crate metadata. + if let Some(ty) = self.tables.borrow().closure_tys.get(&def_id) { + return ty.subst(self, substs.func_substs); + } + + let ty = self.sess.cstore.closure_ty(self.global_tcx(), def_id); + self.tables.borrow_mut().closure_tys.insert(def_id, ty.clone()); + ty.subst(self, substs.func_substs) } /// Given the def_id of an impl, return the def_id of the trait it implements. /// If it implements no trait, return `None`. - pub fn trait_id_of_impl(&self, def_id: DefId) -> Option { + pub fn trait_id_of_impl(self, def_id: DefId) -> Option { self.impl_trait_ref(def_id).map(|tr| tr.def_id) } /// If the given def ID describes a method belonging to an impl, return the /// ID of the impl that the method belongs to. Otherwise, return `None`. 
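The new `closure_kind` and `closure_type` bodies above replace the old `Tables::closure_kind` indirection with a check-then-populate pattern: consult the per-context `tables` first, and on a miss load the value from crate metadata and record it for next time. A minimal standalone sketch of that caching pattern, using hypothetical names (`Ctxt`, `Tables`, `load_from_metadata`) rather than rustc's actual API:

    use std::cell::RefCell;
    use std::collections::HashMap;

    struct Tables {
        // Cache keyed by a stand-in for DefId.
        closure_kinds: RefCell<HashMap<u32, &'static str>>,
    }

    struct Ctxt {
        tables: Tables,
    }

    impl Ctxt {
        // Stand-in for the expensive lookup that goes out to crate metadata.
        fn load_from_metadata(&self, def_id: u32) -> &'static str {
            if def_id % 2 == 0 { "Fn" } else { "FnOnce" }
        }

        fn closure_kind(&self, def_id: u32) -> &'static str {
            // Fast path: the kind was already recorded, either by local
            // type-checking or by an earlier cache miss.
            if let Some(&kind) = self.tables.closure_kinds.borrow().get(&def_id) {
                return kind;
            }
            // Slow path: load once, then remember the result.
            let kind = self.load_from_metadata(def_id);
            self.tables.closure_kinds.borrow_mut().insert(def_id, kind);
            kind
        }
    }

    fn main() {
        let cx = Ctxt { tables: Tables { closure_kinds: RefCell::new(HashMap::new()) } };
        assert_eq!(cx.closure_kind(4), "Fn");
        // The second call is served from the cache rather than "metadata".
        assert_eq!(cx.closure_kind(4), "Fn");
    }

The `RefCell` provides interior mutability so the cache can be filled through a shared (or by-value, `Copy`) context, which is the same trade-off the real `tables` maps make.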
- pub fn impl_of_method(&self, def_id: DefId) -> Option { + pub fn impl_of_method(self, def_id: DefId) -> Option { if def_id.krate != LOCAL_CRATE { - return self.sess.cstore.impl_or_trait_item(self, def_id).and_then(|item| { + return self.sess.cstore.impl_or_trait_item(self.global_tcx(), def_id) + .and_then(|item| { match item.container() { TraitContainer(_) => None, ImplContainer(def_id) => Some(def_id), @@ -2534,9 +2763,9 @@ impl<'tcx> TyCtxt<'tcx> { /// If the given def ID describes an item belonging to a trait (either a /// default method or an implementation of a trait method), return the ID of /// the trait that the method belongs to. Otherwise, return `None`. - pub fn trait_of_item(&self, def_id: DefId) -> Option { + pub fn trait_of_item(self, def_id: DefId) -> Option { if def_id.krate != LOCAL_CRATE { - return self.sess.cstore.trait_of_item(self, def_id); + return self.sess.cstore.trait_of_item(self.global_tcx(), def_id); } match self.impl_or_trait_items.borrow().get(&def_id).cloned() { Some(impl_or_trait_item) => { @@ -2555,7 +2784,7 @@ impl<'tcx> TyCtxt<'tcx> { /// is already that of the original trait method, then the return value is /// the same). /// Otherwise, return `None`. - pub fn trait_item_of_item(&self, def_id: DefId) -> Option { + pub fn trait_item_of_item(self, def_id: DefId) -> Option { let impl_item = match self.impl_or_trait_items.borrow().get(&def_id) { Some(m) => m.clone(), None => return None, @@ -2573,19 +2802,17 @@ impl<'tcx> TyCtxt<'tcx> { /// Construct a parameter environment suitable for static contexts or other contexts where there /// are no free type/lifetime parameters in scope. - pub fn empty_parameter_environment<'a>(&'a self) - -> ParameterEnvironment<'a,'tcx> { + pub fn empty_parameter_environment(self) -> ParameterEnvironment<'tcx> { // for an empty parameter environment, there ARE no free // regions, so it shouldn't matter what we use for the free id let free_id_outlive = self.region_maps.node_extent(ast::DUMMY_NODE_ID); - ty::ParameterEnvironment { tcx: self, - free_substs: Substs::empty(), - caller_bounds: Vec::new(), - implicit_region_bound: ty::ReEmpty, - selection_cache: traits::SelectionCache::new(), - evaluation_cache: traits::EvaluationCache::new(), - free_id_outlive: free_id_outlive } + ty::ParameterEnvironment { + free_substs: self.mk_substs(Substs::empty()), + caller_bounds: Vec::new(), + implicit_region_bound: ty::ReEmpty, + free_id_outlive: free_id_outlive + } } /// Constructs and returns a substitution that can be applied to move from @@ -2593,14 +2820,14 @@ impl<'tcx> TyCtxt<'tcx> { /// In general, this means converting from bound parameters to /// free parameters. Since we currently represent bound/free type /// parameters in the same way, this only has an effect on regions. - pub fn construct_free_substs(&self, generics: &Generics<'tcx>, - free_id_outlive: CodeExtent) -> Substs<'tcx> { + pub fn construct_free_substs(self, generics: &Generics<'gcx>, + free_id_outlive: CodeExtent) -> Substs<'gcx> { // map T => T let mut types = VecPerParamSpace::empty(); for def in generics.types.as_slice() { debug!("construct_parameter_environment(): push_types_from_defs: def={:?}", def); - types.push(def.space, self.mk_param_from_def(def)); + types.push(def.space, self.global_tcx().mk_param_from_def(def)); } // map bound 'a => free 'a @@ -2622,12 +2849,12 @@ impl<'tcx> TyCtxt<'tcx> { /// See `ParameterEnvironment` struct def'n for details. 
/// If you were using `free_id: NodeId`, you might try `self.region_maps.item_extent(free_id)` /// for the `free_id_outlive` parameter. (But note that that is not always quite right.) - pub fn construct_parameter_environment<'a>(&'a self, - span: Span, - generics: &ty::Generics<'tcx>, - generic_predicates: &ty::GenericPredicates<'tcx>, - free_id_outlive: CodeExtent) - -> ParameterEnvironment<'a, 'tcx> + pub fn construct_parameter_environment(self, + span: Span, + generics: &ty::Generics<'gcx>, + generic_predicates: &ty::GenericPredicates<'gcx>, + free_id_outlive: CodeExtent) + -> ParameterEnvironment<'gcx> { // // Construct the free substs. @@ -2639,8 +2866,9 @@ impl<'tcx> TyCtxt<'tcx> { // Compute the bounds on Self and the type parameters. // - let bounds = generic_predicates.instantiate(self, &free_substs); - let bounds = self.liberate_late_bound_regions(free_id_outlive, &ty::Binder(bounds)); + let tcx = self.global_tcx(); + let bounds = generic_predicates.instantiate(tcx, &free_substs); + let bounds = tcx.liberate_late_bound_regions(free_id_outlive, &ty::Binder(bounds)); let predicates = bounds.predicates.into_vec(); // Finally, we have to normalize the bounds in the environment, in @@ -2657,43 +2885,40 @@ impl<'tcx> TyCtxt<'tcx> { // let unnormalized_env = ty::ParameterEnvironment { - tcx: self, - free_substs: free_substs, + free_substs: tcx.mk_substs(free_substs), implicit_region_bound: ty::ReScope(free_id_outlive), caller_bounds: predicates, - selection_cache: traits::SelectionCache::new(), - evaluation_cache: traits::EvaluationCache::new(), free_id_outlive: free_id_outlive, }; let cause = traits::ObligationCause::misc(span, free_id_outlive.node_id(&self.region_maps)); - traits::normalize_param_env_or_error(unnormalized_env, cause) + traits::normalize_param_env_or_error(tcx, unnormalized_env, cause) } - pub fn is_method_call(&self, expr_id: NodeId) -> bool { + pub fn is_method_call(self, expr_id: NodeId) -> bool { self.tables.borrow().method_map.contains_key(&MethodCall::expr(expr_id)) } - pub fn is_overloaded_autoderef(&self, expr_id: NodeId, autoderefs: u32) -> bool { + pub fn is_overloaded_autoderef(self, expr_id: NodeId, autoderefs: u32) -> bool { self.tables.borrow().method_map.contains_key(&MethodCall::autoderef(expr_id, autoderefs)) } - pub fn upvar_capture(&self, upvar_id: ty::UpvarId) -> Option { + pub fn upvar_capture(self, upvar_id: ty::UpvarId) -> Option { Some(self.tables.borrow().upvar_capture_map.get(&upvar_id).unwrap().clone()) } - pub fn visit_all_items_in_krate(&self, + pub fn visit_all_items_in_krate(self, dep_node_fn: F, visitor: &mut V) - where F: FnMut(DefId) -> DepNode, V: Visitor<'tcx> + where F: FnMut(DefId) -> DepNode, V: Visitor<'gcx> { - dep_graph::visit_all_items_in_krate(self, dep_node_fn, visitor); + dep_graph::visit_all_items_in_krate(self.global_tcx(), dep_node_fn, visitor); } /// Looks up the span of `impl_did` if the impl is local; otherwise returns `Err` /// with the name of the crate containing the impl. 
- pub fn span_of_impl(&self, impl_did: DefId) -> Result { + pub fn span_of_impl(self, impl_did: DefId) -> Result { if impl_did.is_local() { let node_id = self.map.as_local_node_id(impl_did).unwrap(); Ok(self.map.span(node_id)) @@ -2712,8 +2937,8 @@ pub enum ExplicitSelfCategory { ByBox, } -impl<'tcx> TyCtxt<'tcx> { - pub fn with_freevars(&self, fid: NodeId, f: F) -> T where +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn with_freevars(self, fid: NodeId, f: F) -> T where F: FnOnce(&[hir::Freevar]) -> T, { match self.freevars.borrow().get(&fid) { diff --git a/src/librustc/ty/outlives.rs b/src/librustc/ty/outlives.rs index 898c9d65c7..9ae3325c25 100644 --- a/src/librustc/ty/outlives.rs +++ b/src/librustc/ty/outlives.rs @@ -55,160 +55,157 @@ pub enum Component<'tcx> { EscapingProjection(Vec>), } -/// Returns all the things that must outlive `'a` for the condition -/// `ty0: 'a` to hold. -pub fn components<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, - ty0: Ty<'tcx>) - -> Vec> { - let mut components = vec![]; - compute_components(infcx, ty0, &mut components); - debug!("components({:?}) = {:?}", ty0, components); - components -} +impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> { + /// Returns all the things that must outlive `'a` for the condition + /// `ty0: 'a` to hold. + pub fn outlives_components(&self, ty0: Ty<'tcx>) + -> Vec> { + let mut components = vec![]; + self.compute_components(ty0, &mut components); + debug!("components({:?}) = {:?}", ty0, components); + components + } -fn compute_components<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, - ty: Ty<'tcx>, - out: &mut Vec>) { - // Descend through the types, looking for the various "base" - // components and collecting them into `out`. This is not written - // with `collect()` because of the need to sometimes skip subtrees - // in the `subtys` iterator (e.g., when encountering a - // projection). - match ty.sty { - ty::TyClosure(_, ref substs) => { - // FIXME(#27086). We do not accumulate from substs, since they - // don't represent reachable data. This means that, in - // practice, some of the lifetime parameters might not - // be in scope when the body runs, so long as there is - // no reachable data with that lifetime. For better or - // worse, this is consistent with fn types, however, - // which can also encapsulate data in this fashion - // (though it's somewhat harder, and typically - // requires virtual dispatch). - // - // Note that changing this (in a naive way, at least) - // causes regressions for what appears to be perfectly - // reasonable code like this: - // - // ``` - // fn foo<'a>(p: &Data<'a>) { - // bar(|q: &mut Parser| q.read_addr()) - // } - // fn bar(p: Box) { - // } - // ``` - // - // Note that `p` (and `'a`) are not used in the - // closure at all, but to meet the requirement that - // the closure type `C: 'static` (so it can be coerced - // to the object type), we get the requirement that - // `'a: 'static` since `'a` appears in the closure - // type `C`. - // - // A smarter fix might "prune" unused `func_substs` -- - // this would avoid breaking simple examples like - // this, but would still break others (which might - // indeed be invalid, depending on your POV). Pruning - // would be a subtle process, since we have to see - // what func/type parameters are used and unused, - // taking into consideration UFCS and so forth. + fn compute_components(&self, ty: Ty<'tcx>, out: &mut Vec>) { + // Descend through the types, looking for the various "base" + // components and collecting them into `out`. 
This is not written + // with `collect()` because of the need to sometimes skip subtrees + // in the `subtys` iterator (e.g., when encountering a + // projection). + match ty.sty { + ty::TyClosure(_, ref substs) => { + // FIXME(#27086). We do not accumulate from substs, since they + // don't represent reachable data. This means that, in + // practice, some of the lifetime parameters might not + // be in scope when the body runs, so long as there is + // no reachable data with that lifetime. For better or + // worse, this is consistent with fn types, however, + // which can also encapsulate data in this fashion + // (though it's somewhat harder, and typically + // requires virtual dispatch). + // + // Note that changing this (in a naive way, at least) + // causes regressions for what appears to be perfectly + // reasonable code like this: + // + // ``` + // fn foo<'a>(p: &Data<'a>) { + // bar(|q: &mut Parser| q.read_addr()) + // } + // fn bar(p: Box) { + // } + // ``` + // + // Note that `p` (and `'a`) are not used in the + // closure at all, but to meet the requirement that + // the closure type `C: 'static` (so it can be coerced + // to the object type), we get the requirement that + // `'a: 'static` since `'a` appears in the closure + // type `C`. + // + // A smarter fix might "prune" unused `func_substs` -- + // this would avoid breaking simple examples like + // this, but would still break others (which might + // indeed be invalid, depending on your POV). Pruning + // would be a subtle process, since we have to see + // what func/type parameters are used and unused, + // taking into consideration UFCS and so forth. - for &upvar_ty in &substs.upvar_tys { - compute_components(infcx, upvar_ty, out); + for &upvar_ty in substs.upvar_tys { + self.compute_components(upvar_ty, out); + } } - } - // OutlivesTypeParameterEnv -- the actual checking that `X:'a` - // is implied by the environment is done in regionck. - ty::TyParam(p) => { - out.push(Component::Param(p)); - } + // OutlivesTypeParameterEnv -- the actual checking that `X:'a` + // is implied by the environment is done in regionck. + ty::TyParam(p) => { + out.push(Component::Param(p)); + } - // For projections, we prefer to generate an obligation like - // `>::Foo: 'a`, because this gives the - // regionck more ways to prove that it holds. However, - // regionck is not (at least currently) prepared to deal with - // higher-ranked regions that may appear in the - // trait-ref. Therefore, if we see any higher-ranke regions, - // we simply fallback to the most restrictive rule, which - // requires that `Pi: 'a` for all `i`. - ty::TyProjection(ref data) => { - if !data.has_escaping_regions() { - // best case: no escaping regions, so push the - // projection and skip the subtree (thus generating no - // constraints for Pi). This defers the choice between - // the rules OutlivesProjectionEnv, - // OutlivesProjectionTraitDef, and - // OutlivesProjectionComponents to regionck. - out.push(Component::Projection(*data)); - } else { - // fallback case: hard code - // OutlivesProjectionComponents. Continue walking - // through and constrain Pi. - let subcomponents = capture_components(infcx, ty); - out.push(Component::EscapingProjection(subcomponents)); + // For projections, we prefer to generate an obligation like + // `>::Foo: 'a`, because this gives the + // regionck more ways to prove that it holds. However, + // regionck is not (at least currently) prepared to deal with + // higher-ranked regions that may appear in the + // trait-ref. 
Therefore, if we see any higher-ranke regions, + // we simply fallback to the most restrictive rule, which + // requires that `Pi: 'a` for all `i`. + ty::TyProjection(ref data) => { + if !data.has_escaping_regions() { + // best case: no escaping regions, so push the + // projection and skip the subtree (thus generating no + // constraints for Pi). This defers the choice between + // the rules OutlivesProjectionEnv, + // OutlivesProjectionTraitDef, and + // OutlivesProjectionComponents to regionck. + out.push(Component::Projection(*data)); + } else { + // fallback case: hard code + // OutlivesProjectionComponents. Continue walking + // through and constrain Pi. + let subcomponents = self.capture_components(ty); + out.push(Component::EscapingProjection(subcomponents)); + } } - } - // If we encounter an inference variable, try to resolve it - // and proceed with resolved version. If we cannot resolve it, - // then record the unresolved variable as a component. - ty::TyInfer(_) => { - let ty = infcx.resolve_type_vars_if_possible(&ty); - if let ty::TyInfer(infer_ty) = ty.sty { - out.push(Component::UnresolvedInferenceVariable(infer_ty)); - } else { - compute_components(infcx, ty, out); + // If we encounter an inference variable, try to resolve it + // and proceed with resolved version. If we cannot resolve it, + // then record the unresolved variable as a component. + ty::TyInfer(_) => { + let ty = self.resolve_type_vars_if_possible(&ty); + if let ty::TyInfer(infer_ty) = ty.sty { + out.push(Component::UnresolvedInferenceVariable(infer_ty)); + } else { + self.compute_components(ty, out); + } } - } - // Most types do not introduce any region binders, nor - // involve any other subtle cases, and so the WF relation - // simply constraints any regions referenced directly by - // the type and then visits the types that are lexically - // contained within. (The comments refer to relevant rules - // from RFC1214.) - ty::TyBool | // OutlivesScalar - ty::TyChar | // OutlivesScalar - ty::TyInt(..) | // OutlivesScalar - ty::TyUint(..) | // OutlivesScalar - ty::TyFloat(..) | // OutlivesScalar - ty::TyEnum(..) | // OutlivesNominalType - ty::TyStruct(..) | // OutlivesNominalType - ty::TyBox(..) | // OutlivesNominalType (ish) - ty::TyStr | // OutlivesScalar (ish) - ty::TyArray(..) | // ... - ty::TySlice(..) | // ... - ty::TyRawPtr(..) | // ... - ty::TyRef(..) | // OutlivesReference - ty::TyTuple(..) | // ... - ty::TyFnDef(..) | // OutlivesFunction (*) - ty::TyFnPtr(_) | // OutlivesFunction (*) - ty::TyTrait(..) | // OutlivesObject, OutlivesFragment (*) - ty::TyError => { - // (*) Bare functions and traits are both binders. In the - // RFC, this means we would add the bound regions to the - // "bound regions list". In our representation, no such - // list is maintained explicitly, because bound regions - // themselves can be readily identified. + // Most types do not introduce any region binders, nor + // involve any other subtle cases, and so the WF relation + // simply constraints any regions referenced directly by + // the type and then visits the types that are lexically + // contained within. (The comments refer to relevant rules + // from RFC1214.) + ty::TyBool | // OutlivesScalar + ty::TyChar | // OutlivesScalar + ty::TyInt(..) | // OutlivesScalar + ty::TyUint(..) | // OutlivesScalar + ty::TyFloat(..) | // OutlivesScalar + ty::TyEnum(..) | // OutlivesNominalType + ty::TyStruct(..) | // OutlivesNominalType + ty::TyBox(..) 
| // OutlivesNominalType (ish) + ty::TyStr | // OutlivesScalar (ish) + ty::TyArray(..) | // ... + ty::TySlice(..) | // ... + ty::TyRawPtr(..) | // ... + ty::TyRef(..) | // OutlivesReference + ty::TyTuple(..) | // ... + ty::TyFnDef(..) | // OutlivesFunction (*) + ty::TyFnPtr(_) | // OutlivesFunction (*) + ty::TyTrait(..) | // OutlivesObject, OutlivesFragment (*) + ty::TyError => { + // (*) Bare functions and traits are both binders. In the + // RFC, this means we would add the bound regions to the + // "bound regions list". In our representation, no such + // list is maintained explicitly, because bound regions + // themselves can be readily identified. - push_region_constraints(out, ty.regions()); - for subty in ty.walk_shallow() { - compute_components(infcx, subty, out); + push_region_constraints(out, ty.regions()); + for subty in ty.walk_shallow() { + self.compute_components(subty, out); + } } } } -} -fn capture_components<'a,'tcx>(infcx: &InferCtxt<'a,'tcx>, - ty: Ty<'tcx>) - -> Vec> { - let mut temp = vec![]; - push_region_constraints(&mut temp, ty.regions()); - for subty in ty.walk_shallow() { - compute_components(infcx, subty, &mut temp); + fn capture_components(&self, ty: Ty<'tcx>) -> Vec> { + let mut temp = vec![]; + push_region_constraints(&mut temp, ty.regions()); + for subty in ty.walk_shallow() { + self.compute_components(subty, &mut temp); + } + temp } - temp } fn push_region_constraints<'tcx>(out: &mut Vec>, regions: Vec) { diff --git a/src/librustc/ty/relate.rs b/src/librustc/ty/relate.rs index f14e680e9e..80c727f022 100644 --- a/src/librustc/ty/relate.rs +++ b/src/librustc/ty/relate.rs @@ -28,8 +28,8 @@ pub enum Cause { ExistentialRegionBound, // relating an existential region bound } -pub trait TypeRelation<'a,'tcx> : Sized { - fn tcx(&self) -> &'a TyCtxt<'tcx>; +pub trait TypeRelation<'a, 'gcx: 'a+'tcx, 'tcx: 'a> : Sized { + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx>; /// Returns a static string we can use for printouts. fn tag(&self) -> &'static str; @@ -45,22 +45,22 @@ pub trait TypeRelation<'a,'tcx> : Sized { } /// Generic relation routine suitable for most anything. - fn relate>(&mut self, a: &T, b: &T) -> RelateResult<'tcx, T> { + fn relate>(&mut self, a: &T, b: &T) -> RelateResult<'tcx, T> { Relate::relate(self, a, b) } /// Relete elements of two slices pairwise. - fn relate_zip>(&mut self, a: &[T], b: &[T]) -> RelateResult<'tcx, Vec> { + fn relate_zip>(&mut self, a: &[T], b: &[T]) -> RelateResult<'tcx, Vec> { assert_eq!(a.len(), b.len()); a.iter().zip(b).map(|(a, b)| self.relate(a, b)).collect() } /// Switch variance for the purpose of relating `a` and `b`. - fn relate_with_variance>(&mut self, - variance: ty::Variance, - a: &T, - b: &T) - -> RelateResult<'tcx, T>; + fn relate_with_variance>(&mut self, + variance: ty::Variance, + a: &T, + b: &T) + -> RelateResult<'tcx, T>; // Overrideable relations. 
You shouldn't typically call these // directly, instead call `relate()`, which in turn calls @@ -76,25 +76,24 @@ pub trait TypeRelation<'a,'tcx> : Sized { fn binders(&mut self, a: &ty::Binder, b: &ty::Binder) -> RelateResult<'tcx, ty::Binder> - where T: Relate<'a,'tcx>; + where T: Relate<'tcx>; } -pub trait Relate<'a,'tcx>: TypeFoldable<'tcx> { - fn relate>(relation: &mut R, - a: &Self, - b: &Self) - -> RelateResult<'tcx, Self>; +pub trait Relate<'tcx>: TypeFoldable<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, a: &Self, b: &Self) + -> RelateResult<'tcx, Self> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a; } /////////////////////////////////////////////////////////////////////////// // Relate impls -impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::TypeAndMut<'tcx> { - fn relate(relation: &mut R, - a: &ty::TypeAndMut<'tcx>, - b: &ty::TypeAndMut<'tcx>) - -> RelateResult<'tcx, ty::TypeAndMut<'tcx>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for ty::TypeAndMut<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::TypeAndMut<'tcx>, + b: &ty::TypeAndMut<'tcx>) + -> RelateResult<'tcx, ty::TypeAndMut<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { debug!("{}.mts({:?}, {:?})", relation.tag(), @@ -117,12 +116,12 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::TypeAndMut<'tcx> { // substitutions are not themselves relatable without more context, // but they is an important subroutine for things that ARE relatable, // like traits etc. -fn relate_item_substs<'a,'tcx:'a,R>(relation: &mut R, - item_def_id: DefId, - a_subst: &Substs<'tcx>, - b_subst: &Substs<'tcx>) - -> RelateResult<'tcx, Substs<'tcx>> - where R: TypeRelation<'a,'tcx> +fn relate_item_substs<'a, 'gcx, 'tcx, R>(relation: &mut R, + item_def_id: DefId, + a_subst: &'tcx Substs<'tcx>, + b_subst: &'tcx Substs<'tcx>) + -> RelateResult<'tcx, &'tcx Substs<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { debug!("substs: item_def_id={:?} a_subst={:?} b_subst={:?}", item_def_id, @@ -139,12 +138,12 @@ fn relate_item_substs<'a,'tcx:'a,R>(relation: &mut R, relate_substs(relation, opt_variances, a_subst, b_subst) } -pub fn relate_substs<'a,'tcx:'a,R>(relation: &mut R, - variances: Option<&ty::ItemVariances>, - a_subst: &Substs<'tcx>, - b_subst: &Substs<'tcx>) - -> RelateResult<'tcx, Substs<'tcx>> - where R: TypeRelation<'a,'tcx> +pub fn relate_substs<'a, 'gcx, 'tcx, R>(relation: &mut R, + variances: Option<&ty::ItemVariances>, + a_subst: &'tcx Substs<'tcx>, + b_subst: &'tcx Substs<'tcx>) + -> RelateResult<'tcx, &'tcx Substs<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { let mut substs = Substs::empty(); @@ -167,15 +166,15 @@ pub fn relate_substs<'a,'tcx:'a,R>(relation: &mut R, substs.regions.replace(space, regions); } - Ok(substs) + Ok(relation.tcx().mk_substs(substs)) } -fn relate_type_params<'a,'tcx:'a,R>(relation: &mut R, - variances: Option<&[ty::Variance]>, - a_tys: &[Ty<'tcx>], - b_tys: &[Ty<'tcx>]) - -> RelateResult<'tcx, Vec>> - where R: TypeRelation<'a,'tcx> +fn relate_type_params<'a, 'gcx, 'tcx, R>(relation: &mut R, + variances: Option<&[ty::Variance]>, + a_tys: &[Ty<'tcx>], + b_tys: &[Ty<'tcx>]) + -> RelateResult<'tcx, Vec>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { if a_tys.len() != b_tys.len() { return Err(TypeError::TyParamSize(expected_found(relation, @@ -193,12 +192,12 @@ fn relate_type_params<'a,'tcx:'a,R>(relation: &mut R, .collect() } -fn relate_region_params<'a,'tcx:'a,R>(relation: &mut R, - 
variances: Option<&[ty::Variance]>, - a_rs: &[ty::Region], - b_rs: &[ty::Region]) - -> RelateResult<'tcx, Vec> - where R: TypeRelation<'a,'tcx> +fn relate_region_params<'a, 'gcx, 'tcx, R>(relation: &mut R, + variances: Option<&[ty::Variance]>, + a_rs: &[ty::Region], + b_rs: &[ty::Region]) + -> RelateResult<'tcx, Vec> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { let num_region_params = a_rs.len(); @@ -224,28 +223,30 @@ fn relate_region_params<'a,'tcx:'a,R>(relation: &mut R, .collect() } -impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::BareFnTy<'tcx> { - fn relate(relation: &mut R, - a: &ty::BareFnTy<'tcx>, - b: &ty::BareFnTy<'tcx>) - -> RelateResult<'tcx, ty::BareFnTy<'tcx>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for &'tcx ty::BareFnTy<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &&'tcx ty::BareFnTy<'tcx>, + b: &&'tcx ty::BareFnTy<'tcx>) + -> RelateResult<'tcx, &'tcx ty::BareFnTy<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { let unsafety = relation.relate(&a.unsafety, &b.unsafety)?; let abi = relation.relate(&a.abi, &b.abi)?; let sig = relation.relate(&a.sig, &b.sig)?; - Ok(ty::BareFnTy {unsafety: unsafety, - abi: abi, - sig: sig}) + Ok(relation.tcx().mk_bare_fn(ty::BareFnTy { + unsafety: unsafety, + abi: abi, + sig: sig + })) } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::FnSig<'tcx> { - fn relate(relation: &mut R, - a: &ty::FnSig<'tcx>, - b: &ty::FnSig<'tcx>) - -> RelateResult<'tcx, ty::FnSig<'tcx>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for ty::FnSig<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::FnSig<'tcx>, + b: &ty::FnSig<'tcx>) + -> RelateResult<'tcx, ty::FnSig<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { if a.variadic != b.variadic { return Err(TypeError::VariadicMismatch( @@ -272,11 +273,11 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::FnSig<'tcx> { } } -fn relate_arg_vecs<'a,'tcx:'a,R>(relation: &mut R, - a_args: &[Ty<'tcx>], - b_args: &[Ty<'tcx>]) - -> RelateResult<'tcx, Vec>> - where R: TypeRelation<'a,'tcx> +fn relate_arg_vecs<'a, 'gcx, 'tcx, R>(relation: &mut R, + a_args: &[Ty<'tcx>], + b_args: &[Ty<'tcx>]) + -> RelateResult<'tcx, Vec>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { if a_args.len() != b_args.len() { return Err(TypeError::ArgCount); @@ -287,12 +288,12 @@ fn relate_arg_vecs<'a,'tcx:'a,R>(relation: &mut R, .collect() } -impl<'a,'tcx:'a> Relate<'a,'tcx> for ast::Unsafety { - fn relate(relation: &mut R, - a: &ast::Unsafety, - b: &ast::Unsafety) - -> RelateResult<'tcx, ast::Unsafety> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for ast::Unsafety { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ast::Unsafety, + b: &ast::Unsafety) + -> RelateResult<'tcx, ast::Unsafety> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { if a != b { Err(TypeError::UnsafetyMismatch(expected_found(relation, a, b))) @@ -302,12 +303,12 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for ast::Unsafety { } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for abi::Abi { - fn relate(relation: &mut R, - a: &abi::Abi, - b: &abi::Abi) - -> RelateResult<'tcx, abi::Abi> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for abi::Abi { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &abi::Abi, + b: &abi::Abi) + -> RelateResult<'tcx, abi::Abi> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { if a == b { Ok(*a) @@ -317,12 +318,12 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for abi::Abi { } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for 
ty::ProjectionTy<'tcx> { - fn relate(relation: &mut R, - a: &ty::ProjectionTy<'tcx>, - b: &ty::ProjectionTy<'tcx>) - -> RelateResult<'tcx, ty::ProjectionTy<'tcx>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for ty::ProjectionTy<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::ProjectionTy<'tcx>, + b: &ty::ProjectionTy<'tcx>) + -> RelateResult<'tcx, ty::ProjectionTy<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { if a.item_name != b.item_name { Err(TypeError::ProjectionNameMismatched( @@ -334,12 +335,12 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::ProjectionTy<'tcx> { } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::ProjectionPredicate<'tcx> { - fn relate(relation: &mut R, - a: &ty::ProjectionPredicate<'tcx>, - b: &ty::ProjectionPredicate<'tcx>) - -> RelateResult<'tcx, ty::ProjectionPredicate<'tcx>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for ty::ProjectionPredicate<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::ProjectionPredicate<'tcx>, + b: &ty::ProjectionPredicate<'tcx>) + -> RelateResult<'tcx, ty::ProjectionPredicate<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { let projection_ty = relation.relate(&a.projection_ty, &b.projection_ty)?; let ty = relation.relate(&a.ty, &b.ty)?; @@ -347,12 +348,12 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::ProjectionPredicate<'tcx> { } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for Vec> { - fn relate(relation: &mut R, - a: &Vec>, - b: &Vec>) - -> RelateResult<'tcx, Vec>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for Vec> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &Vec>, + b: &Vec>) + -> RelateResult<'tcx, Vec>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { // To be compatible, `a` and `b` must be for precisely the // same set of traits and item names. We always require that @@ -369,12 +370,12 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for Vec> { } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::ExistentialBounds<'tcx> { - fn relate(relation: &mut R, - a: &ty::ExistentialBounds<'tcx>, - b: &ty::ExistentialBounds<'tcx>) - -> RelateResult<'tcx, ty::ExistentialBounds<'tcx>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for ty::ExistentialBounds<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::ExistentialBounds<'tcx>, + b: &ty::ExistentialBounds<'tcx>) + -> RelateResult<'tcx, ty::ExistentialBounds<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { let r = relation.with_cause( @@ -390,12 +391,12 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::ExistentialBounds<'tcx> { } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::BuiltinBounds { - fn relate(relation: &mut R, - a: &ty::BuiltinBounds, - b: &ty::BuiltinBounds) - -> RelateResult<'tcx, ty::BuiltinBounds> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for ty::BuiltinBounds { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::BuiltinBounds, + b: &ty::BuiltinBounds) + -> RelateResult<'tcx, ty::BuiltinBounds> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { // Two sets of builtin bounds are only relatable if they are // precisely the same (but see the coercion code). 
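The `relate.rs` hunks above and below move the context lifetimes `'a` and `'gcx` off the `Relate` trait and onto the `relate` method, which is why every impl is rewritten from `Relate<'a,'tcx>` to `Relate<'tcx>`. A minimal standalone sketch of that shape, with `&'tcx str` standing in for an interned type and all names hypothetical (this is not rustc's own API):

    // The relation carries three lifetimes: 'a for borrows of the context,
    // 'gcx for globally interned data, 'tcx for the (possibly shorter)
    // inference arena.
    trait TypeRelation<'a, 'gcx: 'a + 'tcx, 'tcx: 'a>: Sized {
        // Generic entry point, dispatching to the Relate impl for T.
        fn relate<T: Relate<'tcx>>(&mut self, a: &T, b: &T) -> Result<T, String> {
            Relate::relate(self, a, b)
        }

        // The one primitive comparison this toy relation knows about.
        fn tys(&mut self, a: &'tcx str, b: &'tcx str) -> Result<&'tcx str, String>;
    }

    // Relatable values only name 'tcx; 'a and 'gcx are chosen per call.
    trait Relate<'tcx>: Sized {
        fn relate<'a, 'gcx, R>(relation: &mut R, a: &Self, b: &Self) -> Result<Self, String>
            where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a + 'tcx, 'tcx: 'a;
    }

    // A single impl, independent of the relation's 'a and 'gcx.
    impl<'tcx> Relate<'tcx> for &'tcx str {
        fn relate<'a, 'gcx, R>(relation: &mut R, a: &Self, b: &Self) -> Result<Self, String>
            where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a + 'tcx, 'tcx: 'a
        {
            relation.tys(a, b)
        }
    }

    // A trivial equality relation over the stand-in type.
    struct Equate;

    impl<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> TypeRelation<'a, 'gcx, 'tcx> for Equate {
        fn tys(&mut self, a: &'tcx str, b: &'tcx str) -> Result<&'tcx str, String> {
            if a == b { Ok(a) } else { Err(format!("{} != {}", a, b)) }
        }
    }

    fn main() {
        let mut eq = Equate;
        assert_eq!(eq.relate(&"i32", &"i32"), Ok("i32"));
        assert!(eq.relate(&"i32", &"u8").is_err());
    }

With the lifetimes on the method, one `Relate<'tcx>` impl per type serves every relation regardless of which `'a` and `'gcx` it runs under, instead of each impl having to spell out the per-relation `'a` parameter as before.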
@@ -407,29 +408,29 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::BuiltinBounds { } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::TraitRef<'tcx> { - fn relate(relation: &mut R, - a: &ty::TraitRef<'tcx>, - b: &ty::TraitRef<'tcx>) - -> RelateResult<'tcx, ty::TraitRef<'tcx>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for ty::TraitRef<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::TraitRef<'tcx>, + b: &ty::TraitRef<'tcx>) + -> RelateResult<'tcx, ty::TraitRef<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { // Different traits cannot be related if a.def_id != b.def_id { Err(TypeError::Traits(expected_found(relation, &a.def_id, &b.def_id))) } else { let substs = relate_item_substs(relation, a.def_id, a.substs, b.substs)?; - Ok(ty::TraitRef { def_id: a.def_id, substs: relation.tcx().mk_substs(substs) }) + Ok(ty::TraitRef { def_id: a.def_id, substs: substs }) } } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for Ty<'tcx> { - fn relate(relation: &mut R, - a: &Ty<'tcx>, - b: &Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for Ty<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &Ty<'tcx>, + b: &Ty<'tcx>) + -> RelateResult<'tcx, Ty<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { relation.tys(a, b) } @@ -438,11 +439,11 @@ impl<'a,'tcx:'a> Relate<'a,'tcx> for Ty<'tcx> { /// The main "type relation" routine. Note that this does not handle /// inference artifacts, so you should filter those out before calling /// it. -pub fn super_relate_tys<'a,'tcx:'a,R>(relation: &mut R, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>> - where R: TypeRelation<'a,'tcx> +pub fn super_relate_tys<'a, 'gcx, 'tcx, R>(relation: &mut R, + a: Ty<'tcx>, + b: Ty<'tcx>) + -> RelateResult<'tcx, Ty<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { let tcx = relation.tcx(); let a_sty = &a.sty; @@ -482,7 +483,7 @@ pub fn super_relate_tys<'a,'tcx:'a,R>(relation: &mut R, if a_def == b_def => { let substs = relate_item_substs(relation, a_def.did, a_substs, b_substs)?; - Ok(tcx.mk_enum(a_def, tcx.mk_substs(substs))) + Ok(tcx.mk_enum(a_def, substs)) } (&ty::TyTrait(ref a_), &ty::TyTrait(ref b_)) => @@ -496,17 +497,17 @@ pub fn super_relate_tys<'a,'tcx:'a,R>(relation: &mut R, if a_def == b_def => { let substs = relate_item_substs(relation, a_def.did, a_substs, b_substs)?; - Ok(tcx.mk_struct(a_def, tcx.mk_substs(substs))) + Ok(tcx.mk_struct(a_def, substs)) } - (&ty::TyClosure(a_id, ref a_substs), - &ty::TyClosure(b_id, ref b_substs)) + (&ty::TyClosure(a_id, a_substs), + &ty::TyClosure(b_id, b_substs)) if a_id == b_id => { // All TyClosure types with the same id represent // the (anonymous) type of the same closure expression. So // all of their regions should be equated. 
- let substs = relation.relate(a_substs, b_substs)?; + let substs = relation.relate(&a_substs, &b_substs)?; Ok(tcx.mk_closure_from_closure_substs(a_id, substs)) } @@ -545,7 +546,7 @@ pub fn super_relate_tys<'a,'tcx:'a,R>(relation: &mut R, Ok(tcx.mk_slice(t)) } - (&ty::TyTuple(ref as_), &ty::TyTuple(ref bs)) => + (&ty::TyTuple(as_), &ty::TyTuple(bs)) => { if as_.len() == bs.len() { let ts = as_.iter().zip(bs) @@ -565,13 +566,13 @@ pub fn super_relate_tys<'a,'tcx:'a,R>(relation: &mut R, if a_def_id == b_def_id => { let substs = relate_substs(relation, None, a_substs, b_substs)?; - let fty = relation.relate(a_fty, b_fty)?; - Ok(tcx.mk_fn_def(a_def_id, tcx.mk_substs(substs), fty)) + let fty = relation.relate(&a_fty, &b_fty)?; + Ok(tcx.mk_fn_def(a_def_id, substs, fty)) } (&ty::TyFnPtr(a_fty), &ty::TyFnPtr(b_fty)) => { - let fty = relation.relate(a_fty, b_fty)?; + let fty = relation.relate(&a_fty, &b_fty)?; Ok(tcx.mk_fn_ptr(fty)) } @@ -588,63 +589,61 @@ pub fn super_relate_tys<'a,'tcx:'a,R>(relation: &mut R, } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::ClosureSubsts<'tcx> { - fn relate(relation: &mut R, - a: &ty::ClosureSubsts<'tcx>, - b: &ty::ClosureSubsts<'tcx>) - -> RelateResult<'tcx, ty::ClosureSubsts<'tcx>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for ty::ClosureSubsts<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::ClosureSubsts<'tcx>, + b: &ty::ClosureSubsts<'tcx>) + -> RelateResult<'tcx, ty::ClosureSubsts<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { - let func_substs = relate_substs(relation, None, a.func_substs, b.func_substs)?; + let substs = relate_substs(relation, None, a.func_substs, b.func_substs)?; let upvar_tys = relation.relate_zip(&a.upvar_tys, &b.upvar_tys)?; - Ok(ty::ClosureSubsts { func_substs: relation.tcx().mk_substs(func_substs), - upvar_tys: upvar_tys }) + Ok(ty::ClosureSubsts { + func_substs: substs, + upvar_tys: relation.tcx().mk_type_list(upvar_tys) + }) } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for Substs<'tcx> { - fn relate(relation: &mut R, - a: &Substs<'tcx>, - b: &Substs<'tcx>) - -> RelateResult<'tcx, Substs<'tcx>> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for &'tcx Substs<'tcx> { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &&'tcx Substs<'tcx>, + b: &&'tcx Substs<'tcx>) + -> RelateResult<'tcx, &'tcx Substs<'tcx>> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { relate_substs(relation, None, a, b) } } -impl<'a,'tcx:'a> Relate<'a,'tcx> for ty::Region { - fn relate(relation: &mut R, - a: &ty::Region, - b: &ty::Region) - -> RelateResult<'tcx, ty::Region> - where R: TypeRelation<'a,'tcx> +impl<'tcx> Relate<'tcx> for ty::Region { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::Region, + b: &ty::Region) + -> RelateResult<'tcx, ty::Region> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { relation.regions(*a, *b) } } -impl<'a,'tcx:'a,T> Relate<'a,'tcx> for ty::Binder - where T: Relate<'a,'tcx> -{ - fn relate(relation: &mut R, - a: &ty::Binder, - b: &ty::Binder) - -> RelateResult<'tcx, ty::Binder> - where R: TypeRelation<'a,'tcx> +impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for ty::Binder { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &ty::Binder, + b: &ty::Binder) + -> RelateResult<'tcx, ty::Binder> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { relation.binders(a, b) } } -impl<'a,'tcx:'a,T> Relate<'a,'tcx> for Rc - where T: Relate<'a,'tcx> -{ - fn relate(relation: &mut R, - a: &Rc, - b: &Rc) - -> RelateResult<'tcx, Rc> - where R: 
TypeRelation<'a,'tcx> +impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for Rc { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &Rc, + b: &Rc) + -> RelateResult<'tcx, Rc> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { let a: &T = a; let b: &T = b; @@ -652,14 +651,12 @@ impl<'a,'tcx:'a,T> Relate<'a,'tcx> for Rc } } -impl<'a,'tcx:'a,T> Relate<'a,'tcx> for Box - where T: Relate<'a,'tcx> -{ - fn relate(relation: &mut R, - a: &Box, - b: &Box) - -> RelateResult<'tcx, Box> - where R: TypeRelation<'a,'tcx> +impl<'tcx, T: Relate<'tcx>> Relate<'tcx> for Box { + fn relate<'a, 'gcx, R>(relation: &mut R, + a: &Box, + b: &Box) + -> RelateResult<'tcx, Box> + where R: TypeRelation<'a, 'gcx, 'tcx>, 'gcx: 'a+'tcx, 'tcx: 'a { let a: &T = a; let b: &T = b; @@ -670,11 +667,11 @@ impl<'a,'tcx:'a,T> Relate<'a,'tcx> for Box /////////////////////////////////////////////////////////////////////////// // Error handling -pub fn expected_found<'a,'tcx:'a,R,T>(relation: &mut R, - a: &T, - b: &T) - -> ExpectedFound - where R: TypeRelation<'a,'tcx>, T: Clone +pub fn expected_found<'a, 'gcx, 'tcx, R, T>(relation: &mut R, + a: &T, + b: &T) + -> ExpectedFound + where R: TypeRelation<'a, 'gcx, 'tcx>, T: Clone, 'gcx: 'a+'tcx, 'tcx: 'a { expected_found_bool(relation.a_is_expected(), a, b) } diff --git a/src/librustc/ty/structural_impls.rs b/src/librustc/ty/structural_impls.rs index bbf6d0329b..77e980ff31 100644 --- a/src/librustc/ty/structural_impls.rs +++ b/src/librustc/ty/structural_impls.rs @@ -8,8 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use infer::type_variable; use ty::subst::{self, VecPerParamSpace}; -use traits; use ty::{self, Lift, TraitRef, Ty, TyCtxt}; use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor}; @@ -24,14 +24,34 @@ use hir; impl<'tcx, A: Lift<'tcx>, B: Lift<'tcx>> Lift<'tcx> for (A, B) { type Lifted = (A::Lifted, B::Lifted); - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option { + fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { tcx.lift(&self.0).and_then(|a| tcx.lift(&self.1).map(|b| (a, b))) } } +impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Option { + type Lifted = Option; + fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { + match *self { + Some(ref x) => tcx.lift(x).map(Some), + None => Some(None) + } + } +} + +impl<'tcx, T: Lift<'tcx>, E: Lift<'tcx>> Lift<'tcx> for Result { + type Lifted = Result; + fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { + match *self { + Ok(ref x) => tcx.lift(x).map(Ok), + Err(ref e) => tcx.lift(e).map(Err) + } + } +} + impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for [T] { type Lifted = Vec; - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option { + fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { // type annotation needed to inform `projection_must_outlive` let mut result : Vec<>::Lifted> = Vec::with_capacity(self.len()); @@ -46,16 +66,23 @@ impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for [T] { } } +impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for Vec { + type Lifted = Vec; + fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { + tcx.lift(&self[..]) + } +} + impl<'tcx> Lift<'tcx> for ty::Region { type Lifted = Self; - fn lift_to_tcx(&self, _: &TyCtxt<'tcx>) -> Option { + fn lift_to_tcx(&self, _: TyCtxt) -> Option { Some(*self) } } impl<'a, 'tcx> Lift<'tcx> for TraitRef<'a> { type Lifted = TraitRef<'tcx>; - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option> { + fn lift_to_tcx<'b, 'gcx>(&self, tcx: 
TyCtxt<'b, 'gcx, 'tcx>) -> Option> { tcx.lift(&self.substs).map(|substs| TraitRef { def_id: self.def_id, substs: substs @@ -65,7 +92,8 @@ impl<'a, 'tcx> Lift<'tcx> for TraitRef<'a> { impl<'a, 'tcx> Lift<'tcx> for ty::TraitPredicate<'a> { type Lifted = ty::TraitPredicate<'tcx>; - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option> { + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) + -> Option> { tcx.lift(&self.trait_ref).map(|trait_ref| ty::TraitPredicate { trait_ref: trait_ref }) @@ -74,21 +102,23 @@ impl<'a, 'tcx> Lift<'tcx> for ty::TraitPredicate<'a> { impl<'a, 'tcx> Lift<'tcx> for ty::EquatePredicate<'a> { type Lifted = ty::EquatePredicate<'tcx>; - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option> { + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) + -> Option> { tcx.lift(&(self.0, self.1)).map(|(a, b)| ty::EquatePredicate(a, b)) } } impl<'tcx, A: Copy+Lift<'tcx>, B: Copy+Lift<'tcx>> Lift<'tcx> for ty::OutlivesPredicate { type Lifted = ty::OutlivesPredicate; - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option { + fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { tcx.lift(&(self.0, self.1)).map(|(a, b)| ty::OutlivesPredicate(a, b)) } } impl<'a, 'tcx> Lift<'tcx> for ty::ProjectionPredicate<'a> { type Lifted = ty::ProjectionPredicate<'tcx>; - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option> { + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) + -> Option> { tcx.lift(&(self.projection_ty.trait_ref, self.ty)).map(|(trait_ref, ty)| { ty::ProjectionPredicate { projection_ty: ty::ProjectionTy { @@ -101,13 +131,196 @@ impl<'a, 'tcx> Lift<'tcx> for ty::ProjectionPredicate<'a> { } } +impl<'a, 'tcx> Lift<'tcx> for ty::Predicate<'a> { + type Lifted = ty::Predicate<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + match *self { + ty::Predicate::Trait(ref binder) => { + tcx.lift(binder).map(ty::Predicate::Trait) + } + ty::Predicate::Equate(ref binder) => { + tcx.lift(binder).map(ty::Predicate::Equate) + } + ty::Predicate::RegionOutlives(ref binder) => { + tcx.lift(binder).map(ty::Predicate::RegionOutlives) + } + ty::Predicate::TypeOutlives(ref binder) => { + tcx.lift(binder).map(ty::Predicate::TypeOutlives) + } + ty::Predicate::Projection(ref binder) => { + tcx.lift(binder).map(ty::Predicate::Projection) + } + ty::Predicate::WellFormed(ty) => { + tcx.lift(&ty).map(ty::Predicate::WellFormed) + } + ty::Predicate::Rfc1592(box ref a) => { + tcx.lift(a).map(|a| ty::Predicate::Rfc1592(Box::new(a))) + } + ty::Predicate::ClosureKind(closure_def_id, kind) => { + Some(ty::Predicate::ClosureKind(closure_def_id, kind)) + } + ty::Predicate::ObjectSafe(trait_def_id) => { + Some(ty::Predicate::ObjectSafe(trait_def_id)) + } + } + } +} + impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::Binder { type Lifted = ty::Binder; - fn lift_to_tcx(&self, tcx: &TyCtxt<'tcx>) -> Option { + fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { tcx.lift(&self.0).map(|x| ty::Binder(x)) } } +impl<'a, 'tcx> Lift<'tcx> for ty::ClosureSubsts<'a> { + type Lifted = ty::ClosureSubsts<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + tcx.lift(&(self.func_substs, self.upvar_tys)).map(|(substs, upvar_tys)| { + ty::ClosureSubsts { + func_substs: substs, + upvar_tys: upvar_tys + } + }) + } +} + +impl<'a, 'tcx> Lift<'tcx> for ty::ItemSubsts<'a> { + type Lifted = ty::ItemSubsts<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + tcx.lift(&self.substs).map(|substs| 
{ + ty::ItemSubsts { + substs: substs + } + }) + } +} + +impl<'a, 'tcx> Lift<'tcx> for ty::adjustment::AutoRef<'a> { + type Lifted = ty::adjustment::AutoRef<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + match *self { + ty::adjustment::AutoPtr(r, m) => { + tcx.lift(&r).map(|r| ty::adjustment::AutoPtr(r, m)) + } + ty::adjustment::AutoUnsafe(m) => { + Some(ty::adjustment::AutoUnsafe(m)) + } + } + } +} + +impl<'a, 'tcx> Lift<'tcx> for ty::FnOutput<'a> { + type Lifted = ty::FnOutput<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + match *self { + ty::FnConverging(ty) => { + tcx.lift(&ty).map(ty::FnConverging) + } + ty::FnDiverging => Some(ty::FnDiverging) + } + } +} + +impl<'a, 'tcx> Lift<'tcx> for ty::FnSig<'a> { + type Lifted = ty::FnSig<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + tcx.lift(&self.inputs[..]).and_then(|inputs| { + tcx.lift(&self.output).map(|output| { + ty::FnSig { + inputs: inputs, + output: output, + variadic: self.variadic + } + }) + }) + } +} + +impl<'a, 'tcx> Lift<'tcx> for ty::ClosureTy<'a> { + type Lifted = ty::ClosureTy<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + tcx.lift(&self.sig).map(|sig| { + ty::ClosureTy { + sig: sig, + unsafety: self.unsafety, + abi: self.abi + } + }) + } +} + +impl<'tcx, T: Lift<'tcx>> Lift<'tcx> for ty::error::ExpectedFound { + type Lifted = ty::error::ExpectedFound; + fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option { + tcx.lift(&self.expected).and_then(|expected| { + tcx.lift(&self.found).map(|found| { + ty::error::ExpectedFound { + expected: expected, + found: found + } + }) + }) + } +} + +impl<'a, 'tcx> Lift<'tcx> for type_variable::Default<'a> { + type Lifted = type_variable::Default<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + tcx.lift(&self.ty).map(|ty| { + type_variable::Default { + ty: ty, + origin_span: self.origin_span, + def_id: self.def_id + } + }) + } +} + +impl<'a, 'tcx> Lift<'tcx> for ty::error::TypeError<'a> { + type Lifted = ty::error::TypeError<'tcx>; + fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option { + use ty::error::TypeError::*; + + Some(match *self { + Mismatch => Mismatch, + UnsafetyMismatch(x) => UnsafetyMismatch(x), + AbiMismatch(x) => AbiMismatch(x), + Mutability => Mutability, + BoxMutability => BoxMutability, + PtrMutability => PtrMutability, + RefMutability => RefMutability, + VecMutability => VecMutability, + TupleSize(x) => TupleSize(x), + FixedArraySize(x) => FixedArraySize(x), + TyParamSize(x) => TyParamSize(x), + ArgCount => ArgCount, + RegionsDoesNotOutlive(a, b) => RegionsDoesNotOutlive(a, b), + RegionsNotSame(a, b) => RegionsNotSame(a, b), + RegionsNoOverlap(a, b) => RegionsNoOverlap(a, b), + RegionsInsufficientlyPolymorphic(a, b) => { + RegionsInsufficientlyPolymorphic(a, b) + } + RegionsOverlyPolymorphic(a, b) => RegionsOverlyPolymorphic(a, b), + IntegerAsChar => IntegerAsChar, + IntMismatch(x) => IntMismatch(x), + FloatMismatch(x) => FloatMismatch(x), + Traits(x) => Traits(x), + BuiltinBoundsMismatch(x) => BuiltinBoundsMismatch(x), + VariadicMismatch(x) => VariadicMismatch(x), + CyclicTy => CyclicTy, + ConvergenceMismatch(x) => ConvergenceMismatch(x), + ProjectionNameMismatched(x) => ProjectionNameMismatched(x), + ProjectionBoundsLength(x) => ProjectionBoundsLength(x), + + Sorts(ref x) => return tcx.lift(x).map(Sorts), + TyParamDefaultMismatch(ref x) => { + return 
tcx.lift(x).map(TyParamDefaultMismatch) + } + }) + } +} + /////////////////////////////////////////////////////////////////////////// // TypeFoldable implementations. // @@ -123,7 +336,7 @@ macro_rules! CopyImpls { ($($ty:ty),+) => { $( impl<'tcx> TypeFoldable<'tcx> for $ty { - fn super_fold_with>(&self, _: &mut F) -> $ty { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _: &mut F) -> $ty { *self } @@ -138,7 +351,7 @@ macro_rules! CopyImpls { CopyImpls! { (), hir::Unsafety, abi::Abi } impl<'tcx, T:TypeFoldable<'tcx>, U:TypeFoldable<'tcx>> TypeFoldable<'tcx> for (T, U) { - fn super_fold_with>(&self, folder: &mut F) -> (T, U) { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> (T, U) { (self.0.fold_with(folder), self.1.fold_with(folder)) } @@ -148,7 +361,7 @@ impl<'tcx, T:TypeFoldable<'tcx>, U:TypeFoldable<'tcx>> TypeFoldable<'tcx> for (T } impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Option { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { self.as_ref().map(|t| t.fold_with(folder)) } @@ -158,7 +371,7 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Option { } impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Rc { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { Rc::new((**self).fold_with(folder)) } @@ -168,7 +381,7 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Rc { } impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Box { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { let content: T = (**self).fold_with(folder); box content } @@ -179,7 +392,7 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Box { } impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Vec { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { self.iter().map(|t| t.fold_with(folder)).collect() } @@ -189,11 +402,11 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for Vec { } impl<'tcx, T:TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::Binder(self.0.fold_with(folder)) } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_binder(self) } @@ -207,7 +420,7 @@ impl<'tcx, T:TypeFoldable<'tcx>> TypeFoldable<'tcx> for ty::Binder { } impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for P<[T]> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { self.iter().map(|t| t.fold_with(folder)).collect() } @@ -217,7 +430,7 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for P<[T]> { } impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for VecPerParamSpace { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { self.map(|elem| elem.fold_with(folder)) } @@ -227,7 +440,7 @@ impl<'tcx, T: TypeFoldable<'tcx>> TypeFoldable<'tcx> for VecPerParamSpace { } impl<'tcx> TypeFoldable<'tcx> for 
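[Editorial aside, not part of the patch.] The `TypeFoldable` impls being touched here all follow one recipe: `fold_with` offers the value to the folder, `super_fold_with` rebuilds it from its folded components, leaves return themselves, and containers such as `Option`, `Vec`, `Box` and tuples simply map the fold over their contents. The substitution engine in `subst.rs` further below is exactly such a folder. A minimal self-contained sketch of that recipe with toy `Ty`, `TypeFolder` and `TypeFoldable` stand-ins (not rustc's real definitions):

// Toy fold pattern: `fold_with` lets a folder intercept every type,
// `super_fold` rebuilds a type from its folded components, and leaves
// are returned unchanged.

#[derive(Clone, Debug, PartialEq)]
enum Ty {
    Int,
    Param(u32),     // a type parameter such as `T`
    Tuple(Vec<Ty>), // `(A, B, ...)`
    Ref(Box<Ty>),   // `&T`
}

trait TypeFolder {
    fn fold_ty(&mut self, ty: &Ty) -> Ty;
}

trait TypeFoldable {
    fn fold_with<F: TypeFolder>(&self, folder: &mut F) -> Self;
}

impl TypeFoldable for Ty {
    fn fold_with<F: TypeFolder>(&self, folder: &mut F) -> Ty {
        // Every type is offered to the folder; the folder decides whether
        // to rewrite it or to keep recursing via `super_fold`.
        folder.fold_ty(self)
    }
}

// Rebuild a type from its folded parts (the `super_fold_with` role).
fn super_fold<F: TypeFolder>(ty: &Ty, folder: &mut F) -> Ty {
    match *ty {
        Ty::Int | Ty::Param(_) => ty.clone(),
        Ty::Tuple(ref ts) => Ty::Tuple(ts.iter().map(|t| t.fold_with(folder)).collect()),
        Ty::Ref(ref t) => Ty::Ref(Box::new(t.fold_with(folder))),
    }
}

// A concrete folder in the spirit of `SubstFolder`: replace parameter 0
// with a fixed type, recursing everywhere else.
struct Subst(Ty);

impl TypeFolder for Subst {
    fn fold_ty(&mut self, ty: &Ty) -> Ty {
        match *ty {
            Ty::Param(0) => self.0.clone(),
            _ => super_fold(ty, self),
        }
    }
}

fn main() {
    let ty = Ty::Tuple(vec![Ty::Param(0), Ty::Ref(Box::new(Ty::Param(0))), Ty::Int]);
    let substituted = ty.fold_with(&mut Subst(Ty::Int));
    assert_eq!(substituted,
               Ty::Tuple(vec![Ty::Int, Ty::Ref(Box::new(Ty::Int)), Ty::Int]));
}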
ty::TraitTy<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::TraitTy { principal: self.principal.fold_with(folder), bounds: self.bounds.fold_with(folder), @@ -239,41 +452,38 @@ impl<'tcx> TypeFoldable<'tcx> for ty::TraitTy<'tcx> { } } +impl<'tcx> TypeFoldable<'tcx> for &'tcx [Ty<'tcx>] { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { + let tys = self.iter().map(|t| t.fold_with(folder)).collect(); + folder.tcx().mk_type_list(tys) + } + + fn super_visit_with>(&self, visitor: &mut V) -> bool { + self.iter().any(|t| t.visit_with(visitor)) + } +} + impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { let sty = match self.sty { ty::TyBox(typ) => ty::TyBox(typ.fold_with(folder)), - ty::TyRawPtr(ref tm) => ty::TyRawPtr(tm.fold_with(folder)), + ty::TyRawPtr(tm) => ty::TyRawPtr(tm.fold_with(folder)), ty::TyArray(typ, sz) => ty::TyArray(typ.fold_with(folder), sz), ty::TySlice(typ) => ty::TySlice(typ.fold_with(folder)), - ty::TyEnum(tid, ref substs) => { - let substs = substs.fold_with(folder); - ty::TyEnum(tid, folder.tcx().mk_substs(substs)) - } + ty::TyEnum(tid, substs) => ty::TyEnum(tid, substs.fold_with(folder)), ty::TyTrait(ref trait_ty) => ty::TyTrait(trait_ty.fold_with(folder)), - ty::TyTuple(ref ts) => ty::TyTuple(ts.fold_with(folder)), - ty::TyFnDef(def_id, substs, ref f) => { - let substs = substs.fold_with(folder); - let bfn = f.fold_with(folder); + ty::TyTuple(ts) => ty::TyTuple(ts.fold_with(folder)), + ty::TyFnDef(def_id, substs, f) => { ty::TyFnDef(def_id, - folder.tcx().mk_substs(substs), - folder.tcx().mk_bare_fn(bfn)) - } - ty::TyFnPtr(ref f) => { - let bfn = f.fold_with(folder); - ty::TyFnPtr(folder.tcx().mk_bare_fn(bfn)) - } - ty::TyRef(r, ref tm) => { - let r = r.fold_with(folder); - ty::TyRef(folder.tcx().mk_region(r), tm.fold_with(folder)) - } - ty::TyStruct(did, ref substs) => { - let substs = substs.fold_with(folder); - ty::TyStruct(did, folder.tcx().mk_substs(substs)) + substs.fold_with(folder), + f.fold_with(folder)) } - ty::TyClosure(did, ref substs) => { - ty::TyClosure(did, substs.fold_with(folder)) + ty::TyFnPtr(f) => ty::TyFnPtr(f.fold_with(folder)), + ty::TyRef(ref r, tm) => { + ty::TyRef(r.fold_with(folder), tm.fold_with(folder)) } + ty::TyStruct(did, substs) => ty::TyStruct(did, substs.fold_with(folder)), + ty::TyClosure(did, substs) => ty::TyClosure(did, substs.fold_with(folder)), ty::TyProjection(ref data) => ty::TyProjection(data.fold_with(folder)), ty::TyBool | ty::TyChar | ty::TyStr | ty::TyInt(_) | ty::TyUint(_) | ty::TyFloat(_) | ty::TyError | ty::TyInfer(_) | @@ -282,7 +492,7 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { folder.tcx().mk_ty(sty) } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_ty(*self) } @@ -294,7 +504,7 @@ impl<'tcx> TypeFoldable<'tcx> for Ty<'tcx> { ty::TySlice(typ) => typ.visit_with(visitor), ty::TyEnum(_tid, ref substs) => substs.visit_with(visitor), ty::TyTrait(ref trait_ty) => trait_ty.visit_with(visitor), - ty::TyTuple(ref ts) => ts.visit_with(visitor), + ty::TyTuple(ts) => ts.visit_with(visitor), ty::TyFnDef(_, substs, ref f) => { substs.visit_with(visitor) || f.visit_with(visitor) } @@ -314,14 +524,17 @@ impl<'tcx> TypeFoldable<'tcx> for 
Ty<'tcx> { } } -impl<'tcx> TypeFoldable<'tcx> for ty::BareFnTy<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { - ty::BareFnTy { sig: self.sig.fold_with(folder), - abi: self.abi, - unsafety: self.unsafety } +impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::BareFnTy<'tcx> { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { + let fty = ty::BareFnTy { + sig: self.sig.fold_with(folder), + abi: self.abi, + unsafety: self.unsafety + }; + folder.tcx().mk_bare_fn(fty) } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_bare_fn_ty(self) } @@ -331,7 +544,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::BareFnTy<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::ClosureTy<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::ClosureTy { sig: self.sig.fold_with(folder), unsafety: self.unsafety, @@ -339,7 +552,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ClosureTy<'tcx> { } } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_closure_ty(self) } @@ -349,11 +562,11 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ClosureTy<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::TypeAndMut<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::TypeAndMut { ty: self.ty.fold_with(folder), mutbl: self.mutbl } } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_mt(self) } @@ -363,14 +576,14 @@ impl<'tcx> TypeFoldable<'tcx> for ty::TypeAndMut<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::FnOutput<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { match *self { ty::FnConverging(ref ty) => ty::FnConverging(ty.fold_with(folder)), ty::FnDiverging => ty::FnDiverging } } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_output(self) } @@ -383,13 +596,13 @@ impl<'tcx> TypeFoldable<'tcx> for ty::FnOutput<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::FnSig<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::FnSig { inputs: self.inputs.fold_with(folder), output: self.output.fold_with(folder), variadic: self.variadic } } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_fn_sig(self) } @@ -399,15 +612,14 @@ impl<'tcx> TypeFoldable<'tcx> for ty::FnSig<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::TraitRef<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { - let substs = self.substs.fold_with(folder); + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::TraitRef { def_id: self.def_id, - substs: folder.tcx().mk_substs(substs), + substs: self.substs.fold_with(folder), } } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_trait_ref(self) } @@ -417,7 
+629,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::TraitRef<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::ImplHeader<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::ImplHeader { impl_def_id: self.impl_def_id, self_ty: self.self_ty.fold_with(folder), @@ -426,7 +638,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ImplHeader<'tcx> { } } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_impl_header(self) } @@ -438,11 +650,11 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ImplHeader<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::Region { - fn super_fold_with>(&self, _folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _folder: &mut F) -> Self { *self } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_region(*self) } @@ -455,13 +667,35 @@ impl<'tcx> TypeFoldable<'tcx> for ty::Region { } } -impl<'tcx> TypeFoldable<'tcx> for subst::Substs<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { - subst::Substs { regions: self.regions.fold_with(folder), - types: self.types.fold_with(folder) } +impl<'tcx> TypeFoldable<'tcx> for &'tcx ty::Region { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _folder: &mut F) -> Self { + *self + } + + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { + let region = folder.fold_region(**self); + folder.tcx().mk_region(region) } - fn fold_with>(&self, folder: &mut F) -> Self { + fn super_visit_with>(&self, _visitor: &mut V) -> bool { + false + } + + fn visit_with>(&self, visitor: &mut V) -> bool { + visitor.visit_region(**self) + } +} + +impl<'tcx> TypeFoldable<'tcx> for &'tcx subst::Substs<'tcx> { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { + let substs = subst::Substs { + regions: self.regions.fold_with(folder), + types: self.types.fold_with(folder) + }; + folder.tcx().mk_substs(substs) + } + + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_substs(self) } @@ -471,10 +705,9 @@ impl<'tcx> TypeFoldable<'tcx> for subst::Substs<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::ClosureSubsts<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { - let func_substs = self.func_substs.fold_with(folder); + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::ClosureSubsts { - func_substs: folder.tcx().mk_substs(func_substs), + func_substs: self.func_substs.fold_with(folder), upvar_tys: self.upvar_tys.fold_with(folder), } } @@ -485,7 +718,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ClosureSubsts<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::ItemSubsts<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::ItemSubsts { substs: self.substs.fold_with(folder), } @@ -497,17 +730,16 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ItemSubsts<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::AutoRef<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { match *self { - ty::adjustment::AutoPtr(r, m) => { - let r = r.fold_with(folder); - 
ty::adjustment::AutoPtr(folder.tcx().mk_region(r), m) + ty::adjustment::AutoPtr(ref r, m) => { + ty::adjustment::AutoPtr(r.fold_with(folder), m) } ty::adjustment::AutoUnsafe(m) => ty::adjustment::AutoUnsafe(m) } } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_autoref(self) } @@ -520,7 +752,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::adjustment::AutoRef<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::BuiltinBounds { - fn super_fold_with>(&self, _folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, _folder: &mut F) -> Self { *self } @@ -530,7 +762,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::BuiltinBounds { } impl<'tcx> TypeFoldable<'tcx> for ty::ExistentialBounds<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::ExistentialBounds { region_bound: self.region_bound.fold_with(folder), builtin_bounds: self.builtin_bounds, @@ -538,7 +770,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ExistentialBounds<'tcx> { } } - fn fold_with>(&self, folder: &mut F) -> Self { + fn fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { folder.fold_existential_bounds(self) } @@ -548,7 +780,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ExistentialBounds<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::TypeParameterDef<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::TypeParameterDef { name: self.name, def_id: self.def_id, @@ -567,7 +799,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::TypeParameterDef<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::ObjectLifetimeDefault { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { match *self { ty::ObjectLifetimeDefault::Ambiguous => ty::ObjectLifetimeDefault::Ambiguous, @@ -589,7 +821,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ObjectLifetimeDefault { } impl<'tcx> TypeFoldable<'tcx> for ty::RegionParameterDef { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::RegionParameterDef { name: self.name, def_id: self.def_id, @@ -605,7 +837,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::RegionParameterDef { } impl<'tcx> TypeFoldable<'tcx> for ty::Generics<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::Generics { types: self.types.fold_with(folder), regions: self.regions.fold_with(folder), @@ -618,7 +850,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::Generics<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::GenericPredicates<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::GenericPredicates { predicates: self.predicates.fold_with(folder), } @@ -630,10 +862,12 @@ impl<'tcx> TypeFoldable<'tcx> for ty::GenericPredicates<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::Predicate<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { match *self { ty::Predicate::Trait(ref a) => 
ty::Predicate::Trait(a.fold_with(folder)), + ty::Predicate::Rfc1592(ref a) => + ty::Predicate::Rfc1592(a.fold_with(folder)), ty::Predicate::Equate(ref binder) => ty::Predicate::Equate(binder.fold_with(folder)), ty::Predicate::RegionOutlives(ref binder) => @@ -644,6 +878,8 @@ impl<'tcx> TypeFoldable<'tcx> for ty::Predicate<'tcx> { ty::Predicate::Projection(binder.fold_with(folder)), ty::Predicate::WellFormed(data) => ty::Predicate::WellFormed(data.fold_with(folder)), + ty::Predicate::ClosureKind(closure_def_id, kind) => + ty::Predicate::ClosureKind(closure_def_id, kind), ty::Predicate::ObjectSafe(trait_def_id) => ty::Predicate::ObjectSafe(trait_def_id), } @@ -652,18 +888,20 @@ impl<'tcx> TypeFoldable<'tcx> for ty::Predicate<'tcx> { fn super_visit_with>(&self, visitor: &mut V) -> bool { match *self { ty::Predicate::Trait(ref a) => a.visit_with(visitor), + ty::Predicate::Rfc1592(ref a) => a.visit_with(visitor), ty::Predicate::Equate(ref binder) => binder.visit_with(visitor), ty::Predicate::RegionOutlives(ref binder) => binder.visit_with(visitor), ty::Predicate::TypeOutlives(ref binder) => binder.visit_with(visitor), ty::Predicate::Projection(ref binder) => binder.visit_with(visitor), ty::Predicate::WellFormed(data) => data.visit_with(visitor), + ty::Predicate::ClosureKind(_closure_def_id, _kind) => false, ty::Predicate::ObjectSafe(_trait_def_id) => false, } } } impl<'tcx> TypeFoldable<'tcx> for ty::ProjectionPredicate<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::ProjectionPredicate { projection_ty: self.projection_ty.fold_with(folder), ty: self.ty.fold_with(folder), @@ -676,7 +914,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ProjectionPredicate<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::ProjectionTy<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::ProjectionTy { trait_ref: self.trait_ref.fold_with(folder), item_name: self.item_name, @@ -689,7 +927,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ProjectionTy<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::InstantiatedPredicates<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::InstantiatedPredicates { predicates: self.predicates.fold_with(folder), } @@ -701,7 +939,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::InstantiatedPredicates<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::EquatePredicate<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::EquatePredicate(self.0.fold_with(folder), self.1.fold_with(folder)) } @@ -712,7 +950,7 @@ impl<'tcx> TypeFoldable<'tcx> for ty::EquatePredicate<'tcx> { } impl<'tcx> TypeFoldable<'tcx> for ty::TraitPredicate<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::TraitPredicate { trait_ref: self.trait_ref.fold_with(folder) } @@ -727,7 +965,7 @@ impl<'tcx,T,U> TypeFoldable<'tcx> for ty::OutlivesPredicate where T : TypeFoldable<'tcx>, U : TypeFoldable<'tcx>, { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::OutlivesPredicate(self.0.fold_with(folder), 
self.1.fold_with(folder)) } @@ -738,7 +976,7 @@ impl<'tcx,T,U> TypeFoldable<'tcx> for ty::OutlivesPredicate } impl<'tcx> TypeFoldable<'tcx> for ty::ClosureUpvar<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::ClosureUpvar { def: self.def, span: self.span, @@ -751,15 +989,12 @@ impl<'tcx> TypeFoldable<'tcx> for ty::ClosureUpvar<'tcx> { } } -impl<'a, 'tcx> TypeFoldable<'tcx> for ty::ParameterEnvironment<'a, 'tcx> where 'tcx: 'a { - fn super_fold_with>(&self, folder: &mut F) -> Self { +impl<'tcx> TypeFoldable<'tcx> for ty::ParameterEnvironment<'tcx> { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::ParameterEnvironment { - tcx: self.tcx, free_substs: self.free_substs.fold_with(folder), implicit_region_bound: self.implicit_region_bound.fold_with(folder), caller_bounds: self.caller_bounds.fold_with(folder), - selection_cache: traits::SelectionCache::new(), - evaluation_cache: traits::EvaluationCache::new(), free_id_outlive: self.free_id_outlive, } } @@ -772,7 +1007,7 @@ impl<'a, 'tcx> TypeFoldable<'tcx> for ty::ParameterEnvironment<'a, 'tcx> where ' } impl<'tcx> TypeFoldable<'tcx> for ty::TypeScheme<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { ty::TypeScheme { generics: self.generics.fold_with(folder), ty: self.ty.fold_with(folder), diff --git a/src/librustc/ty/sty.rs b/src/librustc/ty/sty.rs index c0fb60d4dd..32434d40e6 100644 --- a/src/librustc/ty/sty.rs +++ b/src/librustc/ty/sty.rs @@ -14,7 +14,6 @@ use middle::cstore; use hir::def_id::DefId; use middle::region; use ty::subst::{self, Substs}; -use traits; use ty::{self, AdtDef, ToPredicate, TypeFlags, Ty, TyCtxt, TyS, TypeFoldable}; use util::common::ErrorReported; @@ -24,9 +23,9 @@ use std::ops; use std::mem; use syntax::abi; use syntax::ast::{self, Name}; -use syntax::parse::token::special_idents; +use syntax::parse::token::keywords; -use serialize::{Decodable, Decoder}; +use serialize::{Decodable, Decoder, Encodable, Encoder}; use hir; @@ -94,10 +93,8 @@ pub enum TypeVariants<'tcx> { /// Substs here, possibly against intuition, *may* contain `TyParam`s. /// That is, even after substitution it is possible that there are type /// variables. This happens when the `TyEnum` corresponds to an enum - /// definition and not a concrete use of it. To get the correct `TyEnum` - /// from the tcx, use the `NodeId` from the `ast::Ty` and look it up in - /// the `ast_ty_to_ty_cache`. This is probably true for `TyStruct` as - /// well. + /// definition and not a concrete use of it. This is true for `TyStruct` + /// as well. TyEnum(AdtDef<'tcx>, &'tcx Substs<'tcx>), /// A structure type, defined with `struct`. @@ -141,10 +138,10 @@ pub enum TypeVariants<'tcx> { /// The anonymous type of a closure. Used to represent the type of /// `|a| a`. - TyClosure(DefId, Box>), + TyClosure(DefId, ClosureSubsts<'tcx>), /// A tuple type. For example, `(i32, bool)`. - TyTuple(Vec>), + TyTuple(&'tcx [Ty<'tcx>]), /// The projection of an associated type. For example, /// `>::N`. @@ -235,7 +232,7 @@ pub enum TypeVariants<'tcx> { /// closure C wind up influencing the decisions we ought to make for /// closure C (which would then require fixed point iteration to /// handle). Plus it fixes an ICE. 
:P -#[derive(Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] pub struct ClosureSubsts<'tcx> { /// Lifetime and type parameters from the enclosing function. /// These are separated out because trans wants to pass them around @@ -245,22 +242,23 @@ pub struct ClosureSubsts<'tcx> { /// The types of the upvars. The list parallels the freevars and /// `upvar_borrows` lists. These are kept distinct so that we can /// easily index into them. - pub upvar_tys: Vec> + pub upvar_tys: &'tcx [Ty<'tcx>] } -impl<'tcx> Decodable for &'tcx ClosureSubsts<'tcx> { - fn decode(s: &mut S) -> Result<&'tcx ClosureSubsts<'tcx>, S::Error> { - let closure_substs = Decodable::decode(s)?; - let dummy_def_id: DefId = unsafe { mem::zeroed() }; - - cstore::tls::with_decoding_context(s, |dcx, _| { - // Intern the value - let ty = dcx.tcx().mk_closure_from_closure_substs(dummy_def_id, - Box::new(closure_substs)); - match ty.sty { - TyClosure(_, ref closure_substs) => Ok(&**closure_substs), - _ => bug!() - } +impl<'tcx> Encodable for ClosureSubsts<'tcx> { + fn encode(&self, s: &mut S) -> Result<(), S::Error> { + (self.func_substs, self.upvar_tys).encode(s) + } +} + +impl<'tcx> Decodable for ClosureSubsts<'tcx> { + fn decode(d: &mut D) -> Result, D::Error> { + let (func_substs, upvar_tys) = Decodable::decode(d)?; + cstore::tls::with_decoding_context(d, |dcx, _| { + Ok(ClosureSubsts { + func_substs: func_substs, + upvar_tys: dcx.tcx().mk_type_list(upvar_tys) + }) }) } } @@ -271,7 +269,7 @@ pub struct TraitTy<'tcx> { pub bounds: ExistentialBounds<'tcx>, } -impl<'tcx> TraitTy<'tcx> { +impl<'a, 'gcx, 'tcx> TraitTy<'tcx> { pub fn principal_def_id(&self) -> DefId { self.principal.0.def_id } @@ -280,8 +278,7 @@ impl<'tcx> TraitTy<'tcx> { /// we convert the principal trait-ref into a normal trait-ref, /// you must give *some* self-type. A common choice is `mk_err()` /// or some skolemized type. - pub fn principal_trait_ref_with_self_ty(&self, - tcx: &TyCtxt<'tcx>, + pub fn principal_trait_ref_with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>) -> ty::PolyTraitRef<'tcx> { @@ -294,8 +291,7 @@ impl<'tcx> TraitTy<'tcx> { }) } - pub fn projection_bounds_with_self_ty(&self, - tcx: &TyCtxt<'tcx>, + pub fn projection_bounds_with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>) -> Vec> { @@ -524,7 +520,7 @@ pub struct ParamTy { pub name: Name, } -impl ParamTy { +impl<'a, 'gcx, 'tcx> ParamTy { pub fn new(space: subst::ParamSpace, index: u32, name: Name) @@ -533,14 +529,14 @@ impl ParamTy { } pub fn for_self() -> ParamTy { - ParamTy::new(subst::SelfSpace, 0, special_idents::type_self.name) + ParamTy::new(subst::SelfSpace, 0, keywords::SelfType.name()) } pub fn for_def(def: &ty::TypeParameterDef) -> ParamTy { ParamTy::new(def.space, def.index, def.name) } - pub fn to_ty<'tcx>(self, tcx: &TyCtxt<'tcx>) -> Ty<'tcx> { + pub fn to_ty(self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { tcx.mk_param(self.space, self.idx, self.name) } @@ -633,7 +629,7 @@ pub struct DebruijnIndex { /// to be used. These also support explicit bounds: both the internally-stored /// *scope*, which the region is assumed to outlive, as well as other /// relations stored in the `FreeRegionMap`. Note that these relations -/// aren't checked when you `make_subregion` (or `mk_eqty`), only by +/// aren't checked when you `make_subregion` (or `eq_types`), only by /// `resolve_regions_and_report_errors`. 
/// /// When working with higher-ranked types, some region relations aren't @@ -765,7 +761,7 @@ impl<'tcx> ExistentialBounds<'tcx> { #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)] pub struct BuiltinBounds(EnumSet); -impl BuiltinBounds { +impl<'a, 'gcx, 'tcx> BuiltinBounds { pub fn empty() -> BuiltinBounds { BuiltinBounds(EnumSet::new()) } @@ -774,11 +770,11 @@ impl BuiltinBounds { self.into_iter() } - pub fn to_predicates<'tcx>(&self, - tcx: &TyCtxt<'tcx>, - self_ty: Ty<'tcx>) -> Vec> { + pub fn to_predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, + self_ty: Ty<'tcx>) + -> Vec> { self.iter().filter_map(|builtin_bound| - match traits::trait_ref_for_builtin_bound(tcx, builtin_bound, self_ty) { + match tcx.trait_ref_for_builtin_bound(builtin_bound, self_ty) { Ok(trait_ref) => Some(trait_ref.to_predicate()), Err(ErrorReported) => { None } } @@ -822,8 +818,8 @@ impl CLike for BuiltinBound { } } -impl<'tcx> TyCtxt<'tcx> { - pub fn try_add_builtin_trait(&self, +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn try_add_builtin_trait(self, trait_def_id: DefId, builtin_bounds: &mut EnumSet) -> bool @@ -887,7 +883,7 @@ impl Region { } // Type utilities -impl<'tcx> TyS<'tcx> { +impl<'a, 'gcx, 'tcx> TyS<'tcx> { pub fn as_opt_param_ty(&self) -> Option { match self.sty { ty::TyParam(ref d) => Some(d.clone()), @@ -902,7 +898,7 @@ impl<'tcx> TyS<'tcx> { } } - pub fn is_empty(&self, _cx: &TyCtxt) -> bool { + pub fn is_empty(&self, _cx: TyCtxt) -> bool { // FIXME(#24885): be smarter here match self.sty { TyEnum(def, _) | TyStruct(def, _) => def.is_empty(), @@ -974,24 +970,24 @@ impl<'tcx> TyS<'tcx> { } } - pub fn sequence_element_type(&self, cx: &TyCtxt<'tcx>) -> Ty<'tcx> { + pub fn sequence_element_type(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { match self.sty { TyArray(ty, _) | TySlice(ty) => ty, - TyStr => cx.mk_mach_uint(ast::UintTy::U8), + TyStr => tcx.mk_mach_uint(ast::UintTy::U8), _ => bug!("sequence_element_type called on non-sequence value: {}", self), } } - pub fn simd_type(&self, cx: &TyCtxt<'tcx>) -> Ty<'tcx> { + pub fn simd_type(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'tcx> { match self.sty { TyStruct(def, substs) => { - def.struct_variant().fields[0].ty(cx, substs) + def.struct_variant().fields[0].ty(tcx, substs) } _ => bug!("simd_type called on invalid type") } } - pub fn simd_size(&self, _cx: &TyCtxt) -> usize { + pub fn simd_size(&self, _cx: TyCtxt) -> usize { match self.sty { TyStruct(def, _) => def.struct_variant().fields.len(), _ => bug!("simd_size called on invalid type") diff --git a/src/librustc/ty/subst.rs b/src/librustc/ty/subst.rs index dd547da59e..2db9ceb8a0 100644 --- a/src/librustc/ty/subst.rs +++ b/src/librustc/ty/subst.rs @@ -36,7 +36,7 @@ pub struct Substs<'tcx> { pub regions: VecPerParamSpace, } -impl<'tcx> Substs<'tcx> { +impl<'a, 'gcx, 'tcx> Substs<'tcx> { pub fn new(t: VecPerParamSpace>, r: VecPerParamSpace) -> Substs<'tcx> @@ -114,15 +114,15 @@ impl<'tcx> Substs<'tcx> { Substs { types: types, regions: regions } } - pub fn with_method_from_subst(self, other: &Substs<'tcx>) -> Substs<'tcx> { - let Substs { types, regions } = self; + pub fn with_method_from_subst(&self, other: &Substs<'tcx>) -> Substs<'tcx> { + let Substs { types, regions } = self.clone(); let types = types.with_slice(FnSpace, other.types.get_slice(FnSpace)); let regions = regions.with_slice(FnSpace, other.regions.get_slice(FnSpace)); Substs { types: types, regions: regions } } /// Creates a trait-ref out of this substs, ignoring the FnSpace substs - pub fn to_trait_ref(&self, tcx: 
&TyCtxt<'tcx>, trait_id: DefId) + pub fn to_trait_ref(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, trait_id: DefId) -> ty::TraitRef<'tcx> { let Substs { mut types, mut regions } = self.clone(); types.truncate(FnSpace, 0); @@ -136,7 +136,6 @@ impl<'tcx> Substs<'tcx> { } impl<'tcx> Encodable for Substs<'tcx> { - fn encode(&self, s: &mut S) -> Result<(), S::Error> { cstore::tls::with_encoding_context(s, |ecx, rbml_w| { ecx.encode_substs(rbml_w, self); @@ -532,22 +531,22 @@ impl<'a,T> IntoIterator for &'a VecPerParamSpace { // there is more information available (for better errors). pub trait Subst<'tcx> : Sized { - fn subst(&self, tcx: &TyCtxt<'tcx>, substs: &Substs<'tcx>) -> Self { + fn subst<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, + substs: &Substs<'tcx>) -> Self { self.subst_spanned(tcx, substs, None) } - fn subst_spanned(&self, tcx: &TyCtxt<'tcx>, - substs: &Substs<'tcx>, - span: Option) - -> Self; + fn subst_spanned<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, + substs: &Substs<'tcx>, + span: Option) + -> Self; } impl<'tcx, T:TypeFoldable<'tcx>> Subst<'tcx> for T { - fn subst_spanned(&self, - tcx: &TyCtxt<'tcx>, - substs: &Substs<'tcx>, - span: Option) - -> T + fn subst_spanned<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, + substs: &Substs<'tcx>, + span: Option) + -> T { let mut folder = SubstFolder { tcx: tcx, substs: substs, @@ -562,8 +561,8 @@ impl<'tcx, T:TypeFoldable<'tcx>> Subst<'tcx> for T { /////////////////////////////////////////////////////////////////////////// // The actual substitution engine itself is a type folder. -struct SubstFolder<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, +struct SubstFolder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx>, substs: &'a Substs<'tcx>, // The location for which the substitution is performed, if available. @@ -579,8 +578,8 @@ struct SubstFolder<'a, 'tcx: 'a> { region_binders_passed: u32, } -impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { - fn tcx(&self) -> &TyCtxt<'tcx> { self.tcx } +impl<'a, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for SubstFolder<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } fn fold_binder>(&mut self, t: &ty::Binder) -> ty::Binder { self.region_binders_passed += 1; @@ -650,7 +649,7 @@ impl<'a, 'tcx> TypeFolder<'tcx> for SubstFolder<'a, 'tcx> { } } -impl<'a,'tcx> SubstFolder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> SubstFolder<'a, 'gcx, 'tcx> { fn ty_for_param(&self, p: ty::ParamTy, source_ty: Ty<'tcx>) -> Ty<'tcx> { // Look up the type in the substitutions. It really should be in there. 
let opt_ty = self.substs.types.opt_get(p.space, p.idx as usize); diff --git a/src/librustc/ty/trait_def.rs b/src/librustc/ty/trait_def.rs index 39a3837ae7..a76dfc35dc 100644 --- a/src/librustc/ty/trait_def.rs +++ b/src/librustc/ty/trait_def.rs @@ -14,7 +14,6 @@ use traits::{self, specialization_graph}; use ty; use ty::fast_reject; use ty::{Ty, TyCtxt, TraitRef}; -use std::borrow::{Borrow}; use std::cell::{Cell, RefCell}; use syntax::ast::Name; use hir; @@ -74,7 +73,7 @@ pub struct TraitDef<'tcx> { pub flags: Cell } -impl<'tcx> TraitDef<'tcx> { +impl<'a, 'gcx, 'tcx> TraitDef<'tcx> { pub fn new(unsafety: hir::Unsafety, paren_sugar: bool, generics: ty::Generics<'tcx>, @@ -118,19 +117,18 @@ impl<'tcx> TraitDef<'tcx> { ); } - fn write_trait_impls(&self, tcx: &TyCtxt<'tcx>) { + fn write_trait_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) { tcx.dep_graph.write(DepNode::TraitImpls(self.trait_ref.def_id)); } - fn read_trait_impls(&self, tcx: &TyCtxt<'tcx>) { + fn read_trait_impls(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) { tcx.dep_graph.read(DepNode::TraitImpls(self.trait_ref.def_id)); } /// Records a basic trait-to-implementation mapping. /// /// Returns `true` iff the impl has not previously been recorded. - fn record_impl(&self, - tcx: &TyCtxt<'tcx>, + fn record_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, impl_def_id: DefId, impl_trait_ref: TraitRef<'tcx>) -> bool { @@ -165,8 +163,7 @@ impl<'tcx> TraitDef<'tcx> { } /// Records a trait-to-implementation mapping for a crate-local impl. - pub fn record_local_impl(&self, - tcx: &TyCtxt<'tcx>, + pub fn record_local_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, impl_def_id: DefId, impl_trait_ref: TraitRef<'tcx>) { assert!(impl_def_id.is_local()); @@ -177,10 +174,9 @@ impl<'tcx> TraitDef<'tcx> { /// Records a trait-to-implementation mapping for a non-local impl. /// /// The `parent_impl` is the immediately-less-specialized impl, or the - /// trait's def ID if the impl is is not a specialization -- information that + /// trait's def ID if the impl is not a specialization -- information that /// should be pulled from the metadata. - pub fn record_remote_impl(&self, - tcx: &TyCtxt<'tcx>, + pub fn record_remote_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, impl_def_id: DefId, impl_trait_ref: TraitRef<'tcx>, parent_impl: DefId) { @@ -198,22 +194,22 @@ impl<'tcx> TraitDef<'tcx> { /// Adds a local impl into the specialization graph, returning an error with /// overlap information if the impl overlaps but does not specialize an /// existing impl. 
- pub fn add_impl_for_specialization<'a>(&self, - tcx: &'a TyCtxt<'tcx>, - impl_def_id: DefId) - -> Result<(), traits::Overlap<'a, 'tcx>> { + pub fn add_impl_for_specialization(&self, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + impl_def_id: DefId) + -> Result<(), traits::OverlapError> { assert!(impl_def_id.is_local()); self.specialization_graph.borrow_mut() .insert(tcx, impl_def_id) } - pub fn ancestors<'a>(&'a self, of_impl: DefId) -> specialization_graph::Ancestors<'a, 'tcx> { + pub fn ancestors(&'a self, of_impl: DefId) -> specialization_graph::Ancestors<'a, 'tcx> { specialization_graph::ancestors(self, of_impl) } - pub fn for_each_impl(&self, tcx: &TyCtxt<'tcx>, mut f: F) { - self.read_trait_impls(tcx); + pub fn for_each_impl(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, mut f: F) { + self.read_trait_impls(tcx); tcx.populate_implementations_for_trait_if_necessary(self.trait_ref.def_id); for &impl_def_id in self.blanket_impls.borrow().iter() { @@ -230,7 +226,7 @@ impl<'tcx> TraitDef<'tcx> { /// Iterate over every impl that could possibly match the /// self-type `self_ty`. pub fn for_each_relevant_impl(&self, - tcx: &TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>, mut f: F) { diff --git a/src/librustc/ty/util.rs b/src/librustc/ty/util.rs index 60fc47426d..4f6188ea3c 100644 --- a/src/librustc/ty/util.rs +++ b/src/librustc/ty/util.rs @@ -13,7 +13,7 @@ use hir::svh::Svh; use hir::def_id::DefId; use ty::subst; -use infer; +use infer::InferCtxt; use hir::pat_util; use traits::{self, ProjectionMode}; use ty::{self, Ty, TyCtxt, TypeAndMut, TypeFlags, TypeFoldable}; @@ -26,35 +26,36 @@ use rustc_const_math::{ConstInt, ConstIsize, ConstUsize}; use std::cmp; use std::hash::{Hash, SipHasher, Hasher}; use syntax::ast::{self, Name}; -use syntax::attr::{self, AttrMetaMethods, SignedInt, UnsignedInt}; +use syntax::attr::{self, SignedInt, UnsignedInt}; use syntax::codemap::Span; use hir; pub trait IntTypeExt { - fn to_ty<'tcx>(&self, cx: &TyCtxt<'tcx>) -> Ty<'tcx>; - fn disr_incr(&self, val: Disr) -> Option; + fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx>; + fn disr_incr<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, val: Option) + -> Option; fn assert_ty_matches(&self, val: Disr); - fn initial_discriminant(&self, tcx: &TyCtxt) -> Disr; + fn initial_discriminant<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Disr; } impl IntTypeExt for attr::IntType { - fn to_ty<'tcx>(&self, cx: &TyCtxt<'tcx>) -> Ty<'tcx> { + fn to_ty<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> { match *self { - SignedInt(ast::IntTy::I8) => cx.types.i8, - SignedInt(ast::IntTy::I16) => cx.types.i16, - SignedInt(ast::IntTy::I32) => cx.types.i32, - SignedInt(ast::IntTy::I64) => cx.types.i64, - SignedInt(ast::IntTy::Is) => cx.types.isize, - UnsignedInt(ast::UintTy::U8) => cx.types.u8, - UnsignedInt(ast::UintTy::U16) => cx.types.u16, - UnsignedInt(ast::UintTy::U32) => cx.types.u32, - UnsignedInt(ast::UintTy::U64) => cx.types.u64, - UnsignedInt(ast::UintTy::Us) => cx.types.usize, + SignedInt(ast::IntTy::I8) => tcx.types.i8, + SignedInt(ast::IntTy::I16) => tcx.types.i16, + SignedInt(ast::IntTy::I32) => tcx.types.i32, + SignedInt(ast::IntTy::I64) => tcx.types.i64, + SignedInt(ast::IntTy::Is) => tcx.types.isize, + UnsignedInt(ast::UintTy::U8) => tcx.types.u8, + UnsignedInt(ast::UintTy::U16) => tcx.types.u16, + UnsignedInt(ast::UintTy::U32) => tcx.types.u32, + UnsignedInt(ast::UintTy::U64) => tcx.types.u64, + UnsignedInt(ast::UintTy::Us) => tcx.types.usize, } } - fn initial_discriminant(&self, tcx: &TyCtxt) -> Disr 
{ + fn initial_discriminant<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Disr { match *self { SignedInt(ast::IntTy::I8) => ConstInt::I8(0), SignedInt(ast::IntTy::I16) => ConstInt::I16(0), @@ -93,9 +94,14 @@ impl IntTypeExt for attr::IntType { } } - fn disr_incr(&self, val: Disr) -> Option { - self.assert_ty_matches(val); - (val + ConstInt::Infer(1)).ok() + fn disr_incr<'a, 'tcx>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, val: Option) + -> Option { + if let Some(val) = val { + self.assert_ty_matches(val); + (val + ConstInt::Infer(1)).ok() + } else { + Some(self.initial_discriminant(tcx)) + } } } @@ -123,63 +129,60 @@ pub enum Representability { SelfRecursive, } -impl<'a, 'tcx> ParameterEnvironment<'a, 'tcx> { - pub fn can_type_implement_copy(&self, self_type: Ty<'tcx>, span: Span) - -> Result<(),CopyImplementationError> { - let tcx = self.tcx; - +impl<'tcx> ParameterEnvironment<'tcx> { + pub fn can_type_implement_copy<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + self_type: Ty<'tcx>, span: Span) + -> Result<(),CopyImplementationError> { // FIXME: (@jroesch) float this code up - let infcx = infer::new_infer_ctxt(tcx, - &tcx.tables, - Some(self.clone()), - ProjectionMode::Topmost); - - let adt = match self_type.sty { - ty::TyStruct(struct_def, substs) => { - for field in struct_def.all_fields() { - let field_ty = field.ty(tcx, substs); - if infcx.type_moves_by_default(field_ty, span) { - return Err(CopyImplementationError::InfrigingField( - field.name)) - } - } - struct_def - } - ty::TyEnum(enum_def, substs) => { - for variant in &enum_def.variants { - for field in &variant.fields { + tcx.infer_ctxt(None, Some(self.clone()), + ProjectionMode::Topmost).enter(|infcx| { + let adt = match self_type.sty { + ty::TyStruct(struct_def, substs) => { + for field in struct_def.all_fields() { let field_ty = field.ty(tcx, substs); if infcx.type_moves_by_default(field_ty, span) { - return Err(CopyImplementationError::InfrigingVariant( - variant.name)) + return Err(CopyImplementationError::InfrigingField( + field.name)) } } + struct_def } - enum_def - } - _ => return Err(CopyImplementationError::NotAnAdt), - }; + ty::TyEnum(enum_def, substs) => { + for variant in &enum_def.variants { + for field in &variant.fields { + let field_ty = field.ty(tcx, substs); + if infcx.type_moves_by_default(field_ty, span) { + return Err(CopyImplementationError::InfrigingVariant( + variant.name)) + } + } + } + enum_def + } + _ => return Err(CopyImplementationError::NotAnAdt) + }; - if adt.has_dtor() { - return Err(CopyImplementationError::HasDestructor) - } + if adt.has_dtor() { + return Err(CopyImplementationError::HasDestructor); + } - Ok(()) + Ok(()) + }) } } -impl<'tcx> TyCtxt<'tcx> { - pub fn pat_contains_ref_binding(&self, pat: &hir::Pat) -> Option { +impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> { + pub fn pat_contains_ref_binding(self, pat: &hir::Pat) -> Option { pat_util::pat_contains_ref_binding(&self.def_map, pat) } - pub fn arm_contains_ref_binding(&self, arm: &hir::Arm) -> Option { + pub fn arm_contains_ref_binding(self, arm: &hir::Arm) -> Option { pat_util::arm_contains_ref_binding(&self.def_map, arm) } /// Returns the type of element at index `i` in tuple or tuple-like type `t`. /// For an enum `t`, `variant` is None only if `t` is a univariant enum. - pub fn positional_element_ty(&self, + pub fn positional_element_ty(self, ty: Ty<'tcx>, i: usize, variant: Option) -> Option> { @@ -201,7 +204,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Returns the type of element at field `n` in struct or struct-like type `t`. 
/// For an enum `t`, `variant` must be some def id. - pub fn named_element_ty(&self, + pub fn named_element_ty(self, ty: Ty<'tcx>, n: Name, variant: Option) -> Option> { @@ -219,7 +222,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Returns the IntType representation. /// This used to ensure `int_ty` doesn't contain `usize` and `isize` /// by converting them to their actual types. That doesn't happen anymore. - pub fn enum_repr_type(&self, opt_hint: Option<&attr::ReprAttr>) -> attr::IntType { + pub fn enum_repr_type(self, opt_hint: Option<&attr::ReprAttr>) -> attr::IntType { match opt_hint { // Feed in the given type Some(&attr::ReprInt(_, int_t)) => int_t, @@ -234,7 +237,7 @@ impl<'tcx> TyCtxt<'tcx> { /// Returns the deeply last field of nested structures, or the same type, /// if not a structure at all. Corresponds to the only possible unsized /// field, and its type can be used to determine unsizing strategy. - pub fn struct_tail(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> { + pub fn struct_tail(self, mut ty: Ty<'tcx>) -> Ty<'tcx> { while let TyStruct(def, substs) = ty.sty { match def.struct_variant().fields.last() { Some(f) => ty = f.ty(self, substs), @@ -249,7 +252,7 @@ impl<'tcx> TyCtxt<'tcx> { /// structure definitions. /// For `(Foo>, Foo)`, the result will be `(Foo, Trait)`, /// whereas struct_tail produces `T`, and `Trait`, respectively. - pub fn struct_lockstep_tails(&self, + pub fn struct_lockstep_tails(self, source: Ty<'tcx>, target: Ty<'tcx>) -> (Ty<'tcx>, Ty<'tcx>) { @@ -284,7 +287,7 @@ impl<'tcx> TyCtxt<'tcx> { /// /// Requires that trait definitions have been processed so that we can /// elaborate predicates and walk supertraits. - pub fn required_region_bounds(&self, + pub fn required_region_bounds(self, erased_self_ty: Ty<'tcx>, predicates: Vec>) -> Vec { @@ -299,9 +302,11 @@ impl<'tcx> TyCtxt<'tcx> { match predicate { ty::Predicate::Projection(..) | ty::Predicate::Trait(..) | + ty::Predicate::Rfc1592(..) | ty::Predicate::Equate(..) | ty::Predicate::WellFormed(..) | ty::Predicate::ObjectSafe(..) | + ty::Predicate::ClosureKind(..) | ty::Predicate::RegionOutlives(..) => { None } @@ -328,13 +333,14 @@ impl<'tcx> TyCtxt<'tcx> { /// Creates a hash of the type `Ty` which will be the same no matter what crate /// context it's calculated within. This is used by the `type_id` intrinsic. - pub fn hash_crate_independent(&self, ty: Ty<'tcx>, svh: &Svh) -> u64 { + pub fn hash_crate_independent(self, ty: Ty<'tcx>, svh: &Svh) -> u64 { let mut state = SipHasher::new(); helper(self, ty, svh, &mut state); return state.finish(); - fn helper<'tcx>(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>, svh: &Svh, - state: &mut SipHasher) { + fn helper<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + ty: Ty<'tcx>, svh: &Svh, + state: &mut SipHasher) { macro_rules! byte { ($b:expr) => { ($b as u8).hash(state) } } macro_rules! hash { ($e:expr) => { $e.hash(state) } } @@ -362,7 +368,7 @@ impl<'tcx> TyCtxt<'tcx> { } else { tcx.sess.cstore.crate_hash(did.krate) }; - h.as_str().hash(state); + h.hash(state); did.index.hash(state); }; let mt = |state: &mut SipHasher, mt: TypeAndMut| { @@ -483,7 +489,7 @@ impl<'tcx> TyCtxt<'tcx> { /// `adt` that do not strictly outlive the adt value itself. /// (This allows programs to make cyclic structures without /// resorting to unasfe means; see RFCs 769 and 1238). 
- pub fn is_adt_dtorck(&self, adt: ty::AdtDef<'tcx>) -> bool { + pub fn is_adt_dtorck(self, adt: ty::AdtDef) -> bool { let dtor_method = match adt.destructor() { Some(dtor) => dtor, None => return false @@ -502,30 +508,20 @@ impl<'tcx> TyCtxt<'tcx> { } } -impl<'tcx> ty::TyS<'tcx> { - fn impls_bound<'a>(&'tcx self, param_env: &ParameterEnvironment<'a,'tcx>, - bound: ty::BuiltinBound, - span: Span) - -> bool +impl<'a, 'tcx> ty::TyS<'tcx> { + fn impls_bound(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: &ParameterEnvironment<'tcx>, + bound: ty::BuiltinBound, span: Span) -> bool { - let tcx = param_env.tcx; - let infcx = infer::new_infer_ctxt(tcx, - &tcx.tables, - Some(param_env.clone()), - ProjectionMode::Topmost); - - let is_impld = traits::type_known_to_meet_builtin_bound(&infcx, - self, bound, span); - - debug!("Ty::impls_bound({:?}, {:?}) = {:?}", - self, bound, is_impld); - - is_impld + tcx.infer_ctxt(None, Some(param_env.clone()), ProjectionMode::Topmost).enter(|infcx| { + traits::type_known_to_meet_builtin_bound(&infcx, self, bound, span) + }) } // FIXME (@jroesch): I made this public to use it, not sure if should be private - pub fn moves_by_default<'a>(&'tcx self, param_env: &ParameterEnvironment<'a,'tcx>, - span: Span) -> bool { + pub fn moves_by_default(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: &ParameterEnvironment<'tcx>, + span: Span) -> bool { if self.flags.get().intersects(TypeFlags::MOVENESS_CACHED) { return self.flags.get().intersects(TypeFlags::MOVES_BY_DEFAULT); } @@ -546,7 +542,7 @@ impl<'tcx> ty::TyS<'tcx> { TyArray(..) | TySlice(_) | TyTrait(..) | TyTuple(..) | TyClosure(..) | TyEnum(..) | TyStruct(..) | TyProjection(..) | TyParam(..) | TyInfer(..) | TyError => None - }.unwrap_or_else(|| !self.impls_bound(param_env, ty::BoundCopy, span)); + }.unwrap_or_else(|| !self.impls_bound(tcx, param_env, ty::BoundCopy, span)); if !self.has_param_types() && !self.has_self_ty() { self.flags.set(self.flags.get() | if result { @@ -560,18 +556,20 @@ impl<'tcx> ty::TyS<'tcx> { } #[inline] - pub fn is_sized<'a>(&'tcx self, param_env: &ParameterEnvironment<'a,'tcx>, - span: Span) -> bool + pub fn is_sized(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: &ParameterEnvironment<'tcx>, + span: Span) -> bool { if self.flags.get().intersects(TypeFlags::SIZEDNESS_CACHED) { return self.flags.get().intersects(TypeFlags::IS_SIZED); } - self.is_sized_uncached(param_env, span) + self.is_sized_uncached(tcx, param_env, span) } - fn is_sized_uncached<'a>(&'tcx self, param_env: &ParameterEnvironment<'a,'tcx>, - span: Span) -> bool { + fn is_sized_uncached(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: &ParameterEnvironment<'tcx>, + span: Span) -> bool { assert!(!self.needs_infer()); // Fast-path for primitive types @@ -584,7 +582,7 @@ impl<'tcx> ty::TyS<'tcx> { TyEnum(..) | TyStruct(..) | TyProjection(..) | TyParam(..) | TyInfer(..) 
| TyError => None - }.unwrap_or_else(|| self.impls_bound(param_env, ty::BoundSized, span)); + }.unwrap_or_else(|| self.impls_bound(tcx, param_env, ty::BoundSized, span)); if !self.has_param_types() && !self.has_self_ty() { self.flags.set(self.flags.get() | if result { @@ -598,19 +596,20 @@ impl<'tcx> ty::TyS<'tcx> { } #[inline] - pub fn layout<'a>(&'tcx self, infcx: &infer::InferCtxt<'a, 'tcx>) - -> Result<&'tcx Layout, LayoutError<'tcx>> { + pub fn layout<'lcx>(&'tcx self, infcx: &InferCtxt<'a, 'tcx, 'lcx>) + -> Result<&'tcx Layout, LayoutError<'tcx>> { + let tcx = infcx.tcx.global_tcx(); let can_cache = !self.has_param_types() && !self.has_self_ty(); if can_cache { - if let Some(&cached) = infcx.tcx.layout_cache.borrow().get(&self) { + if let Some(&cached) = tcx.layout_cache.borrow().get(&self) { return Ok(cached); } } let layout = Layout::compute_uncached(self, infcx)?; - let layout = infcx.tcx.intern_layout(layout); + let layout = tcx.intern_layout(layout); if can_cache { - infcx.tcx.layout_cache.borrow_mut().insert(self, layout); + tcx.layout_cache.borrow_mut().insert(self, layout); } Ok(layout) } @@ -618,35 +617,37 @@ impl<'tcx> ty::TyS<'tcx> { /// Check whether a type is representable. This means it cannot contain unboxed /// structural recursion. This check is needed for structs and enums. - pub fn is_representable(&'tcx self, cx: &TyCtxt<'tcx>, sp: Span) -> Representability { + pub fn is_representable(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span) + -> Representability { // Iterate until something non-representable is found - fn find_nonrepresentable<'tcx, It: Iterator>>(cx: &TyCtxt<'tcx>, - sp: Span, - seen: &mut Vec>, - iter: It) - -> Representability { + fn find_nonrepresentable<'a, 'tcx, It>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + sp: Span, + seen: &mut Vec>, + iter: It) + -> Representability + where It: Iterator> { iter.fold(Representability::Representable, - |r, ty| cmp::max(r, is_type_structurally_recursive(cx, sp, seen, ty))) + |r, ty| cmp::max(r, is_type_structurally_recursive(tcx, sp, seen, ty))) } - fn are_inner_types_recursive<'tcx>(cx: &TyCtxt<'tcx>, sp: Span, - seen: &mut Vec>, ty: Ty<'tcx>) - -> Representability { + fn are_inner_types_recursive<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, + seen: &mut Vec>, ty: Ty<'tcx>) + -> Representability { match ty.sty { TyTuple(ref ts) => { - find_nonrepresentable(cx, sp, seen, ts.iter().cloned()) + find_nonrepresentable(tcx, sp, seen, ts.iter().cloned()) } // Fixed-length vectors. // FIXME(#11924) Behavior undecided for zero-length vectors. TyArray(ty, _) => { - is_type_structurally_recursive(cx, sp, seen, ty) + is_type_structurally_recursive(tcx, sp, seen, ty) } TyStruct(def, substs) | TyEnum(def, substs) => { - find_nonrepresentable(cx, + find_nonrepresentable(tcx, sp, seen, - def.all_fields().map(|f| f.ty(cx, substs))) + def.all_fields().map(|f| f.ty(tcx, substs))) } TyClosure(..) => { // this check is run on type definitions, so we don't expect @@ -689,10 +690,10 @@ impl<'tcx> ty::TyS<'tcx> { // Does the type `ty` directly (without indirection through a pointer) // contain any types on stack `seen`? 
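The `seen` stack driven by these helpers implements a purely structural check. As a user-level illustration (not part of the patch, and with invented names), the sketch below shows the kind of type the walk reports as `SelfRecursive` versus one made representable by indirection:

    // A type containing itself by value would have infinite size and is
    // rejected by the representability check (error E0072); recursion
    // behind a pointer such as Box breaks the structural cycle and is fine.
    //
    // struct Infinite { next: Infinite }   // error[E0072]: recursive type has infinite size
    struct List {
        value: u32,
        next: Option<Box<List>>,            // indirection makes the type representable
    }

    fn main() {
        let list = List { value: 1, next: None };
        println!("{}", list.value);
    }
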
- fn is_type_structurally_recursive<'tcx>(cx: &TyCtxt<'tcx>, - sp: Span, - seen: &mut Vec>, - ty: Ty<'tcx>) -> Representability { + fn is_type_structurally_recursive<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + sp: Span, + seen: &mut Vec>, + ty: Ty<'tcx>) -> Representability { debug!("is_type_structurally_recursive: {:?}", ty); match ty.sty { @@ -744,13 +745,13 @@ impl<'tcx> ty::TyS<'tcx> { // For structs and enums, track all previously seen types by pushing them // onto the 'seen' stack. seen.push(ty); - let out = are_inner_types_recursive(cx, sp, seen, ty); + let out = are_inner_types_recursive(tcx, sp, seen, ty); seen.pop(); out } _ => { // No need to push in other cases. - are_inner_types_recursive(cx, sp, seen, ty) + are_inner_types_recursive(tcx, sp, seen, ty) } } } @@ -761,7 +762,7 @@ impl<'tcx> ty::TyS<'tcx> { // contains a different, structurally recursive type, maintain a stack // of seen types and check recursion for each of them (issues #3008, #3779). let mut seen: Vec = Vec::new(); - let r = is_type_structurally_recursive(cx, sp, &mut seen, self); + let r = is_type_structurally_recursive(tcx, sp, &mut seen, self); debug!("is_type_representable: {:?} is {:?}", self, r); r } diff --git a/src/librustc/ty/wf.rs b/src/librustc/ty/wf.rs index 9ea634bb41..a25994ea69 100644 --- a/src/librustc/ty/wf.rs +++ b/src/librustc/ty/wf.rs @@ -10,7 +10,7 @@ use hir::def_id::DefId; use infer::InferCtxt; -use ty::outlives::{self, Component}; +use ty::outlives::Component; use ty::subst::Substs; use traits; use ty::{self, ToPredicate, Ty, TyCtxt, TypeFoldable}; @@ -25,11 +25,11 @@ use util::common::ErrorReported; /// inference variable, returns `None`, because we are not able to /// make any progress at all. This is to prevent "livelock" where we /// say "$0 is WF if $0 is WF". -pub fn obligations<'a,'tcx>(infcx: &InferCtxt<'a, 'tcx>, - body_id: ast::NodeId, - ty: Ty<'tcx>, - span: Span) - -> Option>> +pub fn obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + body_id: ast::NodeId, + ty: Ty<'tcx>, + span: Span) + -> Option>> { let mut wf = WfPredicates { infcx: infcx, body_id: body_id, @@ -49,22 +49,22 @@ pub fn obligations<'a,'tcx>(infcx: &InferCtxt<'a, 'tcx>, /// well-formed. For example, if there is a trait `Set` defined like /// `trait Set`, then the trait reference `Foo: Set` is WF /// if `Bar: Eq`. -pub fn trait_obligations<'a,'tcx>(infcx: &InferCtxt<'a, 'tcx>, - body_id: ast::NodeId, - trait_ref: &ty::TraitRef<'tcx>, - span: Span) - -> Vec> +pub fn trait_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + body_id: ast::NodeId, + trait_ref: &ty::TraitRef<'tcx>, + span: Span) + -> Vec> { let mut wf = WfPredicates { infcx: infcx, body_id: body_id, span: span, out: vec![] }; wf.compute_trait_ref(trait_ref); wf.normalize() } -pub fn predicate_obligations<'a,'tcx>(infcx: &InferCtxt<'a, 'tcx>, - body_id: ast::NodeId, - predicate: &ty::Predicate<'tcx>, - span: Span) - -> Vec> +pub fn predicate_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>, + body_id: ast::NodeId, + predicate: &ty::Predicate<'tcx>, + span: Span) + -> Vec> { let mut wf = WfPredicates { infcx: infcx, body_id: body_id, span: span, out: vec![] }; @@ -92,6 +92,11 @@ pub fn predicate_obligations<'a,'tcx>(infcx: &InferCtxt<'a, 'tcx>, } ty::Predicate::ObjectSafe(_) => { } + ty::Predicate::ClosureKind(..) 
=> { + } + ty::Predicate::Rfc1592(ref data) => { + bug!("RFC1592 predicate `{:?}` in predicate_obligations", data); + } } wf.normalize() @@ -118,8 +123,8 @@ pub enum ImpliedBound<'tcx> { /// Compute the implied bounds that a callee/impl can assume based on /// the fact that caller/projector has ensured that `ty` is WF. See /// the `ImpliedBound` type for more details. -pub fn implied_bounds<'a,'tcx>( - infcx: &'a InferCtxt<'a,'tcx>, +pub fn implied_bounds<'a, 'gcx, 'tcx>( + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, body_id: ast::NodeId, ty: Ty<'tcx>, span: Span) @@ -153,8 +158,10 @@ pub fn implied_bounds<'a,'tcx>( assert!(!obligation.has_escaping_regions()); match obligation.predicate { ty::Predicate::Trait(..) | + ty::Predicate::Rfc1592(..) | ty::Predicate::Equate(..) | ty::Predicate::Projection(..) | + ty::Predicate::ClosureKind(..) | ty::Predicate::ObjectSafe(..) => vec![], @@ -175,7 +182,7 @@ pub fn implied_bounds<'a,'tcx>( match infcx.tcx.no_late_bound_regions(data) { None => vec![], Some(ty::OutlivesPredicate(ty_a, r_b)) => { - let components = outlives::components(infcx, ty_a); + let components = infcx.outlives_components(ty_a); implied_bounds_from_components(r_b, components) } }, @@ -220,14 +227,14 @@ fn implied_bounds_from_components<'tcx>(sub_region: ty::Region, .collect() } -struct WfPredicates<'a,'tcx:'a> { - infcx: &'a InferCtxt<'a, 'tcx>, +struct WfPredicates<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, body_id: ast::NodeId, span: Span, out: Vec>, } -impl<'a,'tcx> WfPredicates<'a,'tcx> { +impl<'a, 'gcx, 'tcx> WfPredicates<'a, 'gcx, 'tcx> { fn cause(&mut self, code: traits::ObligationCauseCode<'tcx>) -> traits::ObligationCause<'tcx> { traits::ObligationCause::new(self.span, self.body_id, code) } @@ -265,7 +272,7 @@ impl<'a,'tcx> WfPredicates<'a,'tcx> { /// into `self.out`. fn compute_projection(&mut self, data: ty::ProjectionTy<'tcx>) { // A projection is well-formed if (a) the trait ref itself is - // WF WF and (b) the trait-ref holds. (It may also be + // WF and (b) the trait-ref holds. (It may also be // normalizable and be WF that way.) self.compute_trait_ref(&data.trait_ref); @@ -277,11 +284,33 @@ impl<'a,'tcx> WfPredicates<'a,'tcx> { } } + fn require_sized(&mut self, subty: Ty<'tcx>, cause: traits::ObligationCauseCode<'tcx>, + rfc1592: bool) { + if !subty.has_escaping_regions() { + let cause = self.cause(cause); + match self.infcx.tcx.trait_ref_for_builtin_bound(ty::BoundSized, subty) { + Ok(trait_ref) => { + let predicate = trait_ref.to_predicate(); + let predicate = if rfc1592 { + ty::Predicate::Rfc1592(box predicate) + } else { + predicate + }; + self.out.push( + traits::Obligation::new(cause, + predicate)); + } + Err(ErrorReported) => { } + } + } + } + /// Push new obligations into `out`. Returns true if it was able /// to generate all the predicates needed to validate that `ty0` /// is WF. Returns false if `ty0` is an unresolved type variable, /// in which case we are not able to simplify at all. 
fn compute(&mut self, ty0: Ty<'tcx>) -> bool { + let tcx = self.infcx.tcx; let mut subtys = ty0.walk(); while let Some(ty) = subtys.next() { match ty.sty { @@ -298,23 +327,18 @@ impl<'a,'tcx> WfPredicates<'a,'tcx> { ty::TySlice(subty) | ty::TyArray(subty, _) => { - if !subty.has_escaping_regions() { - let cause = self.cause(traits::SliceOrArrayElem); - match traits::trait_ref_for_builtin_bound(self.infcx.tcx, - ty::BoundSized, - subty) { - Ok(trait_ref) => { - self.out.push( - traits::Obligation::new(cause, - trait_ref.to_predicate())); - } - Err(ErrorReported) => { } + self.require_sized(subty, traits::SliceOrArrayElem, false); + } + + ty::TyTuple(ref tys) => { + if let Some((_last, rest)) = tys.split_last() { + for elem in rest { + self.require_sized(elem, traits::TupleElem, true); } } } ty::TyBox(_) | - ty::TyTuple(_) | ty::TyRawPtr(_) => { // simple cases that are WF if their type args are WF } @@ -371,10 +395,25 @@ impl<'a,'tcx> WfPredicates<'a,'tcx> { // checking those let cause = self.cause(traits::MiscObligation); - self.out.push( - traits::Obligation::new( - cause, - ty::Predicate::ObjectSafe(data.principal_def_id()))); + + // FIXME(#33243): remove RFC1592 + self.out.push(traits::Obligation::new( + cause.clone(), + ty::Predicate::ObjectSafe(data.principal_def_id()) + )); + let component_traits = + data.bounds.builtin_bounds.iter().flat_map(|bound| { + tcx.lang_items.from_builtin_kind(bound).ok() + }); +// .chain(Some(data.principal_def_id())); + self.out.extend( + component_traits.map(|did| { traits::Obligation::new( + cause.clone(), + ty::Predicate::Rfc1592( + box ty::Predicate::ObjectSafe(did) + ) + )}) + ); } // Inference variables are the complicated case, since we don't @@ -486,8 +525,8 @@ impl<'a,'tcx> WfPredicates<'a,'tcx> { /// they declare `trait SomeTrait : 'static`, for example, then /// `'static` would appear in the list. The hard work is done by /// `ty::required_region_bounds`, see that for more information. 
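The `require_sized` helper introduced here routes the tuple-element obligations through `ty::Predicate::Rfc1592`, so they surface as forward-compatibility diagnostics rather than immediate errors, while slice and array element sizedness stays an ordinary obligation; the trait-object arm likewise wraps the extra object-safety obligations for built-in bounds in `Rfc1592`. A hedged, self-contained sketch of the user-visible tuple rule (function names invented for the example, not taken from the patch):

    // Every tuple field except the last must be Sized; only the tail may be
    // a dynamically sized type such as `str` or `[T]`.
    #[allow(dead_code)]
    fn tail_unsized(_t: &(u32, str)) {}      // accepted: the unsized field is in tail position
    // fn head_unsized(_t: &(str, u32)) {}   // the new TupleElem obligation flags this case

    fn main() {
        // Fully sized tuples are unaffected by the new obligation.
        let pair = (1u32, 2u32);
        println!("{}", pair.0 + pair.1);
    }
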
-pub fn object_region_bounds<'tcx>( - tcx: &TyCtxt<'tcx>, +pub fn object_region_bounds<'a, 'gcx, 'tcx>( + tcx: TyCtxt<'a, 'gcx, 'tcx>, principal: &ty::PolyTraitRef<'tcx>, others: ty::BuiltinBounds) -> Vec diff --git a/src/librustc/util/ppaux.rs b/src/librustc/util/ppaux.rs index 2173b919d1..1a802064b6 100644 --- a/src/librustc/util/ppaux.rs +++ b/src/librustc/util/ppaux.rs @@ -18,6 +18,7 @@ use ty::{TyParam, TyRawPtr, TyRef, TyTuple}; use ty::TyClosure; use ty::{TyBox, TyTrait, TyInt, TyUint, TyInfer}; use ty::{self, Ty, TyCtxt, TypeFoldable}; +use ty::fold::{TypeFolder, TypeVisitor}; use std::cell::Cell; use std::fmt; @@ -68,12 +69,12 @@ pub enum Ns { Value } -fn number_of_supplied_defaults<'tcx, GG>(tcx: &ty::TyCtxt<'tcx>, - substs: &subst::Substs, - space: subst::ParamSpace, - get_generics: GG) - -> usize - where GG: FnOnce(&TyCtxt<'tcx>) -> ty::Generics<'tcx> +fn number_of_supplied_defaults<'a, 'gcx, 'tcx, GG>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + substs: &subst::Substs, + space: subst::ParamSpace, + get_generics: GG) + -> usize + where GG: FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> ty::Generics<'tcx> { let generics = get_generics(tcx); @@ -114,7 +115,7 @@ pub fn parameterized(f: &mut fmt::Formatter, projections: &[ty::ProjectionPredicate], get_generics: GG) -> fmt::Result - where GG: for<'tcx> FnOnce(&TyCtxt<'tcx>) -> ty::Generics<'tcx> + where GG: for<'a, 'gcx, 'tcx> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> ty::Generics<'tcx> { if let (Ns::Value, Some(self_ty)) = (ns, substs.self_ty()) { write!(f, "<{} as ", self_ty)?; @@ -230,10 +231,10 @@ pub fn parameterized(f: &mut fmt::Formatter, Ok(()) } -fn in_binder<'tcx, T, U>(f: &mut fmt::Formatter, - tcx: &TyCtxt<'tcx>, - original: &ty::Binder, - lifted: Option>) -> fmt::Result +fn in_binder<'a, 'gcx, 'tcx, T, U>(f: &mut fmt::Formatter, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + original: &ty::Binder, + lifted: Option>) -> fmt::Result where T: fmt::Display, U: fmt::Display + TypeFoldable<'tcx> { // Replace any anonymous late-bound regions with named @@ -293,11 +294,11 @@ fn in_binder<'tcx, T, U>(f: &mut fmt::Formatter, struct TraitAndProjections<'tcx>(ty::TraitRef<'tcx>, Vec>); impl<'tcx> TypeFoldable<'tcx> for TraitAndProjections<'tcx> { - fn super_fold_with>(&self, folder: &mut F) -> Self { + fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self { TraitAndProjections(self.0.fold_with(folder), self.1.fold_with(folder)) } - fn super_visit_with>(&self, visitor: &mut V) -> bool { + fn super_visit_with>(&self, visitor: &mut V) -> bool { self.0.visit_with(visitor) || self.1.visit_with(visitor) } } @@ -459,6 +460,9 @@ impl<'tcx> fmt::Debug for ty::Predicate<'tcx> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ty::Predicate::Trait(ref a) => write!(f, "{:?}", a), + ty::Predicate::Rfc1592(ref a) => { + write!(f, "RFC1592({:?})", a) + } ty::Predicate::Equate(ref pair) => write!(f, "{:?}", pair), ty::Predicate::RegionOutlives(ref pair) => write!(f, "{:?}", pair), ty::Predicate::TypeOutlives(ref pair) => write!(f, "{:?}", pair), @@ -467,6 +471,9 @@ impl<'tcx> fmt::Debug for ty::Predicate<'tcx> { ty::Predicate::ObjectSafe(trait_def_id) => { write!(f, "ObjectSafe({:?})", trait_def_id) } + ty::Predicate::ClosureKind(closure_def_id, kind) => { + write!(f, "ClosureKind({:?}, {:?})", closure_def_id, kind) + } } } } @@ -490,7 +497,7 @@ impl fmt::Debug for ty::BoundRegion { BrAnon(n) => write!(f, "BrAnon({:?})", n), BrFresh(n) => write!(f, "BrFresh({:?})", n), BrNamed(did, name) => { - write!(f, "BrNamed({:?}, {:?})", did, name) + 
write!(f, "BrNamed({:?}:{:?}, {:?})", did.krate, did.index, name) } BrEnv => "BrEnv".fmt(f), } @@ -551,7 +558,7 @@ impl<'tcx> fmt::Debug for ty::ClosureUpvar<'tcx> { } } -impl<'a, 'tcx> fmt::Debug for ty::ParameterEnvironment<'a, 'tcx> { +impl<'tcx> fmt::Debug for ty::ParameterEnvironment<'tcx> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "ParameterEnvironment(\ free_substs={:?}, \ @@ -888,14 +895,14 @@ impl<'tcx> fmt::Display for ty::TypeVariants<'tcx> { TyTrait(ref data) => write!(f, "{}", data), ty::TyProjection(ref data) => write!(f, "{}", data), TyStr => write!(f, "str"), - TyClosure(did, ref substs) => ty::tls::with(|tcx| { + TyClosure(did, substs) => ty::tls::with(|tcx| { write!(f, "[closure")?; if let Some(node_id) = tcx.map.as_local_node_id(did) { write!(f, "@{:?}", tcx.map.span(node_id))?; let mut sep = " "; tcx.with_freevars(node_id, |freevars| { - for (freevar, upvar_ty) in freevars.iter().zip(&substs.upvar_tys) { + for (freevar, upvar_ty) in freevars.iter().zip(substs.upvar_tys) { let node_id = freevar.def.var_id(); write!(f, "{}{}:{}", @@ -1039,10 +1046,21 @@ impl<'tcx> fmt::Display for ty::ProjectionTy<'tcx> { } } +impl fmt::Display for ty::ClosureKind { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + ty::ClosureKind::Fn => write!(f, "Fn"), + ty::ClosureKind::FnMut => write!(f, "FnMut"), + ty::ClosureKind::FnOnce => write!(f, "FnOnce"), + } + } +} + impl<'tcx> fmt::Display for ty::Predicate<'tcx> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { ty::Predicate::Trait(ref data) => write!(f, "{}", data), + ty::Predicate::Rfc1592(ref data) => write!(f, "{}", data), ty::Predicate::Equate(ref predicate) => write!(f, "{}", predicate), ty::Predicate::RegionOutlives(ref predicate) => write!(f, "{}", predicate), ty::Predicate::TypeOutlives(ref predicate) => write!(f, "{}", predicate), @@ -1052,6 +1070,11 @@ impl<'tcx> fmt::Display for ty::Predicate<'tcx> { ty::tls::with(|tcx| { write!(f, "the trait `{}` is object-safe", tcx.item_path_str(trait_def_id)) }), + ty::Predicate::ClosureKind(closure_def_id, kind) => + ty::tls::with(|tcx| { + write!(f, "the closure `{}` implements the trait `{}`", + tcx.item_path_str(closure_def_id), kind) + }), } } } diff --git a/src/librustc_back/dynamic_lib.rs b/src/librustc_back/dynamic_lib.rs index 2f86262afb..38e6006092 100644 --- a/src/librustc_back/dynamic_lib.rs +++ b/src/librustc_back/dynamic_lib.rs @@ -189,12 +189,16 @@ mod dl { pub fn check_for_errors_in(f: F) -> Result where F: FnOnce() -> T, { - use std::sync::StaticMutex; - static LOCK: StaticMutex = StaticMutex::new(); + use std::sync::{Mutex, Once, ONCE_INIT}; + static INIT: Once = ONCE_INIT; + static mut LOCK: *mut Mutex<()> = 0 as *mut _; unsafe { + INIT.call_once(|| { + LOCK = Box::into_raw(Box::new(Mutex::new(()))); + }); // dlerror isn't thread safe, so we need to lock around this entire // sequence - let _guard = LOCK.lock(); + let _guard = (*LOCK).lock(); let _old_error = libc::dlerror(); let result = f(); diff --git a/src/librustc_back/lib.rs b/src/librustc_back/lib.rs index 67b11a930d..6a7bc51d15 100644 --- a/src/librustc_back/lib.rs +++ b/src/librustc_back/lib.rs @@ -38,7 +38,6 @@ #![feature(staged_api)] #![feature(step_by)] #![feature(question_mark)] -#![cfg_attr(unix, feature(static_mutex))] #![cfg_attr(test, feature(test, rand))] extern crate syntax; diff --git a/src/librustc_back/sha2.rs b/src/librustc_back/sha2.rs index ba8107e03c..97fb39c17e 100644 --- a/src/librustc_back/sha2.rs +++ 
b/src/librustc_back/sha2.rs @@ -25,11 +25,10 @@ fn write_u32_be(dst: &mut[u8], input: u32) { /// Read the value of a vector of bytes as a u32 value in big-endian format. fn read_u32_be(input: &[u8]) -> u32 { - return - (input[0] as u32) << 24 | + (input[0] as u32) << 24 | (input[1] as u32) << 16 | (input[2] as u32) << 8 | - (input[3] as u32); + (input[3] as u32) } /// Read a vector of bytes into a vector of u32s. The values are read in big-endian format. @@ -42,7 +41,7 @@ fn read_u32v_be(dst: &mut[u32], input: &[u8]) { } } -trait ToBits { +trait ToBits: Sized { /// Convert the value in bytes to the number of bits, a tuple where the 1st item is the /// high-order value and the 2nd item is the low order value. fn to_bits(self) -> (Self, Self); @@ -50,7 +49,7 @@ trait ToBits { impl ToBits for u64 { fn to_bits(self) -> (u64, u64) { - return (self >> 61, self << 3); + (self >> 61, self << 3) } } @@ -64,7 +63,7 @@ fn add_bytes_to_bits(bits: u64, bytes: u64) -> u64 { } match bits.checked_add(new_low_bits) { - Some(x) => return x, + Some(x) => x, None => panic!("numeric overflow occurred.") } } @@ -113,10 +112,10 @@ struct FixedBuffer64 { impl FixedBuffer64 { /// Create a new FixedBuffer64 fn new() -> FixedBuffer64 { - return FixedBuffer64 { + FixedBuffer64 { buffer: [0; 64], buffer_idx: 0 - }; + } } } @@ -175,13 +174,13 @@ impl FixedBuffer for FixedBuffer64 { fn next<'s>(&'s mut self, len: usize) -> &'s mut [u8] { self.buffer_idx += len; - return &mut self.buffer[self.buffer_idx - len..self.buffer_idx]; + &mut self.buffer[self.buffer_idx - len..self.buffer_idx] } fn full_buffer<'s>(&'s mut self) -> &'s [u8] { assert!(self.buffer_idx == 64); self.buffer_idx = 0; - return &self.buffer[..64]; + &self.buffer[..64] } fn position(&self) -> usize { self.buffer_idx } @@ -278,7 +277,7 @@ struct Engine256State { impl Engine256State { fn new(h: &[u32; 8]) -> Engine256State { - return Engine256State { + Engine256State { h0: h[0], h1: h[1], h2: h[2], @@ -287,7 +286,7 @@ impl Engine256State { h5: h[5], h6: h[6], h7: h[7] - }; + } } fn reset(&mut self, h: &[u32; 8]) { @@ -433,7 +432,7 @@ struct Engine256 { impl Engine256 { fn new(h: &[u32; 8]) -> Engine256 { - return Engine256 { + Engine256 { length_bits: 0, buffer: FixedBuffer64::new(), state: Engine256State::new(h), @@ -457,17 +456,15 @@ impl Engine256 { } fn finish(&mut self) { - if self.finished { - return; + if !self.finished { + let self_state = &mut self.state; + self.buffer.standard_padding(8, |input: &[u8]| { self_state.process_block(input) }); + write_u32_be(self.buffer.next(4), (self.length_bits >> 32) as u32 ); + write_u32_be(self.buffer.next(4), self.length_bits as u32); + self_state.process_block(self.buffer.full_buffer()); + + self.finished = true; } - - let self_state = &mut self.state; - self.buffer.standard_padding(8, |input: &[u8]| { self_state.process_block(input) }); - write_u32_be(self.buffer.next(4), (self.length_bits >> 32) as u32 ); - write_u32_be(self.buffer.next(4), self.length_bits as u32); - self_state.process_block(self.buffer.full_buffer()); - - self.finished = true; } } @@ -531,7 +528,7 @@ mod tests { use self::rand::isaac::IsaacRng; use serialize::hex::FromHex; use std::u64; - use super::{Digest, Sha256, FixedBuffer}; + use super::{Digest, Sha256}; // A normal addition - no overflow occurs #[test] @@ -648,7 +645,7 @@ mod tests { mod bench { extern crate test; use self::test::Bencher; - use super::{Sha256, FixedBuffer, Digest}; + use super::{Sha256, Digest}; #[bench] pub fn sha256_10(b: &mut Bencher) { diff --git 
a/src/librustc_back/target/aarch64_apple_ios.rs b/src/librustc_back/target/aarch64_apple_ios.rs index 2f0a043f9a..481338d1ce 100644 --- a/src/librustc_back/target/aarch64_apple_ios.rs +++ b/src/librustc_back/target/aarch64_apple_ios.rs @@ -24,6 +24,7 @@ pub fn target() -> Target { options: TargetOptions { features: "+neon,+fp-armv8,+cyclone".to_string(), eliminate_frame_pointer: false, + max_atomic_width: 128, .. opts(Arch::Arm64) }, } diff --git a/src/librustc_back/target/aarch64_linux_android.rs b/src/librustc_back/target/aarch64_linux_android.rs index c4212e7021..81be546e0c 100644 --- a/src/librustc_back/target/aarch64_linux_android.rs +++ b/src/librustc_back/target/aarch64_linux_android.rs @@ -11,6 +11,11 @@ use target::Target; pub fn target() -> Target { + let mut base = super::android_base::opts(); + base.max_atomic_width = 128; + // As documented in http://developer.android.com/ndk/guides/cpu-features.html + // the neon (ASIMD) and FP must exist on all android aarch64 targets. + base.features = "+neon,+fp-armv8".to_string(); Target { llvm_target: "aarch64-linux-android".to_string(), target_endian: "little".to_string(), @@ -20,6 +25,6 @@ pub fn target() -> Target { target_os: "android".to_string(), target_env: "".to_string(), target_vendor: "unknown".to_string(), - options: super::android_base::opts(), + options: base, } } diff --git a/src/librustc_back/target/aarch64_unknown_linux_gnu.rs b/src/librustc_back/target/aarch64_unknown_linux_gnu.rs index 3bf4e92fb6..aec1bae60c 100644 --- a/src/librustc_back/target/aarch64_unknown_linux_gnu.rs +++ b/src/librustc_back/target/aarch64_unknown_linux_gnu.rs @@ -11,7 +11,8 @@ use target::Target; pub fn target() -> Target { - let base = super::linux_base::opts(); + let mut base = super::linux_base::opts(); + base.max_atomic_width = 128; Target { llvm_target: "aarch64-unknown-linux-gnu".to_string(), target_endian: "little".to_string(), diff --git a/src/librustc_back/target/apple_base.rs b/src/librustc_back/target/apple_base.rs index ffcb6f971a..70c7ea99e1 100644 --- a/src/librustc_back/target/apple_base.rs +++ b/src/librustc_back/target/apple_base.rs @@ -33,9 +33,8 @@ pub fn opts() -> TargetOptions { }).unwrap_or((10, 7)); TargetOptions { - // OSX has -dead_strip, which doesn't rely on ffunction_sections + // OSX has -dead_strip, which doesn't rely on function_sections function_sections: false, - linker: "cc".to_string(), dynamic_linking: true, executables: true, is_like_osx: true, diff --git a/src/librustc_back/target/arm_linux_androideabi.rs b/src/librustc_back/target/arm_linux_androideabi.rs index 0a61b14763..e1b170422c 100644 --- a/src/librustc_back/target/arm_linux_androideabi.rs +++ b/src/librustc_back/target/arm_linux_androideabi.rs @@ -12,7 +12,8 @@ use target::Target; pub fn target() -> Target { let mut base = super::android_base::opts(); - base.features = "+v7".to_string(); + base.features = "+v7,+vfp3,+d16".to_string(); + base.max_atomic_width = 64; Target { llvm_target: "arm-linux-androideabi".to_string(), @@ -21,7 +22,7 @@ pub fn target() -> Target { data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(), arch: "arm".to_string(), target_os: "android".to_string(), - target_env: "gnu".to_string(), + target_env: "".to_string(), target_vendor: "unknown".to_string(), options: base, } diff --git a/src/librustc_back/target/arm_unknown_linux_gnueabi.rs b/src/librustc_back/target/arm_unknown_linux_gnueabi.rs index 0cb0949d46..60c4a7c3c9 100644 --- a/src/librustc_back/target/arm_unknown_linux_gnueabi.rs +++ 
b/src/librustc_back/target/arm_unknown_linux_gnueabi.rs @@ -11,7 +11,8 @@ use target::{Target, TargetOptions}; pub fn target() -> Target { - let base = super::linux_base::opts(); + let mut base = super::linux_base::opts(); + base.max_atomic_width = 64; Target { llvm_target: "arm-unknown-linux-gnueabi".to_string(), target_endian: "little".to_string(), @@ -19,7 +20,7 @@ pub fn target() -> Target { data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(), arch: "arm".to_string(), target_os: "linux".to_string(), - target_env: "gnueabi".to_string(), + target_env: "gnu".to_string(), target_vendor: "unknown".to_string(), options: TargetOptions { diff --git a/src/librustc_back/target/arm_unknown_linux_gnueabihf.rs b/src/librustc_back/target/arm_unknown_linux_gnueabihf.rs index 05b9401a06..72128e3064 100644 --- a/src/librustc_back/target/arm_unknown_linux_gnueabihf.rs +++ b/src/librustc_back/target/arm_unknown_linux_gnueabihf.rs @@ -11,7 +11,8 @@ use target::{Target, TargetOptions}; pub fn target() -> Target { - let base = super::linux_base::opts(); + let mut base = super::linux_base::opts(); + base.max_atomic_width = 64; Target { llvm_target: "arm-unknown-linux-gnueabihf".to_string(), target_endian: "little".to_string(), @@ -19,7 +20,7 @@ pub fn target() -> Target { data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(), arch: "arm".to_string(), target_os: "linux".to_string(), - target_env: "gnueabihf".to_string(), + target_env: "gnu".to_string(), target_vendor: "unknown".to_string(), options: TargetOptions { diff --git a/src/librustc_back/target/armv7_apple_ios.rs b/src/librustc_back/target/armv7_apple_ios.rs index d131f8b2ef..a2486a1330 100644 --- a/src/librustc_back/target/armv7_apple_ios.rs +++ b/src/librustc_back/target/armv7_apple_ios.rs @@ -23,6 +23,7 @@ pub fn target() -> Target { target_vendor: "apple".to_string(), options: TargetOptions { features: "+v7,+vfp3,+neon".to_string(), + max_atomic_width: 64, .. opts(Arch::Armv7) } } diff --git a/src/librustc_back/target/armv7_linux_androideabi.rs b/src/librustc_back/target/armv7_linux_androideabi.rs new file mode 100644 index 0000000000..fd8f35da16 --- /dev/null +++ b/src/librustc_back/target/armv7_linux_androideabi.rs @@ -0,0 +1,29 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use target::Target; + +pub fn target() -> Target { + let mut base = super::android_base::opts(); + base.features = "+v7,+thumb2,+vfp3,+d16".to_string(); + base.max_atomic_width = 64; + + Target { + llvm_target: "armv7-none-linux-android".to_string(), + target_endian: "little".to_string(), + target_pointer_width: "32".to_string(), + data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(), + arch: "arm".to_string(), + target_os: "android".to_string(), + target_env: "".to_string(), + target_vendor: "unknown".to_string(), + options: base, + } +} diff --git a/src/librustc_back/target/armv7_unknown_linux_gnueabihf.rs b/src/librustc_back/target/armv7_unknown_linux_gnueabihf.rs index 9c9bb72f76..7bcca3a393 100644 --- a/src/librustc_back/target/armv7_unknown_linux_gnueabihf.rs +++ b/src/librustc_back/target/armv7_unknown_linux_gnueabihf.rs @@ -19,12 +19,13 @@ pub fn target() -> Target { data_layout: "e-m:e-p:32:32-i64:64-v128:64:128-a:0:32-n32-S64".to_string(), arch: "arm".to_string(), target_os: "linux".to_string(), - target_env: "gnueabihf".to_string(), + target_env: "gnu".to_string(), target_vendor: "unknown".to_string(), options: TargetOptions { features: "+v7,+vfp3,+neon".to_string(), cpu: "cortex-a8".to_string(), + max_atomic_width: 64, .. base } } diff --git a/src/librustc_back/target/armv7s_apple_ios.rs b/src/librustc_back/target/armv7s_apple_ios.rs index d317589bf3..e5379aa1b4 100644 --- a/src/librustc_back/target/armv7s_apple_ios.rs +++ b/src/librustc_back/target/armv7s_apple_ios.rs @@ -23,6 +23,7 @@ pub fn target() -> Target { target_vendor: "apple".to_string(), options: TargetOptions { features: "+v7,+vfp4,+neon".to_string(), + max_atomic_width: 64, .. opts(Arch::Armv7s) } } diff --git a/src/librustc_back/target/asmjs_unknown_emscripten.rs b/src/librustc_back/target/asmjs_unknown_emscripten.rs index 546f9df605..e620017794 100644 --- a/src/librustc_back/target/asmjs_unknown_emscripten.rs +++ b/src/librustc_back/target/asmjs_unknown_emscripten.rs @@ -22,6 +22,7 @@ pub fn target() -> Target { linker_is_gnu: true, allow_asm: false, obj_is_bitcode: true, + max_atomic_width: 32, .. 
Default::default() }; Target { diff --git a/src/librustc_back/target/bitrig_base.rs b/src/librustc_back/target/bitrig_base.rs index 8eed36f991..7baf80066b 100644 --- a/src/librustc_back/target/bitrig_base.rs +++ b/src/librustc_back/target/bitrig_base.rs @@ -13,7 +13,6 @@ use std::default::Default; pub fn opts() -> TargetOptions { TargetOptions { - linker: "cc".to_string(), dynamic_linking: true, executables: true, linker_is_gnu: true, diff --git a/src/librustc_back/target/dragonfly_base.rs b/src/librustc_back/target/dragonfly_base.rs index f05319a0cb..e2c4003a8b 100644 --- a/src/librustc_back/target/dragonfly_base.rs +++ b/src/librustc_back/target/dragonfly_base.rs @@ -13,7 +13,6 @@ use std::default::Default; pub fn opts() -> TargetOptions { TargetOptions { - linker: "cc".to_string(), dynamic_linking: true, executables: true, linker_is_gnu: true, diff --git a/src/librustc_back/target/freebsd_base.rs b/src/librustc_back/target/freebsd_base.rs index f05319a0cb..e2c4003a8b 100644 --- a/src/librustc_back/target/freebsd_base.rs +++ b/src/librustc_back/target/freebsd_base.rs @@ -13,7 +13,6 @@ use std::default::Default; pub fn opts() -> TargetOptions { TargetOptions { - linker: "cc".to_string(), dynamic_linking: true, executables: true, linker_is_gnu: true, diff --git a/src/librustc_back/target/i386_apple_ios.rs b/src/librustc_back/target/i386_apple_ios.rs index d149d4bbdc..cf4020eeb5 100644 --- a/src/librustc_back/target/i386_apple_ios.rs +++ b/src/librustc_back/target/i386_apple_ios.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use target::Target; +use target::{Target, TargetOptions}; use super::apple_ios_base::{opts, Arch}; pub fn target() -> Target { @@ -21,6 +21,9 @@ pub fn target() -> Target { target_os: "ios".to_string(), target_env: "".to_string(), target_vendor: "apple".to_string(), - options: opts(Arch::I386) + options: TargetOptions { + max_atomic_width: 64, + .. 
opts(Arch::I386) + } } } diff --git a/src/librustc_back/target/i686_apple_darwin.rs b/src/librustc_back/target/i686_apple_darwin.rs index b6e2f4d8e8..302691e9a5 100644 --- a/src/librustc_back/target/i686_apple_darwin.rs +++ b/src/librustc_back/target/i686_apple_darwin.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::apple_base::opts(); base.cpu = "yonah".to_string(); + base.max_atomic_width = 64; base.pre_link_args.push("-m32".to_string()); Target { diff --git a/src/librustc_back/target/i686_linux_android.rs b/src/librustc_back/target/i686_linux_android.rs index b338a971ff..2376de1239 100644 --- a/src/librustc_back/target/i686_linux_android.rs +++ b/src/librustc_back/target/i686_linux_android.rs @@ -12,7 +12,12 @@ use target::Target; pub fn target() -> Target { let mut base = super::android_base::opts(); - base.cpu = "pentium4".to_string(); + + base.max_atomic_width = 64; + + // http://developer.android.com/ndk/guides/abis.html#x86 + base.cpu = "pentiumpro".to_string(); + base.features = "+mmx,+sse,+sse2,+sse3,+ssse3".to_string(); Target { llvm_target: "i686-linux-android".to_string(), @@ -21,7 +26,7 @@ pub fn target() -> Target { data_layout: "e-m:e-p:32:32-f64:32:64-f80:32-n8:16:32-S128".to_string(), arch: "x86".to_string(), target_os: "android".to_string(), - target_env: "gnu".to_string(), + target_env: "".to_string(), target_vendor: "unknown".to_string(), options: base, } diff --git a/src/librustc_back/target/i686_pc_windows_gnu.rs b/src/librustc_back/target/i686_pc_windows_gnu.rs index 48203cc74d..c2cc624c9f 100644 --- a/src/librustc_back/target/i686_pc_windows_gnu.rs +++ b/src/librustc_back/target/i686_pc_windows_gnu.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::windows_base::opts(); base.cpu = "pentium4".to_string(); + base.max_atomic_width = 64; // Mark all dynamic libraries and executables as compatible with the larger 4GiB address // space available to x86 Windows binaries on x86_64. diff --git a/src/librustc_back/target/i686_pc_windows_msvc.rs b/src/librustc_back/target/i686_pc_windows_msvc.rs index 501219ad60..8c1bacc280 100644 --- a/src/librustc_back/target/i686_pc_windows_msvc.rs +++ b/src/librustc_back/target/i686_pc_windows_msvc.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::windows_msvc_base::opts(); base.cpu = "pentium4".to_string(); + base.max_atomic_width = 64; // Mark all dynamic libraries and executables as compatible with the larger 4GiB address // space available to x86 Windows binaries on x86_64. 
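The i686 and x86_64 target hunks in this stretch all follow the same shape that the new field slots into: start from a shared base `TargetOptions`, then override `cpu`, link args, and now `max_atomic_width` for the specific target (custom JSON targets instead fall back to their pointer width, per the `mod.rs` hunk further below). A minimal, self-contained sketch of that pattern; the struct is a simplified stand-in for the real librustc_back type, not the patch's code:

    #[derive(Clone, Debug, Default)]
    struct TargetOptions {
        cpu: String,
        pre_link_args: Vec<String>,
        max_atomic_width: u64,
    }

    fn linux_base_opts() -> TargetOptions {
        TargetOptions { cpu: "generic".to_string(), ..Default::default() }
    }

    fn main() {
        // An i686-style target: 32-bit pointers, but pentium4-class CPUs
        // provide cmpxchg8b, so 64-bit atomics are available.
        let mut base = linux_base_opts();
        base.cpu = "pentium4".to_string();
        base.max_atomic_width = 64;
        base.pre_link_args.push("-m32".to_string());
        println!("{:?}", base);
    }
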
diff --git a/src/librustc_back/target/i686_unknown_dragonfly.rs b/src/librustc_back/target/i686_unknown_dragonfly.rs index cdbbd5eafd..6446ac45f7 100644 --- a/src/librustc_back/target/i686_unknown_dragonfly.rs +++ b/src/librustc_back/target/i686_unknown_dragonfly.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::dragonfly_base::opts(); base.cpu = "pentium4".to_string(); + base.max_atomic_width = 64; base.pre_link_args.push("-m32".to_string()); Target { diff --git a/src/librustc_back/target/i686_unknown_freebsd.rs b/src/librustc_back/target/i686_unknown_freebsd.rs index fadedc2414..a7903d5db6 100644 --- a/src/librustc_back/target/i686_unknown_freebsd.rs +++ b/src/librustc_back/target/i686_unknown_freebsd.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::freebsd_base::opts(); base.cpu = "pentium4".to_string(); + base.max_atomic_width = 64; base.pre_link_args.push("-m32".to_string()); Target { diff --git a/src/librustc_back/target/i686_unknown_linux_gnu.rs b/src/librustc_back/target/i686_unknown_linux_gnu.rs index a1f3ab7690..7813d55707 100644 --- a/src/librustc_back/target/i686_unknown_linux_gnu.rs +++ b/src/librustc_back/target/i686_unknown_linux_gnu.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::linux_base::opts(); base.cpu = "pentium4".to_string(); + base.max_atomic_width = 64; base.pre_link_args.push("-m32".to_string()); Target { diff --git a/src/librustc_back/target/i686_unknown_linux_musl.rs b/src/librustc_back/target/i686_unknown_linux_musl.rs index cce023b843..5274429583 100644 --- a/src/librustc_back/target/i686_unknown_linux_musl.rs +++ b/src/librustc_back/target/i686_unknown_linux_musl.rs @@ -8,31 +8,15 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// See x86_64_unknown_linux_musl for explanation of arguments - use target::Target; pub fn target() -> Target { - let mut base = super::linux_base::opts(); + let mut base = super::linux_musl_base::opts(); base.cpu = "pentium4".to_string(); + base.max_atomic_width = 64; base.pre_link_args.push("-m32".to_string()); base.pre_link_args.push("-Wl,-melf_i386".to_string()); - base.pre_link_args.push("-nostdlib".to_string()); - base.pre_link_args.push("-static".to_string()); - base.pre_link_args.push("-Wl,--eh-frame-hdr".to_string()); - - base.pre_link_args.push("-Wl,-(".to_string()); - base.post_link_args.push("-Wl,-)".to_string()); - - base.pre_link_objects_exe.push("crt1.o".to_string()); - base.pre_link_objects_exe.push("crti.o".to_string()); - base.post_link_objects.push("crtn.o".to_string()); - - base.dynamic_linking = false; - base.has_rpath = false; - base.position_independent_executables = false; - Target { llvm_target: "i686-unknown-linux-musl".to_string(), target_endian: "little".to_string(), diff --git a/src/librustc_back/target/le32_unknown_nacl.rs b/src/librustc_back/target/le32_unknown_nacl.rs index 472b73302a..f4f0262d47 100644 --- a/src/librustc_back/target/le32_unknown_nacl.rs +++ b/src/librustc_back/target/le32_unknown_nacl.rs @@ -25,6 +25,7 @@ pub fn target() -> Target { no_compiler_rt: false, linker_is_gnu: true, allow_asm: false, + max_atomic_width: 32, .. 
Default::default() }; Target { diff --git a/src/librustc_back/target/linux_musl_base.rs b/src/librustc_back/target/linux_musl_base.rs new file mode 100644 index 0000000000..d55907aeed --- /dev/null +++ b/src/librustc_back/target/linux_musl_base.rs @@ -0,0 +1,71 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use target::TargetOptions; + +pub fn opts() -> TargetOptions { + let mut base = super::linux_base::opts(); + + // Make sure that the linker/gcc really don't pull in anything, including + // default objects, libs, etc. + base.pre_link_args.push("-nostdlib".to_string()); + base.pre_link_args.push("-static".to_string()); + + // At least when this was tested, the linker would not add the + // `GNU_EH_FRAME` program header to executables generated, which is required + // when unwinding to locate the unwinding information. I'm not sure why this + // argument is *not* necessary for normal builds, but it can't hurt! + base.pre_link_args.push("-Wl,--eh-frame-hdr".to_string()); + + // There's a whole bunch of circular dependencies when dealing with MUSL + // unfortunately. To put this in perspective libc is statically linked to + // liblibc and libunwind is statically linked to libstd: + // + // * libcore depends on `fmod` which is in libc (transitively in liblibc). + // liblibc, however, depends on libcore. + // * compiler-rt has personality symbols that depend on libunwind, but + // libunwind is in libstd which depends on compiler-rt. + // + // Recall that linkers discard libraries and object files as much as + // possible, and with all the static linking and archives flying around with + // MUSL the linker is super aggressively stripping out objects. For example + // the first case has fmod stripped from liblibc (it's in its own object + // file) so it's not there when libcore needs it. In the second example all + // the unused symbols from libunwind are stripped (each is in its own object + // file in libstd) before we end up linking compiler-rt which depends on + // those symbols. + // + // To deal with these circular dependencies we just force the compiler to + // link everything as a group, not stripping anything out until everything + // is processed. The linker will still perform a pass to strip out object + // files but it won't do so until all objects/archives have been processed. + base.pre_link_args.push("-Wl,-(".to_string()); + base.post_link_args.push("-Wl,-)".to_string()); + + // When generating a statically linked executable there's generally some + // small setup needed which is listed in these files. These are provided by + // a musl toolchain and are linked by default by the `musl-gcc` script. Note + // that `gcc` also does this by default, it just uses some different files. + // + // Each target directory for musl has these object files included in it so + // they'll be included from there. + base.pre_link_objects_exe.push("crt1.o".to_string()); + base.pre_link_objects_exe.push("crti.o".to_string()); + base.post_link_objects.push("crtn.o".to_string()); + + // MUSL support doesn't currently include dynamic linking, so there's no + // need for dylibs or rpath business. Additionally `-pie` is incompatible + // with `-static`, so we can't pass `-pie`. 
+ base.dynamic_linking = false; + base.has_rpath = false; + base.position_independent_executables = false; + + base +} diff --git a/src/librustc_back/target/mips_unknown_linux_gnu.rs b/src/librustc_back/target/mips_unknown_linux_gnu.rs index 863f5ceab0..794e4d4996 100644 --- a/src/librustc_back/target/mips_unknown_linux_gnu.rs +++ b/src/librustc_back/target/mips_unknown_linux_gnu.rs @@ -23,6 +23,7 @@ pub fn target() -> Target { options: TargetOptions { cpu: "mips32r2".to_string(), features: "+mips32r2,+soft-float".to_string(), + max_atomic_width: 32, ..super::linux_base::opts() }, } diff --git a/src/librustc_back/target/mips_unknown_linux_musl.rs b/src/librustc_back/target/mips_unknown_linux_musl.rs index ac0fde5449..35366659d5 100644 --- a/src/librustc_back/target/mips_unknown_linux_musl.rs +++ b/src/librustc_back/target/mips_unknown_linux_musl.rs @@ -23,6 +23,7 @@ pub fn target() -> Target { options: TargetOptions { cpu: "mips32r2".to_string(), features: "+mips32r2,+soft-float".to_string(), + max_atomic_width: 32, ..super::linux_base::opts() } } diff --git a/src/librustc_back/target/mipsel_unknown_linux_gnu.rs b/src/librustc_back/target/mipsel_unknown_linux_gnu.rs index ff33effa3e..ac1536b3d0 100644 --- a/src/librustc_back/target/mipsel_unknown_linux_gnu.rs +++ b/src/librustc_back/target/mipsel_unknown_linux_gnu.rs @@ -24,6 +24,7 @@ pub fn target() -> Target { options: TargetOptions { cpu: "mips32".to_string(), features: "+mips32".to_string(), + max_atomic_width: 32, ..super::linux_base::opts() }, } diff --git a/src/librustc_back/target/mipsel_unknown_linux_musl.rs b/src/librustc_back/target/mipsel_unknown_linux_musl.rs index d9fb140503..a9ea52c427 100644 --- a/src/librustc_back/target/mipsel_unknown_linux_musl.rs +++ b/src/librustc_back/target/mipsel_unknown_linux_musl.rs @@ -23,6 +23,7 @@ pub fn target() -> Target { options: TargetOptions { cpu: "mips32".to_string(), features: "+mips32".to_string(), + max_atomic_width: 32, ..super::linux_base::opts() } } diff --git a/src/librustc_back/target/mod.rs b/src/librustc_back/target/mod.rs index cdd1e4c799..2163a8a168 100644 --- a/src/librustc_back/target/mod.rs +++ b/src/librustc_back/target/mod.rs @@ -27,8 +27,7 @@ //! rustc will search each directory in the environment variable //! `RUST_TARGET_PATH` for a file named `TRIPLE.json`. The first one found will //! be loaded. If no file is found in any of those directories, a fatal error -//! will be given. `RUST_TARGET_PATH` includes `/etc/rustc` as its last entry, -//! to be searched by default. +//! will be given. //! //! Projects defining their own targets should use //! `--target=path/to/my-awesome-platform.json` instead of adding to @@ -57,6 +56,7 @@ mod bitrig_base; mod dragonfly_base; mod freebsd_base; mod linux_base; +mod linux_musl_base; mod openbsd_base; mod netbsd_base; mod solaris_base; @@ -108,6 +108,7 @@ supported_targets! { ("i686-linux-android", i686_linux_android), ("arm-linux-androideabi", arm_linux_androideabi), + ("armv7-linux-androideabi", armv7_linux_androideabi), ("aarch64-linux-android", aarch64_linux_android), ("i686-unknown-freebsd", i686_unknown_freebsd), @@ -202,7 +203,7 @@ pub struct TargetOptions { pub post_link_args: Vec, /// Default CPU to pass to LLVM. Corresponds to `llc -mcpu=$cpu`. Defaults - /// to "default". + /// to "generic". pub cpu: String, /// Default target features to pass to LLVM. These features will *always* be /// passed, and cannot be disabled even via `-C`. 
Corresponds to `llc @@ -291,6 +292,10 @@ pub struct TargetOptions { // If we give emcc .o files that are actually .bc files it // will 'just work'. pub obj_is_bitcode: bool, + + /// Maximum integer size in bits that this target can perform atomic + /// operations on. + pub max_atomic_width: u64, } impl Default for TargetOptions { @@ -339,6 +344,7 @@ impl Default for TargetOptions { allow_asm: true, has_elf_tls: false, obj_is_bitcode: false, + max_atomic_width: 0, } } } @@ -391,6 +397,9 @@ impl Target { options: Default::default(), }; + // Default max-atomic-width to target-pointer-width + base.options.max_atomic_width = base.target_pointer_width.parse().unwrap(); + macro_rules! key { ($key_name:ident) => ( { let name = (stringify!($key_name)).replace("_", "-"); @@ -403,6 +412,12 @@ impl Target { .map(|o| o.as_boolean() .map(|s| base.options.$key_name = s)); } ); + ($key_name:ident, u64) => ( { + let name = (stringify!($key_name)).replace("_", "-"); + obj.find(&name[..]) + .map(|o| o.as_u64() + .map(|s| base.options.$key_name = s)); + } ); ($key_name:ident, list) => ( { let name = (stringify!($key_name)).replace("_", "-"); obj.find(&name[..]).map(|o| o.as_array() @@ -440,6 +455,7 @@ impl Target { key!(target_family, optional); key!(is_like_osx, bool); key!(is_like_windows, bool); + key!(is_like_msvc, bool); key!(linker_is_gnu, bool); key!(has_rpath, bool); key!(no_compiler_rt, bool); @@ -449,6 +465,7 @@ impl Target { key!(archive_format); key!(allow_asm, bool); key!(custom_unwind_resume, bool); + key!(max_atomic_width, u64); base } diff --git a/src/librustc_back/target/netbsd_base.rs b/src/librustc_back/target/netbsd_base.rs index bf6a2e1ce7..cc03ed56aa 100644 --- a/src/librustc_back/target/netbsd_base.rs +++ b/src/librustc_back/target/netbsd_base.rs @@ -13,7 +13,6 @@ use std::default::Default; pub fn opts() -> TargetOptions { TargetOptions { - linker: "cc".to_string(), dynamic_linking: true, executables: true, linker_is_gnu: true, diff --git a/src/librustc_back/target/openbsd_base.rs b/src/librustc_back/target/openbsd_base.rs index d3db0e4877..7afdfcd691 100644 --- a/src/librustc_back/target/openbsd_base.rs +++ b/src/librustc_back/target/openbsd_base.rs @@ -13,7 +13,6 @@ use std::default::Default; pub fn opts() -> TargetOptions { TargetOptions { - linker: "cc".to_string(), dynamic_linking: true, executables: true, linker_is_gnu: true, diff --git a/src/librustc_back/target/powerpc64_unknown_linux_gnu.rs b/src/librustc_back/target/powerpc64_unknown_linux_gnu.rs index fe7daaec1c..be4be8e6fc 100644 --- a/src/librustc_back/target/powerpc64_unknown_linux_gnu.rs +++ b/src/librustc_back/target/powerpc64_unknown_linux_gnu.rs @@ -14,6 +14,7 @@ pub fn target() -> Target { let mut base = super::linux_base::opts(); base.cpu = "ppc64".to_string(); base.pre_link_args.push("-m64".to_string()); + base.max_atomic_width = 64; Target { llvm_target: "powerpc64-unknown-linux-gnu".to_string(), diff --git a/src/librustc_back/target/powerpc64le_unknown_linux_gnu.rs b/src/librustc_back/target/powerpc64le_unknown_linux_gnu.rs index 4aab2b1802..b0a81ce7ec 100644 --- a/src/librustc_back/target/powerpc64le_unknown_linux_gnu.rs +++ b/src/librustc_back/target/powerpc64le_unknown_linux_gnu.rs @@ -14,6 +14,7 @@ pub fn target() -> Target { let mut base = super::linux_base::opts(); base.cpu = "ppc64le".to_string(); base.pre_link_args.push("-m64".to_string()); + base.max_atomic_width = 64; Target { llvm_target: "powerpc64le-unknown-linux-gnu".to_string(), diff --git a/src/librustc_back/target/powerpc_unknown_linux_gnu.rs 
b/src/librustc_back/target/powerpc_unknown_linux_gnu.rs index 1df36442c0..aea57dc4b7 100644 --- a/src/librustc_back/target/powerpc_unknown_linux_gnu.rs +++ b/src/librustc_back/target/powerpc_unknown_linux_gnu.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::linux_base::opts(); base.pre_link_args.push("-m32".to_string()); + base.max_atomic_width = 32; Target { llvm_target: "powerpc-unknown-linux-gnu".to_string(), diff --git a/src/librustc_back/target/solaris_base.rs b/src/librustc_back/target/solaris_base.rs index 4ffa0c69da..a7af0462e5 100644 --- a/src/librustc_back/target/solaris_base.rs +++ b/src/librustc_back/target/solaris_base.rs @@ -13,7 +13,6 @@ use std::default::Default; pub fn opts() -> TargetOptions { TargetOptions { - linker: "cc".to_string(), dynamic_linking: true, executables: true, has_rpath: true, diff --git a/src/librustc_back/target/x86_64_apple_darwin.rs b/src/librustc_back/target/x86_64_apple_darwin.rs index c8b5dd0ecc..5542c9120a 100644 --- a/src/librustc_back/target/x86_64_apple_darwin.rs +++ b/src/librustc_back/target/x86_64_apple_darwin.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::apple_base::opts(); base.cpu = "core2".to_string(); + base.max_atomic_width = 128; // core2 support cmpxchg16b base.eliminate_frame_pointer = false; base.pre_link_args.push("-m64".to_string()); diff --git a/src/librustc_back/target/x86_64_apple_ios.rs b/src/librustc_back/target/x86_64_apple_ios.rs index d038e88f2b..8638241f86 100644 --- a/src/librustc_back/target/x86_64_apple_ios.rs +++ b/src/librustc_back/target/x86_64_apple_ios.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use target::Target; +use target::{Target, TargetOptions}; use super::apple_ios_base::{opts, Arch}; pub fn target() -> Target { @@ -21,6 +21,9 @@ pub fn target() -> Target { target_os: "ios".to_string(), target_env: "".to_string(), target_vendor: "apple".to_string(), - options: opts(Arch::X86_64) + options: TargetOptions { + max_atomic_width: 64, + .. 
opts(Arch::X86_64) + } } } diff --git a/src/librustc_back/target/x86_64_pc_windows_gnu.rs b/src/librustc_back/target/x86_64_pc_windows_gnu.rs index f0a09ae71e..e243054d02 100644 --- a/src/librustc_back/target/x86_64_pc_windows_gnu.rs +++ b/src/librustc_back/target/x86_64_pc_windows_gnu.rs @@ -14,6 +14,7 @@ pub fn target() -> Target { let mut base = super::windows_base::opts(); base.cpu = "x86-64".to_string(); base.pre_link_args.push("-m64".to_string()); + base.max_atomic_width = 64; Target { llvm_target: "x86_64-pc-windows-gnu".to_string(), diff --git a/src/librustc_back/target/x86_64_pc_windows_msvc.rs b/src/librustc_back/target/x86_64_pc_windows_msvc.rs index b3fbd6ef05..a23a807a02 100644 --- a/src/librustc_back/target/x86_64_pc_windows_msvc.rs +++ b/src/librustc_back/target/x86_64_pc_windows_msvc.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::windows_msvc_base::opts(); base.cpu = "x86-64".to_string(); + base.max_atomic_width = 64; Target { llvm_target: "x86_64-pc-windows-msvc".to_string(), diff --git a/src/librustc_back/target/x86_64_rumprun_netbsd.rs b/src/librustc_back/target/x86_64_rumprun_netbsd.rs index 652159d10f..af5d21c4d9 100644 --- a/src/librustc_back/target/x86_64_rumprun_netbsd.rs +++ b/src/librustc_back/target/x86_64_rumprun_netbsd.rs @@ -15,6 +15,7 @@ pub fn target() -> Target { base.pre_link_args.push("-m64".to_string()); base.linker = "x86_64-rumprun-netbsd-gcc".to_string(); base.ar = "x86_64-rumprun-netbsd-ar".to_string(); + base.max_atomic_width = 64; base.dynamic_linking = false; base.has_rpath = false; diff --git a/src/librustc_back/target/x86_64_sun_solaris.rs b/src/librustc_back/target/x86_64_sun_solaris.rs index 5aa08ea9c8..8f2c905cf2 100644 --- a/src/librustc_back/target/x86_64_sun_solaris.rs +++ b/src/librustc_back/target/x86_64_sun_solaris.rs @@ -14,6 +14,7 @@ pub fn target() -> Target { let mut base = super::solaris_base::opts(); base.pre_link_args.push("-m64".to_string()); base.cpu = "x86-64".to_string(); + base.max_atomic_width = 64; Target { llvm_target: "x86_64-pc-solaris".to_string(), diff --git a/src/librustc_back/target/x86_64_unknown_bitrig.rs b/src/librustc_back/target/x86_64_unknown_bitrig.rs index e8b95ed80d..87753da540 100644 --- a/src/librustc_back/target/x86_64_unknown_bitrig.rs +++ b/src/librustc_back/target/x86_64_unknown_bitrig.rs @@ -12,6 +12,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::bitrig_base::opts(); + base.max_atomic_width = 64; base.pre_link_args.push("-m64".to_string()); Target { diff --git a/src/librustc_back/target/x86_64_unknown_dragonfly.rs b/src/librustc_back/target/x86_64_unknown_dragonfly.rs index 3fa46c31a5..2535071f30 100644 --- a/src/librustc_back/target/x86_64_unknown_dragonfly.rs +++ b/src/librustc_back/target/x86_64_unknown_dragonfly.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::dragonfly_base::opts(); base.cpu = "x86-64".to_string(); + base.max_atomic_width = 64; base.pre_link_args.push("-m64".to_string()); Target { diff --git a/src/librustc_back/target/x86_64_unknown_freebsd.rs b/src/librustc_back/target/x86_64_unknown_freebsd.rs index d345a32179..d3ad0578ae 100644 --- a/src/librustc_back/target/x86_64_unknown_freebsd.rs +++ b/src/librustc_back/target/x86_64_unknown_freebsd.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::freebsd_base::opts(); base.cpu = "x86-64".to_string(); + base.max_atomic_width = 64; base.pre_link_args.push("-m64".to_string()); Target { 
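As context for the `max_atomic_width` hunks above: the new `u64` arm of the `key!` macro means a custom target JSON file can set this value under the hyphenated key `max-atomic-width`, and when the key is absent the loader falls back to the target's pointer width. The sketch below is illustrative only — the `SpecSketch` type and the values in it are made up for the example — and mirrors the renaming and fallback logic shown in the diff rather than quoting compiler code.

```
// Illustrative sketch only: mirrors the `key!` field-to-key renaming and the
// pointer-width fallback added in the hunks above; `SpecSketch` is made up.
fn json_key_for(field_name: &str) -> String {
    // `max_atomic_width` is looked up under the JSON key `max-atomic-width`.
    field_name.replace("_", "-")
}

struct SpecSketch {
    target_pointer_width: String,  // e.g. "64", as written in a target spec
    max_atomic_width: Option<u64>, // value read from "max-atomic-width", if any
}

impl SpecSketch {
    // When the JSON file does not set "max-atomic-width", default to the
    // pointer width, matching the
    // `base.options.max_atomic_width = base.target_pointer_width.parse().unwrap()`
    // line added above.
    fn effective_max_atomic_width(&self) -> u64 {
        self.max_atomic_width
            .unwrap_or_else(|| self.target_pointer_width.parse().unwrap())
    }
}

fn main() {
    assert_eq!(json_key_for("max_atomic_width"), "max-atomic-width");

    let spec = SpecSketch {
        target_pointer_width: "32".to_string(),
        max_atomic_width: None,
    };
    assert_eq!(spec.effective_max_atomic_width(), 32);
}
```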
diff --git a/src/librustc_back/target/x86_64_unknown_linux_gnu.rs b/src/librustc_back/target/x86_64_unknown_linux_gnu.rs index 69e333a135..7908e0d581 100644 --- a/src/librustc_back/target/x86_64_unknown_linux_gnu.rs +++ b/src/librustc_back/target/x86_64_unknown_linux_gnu.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::linux_base::opts(); base.cpu = "x86-64".to_string(); + base.max_atomic_width = 64; base.pre_link_args.push("-m64".to_string()); Target { diff --git a/src/librustc_back/target/x86_64_unknown_linux_musl.rs b/src/librustc_back/target/x86_64_unknown_linux_musl.rs index 622a1fe8ba..3301e0e0dc 100644 --- a/src/librustc_back/target/x86_64_unknown_linux_musl.rs +++ b/src/librustc_back/target/x86_64_unknown_linux_musl.rs @@ -11,64 +11,11 @@ use target::Target; pub fn target() -> Target { - let mut base = super::linux_base::opts(); + let mut base = super::linux_musl_base::opts(); base.cpu = "x86-64".to_string(); + base.max_atomic_width = 64; base.pre_link_args.push("-m64".to_string()); - // Make sure that the linker/gcc really don't pull in anything, including - // default objects, libs, etc. - base.pre_link_args.push("-nostdlib".to_string()); - base.pre_link_args.push("-static".to_string()); - - // At least when this was tested, the linker would not add the - // `GNU_EH_FRAME` program header to executables generated, which is required - // when unwinding to locate the unwinding information. I'm not sure why this - // argument is *not* necessary for normal builds, but it can't hurt! - base.pre_link_args.push("-Wl,--eh-frame-hdr".to_string()); - - // There's a whole bunch of circular dependencies when dealing with MUSL - // unfortunately. To put this in perspective libc is statically linked to - // liblibc and libunwind is statically linked to libstd: - // - // * libcore depends on `fmod` which is in libc (transitively in liblibc). - // liblibc, however, depends on libcore. - // * compiler-rt has personality symbols that depend on libunwind, but - // libunwind is in libstd which depends on compiler-rt. - // - // Recall that linkers discard libraries and object files as much as - // possible, and with all the static linking and archives flying around with - // MUSL the linker is super aggressively stripping out objects. For example - // the first case has fmod stripped from liblibc (it's in its own object - // file) so it's not there when libcore needs it. In the second example all - // the unused symbols from libunwind are stripped (each is in its own object - // file in libstd) before we end up linking compiler-rt which depends on - // those symbols. - // - // To deal with these circular dependencies we just force the compiler to - // link everything as a group, not stripping anything out until everything - // is processed. The linker will still perform a pass to strip out object - // files but it won't do so until all objects/archives have been processed. - base.pre_link_args.push("-Wl,-(".to_string()); - base.post_link_args.push("-Wl,-)".to_string()); - - // When generating a statically linked executable there's generally some - // small setup needed which is listed in these files. These are provided by - // a musl toolchain and are linked by default by the `musl-gcc` script. Note - // that `gcc` also does this by default, it just uses some different files. - // - // Each target directory for musl has these object files included in it so - // they'll be included from there. 
- base.pre_link_objects_exe.push("crt1.o".to_string()); - base.pre_link_objects_exe.push("crti.o".to_string()); - base.post_link_objects.push("crtn.o".to_string()); - - // MUSL support doesn't currently include dynamic linking, so there's no - // need for dylibs or rpath business. Additionally `-pie` is incompatible - // with `-static`, so we can't pass `-pie`. - base.dynamic_linking = false; - base.has_rpath = false; - base.position_independent_executables = false; - Target { llvm_target: "x86_64-unknown-linux-musl".to_string(), target_endian: "little".to_string(), diff --git a/src/librustc_back/target/x86_64_unknown_netbsd.rs b/src/librustc_back/target/x86_64_unknown_netbsd.rs index 74bf718911..7e6d1b7846 100644 --- a/src/librustc_back/target/x86_64_unknown_netbsd.rs +++ b/src/librustc_back/target/x86_64_unknown_netbsd.rs @@ -12,6 +12,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::netbsd_base::opts(); + base.max_atomic_width = 64; base.pre_link_args.push("-m64".to_string()); Target { diff --git a/src/librustc_back/target/x86_64_unknown_openbsd.rs b/src/librustc_back/target/x86_64_unknown_openbsd.rs index 521de5373d..823b0994b0 100644 --- a/src/librustc_back/target/x86_64_unknown_openbsd.rs +++ b/src/librustc_back/target/x86_64_unknown_openbsd.rs @@ -13,6 +13,7 @@ use target::Target; pub fn target() -> Target { let mut base = super::openbsd_base::opts(); base.cpu = "x86-64".to_string(); + base.max_atomic_width = 64; base.pre_link_args.push("-m64".to_string()); Target { diff --git a/src/librustc_bitflags/Cargo.toml b/src/librustc_bitflags/Cargo.toml index 926ed5960d..d82a72994c 100644 --- a/src/librustc_bitflags/Cargo.toml +++ b/src/librustc_bitflags/Cargo.toml @@ -6,4 +6,4 @@ version = "0.0.0" [lib] name = "rustc_bitflags" path = "lib.rs" -test = false +doctest = false diff --git a/src/librustc_borrowck/Cargo.toml b/src/librustc_borrowck/Cargo.toml index 6da87f97fb..fbc267aaa6 100644 --- a/src/librustc_borrowck/Cargo.toml +++ b/src/librustc_borrowck/Cargo.toml @@ -7,6 +7,7 @@ version = "0.0.0" name = "rustc_borrowck" path = "lib.rs" crate-type = ["dylib"] +test = false [dependencies] log = { path = "../liblog" } diff --git a/src/librustc_borrowck/borrowck/check_loans.rs b/src/librustc_borrowck/borrowck/check_loans.rs index d79ba213ac..36f95f62d0 100644 --- a/src/librustc_borrowck/borrowck/check_loans.rs +++ b/src/librustc_borrowck/borrowck/check_loans.rs @@ -22,12 +22,10 @@ use borrowck::*; use borrowck::InteriorKind::{InteriorElement, InteriorField}; use rustc::middle::expr_use_visitor as euv; use rustc::middle::expr_use_visitor::MutateMode; -use rustc::infer; use rustc::middle::mem_categorization as mc; use rustc::middle::mem_categorization::Categorization; use rustc::middle::region; use rustc::ty::{self, TyCtxt}; -use rustc::traits::ProjectionMode; use syntax::ast; use syntax::codemap::Span; use rustc::hir; @@ -92,7 +90,7 @@ struct CheckLoanCtxt<'a, 'tcx: 'a> { dfcx_loans: &'a LoanDataFlow<'a, 'tcx>, move_data: &'a move_data::FlowedMoveData<'a, 'tcx>, all_loans: &'a [Loan<'tcx>], - param_env: &'a ty::ParameterEnvironment<'a, 'tcx>, + param_env: &'a ty::ParameterEnvironment<'tcx>, } impl<'a, 'tcx> euv::Delegate<'tcx> for CheckLoanCtxt<'a, 'tcx> { @@ -203,11 +201,7 @@ pub fn check_loans<'a, 'b, 'c, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, debug!("check_loans(body id={})", body.id); let param_env = ty::ParameterEnvironment::for_item(bccx.tcx, fn_id); - let infcx = infer::new_infer_ctxt(bccx.tcx, - &bccx.tcx.tables, - Some(param_env), - ProjectionMode::AnyFinal); - 
+ let infcx = bccx.tcx.borrowck_fake_infer_ctxt(param_env); let mut clcx = CheckLoanCtxt { bccx: bccx, dfcx_loans: dfcx_loans, @@ -215,11 +209,7 @@ pub fn check_loans<'a, 'b, 'c, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, all_loans: all_loans, param_env: &infcx.parameter_environment }; - - { - let mut euv = euv::ExprUseVisitor::new(&mut clcx, &infcx); - euv.walk_fn(decl, body); - } + euv::ExprUseVisitor::new(&mut clcx, &infcx).walk_fn(decl, body); } #[derive(PartialEq)] @@ -235,7 +225,7 @@ fn compatible_borrow_kinds(borrow_kind1: ty::BorrowKind, } impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { - pub fn tcx(&self) -> &'a TyCtxt<'tcx> { self.bccx.tcx } + pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.bccx.tcx } pub fn each_issued_loan(&self, node: ast::NodeId, mut op: F) -> bool where F: FnMut(&Loan<'tcx>) -> bool, @@ -447,22 +437,24 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { // borrow ends let common = new_loan.loan_path.common(&old_loan.loan_path); - let (nl, ol, new_loan_msg, old_loan_msg) = + let (nl, ol, new_loan_msg, old_loan_msg) = { if new_loan.loan_path.has_fork(&old_loan.loan_path) && common.is_some() { let nl = self.bccx.loan_path_to_string(&common.unwrap()); let ol = nl.clone(); - let new_loan_msg = format!(" (here through borrowing `{}`)", + let new_loan_msg = format!(" (via `{}`)", self.bccx.loan_path_to_string( &new_loan.loan_path)); - let old_loan_msg = format!(" (through borrowing `{}`)", + let old_loan_msg = format!(" (via `{}`)", self.bccx.loan_path_to_string( &old_loan.loan_path)); (nl, ol, new_loan_msg, old_loan_msg) } else { (self.bccx.loan_path_to_string(&new_loan.loan_path), self.bccx.loan_path_to_string(&old_loan.loan_path), - String::new(), String::new()) - }; + String::new(), + String::new()) + } + }; let ol_pronoun = if new_loan.loan_path == old_loan.loan_path { "it".to_string() @@ -470,102 +462,133 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { format!("`{}`", ol) }; + // We want to assemble all the relevant locations for the error. + // + // 1. Where did the new loan occur. + // - if due to closure creation, where was the variable used in closure? + // 2. Where did old loan occur. + // 3. Where does old loan expire. 
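+        // (`previous_end_span`, computed just below from the old loan's kill
+        //  scope, is where the "... ends here" labels in each arm point.)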
+ + let previous_end_span = + self.tcx().map.span(old_loan.kill_scope.node_id(&self.tcx().region_maps)) + .end_point(); + let mut err = match (new_loan.kind, old_loan.kind) { (ty::MutBorrow, ty::MutBorrow) => { - struct_span_err!(self.bccx, new_loan.span, E0499, - "cannot borrow `{}`{} as mutable \ - more than once at a time", - nl, new_loan_msg) + let mut err = struct_span_err!(self.bccx, new_loan.span, E0499, + "cannot borrow `{}`{} as mutable \ + more than once at a time", + nl, new_loan_msg); + err.span_label( + old_loan.span, + &format!("first mutable borrow occurs here{}", old_loan_msg)); + err.span_label( + new_loan.span, + &format!("second mutable borrow occurs here{}", new_loan_msg)); + err.span_label( + previous_end_span, + &format!("first borrow ends here")); + err + } + + (ty::UniqueImmBorrow, ty::UniqueImmBorrow) => { + let mut err = struct_span_err!(self.bccx, new_loan.span, E0524, + "two closures require unique access to `{}` \ + at the same time", + nl); + err.span_label( + old_loan.span, + &format!("first closure is constructed here")); + err.span_label( + new_loan.span, + &format!("second closure is constructed here")); + err.span_label( + previous_end_span, + &format!("borrow from first closure ends here")); + err } (ty::UniqueImmBorrow, _) => { - struct_span_err!(self.bccx, new_loan.span, E0500, - "closure requires unique access to `{}` \ - but {} is already borrowed{}", - nl, ol_pronoun, old_loan_msg) + let mut err = struct_span_err!(self.bccx, new_loan.span, E0500, + "closure requires unique access to `{}` \ + but {} is already borrowed{}", + nl, ol_pronoun, old_loan_msg); + err.span_label( + new_loan.span, + &format!("closure construction occurs here{}", new_loan_msg)); + err.span_label( + old_loan.span, + &format!("borrow occurs here{}", old_loan_msg)); + err.span_label( + previous_end_span, + &format!("borrow ends here")); + err } (_, ty::UniqueImmBorrow) => { - struct_span_err!(self.bccx, new_loan.span, E0501, - "cannot borrow `{}`{} as {} because \ - previous closure requires unique access", - nl, new_loan_msg, new_loan.kind.to_user_str()) + let mut err = struct_span_err!(self.bccx, new_loan.span, E0501, + "cannot borrow `{}`{} as {} because \ + previous closure requires unique access", + nl, new_loan_msg, new_loan.kind.to_user_str()); + err.span_label( + new_loan.span, + &format!("borrow occurs here{}", new_loan_msg)); + err.span_label( + old_loan.span, + &format!("closure construction occurs here{}", old_loan_msg)); + err.span_label( + previous_end_span, + &format!("borrow from closure ends here")); + err } (_, _) => { - struct_span_err!(self.bccx, new_loan.span, E0502, - "cannot borrow `{}`{} as {} because \ - {} is also borrowed as {}{}", - nl, - new_loan_msg, + let mut err = struct_span_err!(self.bccx, new_loan.span, E0502, + "cannot borrow `{}`{} as {} because \ + {} is also borrowed as {}{}", + nl, + new_loan_msg, + new_loan.kind.to_user_str(), + ol_pronoun, + old_loan.kind.to_user_str(), + old_loan_msg); + err.span_label( + new_loan.span, + &format!("{} borrow occurs here{}", new_loan.kind.to_user_str(), - ol_pronoun, + new_loan_msg)); + err.span_label( + old_loan.span, + &format!("{} borrow occurs here{}", old_loan.kind.to_user_str(), - old_loan_msg) + old_loan_msg)); + err.span_label( + previous_end_span, + &format!("{} borrow ends here", + old_loan.kind.to_user_str())); + err } }; match new_loan.cause { euv::ClosureCapture(span) => { - err.span_note( + err.span_label( span, - &format!("borrow occurs due to use of `{}` in closure", - nl)); + 
&format!("borrow occurs due to use of `{}` in closure", nl)); } _ => { } } - let rule_summary = match old_loan.kind { - ty::MutBorrow => { - format!("the mutable borrow prevents subsequent \ - moves, borrows, or modification of `{0}` \ - until the borrow ends", - ol) - } - - ty::ImmBorrow => { - format!("the immutable borrow prevents subsequent \ - moves or mutable borrows of `{0}` \ - until the borrow ends", - ol) - } - - ty::UniqueImmBorrow => { - format!("the unique capture prevents subsequent \ - moves or borrows of `{0}` \ - until the borrow ends", - ol) - } - }; - - let borrow_summary = match old_loan.cause { - euv::ClosureCapture(_) => { - format!("previous borrow of `{}` occurs here{} due to \ - use in closure", - ol, old_loan_msg) - } - - euv::OverloadedOperator | - euv::AddrOf | - euv::AutoRef | - euv::AutoUnsafe | - euv::ClosureInvocation | - euv::ForLoop | - euv::RefBinding | - euv::MatchDiscriminant => { - format!("previous borrow of `{}` occurs here{}", - ol, old_loan_msg) + match old_loan.cause { + euv::ClosureCapture(span) => { + err.span_label( + span, + &format!("previous borrow occurs due to use of `{}` in closure", + ol)); } - }; - - err.span_note( - old_loan.span, - &format!("{}; {}", borrow_summary, rule_summary)); + _ => { } + } - let old_loan_span = self.tcx().map.span( - old_loan.kill_scope.node_id(&self.tcx().region_maps)); - err.span_end_note(old_loan_span, - "previous borrow ends here"); err.emit(); return false; } @@ -645,23 +668,41 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { UseOk => { } UseWhileBorrowed(loan_path, loan_span) => { let mut err = match move_kind { - move_data::Captured => - struct_span_err!(self.bccx, span, E0504, + move_data::Captured => { + let mut err = struct_span_err!(self.bccx, span, E0504, "cannot move `{}` into closure because it is borrowed", - &self.bccx.loan_path_to_string(move_path)), + &self.bccx.loan_path_to_string(move_path)); + err.span_label( + loan_span, + &format!("borrow of `{}` occurs here", + &self.bccx.loan_path_to_string(&loan_path)) + ); + err.span_label( + span, + &format!("move into closure occurs here") + ); + err + } move_data::Declared | move_data::MoveExpr | - move_data::MovePat => - struct_span_err!(self.bccx, span, E0505, + move_data::MovePat => { + let mut err = struct_span_err!(self.bccx, span, E0505, "cannot move out of `{}` because it is borrowed", - &self.bccx.loan_path_to_string(move_path)) + &self.bccx.loan_path_to_string(move_path)); + err.span_label( + loan_span, + &format!("borrow of `{}` occurs here", + &self.bccx.loan_path_to_string(&loan_path)) + ); + err.span_label( + span, + &format!("move out of `{}` occurs here", + &self.bccx.loan_path_to_string(move_path)) + ); + err + } }; - err.span_note( - loan_span, - &format!("borrow of `{}` occurs here", - &self.bccx.loan_path_to_string(&loan_path)) - ); err.emit(); } } @@ -827,9 +868,12 @@ impl<'a, 'tcx> CheckLoanCtxt<'a, 'tcx> { struct_span_err!(self.bccx, span, E0506, "cannot assign to `{}` because it is borrowed", self.bccx.loan_path_to_string(loan_path)) - .span_note(loan.span, + .span_label(loan.span, &format!("borrow of `{}` occurs here", self.bccx.loan_path_to_string(loan_path))) + .span_label(span, + &format!("assignment to borrowed `{}` occurs here", + self.bccx.loan_path_to_string(loan_path))) .emit(); } } diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs index 0b1c4efa9a..6ab85d7d44 100644 --- a/src/librustc_borrowck/borrowck/fragments.rs +++ b/src/librustc_borrowck/borrowck/fragments.rs @@ 
-199,10 +199,10 @@ impl FragmentSets { } } -pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, - tcx: &TyCtxt<'tcx>, - sp: Span, - id: ast::NodeId) { +pub fn instrument_move_fragments<'a, 'tcx>(this: &MoveData<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + sp: Span, + id: ast::NodeId) { let span_err = tcx.map.attrs(id).iter() .any(|a| a.check_name("rustc_move_fragments")); let print = tcx.sess.opts.debugging_opts.print_move_fragments; @@ -245,7 +245,7 @@ pub fn instrument_move_fragments<'tcx>(this: &MoveData<'tcx>, /// /// Note: "left-over fragments" means paths that were not directly referenced in moves nor /// assignments, but must nonetheless be tracked as potential drop obligations. -pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &TyCtxt<'tcx>) { +pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut fragments = this.fragments.borrow_mut(); @@ -346,11 +346,11 @@ pub fn fixup_fragment_sets<'tcx>(this: &MoveData<'tcx>, tcx: &TyCtxt<'tcx>) { /// Adds all of the precisely-tracked siblings of `lp` as potential move paths of interest. For /// example, if `lp` represents `s.x.j`, then adds moves paths for `s.x.i` and `s.x.k`, the /// siblings of `s.x.j`. -fn add_fragment_siblings<'tcx>(this: &MoveData<'tcx>, - tcx: &TyCtxt<'tcx>, - gathered_fragments: &mut Vec, - lp: Rc>, - origin_id: Option) { +fn add_fragment_siblings<'a, 'tcx>(this: &MoveData<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + gathered_fragments: &mut Vec, + lp: Rc>, + origin_id: Option) { match lp.kind { LpVar(_) | LpUpvar(..) => {} // Local variables have no siblings. @@ -405,16 +405,16 @@ fn add_fragment_siblings<'tcx>(this: &MoveData<'tcx>, /// We have determined that `origin_lp` destructures to LpExtend(parent, original_field_name). /// Based on this, add move paths for all of the siblings of `origin_lp`. -fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>, - tcx: &TyCtxt<'tcx>, - gathered_fragments: &mut Vec, - parent_lp: &Rc>, - mc: mc::MutabilityCategory, - origin_field_name: &mc::FieldName, - origin_lp: &Rc>, - origin_id: Option, - enum_variant_info: Option<(DefId, - Rc>)>) { +fn add_fragment_siblings_for_extension<'a, 'tcx>(this: &MoveData<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + gathered_fragments: &mut Vec, + parent_lp: &Rc>, + mc: mc::MutabilityCategory, + origin_field_name: &mc::FieldName, + origin_lp: &Rc>, + origin_id: Option, + enum_variant_info: Option<(DefId, + Rc>)>) { let parent_ty = parent_lp.to_type(); let mut add_fragment_sibling_local = |field_name, variant_did| { @@ -504,14 +504,15 @@ fn add_fragment_siblings_for_extension<'tcx>(this: &MoveData<'tcx>, /// Adds the single sibling `LpExtend(parent, new_field_name)` of `origin_lp` (the original /// loan-path). -fn add_fragment_sibling_core<'tcx>(this: &MoveData<'tcx>, - tcx: &TyCtxt<'tcx>, - gathered_fragments: &mut Vec, - parent: Rc>, - mc: mc::MutabilityCategory, - new_field_name: mc::FieldName, - origin_lp: &Rc>, - enum_variant_did: Option) -> MovePathIndex { +fn add_fragment_sibling_core<'a, 'tcx>(this: &MoveData<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + gathered_fragments: &mut Vec, + parent: Rc>, + mc: mc::MutabilityCategory, + new_field_name: mc::FieldName, + origin_lp: &Rc>, + enum_variant_did: Option) + -> MovePathIndex { let opt_variant_did = match parent.kind { LpDowncast(_, variant_did) => Some(variant_did), LpVar(..) | LpUpvar(..) | LpExtend(..) 
=> enum_variant_did, diff --git a/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs b/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs index c85d69fa8a..83322215e3 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/gather_moves.rs @@ -100,7 +100,7 @@ pub fn gather_move_from_pat<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, let pat_span_path_opt = match move_pat.node { PatKind::Ident(_, ref path1, _) => { Some(MoveSpanAndPath{span: move_pat.span, - name: path1.node.name}) + name: path1.node}) }, _ => None, }; diff --git a/src/librustc_borrowck/borrowck/gather_loans/mod.rs b/src/librustc_borrowck/borrowck/gather_loans/mod.rs index 893c27f0ed..7d4f02bfe1 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/mod.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/mod.rs @@ -19,12 +19,10 @@ use borrowck::*; use borrowck::move_data::MoveData; use rustc::middle::expr_use_visitor as euv; -use rustc::infer; use rustc::middle::mem_categorization as mc; use rustc::middle::mem_categorization::Categorization; use rustc::middle::region; use rustc::ty::{self, TyCtxt}; -use rustc::traits::ProjectionMode; use syntax::ast; use syntax::codemap::Span; @@ -56,14 +54,8 @@ pub fn gather_loans_in_fn<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, }; let param_env = ty::ParameterEnvironment::for_item(bccx.tcx, fn_id); - let infcx = infer::new_infer_ctxt(bccx.tcx, - &bccx.tcx.tables, - Some(param_env), - ProjectionMode::AnyFinal); - { - let mut euv = euv::ExprUseVisitor::new(&mut glcx, &infcx); - euv.walk_fn(decl, body); - } + let infcx = bccx.tcx.borrowck_fake_infer_ctxt(param_env); + euv::ExprUseVisitor::new(&mut glcx, &infcx).walk_fn(decl, body); glcx.report_potential_errors(); let GatherLoanCtxt { all_loans, move_data, .. } = glcx; @@ -180,7 +172,7 @@ fn check_aliasability<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, req_kind: ty::BorrowKind) -> Result<(),()> { - let aliasability = cmt.freely_aliasable(bccx.tcx); + let aliasability = cmt.freely_aliasable(); debug!("check_aliasability aliasability={:?} req_kind={:?}", aliasability, req_kind); @@ -257,7 +249,7 @@ fn check_mutability<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, } impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { - pub fn tcx(&self) -> &'a TyCtxt<'tcx> { self.bccx.tcx } + pub fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.bccx.tcx } /// Guarantees that `cmt` is assignable, or reports an error. fn guarantee_assignment_valid(&mut self, @@ -524,22 +516,23 @@ impl<'a, 'tcx> GatherLoanCtxt<'a, 'tcx> { /// sure the loans being taken are sound. struct StaticInitializerCtxt<'a, 'tcx: 'a> { bccx: &'a BorrowckCtxt<'a, 'tcx>, + item_id: ast::NodeId } impl<'a, 'tcx, 'v> Visitor<'v> for StaticInitializerCtxt<'a, 'tcx> { fn visit_expr(&mut self, ex: &Expr) { if let hir::ExprAddrOf(mutbl, ref base) = ex.node { - let infcx = infer::new_infer_ctxt(self.bccx.tcx, - &self.bccx.tcx.tables, - None, - ProjectionMode::AnyFinal); + let param_env = ty::ParameterEnvironment::for_item(self.bccx.tcx, + self.item_id); + let infcx = self.bccx.tcx.borrowck_fake_infer_ctxt(param_env); let mc = mc::MemCategorizationContext::new(&infcx); let base_cmt = mc.cat_expr(&base).unwrap(); let borrow_kind = ty::BorrowKind::from_mutbl(mutbl); // Check that we don't allow borrows of unsafe static items. 
- if check_aliasability(self.bccx, ex.span, - BorrowViolation(euv::AddrOf), - base_cmt, borrow_kind).is_err() { + let err = check_aliasability(self.bccx, ex.span, + BorrowViolation(euv::AddrOf), + base_cmt, borrow_kind).is_err(); + if err { return; // reported an error, no sense in reporting more. } } @@ -548,12 +541,15 @@ impl<'a, 'tcx, 'v> Visitor<'v> for StaticInitializerCtxt<'a, 'tcx> { } } -pub fn gather_loans_in_static_initializer(bccx: &mut BorrowckCtxt, expr: &hir::Expr) { +pub fn gather_loans_in_static_initializer(bccx: &mut BorrowckCtxt, + item_id: ast::NodeId, + expr: &hir::Expr) { debug!("gather_loans_in_static_initializer(expr={:?})", expr); let mut sicx = StaticInitializerCtxt { - bccx: bccx + bccx: bccx, + item_id: item_id }; sicx.visit_expr(expr); diff --git a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs index 3d94f5b186..c1e8358857 100644 --- a/src/librustc_borrowck/borrowck/gather_loans/move_error.rs +++ b/src/librustc_borrowck/borrowck/gather_loans/move_error.rs @@ -72,7 +72,7 @@ fn report_move_errors<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, let mut err = report_cannot_move_out_of(bccx, error.move_from.clone()); let mut is_first_note = true; for move_to in &error.move_to_places { - note_move_destination(&mut err, move_to.span, + err = note_move_destination(err, move_to.span, move_to.name, is_first_note); is_first_note = false; } @@ -121,18 +121,25 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, Categorization::Deref(_, _, mc::Implicit(..)) | Categorization::Deref(_, _, mc::UnsafePtr(..)) | Categorization::StaticItem => { - struct_span_err!(bccx, move_from.span, E0507, + let mut err = struct_span_err!(bccx, move_from.span, E0507, "cannot move out of {}", - move_from.descriptive_string(bccx.tcx)) + move_from.descriptive_string(bccx.tcx)); + err.span_label( + move_from.span, + &format!("cannot move out of {}", move_from.descriptive_string(bccx.tcx)) + ); + err } Categorization::Interior(ref b, mc::InteriorElement(Kind::Index, _)) => { let expr = bccx.tcx.map.expect_expr(move_from.id); if let hir::ExprIndex(..) 
= expr.node { - struct_span_err!(bccx, move_from.span, E0508, - "cannot move out of type `{}`, \ - a non-copy fixed-size array", - b.ty) + let mut err = struct_span_err!(bccx, move_from.span, E0508, + "cannot move out of type `{}`, \ + a non-copy fixed-size array", + b.ty); + err.span_label(move_from.span, &format!("cannot move out of here")); + err } else { span_bug!(move_from.span, "this path should not cause illegal move"); } @@ -143,10 +150,12 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, match b.ty.sty { ty::TyStruct(def, _) | ty::TyEnum(def, _) if def.has_dtor() => { - struct_span_err!(bccx, move_from.span, E0509, - "cannot move out of type `{}`, \ - which defines the `Drop` trait", - b.ty) + let mut err = struct_span_err!(bccx, move_from.span, E0509, + "cannot move out of type `{}`, \ + which implements the `Drop` trait", + b.ty); + err.span_label(move_from.span, &format!("cannot move out of here")); + err }, _ => { span_bug!(move_from.span, "this path should not cause illegal move"); @@ -159,23 +168,20 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>, } } -fn note_move_destination(err: &mut DiagnosticBuilder, +fn note_move_destination(mut err: DiagnosticBuilder, move_to_span: codemap::Span, pat_name: ast::Name, - is_first_note: bool) { + is_first_note: bool) -> DiagnosticBuilder { if is_first_note { - err.span_note( + err.span_label( move_to_span, - "attempting to move value to here"); - err.fileline_help( - move_to_span, - &format!("to prevent the move, \ - use `ref {0}` or `ref mut {0}` to capture value by \ - reference", + &format!("hint: to prevent move, use `ref {0}` or `ref mut {0}`", pat_name)); + err } else { - err.span_note(move_to_span, - &format!("and here (use `ref {0}` or `ref mut {0}`)", + err.span_label(move_to_span, + &format!("...and here (use `ref {0}` or `ref mut {0}`)", pat_name)); + err } } diff --git a/src/librustc_borrowck/borrowck/mir/dataflow.rs b/src/librustc_borrowck/borrowck/mir/dataflow.rs index a8176c060a..d6dd176e3b 100644 --- a/src/librustc_borrowck/borrowck/mir/dataflow.rs +++ b/src/librustc_borrowck/borrowck/mir/dataflow.rs @@ -458,8 +458,8 @@ impl DataflowState { } -impl<'tcx> DataflowState> { - pub fn new_move_analysis(mir: &Mir<'tcx>, tcx: &TyCtxt<'tcx>) -> Self { +impl<'a, 'tcx> DataflowState> { + pub fn new_move_analysis(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self { let move_data = MoveData::gather_moves(mir, tcx); DataflowState::new(mir, move_data) } diff --git a/src/librustc_borrowck/borrowck/mir/gather_moves.rs b/src/librustc_borrowck/borrowck/mir/gather_moves.rs index 2b1b743afe..bf3d671bdb 100644 --- a/src/librustc_borrowck/borrowck/mir/gather_moves.rs +++ b/src/librustc_borrowck/borrowck/mir/gather_moves.rs @@ -482,8 +482,8 @@ impl<'a, 'tcx> MovePathDataBuilder<'a, 'tcx> { } } -impl<'tcx> MoveData<'tcx> { - pub fn gather_moves(mir: &Mir<'tcx>, tcx: &TyCtxt<'tcx>) -> Self { +impl<'a, 'tcx> MoveData<'tcx> { + pub fn gather_moves(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self { gather_moves(mir, tcx) } } @@ -494,7 +494,7 @@ enum StmtKind { Aggregate, Drop, CallFn, CallArg, Return, } -fn gather_moves<'tcx>(mir: &Mir<'tcx>, tcx: &TyCtxt<'tcx>) -> MoveData<'tcx> { +fn gather_moves<'a, 'tcx>(mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MoveData<'tcx> { use self::StmtKind as SK; let bbs = mir.all_basic_blocks(); @@ -667,7 +667,7 @@ fn gather_moves<'tcx>(mir: &Mir<'tcx>, tcx: &TyCtxt<'tcx>) -> MoveData<'tcx> { } struct BlockContext<'b, 'a: 'b, 'tcx: 'a> { - tcx: &'b 
TyCtxt<'tcx>, + tcx: TyCtxt<'b, 'tcx, 'tcx>, moves: &'b mut Vec, builder: MovePathDataBuilder<'a, 'tcx>, path_map: &'b mut Vec>, diff --git a/src/librustc_borrowck/borrowck/mir/mod.rs b/src/librustc_borrowck/borrowck/mir/mod.rs index 672faea58f..bec5ae03d3 100644 --- a/src/librustc_borrowck/borrowck/mir/mod.rs +++ b/src/librustc_borrowck/borrowck/mir/mod.rs @@ -46,11 +46,11 @@ pub fn borrowck_mir<'b, 'a: 'b, 'tcx: 'a>( } let mut mbcx = MirBorrowckCtxt { + flow_state: DataflowState::new_move_analysis(mir, bcx.tcx), bcx: bcx, mir: mir, node_id: id, attributes: attributes, - flow_state: DataflowState::new_move_analysis(mir, bcx.tcx), }; for bb in mir.all_basic_blocks() { diff --git a/src/librustc_borrowck/borrowck/mod.rs b/src/librustc_borrowck/borrowck/mod.rs index d7c928b8d6..36222e172b 100644 --- a/src/librustc_borrowck/borrowck/mod.rs +++ b/src/librustc_borrowck/borrowck/mod.rs @@ -34,14 +34,14 @@ use rustc::middle::free_region::FreeRegionMap; use rustc::middle::mem_categorization as mc; use rustc::middle::mem_categorization::Categorization; use rustc::middle::region; -use rustc::ty::{self, Ty, TyCtxt}; +use rustc::ty::{self, TyCtxt}; use std::fmt; use std::mem; use std::rc::Rc; use syntax::ast; use syntax::attr::AttrMetaMethods; -use syntax::codemap::Span; +use syntax::codemap::{MultiSpan, Span}; use syntax::errors::DiagnosticBuilder; use rustc::hir; @@ -87,20 +87,20 @@ impl<'a, 'tcx, 'v> Visitor<'v> for BorrowckCtxt<'a, 'tcx> { fn visit_trait_item(&mut self, ti: &hir::TraitItem) { if let hir::ConstTraitItem(_, Some(ref expr)) = ti.node { - gather_loans::gather_loans_in_static_initializer(self, &expr); + gather_loans::gather_loans_in_static_initializer(self, ti.id, &expr); } intravisit::walk_trait_item(self, ti); } fn visit_impl_item(&mut self, ii: &hir::ImplItem) { if let hir::ImplItemKind::Const(_, ref expr) = ii.node { - gather_loans::gather_loans_in_static_initializer(self, &expr); + gather_loans::gather_loans_in_static_initializer(self, ii.id, &expr); } intravisit::walk_impl_item(self, ii); } } -pub fn check_crate<'tcx>(tcx: &TyCtxt<'tcx>, mir_map: &MirMap<'tcx>) { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, mir_map: &MirMap<'tcx>) { let mut bccx = BorrowckCtxt { tcx: tcx, mir_map: Some(mir_map), @@ -142,7 +142,7 @@ fn borrowck_item(this: &mut BorrowckCtxt, item: &hir::Item) { match item.node { hir::ItemStatic(_, _, ref ex) | hir::ItemConst(_, ref ex) => { - gather_loans::gather_loans_in_static_initializer(this, &ex); + gather_loans::gather_loans_in_static_initializer(this, item.id, &ex); } _ => { } } @@ -244,7 +244,7 @@ fn build_borrowck_dataflow_data<'a, 'tcx>(this: &mut BorrowckCtxt<'a, 'tcx>, /// Accessor for introspective clients inspecting `AnalysisData` and /// the `BorrowckCtxt` itself , e.g. the flowgraph visualizer. pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>( - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, mir_map: Option<&'a MirMap<'tcx>>, fn_parts: FnParts<'a>, cfg: &cfg::CFG) @@ -278,7 +278,7 @@ pub fn build_borrowck_dataflow_data_for_fn<'a, 'tcx>( // Type definitions pub struct BorrowckCtxt<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, // Hacky. As we visit various fns, we have to load up the // free-region map for each one. 
This map is computed by during @@ -412,10 +412,10 @@ pub enum LoanPathElem { } pub fn closure_to_block(closure_id: ast::NodeId, - tcx: &TyCtxt) -> ast::NodeId { + tcx: TyCtxt) -> ast::NodeId { match tcx.map.get(closure_id) { hir_map::NodeExpr(expr) => match expr.node { - hir::ExprClosure(_, _, ref block) => { + hir::ExprClosure(_, _, ref block, _) => { block.id } _ => { @@ -426,8 +426,8 @@ pub fn closure_to_block(closure_id: ast::NodeId, } } -impl<'tcx> LoanPath<'tcx> { - pub fn kill_scope(&self, tcx: &TyCtxt<'tcx>) -> region::CodeExtent { +impl<'a, 'tcx> LoanPath<'tcx> { + pub fn kill_scope(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> region::CodeExtent { match self.kind { LpVar(local_id) => tcx.region_maps.var_scope(local_id), LpUpvar(upvar_id) => { @@ -620,36 +620,39 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } // General fallback. + let span = err.span.clone(); let mut db = self.struct_span_err( err.span, &self.bckerr_to_string(&err)); - self.note_and_explain_bckerr(&mut db, err); + self.note_and_explain_bckerr(&mut db, err, span); db.emit(); } - pub fn report_use_of_moved_value<'b>(&self, - use_span: Span, - use_kind: MovedValueUseKind, - lp: &LoanPath<'tcx>, - the_move: &move_data::Move, - moved_lp: &LoanPath<'tcx>, - param_env: &ty::ParameterEnvironment<'b,'tcx>) { - let verb = match use_kind { - MovedInUse => "use", - MovedInCapture => "capture", + pub fn report_use_of_moved_value(&self, + use_span: Span, + use_kind: MovedValueUseKind, + lp: &LoanPath<'tcx>, + the_move: &move_data::Move, + moved_lp: &LoanPath<'tcx>, + _param_env: &ty::ParameterEnvironment<'tcx>) { + let (verb, verb_participle) = match use_kind { + MovedInUse => ("use", "used"), + MovedInCapture => ("capture", "captured"), }; - let (ol, moved_lp_msg, mut err) = match the_move.kind { + let (_ol, _moved_lp_msg, mut err) = match the_move.kind { move_data::Declared => { - let err = struct_span_err!( + // If this is an uninitialized variable, just emit a simple warning + // and return. + struct_span_err!( self.tcx.sess, use_span, E0381, "{} of possibly uninitialized variable: `{}`", verb, - self.loan_path_to_string(lp)); - - (self.loan_path_to_string(moved_lp), - String::new(), - err) + self.loan_path_to_string(lp)) + .span_label(use_span, &format!("use of possibly uninitialized `{}`", + self.loan_path_to_string(lp))) + .emit(); + return; } _ => { // If moved_lp is something like `x.a`, and lp is something like `x.b`, we would @@ -688,122 +691,54 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess, use_span, E0382, "{} of {}moved value: `{}`", verb, msg, nl); - (ol, moved_lp_msg, err) + (ol, moved_lp_msg, err)} + }; + + // Get type of value and span where it was previously + // moved. + let (move_span, move_note) = match the_move.kind { + move_data::Declared => { + unreachable!(); } + + move_data::MoveExpr | + move_data::MovePat => + (self.tcx.map.span(the_move.id), ""), + + move_data::Captured => + (match self.tcx.map.expect_expr(the_move.id).node { + hir::ExprClosure(_, _, _, fn_decl_span) => fn_decl_span, + ref r => bug!("Captured({}) maps to non-closure: {:?}", + the_move.id, r), + }, " (into closure)"), }; - match the_move.kind { - move_data::Declared => {} + // Annotate the use and the move in the span. Watch out for + // the case where the use and the move are the same. This + // means the use is in a loop. 
+ err = if use_span == move_span { + err.span_label( + use_span, + &format!("value moved{} here in previous iteration of loop", + move_note)); + err + } else { + err.span_label(use_span, &format!("value {} here after move", verb_participle)) + .span_label(move_span, &format!("value moved{} here", move_note)); + err + }; - move_data::MoveExpr => { - let (expr_ty, expr_span) = match self.tcx - .map - .find(the_move.id) { - Some(hir_map::NodeExpr(expr)) => { - (self.tcx.expr_ty_adjusted(&expr), expr.span) - } - r => { - bug!("MoveExpr({}) maps to {:?}, not Expr", - the_move.id, - r) - } - }; - let (suggestion, _) = - move_suggestion(param_env, expr_span, expr_ty, ("moved by default", "")); - // If the two spans are the same, it's because the expression will be evaluated - // multiple times. Avoid printing the same span and adjust the wording so it makes - // more sense that it's from multiple evalutations. - if expr_span == use_span { - err.note( - &format!("`{}` was previously moved here{} because it has type `{}`, \ - which is {}", - ol, - moved_lp_msg, - expr_ty, - suggestion)); - } else { - err.span_note( - expr_span, - &format!("`{}` moved here{} because it has type `{}`, which is {}", - ol, - moved_lp_msg, - expr_ty, - suggestion)); - } - } + err.note(&format!("move occurs because `{}` has type `{}`, \ + which does not implement the `Copy` trait", + self.loan_path_to_string(moved_lp), + moved_lp.ty)); - move_data::MovePat => { - let pat_ty = self.tcx.node_id_to_type(the_move.id); - let span = self.tcx.map.span(the_move.id); - err.span_note(span, - &format!("`{}` moved here{} because it has type `{}`, \ - which is moved by default", - ol, - moved_lp_msg, - pat_ty)); - match self.tcx.sess.codemap().span_to_snippet(span) { - Ok(string) => { - err.span_suggestion( - span, - &format!("if you would like to borrow the value instead, \ - use a `ref` binding as shown:"), - format!("ref {}", string)); - }, - Err(_) => { - err.fileline_help(span, - "use `ref` to override"); - }, - } - } + // Note: we used to suggest adding a `ref binding` or calling + // `clone` but those suggestions have been removed because + // they are often not what you actually want to do, and were + // not considered particularly helpful. 
- move_data::Captured => { - let (expr_ty, expr_span) = match self.tcx - .map - .find(the_move.id) { - Some(hir_map::NodeExpr(expr)) => { - (self.tcx.expr_ty_adjusted(&expr), expr.span) - } - r => { - bug!("Captured({}) maps to {:?}, not Expr", - the_move.id, - r) - } - }; - let (suggestion, help) = - move_suggestion(param_env, - expr_span, - expr_ty, - ("moved by default", - "make a copy and capture that instead to override")); - err.span_note( - expr_span, - &format!("`{}` moved into closure environment here{} because it \ - has type `{}`, which is {}", - ol, - moved_lp_msg, - moved_lp.ty, - suggestion)); - err.fileline_help(expr_span, help); - } - } err.emit(); - - fn move_suggestion<'a,'tcx>(param_env: &ty::ParameterEnvironment<'a,'tcx>, - span: Span, - ty: Ty<'tcx>, - default_msgs: (&'static str, &'static str)) - -> (&'static str, &'static str) { - match ty.sty { - _ => { - if ty.moves_by_default(param_env, span) { - ("non-copyable", - "perhaps you meant to use `clone()`?") - } else { - default_msgs - } - } - } - } } pub fn report_partial_reinitialization_of_uninitialized_structure( @@ -833,19 +768,20 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { self.tcx.sess.span_err(s, m); } - pub fn struct_span_err(&self, s: Span, m: &str) -> DiagnosticBuilder<'a> { + pub fn struct_span_err>(&self, s: S, m: &str) + -> DiagnosticBuilder<'a> { self.tcx.sess.struct_span_err(s, m) } - pub fn struct_span_err_with_code(&self, - s: Span, - msg: &str, - code: &str) - -> DiagnosticBuilder<'a> { + pub fn struct_span_err_with_code>(&self, + s: S, + msg: &str, + code: &str) + -> DiagnosticBuilder<'a> { self.tcx.sess.struct_span_err_with_code(s, msg, code) } - pub fn span_err_with_code(&self, s: Span, msg: &str, code: &str) { + pub fn span_err_with_code>(&self, s: S, msg: &str, code: &str) { self.tcx.sess.span_err_with_code(s, msg, code); } @@ -982,8 +918,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { }; if is_closure { - err.fileline_help(span, - "closures behind references must be called via `&mut`"); + err.help("closures behind references must be called via `&mut`"); } err.emit(); } @@ -1017,7 +952,8 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { .emit(); } - pub fn note_and_explain_bckerr(&self, db: &mut DiagnosticBuilder, err: BckError<'tcx>) { + pub fn note_and_explain_bckerr(&self, db: &mut DiagnosticBuilder, err: BckError<'tcx>, + error_span: Span) { let code = err.code; match code { err_mutbl => { @@ -1041,12 +977,12 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { if let Categorization::Local(local_id) = err.cmt.cat { let span = self.tcx.map.span(local_id); if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) { - db.span_suggestion( - span, - &format!("to make the {} mutable, use `mut` as shown:", - self.cmt_to_string(&err.cmt)), - format!("mut {}", snippet)); + if snippet != "self" { + db.span_label(span, + &format!("use `mut {}` here to make mutable", snippet)); + } } + db.span_label(error_span, &format!("cannot borrow mutably")); } } } @@ -1064,6 +1000,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { super_scope, ""); if let Some(span) = statement_scope_span(self.tcx, super_scope) { + db.span_label(error_span, &format!("does not live long enough")); db.span_help(span, "consider using a `let` binding to increase its lifetime"); } @@ -1178,7 +1115,7 @@ impl<'a, 'tcx> BorrowckCtxt<'a, 'tcx> { } } -fn statement_scope_span(tcx: &TyCtxt, region: ty::Region) -> Option { +fn statement_scope_span(tcx: TyCtxt, region: ty::Region) -> Option { match region { ty::ReScope(scope) => { match 
tcx.map.find(scope.node_id(&tcx.region_maps)) { diff --git a/src/librustc_borrowck/borrowck/move_data.rs b/src/librustc_borrowck/borrowck/move_data.rs index 80e408e9a6..a742260018 100644 --- a/src/librustc_borrowck/borrowck/move_data.rs +++ b/src/librustc_borrowck/borrowck/move_data.rs @@ -212,7 +212,7 @@ fn loan_path_is_precise(loan_path: &LoanPath) -> bool { } } -impl<'tcx> MoveData<'tcx> { +impl<'a, 'tcx> MoveData<'tcx> { pub fn new() -> MoveData<'tcx> { MoveData { paths: RefCell::new(Vec::new()), @@ -272,8 +272,7 @@ impl<'tcx> MoveData<'tcx> { /// Returns the existing move path index for `lp`, if any, and otherwise adds a new index for /// `lp` and any of its base paths that do not yet have an index. - pub fn move_path(&self, - tcx: &TyCtxt<'tcx>, + pub fn move_path(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, lp: Rc>) -> MovePathIndex { match self.path_map.borrow().get(&lp) { Some(&index) => { @@ -364,8 +363,7 @@ impl<'tcx> MoveData<'tcx> { } /// Adds a new move entry for a move of `lp` that occurs at location `id` with kind `kind`. - pub fn add_move(&self, - tcx: &TyCtxt<'tcx>, + pub fn add_move(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, lp: Rc>, id: ast::NodeId, kind: MoveKind) { @@ -392,8 +390,7 @@ impl<'tcx> MoveData<'tcx> { /// Adds a new record for an assignment to `lp` that occurs at location `id` with the given /// `span`. - pub fn add_assignment(&self, - tcx: &TyCtxt<'tcx>, + pub fn add_assignment(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, lp: Rc>, assign_id: ast::NodeId, span: Span, @@ -437,8 +434,7 @@ impl<'tcx> MoveData<'tcx> { /// variant `lp`, that occurs at location `pattern_id`. (One /// should be able to recover the span info from the /// `pattern_id` and the ast_map, I think.) - pub fn add_variant_match(&self, - tcx: &TyCtxt<'tcx>, + pub fn add_variant_match(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, lp: Rc>, pattern_id: ast::NodeId, base_lp: Rc>, @@ -461,7 +457,7 @@ impl<'tcx> MoveData<'tcx> { self.variant_matches.borrow_mut().push(variant_match); } - fn fixup_fragment_sets(&self, tcx: &TyCtxt<'tcx>) { + fn fixup_fragment_sets(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) { fragments::fixup_fragment_sets(self, tcx) } @@ -470,8 +466,7 @@ impl<'tcx> MoveData<'tcx> { /// Moves are generated by moves and killed by assignments and /// scoping. Assignments are generated by assignment to variables and /// killed by scoping. See `README.md` for more details. - fn add_gen_kills(&self, - tcx: &TyCtxt<'tcx>, + fn add_gen_kills(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, dfcx_moves: &mut MoveDataFlow, dfcx_assign: &mut AssignDataFlow) { for (i, the_move) in self.moves.borrow().iter().enumerate() { @@ -600,7 +595,7 @@ impl<'tcx> MoveData<'tcx> { impl<'a, 'tcx> FlowedMoveData<'a, 'tcx> { pub fn new(move_data: MoveData<'tcx>, - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, cfg: &cfg::CFG, id_range: IdRange, decl: &hir::FnDecl, diff --git a/src/librustc_borrowck/diagnostics.rs b/src/librustc_borrowck/diagnostics.rs index 7f6fd9de3d..116e347689 100644 --- a/src/librustc_borrowck/diagnostics.rs +++ b/src/librustc_borrowck/diagnostics.rs @@ -286,6 +286,70 @@ You can read more about cell types in the API documentation: https://doc.rust-lang.org/std/cell/ "##, +E0389: r##" +An attempt was made to mutate data using a non-mutable reference. This +commonly occurs when attempting to assign to a non-mutable reference of a +mutable reference (`&(&mut T)`). 
+ +Example of erroneous code: + +```compile_fail +struct FancyNum { + num: u8 +} + +fn main() { + let mut fancy = FancyNum{ num: 5 }; + let fancy_ref = &(&mut fancy); + fancy_ref.num = 6; // error: cannot assign to data in a `&` reference + println!("{}", fancy_ref.num); +} +``` + +Here, `&mut fancy` is mutable, but `&(&mut fancy)` is not. Creating an +immutable reference to a value borrows it immutably. There can be multiple +references of type `&(&mut T)` that point to the same value, so they must be +immutable to prevent multiple mutable references to the same value. + +To fix this, either remove the outer reference: + +``` +struct FancyNum { + num: u8 +} + +fn main() { + let mut fancy = FancyNum{ num: 5 }; + + let fancy_ref = &mut fancy; + // `fancy_ref` is now &mut FancyNum, rather than &(&mut FancyNum) + + fancy_ref.num = 6; // No error! + + println!("{}", fancy_ref.num); +} +``` + +Or make the outer reference mutable: + +``` +struct FancyNum { + num: u8 +} + +fn main() { + let mut fancy = FancyNum{ num: 5 }; + + let fancy_ref = &mut (&mut fancy); + // `fancy_ref` is now &mut(&mut FancyNum), rather than &(&mut FancyNum) + + fancy_ref.num = 6; // No error! + + println!("{}", fancy_ref.num); +} +``` +"##, + E0499: r##" A variable was borrowed as mutable more than once. Erroneous code example: @@ -314,6 +378,424 @@ let c = &i; // still ok! ``` "##, +E0500: r##" +A borrowed variable was used in another closure. Example of erroneous code: + +```compile_fail +fn you_know_nothing(jon_snow: &mut i32) { + let nights_watch = || { + *jon_snow = 2; + }; + let starks = || { + *jon_snow = 3; // error: closure requires unique access to `jon_snow` + // but it is already borrowed + }; +} +``` + +In here, `jon_snow` is already borrowed by the `nights_watch` closure, so it +cannot be borrowed by the `starks` closure at the same time. To fix this issue, +you can put the closure in its own scope: + +``` +fn you_know_nothing(jon_snow: &mut i32) { + { + let nights_watch = || { + *jon_snow = 2; + }; + } // At this point, `jon_snow` is free. + let starks = || { + *jon_snow = 3; + }; +} +``` + +Or, if the type implements the `Clone` trait, you can clone it between +closures: + +``` +fn you_know_nothing(jon_snow: &mut i32) { + let mut jon_copy = jon_snow.clone(); + let nights_watch = || { + jon_copy = 2; + }; + let starks = || { + *jon_snow = 3; + }; +} +``` +"##, + +E0501: r##" +This error indicates that a mutable variable is being used while it is still +captured by a closure. Because the closure has borrowed the variable, it is not +available for use until the closure goes out of scope. + +Note that a capture will either move or borrow a variable, but in this +situation, the closure is borrowing the variable. Take a look at +http://rustbyexample.com/fn/closures/capture.html for more information about +capturing. + +Example of erroneous code: + +```compile_fail +fn inside_closure(x: &mut i32) { + // Actions which require unique access +} + +fn outside_closure(x: &mut i32) { + // Actions which require unique access +} + +fn foo(a: &mut i32) { + let bar = || { + inside_closure(a) + }; + outside_closure(a); // error: cannot borrow `*a` as mutable because previous + // closure requires unique access. +} +``` + +To fix this error, you can place the closure in its own scope: + +``` +fn inside_closure(x: &mut i32) {} +fn outside_closure(x: &mut i32) {} + +fn foo(a: &mut i32) { + { + let bar = || { + inside_closure(a) + }; + } // borrow on `a` ends. + outside_closure(a); // ok! 
+} +``` + +Or you can pass the variable as a parameter to the closure: + +``` +fn inside_closure(x: &mut i32) {} +fn outside_closure(x: &mut i32) {} + +fn foo(a: &mut i32) { + let bar = |s: &mut i32| { + inside_closure(s) + }; + outside_closure(a); + bar(a); +} +``` + +It may be possible to define the closure later: + +``` +fn inside_closure(x: &mut i32) {} +fn outside_closure(x: &mut i32) {} + +fn foo(a: &mut i32) { + outside_closure(a); + let bar = || { + inside_closure(a) + }; +} +``` +"##, + +E0502: r##" +This error indicates that you are trying to borrow a variable as mutable when it +has already been borrowed as immutable. + +Example of erroneous code: + +```compile_fail +fn bar(x: &mut i32) {} +fn foo(a: &mut i32) { + let ref y = a; // a is borrowed as immutable. + bar(a); // error: cannot borrow `*a` as mutable because `a` is also borrowed + // as immutable +} +``` +To fix this error, ensure that you don't have any other references to the +variable before trying to access it mutably: +``` +fn bar(x: &mut i32) {} +fn foo(a: &mut i32) { + bar(a); + let ref y = a; // ok! +} +``` +For more information on the rust ownership system, take a look at +https://doc.rust-lang.org/stable/book/references-and-borrowing.html. +"##, + +E0504: r##" +This error occurs when an attempt is made to move a borrowed variable into a +closure. + +Example of erroneous code: + +```compile_fail +struct FancyNum { + num: u8 +} + +fn main() { + let fancy_num = FancyNum { num: 5 }; + let fancy_ref = &fancy_num; + + let x = move || { + println!("child function: {}", fancy_num.num); + // error: cannot move `fancy_num` into closure because it is borrowed + }; + + x(); + println!("main function: {}", fancy_ref.num); +} +``` + +Here, `fancy_num` is borrowed by `fancy_ref` and so cannot be moved into +the closure `x`. There is no way to move a value into a closure while it is +borrowed, as that would invalidate the borrow. + +If the closure can't outlive the value being moved, try using a reference +rather than moving: + +``` +struct FancyNum { + num: u8 +} + +fn main() { + let fancy_num = FancyNum { num: 5 }; + let fancy_ref = &fancy_num; + + let x = move || { + // fancy_ref is usable here because it doesn't move `fancy_num` + println!("child function: {}", fancy_ref.num); + }; + + x(); + + println!("main function: {}", fancy_num.num); +} +``` + +If the value has to be borrowed and then moved, try limiting the lifetime of +the borrow using a scoped block: + +``` +struct FancyNum { + num: u8 +} + +fn main() { + let fancy_num = FancyNum { num: 5 }; + + { + let fancy_ref = &fancy_num; + println!("main function: {}", fancy_ref.num); + // `fancy_ref` goes out of scope here + } + + let x = move || { + // `fancy_num` can be moved now (no more references exist) + println!("child function: {}", fancy_num.num); + }; + + x(); +} +``` + +If the lifetime of a reference isn't enough, such as in the case of threading, +consider using an `Arc` to create a reference-counted value: + +``` +use std::sync::Arc; +use std::thread; + +struct FancyNum { + num: u8 +} + +fn main() { + let fancy_ref1 = Arc::new(FancyNum { num: 5 }); + let fancy_ref2 = fancy_ref1.clone(); + + let x = thread::spawn(move || { + // `fancy_ref1` can be moved and has a `'static` lifetime + println!("child thread: {}", fancy_ref1.num); + }); + + x.join().expect("child thread should finish"); + println!("main thread: {}", fancy_ref2.num); +} +``` +"##, + +E0506: r##" +This error occurs when an attempt is made to assign to a borrowed value. 
+ +Example of erroneous code: + +```compile_fail +struct FancyNum { + num: u8 +} + +fn main() { + let mut fancy_num = FancyNum { num: 5 }; + let fancy_ref = &fancy_num; + fancy_num = FancyNum { num: 6 }; + // error: cannot assign to `fancy_num` because it is borrowed + + println!("Num: {}, Ref: {}", fancy_num.num, fancy_ref.num); +} +``` + +Because `fancy_ref` still holds a reference to `fancy_num`, `fancy_num` can't +be assigned to a new value as it would invalidate the reference. + +Alternatively, we can move out of `fancy_num` into a second `fancy_num`: + +``` +struct FancyNum { + num: u8 +} + +fn main() { + let mut fancy_num = FancyNum { num: 5 }; + let moved_num = fancy_num; + fancy_num = FancyNum { num: 6 }; + + println!("Num: {}, Moved num: {}", fancy_num.num, moved_num.num); +} +``` + +If the value has to be borrowed, try limiting the lifetime of the borrow using +a scoped block: + +``` +struct FancyNum { + num: u8 +} + +fn main() { + let mut fancy_num = FancyNum { num: 5 }; + + { + let fancy_ref = &fancy_num; + println!("Ref: {}", fancy_ref.num); + } + + // Works because `fancy_ref` is no longer in scope + fancy_num = FancyNum { num: 6 }; + println!("Num: {}", fancy_num.num); +} +``` + +Or by moving the reference into a function: + +``` +struct FancyNum { + num: u8 +} + +fn main() { + let mut fancy_num = FancyNum { num: 5 }; + + print_fancy_ref(&fancy_num); + + // Works because function borrow has ended + fancy_num = FancyNum { num: 6 }; + println!("Num: {}", fancy_num.num); +} + +fn print_fancy_ref(fancy_ref: &FancyNum){ + println!("Ref: {}", fancy_ref.num); +} +``` +"##, + +E0505: r##" +A value was moved out while it was still borrowed. +Erroneous code example: + +```compile_fail +struct Value {} + +fn eat(val: Value) {} + +fn main() { + let x = Value{}; + { + let _ref_to_val: &Value = &x; + eat(x); + } +} +``` + +Here, the function `eat` takes the ownership of `x`. However, +`x` cannot be moved because it was borrowed to `_ref_to_val`. +To fix that you can do few different things: + +* Try to avoid moving the variable. +* Release borrow before move. +* Implement the `Copy` trait on the type. + +Examples: + +``` +struct Value {} + +fn eat(val: &Value) {} + +fn main() { + let x = Value{}; + { + let _ref_to_val: &Value = &x; + eat(&x); // pass by reference, if it's possible + } +} +``` + +Or: + +``` +struct Value {} + +fn eat(val: Value) {} + +fn main() { + let x = Value{}; + { + let _ref_to_val: &Value = &x; + } + eat(x); // release borrow and then move it. +} +``` + +Or: + +``` +#[derive(Clone, Copy)] // implement Copy trait +struct Value {} + +fn eat(val: Value) {} + +fn main() { + let x = Value{}; + { + let _ref_to_val: &Value = &x; + eat(x); // it will be copied here. + } +} +``` + +You can find more information about borrowing in the rust-book: +http://doc.rust-lang.org/stable/book/references-and-borrowing.html +"##, + E0507: r##" You tried to move out of a value which was borrowed. Erroneous code example: @@ -429,19 +911,107 @@ You can find more information about borrowing in the rust-book: http://doc.rust-lang.org/stable/book/references-and-borrowing.html "##, +E0509: r##" +This error occurs when an attempt is made to move out of a value whose type +implements the `Drop` trait. 
+ +Example of erroneous code: + +```compile_fail +struct FancyNum { + num: usize +} + +struct DropStruct { + fancy: FancyNum +} + +impl Drop for DropStruct { + fn drop(&mut self) { + // Destruct DropStruct, possibly using FancyNum + } +} + +fn main() { + let drop_struct = DropStruct{fancy: FancyNum{num: 5}}; + let fancy_field = drop_struct.fancy; // Error E0509 + println!("Fancy: {}", fancy_field.num); + // implicit call to `drop_struct.drop()` as drop_struct goes out of scope +} +``` + +Here, we tried to move a field out of a struct of type `DropStruct` which +implements the `Drop` trait. However, a struct cannot be dropped if one or +more of its fields have been moved. + +Structs implementing the `Drop` trait have an implicit destructor that gets +called when they go out of scope. This destructor may use the fields of the +struct, so moving out of the struct could make it impossible to run the +destructor. Therefore, we must think of all values whose type implements the +`Drop` trait as single units whose fields cannot be moved. + +This error can be fixed by creating a reference to the fields of a struct, +enum, or tuple using the `ref` keyword: + +``` +struct FancyNum { + num: usize +} + +struct DropStruct { + fancy: FancyNum +} + +impl Drop for DropStruct { + fn drop(&mut self) { + // Destruct DropStruct, possibly using FancyNum + } +} + +fn main() { + let drop_struct = DropStruct{fancy: FancyNum{num: 5}}; + let ref fancy_field = drop_struct.fancy; // No more errors! + println!("Fancy: {}", fancy_field.num); + // implicit call to `drop_struct.drop()` as drop_struct goes out of scope +} +``` + +Note that this technique can also be used in the arms of a match expression: + +``` +struct FancyNum { + num: usize +} + +enum DropEnum { + Fancy(FancyNum) +} + +impl Drop for DropEnum { + fn drop(&mut self) { + // Destruct DropEnum, possibly using FancyNum + } +} + +fn main() { + // Creates and enum of type `DropEnum`, which implements `Drop` + let drop_enum = DropEnum::Fancy(FancyNum{num: 10}); + match drop_enum { + // Creates a reference to the inside of `DropEnum::Fancy` + DropEnum::Fancy(ref fancy_field) => // No error! + println!("It was fancy-- {}!", fancy_field.num), + } + // implicit call to `drop_enum.drop()` as drop_enum goes out of scope +} +``` +"##, + } register_diagnostics! { E0385, // {} in an aliasable location E0388, // {} in a static location - E0389, // {} in a `&` reference - E0500, // closure requires unique access to `..` but .. is already borrowed - E0501, // cannot borrow `..`.. as .. because previous closure requires unique access - E0502, // cannot borrow `..`.. as .. because .. is also borrowed as ... E0503, // cannot use `..` because it was mutably borrowed - E0504, // cannot move `..` into closure because it is borrowed - E0505, // cannot move out of `..` because it is borrowed - E0506, // cannot assign to `..` because it is borrowed E0508, // cannot move out of type `..`, a non-copy fixed-size array - E0509, // cannot move out of type `..`, which defines the `Drop` trait + E0524, // two closures require unique access to `..` at the same time } diff --git a/src/librustc_const_eval/check_match.rs b/src/librustc_const_eval/check_match.rs index 1e662d456d..2fb5d79658 100644 --- a/src/librustc_const_eval/check_match.rs +++ b/src/librustc_const_eval/check_match.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
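The `register_diagnostics!` list above introduces `E0524` ("two closures require
unique access to `..` at the same time") without a long-form explanation. As a
minimal illustrative sketch, not taken from this patch, of the kind of code that
runs into it, two closures capturing the same `&mut` binding at once:

```compile_fail
fn set(x: &mut i32) {
    *x += 4;
}

fn twice(x: &mut i32) {
    // The first closure takes a unique borrow of `x` for the rest of the scope...
    let c1 = || set(x);
    // ...so a second closure cannot capture `x` at the same time (E0524).
    let c2 = || set(x);
}
```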
-pub use self::Constructor::*; +use self::Constructor::*; use self::Usefulness::*; use self::WitnessPreference::*; @@ -22,7 +22,6 @@ use rustc::hir::def_id::{DefId}; use rustc::middle::expr_use_visitor::{ConsumeMode, Delegate, ExprUseVisitor}; use rustc::middle::expr_use_visitor::{LoanCause, MutateMode}; use rustc::middle::expr_use_visitor as euv; -use rustc::infer; use rustc::middle::mem_categorization::{cmt}; use rustc::hir::pat_util::*; use rustc::traits::ProjectionMode; @@ -106,8 +105,8 @@ impl<'a> FromIterator> for Matrix<'a> { //NOTE: appears to be the only place other then InferCtxt to contain a ParamEnv pub struct MatchCheckCtxt<'a, 'tcx: 'a> { - pub tcx: &'a TyCtxt<'tcx>, - pub param_env: ParameterEnvironment<'a, 'tcx>, + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, + pub param_env: ParameterEnvironment<'tcx>, } #[derive(Clone, PartialEq)] @@ -153,7 +152,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for MatchCheckCtxt<'a, 'tcx> { } } -pub fn check_crate(tcx: &TyCtxt) { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { tcx.visit_all_items_in_krate(DepNode::MatchCheck, &mut MatchCheckCtxt { tcx: tcx, param_env: tcx.empty_parameter_environment(), @@ -241,24 +240,24 @@ fn check_expr(cx: &mut MatchCheckCtxt, ex: &hir::Expr) { fn check_for_bindings_named_the_same_as_variants(cx: &MatchCheckCtxt, pat: &Pat) { pat.walk(|p| { match p.node { - PatKind::Ident(hir::BindByValue(hir::MutImmutable), ident, None) => { + PatKind::Ident(hir::BindByValue(hir::MutImmutable), name, None) => { let pat_ty = cx.tcx.pat_ty(p); if let ty::TyEnum(edef, _) = pat_ty.sty { let def = cx.tcx.def_map.borrow().get(&p.id).map(|d| d.full_def()); if let Some(Def::Local(..)) = def { if edef.variants.iter().any(|variant| - variant.name == ident.node.unhygienic_name + variant.name == name.node.unhygienize() && variant.kind() == VariantKind::Unit ) { let ty_path = cx.tcx.item_path_str(edef.did); let mut err = struct_span_warn!(cx.tcx.sess, p.span, E0170, "pattern binding `{}` is named the same as one \ of the variants of the type `{}`", - ident.node, ty_path); - fileline_help!(err, p.span, + name.node, ty_path); + help!(err, "if you meant to match on a variant, \ consider making the path in the pattern qualified: `{}::{}`", - ty_path, ident.node); + ty_path, name.node); err.emit(); } } @@ -341,7 +340,15 @@ fn check_arms(cx: &MatchCheckCtxt, }, hir::MatchSource::Normal => { - span_err!(cx.tcx.sess, pat.span, E0001, "unreachable pattern") + let mut err = struct_span_err!(cx.tcx.sess, pat.span, E0001, + "unreachable pattern"); + // if we had a catchall pattern, hint at that + for row in &seen.0 { + if pat_is_catchall(&cx.tcx.def_map.borrow(), row[0]) { + span_note!(err, row[0].span, "this pattern matches any value"); + } + } + err.emit(); }, hir::MatchSource::TryDesugar => { @@ -361,7 +368,18 @@ fn check_arms(cx: &MatchCheckCtxt, } } -fn raw_pat<'a>(p: &'a Pat) -> &'a Pat { +/// Checks for common cases of "catchall" patterns that may not be intended as such. 
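+/// For the purposes of this check, a plain binding such as `x`, a reference to
+/// one such as `&x`, or a tuple made entirely of such bindings counts as a
+/// catchall, while a bare `_` wildcard falls through and is not flagged.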
+fn pat_is_catchall(dm: &DefMap, p: &Pat) -> bool { + match p.node { + PatKind::Ident(_, _, None) => pat_is_binding(dm, p), + PatKind::Ident(_, _, Some(ref s)) => pat_is_catchall(dm, &s), + PatKind::Ref(ref s, _) => pat_is_catchall(dm, &s), + PatKind::Tup(ref v) => v.iter().all(|p| pat_is_catchall(dm, &p)), + _ => false + } +} + +fn raw_pat(p: &Pat) -> &Pat { match p.node { PatKind::Ident(_, _, Some(ref s)) => raw_pat(&s), _ => p @@ -436,13 +454,13 @@ fn const_val_to_expr(value: &ConstVal) -> P { } pub struct StaticInliner<'a, 'tcx: 'a> { - pub tcx: &'a TyCtxt<'tcx>, + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, pub failed: bool, pub renaming_map: Option<&'a mut FnvHashMap<(NodeId, Span), NodeId>>, } impl<'a, 'tcx> StaticInliner<'a, 'tcx> { - pub fn new<'b>(tcx: &'b TyCtxt<'tcx>, + pub fn new<'b>(tcx: TyCtxt<'b, 'tcx, 'tcx>, renaming_map: Option<&'b mut FnvHashMap<(NodeId, Span), NodeId>>) -> StaticInliner<'b, 'tcx> { StaticInliner { @@ -1104,13 +1122,12 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, PatKind::Ident(hir::BindByValue(_), _, ref sub) => { let pat_ty = tcx.node_id_to_type(p.id); //FIXME: (@jroesch) this code should be floated up as well - let infcx = infer::new_infer_ctxt(cx.tcx, - &cx.tcx.tables, - Some(cx.param_env.clone()), - ProjectionMode::AnyFinal); - if infcx.type_moves_by_default(pat_ty, pat.span) { - check_move(p, sub.as_ref().map(|p| &**p)); - } + cx.tcx.infer_ctxt(None, Some(cx.param_env.clone()), + ProjectionMode::AnyFinal).enter(|infcx| { + if infcx.type_moves_by_default(pat_ty, pat.span) { + check_move(p, sub.as_ref().map(|p| &**p)); + } + }); } PatKind::Ident(hir::BindByRef(_), _, _) => { } @@ -1132,24 +1149,21 @@ fn check_legality_of_move_bindings(cx: &MatchCheckCtxt, /// assign. fn check_for_mutation_in_guard<'a, 'tcx>(cx: &'a MatchCheckCtxt<'a, 'tcx>, guard: &hir::Expr) { - let mut checker = MutationChecker { - cx: cx, - }; - - let infcx = infer::new_infer_ctxt(cx.tcx, - &cx.tcx.tables, - Some(checker.cx.param_env.clone()), - ProjectionMode::AnyFinal); - - let mut visitor = ExprUseVisitor::new(&mut checker, &infcx); - visitor.walk_expr(guard); + cx.tcx.infer_ctxt(None, Some(cx.param_env.clone()), + ProjectionMode::AnyFinal).enter(|infcx| { + let mut checker = MutationChecker { + cx: cx, + }; + let mut visitor = ExprUseVisitor::new(&mut checker, &infcx); + visitor.walk_expr(guard); + }); } -struct MutationChecker<'a, 'tcx: 'a> { - cx: &'a MatchCheckCtxt<'a, 'tcx>, +struct MutationChecker<'a, 'gcx: 'a> { + cx: &'a MatchCheckCtxt<'a, 'gcx>, } -impl<'a, 'tcx> Delegate<'tcx> for MutationChecker<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Delegate<'tcx> for MutationChecker<'a, 'gcx> { fn matched_pat(&mut self, _: &Pat, _: cmt, _: euv::MatchMode) {} fn consume(&mut self, _: NodeId, _: Span, _: cmt, _: ConsumeMode) {} fn consume_pat(&mut self, _: &Pat, _: cmt, _: ConsumeMode) {} diff --git a/src/librustc_const_eval/diagnostics.rs b/src/librustc_const_eval/diagnostics.rs index 4f5176f6b0..457d25923c 100644 --- a/src/librustc_const_eval/diagnostics.rs +++ b/src/librustc_const_eval/diagnostics.rs @@ -62,8 +62,6 @@ fn foo(x: Empty) { However, this won't: ```compile_fail -enum Empty {} - fn foo(x: Option) { match x { // empty @@ -191,7 +189,7 @@ inner `String` to be moved into a variable called `s`. 
let x = Some("s".to_string()); match x { - op_string @ Some(s) => {}, + op_string @ Some(s) => {}, // error: cannot bind by-move with sub-bindings None => {}, } ``` @@ -215,22 +213,63 @@ match Some("hi".to_string()) { The variable `s` has type `String`, and its use in the guard is as a variable of type `String`. The guard code effectively executes in a separate scope to the body of the arm, so the value would be moved into this anonymous scope and -therefore become unavailable in the body of the arm. Although this example seems -innocuous, the problem is most clear when considering functions that take their -argument by value. +therefore becomes unavailable in the body of the arm. -```compile_fail +The problem above can be solved by using the `ref` keyword. + +``` match Some("hi".to_string()) { - Some(s) if { drop(s); false } => (), - Some(s) => {}, // use s. + Some(ref s) if s.len() == 0 => {}, _ => {}, } ``` -The value would be dropped in the guard then become unavailable not only in the -body of that arm but also in all subsequent arms! The solution is to bind by -reference when using guards or refactor the entire expression, perhaps by -putting the condition inside the body of the arm. +Though this example seems innocuous and easy to solve, the problem becomes clear +when it encounters functions which consume the value: + +```compile_fail +struct A{} + +impl A { + fn consume(self) -> usize { + 0 + } +} + +fn main() { + let a = Some(A{}); + match a { + Some(y) if y.consume() > 0 => {} + _ => {} + } +} +``` + +In this situation, even the `ref` keyword cannot solve it, since borrowed +content cannot be moved. This problem cannot be solved generally. If the value +can be cloned, here is a not-so-specific solution: + +``` +#[derive(Clone)] +struct A{} + +impl A { + fn consume(self) -> usize { + 0 + } +} + +fn main() { + let a = Some(A{}); + match a{ + Some(ref y) if y.clone().consume() > 0 => {} + _ => {} + } +} +``` + +If the value will be consumed in the pattern guard, using its clone will not +move its ownership, so the code works. "##, E0009: r##" @@ -247,7 +286,8 @@ struct X { x: (), } let x = Some((X { x: () }, X { x: () })); match x { - Some((y, ref z)) => {}, + Some((y, ref z)) => {}, // error: cannot bind by-move and by-ref in the + // same pattern None => panic!() } ``` @@ -533,6 +573,12 @@ be a compile-time constant. Erroneous code example: let x = [0i32; len]; // error: expected constant integer for repeat count, // found variable ``` + +Working example: + +``` +let x = [0i32; 10]; +``` "##, } diff --git a/src/librustc_const_eval/eval.rs b/src/librustc_const_eval/eval.rs index 45a90bf006..d0be7e203f 100644 --- a/src/librustc_const_eval/eval.rs +++ b/src/librustc_const_eval/eval.rs @@ -17,15 +17,14 @@ use self::EvalHint::*; use rustc::hir::map as ast_map; use rustc::hir::map::blocks::FnLikeNode; -use rustc::middle::cstore::{self, CrateStore, InlinedItem}; -use rustc::{infer, traits}; +use rustc::middle::cstore::{self, InlinedItem}; +use rustc::traits; use rustc::hir::def::Def; use rustc::hir::def_id::DefId; use rustc::hir::pat_util::def_to_path; use rustc::ty::{self, Ty, TyCtxt, subst}; use rustc::ty::util::IntTypeExt; use rustc::traits::ProjectionMode; -use rustc::middle::astconv_util::ast_ty_to_prim_ty; use rustc::util::nodemap::NodeMap; use rustc::lint; @@ -54,10 +53,10 @@ macro_rules! 
math { } } -fn lookup_variant_by_id<'a>(tcx: &'a ty::TyCtxt, - enum_def: DefId, - variant_def: DefId) - -> Option<&'a Expr> { +fn lookup_variant_by_id<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + enum_def: DefId, + variant_def: DefId) + -> Option<&'tcx Expr> { fn variant_expr<'a>(variants: &'a [hir::Variant], id: ast::NodeId) -> Option<&'a Expr> { for variant in variants { @@ -90,16 +89,16 @@ fn lookup_variant_by_id<'a>(tcx: &'a ty::TyCtxt, /// /// `substs` is optional and is used for associated constants. /// This generally happens in late/trans const evaluation. -pub fn lookup_const_by_id<'a, 'tcx: 'a>(tcx: &'a TyCtxt<'tcx>, +pub fn lookup_const_by_id<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId, - substs: Option>) + substs: Option<&'tcx subst::Substs<'tcx>>) -> Option<(&'tcx Expr, Option>)> { if let Some(node_id) = tcx.map.as_local_node_id(def_id) { match tcx.map.find(node_id) { None => None, Some(ast_map::NodeItem(it)) => match it.node { hir::ItemConst(ref ty, ref const_expr) => { - Some((&const_expr, ast_ty_to_prim_ty(tcx, ty))) + Some((&const_expr, tcx.ast_ty_to_prim_ty(ty))) } _ => None }, @@ -125,7 +124,7 @@ pub fn lookup_const_by_id<'a, 'tcx: 'a>(tcx: &'a TyCtxt<'tcx>, }, Some(ast_map::NodeImplItem(ii)) => match ii.node { hir::ImplItemKind::Const(ref ty, ref expr) => { - Some((&expr, ast_ty_to_prim_ty(tcx, ty))) + Some((&expr, tcx.ast_ty_to_prim_ty(ty))) } _ => None }, @@ -143,7 +142,7 @@ pub fn lookup_const_by_id<'a, 'tcx: 'a>(tcx: &'a TyCtxt<'tcx>, let expr_ty = match tcx.sess.cstore.maybe_get_item_ast(tcx, def_id) { cstore::FoundAst::Found(&InlinedItem::Item(ref item)) => match item.node { hir::ItemConst(ref ty, ref const_expr) => { - Some((&**const_expr, ast_ty_to_prim_ty(tcx, ty))) + Some((&**const_expr, tcx.ast_ty_to_prim_ty(ty))) }, _ => None }, @@ -164,7 +163,7 @@ pub fn lookup_const_by_id<'a, 'tcx: 'a>(tcx: &'a TyCtxt<'tcx>, }, cstore::FoundAst::Found(&InlinedItem::ImplItem(_, ref ii)) => match ii.node { hir::ImplItemKind::Const(ref ty, ref expr) => { - Some((&**expr, ast_ty_to_prim_ty(tcx, ty))) + Some((&**expr, tcx.ast_ty_to_prim_ty(ty))) }, _ => None }, @@ -182,8 +181,9 @@ pub fn lookup_const_by_id<'a, 'tcx: 'a>(tcx: &'a TyCtxt<'tcx>, } } -fn inline_const_fn_from_external_crate(tcx: &TyCtxt, def_id: DefId) - -> Option { +fn inline_const_fn_from_external_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + def_id: DefId) + -> Option { match tcx.extern_const_fns.borrow().get(&def_id) { Some(&ast::DUMMY_NODE_ID) => return None, Some(&fn_id) => return Some(fn_id), @@ -205,8 +205,8 @@ fn inline_const_fn_from_external_crate(tcx: &TyCtxt, def_id: DefId) fn_id } -pub fn lookup_const_fn_by_id<'tcx>(tcx: &TyCtxt<'tcx>, def_id: DefId) - -> Option> +pub fn lookup_const_fn_by_id<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) + -> Option> { let fn_id = if let Some(node_id) = tcx.map.as_local_node_id(def_id) { node_id @@ -238,8 +238,11 @@ pub fn lookup_const_fn_by_id<'tcx>(tcx: &TyCtxt<'tcx>, def_id: DefId) } } -pub fn const_expr_to_pat(tcx: &ty::TyCtxt, expr: &Expr, pat_id: ast::NodeId, span: Span) - -> Result, DefId> { +pub fn const_expr_to_pat<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + expr: &Expr, + pat_id: ast::NodeId, + span: Span) + -> Result, DefId> { let pat_ty = tcx.expr_ty(expr); debug!("expr={:?} pat_ty={:?} pat_id={}", expr, pat_ty, pat_id); match pat_ty.sty { @@ -281,7 +284,7 @@ pub fn const_expr_to_pat(tcx: &ty::TyCtxt, expr: &Expr, pat_id: ast::NodeId, spa let path = match def.full_def() { Def::Struct(def_id) => def_to_path(tcx, def_id), Def::Variant(_, 
variant_did) => def_to_path(tcx, variant_did), - Def::Fn(..) => return Ok(P(hir::Pat { + Def::Fn(..) | Def::Method(..) => return Ok(P(hir::Pat { id: expr.id, node: PatKind::Lit(P(expr.clone())), span: span, @@ -339,7 +342,8 @@ pub fn const_expr_to_pat(tcx: &ty::TyCtxt, expr: &Expr, pat_id: ast::NodeId, spa Ok(P(hir::Pat { id: expr.id, node: pat, span: span })) } -pub fn eval_const_expr(tcx: &TyCtxt, e: &Expr) -> ConstVal { +pub fn eval_const_expr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + e: &Expr) -> ConstVal { match eval_const_expr_partial(tcx, e, ExprTypeChecked, None) { Ok(r) => r, // non-const path still needs to be a fatal error, because enums are funky @@ -364,7 +368,7 @@ pub struct ConstEvalErr { pub kind: ErrKind, } -#[derive(Clone, PartialEq)] +#[derive(Clone)] pub enum ErrKind { CannotCast, CannotCastTo(&'static str), @@ -377,13 +381,6 @@ pub enum ErrKind { NotOn(ConstVal), CallOn(ConstVal), - NegateWithOverflow(i64), - AddiWithOverflow(i64, i64), - SubiWithOverflow(i64, i64), - MuliWithOverflow(i64, i64), - AdduWithOverflow(u64, u64), - SubuWithOverflow(u64, u64), - MuluWithOverflow(u64, u64), DivideByZero, DivideWithOverflow, ModuloByZero, @@ -414,6 +411,8 @@ pub enum ErrKind { /// Expected, Got TypeMismatch(String, ConstInt), BadType(ConstVal), + ErroneousReferencedConstant(Box), + CharCast(ConstInt), } impl From for ErrKind { @@ -438,13 +437,6 @@ impl ConstEvalErr { NotOn(ref const_val) => format!("not on {}", const_val.description()).into_cow(), CallOn(ref const_val) => format!("call on {}", const_val.description()).into_cow(), - NegateWithOverflow(..) => "attempted to negate with overflow".into_cow(), - AddiWithOverflow(..) => "attempted to add with overflow".into_cow(), - SubiWithOverflow(..) => "attempted to sub with overflow".into_cow(), - MuliWithOverflow(..) => "attempted to mul with overflow".into_cow(), - AdduWithOverflow(..) => "attempted to add with overflow".into_cow(), - SubuWithOverflow(..) => "attempted to sub with overflow".into_cow(), - MuluWithOverflow(..) => "attempted to mul with overflow".into_cow(), DivideByZero => "attempted to divide by zero".into_cow(), DivideWithOverflow => "attempted to divide with overflow".into_cow(), ModuloByZero => "attempted remainder with a divisor of zero".into_cow(), @@ -480,6 +472,10 @@ impl ConstEvalErr { expected, got.description()).into_cow() }, BadType(ref i) => format!("value of wrong type: {:?}", i).into_cow(), + ErroneousReferencedConstant(_) => "could not evaluate referenced constant".into_cow(), + CharCast(ref got) => { + format!("only `u8` can be cast as `char`, not `{}`", got.description()).into_cow() + }, } } } @@ -534,10 +530,10 @@ macro_rules! signal { /// guaranteed to be evaluatable. `ty_hint` is usually ExprTypeChecked, /// but a few places need to evaluate constants during type-checking, like /// computing the length of an array. (See also the FIXME above EvalHint.) -pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>, - e: &Expr, - ty_hint: EvalHint<'tcx>, - fn_args: FnArgMap) -> EvalResult { +pub fn eval_const_expr_partial<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + e: &Expr, + ty_hint: EvalHint<'tcx>, + fn_args: FnArgMap) -> EvalResult { // Try to compute the type of the expression based on the EvalHint. // (See also the definition of EvalHint, and the FIXME above EvalHint.) 
let ety = match ty_hint { @@ -679,7 +675,7 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>, } } hir::ExprCast(ref base, ref target_ty) => { - let ety = ast_ty_to_prim_ty(tcx, &target_ty).or_else(|| ety) + let ety = tcx.ast_ty_to_prim_ty(&target_ty).or(ety) .unwrap_or_else(|| { tcx.sess.span_fatal(target_ty.span, "target type not found for const cast") @@ -696,6 +692,8 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>, let val = match eval_const_expr_partial(tcx, &base, base_hint, fn_args) { Ok(val) => val, + Err(ConstEvalErr { kind: ErroneousReferencedConstant( + box ConstEvalErr { kind: TypeMismatch(_, val), .. }), .. }) | Err(ConstEvalErr { kind: TypeMismatch(_, val), .. }) => { // Something like `5i8 as usize` doesn't need a type hint for the base // instead take the type hint from the inner value @@ -737,19 +735,31 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>, } else { None }; - if let Some((e, ty)) = lookup_const_by_id(tcx, def_id, substs) { + if let Some((expr, ty)) = lookup_const_by_id(tcx, def_id, substs) { let item_hint = match ty { Some(ty) => ty_hint.checked_or(ty), None => ty_hint, }; - eval_const_expr_partial(tcx, e, item_hint, None)? + match eval_const_expr_partial(tcx, expr, item_hint, None) { + Ok(val) => val, + Err(err) => { + debug!("bad reference: {:?}, {:?}", err.description(), err.span); + signal!(e, ErroneousReferencedConstant(box err)) + }, + } } else { signal!(e, NonConstPath); } }, Def::Variant(enum_def, variant_def) => { if let Some(const_expr) = lookup_variant_by_id(tcx, enum_def, variant_def) { - eval_const_expr_partial(tcx, const_expr, ty_hint, None)? + match eval_const_expr_partial(tcx, const_expr, ty_hint, None) { + Ok(val) => val, + Err(err) => { + debug!("bad reference: {:?}, {:?}", err.description(), err.span); + signal!(e, ErroneousReferencedConstant(box err)) + }, + } } else { signal!(e, UnimplementedConstVal("enum variants")); } @@ -801,7 +811,10 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>, debug!("const call({:?})", call_args); eval_const_expr_partial(tcx, &result, ty_hint, Some(&call_args))? 
}, - hir::ExprLit(ref lit) => lit_to_const(&lit.node, tcx, ety, lit.span)?, + hir::ExprLit(ref lit) => match lit_to_const(&lit.node, tcx, ety, lit.span) { + Ok(val) => val, + Err(err) => signal!(e, err), + }, hir::ExprBlock(ref block) => { match block.expr { Some(ref expr) => eval_const_expr_partial(tcx, &expr, ty_hint, fn_args)?, @@ -907,24 +920,20 @@ pub fn eval_const_expr_partial<'tcx>(tcx: &TyCtxt<'tcx>, }; match (ety.map(|t| &t.sty), result) { - (Some(ref ty_hint), Integral(i)) => Ok(Integral(infer(i, tcx, ty_hint, e.span)?)), + (Some(ref ty_hint), Integral(i)) => match infer(i, tcx, ty_hint) { + Ok(inferred) => Ok(Integral(inferred)), + Err(err) => signal!(e, err), + }, (_, result) => Ok(result), } } -fn infer<'tcx>( - i: ConstInt, - tcx: &TyCtxt<'tcx>, - ty_hint: &ty::TypeVariants<'tcx>, - span: Span -) -> Result { +fn infer<'a, 'tcx>(i: ConstInt, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty_hint: &ty::TypeVariants<'tcx>) + -> Result { use syntax::ast::*; - let err = |e| ConstEvalErr { - span: span, - kind: e, - }; - match (ty_hint, i) { (&ty::TyInt(IntTy::I8), result @ I8(_)) => Ok(result), (&ty::TyInt(IntTy::I16), result @ I16(_)) => Ok(result), @@ -970,78 +979,77 @@ fn infer<'tcx>( Err(_) => Ok(Usize(ConstUsize::Us32(i as u32))), } }, - (&ty::TyUint(_), InferSigned(_)) => Err(err(IntermediateUnsignedNegative)), + (&ty::TyUint(_), InferSigned(_)) => Err(IntermediateUnsignedNegative), - (&ty::TyInt(ity), i) => Err(err(TypeMismatch(ity.to_string(), i))), - (&ty::TyUint(ity), i) => Err(err(TypeMismatch(ity.to_string(), i))), + (&ty::TyInt(ity), i) => Err(TypeMismatch(ity.to_string(), i)), + (&ty::TyUint(ity), i) => Err(TypeMismatch(ity.to_string(), i)), (&ty::TyEnum(ref adt, _), i) => { let hints = tcx.lookup_repr_hints(adt.did); let int_ty = tcx.enum_repr_type(hints.iter().next()); - infer(i, tcx, &int_ty.to_ty(tcx).sty, span) + infer(i, tcx, &int_ty.to_ty(tcx).sty) }, - (_, i) => Err(err(BadType(ConstVal::Integral(i)))), + (_, i) => Err(BadType(ConstVal::Integral(i))), } } -fn resolve_trait_associated_const<'a, 'tcx: 'a>(tcx: &'a TyCtxt<'tcx>, +fn resolve_trait_associated_const<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ti: &'tcx hir::TraitItem, trait_id: DefId, - rcvr_substs: subst::Substs<'tcx>) + rcvr_substs: &'tcx subst::Substs<'tcx>) -> Option<(&'tcx Expr, Option>)> { let trait_ref = ty::Binder( - rcvr_substs.erase_regions().to_trait_ref(tcx, trait_id) + rcvr_substs.clone().erase_regions().to_trait_ref(tcx, trait_id) ); debug!("resolve_trait_associated_const: trait_ref={:?}", trait_ref); tcx.populate_implementations_for_trait_if_necessary(trait_ref.def_id()); - let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, ProjectionMode::AnyFinal); - - let mut selcx = traits::SelectionContext::new(&infcx); - let obligation = traits::Obligation::new(traits::ObligationCause::dummy(), - trait_ref.to_poly_trait_predicate()); - let selection = match selcx.select(&obligation) { - Ok(Some(vtable)) => vtable, - // Still ambiguous, so give up and let the caller decide whether this - // expression is really needed yet. Some associated constant values - // can't be evaluated until monomorphization is done in trans. 
- Ok(None) => { - return None - } - Err(_) => { - return None - } - }; + tcx.infer_ctxt(None, None, ProjectionMode::AnyFinal).enter(|infcx| { + let mut selcx = traits::SelectionContext::new(&infcx); + let obligation = traits::Obligation::new(traits::ObligationCause::dummy(), + trait_ref.to_poly_trait_predicate()); + let selection = match selcx.select(&obligation) { + Ok(Some(vtable)) => vtable, + // Still ambiguous, so give up and let the caller decide whether this + // expression is really needed yet. Some associated constant values + // can't be evaluated until monomorphization is done in trans. + Ok(None) => { + return None + } + Err(_) => { + return None + } + }; - // NOTE: this code does not currently account for specialization, but when - // it does so, it should hook into the ProjectionMode to determine when the - // constant should resolve; this will also require plumbing through to this - // function whether we are in "trans mode" to pick the right ProjectionMode - // when constructing the inference context above. - match selection { - traits::VtableImpl(ref impl_data) => { - match tcx.associated_consts(impl_data.impl_def_id) - .iter().find(|ic| ic.name == ti.name) { - Some(ic) => lookup_const_by_id(tcx, ic.def_id, None), - None => match ti.node { - hir::ConstTraitItem(ref ty, Some(ref expr)) => { - Some((&*expr, ast_ty_to_prim_ty(tcx, ty))) + // NOTE: this code does not currently account for specialization, but when + // it does so, it should hook into the ProjectionMode to determine when the + // constant should resolve; this will also require plumbing through to this + // function whether we are in "trans mode" to pick the right ProjectionMode + // when constructing the inference context above. + match selection { + traits::VtableImpl(ref impl_data) => { + match tcx.associated_consts(impl_data.impl_def_id) + .iter().find(|ic| ic.name == ti.name) { + Some(ic) => lookup_const_by_id(tcx, ic.def_id, None), + None => match ti.node { + hir::ConstTraitItem(ref ty, Some(ref expr)) => { + Some((&*expr, tcx.ast_ty_to_prim_ty(ty))) + }, + _ => None, }, - _ => None, - }, + } + } + _ => { + span_bug!(ti.span, + "resolve_trait_associated_const: unexpected vtable type") } } - _ => { - span_bug!( - ti.span, - "resolve_trait_associated_const: unexpected vtable type") - } - } + }) } -fn cast_const_int<'tcx>(tcx: &TyCtxt<'tcx>, val: ConstInt, ty: ty::Ty) -> CastResult { +fn cast_const_int<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, val: ConstInt, ty: ty::Ty) -> CastResult { let v = val.to_u64_unchecked(); match ty.sty { ty::TyBool if v == 0 => Ok(Bool(false)), @@ -1066,28 +1074,27 @@ fn cast_const_int<'tcx>(tcx: &TyCtxt<'tcx>, val: ConstInt, ty: ty::Ty) -> CastRe Err(_) => Ok(Integral(Usize(ConstUsize::Us32(v as u32)))), } }, - ty::TyFloat(ast::FloatTy::F64) if val.is_negative() => { - // FIXME: this could probably be prettier - // there's no easy way to turn an `Infer` into a f64 - let val = (-val).map_err(Math)?; - let val = val.to_u64().unwrap() as f64; - let val = -val; - Ok(Float(val)) + ty::TyFloat(ast::FloatTy::F64) => match val.erase_type() { + Infer(u) => Ok(Float(u as f64)), + InferSigned(i) => Ok(Float(i as f64)), + _ => bug!("ConstInt::erase_type returned something other than Infer/InferSigned"), }, - ty::TyFloat(ast::FloatTy::F64) => Ok(Float(val.to_u64().unwrap() as f64)), - ty::TyFloat(ast::FloatTy::F32) if val.is_negative() => { - let val = (-val).map_err(Math)?; - let val = val.to_u64().unwrap() as f32; - let val = -val; - Ok(Float(val as f64)) + ty::TyFloat(ast::FloatTy::F32) => 
match val.erase_type() { + Infer(u) => Ok(Float(u as f32 as f64)), + InferSigned(i) => Ok(Float(i as f32 as f64)), + _ => bug!("ConstInt::erase_type returned something other than Infer/InferSigned"), }, - ty::TyFloat(ast::FloatTy::F32) => Ok(Float(val.to_u64().unwrap() as f32 as f64)), ty::TyRawPtr(_) => Err(ErrKind::UnimplementedConstVal("casting an address to a raw ptr")), + ty::TyChar => match infer(val, tcx, &ty::TyUint(ast::UintTy::U8)) { + Ok(U8(u)) => Ok(Char(u as char)), + // can only occur before typeck, typeck blocks `T as char` for `T` != `u8` + _ => Err(CharCast(val)), + }, _ => Err(CannotCast), } } -fn cast_const_float<'tcx>(tcx: &TyCtxt<'tcx>, f: f64, ty: ty::Ty) -> CastResult { +fn cast_const_float<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, f: f64, ty: ty::Ty) -> CastResult { match ty.sty { ty::TyInt(_) if f >= 0.0 => cast_const_int(tcx, Infer(f as u64), ty), ty::TyInt(_) => cast_const_int(tcx, InferSigned(f as i64), ty), @@ -1098,22 +1105,28 @@ fn cast_const_float<'tcx>(tcx: &TyCtxt<'tcx>, f: f64, ty: ty::Ty) -> CastResult } } -fn cast_const<'tcx>(tcx: &TyCtxt<'tcx>, val: ConstVal, ty: ty::Ty) -> CastResult { +fn cast_const<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, val: ConstVal, ty: ty::Ty) -> CastResult { match val { Integral(i) => cast_const_int(tcx, i, ty), Bool(b) => cast_const_int(tcx, Infer(b as u64), ty), Float(f) => cast_const_float(tcx, f, ty), Char(c) => cast_const_int(tcx, Infer(c as u64), ty), Function(_) => Err(UnimplementedConstVal("casting fn pointers")), + ByteStr(_) => match ty.sty { + ty::TyRawPtr(_) => { + Err(ErrKind::UnimplementedConstVal("casting a bytestr to a raw ptr")) + }, + _ => Err(CannotCast), + }, _ => Err(CannotCast), } } -fn lit_to_const<'tcx>(lit: &ast::LitKind, - tcx: &TyCtxt<'tcx>, - ty_hint: Option>, - span: Span, - ) -> Result { +fn lit_to_const<'a, 'tcx>(lit: &ast::LitKind, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty_hint: Option>, + span: Span) + -> Result { use syntax::ast::*; use syntax::ast::LitIntType::*; match *lit { @@ -1121,28 +1134,28 @@ fn lit_to_const<'tcx>(lit: &ast::LitKind, LitKind::ByteStr(ref data) => Ok(ByteStr(data.clone())), LitKind::Byte(n) => Ok(Integral(U8(n))), LitKind::Int(n, Signed(ity)) => { - infer(InferSigned(n as i64), tcx, &ty::TyInt(ity), span).map(Integral) + infer(InferSigned(n as i64), tcx, &ty::TyInt(ity)).map(Integral) }, LitKind::Int(n, Unsuffixed) => { match ty_hint.map(|t| &t.sty) { Some(&ty::TyInt(ity)) => { - infer(InferSigned(n as i64), tcx, &ty::TyInt(ity), span).map(Integral) + infer(InferSigned(n as i64), tcx, &ty::TyInt(ity)).map(Integral) }, Some(&ty::TyUint(uty)) => { - infer(Infer(n), tcx, &ty::TyUint(uty), span).map(Integral) + infer(Infer(n), tcx, &ty::TyUint(uty)).map(Integral) }, None => Ok(Integral(Infer(n))), Some(&ty::TyEnum(ref adt, _)) => { let hints = tcx.lookup_repr_hints(adt.did); let int_ty = tcx.enum_repr_type(hints.iter().next()); - infer(Infer(n), tcx, &int_ty.to_ty(tcx).sty, span).map(Integral) + infer(Infer(n), tcx, &int_ty.to_ty(tcx).sty).map(Integral) }, Some(ty_hint) => bug!("bad ty_hint: {:?}, {:?}", ty_hint, lit), } }, LitKind::Int(n, Unsigned(ity)) => { - infer(Infer(n), tcx, &ty::TyUint(ity), span).map(Integral) + infer(Infer(n), tcx, &ty::TyUint(ity)).map(Integral) }, LitKind::Float(ref n, _) | @@ -1180,9 +1193,9 @@ pub fn compare_const_vals(a: &ConstVal, b: &ConstVal) -> Option { } } -pub fn compare_lit_exprs<'tcx>(tcx: &TyCtxt<'tcx>, - a: &Expr, - b: &Expr) -> Option { +pub fn compare_lit_exprs<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + a: &Expr, + b: &Expr) -> Option { let a 
= match eval_const_expr_partial(tcx, a, ExprTypeChecked, None) { Ok(a) => a, Err(e) => { @@ -1202,7 +1215,8 @@ pub fn compare_lit_exprs<'tcx>(tcx: &TyCtxt<'tcx>, /// Returns the repeat count for a repeating vector expression. -pub fn eval_repeat_count(tcx: &TyCtxt, count_expr: &hir::Expr) -> usize { +pub fn eval_repeat_count<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + count_expr: &hir::Expr) -> usize { let hint = UncheckedExprHint(tcx.types.usize); match eval_const_expr_partial(tcx, count_expr, hint, None) { Ok(Integral(Usize(count))) => { diff --git a/src/librustc_const_eval/lib.rs b/src/librustc_const_eval/lib.rs index 085888dc21..9ab6a437a5 100644 --- a/src/librustc_const_eval/lib.rs +++ b/src/librustc_const_eval/lib.rs @@ -29,6 +29,8 @@ #![feature(slice_patterns)] #![feature(iter_arith)] #![feature(question_mark)] +#![feature(box_patterns)] +#![feature(box_syntax)] #[macro_use] extern crate syntax; #[macro_use] extern crate log; diff --git a/src/librustc_const_math/int.rs b/src/librustc_const_math/int.rs index 658d4d9a6d..64f03be3b5 100644 --- a/src/librustc_const_math/int.rs +++ b/src/librustc_const_math/int.rs @@ -503,7 +503,7 @@ impl ::std::ops::Shr for ConstInt { I8(a) => Ok(I8(overflowing!(a.overflowing_shr(b), Op::Shr))), I16(a) => Ok(I16(overflowing!(a.overflowing_shr(b), Op::Shr))), I32(a) => Ok(I32(overflowing!(a.overflowing_shr(b), Op::Shr))), - I64(a) => Ok(I64(overflowing!(a.overflowing_shr(b), Op::Shl))), + I64(a) => Ok(I64(overflowing!(a.overflowing_shr(b), Op::Shr))), Isize(Is32(a)) => Ok(Isize(Is32(overflowing!(a.overflowing_shr(b), Op::Shr)))), Isize(Is64(a)) => Ok(Isize(Is64(overflowing!(a.overflowing_shr(b), Op::Shr)))), U8(a) => Ok(U8(overflowing!(a.overflowing_shr(b), Op::Shr))), diff --git a/src/librustc_const_math/lib.rs b/src/librustc_const_math/lib.rs index 9f66aac6e3..59792d16e8 100644 --- a/src/librustc_const_math/lib.rs +++ b/src/librustc_const_math/lib.rs @@ -40,4 +40,4 @@ mod err; pub use int::*; pub use us::*; pub use is::*; -pub use err::ConstMathErr; +pub use err::{ConstMathErr, Op}; diff --git a/src/librustc_data_structures/bitvec.rs b/src/librustc_data_structures/bitvec.rs index 092b406ae9..cb648038c3 100644 --- a/src/librustc_data_structures/bitvec.rs +++ b/src/librustc_data_structures/bitvec.rs @@ -52,9 +52,8 @@ impl BitVector { pub fn grow(&mut self, num_bits: usize) { let num_words = u64s(num_bits); - let extra_words = self.data.len() - num_words; - if extra_words > 0 { - self.data.extend((0..extra_words).map(|_| 0)); + if self.data.len() < num_words { + self.data.resize(num_words, 0) } } @@ -284,15 +283,27 @@ fn union_two_vecs() { #[test] fn grow() { let mut vec1 = BitVector::new(65); - assert!(vec1.insert(3)); - assert!(!vec1.insert(3)); - assert!(vec1.insert(5)); - assert!(vec1.insert(64)); + for index in 0 .. 65 { + assert!(vec1.insert(index)); + assert!(!vec1.insert(index)); + } vec1.grow(128); - assert!(vec1.contains(3)); - assert!(vec1.contains(5)); - assert!(vec1.contains(64)); - assert!(!vec1.contains(126)); + + // Check if the bits set before growing are still set + for index in 0 .. 65 { + assert!(vec1.contains(index)); + } + + // Check if the new bits are all un-set + for index in 65 .. 128 { + assert!(!vec1.contains(index)); + } + + // Check that we can set all new bits without running out of bounds + for index in 65 .. 
128 { + assert!(vec1.insert(index)); + assert!(!vec1.insert(index)); + } } #[test] diff --git a/src/librustc_data_structures/graph/mod.rs b/src/librustc_data_structures/graph/mod.rs index 99a87d1e76..731471b060 100644 --- a/src/librustc_data_structures/graph/mod.rs +++ b/src/librustc_data_structures/graph/mod.rs @@ -292,11 +292,15 @@ impl Graph { } } - pub fn depth_traverse<'a>(&'a self, start: NodeIndex) -> DepthFirstTraversal<'a, N, E> { + pub fn depth_traverse<'a>(&'a self, + start: NodeIndex, + direction: Direction) + -> DepthFirstTraversal<'a, N, E> { DepthFirstTraversal { graph: self, stack: vec![start], visited: BitVector::new(self.nodes.len()), + direction: direction, } } } @@ -371,6 +375,7 @@ pub struct DepthFirstTraversal<'g, N: 'g, E: 'g> { graph: &'g Graph, stack: Vec, visited: BitVector, + direction: Direction, } impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> { @@ -382,9 +387,10 @@ impl<'g, N: Debug, E: Debug> Iterator for DepthFirstTraversal<'g, N, E> { continue; } - for (_, edge) in self.graph.outgoing_edges(idx) { - if !self.visited.contains(edge.target().node_id()) { - self.stack.push(edge.target()); + for (_, edge) in self.graph.adjacent_edges(idx, self.direction) { + let target = edge.source_or_target(self.direction); + if !self.visited.contains(target.node_id()) { + self.stack.push(target); } } diff --git a/src/librustc_data_structures/lib.rs b/src/librustc_data_structures/lib.rs index 2234325aa0..926ee85230 100644 --- a/src/librustc_data_structures/lib.rs +++ b/src/librustc_data_structures/lib.rs @@ -28,6 +28,8 @@ #![feature(nonzero)] #![feature(rustc_private)] #![feature(staged_api)] +#![feature(unboxed_closures)] +#![feature(fn_traits)] #![cfg_attr(test, feature(test))] diff --git a/src/librustc_data_structures/obligation_forest/mod.rs b/src/librustc_data_structures/obligation_forest/mod.rs index 4f6d0d7e40..c079146edb 100644 --- a/src/librustc_data_structures/obligation_forest/mod.rs +++ b/src/librustc_data_structures/obligation_forest/mod.rs @@ -15,20 +15,45 @@ //! in the first place). See README.md for a general overview of how //! to use this class. +use fnv::{FnvHashMap, FnvHashSet}; + +use std::cell::Cell; +use std::collections::hash_map::Entry; use std::fmt::Debug; -use std::mem; +use std::hash; +use std::marker::PhantomData; mod node_index; use self::node_index::NodeIndex; -mod tree_index; -use self::tree_index::TreeIndex; - - #[cfg(test)] mod test; -pub struct ObligationForest { +pub trait ForestObligation : Clone + Debug { + type Predicate : Clone + hash::Hash + Eq + Debug; + + fn as_predicate(&self) -> &Self::Predicate; +} + +pub trait ObligationProcessor { + type Obligation : ForestObligation; + type Error : Debug; + + fn process_obligation(&mut self, + obligation: &mut Self::Obligation) + -> Result>, Self::Error>; + + fn process_backedge<'c, I>(&mut self, cycle: I, + _marker: PhantomData<&'c Self::Obligation>) + where I: Clone + Iterator; +} + +struct SnapshotData { + node_len: usize, + cache_list_len: usize, +} + +pub struct ObligationForest { /// The list of obligations. In between calls to /// `process_obligations`, this list only contains nodes in the /// `Pending` or `Success` state (with a non-zero number of @@ -42,51 +67,66 @@ pub struct ObligationForest { /// at a higher index than its parent. This is needed by the /// backtrace iterator (which uses `split_at`). nodes: Vec>, - trees: Vec>, - snapshots: Vec, + /// A cache of predicates that have been successfully completed. 
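+    /// Lookups here let `register_obligation_at` return early instead of
+    /// re-registering an obligation that has already been proven.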
+ done_cache: FnvHashSet, + /// An cache of the nodes in `nodes`, indexed by predicate. + waiting_cache: FnvHashMap, + /// A list of the obligations added in snapshots, to allow + /// for their removal. + cache_list: Vec, + snapshots: Vec, + scratch: Option>, } pub struct Snapshot { len: usize, } -struct Tree { - root: NodeIndex, - state: T, -} - +#[derive(Debug)] struct Node { - state: NodeState, + obligation: O, + state: Cell, + + /// Obligations that depend on this obligation for their + /// completion. They must all be in a non-pending state. + dependents: Vec, + /// The parent of a node - the original obligation of + /// which it is a subobligation. Except for error reporting, + /// this is just another member of `dependents`. parent: Option, - tree: TreeIndex, } /// The state of one node in some tree within the forest. This /// represents the current state of processing for the obligation (of /// type `O`) associated with this node. -#[derive(Debug)] -enum NodeState { - /// Obligation not yet resolved to success or error. - Pending { - obligation: O, - }, - - /// Obligation resolved to success; `num_incomplete_children` - /// indicates the number of children still in an "incomplete" - /// state. Incomplete means that either the child is still - /// pending, or it has children which are incomplete. (Basically, - /// there is pending work somewhere in the subtree of the child.) - /// - /// Once all children have completed, success nodes are removed - /// from the vector by the compression step. - Success { - obligation: O, - num_incomplete_children: usize, - }, +/// +/// Outside of ObligationForest methods, nodes should be either Pending +/// or Waiting. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +enum NodeState { + /// Obligations for which selection had not yet returned a + /// non-ambiguous result. + Pending, + + /// This obligation was selected successfuly, but may or + /// may not have subobligations. + Success, + + /// This obligation was selected sucessfully, but it has + /// a pending subobligation. + Waiting, + + /// This obligation, along with its subobligations, are complete, + /// and will be removed in the next collection. + Done, /// This obligation was resolved to an error. Error nodes are /// removed from the vector by the compression step. Error, + + /// This is a temporary state used in DFS loops to detect cycles, + /// it should not exist outside of these DFSes. 
+ OnDfsStack, } #[derive(Debug)] @@ -113,12 +153,15 @@ pub struct Error { pub backtrace: Vec, } -impl ObligationForest { - pub fn new() -> ObligationForest { +impl ObligationForest { + pub fn new() -> ObligationForest { ObligationForest { - trees: vec![], nodes: vec![], snapshots: vec![], + done_cache: FnvHashSet(), + waiting_cache: FnvHashMap(), + cache_list: vec![], + scratch: Some(vec![]), } } @@ -129,57 +172,81 @@ impl ObligationForest { } pub fn start_snapshot(&mut self) -> Snapshot { - self.snapshots.push(self.trees.len()); + self.snapshots.push(SnapshotData { + node_len: self.nodes.len(), + cache_list_len: self.cache_list.len() + }); Snapshot { len: self.snapshots.len() } } pub fn commit_snapshot(&mut self, snapshot: Snapshot) { assert_eq!(snapshot.len, self.snapshots.len()); - let trees_len = self.snapshots.pop().unwrap(); - assert!(self.trees.len() >= trees_len); + let info = self.snapshots.pop().unwrap(); + assert!(self.nodes.len() >= info.node_len); + assert!(self.cache_list.len() >= info.cache_list_len); } pub fn rollback_snapshot(&mut self, snapshot: Snapshot) { // Check that we are obeying stack discipline. assert_eq!(snapshot.len, self.snapshots.len()); - let trees_len = self.snapshots.pop().unwrap(); + let info = self.snapshots.pop().unwrap(); - // If nothing happened in snapshot, done. - if self.trees.len() == trees_len { - return; + for entry in &self.cache_list[info.cache_list_len..] { + self.done_cache.remove(entry); + self.waiting_cache.remove(entry); } - // Find root of first tree; because nothing can happen in a - // snapshot but pushing trees, all nodes after that should be - // roots of other trees as well - let first_root_index = self.trees[trees_len].root.get(); - debug_assert!(self.nodes[first_root_index..] - .iter() - .zip(first_root_index..) - .all(|(root, root_index)| { - self.trees[root.tree.get()].root.get() == root_index - })); - - // Pop off tree/root pairs pushed during snapshot. - self.trees.truncate(trees_len); - self.nodes.truncate(first_root_index); + self.nodes.truncate(info.node_len); + self.cache_list.truncate(info.cache_list_len); } pub fn in_snapshot(&self) -> bool { !self.snapshots.is_empty() } - /// Adds a new tree to the forest. + /// Registers an obligation /// - /// This CAN be done during a snapshot. - pub fn push_tree(&mut self, obligation: O, tree_state: T) { - let index = NodeIndex::new(self.nodes.len()); - let tree = TreeIndex::new(self.trees.len()); - self.trees.push(Tree { - root: index, - state: tree_state, - }); - self.nodes.push(Node::new(tree, None, obligation)); + /// This CAN be done in a snapshot + pub fn register_obligation(&mut self, obligation: O) { + // Ignore errors here - there is no guarantee of success. + let _ = self.register_obligation_at(obligation, None); + } + + // returns Err(()) if we already know this obligation failed. 
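+    // If the predicate is already in `done_cache` this is a no-op, and if it
+    // is already pending or waiting we only record `parent` as an extra
+    // dependent of the existing node.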
+ fn register_obligation_at(&mut self, obligation: O, parent: Option) + -> Result<(), ()> + { + if self.done_cache.contains(obligation.as_predicate()) { + return Ok(()) + } + + match self.waiting_cache.entry(obligation.as_predicate().clone()) { + Entry::Occupied(o) => { + debug!("register_obligation_at({:?}, {:?}) - duplicate of {:?}!", + obligation, parent, o.get()); + if let Some(parent) = parent { + if self.nodes[o.get().get()].dependents.contains(&parent) { + debug!("register_obligation_at({:?}, {:?}) - duplicate subobligation", + obligation, parent); + } else { + self.nodes[o.get().get()].dependents.push(parent); + } + } + if let NodeState::Error = self.nodes[o.get().get()].state.get() { + Err(()) + } else { + Ok(()) + } + } + Entry::Vacant(v) => { + debug!("register_obligation_at({:?}, {:?}) - ok", + obligation, parent); + v.insert(NodeIndex::new(self.nodes.len())); + self.cache_list.push(obligation.as_predicate().clone()); + self.nodes.push(Node::new(parent, obligation)); + Ok(()) + } + } } /// Convert all remaining obligations to the given error. @@ -189,10 +256,8 @@ impl ObligationForest { assert!(!self.in_snapshot()); let mut errors = vec![]; for index in 0..self.nodes.len() { - debug_assert!(!self.nodes[index].is_popped()); - self.inherit_error(index); - if let NodeState::Pending { .. } = self.nodes[index].state { - let backtrace = self.backtrace(index); + if let NodeState::Pending = self.nodes[index].state.get() { + let backtrace = self.error_at(index); errors.push(Error { error: error.clone(), backtrace: backtrace, @@ -210,22 +275,17 @@ impl ObligationForest { { self.nodes .iter() - .filter_map(|n| { - match n.state { - NodeState::Pending { ref obligation } => Some(obligation), - _ => None, - } - }) - .cloned() + .filter(|n| n.state.get() == NodeState::Pending) + .map(|n| n.obligation.clone()) .collect() } - /// Process the obligations. + /// Perform a pass through the obligation list. This must + /// be called in a loop until `outcome.stalled` is false. /// /// This CANNOT be unrolled (presently, at least). - pub fn process_obligations(&mut self, mut action: F) -> Outcome - where E: Debug, - F: FnMut(&mut O, &mut T, Backtrace) -> Result>, E> + pub fn process_obligations
<P>
(&mut self, processor: &mut P) -> Outcome + where P: ObligationProcessor { debug!("process_obligations(len={})", self.nodes.len()); assert!(!self.in_snapshot()); // cannot unroll this action @@ -233,33 +293,18 @@ impl ObligationForest { let mut errors = vec![]; let mut stalled = true; - // We maintain the invariant that the list is in pre-order, so - // parents occur before their children. Also, whenever an - // error occurs, we propagate it from the child all the way to - // the root of the tree. Together, these two facts mean that - // when we visit a node, we can check if its root is in error, - // and we will find out if any prior node within this forest - // encountered an error. - for index in 0..self.nodes.len() { - debug_assert!(!self.nodes[index].is_popped()); - self.inherit_error(index); - debug!("process_obligations: node {} == {:?}", index, - self.nodes[index].state); - - let result = { - let Node { tree, parent, .. } = self.nodes[index]; - let (prefix, suffix) = self.nodes.split_at_mut(index); - let backtrace = Backtrace::new(prefix, parent); - match suffix[0].state { - NodeState::Error | - NodeState::Success { .. } => continue, - NodeState::Pending { ref mut obligation } => { - action(obligation, &mut self.trees[tree.get()].state, backtrace) - } + self.nodes[index]); + + let result = match self.nodes[index] { + Node { state: ref _state, ref mut obligation, .. } + if _state.get() == NodeState::Pending => + { + processor.process_obligation(obligation) } + _ => continue }; debug!("process_obligations: node {} got result {:?}", @@ -273,10 +318,22 @@ impl ObligationForest { Ok(Some(children)) => { // if we saw a Some(_) result, we are not (yet) stalled stalled = false; - self.success(index, children); + self.nodes[index].state.set(NodeState::Success); + + for child in children { + let st = self.register_obligation_at( + child, + Some(NodeIndex::new(index)) + ); + if let Err(()) = st { + // error already reported - propagate it + // to our node. + self.error_at(index); + } + } } Err(err) => { - let backtrace = self.backtrace(index); + let backtrace = self.error_at(index); errors.push(Error { error: err, backtrace: backtrace, @@ -285,259 +342,292 @@ impl ObligationForest { } } + self.mark_as_waiting(); + self.process_cycles(processor); + // Now we have to compress the result - let successful_obligations = self.compress(); + let completed_obligations = self.compress(); debug!("process_obligations: complete"); Outcome { - completed: successful_obligations, + completed: completed_obligations, errors: errors, stalled: stalled, } } - /// Indicates that node `index` has been processed successfully, - /// yielding `children` as the derivative work. If children is an - /// empty vector, this will update the ref count on the parent of - /// `index` to indicate that a child has completed - /// successfully. Otherwise, adds new nodes to represent the child - /// work. - fn success(&mut self, index: usize, children: Vec) { - debug!("success(index={}, children={:?})", index, children); - - let num_incomplete_children = children.len(); - - if num_incomplete_children == 0 { - // if there is no work left to be done, decrement parent's ref count - self.update_parent(index); - } else { - // create child work - let tree_index = self.nodes[index].tree; - let node_index = NodeIndex::new(index); - self.nodes.extend(children.into_iter() - .map(|o| Node::new(tree_index, Some(node_index), o))); + /// Mark all NodeState::Success nodes as NodeState::Done and + /// report all cycles between them. 
This should be called + /// after `mark_as_waiting` marks all nodes with pending + /// subobligations as NodeState::Waiting. + fn process_cycles
<P>
(&mut self, processor: &mut P) + where P: ObligationProcessor + { + let mut stack = self.scratch.take().unwrap(); + + for node in 0..self.nodes.len() { + self.find_cycles_from_node(&mut stack, processor, node); } - // change state from `Pending` to `Success`, temporarily swapping in `Error` - let state = mem::replace(&mut self.nodes[index].state, NodeState::Error); - self.nodes[index].state = match state { - NodeState::Pending { obligation } => { - NodeState::Success { - obligation: obligation, - num_incomplete_children: num_incomplete_children, - } - } - NodeState::Success { .. } | - NodeState::Error => unreachable!(), - }; + self.scratch = Some(stack); } - /// Decrements the ref count on the parent of `child`; if the - /// parent's ref count then reaches zero, proceeds recursively. - fn update_parent(&mut self, child: usize) { - debug!("update_parent(child={})", child); - if let Some(parent) = self.nodes[child].parent { - let parent = parent.get(); - match self.nodes[parent].state { - NodeState::Success { ref mut num_incomplete_children, .. } => { - *num_incomplete_children -= 1; - if *num_incomplete_children > 0 { - return; + fn find_cycles_from_node
<P>
(&self, stack: &mut Vec, + processor: &mut P, index: usize) + where P: ObligationProcessor + { + let node = &self.nodes[index]; + let state = node.state.get(); + match state { + NodeState::OnDfsStack => { + let index = + stack.iter().rposition(|n| *n == index).unwrap(); + // I need a Clone closure + #[derive(Clone)] + struct GetObligation<'a, O: 'a>(&'a [Node]); + impl<'a, 'b, O> FnOnce<(&'b usize,)> for GetObligation<'a, O> { + type Output = &'a O; + extern "rust-call" fn call_once(self, args: (&'b usize,)) -> &'a O { + &self.0[*args.0].obligation + } + } + impl<'a, 'b, O> FnMut<(&'b usize,)> for GetObligation<'a, O> { + extern "rust-call" fn call_mut(&mut self, args: (&'b usize,)) -> &'a O { + &self.0[*args.0].obligation } } - _ => unreachable!(), - } - self.update_parent(parent); - } - } - /// If the root of `child` is in an error state, places `child` - /// into an error state. This is used during processing so that we - /// skip the remaining obligations from a tree once some other - /// node in the tree is found to be in error. - fn inherit_error(&mut self, child: usize) { - let tree = self.nodes[child].tree; - let root = self.trees[tree.get()].root; - if let NodeState::Error = self.nodes[root.get()].state { - self.nodes[child].state = NodeState::Error; - } + processor.process_backedge(stack[index..].iter().map(GetObligation(&self.nodes)), + PhantomData); + } + NodeState::Success => { + node.state.set(NodeState::OnDfsStack); + stack.push(index); + if let Some(parent) = node.parent { + self.find_cycles_from_node(stack, processor, parent.get()); + } + for dependent in &node.dependents { + self.find_cycles_from_node(stack, processor, dependent.get()); + } + stack.pop(); + node.state.set(NodeState::Done); + }, + NodeState::Waiting | NodeState::Pending => { + // this node is still reachable from some pending node. We + // will get to it when they are all processed. + } + NodeState::Done | NodeState::Error => { + // already processed that node + } + }; } /// Returns a vector of obligations for `p` and all of its /// ancestors, putting them into the error state in the process. - /// The fact that the root is now marked as an error is used by - /// `inherit_error` above to propagate the error state to the - /// remainder of the tree. - fn backtrace(&mut self, mut p: usize) -> Vec { + fn error_at(&mut self, p: usize) -> Vec { + let mut error_stack = self.scratch.take().unwrap(); let mut trace = vec![]; + + let mut n = p; loop { - let state = mem::replace(&mut self.nodes[p].state, NodeState::Error); - match state { - NodeState::Pending { obligation } | - NodeState::Success { obligation, .. 
} => { - trace.push(obligation); - } - NodeState::Error => { - // we should not encounter an error, because if - // there was an error in the ancestors, it should - // have been propagated down and we should never - // have tried to process this obligation - panic!("encountered error in node {:?} when collecting stack trace", - p); - } - } + self.nodes[n].state.set(NodeState::Error); + trace.push(self.nodes[n].obligation.clone()); + error_stack.extend(self.nodes[n].dependents.iter().map(|x| x.get())); // loop to the parent - match self.nodes[p].parent { - Some(q) => { - p = q.get(); - } - None => { - return trace; - } + match self.nodes[n].parent { + Some(q) => n = q.get(), + None => break + } + } + + loop { + // non-standard `while let` to bypass #6393 + let i = match error_stack.pop() { + Some(i) => i, + None => break + }; + + let node = &self.nodes[i]; + + match node.state.get() { + NodeState::Error => continue, + _ => node.state.set(NodeState::Error) + } + + error_stack.extend( + node.dependents.iter().cloned().chain(node.parent).map(|x| x.get()) + ); + } + + self.scratch = Some(error_stack); + trace + } + + /// Marks all nodes that depend on a pending node as NodeState;:Waiting. + fn mark_as_waiting(&self) { + for node in &self.nodes { + if node.state.get() == NodeState::Waiting { + node.state.set(NodeState::Success); + } + } + + for node in &self.nodes { + if node.state.get() == NodeState::Pending { + self.mark_as_waiting_from(node) } } } + fn mark_as_waiting_from(&self, node: &Node) { + match node.state.get() { + NodeState::Pending | NodeState::Done => {}, + NodeState::Waiting | NodeState::Error | NodeState::OnDfsStack => return, + NodeState::Success => { + node.state.set(NodeState::Waiting); + } + } + + if let Some(parent) = node.parent { + self.mark_as_waiting_from(&self.nodes[parent.get()]); + } + + for dependent in &node.dependents { + self.mark_as_waiting_from(&self.nodes[dependent.get()]); + } + } + /// Compresses the vector, removing all popped nodes. This adjusts /// the indices and hence invalidates any outstanding /// indices. Cannot be used during a transaction. + /// + /// Beforehand, all nodes must be marked as `Done` and no cycles + /// on these nodes may be present. This is done by e.g. `process_cycles`. + #[inline(never)] fn compress(&mut self) -> Vec { assert!(!self.in_snapshot()); // didn't write code to unroll this action - let mut node_rewrites: Vec<_> = (0..self.nodes.len()).collect(); - let mut tree_rewrites: Vec<_> = (0..self.trees.len()).collect(); - // Finish propagating error state. Note that in this case we - // only have to check immediate parents, rather than all - // ancestors, because all errors have already occurred that - // are going to occur. let nodes_len = self.nodes.len(); - for i in 0..nodes_len { - if !self.nodes[i].is_popped() { - self.inherit_error(i); - } - } + let mut node_rewrites: Vec<_> = self.scratch.take().unwrap(); + node_rewrites.extend(0..nodes_len); + let mut dead_nodes = 0; - // Determine which trees to remove by checking if their root - // is popped. - let mut dead_trees = 0; - let trees_len = self.trees.len(); - for i in 0..trees_len { - let root_node = self.trees[i].root; - if self.nodes[root_node.get()].is_popped() { - dead_trees += 1; - } else if dead_trees > 0 { - self.trees.swap(i, i - dead_trees); - tree_rewrites[i] -= dead_trees; + // Now move all popped nodes to the end. Try to keep the order. 
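`error_at` splits propagation in two: the parent chain of the failing node becomes the reported backtrace, and everything else reachable through `dependents` (and their parents) is flipped to `Error` with an explicit work stack. Roughly the same shape on hypothetical flat node data; the patch additionally reuses `self.scratch` as the work stack and avoids `while let` because of issue #6393:

```rust
// Sketch only: flat stand-in node data, not the forest's generic Node<O>.
struct NodeData {
    obligation: String,
    parent: Option<usize>,
    dependents: Vec<usize>,
    error: bool,
}

fn error_at(nodes: &mut [NodeData], p: usize) -> Vec<String> {
    let mut trace = Vec::new();
    let mut work = Vec::new();

    // The parent chain of the failing node is the reported backtrace.
    let mut n = p;
    loop {
        nodes[n].error = true;
        trace.push(nodes[n].obligation.clone());
        work.extend(nodes[n].dependents.iter().cloned());
        match nodes[n].parent {
            Some(q) => n = q,
            None => break,
        }
    }

    // Everything reachable through dependents (and their parents) is also
    // marked as failed, but does not show up in the backtrace.
    while let Some(i) = work.pop() {
        if nodes[i].error {
            continue;
        }
        nodes[i].error = true;
        work.extend(nodes[i].dependents.iter().cloned().chain(nodes[i].parent));
    }

    trace
}
```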
+ // + // LOOP INVARIANT: + // self.nodes[0..i - dead_nodes] are the first remaining nodes + // self.nodes[i - dead_nodes..i] are all dead + // self.nodes[i..] are unchanged + for i in 0..self.nodes.len() { + match self.nodes[i].state.get() { + NodeState::Done => { + self.waiting_cache.remove(self.nodes[i].obligation.as_predicate()); + // FIXME(HashMap): why can't I get my key back? + self.done_cache.insert(self.nodes[i].obligation.as_predicate().clone()); + } + NodeState::Error => { + // We *intentionally* remove the node from the cache at this point. Otherwise + // tests must come up with a different type on every type error they + // check against. + self.waiting_cache.remove(self.nodes[i].obligation.as_predicate()); + } + _ => {} } - } - // Now go through and move all nodes that are either - // successful or which have an error over into to the end of - // the list, preserving the relative order of the survivors - // (which is important for the `inherit_error` logic). - let mut dead_nodes = 0; - for i in 0..nodes_len { if self.nodes[i].is_popped() { + node_rewrites[i] = nodes_len; dead_nodes += 1; - } else if dead_nodes > 0 { - self.nodes.swap(i, i - dead_nodes); - node_rewrites[i] -= dead_nodes; + } else { + if dead_nodes > 0 { + self.nodes.swap(i, i - dead_nodes); + node_rewrites[i] -= dead_nodes; + } } } // No compression needed. - if dead_nodes == 0 && dead_trees == 0 { + if dead_nodes == 0 { + node_rewrites.truncate(0); + self.scratch = Some(node_rewrites); return vec![]; } - // Pop off the trees we killed. - self.trees.truncate(trees_len - dead_trees); - // Pop off all the nodes we killed and extract the success // stories. let successful = (0..dead_nodes) .map(|_| self.nodes.pop().unwrap()) .flat_map(|node| { - match node.state { + match node.state.get() { NodeState::Error => None, - NodeState::Pending { .. } => unreachable!(), - NodeState::Success { obligation, num_incomplete_children } => { - assert_eq!(num_incomplete_children, 0); - Some(obligation) - } + NodeState::Done => Some(node.obligation), + _ => unreachable!() } }) - .collect(); + .collect(); + self.apply_rewrites(&node_rewrites); + + node_rewrites.truncate(0); + self.scratch = Some(node_rewrites); + + successful + } + + fn apply_rewrites(&mut self, node_rewrites: &[usize]) { + let nodes_len = node_rewrites.len(); - // Adjust the various indices, since we compressed things. 
- for tree in &mut self.trees { - tree.root = NodeIndex::new(node_rewrites[tree.root.get()]); - } for node in &mut self.nodes { - if let Some(ref mut index) = node.parent { + if let Some(index) = node.parent { let new_index = node_rewrites[index.get()]; - debug_assert!(new_index < (nodes_len - dead_nodes)); - *index = NodeIndex::new(new_index); + if new_index >= nodes_len { + // parent dead due to error + node.parent = None; + } else { + node.parent = Some(NodeIndex::new(new_index)); + } } - node.tree = TreeIndex::new(tree_rewrites[node.tree.get()]); + let mut i = 0; + while i < node.dependents.len() { + let new_index = node_rewrites[node.dependents[i].get()]; + if new_index >= nodes_len { + node.dependents.swap_remove(i); + } else { + node.dependents[i] = NodeIndex::new(new_index); + i += 1; + } + } } - successful + let mut kill_list = vec![]; + for (predicate, index) in self.waiting_cache.iter_mut() { + let new_index = node_rewrites[index.get()]; + if new_index >= nodes_len { + kill_list.push(predicate.clone()); + } else { + *index = NodeIndex::new(new_index); + } + } + + for predicate in kill_list { self.waiting_cache.remove(&predicate); } } } impl Node { - fn new(tree: TreeIndex, parent: Option, obligation: O) -> Node { + fn new(parent: Option, obligation: O) -> Node { Node { + obligation: obligation, parent: parent, - state: NodeState::Pending { obligation: obligation }, - tree: tree, + state: Cell::new(NodeState::Pending), + dependents: vec![], } } fn is_popped(&self) -> bool { - match self.state { - NodeState::Pending { .. } => false, - NodeState::Success { num_incomplete_children, .. } => num_incomplete_children == 0, - NodeState::Error => true, - } - } -} - -#[derive(Clone)] -pub struct Backtrace<'b, O: 'b> { - nodes: &'b [Node], - pointer: Option, -} - -impl<'b, O> Backtrace<'b, O> { - fn new(nodes: &'b [Node], pointer: Option) -> Backtrace<'b, O> { - Backtrace { - nodes: nodes, - pointer: pointer, - } - } -} - -impl<'b, O> Iterator for Backtrace<'b, O> { - type Item = &'b O; - - fn next(&mut self) -> Option<&'b O> { - debug!("Backtrace: self.pointer = {:?}", self.pointer); - if let Some(p) = self.pointer { - self.pointer = self.nodes[p.get()].parent; - match self.nodes[p.get()].state { - NodeState::Pending { ref obligation } | - NodeState::Success { ref obligation, .. } => Some(obligation), - NodeState::Error => { - panic!("Backtrace encountered an error."); - } - } - } else { - None + match self.state.get() { + NodeState::Pending | NodeState::Waiting => false, + NodeState::Error | NodeState::Done => true, + NodeState::OnDfsStack | NodeState::Success => unreachable!() } } } diff --git a/src/librustc_data_structures/obligation_forest/test.rs b/src/librustc_data_structures/obligation_forest/test.rs index a8c2427021..a95b2b84b3 100644 --- a/src/librustc_data_structures/obligation_forest/test.rs +++ b/src/librustc_data_structures/obligation_forest/test.rs @@ -8,30 +8,82 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
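`compress` keeps the survivors in their original order by swapping dead nodes toward the end while recording, for every original index, either its new position or the sentinel `nodes_len` meaning "removed"; `apply_rewrites` then patches `parent`, `dependents` and the waiting cache against that table, dropping anything that maps to the sentinel. The same compaction idea in isolation, as a hypothetical helper over a plain vector and one list of stored indices:

```rust
// Sketch only: `compact` is an illustrative helper, not the forest's code.
fn compact<T>(items: &mut Vec<T>, dead: &[bool], links: &mut Vec<usize>) {
    let len = items.len();
    let mut rewrites: Vec<usize> = (0..len).collect();
    let mut dead_count = 0;

    // Invariant while scanning: items[..i - dead_count] are survivors,
    // items[i - dead_count..i] are dead, items[i..] are untouched.
    for i in 0..len {
        if dead[i] {
            rewrites[i] = len; // sentinel: this slot is going away
            dead_count += 1;
        } else if dead_count > 0 {
            items.swap(i, i - dead_count);
            rewrites[i] -= dead_count; // survivor shifted left
        }
    }
    items.truncate(len - dead_count);

    // Patch stored indices: anything mapped to the sentinel is dropped,
    // the rest are rewritten to their new positions.
    links.retain(|&idx| rewrites[idx] < len);
    for idx in links.iter_mut() {
        *idx = rewrites[*idx];
    }
}
```

Dropping entries whose rewrite hits the sentinel is the same move that lets a dead parent simply become `None` in `apply_rewrites`.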
-use super::{ObligationForest, Outcome, Error}; +#![cfg(test)] + +use super::{ObligationForest, ObligationProcessor, Outcome, Error}; + +use std::fmt; +use std::marker::PhantomData; + +impl<'a> super::ForestObligation for &'a str { + type Predicate = &'a str; + + fn as_predicate(&self) -> &Self::Predicate { + self + } +} + +struct ClosureObligationProcessor { + process_obligation: OF, + _process_backedge: BF, + marker: PhantomData<(O, E)>, +} + +#[allow(non_snake_case)] +fn C(of: OF, bf: BF) -> ClosureObligationProcessor + where OF: FnMut(&mut O) -> Result>, &'static str>, + BF: FnMut(&[O]) +{ + ClosureObligationProcessor { + process_obligation: of, + _process_backedge: bf, + marker: PhantomData + } +} + +impl ObligationProcessor for ClosureObligationProcessor + where O: super::ForestObligation + fmt::Debug, + E: fmt::Debug, + OF: FnMut(&mut O) -> Result>, E>, + BF: FnMut(&[O]) +{ + type Obligation = O; + type Error = E; + + fn process_obligation(&mut self, + obligation: &mut Self::Obligation) + -> Result>, Self::Error> + { + (self.process_obligation)(obligation) + } + + fn process_backedge<'c, I>(&mut self, _cycle: I, + _marker: PhantomData<&'c Self::Obligation>) + where I: Clone + Iterator { + } +} + #[test] fn push_pop() { let mut forest = ObligationForest::new(); - forest.push_tree("A", "A"); - forest.push_tree("B", "B"); - forest.push_tree("C", "C"); + forest.register_obligation("A"); + forest.register_obligation("B"); + forest.register_obligation("C"); // first round, B errors out, A has subtasks, and C completes, creating this: // A |-> A.1 // |-> A.2 // |-> A.3 - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations(|obligation, - tree, - _| { - assert_eq!(obligation.chars().next(), tree.chars().next()); - match *obligation { - "A" => Ok(Some(vec!["A.1", "A.2", "A.3"])), - "B" => Err("B is for broken"), - "C" => Ok(Some(vec![])), - _ => unreachable!(), - } - }); + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "A" => Ok(Some(vec!["A.1", "A.2", "A.3"])), + "B" => Err("B is for broken"), + "C" => Ok(Some(vec![])), + _ => unreachable!(), + } + }, |_| {})); assert_eq!(ok, vec!["C"]); assert_eq!(err, vec![Error { @@ -45,10 +97,9 @@ fn push_pop() { // |-> A.3 |-> A.3.i // D |-> D.1 // |-> D.2 - forest.push_tree("D", "D"); - let Outcome { completed: ok, errors: err, .. }: Outcome<&'static str, ()> = - forest.process_obligations(|obligation, tree, _| { - assert_eq!(obligation.chars().next(), tree.chars().next()); + forest.register_obligation("D"); + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { match *obligation { "A.1" => Ok(None), "A.2" => Ok(None), @@ -56,45 +107,43 @@ fn push_pop() { "D" => Ok(Some(vec!["D.1", "D.2"])), _ => unreachable!(), } - }); + }, |_| {})); assert_eq!(ok, Vec::<&'static str>::new()); assert_eq!(err, Vec::new()); // third round: ok in A.1 but trigger an error in A.2. Check that it - // propagates to A.3.i, but not D.1 or D.2. + // propagates to A, but not D.1 or D.2. // D |-> D.1 |-> D.1.i // |-> D.2 |-> D.2.i - let Outcome { completed: ok, errors: err, .. 
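The updated tests drive the forest through `ClosureObligationProcessor`, an adapter that turns a pair of closures into an `ObligationProcessor` implementation; the `PhantomData` field is what ties the otherwise-unused obligation and error type parameters to the struct. The same pattern, reduced to a single-closure processor over a hypothetical trait:

```rust
// Sketch only: Processor, ClosureProcessor and C are illustrative names.
use std::marker::PhantomData;

trait Processor {
    type Item;
    type Error;
    fn process(&mut self, item: &mut Self::Item) -> Result<(), Self::Error>;
}

struct ClosureProcessor<I, E, F> {
    process: F,
    // Ties I and E to the struct so the impl below is well-formed.
    marker: PhantomData<(I, E)>,
}

impl<I, E, F> Processor for ClosureProcessor<I, E, F>
    where F: FnMut(&mut I) -> Result<(), E>
{
    type Item = I;
    type Error = E;
    fn process(&mut self, item: &mut I) -> Result<(), E> {
        (self.process)(item)
    }
}

#[allow(non_snake_case)]
fn C<I, E, F>(f: F) -> ClosureProcessor<I, E, F>
    where F: FnMut(&mut I) -> Result<(), E>
{
    ClosureProcessor { process: f, marker: PhantomData }
}
```

A test can then write `C(|obligation| ...)` instead of defining a new processor type for every case.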
} = forest.process_obligations(|obligation, - tree, - _| { - assert_eq!(obligation.chars().next(), tree.chars().next()); - match *obligation { - "A.1" => Ok(Some(vec![])), - "A.2" => Err("A is for apple"), - "D.1" => Ok(Some(vec!["D.1.i"])), - "D.2" => Ok(Some(vec!["D.2.i"])), - _ => unreachable!(), - } - }); - assert_eq!(ok, vec!["A.1"]); + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "A.1" => Ok(Some(vec![])), + "A.2" => Err("A is for apple"), + "A.3.i" => Ok(Some(vec![])), + "D.1" => Ok(Some(vec!["D.1.i"])), + "D.2" => Ok(Some(vec!["D.2.i"])), + _ => unreachable!(), + } + }, |_| {})); + assert_eq!(ok, vec!["A.3", "A.1", "A.3.i"]); assert_eq!(err, vec![Error { error: "A is for apple", backtrace: vec!["A.2", "A"], }]); - // fourth round: error in D.1.i that should propagate to D.2.i - let Outcome { completed: ok, errors: err, .. } = forest.process_obligations(|obligation, - tree, - _| { - assert_eq!(obligation.chars().next(), tree.chars().next()); - match *obligation { - "D.1.i" => Err("D is for dumb"), - _ => panic!("unexpected obligation {:?}", obligation), - } - }); - assert_eq!(ok, Vec::<&'static str>::new()); + // fourth round: error in D.1.i + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "D.1.i" => Err("D is for dumb"), + "D.2.i" => Ok(Some(vec![])), + _ => panic!("unexpected obligation {:?}", obligation), + } + }, |_| {})); + assert_eq!(ok, vec!["D.2.i", "D.2"]); assert_eq!(err, vec![Error { error: "D is for dumb", @@ -113,60 +162,54 @@ fn push_pop() { #[test] fn success_in_grandchildren() { let mut forest = ObligationForest::new(); - forest.push_tree("A", "A"); + forest.register_obligation("A"); let Outcome { completed: ok, errors: err, .. } = - forest.process_obligations::<(), _>(|obligation, tree, _| { - assert_eq!(obligation.chars().next(), tree.chars().next()); + forest.process_obligations(&mut C(|obligation| { match *obligation { "A" => Ok(Some(vec!["A.1", "A.2", "A.3"])), _ => unreachable!(), } - }); + }, |_| {})); assert!(ok.is_empty()); assert!(err.is_empty()); let Outcome { completed: ok, errors: err, .. } = - forest.process_obligations::<(), _>(|obligation, tree, _| { - assert_eq!(obligation.chars().next(), tree.chars().next()); + forest.process_obligations(&mut C(|obligation| { match *obligation { "A.1" => Ok(Some(vec![])), "A.2" => Ok(Some(vec!["A.2.i", "A.2.ii"])), "A.3" => Ok(Some(vec![])), _ => unreachable!(), } - }); + }, |_| {})); assert_eq!(ok, vec!["A.3", "A.1"]); assert!(err.is_empty()); let Outcome { completed: ok, errors: err, .. } = - forest.process_obligations::<(), _>(|obligation, tree, _| { - assert_eq!(obligation.chars().next(), tree.chars().next()); + forest.process_obligations(&mut C(|obligation| { match *obligation { "A.2.i" => Ok(Some(vec!["A.2.i.a"])), "A.2.ii" => Ok(Some(vec![])), _ => unreachable!(), } - }); + }, |_| {})); assert_eq!(ok, vec!["A.2.ii"]); assert!(err.is_empty()); let Outcome { completed: ok, errors: err, .. } = - forest.process_obligations::<(), _>(|obligation, tree, _| { - assert_eq!(obligation.chars().next(), tree.chars().next()); + forest.process_obligations(&mut C(|obligation| { match *obligation { "A.2.i.a" => Ok(Some(vec![])), _ => unreachable!(), } - }); + }, |_| {})); assert_eq!(ok, vec!["A.2.i.a", "A.2.i", "A.2", "A"]); assert!(err.is_empty()); - let Outcome { completed: ok, errors: err, .. 
} = forest.process_obligations::<(), _>(|_, - _, - _| { - unreachable!() - }); + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|_| unreachable!(), |_| {})); + assert!(ok.is_empty()); assert!(err.is_empty()); } @@ -174,63 +217,244 @@ fn success_in_grandchildren() { #[test] fn to_errors_no_throw() { // check that converting multiple children with common parent (A) - // only yields one of them (and does not panic, in particular). + // yields to correct errors (and does not panic, in particular). let mut forest = ObligationForest::new(); - forest.push_tree("A", "A"); + forest.register_obligation("A"); let Outcome { completed: ok, errors: err, .. } = - forest.process_obligations::<(), _>(|obligation, tree, _| { - assert_eq!(obligation.chars().next(), tree.chars().next()); + forest.process_obligations(&mut C(|obligation| { match *obligation { "A" => Ok(Some(vec!["A.1", "A.2", "A.3"])), _ => unreachable!(), } - }); + }, |_|{})); assert_eq!(ok.len(), 0); assert_eq!(err.len(), 0); let errors = forest.to_errors(()); - assert_eq!(errors.len(), 1); + assert_eq!(errors[0].backtrace, vec!["A.1", "A"]); + assert_eq!(errors[1].backtrace, vec!["A.2", "A"]); + assert_eq!(errors[2].backtrace, vec!["A.3", "A"]); + assert_eq!(errors.len(), 3); } #[test] -fn backtrace() { - // check that converting multiple children with common parent (A) - // only yields one of them (and does not panic, in particular). +fn diamond() { + // check that diamond dependencies are handled correctly let mut forest = ObligationForest::new(); - forest.push_tree("A", "A"); + forest.register_obligation("A"); let Outcome { completed: ok, errors: err, .. } = - forest.process_obligations::<(), _>(|obligation, tree, mut backtrace| { - assert_eq!(obligation.chars().next(), tree.chars().next()); - assert!(backtrace.next().is_none()); + forest.process_obligations(&mut C(|obligation| { match *obligation { - "A" => Ok(Some(vec!["A.1"])), + "A" => Ok(Some(vec!["A.1", "A.2"])), _ => unreachable!(), } - }); - assert!(ok.is_empty()); - assert!(err.is_empty()); + }, |_|{})); + assert_eq!(ok.len(), 0); + assert_eq!(err.len(), 0); + let Outcome { completed: ok, errors: err, .. } = - forest.process_obligations::<(), _>(|obligation, tree, mut backtrace| { - assert_eq!(obligation.chars().next(), tree.chars().next()); - assert!(backtrace.next().unwrap() == &"A"); - assert!(backtrace.next().is_none()); + forest.process_obligations(&mut C(|obligation| { match *obligation { - "A.1" => Ok(Some(vec!["A.1.i"])), + "A.1" => Ok(Some(vec!["D"])), + "A.2" => Ok(Some(vec!["D"])), _ => unreachable!(), } - }); - assert!(ok.is_empty()); - assert!(err.is_empty()); + }, |_|{})); + assert_eq!(ok.len(), 0); + assert_eq!(err.len(), 0); + + let mut d_count = 0; let Outcome { completed: ok, errors: err, .. } = - forest.process_obligations::<(), _>(|obligation, tree, mut backtrace| { - assert_eq!(obligation.chars().next(), tree.chars().next()); - assert!(backtrace.next().unwrap() == &"A.1"); - assert!(backtrace.next().unwrap() == &"A"); - assert!(backtrace.next().is_none()); + forest.process_obligations(&mut C(|obligation| { match *obligation { - "A.1.i" => Ok(None), + "D" => { d_count += 1; Ok(Some(vec![])) }, _ => unreachable!(), } - }); + }, |_|{})); + assert_eq!(d_count, 1); + assert_eq!(ok, vec!["D", "A.2", "A.1", "A"]); + assert_eq!(err.len(), 0); + + let errors = forest.to_errors(()); + assert_eq!(errors.len(), 0); + + forest.register_obligation("A'"); + let Outcome { completed: ok, errors: err, .. 
} = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "A'" => Ok(Some(vec!["A'.1", "A'.2"])), + _ => unreachable!(), + } + }, |_|{})); assert_eq!(ok.len(), 0); - assert!(err.is_empty()); + assert_eq!(err.len(), 0); + + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "A'.1" => Ok(Some(vec!["D'", "A'"])), + "A'.2" => Ok(Some(vec!["D'"])), + _ => unreachable!(), + } + }, |_|{})); + assert_eq!(ok.len(), 0); + assert_eq!(err.len(), 0); + + let mut d_count = 0; + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "D'" => { d_count += 1; Err("operation failed") }, + _ => unreachable!(), + } + }, |_|{})); + assert_eq!(d_count, 1); + assert_eq!(ok.len(), 0); + assert_eq!(err, vec![super::Error { + error: "operation failed", + backtrace: vec!["D'", "A'.1", "A'"] + }]); + + let errors = forest.to_errors(()); + assert_eq!(errors.len(), 0); +} + +#[test] +fn done_dependency() { + // check that the local cache works + let mut forest = ObligationForest::new(); + forest.register_obligation("A: Sized"); + forest.register_obligation("B: Sized"); + forest.register_obligation("C: Sized"); + + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "A: Sized" | "B: Sized" | "C: Sized" => Ok(Some(vec![])), + _ => unreachable!(), + } + }, |_|{})); + assert_eq!(ok, vec!["C: Sized", "B: Sized", "A: Sized"]); + assert_eq!(err.len(), 0); + + forest.register_obligation("(A,B,C): Sized"); + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "(A,B,C): Sized" => Ok(Some(vec![ + "A: Sized", + "B: Sized", + "C: Sized" + ])), + _ => unreachable!(), + } + }, |_|{})); + assert_eq!(ok, vec!["(A,B,C): Sized"]); + assert_eq!(err.len(), 0); + + +} + + +#[test] +fn orphan() { + // check that orphaned nodes are handled correctly + let mut forest = ObligationForest::new(); + forest.register_obligation("A"); + forest.register_obligation("B"); + forest.register_obligation("C1"); + forest.register_obligation("C2"); + + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "A" => Ok(Some(vec!["D", "E"])), + "B" => Ok(None), + "C1" => Ok(Some(vec![])), + "C2" => Ok(Some(vec![])), + _ => unreachable!(), + } + }, |_|{})); + assert_eq!(ok, vec!["C2", "C1"]); + assert_eq!(err.len(), 0); + + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "D" | "E" => Ok(None), + "B" => Ok(Some(vec!["D"])), + _ => unreachable!(), + } + }, |_|{})); + assert_eq!(ok.len(), 0); + assert_eq!(err.len(), 0); + + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "D" => Ok(None), + "E" => Err("E is for error"), + _ => unreachable!(), + } + }, |_|{})); + assert_eq!(ok.len(), 0); + assert_eq!(err, vec![super::Error { + error: "E is for error", + backtrace: vec!["E", "A"] + }]); + + let Outcome { completed: ok, errors: err, .. 
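`done_dependency` exercises the forest's caches: once `A: Sized`, `B: Sized` and `C: Sized` are done, registering `(A,B,C): Sized` completes in a single round because its children are filtered against the done set instead of being re-registered. A toy version of that registration filter, with made-up types:

```rust
// Sketch only: MiniForest stands in for the real forest and its two caches.
use std::collections::HashSet;

struct MiniForest {
    pending: Vec<String>,
    done: HashSet<String>,
}

impl MiniForest {
    fn register(&mut self, predicate: &str) {
        if self.done.contains(predicate) {
            return; // already proven in an earlier round: skip entirely
        }
        if !self.pending.iter().any(|p| p.as_str() == predicate) {
            self.pending.push(predicate.to_string());
        }
    }
}
```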
} = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "D" => Err("D is dead"), + _ => unreachable!(), + } + }, |_|{})); + assert_eq!(ok.len(), 0); + assert_eq!(err, vec![super::Error { + error: "D is dead", + backtrace: vec!["D"] + }]); + + let errors = forest.to_errors(()); + assert_eq!(errors.len(), 0); +} + +#[test] +fn simultaneous_register_and_error() { + // check that registering a failed obligation works correctly + let mut forest = ObligationForest::new(); + forest.register_obligation("A"); + forest.register_obligation("B"); + + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "A" => Err("An error"), + "B" => Ok(Some(vec!["A"])), + _ => unreachable!(), + } + }, |_|{})); + assert_eq!(ok.len(), 0); + assert_eq!(err, vec![super::Error { + error: "An error", + backtrace: vec!["A"] + }]); + + let mut forest = ObligationForest::new(); + forest.register_obligation("B"); + forest.register_obligation("A"); + + let Outcome { completed: ok, errors: err, .. } = + forest.process_obligations(&mut C(|obligation| { + match *obligation { + "A" => Err("An error"), + "B" => Ok(Some(vec!["A"])), + _ => unreachable!(), + } + }, |_|{})); + assert_eq!(ok.len(), 0); + assert_eq!(err, vec![super::Error { + error: "An error", + backtrace: vec!["A"] + }]); } diff --git a/src/librustc_data_structures/snapshot_vec.rs b/src/librustc_data_structures/snapshot_vec.rs index 614e7aae74..dac074ab91 100644 --- a/src/librustc_data_structures/snapshot_vec.rs +++ b/src/librustc_data_structures/snapshot_vec.rs @@ -213,3 +213,11 @@ impl ops::IndexMut for SnapshotVec { self.get_mut(index) } } + +impl Extend for SnapshotVec { + fn extend(&mut self, iterable: T) where T: IntoIterator { + for item in iterable { + self.push(item); + } + } +} diff --git a/src/librustc_driver/driver.rs b/src/librustc_driver/driver.rs index 67c52bb6c3..1f3df1ff6f 100644 --- a/src/librustc_driver/driver.rs +++ b/src/librustc_driver/driver.rs @@ -10,7 +10,8 @@ use rustc::dep_graph::DepGraph; use rustc::hir; -use rustc::hir::map as hir_map; +use rustc::hir::{map as hir_map, FreevarMap, TraitMap}; +use rustc::hir::def::DefMap; use rustc_mir as mir; use rustc::mir::mir_map::MirMap; use rustc::session::{Session, CompileResult, compile_result_from_err_count}; @@ -27,7 +28,7 @@ use rustc_borrowck as borrowck; use rustc_incremental; use rustc_resolve as resolve; use rustc_metadata::macro_import; -use rustc_metadata::creader::LocalCrateReader; +use rustc_metadata::creader::read_local_crates; use rustc_metadata::cstore::CStore; use rustc_trans::back::link; use rustc_trans::back::write; @@ -36,13 +37,14 @@ use rustc_typeck as typeck; use rustc_privacy; use rustc_plugin::registry::Registry; use rustc_plugin as plugin; -use rustc::hir::lowering::{lower_crate, LoweringContext}; -use rustc_passes::{no_asm, loops, consts, const_fn, rvalues, static_recursion}; +use rustc::hir::lowering::lower_crate; +use rustc_passes::{no_asm, loops, consts, rvalues, static_recursion}; use rustc_const_eval::check_match; use super::Compilation; use serialize::json; +use std::cell::RefCell; use std::collections::HashMap; use std::env; use std::ffi::{OsString, OsStr}; @@ -59,6 +61,14 @@ use syntax::visit; use syntax; use syntax_ext; +#[derive(Clone)] +pub struct Resolutions { + pub def_map: RefCell, + pub freevars: FreevarMap, + pub trait_map: TraitMap, + pub maybe_unused_trait_imports: NodeSet, +} + pub fn compile_input(sess: &Session, cstore: &CStore, cfg: ast::CrateConfig, @@ -69,7 
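The `Extend` impl added to `SnapshotVec` is the usual delegation trick: any collection whose `push` already does the right bookkeeping (here, writing the undo log) can accept whole iterators by looping. The same shape on a stand-in wrapper type:

```rust
// Sketch only: `Recorder` is a stand-in, not SnapshotVec.
struct Recorder<T> {
    data: Vec<T>,
}

impl<T> Recorder<T> {
    fn push(&mut self, item: T) {
        // In SnapshotVec this is also the point where the undo log is written.
        self.data.push(item);
    }
}

impl<T> Extend<T> for Recorder<T> {
    fn extend<I: IntoIterator<Item = T>>(&mut self, iter: I) {
        for item in iter {
            self.push(item);
        }
    }
}
```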
+79,7 @@ pub fn compile_input(sess: &Session, control: &CompileController) -> CompileResult { macro_rules! controller_entry_point { ($point: ident, $tsess: expr, $make_state: expr, $phase_result: expr) => {{ - let state = $make_state; + let state = &mut $make_state; let phase_result: &CompileResult = &$phase_result; if phase_result.is_ok() || control.$point.run_callback_on_error { (control.$point.callback)(state); @@ -94,17 +104,24 @@ pub fn compile_input(sess: &Session, } }; + let mut compile_state = CompileState::state_after_parse(input, + sess, + outdir, + output, + krate, + &cstore); controller_entry_point!(after_parse, sess, - CompileState::state_after_parse(input, sess, outdir, &krate), + compile_state, Ok(())); + let krate = compile_state.krate.unwrap(); let outputs = build_output_filenames(input, outdir, output, &krate.attrs, sess); let id = link::find_crate_name(Some(sess), &krate.attrs, input); let expanded_crate = phase_2_configure_and_expand(sess, &cstore, krate, - &id[..], + &id, addl_plugins)?; (outputs, expanded_crate, id) @@ -115,42 +132,74 @@ pub fn compile_input(sess: &Session, CompileState::state_after_expand(input, sess, outdir, + output, + &cstore, &expanded_crate, - &id[..]), + &id), + Ok(())); + + write_out_deps(sess, &outputs, &id); + + controller_entry_point!(after_write_deps, + sess, + CompileState::state_after_write_deps(input, + sess, + outdir, + output, + &cstore, + &expanded_crate, + &id), Ok(())); let expanded_crate = assign_node_ids(sess, expanded_crate); - // Lower ast -> hir. - let lcx = LoweringContext::new(sess, Some(&expanded_crate)); - let dep_graph = DepGraph::new(sess.opts.build_dep_graph()); - let mut hir_forest = time(sess.time_passes(), - "lowering ast -> hir", - || hir_map::Forest::new(lower_crate(&lcx, &expanded_crate), - dep_graph)); + + // Collect defintions for def ids. + let mut defs = time(sess.time_passes(), + "collecting defs", + || hir_map::collect_definitions(&expanded_crate)); + + time(sess.time_passes(), + "external crate/lib resolution", + || read_local_crates(sess, &cstore, &defs, &expanded_crate, &id, &sess.dep_graph)); + + time(sess.time_passes(), + "early lint checks", + || lint::check_ast_crate(sess, &expanded_crate)); + + let (analysis, resolutions, mut hir_forest) = { + lower_and_resolve(sess, &id, &mut defs, &expanded_crate, + &sess.dep_graph, control.make_glob_map) + }; // Discard MTWT tables that aren't required past lowering to HIR. 
- if !sess.opts.debugging_opts.keep_mtwt_tables && - !sess.opts.debugging_opts.save_analysis { + if !keep_mtwt_tables(sess) { syntax::ext::mtwt::clear_tables(); } let arenas = ty::CtxtArenas::new(); - let hir_map = make_map(sess, &mut hir_forest); - write_out_deps(sess, &outputs, &id); + // Construct the HIR map + let hir_forest = &mut hir_forest; + let hir_map = time(sess.time_passes(), + "indexing hir", + move || hir_map::map_crate(hir_forest, defs)); { let _ignore = hir_map.dep_graph.in_ignore(); - controller_entry_point!(after_write_deps, + controller_entry_point!(after_hir_lowering, sess, - CompileState::state_after_write_deps(input, - sess, - outdir, - &hir_map, - &expanded_crate, - &hir_map.krate(), - &id[..], - &lcx), + CompileState::state_after_hir_lowering(input, + sess, + outdir, + output, + &arenas, + &cstore, + &hir_map, + &analysis, + &resolutions, + &expanded_crate, + &hir_map.krate(), + &id), Ok(())); } @@ -158,12 +207,7 @@ pub fn compile_input(sess: &Session, hir::check_attr::check_crate(sess, &expanded_crate); }); - time(sess.time_passes(), - "early lint checks", - || lint::check_ast_crate(sess, &expanded_crate)); - - let opt_crate = if sess.opts.debugging_opts.keep_ast || - sess.opts.debugging_opts.save_analysis { + let opt_crate = if keep_ast(sess) { Some(&expanded_crate) } else { drop(expanded_crate); @@ -171,30 +215,30 @@ pub fn compile_input(sess: &Session, }; phase_3_run_analysis_passes(sess, - &cstore, hir_map, + analysis, + resolutions, &arenas, &id, - control.make_glob_map, |tcx, mir_map, analysis, result| { { // Eventually, we will want to track plugins. let _ignore = tcx.dep_graph.in_ignore(); - let state = CompileState::state_after_analysis(input, - &tcx.sess, - outdir, - opt_crate, - tcx.map.krate(), - &analysis, - mir_map.as_ref(), - tcx, - &lcx, - &id); - (control.after_analysis.callback)(state); + let mut state = CompileState::state_after_analysis(input, + sess, + outdir, + output, + opt_crate, + tcx.map.krate(), + &analysis, + mir_map.as_ref(), + tcx, + &id); + (control.after_analysis.callback)(&mut state); if control.after_analysis.stop == Compilation::Stop { - return Err(0usize); + return result.and_then(|_| Err(0usize)); } } @@ -224,7 +268,7 @@ pub fn compile_input(sess: &Session, controller_entry_point!(after_llvm, sess, - CompileState::state_after_llvm(input, sess, outdir, &trans), + CompileState::state_after_llvm(input, sess, outdir, output, &trans), phase5_result); phase5_result?; @@ -233,6 +277,16 @@ pub fn compile_input(sess: &Session, Ok(()) } +fn keep_mtwt_tables(sess: &Session) -> bool { + sess.opts.debugging_opts.keep_mtwt_tables +} + +fn keep_ast(sess: &Session) -> bool { + sess.opts.debugging_opts.keep_ast || + sess.opts.debugging_opts.save_analysis || + sess.opts.debugging_opts.save_analysis_csv +} + /// The name used for source code that doesn't originate in a file /// (e.g. 
source from stdin or a string) pub fn anon_src() -> String { @@ -265,6 +319,7 @@ pub struct CompileController<'a> { pub after_parse: PhaseController<'a>, pub after_expand: PhaseController<'a>, pub after_write_deps: PhaseController<'a>, + pub after_hir_lowering: PhaseController<'a>, pub after_analysis: PhaseController<'a>, pub after_llvm: PhaseController<'a>, @@ -277,6 +332,7 @@ impl<'a> CompileController<'a> { after_parse: PhaseController::basic(), after_expand: PhaseController::basic(), after_write_deps: PhaseController::basic(), + after_hir_lowering: PhaseController::basic(), after_analysis: PhaseController::basic(), after_llvm: PhaseController::basic(), make_glob_map: resolve::MakeGlobMap::No, @@ -289,7 +345,7 @@ pub struct PhaseController<'a> { // If true then the compiler will try to run the callback even if the phase // ends with an error. Note that this is not always possible. pub run_callback_on_error: bool, - pub callback: Box () + 'a>, + pub callback: Box, } impl<'a> PhaseController<'a> { @@ -305,118 +361,163 @@ impl<'a> PhaseController<'a> { /// State that is passed to a callback. What state is available depends on when /// during compilation the callback is made. See the various constructor methods /// (`state_*`) in the impl to see which data is provided for any given entry point. -pub struct CompileState<'a, 'ast: 'a, 'tcx: 'a> { +pub struct CompileState<'a, 'b, 'ast: 'a, 'tcx: 'b> where 'ast: 'tcx { pub input: &'a Input, - pub session: &'a Session, - pub cfg: Option<&'a ast::CrateConfig>, - pub krate: Option<&'a ast::Crate>, + pub session: &'ast Session, + pub krate: Option, + pub cstore: Option<&'a CStore>, pub crate_name: Option<&'a str>, pub output_filenames: Option<&'a OutputFilenames>, pub out_dir: Option<&'a Path>, + pub out_file: Option<&'a Path>, + pub arenas: Option<&'ast ty::CtxtArenas<'ast>>, pub expanded_crate: Option<&'a ast::Crate>, pub hir_crate: Option<&'a hir::Crate>, pub ast_map: Option<&'a hir_map::Map<'ast>>, - pub mir_map: Option<&'a MirMap<'tcx>>, + pub resolutions: Option<&'a Resolutions>, + pub mir_map: Option<&'b MirMap<'tcx>>, pub analysis: Option<&'a ty::CrateAnalysis<'a>>, - pub tcx: Option<&'a TyCtxt<'tcx>>, - pub lcx: Option<&'a LoweringContext<'a>>, + pub tcx: Option>, pub trans: Option<&'a trans::CrateTranslation>, } -impl<'a, 'ast, 'tcx> CompileState<'a, 'ast, 'tcx> { +impl<'a, 'b, 'ast, 'tcx> CompileState<'a, 'b, 'ast, 'tcx> { fn empty(input: &'a Input, - session: &'a Session, + session: &'ast Session, out_dir: &'a Option) - -> CompileState<'a, 'ast, 'tcx> { + -> CompileState<'a, 'b, 'ast, 'tcx> { CompileState { input: input, session: session, out_dir: out_dir.as_ref().map(|s| &**s), - cfg: None, + out_file: None, + arenas: None, krate: None, + cstore: None, crate_name: None, output_filenames: None, expanded_crate: None, hir_crate: None, ast_map: None, + resolutions: None, analysis: None, mir_map: None, tcx: None, - lcx: None, trans: None, } } fn state_after_parse(input: &'a Input, - session: &'a Session, + session: &'ast Session, out_dir: &'a Option, - krate: &'a ast::Crate) - -> CompileState<'a, 'ast, 'tcx> { - CompileState { krate: Some(krate), ..CompileState::empty(input, session, out_dir) } + out_file: &'a Option, + krate: ast::Crate, + cstore: &'a CStore) + -> CompileState<'a, 'b, 'ast, 'tcx> { + CompileState { + krate: Some(krate), + cstore: Some(cstore), + out_file: out_file.as_ref().map(|s| &**s), + ..CompileState::empty(input, session, out_dir) + } } fn state_after_expand(input: &'a Input, - session: &'a Session, + session: &'ast 
Session, out_dir: &'a Option, + out_file: &'a Option, + cstore: &'a CStore, expanded_crate: &'a ast::Crate, crate_name: &'a str) - -> CompileState<'a, 'ast, 'tcx> { + -> CompileState<'a, 'b, 'ast, 'tcx> { CompileState { crate_name: Some(crate_name), + cstore: Some(cstore), expanded_crate: Some(expanded_crate), + out_file: out_file.as_ref().map(|s| &**s), ..CompileState::empty(input, session, out_dir) } } fn state_after_write_deps(input: &'a Input, - session: &'a Session, + session: &'ast Session, out_dir: &'a Option, - hir_map: &'a hir_map::Map<'ast>, + out_file: &'a Option, + cstore: &'a CStore, krate: &'a ast::Crate, - hir_crate: &'a hir::Crate, - crate_name: &'a str, - lcx: &'a LoweringContext<'a>) - -> CompileState<'a, 'ast, 'tcx> { + crate_name: &'a str) + -> CompileState<'a, 'b, 'ast, 'tcx> { + CompileState { + crate_name: Some(crate_name), + cstore: Some(cstore), + expanded_crate: Some(krate), + out_file: out_file.as_ref().map(|s| &**s), + ..CompileState::empty(input, session, out_dir) + } + } + + fn state_after_hir_lowering(input: &'a Input, + session: &'ast Session, + out_dir: &'a Option, + out_file: &'a Option, + arenas: &'ast ty::CtxtArenas<'ast>, + cstore: &'a CStore, + hir_map: &'a hir_map::Map<'ast>, + analysis: &'a ty::CrateAnalysis, + resolutions: &'a Resolutions, + krate: &'a ast::Crate, + hir_crate: &'a hir::Crate, + crate_name: &'a str) + -> CompileState<'a, 'b, 'ast, 'tcx> { CompileState { crate_name: Some(crate_name), + arenas: Some(arenas), + cstore: Some(cstore), ast_map: Some(hir_map), - krate: Some(krate), + analysis: Some(analysis), + resolutions: Some(resolutions), + expanded_crate: Some(krate), hir_crate: Some(hir_crate), - lcx: Some(lcx), + out_file: out_file.as_ref().map(|s| &**s), ..CompileState::empty(input, session, out_dir) } } fn state_after_analysis(input: &'a Input, - session: &'a Session, + session: &'ast Session, out_dir: &'a Option, + out_file: &'a Option, krate: Option<&'a ast::Crate>, hir_crate: &'a hir::Crate, - analysis: &'a ty::CrateAnalysis, - mir_map: Option<&'a MirMap<'tcx>>, - tcx: &'a TyCtxt<'tcx>, - lcx: &'a LoweringContext<'a>, + analysis: &'a ty::CrateAnalysis<'a>, + mir_map: Option<&'b MirMap<'tcx>>, + tcx: TyCtxt<'b, 'tcx, 'tcx>, crate_name: &'a str) - -> CompileState<'a, 'ast, 'tcx> { + -> CompileState<'a, 'b, 'ast, 'tcx> { CompileState { analysis: Some(analysis), mir_map: mir_map, tcx: Some(tcx), - krate: krate, + expanded_crate: krate, hir_crate: Some(hir_crate), - lcx: Some(lcx), crate_name: Some(crate_name), + out_file: out_file.as_ref().map(|s| &**s), ..CompileState::empty(input, session, out_dir) } } fn state_after_llvm(input: &'a Input, - session: &'a Session, + session: &'ast Session, out_dir: &'a Option, + out_file: &'a Option, trans: &'a trans::CrateTranslation) - -> CompileState<'a, 'ast, 'tcx> { - CompileState { trans: Some(trans), ..CompileState::empty(input, session, out_dir) } + -> CompileState<'a, 'b, 'ast, 'tcx> { + CompileState { + trans: Some(trans), + out_file: out_file.as_ref().map(|s| &**s), + ..CompileState::empty(input, session, out_dir) + } } } @@ -512,19 +613,13 @@ pub fn phase_2_configure_and_expand(sess: &Session, middle::recursion_limit::update_recursion_limit(sess, &krate); }); - time(time_passes, "gated macro checking", || { - sess.track_errors(|| { - let features = - syntax::feature_gate::check_crate_macros(sess.codemap(), - &sess.parse_sess.span_diagnostic, - &krate); - - // these need to be set "early" so that expansion sees `quote` if enabled. 
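Each `state_after_*` constructor fills in only what is available at that phase and defers the rest to `CompileState::empty` via struct update syntax, so adding a field to the state means touching a single place. The pattern on a toy state type:

```rust
// Sketch only: a stand-in for CompileState with made-up fields.
struct PhaseState<'a> {
    input: &'a str,
    krate: Option<&'a str>,
    analysis: Option<&'a str>,
    out_file: Option<&'a str>,
}

impl<'a> PhaseState<'a> {
    fn empty(input: &'a str) -> PhaseState<'a> {
        PhaseState {
            input: input,
            krate: None,
            analysis: None,
            out_file: None,
        }
    }

    fn after_parse(input: &'a str, krate: &'a str) -> PhaseState<'a> {
        PhaseState { krate: Some(krate), ..PhaseState::empty(input) }
    }

    fn after_analysis(input: &'a str, krate: &'a str, analysis: &'a str) -> PhaseState<'a> {
        PhaseState {
            krate: Some(krate),
            analysis: Some(analysis),
            ..PhaseState::empty(input)
        }
    }
}
```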
- *sess.features.borrow_mut() = features; - }) + // these need to be set "early" so that expansion sees `quote` if enabled. + sess.track_errors(|| { + *sess.features.borrow_mut() = + syntax::feature_gate::get_features(&sess.parse_sess.span_diagnostic, + &krate); })?; - krate = time(time_passes, "crate injection", || { syntax::std_inject::maybe_inject_crates_ref(krate, sess.opts.alt_std_name.clone()) }); @@ -625,21 +720,6 @@ pub fn phase_2_configure_and_expand(sess: &Session, ret }); - // Needs to go *after* expansion to be able to check the results - // of macro expansion. This runs before #[cfg] to try to catch as - // much as possible (e.g. help the programmer avoid platform - // specific differences) - time(time_passes, "complete gated feature checking 1", || { - sess.track_errors(|| { - let features = syntax::feature_gate::check_crate(sess.codemap(), - &sess.parse_sess.span_diagnostic, - &krate, - &attributes, - sess.opts.unstable_features); - *sess.features.borrow_mut() = features; - }) - })?; - // JBC: make CFG processing part of expansion to avoid this problem: // strip again, in case expansion added anything with a #[cfg]. @@ -678,10 +758,8 @@ pub fn phase_2_configure_and_expand(sess: &Session, "checking for inline asm in case the target doesn't support it", || no_asm::check_crate(sess, &krate)); - // One final feature gating of the true AST that gets compiled - // later, to make sure we've got everything (e.g. configuration - // can insert new attributes via `cfg_attr`) - time(time_passes, "complete gated feature checking 2", || { + // Needs to go *after* expansion to be able to check the results of macro expansion. + time(time_passes, "complete gated feature checking", || { sess.track_errors(|| { let features = syntax::feature_gate::check_crate(sess.codemap(), &sess.parse_sess.span_diagnostic, @@ -692,10 +770,6 @@ pub fn phase_2_configure_and_expand(sess: &Session, }) })?; - time(time_passes, - "const fn bodies and arguments", - || const_fn::check_crate(sess, &krate))?; - if sess.opts.debugging_opts.input_stats { println!("Post-expansion node count: {}", count_nodes(&krate)); } @@ -726,27 +800,53 @@ pub fn assign_node_ids(sess: &Session, krate: ast::Crate) -> ast::Crate { krate } -pub fn make_map<'ast>(sess: &Session, - forest: &'ast mut hir_map::Forest) - -> hir_map::Map<'ast> { - // Construct the HIR map - time(sess.time_passes(), - "indexing hir", - move || hir_map::map_crate(forest)) +pub fn lower_and_resolve<'a>(sess: &Session, + id: &'a str, + defs: &mut hir_map::Definitions, + krate: &ast::Crate, + dep_graph: &DepGraph, + make_glob_map: resolve::MakeGlobMap) + -> (ty::CrateAnalysis<'a>, Resolutions, hir_map::Forest) { + resolve::with_resolver(sess, defs, make_glob_map, |mut resolver| { + time(sess.time_passes(), "name resolution", || { + resolve::resolve_crate(&mut resolver, krate); + }); + + // Lower ast -> hir. 
+ let hir_forest = time(sess.time_passes(), "lowering ast -> hir", || { + hir_map::Forest::new(lower_crate(sess, krate, sess, &mut resolver), dep_graph) + }); + + (ty::CrateAnalysis { + export_map: resolver.export_map, + access_levels: AccessLevels::default(), + reachable: NodeSet(), + name: &id, + glob_map: if resolver.make_glob_map { Some(resolver.glob_map) } else { None }, + }, Resolutions { + def_map: RefCell::new(resolver.def_map), + freevars: resolver.freevars, + trait_map: resolver.trait_map, + maybe_unused_trait_imports: resolver.maybe_unused_trait_imports, + }, hir_forest) + }) } /// Run the resolution, typechecking, region checking and other /// miscellaneous analysis passes on the crate. Return various /// structures carrying the results of the analysis. pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, - cstore: &CStore, hir_map: hir_map::Map<'tcx>, + mut analysis: ty::CrateAnalysis, + resolutions: Resolutions, arenas: &'tcx ty::CtxtArenas<'tcx>, name: &str, - make_glob_map: resolve::MakeGlobMap, f: F) -> Result - where F: FnOnce(&TyCtxt<'tcx>, Option>, ty::CrateAnalysis, CompileResult) -> R + where F: for<'a> FnOnce(TyCtxt<'a, 'tcx, 'tcx>, + Option>, + ty::CrateAnalysis, + CompileResult) -> R { macro_rules! try_with_f { ($e: expr, ($t: expr, $m: expr, $a: expr)) => { @@ -762,39 +862,17 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, let time_passes = sess.time_passes(); - time(time_passes, - "external crate/lib resolution", - || LocalCrateReader::new(sess, cstore, &hir_map, name).read_crates()); - let lang_items = time(time_passes, "language item collection", || { sess.track_errors(|| { middle::lang_items::collect_language_items(&sess, &hir_map) }) })?; - let resolve::CrateMap { - def_map, - freevars, - export_map, - trait_map, - glob_map, - } = time(time_passes, - "resolution", - || resolve::resolve_crate(sess, &hir_map, make_glob_map)); - - let mut analysis = ty::CrateAnalysis { - export_map: export_map, - access_levels: AccessLevels::default(), - reachable: NodeSet(), - name: name, - glob_map: glob_map, - }; - let named_region_map = time(time_passes, "lifetime resolution", || middle::resolve_lifetime::krate(sess, &hir_map, - &def_map.borrow()))?; + &resolutions.def_map.borrow()))?; time(time_passes, "looking for entry point", @@ -814,21 +892,23 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, time(time_passes, "static item recursion checking", - || static_recursion::check_crate(sess, &def_map.borrow(), &hir_map))?; + || static_recursion::check_crate(sess, &resolutions.def_map.borrow(), &hir_map))?; let index = stability::Index::new(&hir_map); + let trait_map = resolutions.trait_map; TyCtxt::create_and_enter(sess, - arenas, - def_map, - named_region_map, - hir_map, - freevars, - region_map, - lang_items, - index, - name, - |tcx| { + arenas, + resolutions.def_map, + named_region_map, + hir_map, + resolutions.freevars, + resolutions.maybe_unused_trait_imports, + region_map, + lang_items, + index, + name, + |tcx| { time(time_passes, "load_dep_graph", || rustc_incremental::load_dep_graph(tcx)); @@ -883,6 +963,7 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, let mut passes = sess.mir_passes.borrow_mut(); // Push all the built-in passes. 
passes.push_pass(box mir::transform::remove_dead_blocks::RemoveDeadBlocks); + passes.push_pass(box mir::transform::qualify_consts::QualifyAndPromoteConstants); passes.push_pass(box mir::transform::type_check::TypeckMir); passes.push_pass(box mir::transform::simplify_cfg::SimplifyCfg); passes.push_pass(box mir::transform::remove_dead_blocks::RemoveDeadBlocks); @@ -937,9 +1018,10 @@ pub fn phase_3_run_analysis_passes<'tcx, F, R>(sess: &'tcx Session, } /// Run the translation phase to LLVM, after which the AST and analysis can -pub fn phase_4_translate_to_llvm<'tcx>(tcx: &TyCtxt<'tcx>, - mut mir_map: MirMap<'tcx>, - analysis: ty::CrateAnalysis) -> trans::CrateTranslation { +pub fn phase_4_translate_to_llvm<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + mut mir_map: MirMap<'tcx>, + analysis: ty::CrateAnalysis) + -> trans::CrateTranslation { let time_passes = tcx.sess.time_passes(); time(time_passes, @@ -953,7 +1035,7 @@ pub fn phase_4_translate_to_llvm<'tcx>(tcx: &TyCtxt<'tcx>, passes.push_pass(box mir::transform::no_landing_pads::NoLandingPads); passes.push_pass(box mir::transform::remove_dead_blocks::RemoveDeadBlocks); passes.push_pass(box mir::transform::erase_regions::EraseRegions); - passes.push_pass(box mir::transform::break_critical_edges::BreakCriticalEdges); + passes.push_pass(box mir::transform::break_cleanup_edges::BreakCleanupEdges); passes.run_passes(tcx, &mut mir_map); }); @@ -1092,6 +1174,9 @@ pub fn collect_crate_types(session: &Session, attrs: &[ast::Attribute]) -> Vec { Some(config::CrateTypeDylib) } + Some(ref n) if *n == "cdylib" => { + Some(config::CrateTypeCdylib) + } Some(ref n) if *n == "lib" => { Some(config::default_lib_output()) } diff --git a/src/librustc_driver/lib.rs b/src/librustc_driver/lib.rs index 769449b96d..06133c508d 100644 --- a/src/librustc_driver/lib.rs +++ b/src/librustc_driver/lib.rs @@ -31,6 +31,7 @@ #![feature(set_stdio)] #![feature(staged_api)] #![feature(question_mark)] +#![feature(unboxed_closures)] extern crate arena; extern crate flate; @@ -66,10 +67,10 @@ use pretty::{PpMode, UserIdentifiedItem}; use rustc_resolve as resolve; use rustc_save_analysis as save; use rustc_trans::back::link; -use rustc::session::{config, Session, build_session, CompileResult}; +use rustc::dep_graph::DepGraph; +use rustc::session::{self, config, Session, build_session, CompileResult}; use rustc::session::config::{Input, PrintRequest, OutputType, ErrorOutputType}; use rustc::session::config::{get_unstable_features_setting, nightly_options}; -use rustc::middle::cstore::CrateStore; use rustc::lint::Lint; use rustc::lint; use rustc_metadata::loader; @@ -91,13 +92,11 @@ use std::thread; use rustc::session::early_error; -use syntax::ast; -use syntax::parse::{self, PResult}; -use syntax::errors; +use syntax::{ast, errors, diagnostics}; +use syntax::codemap::{CodeMap, FileLoader, RealFileLoader, MultiSpan}; use syntax::errors::emitter::Emitter; -use syntax::diagnostics; -use syntax::parse::token; use syntax::feature_gate::{GatedCfg, UnstableFeatures}; +use syntax::parse::{self, PResult, token}; #[cfg(test)] pub mod test; @@ -138,7 +137,8 @@ pub fn run(args: Vec) -> isize { None => { let mut emitter = errors::emitter::BasicEmitter::stderr(errors::ColorConfig::Auto); - emitter.emit(None, &abort_msg(err_count), None, errors::Level::Fatal); + emitter.emit(&MultiSpan::new(), &abort_msg(err_count), None, + errors::Level::Fatal); exit_on_err(); } } @@ -148,11 +148,20 @@ pub fn run(args: Vec) -> isize { 0 } -// Parse args and run the compiler. This is the primary entry point for rustc. 
-// See comments on CompilerCalls below for details about the callbacks argument. pub fn run_compiler<'a>(args: &[String], callbacks: &mut CompilerCalls<'a>) -> (CompileResult, Option) { + run_compiler_with_file_loader(args, callbacks, box RealFileLoader) +} + +// Parse args and run the compiler. This is the primary entry point for rustc. +// See comments on CompilerCalls below for details about the callbacks argument. +// The FileLoader provides a way to load files from sources other than the file system. +pub fn run_compiler_with_file_loader<'a, L>(args: &[String], + callbacks: &mut CompilerCalls<'a>, + loader: Box) + -> (CompileResult, Option) + where L: FileLoader + 'static { macro_rules! do_or_return {($expr: expr, $sess: expr) => { match $expr { Compilation::Stop => return (Ok(()), $sess), @@ -188,29 +197,23 @@ pub fn run_compiler<'a>(args: &[String], }, }; - let cstore = Rc::new(CStore::new(token::get_ident_interner())); - let sess = build_session(sopts, input_file_path, descriptions, cstore.clone()); + let dep_graph = DepGraph::new(sopts.build_dep_graph()); + let cstore = Rc::new(CStore::new(&dep_graph, token::get_ident_interner())); + let codemap = Rc::new(CodeMap::with_file_loader(loader)); + let sess = session::build_session_with_codemap(sopts, + &dep_graph, + input_file_path, + descriptions, + cstore.clone(), + codemap); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let mut cfg = config::build_configuration(&sess); target_features::add_configuration(&mut cfg, &sess); do_or_return!(callbacks.late_callback(&matches, &sess, &input, &odir, &ofile), Some(sess)); - // It is somewhat unfortunate that this is hardwired in - this is forced by - // the fact that pretty_print_input requires the session by value. - let pretty = callbacks.parse_pretty(&sess, &matches); - match pretty { - Some((ppm, opt_uii)) => { - pretty::pretty_print_input(sess, &cstore, cfg, &input, ppm, opt_uii, ofile); - return (Ok(()), None); - } - None => { - // continue - } - } - let plugins = sess.opts.debugging_opts.extra_plugins.clone(); - let control = callbacks.build_controller(&sess); + let control = callbacks.build_controller(&sess, &matches); (driver::compile_input(&sess, &cstore, cfg, &input, &odir, &ofile, Some(plugins), &control), Some(sess)) @@ -241,6 +244,27 @@ fn make_input(free_matches: &[String]) -> Option<(Input, Option)> { } } +fn parse_pretty(sess: &Session, + matches: &getopts::Matches) + -> Option<(PpMode, Option)> { + let pretty = if sess.opts.debugging_opts.unstable_options { + matches.opt_default("pretty", "normal").map(|a| { + // stable pretty-print variants only + pretty::parse_pretty(sess, &a, false) + }) + } else { + None + }; + if pretty.is_none() && sess.unstable_options() { + matches.opt_str("unpretty").map(|a| { + // extended with unstable pretty-print variants + pretty::parse_pretty(sess, &a, true) + }) + } else { + pretty + } +} + // Whether to stop or continue compilation. #[derive(Copy, Clone, Debug, Eq, PartialEq)] pub enum Compilation { @@ -310,29 +334,9 @@ pub trait CompilerCalls<'a> { None } - // Parse pretty printing information from the arguments. The implementer can - // choose to ignore this (the default will return None) which will skip pretty - // printing. If you do want to pretty print, it is recommended to use the - // implementation of this method from RustcDefaultCalls. - // FIXME, this is a terrible bit of API. 
Parsing of pretty printing stuff - // should be done as part of the framework and the implementor should customise - // handling of it. However, that is not possible atm because pretty printing - // essentially goes off and takes another path through the compiler which - // means the session is either moved or not depending on what parse_pretty - // returns (we could fix this by cloning, but it's another hack). The proper - // solution is to handle pretty printing as if it were a compiler extension, - // extending CompileController to make this work (see for example the treatment - // of save-analysis in RustcDefaultCalls::build_controller). - fn parse_pretty(&mut self, - _sess: &Session, - _matches: &getopts::Matches) - -> Option<(PpMode, Option)> { - None - } - // Create a CompilController struct for controlling the behaviour of // compilation. - fn build_controller(&mut self, &Session) -> CompileController<'a>; + fn build_controller(&mut self, &Session, &getopts::Matches) -> CompileController<'a>; } // CompilerCalls instance for a regular rustc build. @@ -350,7 +354,13 @@ fn handle_explain(code: &str, match descriptions.find_description(&normalised) { Some(ref description) => { // Slice off the leading newline and print. - print!("{}", &description[1..]); + print!("{}", &(&description[1..]).split("\n").map(|x| { + format!("{}\n", if x.starts_with("```") { + "```" + } else { + x + }) + }).collect::()); } None => { early_error(output, &format!("no extended information for {}", code)); @@ -372,7 +382,7 @@ fn check_cfg(sopts: &config::Options, match item.node { ast::MetaItemKind::List(ref pred, _) => { saw_invalid_predicate = true; - emitter.emit(None, + emitter.emit(&MultiSpan::new(), &format!("invalid predicate in --cfg command line argument: `{}`", pred), None, @@ -418,9 +428,13 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { describe_lints(&ls, false); return None; } - let cstore = Rc::new(CStore::new(token::get_ident_interner())); - let sess = build_session(sopts.clone(), None, descriptions.clone(), - cstore.clone()); + let dep_graph = DepGraph::new(sopts.build_dep_graph()); + let cstore = Rc::new(CStore::new(&dep_graph, token::get_ident_interner())); + let sess = build_session(sopts.clone(), + &dep_graph, + None, + descriptions.clone(), + cstore.clone()); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let should_stop = RustcDefaultCalls::print_crate_info(&sess, None, odir, ofile); if should_stop == Compilation::Stop { @@ -435,28 +449,6 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { None } - fn parse_pretty(&mut self, - sess: &Session, - matches: &getopts::Matches) - -> Option<(PpMode, Option)> { - let pretty = if sess.opts.debugging_opts.unstable_options { - matches.opt_default("pretty", "normal").map(|a| { - // stable pretty-print variants only - pretty::parse_pretty(sess, &a, false) - }) - } else { - None - }; - if pretty.is_none() && sess.unstable_options() { - matches.opt_str("unpretty").map(|a| { - // extended with unstable pretty-print variants - pretty::parse_pretty(sess, &a, true) - }) - } else { - pretty - } - } - fn late_callback(&mut self, matches: &getopts::Matches, sess: &Session, @@ -468,9 +460,48 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { .and_then(|| RustcDefaultCalls::list_metadata(sess, matches, input)) } - fn build_controller(&mut self, sess: &Session) -> CompileController<'a> { + fn build_controller(&mut self, + sess: &Session, + matches: &getopts::Matches) + -> CompileController<'a> { let mut control = 
CompileController::basic(); + if let Some((ppm, opt_uii)) = parse_pretty(sess, matches) { + if ppm.needs_ast_map(&opt_uii) { + control.after_hir_lowering.stop = Compilation::Stop; + + control.after_parse.callback = box move |state| { + state.krate = Some(pretty::fold_crate(state.krate.take().unwrap(), ppm)); + }; + control.after_hir_lowering.callback = box move |state| { + pretty::print_after_hir_lowering(state.session, + state.ast_map.unwrap(), + state.analysis.unwrap(), + state.resolutions.unwrap(), + state.input, + &state.expanded_crate.take().unwrap(), + state.crate_name.unwrap(), + ppm, + state.arenas.unwrap(), + opt_uii.clone(), + state.out_file); + }; + } else { + control.after_parse.stop = Compilation::Stop; + + control.after_parse.callback = box move |state| { + let krate = pretty::fold_crate(state.krate.take().unwrap(), ppm); + pretty::print_after_parsing(state.session, + state.input, + &krate, + ppm, + state.out_file); + }; + } + + return control; + } + if sess.opts.parse_only || sess.opts.debugging_opts.show_span.is_some() || sess.opts.debugging_opts.ast_json_noexpand { control.after_parse.stop = Compilation::Stop; @@ -488,15 +519,15 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { control.after_llvm.stop = Compilation::Stop; } - if sess.opts.debugging_opts.save_analysis { + if save_analysis(sess) { control.after_analysis.callback = box |state| { time(state.session.time_passes(), "save analysis", || { save::process_crate(state.tcx.unwrap(), - state.lcx.unwrap(), - state.krate.unwrap(), + state.expanded_crate.unwrap(), state.analysis.unwrap(), state.crate_name.unwrap(), - state.out_dir) + state.out_dir, + save_analysis_format(state.session)) }); }; control.after_analysis.run_callback_on_error = true; @@ -507,6 +538,21 @@ impl<'a> CompilerCalls<'a> for RustcDefaultCalls { } } +fn save_analysis(sess: &Session) -> bool { + sess.opts.debugging_opts.save_analysis || + sess.opts.debugging_opts.save_analysis_csv +} + +fn save_analysis_format(sess: &Session) -> save::Format { + if sess.opts.debugging_opts.save_analysis { + save::Format::Json + } else if sess.opts.debugging_opts.save_analysis_csv { + save::Format::Csv + } else { + unreachable!(); + } +} + impl RustcDefaultCalls { pub fn list_metadata(sess: &Session, matches: &getopts::Matches, input: &Input) -> Compilation { let r = matches.opt_strs("Z"); @@ -1006,19 +1052,19 @@ pub fn monitor(f: F) { // a .span_bug or .bug call has already printed what // it wants to print. if !value.is::() { - emitter.emit(None, "unexpected panic", None, errors::Level::Bug); + emitter.emit(&MultiSpan::new(), "unexpected panic", None, errors::Level::Bug); } let xs = ["the compiler unexpectedly panicked. 
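`build_controller` now installs boxed callbacks that receive `&mut CompileState`, which is what lets the pretty-printing path `take()` the crate out of the state, fold it, and store the result back before deciding whether to stop. The control-flow skeleton of that pattern with made-up state and phase types, written with `Box::new` and `dyn` rather than the nightly `box` syntax the patch itself uses:

```rust
// Sketch only: State, Phase and Controller are stand-ins for the driver types.
struct State {
    krate: Option<String>,
}

struct Phase {
    stop: bool,
    callback: Box<dyn FnMut(&mut State)>,
}

struct Controller {
    after_parse: Phase,
}

fn demo() {
    let mut control = Controller {
        after_parse: Phase {
            stop: true,
            callback: Box::new(|state: &mut State| {
                // Take the crate out of the state, transform it, put it back.
                let krate = state.krate.take().unwrap();
                state.krate = Some(krate.to_uppercase());
            }),
        },
    };

    let mut state = State { krate: Some("demo crate".to_string()) };
    (control.after_parse.callback)(&mut state);
    assert!(control.after_parse.stop);
    assert_eq!(state.krate, Some("DEMO CRATE".to_string()));
}
```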
this is a bug.".to_string(), format!("we would appreciate a bug report: {}", BUG_REPORT_URL)]; for note in &xs { - emitter.emit(None, ¬e[..], None, errors::Level::Note) + emitter.emit(&MultiSpan::new(), ¬e[..], None, errors::Level::Note) } if match env::var_os("RUST_BACKTRACE") { Some(val) => &val != "0", None => false, } { - emitter.emit(None, + emitter.emit(&MultiSpan::new(), "run with `RUST_BACKTRACE=1` for a backtrace", None, errors::Level::Note); diff --git a/src/librustc_driver/pretty.rs b/src/librustc_driver/pretty.rs index cde5ba1985..8c84e561e3 100644 --- a/src/librustc_driver/pretty.rs +++ b/src/librustc_driver/pretty.rs @@ -15,20 +15,17 @@ pub use self::PpSourceMode::*; pub use self::PpMode::*; use self::NodesMatchingUII::*; -use rustc_trans::back::link; +use abort_on_err; +use driver::{self, Resolutions}; -use {driver, abort_on_err}; - -use rustc::dep_graph::DepGraph; use rustc::ty::{self, TyCtxt}; use rustc::cfg; use rustc::cfg::graphviz::LabelledCFG; +use rustc::dep_graph::DepGraph; use rustc::session::Session; use rustc::session::config::Input; use rustc_borrowck as borrowck; use rustc_borrowck::graphviz as borrowck_dot; -use rustc_resolve as resolve; -use rustc_metadata::cstore::CStore; use rustc_mir::pretty::write_mir_pretty; use rustc_mir::graphviz::write_mir_graphviz; @@ -47,13 +44,12 @@ use std::fs::File; use std::io::{self, Write}; use std::iter; use std::option; -use std::path::PathBuf; +use std::path::Path; use std::str::FromStr; use rustc::hir::map as hir_map; use rustc::hir::map::{blocks, NodePrinter}; use rustc::hir; -use rustc::hir::lowering::{lower_crate, LoweringContext}; use rustc::hir::print as pprust_hir; use rustc::mir::mir_map::MirMap; @@ -87,6 +83,32 @@ pub enum PpMode { PpmMirCFG, } +impl PpMode { + pub fn needs_ast_map(&self, opt_uii: &Option) -> bool { + match *self { + PpmSource(PpmNormal) | + PpmSource(PpmEveryBodyLoops) | + PpmSource(PpmIdentified) => opt_uii.is_some(), + + PpmSource(PpmExpanded) | + PpmSource(PpmExpandedIdentified) | + PpmSource(PpmExpandedHygiene) | + PpmHir(_) | + PpmMir | + PpmMirCFG | + PpmFlowGraph(_) => true, + PpmSource(PpmTyped) => panic!("invalid state"), + } + } + + pub fn needs_analysis(&self) -> bool { + match *self { + PpmMir | PpmMirCFG | PpmFlowGraph(_) => true, + _ => false, + } + } +} + pub fn parse_pretty(sess: &Session, name: &str, extended: bool) @@ -145,7 +167,7 @@ impl PpSourceMode { /// Constructs a `PrinterSupport` object and passes it to `f`. 
fn call_with_pp_support<'tcx, A, B, F>(&self, sess: &'tcx Session, - ast_map: Option>, + ast_map: Option<&hir_map::Map<'tcx>>, payload: B, f: F) -> A @@ -155,7 +177,7 @@ impl PpSourceMode { PpmNormal | PpmEveryBodyLoops | PpmExpanded => { let annotation = NoAnn { sess: sess, - ast_map: ast_map, + ast_map: ast_map.map(|m| m.clone()), }; f(&annotation, payload) } @@ -163,14 +185,14 @@ impl PpSourceMode { PpmIdentified | PpmExpandedIdentified => { let annotation = IdentifiedAnnotation { sess: sess, - ast_map: ast_map, + ast_map: ast_map.map(|m| m.clone()), }; f(&annotation, payload) } PpmExpandedHygiene => { let annotation = HygieneAnnotation { sess: sess, - ast_map: ast_map, + ast_map: ast_map.map(|m| m.clone()), }; f(&annotation, payload) } @@ -179,8 +201,9 @@ impl PpSourceMode { } fn call_with_pp_support_hir<'tcx, A, B, F>(&self, sess: &'tcx Session, - cstore: &CStore, ast_map: &hir_map::Map<'tcx>, + analysis: &ty::CrateAnalysis, + resolutions: &Resolutions, arenas: &'tcx ty::CtxtArenas<'tcx>, id: &str, payload: B, @@ -206,11 +229,11 @@ impl PpSourceMode { } PpmTyped => { abort_on_err(driver::phase_3_run_analysis_passes(sess, - cstore, ast_map.clone(), + analysis.clone(), + resolutions.clone(), arenas, id, - resolve::MakeGlobMap::No, |tcx, _, _, _| { let annotation = TypedAnnotation { tcx: tcx, @@ -446,7 +469,7 @@ impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> { struct TypedAnnotation<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, } impl<'b, 'tcx> HirPrinterSupport<'tcx> for TypedAnnotation<'b, 'tcx> { @@ -582,40 +605,6 @@ impl UserIdentifiedItem { } } -fn needs_ast_map(ppm: &PpMode, opt_uii: &Option) -> bool { - match *ppm { - PpmSource(PpmNormal) | - PpmSource(PpmEveryBodyLoops) | - PpmSource(PpmIdentified) => opt_uii.is_some(), - - PpmSource(PpmExpanded) | - PpmSource(PpmExpandedIdentified) | - PpmSource(PpmExpandedHygiene) | - PpmHir(_) | - PpmMir | - PpmMirCFG | - PpmFlowGraph(_) => true, - PpmSource(PpmTyped) => panic!("invalid state"), - } -} - -fn needs_expansion(ppm: &PpMode) -> bool { - match *ppm { - PpmSource(PpmNormal) | - PpmSource(PpmEveryBodyLoops) | - PpmSource(PpmIdentified) => false, - - PpmSource(PpmExpanded) | - PpmSource(PpmExpandedIdentified) | - PpmSource(PpmExpandedHygiene) | - PpmHir(_) | - PpmMir | - PpmMirCFG | - PpmFlowGraph(_) => true, - PpmSource(PpmTyped) => panic!("invalid state"), - } -} - struct ReplaceBodyWithLoop { within_static_or_const: bool, } @@ -700,87 +689,180 @@ impl fold::Folder for ReplaceBodyWithLoop { } } -pub fn pretty_print_input(sess: Session, - cstore: &CStore, - cfg: ast::CrateConfig, - input: &Input, - ppm: PpMode, - opt_uii: Option, - ofile: Option) { - let krate = panictry!(driver::phase_1_parse_input(&sess, cfg, input)); - - let krate = if let PpmSource(PpmEveryBodyLoops) = ppm { - let mut fold = ReplaceBodyWithLoop::new(); - fold.fold_crate(krate) - } else { - krate +fn print_flowgraph<'a, 'tcx, W: Write>(variants: Vec, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + mir_map: Option<&MirMap<'tcx>>, + code: blocks::Code, + mode: PpFlowGraphMode, + mut out: W) + -> io::Result<()> { + let cfg = match code { + blocks::BlockCode(block) => cfg::CFG::new(tcx, &block), + blocks::FnLikeCode(fn_like) => cfg::CFG::new(tcx, &fn_like.body()), + }; + let labelled_edges = mode != PpFlowGraphMode::UnlabelledEdges; + let lcfg = LabelledCFG { + ast_map: &tcx.map, + cfg: &cfg, + name: format!("node_{}", code.id()), + labelled_edges: labelled_edges, }; - let id = link::find_crate_name(Some(&sess), &krate.attrs, input); + match code { + 
_ if variants.is_empty() => { + let r = dot::render(&lcfg, &mut out); + return expand_err_details(r); + } + blocks::BlockCode(_) => { + tcx.sess.err("--pretty flowgraph with -Z flowgraph-print annotations requires \ + fn-like node id."); + return Ok(()); + } + blocks::FnLikeCode(fn_like) => { + let (bccx, analysis_data) = + borrowck::build_borrowck_dataflow_data_for_fn(tcx, + mir_map, + fn_like.to_fn_parts(), + &cfg); - let is_expanded = needs_expansion(&ppm); - let compute_ast_map = needs_ast_map(&ppm, &opt_uii); - let krate = if compute_ast_map { - match driver::phase_2_configure_and_expand(&sess, &cstore, krate, &id[..], None) { - Err(_) => return, - Ok(k) => driver::assign_node_ids(&sess, k), + let lcfg = borrowck_dot::DataflowLabeller { + inner: lcfg, + variants: variants, + borrowck_ctxt: &bccx, + analysis_data: &analysis_data, + }; + let r = dot::render(&lcfg, &mut out); + return expand_err_details(r); } - } else { - krate - }; + } - // There is some twisted, god-forsaken tangle of lifetimes here which makes - // the ordering of stuff super-finicky. - let mut hir_forest; - let lcx = LoweringContext::new(&sess, Some(&krate)); - let arenas = ty::CtxtArenas::new(); - let dep_graph = DepGraph::new(false); - let _ignore = dep_graph.in_ignore(); - let ast_map = if compute_ast_map { - hir_forest = hir_map::Forest::new(lower_crate(&lcx, &krate), dep_graph.clone()); - let map = driver::make_map(&sess, &mut hir_forest); - Some(map) + fn expand_err_details(r: io::Result<()>) -> io::Result<()> { + r.map_err(|ioerr| { + io::Error::new(io::ErrorKind::Other, + &format!("graphviz::render failed: {}", ioerr)[..]) + }) + } +} + +pub fn fold_crate(krate: ast::Crate, ppm: PpMode) -> ast::Crate { + if let PpmSource(PpmEveryBodyLoops) = ppm { + let mut fold = ReplaceBodyWithLoop::new(); + fold.fold_crate(krate) } else { - None - }; + krate + } +} +fn get_source(input: &Input, sess: &Session) -> (Vec, String) { let src_name = driver::source_name(input); let src = sess.codemap() - .get_filemap(&src_name[..]) + .get_filemap(&src_name) .src .as_ref() .unwrap() .as_bytes() .to_vec(); - let mut rdr = &src[..]; + (src, src_name) +} + +fn write_output(out: Vec, ofile: Option<&Path>) { + match ofile { + None => print!("{}", String::from_utf8(out).unwrap()), + Some(p) => { + match File::create(p) { + Ok(mut w) => w.write_all(&out).unwrap(), + Err(e) => panic!("print-print failed to open {} due to {}", p.display(), e), + } + } + } +} +pub fn print_after_parsing(sess: &Session, + input: &Input, + krate: &ast::Crate, + ppm: PpMode, + ofile: Option<&Path>) { + let dep_graph = DepGraph::new(false); + let _ignore = dep_graph.in_ignore(); + + let (src, src_name) = get_source(input, sess); + + let mut rdr = &*src; + let mut out = Vec::new(); + + if let PpmSource(s) = ppm { + // Silently ignores an identified node. 
+ let out: &mut Write = &mut out; + s.call_with_pp_support(sess, None, box out, |annotation, out| { + debug!("pretty printing source code {:?}", s); + let sess = annotation.sess(); + pprust::print_crate(sess.codemap(), + sess.diagnostic(), + krate, + src_name.to_string(), + &mut rdr, + out, + annotation.pp_ann(), + false) + }).unwrap() + } else { + unreachable!(); + }; + + write_output(out, ofile); +} + +pub fn print_after_hir_lowering<'tcx, 'a: 'tcx>(sess: &'a Session, + ast_map: &hir_map::Map<'tcx>, + analysis: &ty::CrateAnalysis, + resolutions: &Resolutions, + input: &Input, + krate: &ast::Crate, + crate_name: &str, + ppm: PpMode, + arenas: &'tcx ty::CtxtArenas<'tcx>, + opt_uii: Option, + ofile: Option<&Path>) { + let dep_graph = DepGraph::new(false); + let _ignore = dep_graph.in_ignore(); + + if ppm.needs_analysis() { + print_with_analysis(sess, ast_map, analysis, resolutions, + crate_name, arenas, ppm, opt_uii, ofile); + return; + } + + let (src, src_name) = get_source(input, sess); + + let mut rdr = &src[..]; let mut out = Vec::new(); match (ppm, opt_uii) { (PpmSource(s), _) => { // Silently ignores an identified node. let out: &mut Write = &mut out; - s.call_with_pp_support(&sess, ast_map, box out, |annotation, out| { + s.call_with_pp_support(sess, Some(ast_map), box out, |annotation, out| { debug!("pretty printing source code {:?}", s); let sess = annotation.sess(); pprust::print_crate(sess.codemap(), sess.diagnostic(), - &krate, + krate, src_name.to_string(), &mut rdr, out, annotation.pp_ann(), - is_expanded) + true) }) } (PpmHir(s), None) => { let out: &mut Write = &mut out; - s.call_with_pp_support_hir(&sess, - cstore, - &ast_map.unwrap(), - &arenas, - &id, + s.call_with_pp_support_hir(sess, + ast_map, + analysis, + resolutions, + arenas, + crate_name, box out, |annotation, out, krate| { debug!("pretty printing source code {:?}", s); @@ -792,17 +874,18 @@ pub fn pretty_print_input(sess: Session, &mut rdr, out, annotation.pp_ann(), - is_expanded) + true) }) } (PpmHir(s), Some(uii)) => { let out: &mut Write = &mut out; - s.call_with_pp_support_hir(&sess, - cstore, - &ast_map.unwrap(), - &arenas, - &id, + s.call_with_pp_support_hir(sess, + ast_map, + analysis, + resolutions, + arenas, + crate_name, (out,uii), |annotation, (out,uii), _| { debug!("pretty printing source code {:?}", s); @@ -828,158 +911,104 @@ pub fn pretty_print_input(sess: Session, } pp::eof(&mut pp_state.s) }) - } + } + _ => unreachable!(), + }.unwrap(); + + write_output(out, ofile); +} + +// In an ideal world, this would be a public function called by the driver after +// analsysis is performed. However, we want to call `phase_3_run_analysis_passes` +// with a different callback than the standard driver, so that isn't easy. +// Instead, we call that function ourselves. 
+fn print_with_analysis<'tcx, 'a: 'tcx>(sess: &'a Session, + ast_map: &hir_map::Map<'tcx>, + analysis: &ty::CrateAnalysis, + resolutions: &Resolutions, + crate_name: &str, + arenas: &'tcx ty::CtxtArenas<'tcx>, + ppm: PpMode, + uii: Option, + ofile: Option<&Path>) { + let nodeid = if let Some(uii) = uii { + debug!("pretty printing for {:?}", uii); + Some(uii.to_one_node_id("--unpretty", sess, &ast_map)) + } else { + debug!("pretty printing for whole crate"); + None + }; - (pp_type@PpmMir, uii) | (pp_type@PpmMirCFG, uii) => { - let ast_map = ast_map.expect("--unpretty missing ast_map"); - let nodeid = if let Some(uii) = uii { - debug!("pretty printing MIR for {:?}", uii); - Some(uii.to_one_node_id("--unpretty", &sess, &ast_map)) - } else { - debug!("pretty printing MIR for whole crate"); - None - }; - abort_on_err(driver::phase_3_run_analysis_passes(&sess, - &cstore, - ast_map, - &arenas, - &id, - resolve::MakeGlobMap::No, - |tcx, mir_map, _, _| { + let mut out = Vec::new(); + + abort_on_err(driver::phase_3_run_analysis_passes(sess, + ast_map.clone(), + analysis.clone(), + resolutions.clone(), + arenas, + crate_name, + |tcx, mir_map, _, _| { + match ppm { + PpmMir | PpmMirCFG => { if let Some(mir_map) = mir_map { if let Some(nodeid) = nodeid { let mir = mir_map.map.get(&nodeid).unwrap_or_else(|| { sess.fatal(&format!("no MIR map entry for node {}", nodeid)) }); - match pp_type { + match ppm { PpmMir => write_mir_pretty(tcx, iter::once((&nodeid, mir)), &mut out), - _ => write_mir_graphviz(tcx, iter::once((&nodeid, mir)), &mut out) + PpmMirCFG => { + write_mir_graphviz(tcx, iter::once((&nodeid, mir)), &mut out) + } + _ => unreachable!(), }?; } else { - match pp_type { + match ppm { PpmMir => write_mir_pretty(tcx, mir_map.map.iter(), &mut out), - _ => write_mir_graphviz(tcx, mir_map.map.iter(), &mut out) + PpmMirCFG => write_mir_graphviz(tcx, mir_map.map.iter(), &mut out), + _ => unreachable!(), }?; } } Ok(()) - }), &sess) - } - - (PpmFlowGraph(mode), opt_uii) => { - debug!("pretty printing flow graph for {:?}", opt_uii); - let uii = opt_uii.unwrap_or_else(|| { - sess.fatal(&format!("`pretty flowgraph=..` needs NodeId (int) or - \ - unique path suffix (b::c::d)")) + } + PpmFlowGraph(mode) => { + let nodeid = nodeid.expect("`pretty flowgraph=..` needs NodeId (int) or \ + unique path suffix (b::c::d)"); + let node = tcx.map.find(nodeid).unwrap_or_else(|| { + tcx.sess.fatal(&format!("--pretty flowgraph couldn't find id: {}", nodeid)) + }); - }); - let ast_map = ast_map.expect("--pretty flowgraph missing ast_map"); - let nodeid = uii.to_one_node_id("--pretty", &sess, &ast_map); + let code = blocks::Code::from_node(node); + match code { + Some(code) => { + let variants = gather_flowgraph_variants(tcx.sess); - let node = ast_map.find(nodeid).unwrap_or_else(|| { - sess.fatal(&format!("--pretty flowgraph couldn't find id: {}", nodeid)) - }); + let out: &mut Write = &mut out; - let code = blocks::Code::from_node(node); - let out: &mut Write = &mut out; - match code { - Some(code) => { - let variants = gather_flowgraph_variants(&sess); - abort_on_err(driver::phase_3_run_analysis_passes(&sess, - &cstore, - ast_map, - &arenas, - &id, - resolve::MakeGlobMap::No, - |tcx, mir_map, _, _| { print_flowgraph(variants, tcx, mir_map.as_ref(), code, mode, out) - }), &sess) - } - None => { - let message = format!("--pretty=flowgraph needs block, fn, or method; got \ - {:?}", - node); - - // point to what was found, if there's an - // accessible span. 
- match ast_map.opt_span(nodeid) { - Some(sp) => sess.span_fatal(sp, &message[..]), - None => sess.fatal(&message[..]), + } + None => { + let message = format!("--pretty=flowgraph needs block, fn, or method; got \ + {:?}", + node); + + // Point to what was found, if there's an accessible span. + match tcx.map.opt_span(nodeid) { + Some(sp) => tcx.sess.span_fatal(sp, &message), + None => tcx.sess.fatal(&message), + } } } } + _ => unreachable!(), } - } - .unwrap(); + }), sess).unwrap(); - match ofile { - None => print!("{}", String::from_utf8(out).unwrap()), - Some(p) => { - match File::create(&p) { - Ok(mut w) => w.write_all(&out).unwrap(), - Err(e) => panic!("print-print failed to open {} due to {}", p.display(), e), - } - } - } -} - -fn print_flowgraph<'tcx, W: Write>(variants: Vec, - tcx: &TyCtxt<'tcx>, - mir_map: Option<&MirMap<'tcx>>, - code: blocks::Code, - mode: PpFlowGraphMode, - mut out: W) - -> io::Result<()> { - let cfg = match code { - blocks::BlockCode(block) => cfg::CFG::new(tcx, &block), - blocks::FnLikeCode(fn_like) => cfg::CFG::new(tcx, &fn_like.body()), - }; - let labelled_edges = mode != PpFlowGraphMode::UnlabelledEdges; - let lcfg = LabelledCFG { - ast_map: &tcx.map, - cfg: &cfg, - name: format!("node_{}", code.id()), - labelled_edges: labelled_edges, - }; - - match code { - _ if variants.is_empty() => { - let r = dot::render(&lcfg, &mut out); - return expand_err_details(r); - } - blocks::BlockCode(_) => { - tcx.sess.err("--pretty flowgraph with -Z flowgraph-print annotations requires \ - fn-like node id."); - return Ok(()); - } - blocks::FnLikeCode(fn_like) => { - let (bccx, analysis_data) = - borrowck::build_borrowck_dataflow_data_for_fn(tcx, - mir_map, - fn_like.to_fn_parts(), - &cfg); - - let lcfg = borrowck_dot::DataflowLabeller { - inner: lcfg, - variants: variants, - borrowck_ctxt: &bccx, - analysis_data: &analysis_data, - }; - let r = dot::render(&lcfg, &mut out); - return expand_err_details(r); - } - } - - fn expand_err_details(r: io::Result<()>) -> io::Result<()> { - r.map_err(|ioerr| { - io::Error::new(io::ErrorKind::Other, - &format!("graphviz::render failed: {}", ioerr)[..]) - }) - } + write_output(out, ofile); } diff --git a/src/librustc_driver/target_features.rs b/src/librustc_driver/target_features.rs index 27ffb595a4..fad0af19a1 100644 --- a/src/librustc_driver/target_features.rs +++ b/src/librustc_driver/target_features.rs @@ -9,79 +9,54 @@ // except according to those terms. use syntax::{ast, attr}; +use llvm::LLVMRustHasFeature; use rustc::session::Session; +use rustc_trans::back::write::create_target_machine; use syntax::parse::token::InternedString; use syntax::parse::token::intern_and_get_ident as intern; +use libc::c_char; + +// WARNING: the features must be known to LLVM or the feature +// detection code will walk past the end of the feature array, +// leading to crashes. + +const ARM_WHITELIST: &'static [&'static str] = &[ + "neon\0", + "vfp2\0", + "vfp3\0", + "vfp4\0", +]; + +const X86_WHITELIST: &'static [&'static str] = &[ + "avx\0", + "avx2\0", + "sse\0", + "sse2\0", + "sse3\0", + "sse4.1\0", + "sse4.2\0", + "ssse3\0", +]; /// Add `target_feature = "..."` cfgs for a variety of platform /// specific features (SSE, NEON etc.). /// -/// This uses a scheme similar to that employed by clang: reimplement -/// the target feature knowledge. *Theoretically* we could query LLVM -/// since that has perfect knowledge about what things are enabled in -/// code-generation, however, it is extremely non-obvious how to do -/// this successfully. 
Each platform defines a subclass of a -/// SubtargetInfo, which knows all this information, but the ways to -/// query them do not seem to be public. +/// This is performed by checking whether a whitelisted set of +/// features is available on the target machine, by querying LLVM. pub fn add_configuration(cfg: &mut ast::CrateConfig, sess: &Session) { - let tf = InternedString::new("target_feature"); - macro_rules! fillout { - ($($func: ident, $name: expr;)*) => {{ - $(if $func(sess) { - cfg.push(attr::mk_name_value_item_str(tf.clone(), intern($name))) - })* - }} - } - fillout! { - has_sse, "sse"; - has_sse2, "sse2"; - has_sse3, "sse3"; - has_ssse3, "ssse3"; - has_sse41, "sse4.1"; - has_sse42, "sse4.2"; - has_avx, "avx"; - has_avx2, "avx2"; - has_neon, "neon"; - has_vfp, "vfp"; - } -} - + let target_machine = create_target_machine(sess); -fn features_contain(sess: &Session, s: &str) -> bool { - sess.target.target.options.features.contains(s) || sess.opts.cg.target_feature.contains(s) -} + let whitelist = match &*sess.target.target.arch { + "arm" => ARM_WHITELIST, + "x86" | "x86_64" => X86_WHITELIST, + _ => &[], + }; -pub fn has_sse(sess: &Session) -> bool { - features_contain(sess, "+sse") || has_sse2(sess) -} -pub fn has_sse2(sess: &Session) -> bool { - // x86-64 requires at least SSE2 support - sess.target.target.arch == "x86_64" || features_contain(sess, "+sse2") || has_sse3(sess) -} -pub fn has_sse3(sess: &Session) -> bool { - features_contain(sess, "+sse3") || has_ssse3(sess) -} -pub fn has_ssse3(sess: &Session) -> bool { - features_contain(sess, "+ssse3") || has_sse41(sess) -} -pub fn has_sse41(sess: &Session) -> bool { - features_contain(sess, "+sse4.1") || has_sse42(sess) -} -pub fn has_sse42(sess: &Session) -> bool { - features_contain(sess, "+sse4.2") || has_avx(sess) -} -pub fn has_avx(sess: &Session) -> bool { - features_contain(sess, "+avx") || has_avx2(sess) -} -pub fn has_avx2(sess: &Session) -> bool { - features_contain(sess, "+avx2") -} - -pub fn has_neon(sess: &Session) -> bool { - // AArch64 requires NEON support - sess.target.target.arch == "aarch64" || features_contain(sess, "+neon") -} -pub fn has_vfp(sess: &Session) -> bool { - // AArch64 requires VFP support - sess.target.target.arch == "aarch64" || features_contain(sess, "+vfp") + let tf = InternedString::new("target_feature"); + for feat in whitelist { + assert_eq!(feat.chars().last(), Some('\0')); + if unsafe { LLVMRustHasFeature(target_machine, feat.as_ptr() as *const c_char) } { + cfg.push(attr::mk_name_value_item_str(tf.clone(), intern(&feat[..feat.len()-1]))) + } + } } diff --git a/src/librustc_driver/test.rs b/src/librustc_driver/test.rs index fc12d54628..e0d693c423 100644 --- a/src/librustc_driver/test.rs +++ b/src/librustc_driver/test.rs @@ -13,7 +13,7 @@ use driver; use rustc::dep_graph::DepGraph; use rustc_lint; -use rustc_resolve as resolve; +use rustc_resolve::MakeGlobMap; use rustc::middle::lang_items; use rustc::middle::free_region::FreeRegionMap; use rustc::middle::region::{self, CodeExtent}; @@ -24,26 +24,25 @@ use rustc::ty::subst; use rustc::ty::subst::Subst; use rustc::traits::ProjectionMode; use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; -use rustc::ty::relate::TypeRelation; use rustc::infer::{self, InferOk, InferResult, TypeOrigin}; use rustc_metadata::cstore::CStore; +use rustc_metadata::creader::read_local_crates; use rustc::hir::map as hir_map; use rustc::session::{self, config}; use std::rc::Rc; use syntax::ast; use syntax::abi::Abi; -use syntax::codemap::{MultiSpan, CodeMap, DUMMY_SP}; 
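// The whitelist scheme above can be illustrated with a small, self-contained
// sketch. Everything below is illustrative only: `has_feature` is a
// hypothetical stand-in for the `LLVMRustHasFeature` FFI call, and the
// whitelist entries are examples. The point is the trailing NUL: each name is
// stored NUL-terminated so a raw pointer to it can be passed straight to the
// C/LLVM side, and the NUL is stripped again before the name is reported back.

use std::os::raw::c_char;

fn enabled_features<F>(whitelist: &[&'static str], has_feature: F) -> Vec<&'static str>
    where F: Fn(*const c_char) -> bool
{
    whitelist.iter().cloned().filter_map(|feat| {
        // Every whitelisted name must end in NUL, mirroring the assert above.
        assert_eq!(feat.chars().last(), Some('\0'));
        if has_feature(feat.as_ptr() as *const c_char) {
            // Drop the trailing NUL before exposing the name as a cfg value.
            Some(&feat[..feat.len() - 1])
        } else {
            None
        }
    }).collect()
}

fn main() {
    // Pretend the target supports everything we ask about.
    let cfg = enabled_features(&["sse\0", "sse2\0", "avx\0"], |_| true);
    assert_eq!(cfg, ["sse", "sse2", "avx"]);
}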
+use syntax::codemap::{CodeMap, DUMMY_SP}; use syntax::errors; -use syntax::errors::emitter::Emitter; +use syntax::errors::emitter::{CoreEmitter, Emitter}; use syntax::errors::{Level, RenderSpan}; use syntax::parse::token; use syntax::feature_gate::UnstableFeatures; -use rustc::hir::lowering::{lower_crate, LoweringContext}; use rustc::hir; -struct Env<'a, 'tcx: 'a> { - infcx: &'a infer::InferCtxt<'a, 'tcx>, +struct Env<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + infcx: &'a infer::InferCtxt<'a, 'gcx, 'tcx>, } struct RH<'a> { @@ -71,21 +70,20 @@ fn remove_message(e: &mut ExpectErrorEmitter, msg: &str, lvl: Level) { e.messages.remove(i); } None => { + debug!("Unexpected error: {} Expected: {:?}", msg, e.messages); panic!("Unexpected error: {} Expected: {:?}", msg, e.messages); } } } -impl Emitter for ExpectErrorEmitter { - fn emit(&mut self, - _sp: Option<&MultiSpan>, - msg: &str, - _: Option<&str>, - lvl: Level) { - remove_message(self, msg, lvl); - } - - fn custom_emit(&mut self, _sp: &RenderSpan, msg: &str, lvl: Level) { +impl CoreEmitter for ExpectErrorEmitter { + fn emit_message(&mut self, + _sp: &RenderSpan, + msg: &str, + _: Option<&str>, + lvl: Level, + _is_header: bool, + _show_snippet: bool) { remove_message(self, msg, lvl); } } @@ -106,8 +104,10 @@ fn test_env(source_string: &str, options.unstable_features = UnstableFeatures::Allow; let diagnostic_handler = errors::Handler::with_emitter(true, false, emitter); - let cstore = Rc::new(CStore::new(token::get_ident_interner())); - let sess = session::build_session_(options, None, diagnostic_handler, + let dep_graph = DepGraph::new(false); + let _ignore = dep_graph.in_ignore(); + let cstore = Rc::new(CStore::new(&dep_graph, token::get_ident_interner())); + let sess = session::build_session_(options, &dep_graph, None, diagnostic_handler, Rc::new(CodeMap::new()), cstore.clone()); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); let krate_config = Vec::new(); @@ -120,45 +120,47 @@ fn test_env(source_string: &str, .expect("phase 2 aborted"); let krate = driver::assign_node_ids(&sess, krate); - let lcx = LoweringContext::new(&sess, Some(&krate)); - let dep_graph = DepGraph::new(false); + let mut defs = hir_map::collect_definitions(&krate); + read_local_crates(&sess, &cstore, &defs, &krate, "test_crate", &dep_graph); let _ignore = dep_graph.in_ignore(); - let mut hir_forest = hir_map::Forest::new(lower_crate(&lcx, &krate), dep_graph.clone()); + + let (_, resolutions, mut hir_forest) = { + driver::lower_and_resolve(&sess, "test-crate", &mut defs, &krate, + &sess.dep_graph, MakeGlobMap::No) + }; + let arenas = ty::CtxtArenas::new(); - let ast_map = driver::make_map(&sess, &mut hir_forest); + let ast_map = hir_map::map_crate(&mut hir_forest, defs); // run just enough stuff to build a tcx: let lang_items = lang_items::collect_language_items(&sess, &ast_map); - let resolve::CrateMap { def_map, freevars, .. 
} = - resolve::resolve_crate(&sess, &ast_map, resolve::MakeGlobMap::No); - let named_region_map = resolve_lifetime::krate(&sess, &ast_map, &def_map.borrow()); + let named_region_map = resolve_lifetime::krate(&sess, &ast_map, &resolutions.def_map.borrow()); let region_map = region::resolve_crate(&sess, &ast_map); let index = stability::Index::new(&ast_map); TyCtxt::create_and_enter(&sess, - &arenas, - def_map, - named_region_map.unwrap(), - ast_map, - freevars, - region_map, - lang_items, - index, - "test_crate", - |tcx| { - let infcx = infer::new_infer_ctxt(tcx, - &tcx.tables, - None, - ProjectionMode::AnyFinal); - body(Env { infcx: &infcx }); - let free_regions = FreeRegionMap::new(); - infcx.resolve_regions_and_report_errors(&free_regions, - ast::CRATE_NODE_ID); - assert_eq!(tcx.sess.err_count(), expected_err_count); - }); + &arenas, + resolutions.def_map, + named_region_map.unwrap(), + ast_map, + resolutions.freevars, + resolutions.maybe_unused_trait_imports, + region_map, + lang_items, + index, + "test_crate", + |tcx| { + tcx.infer_ctxt(None, None, ProjectionMode::AnyFinal).enter(|infcx| { + + body(Env { infcx: &infcx }); + let free_regions = FreeRegionMap::new(); + infcx.resolve_regions_and_report_errors(&free_regions, ast::CRATE_NODE_ID); + assert_eq!(tcx.sess.err_count(), expected_err_count); + }); + }); } -impl<'a, 'tcx> Env<'a, 'tcx> { - pub fn tcx(&self) -> &TyCtxt<'tcx> { +impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> { + pub fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.infcx.tcx } @@ -240,17 +242,14 @@ impl<'a, 'tcx> Env<'a, 'tcx> { } pub fn make_subtype(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool { - match infer::mk_subty(self.infcx, true, TypeOrigin::Misc(DUMMY_SP), a, b) { + match self.infcx.sub_types(true, TypeOrigin::Misc(DUMMY_SP), a, b) { Ok(_) => true, Err(ref e) => panic!("Encountered error: {}", e), } } pub fn is_subtype(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> bool { - match infer::can_mk_subty(self.infcx, a, b) { - Ok(_) => true, - Err(_) => false, - } + self.infcx.can_sub_types(a, b).is_ok() } pub fn assert_subtype(&self, a: Ty<'tcx>, b: Ty<'tcx>) { @@ -266,7 +265,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { pub fn t_fn(&self, input_tys: &[Ty<'tcx>], output_ty: Ty<'tcx>) -> Ty<'tcx> { let input_args = input_tys.iter().cloned().collect(); - self.infcx.tcx.mk_fn_ptr(ty::BareFnTy { + self.infcx.tcx.mk_fn_ptr(self.infcx.tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Normal, abi: Abi::Rust, sig: ty::Binder(ty::FnSig { @@ -274,7 +273,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { output: ty::FnConverging(output_ty), variadic: false, }), - }) + })) } pub fn t_nil(&self) -> Ty<'tcx> { @@ -355,25 +354,25 @@ impl<'a, 'tcx> Env<'a, 'tcx> { infer::TypeTrace::dummy(self.tcx()) } - pub fn sub(&self, t1: &Ty<'tcx>, t2: &Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { + pub fn sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { let trace = self.dummy_type_trace(); - self.infcx.sub(true, trace, t1, t2) + self.infcx.sub(true, trace, &t1, &t2) } - pub fn lub(&self, t1: &Ty<'tcx>, t2: &Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { + pub fn lub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { let trace = self.dummy_type_trace(); - self.infcx.lub(true, trace, t1, t2) + self.infcx.lub(true, trace, &t1, &t2) } - pub fn glb(&self, t1: &Ty<'tcx>, t2: &Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { + pub fn glb(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> InferResult<'tcx, Ty<'tcx>> { let trace = self.dummy_type_trace(); - self.infcx.glb(true, trace, t1, t2) + self.infcx.glb(true, trace, &t1, 
&t2) } /// Checks that `t1 <: t2` is true (this may register additional /// region checks). pub fn check_sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) { - match self.sub(&t1, &t2) { + match self.sub(t1, t2) { Ok(InferOk { obligations, .. }) => { // FIXME(#32730) once obligations are being propagated, assert the right thing. assert!(obligations.is_empty()); @@ -387,7 +386,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { /// Checks that `t1 <: t2` is false (this may register additional /// region checks). pub fn check_not_sub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) { - match self.sub(&t1, &t2) { + match self.sub(t1, t2) { Err(_) => {} Ok(_) => { panic!("unexpected success computing sub({:?},{:?})", t1, t2); @@ -397,7 +396,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { /// Checks that `LUB(t1,t2) == t_lub` pub fn check_lub(&self, t1: Ty<'tcx>, t2: Ty<'tcx>, t_lub: Ty<'tcx>) { - match self.lub(&t1, &t2) { + match self.lub(t1, t2) { Ok(InferOk { obligations, value: t }) => { // FIXME(#32730) once obligations are being propagated, assert the right thing. assert!(obligations.is_empty()); @@ -413,7 +412,7 @@ impl<'a, 'tcx> Env<'a, 'tcx> { /// Checks that `GLB(t1,t2) == t_glb` pub fn check_glb(&self, t1: Ty<'tcx>, t2: Ty<'tcx>, t_glb: Ty<'tcx>) { debug!("check_glb(t1={}, t2={}, t_glb={})", t1, t2, t_glb); - match self.glb(&t1, &t2) { + match self.glb(t1, t2) { Err(e) => { panic!("unexpected error computing LUB: {:?}", e) } @@ -445,7 +444,7 @@ fn contravariant_region_ptr_ok() { #[test] fn contravariant_region_ptr_err() { - test_env(EMPTY_SOURCE_STR, errors(&["lifetime mismatch"]), |env| { + test_env(EMPTY_SOURCE_STR, errors(&["mismatched types"]), |env| { env.create_simple_region_hierarchy(); let t_rptr1 = env.t_rptr_scope(1); let t_rptr10 = env.t_rptr_scope(10); diff --git a/src/librustc_incremental/assert_dep_graph.rs b/src/librustc_incremental/assert_dep_graph.rs index 88d8ed8d58..9dc50a6306 100644 --- a/src/librustc_incremental/assert_dep_graph.rs +++ b/src/librustc_incremental/assert_dep_graph.rs @@ -44,6 +44,7 @@ use graphviz as dot; use rustc::dep_graph::{DepGraphQuery, DepNode}; +use rustc::dep_graph::debug::{DepNodeFilter, EdgeFilter}; use rustc::hir::def_id::DefId; use rustc::ty::TyCtxt; use rustc_data_structures::fnv::{FnvHashMap, FnvHashSet}; @@ -63,7 +64,7 @@ const IF_THIS_CHANGED: &'static str = "rustc_if_this_changed"; const THEN_THIS_WOULD_NEED: &'static str = "rustc_then_this_would_need"; const ID: &'static str = "id"; -pub fn assert_dep_graph(tcx: &TyCtxt) { +pub fn assert_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let _ignore = tcx.dep_graph.in_ignore(); if tcx.sess.opts.debugging_opts.dump_dep_graph { @@ -98,7 +99,7 @@ type TargetHashMap = FnvHashSet<(Span, InternedString, ast::NodeId, DepNode)>>; struct IfThisChanged<'a, 'tcx:'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, if_this_changed: SourceHashMap, then_this_would_need: TargetHashMap, } @@ -172,9 +173,9 @@ impl<'a, 'tcx> Visitor<'tcx> for IfThisChanged<'a, 'tcx> { } } -fn check_paths(tcx: &TyCtxt, - if_this_changed: &SourceHashMap, - then_this_would_need: &TargetHashMap) +fn check_paths<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + if_this_changed: &SourceHashMap, + then_this_would_need: &TargetHashMap) { // Return early here so as not to construct the query, which is not cheap. 
if if_this_changed.is_empty() { @@ -195,7 +196,7 @@ fn check_paths(tcx: &TyCtxt, }; for &(_, source_def_id, source_dep_node) in sources { - let dependents = query.transitive_dependents(source_dep_node); + let dependents = query.transitive_successors(source_dep_node); for &(target_span, ref target_pass, _, ref target_dep_node) in targets { if !dependents.contains(&target_dep_node) { tcx.sess.span_err( @@ -213,19 +214,18 @@ fn check_paths(tcx: &TyCtxt, } } -fn dump_graph(tcx: &TyCtxt) { +fn dump_graph(tcx: TyCtxt) { let path: String = env::var("RUST_DEP_GRAPH").unwrap_or_else(|_| format!("dep_graph")); let query = tcx.dep_graph.query(); let nodes = match env::var("RUST_DEP_GRAPH_FILTER") { Ok(string) => { // Expect one of: "-> target", "source -> target", or "source ->". - let parts: Vec<_> = string.split("->").collect(); - if parts.len() > 2 { - bug!("Invalid RUST_DEP_GRAPH_FILTER: expected '[source] -> [target]'"); - } - let sources = node_set(&query, &parts[0]); - let targets = node_set(&query, &parts[1]); + let edge_filter = EdgeFilter::new(&string).unwrap_or_else(|e| { + bug!("invalid filter: {}", e) + }); + let sources = node_set(&query, &edge_filter.source); + let targets = node_set(&query, &edge_filter.target); filter_nodes(&query, &sources, &targets) } Err(_) => { @@ -295,26 +295,16 @@ impl<'a, 'tcx> dot::Labeller<'a> for GraphvizDepGraph { // Given an optional filter like `"x,y,z"`, returns either `None` (no // filter) or the set of nodes whose labels contain all of those // substrings. -fn node_set(query: &DepGraphQuery, filter: &str) +fn node_set(query: &DepGraphQuery, filter: &DepNodeFilter) -> Option>> { debug!("node_set(filter={:?})", filter); - if filter.trim().is_empty() { + if filter.accepts_all() { return None; } - let filters: Vec<&str> = filter.split("&").map(|s| s.trim()).collect(); - - debug!("node_set: filters={:?}", filters); - - Some(query.nodes() - .into_iter() - .filter(|n| { - let s = format!("{:?}", n); - filters.iter().all(|f| s.contains(f)) - }) - .collect()) + Some(query.nodes().into_iter().filter(|n| filter.test(n)).collect()) } fn filter_nodes(query: &DepGraphQuery, diff --git a/src/librustc_incremental/calculate_svh.rs b/src/librustc_incremental/calculate_svh.rs index ab1c6f5ace..24ecce1148 100644 --- a/src/librustc_incremental/calculate_svh.rs +++ b/src/librustc_incremental/calculate_svh.rs @@ -14,21 +14,21 @@ use std::hash::{Hash, SipHasher, Hasher}; use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; use rustc::hir::svh::Svh; -use rustc::ty; +use rustc::ty::TyCtxt; use rustc::hir::intravisit::{self, Visitor}; use self::svh_visitor::StrictVersionHashVisitor; pub trait SvhCalculate { /// Calculate the SVH for an entire krate. - fn calculate_krate_hash(&self) -> Svh; + fn calculate_krate_hash(self) -> Svh; /// Calculate the SVH for a particular item. - fn calculate_item_hash(&self, def_id: DefId) -> u64; + fn calculate_item_hash(self, def_id: DefId) -> u64; } -impl<'tcx> SvhCalculate for ty::TyCtxt<'tcx> { - fn calculate_krate_hash(&self) -> Svh { +impl<'a, 'tcx> SvhCalculate for TyCtxt<'a, 'tcx, 'tcx> { + fn calculate_krate_hash(self) -> Svh { // FIXME (#14132): This is better than it used to be, but it still not // ideal. We now attempt to hash only the relevant portions of the // Crate AST as well as the top-level crate attributes. 
(However, @@ -72,12 +72,14 @@ impl<'tcx> SvhCalculate for ty::TyCtxt<'tcx> { attr.node.value.hash(&mut state); } - Svh::from_hash(state.finish()) + Svh::new(state.finish()) } - fn calculate_item_hash(&self, def_id: DefId) -> u64 { + fn calculate_item_hash(self, def_id: DefId) -> u64 { assert!(def_id.is_local()); + debug!("calculate_item_hash(def_id={:?})", def_id); + let mut state = SipHasher::new(); { @@ -89,11 +91,16 @@ impl<'tcx> SvhCalculate for ty::TyCtxt<'tcx> { intravisit::walk_crate(&mut visit, krate); } else { let node_id = self.map.as_local_node_id(def_id).unwrap(); - visit.visit_item(self.map.expect_item(node_id)); + let item = self.map.expect_item(node_id); + visit.visit_item(item); } } - state.finish() + let hash = state.finish(); + + debug!("calculate_item_hash: def_id={:?} hash={:?}", def_id, hash); + + hash } } @@ -109,7 +116,7 @@ mod svh_visitor { use syntax::ast::{self, Name, NodeId}; use syntax::codemap::Span; use syntax::parse::token; - use rustc::ty; + use rustc::ty::TyCtxt; use rustc::hir; use rustc::hir::*; use rustc::hir::intravisit as visit; @@ -118,13 +125,13 @@ mod svh_visitor { use std::hash::{Hash, SipHasher}; pub struct StrictVersionHashVisitor<'a, 'tcx: 'a> { - pub tcx: &'a ty::TyCtxt<'tcx>, + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, pub st: &'a mut SipHasher, } impl<'a, 'tcx> StrictVersionHashVisitor<'a, 'tcx> { pub fn new(st: &'a mut SipHasher, - tcx: &'a ty::TyCtxt<'tcx>) + tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self { StrictVersionHashVisitor { st: st, tcx: tcx } } @@ -172,7 +179,6 @@ mod svh_visitor { SawImplItem, SawStructField, SawVariant, - SawExplicitSelf, SawPath, SawBlock, SawPat, @@ -245,7 +251,7 @@ mod svh_visitor { ExprType(..) => SawExprType, ExprIf(..) => SawExprIf, ExprWhile(..) => SawExprWhile, - ExprLoop(_, id) => SawExprLoop(id.map(|id| id.name.as_str())), + ExprLoop(_, id) => SawExprLoop(id.map(|id| id.as_str())), ExprMatch(..) => SawExprMatch, ExprClosure(..) => SawExprClosure, ExprBlock(..) => SawExprBlock, @@ -256,8 +262,8 @@ mod svh_visitor { ExprIndex(..) => SawExprIndex, ExprPath(ref qself, _) => SawExprPath(qself.as_ref().map(|q| q.position)), ExprAddrOf(m, _) => SawExprAddrOf(m), - ExprBreak(id) => SawExprBreak(id.map(|id| id.node.name.as_str())), - ExprAgain(id) => SawExprAgain(id.map(|id| id.node.name.as_str())), + ExprBreak(id) => SawExprBreak(id.map(|id| id.node.as_str())), + ExprAgain(id) => SawExprAgain(id.map(|id| id.node.as_str())), ExprRet(..) => SawExprRet, ExprInlineAsm(ref a,_,_) => SawExprInlineAsm(a), ExprStruct(..) => SawExprStruct, @@ -391,10 +397,6 @@ mod svh_visitor { SawStructField.hash(self.st); visit::walk_struct_field(self, s) } - fn visit_explicit_self(&mut self, es: &'a ExplicitSelf) { - SawExplicitSelf.hash(self.st); visit::walk_explicit_self(self, es) - } - fn visit_path(&mut self, path: &'a Path, _: ast::NodeId) { SawPath.hash(self.st); visit::walk_path(self, path) } diff --git a/src/librustc_incremental/persist/data.rs b/src/librustc_incremental/persist/data.rs index 8be8bd60b2..f57ab19a52 100644 --- a/src/librustc_incremental/persist/data.rs +++ b/src/librustc_incremental/persist/data.rs @@ -11,15 +11,35 @@ //! The data that we will serialize and deserialize. use rustc::dep_graph::DepNode; -use rustc_serialize::{Decoder as RustcDecoder, - Encodable as RustcEncodable, Encoder as RustcEncoder}; +use rustc::hir::def_id::DefIndex; use super::directory::DefPathIndex; +/// Data for use when recompiling the **current crate**. 
#[derive(Debug, RustcEncodable, RustcDecodable)] pub struct SerializedDepGraph { pub nodes: Vec>, pub edges: Vec, + + /// These are hashes of two things: + /// - the HIR nodes in this crate + /// - the metadata nodes from dependent crates we use + /// + /// In each case, we store a hash summarizing the contents of + /// those items as they were at the time we did this compilation. + /// In the case of HIR nodes, this hash is derived by walking the + /// HIR itself. In the case of metadata nodes, the hash is loaded + /// from saved state. + /// + /// When we do the next compile, we will load these back up and + /// compare them against the hashes we see at that time, which + /// will tell us what has changed, either in this crate or in some + /// crate that we depend on. + /// + /// Because they will be reloaded, we don't store the DefId (which + /// will be different when we next compile) related to each node, + /// but rather the `DefPathIndex`. This can then be retraced + /// to find the current def-id. pub hashes: Vec, } @@ -27,9 +47,45 @@ pub type SerializedEdge = (DepNode, DepNode); #[derive(Debug, RustcEncodable, RustcDecodable)] pub struct SerializedHash { - pub index: DefPathIndex, + /// node being hashed; either a Hir or MetaData variant, in + /// practice + pub node: DepNode, /// the hash itself, computed by `calculate_item_hash` pub hash: u64, } +/// Data for use when downstream crates get recompiled. +#[derive(Debug, RustcEncodable, RustcDecodable)] +pub struct SerializedMetadataHashes { + /// For each def-id defined in this crate that appears in the + /// metadata, we hash all the inputs that were used when producing + /// the metadata. We save this after compilation is done. Then, + /// when some downstream crate is being recompiled, it can compare + /// the hashes we saved against the hashes that it saw from + /// before; this will tell it which of the items in this crate + /// changed, which in turn implies what items in the downstream + /// crate need to be recompiled. + /// + /// Note that we store the def-ids here. This is because we don't + /// reload this file when we recompile this crate, we will just + /// regenerate it completely with the current hashes and new def-ids. + /// + /// Then downstream creates will load up their + /// `SerializedDepGraph`, which may contain `MetaData(X)` nodes + /// where `X` refers to some item in this crate. That `X` will be + /// a `DefPathIndex` that gets retracted to the current `DefId` + /// (matching the one found in this structure). + pub hashes: Vec, +} + +/// The hash for some metadata that (when saving) will be exported +/// from this crate, or which (when importing) was exported by an +/// upstream crate. 
+#[derive(Debug, RustcEncodable, RustcDecodable)] +pub struct SerializedMetadataHash { + pub def_index: DefIndex, + + /// the hash itself, computed by `calculate_item_hash` + pub hash: u64, +} diff --git a/src/librustc_incremental/persist/directory.rs b/src/librustc_incremental/persist/directory.rs index 0d0054cadc..f9e90f3932 100644 --- a/src/librustc_incremental/persist/directory.rs +++ b/src/librustc_incremental/persist/directory.rs @@ -16,10 +16,8 @@ use rustc::dep_graph::DepNode; use rustc::hir::map::DefPath; use rustc::hir::def_id::DefId; -use rustc::ty; +use rustc::ty::TyCtxt; use rustc::util::nodemap::DefIdMap; -use rustc_serialize::{Decoder as RustcDecoder, - Encodable as RustcEncodable, Encoder as RustcEncoder}; use std::fmt::{self, Debug}; /// Index into the DefIdDirectory @@ -41,9 +39,9 @@ impl DefIdDirectory { DefIdDirectory { paths: vec![] } } - pub fn retrace(&self, tcx: &ty::TyCtxt) -> RetracedDefIdDirectory { + pub fn retrace(&self, tcx: TyCtxt) -> RetracedDefIdDirectory { let ids = self.paths.iter() - .map(|path| tcx.map.retrace_path(path)) + .map(|path| tcx.retrace_path(path)) .collect(); RetracedDefIdDirectory { ids: ids } } @@ -65,13 +63,13 @@ impl RetracedDefIdDirectory { } pub struct DefIdDirectoryBuilder<'a,'tcx:'a> { - tcx: &'a ty::TyCtxt<'tcx>, - hash: DefIdMap>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + hash: DefIdMap, directory: DefIdDirectory, } impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> { - pub fn new(tcx: &'a ty::TyCtxt<'tcx>) -> DefIdDirectoryBuilder<'a, 'tcx> { + pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> DefIdDirectoryBuilder<'a, 'tcx> { DefIdDirectoryBuilder { tcx: tcx, hash: DefIdMap(), @@ -79,29 +77,22 @@ impl<'a,'tcx> DefIdDirectoryBuilder<'a,'tcx> { } } - pub fn add(&mut self, def_id: DefId) -> Option { - if !def_id.is_local() { - // FIXME(#32015) clarify story about cross-crate dep tracking - return None; - } - + pub fn add(&mut self, def_id: DefId) -> DefPathIndex { + debug!("DefIdDirectoryBuilder: def_id={:?}", def_id); let tcx = self.tcx; let paths = &mut self.directory.paths; self.hash.entry(def_id) .or_insert_with(|| { let def_path = tcx.def_path(def_id); - if !def_path.is_local() { - return None; - } let index = paths.len() as u32; paths.push(def_path); - Some(DefPathIndex { index: index }) + DefPathIndex { index: index } }) .clone() } - pub fn map(&mut self, node: DepNode) -> Option> { - node.map_def(|&def_id| self.add(def_id)) + pub fn map(&mut self, node: DepNode) -> DepNode { + node.map_def(|&def_id| Some(self.add(def_id))).unwrap() } pub fn into_directory(self) -> DefIdDirectory { diff --git a/src/librustc_incremental/persist/dirty_clean.rs b/src/librustc_incremental/persist/dirty_clean.rs index 35fa69520b..dee4d667b8 100644 --- a/src/librustc_incremental/persist/dirty_clean.rs +++ b/src/librustc_incremental/persist/dirty_clean.rs @@ -31,14 +31,14 @@ use rustc::hir::intravisit::Visitor; use syntax::ast::{self, Attribute, MetaItem}; use syntax::attr::AttrMetaMethods; use syntax::parse::token::InternedString; -use rustc::ty; +use rustc::ty::TyCtxt; const DIRTY: &'static str = "rustc_dirty"; const CLEAN: &'static str = "rustc_clean"; const LABEL: &'static str = "label"; const CFG: &'static str = "cfg"; -pub fn check_dirty_clean_annotations(tcx: &ty::TyCtxt) { +pub fn check_dirty_clean_annotations<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let _ignore = tcx.dep_graph.in_ignore(); let query = tcx.dep_graph.query(); let krate = tcx.map.krate(); @@ -49,7 +49,7 @@ pub fn check_dirty_clean_annotations(tcx: &ty::TyCtxt) { } pub struct DirtyCleanVisitor<'a, 
'tcx:'a> { - tcx: &'a ty::TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, query: &'a DepGraphQuery, } diff --git a/src/librustc_incremental/persist/hash.rs b/src/librustc_incremental/persist/hash.rs new file mode 100644 index 0000000000..b729f25b87 --- /dev/null +++ b/src/librustc_incremental/persist/hash.rs @@ -0,0 +1,163 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use calculate_svh::SvhCalculate; +use rbml::Error; +use rbml::opaque::Decoder; +use rustc::dep_graph::DepNode; +use rustc::hir::def_id::DefId; +use rustc::hir::svh::Svh; +use rustc::ty::TyCtxt; +use rustc_data_structures::fnv::FnvHashMap; +use rustc_serialize::Decodable; +use std::io::{ErrorKind, Read}; +use std::fs::File; +use syntax::ast; + +use super::data::*; +use super::util::*; + +pub struct HashContext<'a, 'tcx: 'a> { + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, + item_metadata_hashes: FnvHashMap, + crate_hashes: FnvHashMap, +} + +impl<'a, 'tcx> HashContext<'a, 'tcx> { + pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self { + HashContext { + tcx: tcx, + item_metadata_hashes: FnvHashMap(), + crate_hashes: FnvHashMap(), + } + } + + pub fn hash(&mut self, dep_node: DepNode) -> Option { + match dep_node { + // HIR nodes (which always come from our crate) are an input: + DepNode::Hir(def_id) => { + assert!(def_id.is_local()); + Some(self.hir_hash(def_id)) + } + + // MetaData from other crates is an *input* to us. + // MetaData nodes from *our* crates are an *output*; we + // don't hash them, but we do compute a hash for them and + // save it for others to use. + DepNode::MetaData(def_id) if !def_id.is_local() => { + Some(self.metadata_hash(def_id)) + } + + _ => { + // Other kinds of nodes represent computed by-products + // that we don't hash directly; instead, they should + // have some transitive dependency on a Hir or + // MetaData node, so we'll just hash that + None + } + } + } + + fn hir_hash(&mut self, def_id: DefId) -> u64 { + assert!(def_id.is_local()); + // FIXME(#32753) -- should we use a distinct hash here + self.tcx.calculate_item_hash(def_id) + } + + fn metadata_hash(&mut self, def_id: DefId) -> u64 { + debug!("metadata_hash(def_id={:?})", def_id); + + assert!(!def_id.is_local()); + loop { + // check whether we have a result cached for this def-id + if let Some(&hash) = self.item_metadata_hashes.get(&def_id) { + debug!("metadata_hash: def_id={:?} hash={:?}", def_id, hash); + return hash; + } + + // check whether we did not find detailed metadata for this + // krate; in that case, we just use the krate's overall hash + if let Some(&hash) = self.crate_hashes.get(&def_id.krate) { + debug!("metadata_hash: def_id={:?} crate_hash={:?}", def_id, hash); + + // micro-"optimization": avoid a cache miss if we ask + // for metadata from this particular def-id again. + self.item_metadata_hashes.insert(def_id, hash.as_u64()); + + return hash.as_u64(); + } + + // otherwise, load the data and repeat. 
+ self.load_data(def_id.krate); + assert!(self.crate_hashes.contains_key(&def_id.krate)); + } + } + + fn load_data(&mut self, cnum: ast::CrateNum) { + debug!("load_data(cnum={})", cnum); + + let svh = self.tcx.sess.cstore.crate_hash(cnum); + let old = self.crate_hashes.insert(cnum, svh); + debug!("load_data: svh={}", svh); + assert!(old.is_none(), "loaded data for crate {:?} twice", cnum); + + if let Some(path) = metadata_hash_path(self.tcx, cnum) { + debug!("load_data: path={:?}", path); + let mut data = vec![]; + match + File::open(&path) + .and_then(|mut file| file.read_to_end(&mut data)) + { + Ok(_) => { + match self.load_from_data(cnum, &data) { + Ok(()) => { } + Err(err) => { + bug!("decoding error in dep-graph from `{}`: {}", + path.display(), err); + } + } + } + Err(err) => { + match err.kind() { + ErrorKind::NotFound => { + // If the file is not found, that's ok. + } + _ => { + self.tcx.sess.err( + &format!("could not load dep information from `{}`: {}", + path.display(), err)); + return; + } + } + } + } + } + } + + fn load_from_data(&mut self, cnum: ast::CrateNum, data: &[u8]) -> Result<(), Error> { + debug!("load_from_data(cnum={})", cnum); + + // Load up the hashes for the def-ids from this crate. + let mut decoder = Decoder::new(data, 0); + let serialized_hashes = try!(SerializedMetadataHashes::decode(&mut decoder)); + for serialized_hash in serialized_hashes.hashes { + // the hashes are stored with just a def-index, which is + // always relative to the old crate; convert that to use + // our internal crate number + let def_id = DefId { krate: cnum, index: serialized_hash.def_index }; + + // record the hash for this dep-node + let old = self.item_metadata_hashes.insert(def_id, serialized_hash.hash); + debug!("load_from_data: def_id={:?} hash={}", def_id, serialized_hash.hash); + assert!(old.is_none(), "already have hash for {:?}", def_id); + } + Ok(()) + } +} diff --git a/src/librustc_incremental/persist/load.rs b/src/librustc_incremental/persist/load.rs index 196c4511b0..e3fd290443 100644 --- a/src/librustc_incremental/persist/load.rs +++ b/src/librustc_incremental/persist/load.rs @@ -10,12 +10,11 @@ //! Code to save/load the dep-graph from files. -use calculate_svh::SvhCalculate; use rbml::Error; use rbml::opaque::Decoder; use rustc::dep_graph::DepNode; use rustc::hir::def_id::DefId; -use rustc::ty; +use rustc::ty::TyCtxt; use rustc_data_structures::fnv::FnvHashSet; use rustc_serialize::Decodable as RustcDecodable; use std::io::Read; @@ -25,6 +24,7 @@ use std::path::Path; use super::data::*; use super::directory::*; use super::dirty_clean; +use super::hash::*; use super::util::*; type DirtyNodes = FnvHashSet>; @@ -37,7 +37,7 @@ type CleanEdges = Vec<(DepNode, DepNode)>; /// early in compilation, before we've really done any work, but /// actually it doesn't matter all that much.) See `README.md` for /// more general overview. 
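// The save/load machinery below boils down to the comparison sketched here:
// hash every input node that was recorded last time, and treat any node whose
// hash changed (or that can no longer be hashed) as an initial dirty node.
// This is only an illustrative model, assuming plain u64 ids in place of
// retraced def-ids and dep-nodes; the real comparison is done by
// `initial_dirty_nodes` further down.

use std::collections::{HashMap, HashSet};

struct SavedHash {
    node: u64, // stand-in for a retraced dep-node
    hash: u64, // hash recorded by the previous compilation
}

fn initial_dirty<F>(saved: &[SavedHash], current_hash: F) -> HashSet<u64>
    where F: Fn(u64) -> Option<u64>
{
    saved.iter()
         .filter(|s| current_hash(s.node) != Some(s.hash))
         .map(|s| s.node)
         .collect()
}

fn main() {
    let saved = vec![SavedHash { node: 1, hash: 10 },
                     SavedHash { node: 2, hash: 20 }];
    // Node 2's contents changed since the hashes were saved.
    let current: HashMap<u64, u64> = vec![(1, 10), (2, 99)].into_iter().collect();
    let dirty = initial_dirty(&saved, |n| current.get(&n).cloned());
    let expected: HashSet<u64> = vec![2].into_iter().collect();
    assert_eq!(dirty, expected);
}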
-pub fn load_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>) { +pub fn load_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let _ignore = tcx.dep_graph.in_ignore(); if let Some(dep_graph) = dep_graph_path(tcx) { @@ -47,7 +47,7 @@ pub fn load_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>) { } } -pub fn load_dep_graph_if_exists<'tcx>(tcx: &ty::TyCtxt<'tcx>, path: &Path) { +pub fn load_dep_graph_if_exists<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, path: &Path) { if !path.exists() { return; } @@ -74,8 +74,9 @@ pub fn load_dep_graph_if_exists<'tcx>(tcx: &ty::TyCtxt<'tcx>, path: &Path) { } } -pub fn decode_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>, data: &[u8]) - -> Result<(), Error> +pub fn decode_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + data: &[u8]) + -> Result<(), Error> { // Deserialize the directory and dep-graph. let mut decoder = Decoder::new(data, 0); @@ -129,21 +130,21 @@ pub fn decode_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>, data: &[u8]) Ok(()) } -fn initial_dirty_nodes<'tcx>(tcx: &ty::TyCtxt<'tcx>, - hashed_items: &[SerializedHash], - retraced: &RetracedDefIdDirectory) - -> DirtyNodes { +fn initial_dirty_nodes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + hashes: &[SerializedHash], + retraced: &RetracedDefIdDirectory) + -> DirtyNodes { + let mut hcx = HashContext::new(tcx); let mut items_removed = false; let mut dirty_nodes = FnvHashSet(); - for hashed_item in hashed_items { - match retraced.def_id(hashed_item.index) { - Some(def_id) => { - // FIXME(#32753) -- should we use a distinct hash here - let current_hash = tcx.calculate_item_hash(def_id); + for hash in hashes { + match hash.node.map_def(|&i| retraced.def_id(i)) { + Some(dep_node) => { + let current_hash = hcx.hash(dep_node).unwrap(); debug!("initial_dirty_nodes: hash of {:?} is {:?}, was {:?}", - def_id, current_hash, hashed_item.hash); - if current_hash != hashed_item.hash { - dirty_nodes.insert(DepNode::Hir(def_id)); + dep_node, current_hash, hash.hash); + if current_hash != hash.hash { + dirty_nodes.insert(dep_node); } } None => { diff --git a/src/librustc_incremental/persist/mod.rs b/src/librustc_incremental/persist/mod.rs index 8d04fd30a1..72ccc29c97 100644 --- a/src/librustc_incremental/persist/mod.rs +++ b/src/librustc_incremental/persist/mod.rs @@ -15,6 +15,7 @@ mod data; mod directory; mod dirty_clean; +mod hash; mod load; mod save; mod util; diff --git a/src/librustc_incremental/persist/save.rs b/src/librustc_incremental/persist/save.rs index d88f9e42b0..7deb1ca36d 100644 --- a/src/librustc_incremental/persist/save.rs +++ b/src/librustc_incremental/persist/save.rs @@ -8,117 +8,115 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use calculate_svh::SvhCalculate; use rbml::opaque::Encoder; use rustc::dep_graph::DepNode; -use rustc::ty; +use rustc::middle::cstore::LOCAL_CRATE; +use rustc::ty::TyCtxt; use rustc_serialize::{Encodable as RustcEncodable}; +use std::hash::{Hasher, SipHasher}; use std::io::{self, Cursor, Write}; use std::fs::{self, File}; +use std::path::PathBuf; use super::data::*; use super::directory::*; +use super::hash::*; use super::util::*; -pub fn save_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>) { +pub fn save_dep_graph<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let _ignore = tcx.dep_graph.in_ignore(); + let mut hcx = HashContext::new(tcx); + save_in(&mut hcx, dep_graph_path(tcx), encode_dep_graph); + save_in(&mut hcx, metadata_hash_path(tcx, LOCAL_CRATE), encode_metadata_hashes); +} - if let Some(dep_graph) = dep_graph_path(tcx) { - // FIXME(#32754) lock file? 
- - // delete the old dep-graph, if any - if dep_graph.exists() { - match fs::remove_file(&dep_graph) { - Ok(()) => { } - Err(err) => { - tcx.sess.err( - &format!("unable to delete old dep-graph at `{}`: {}", - dep_graph.display(), err)); - return; - } - } - } +fn save_in<'a, 'tcx, F>(hcx: &mut HashContext<'a, 'tcx>, + opt_path_buf: Option, + encode: F) + where F: FnOnce(&mut HashContext<'a, 'tcx>, &mut Encoder) -> io::Result<()> +{ + let tcx = hcx.tcx; + + let path_buf = match opt_path_buf { + Some(p) => p, + None => return + }; + + // FIXME(#32754) lock file? - // generate the data in a memory buffer - let mut wr = Cursor::new(Vec::new()); - match encode_dep_graph(tcx, &mut Encoder::new(&mut wr)) { + // delete the old dep-graph, if any + if path_buf.exists() { + match fs::remove_file(&path_buf) { Ok(()) => { } Err(err) => { tcx.sess.err( - &format!("could not encode dep-graph to `{}`: {}", - dep_graph.display(), err)); + &format!("unable to delete old dep-graph at `{}`: {}", + path_buf.display(), err)); return; } } + } - // write the data out - let data = wr.into_inner(); - match - File::create(&dep_graph) - .and_then(|mut file| file.write_all(&data)) - { - Ok(_) => { } - Err(err) => { - tcx.sess.err( - &format!("failed to write dep-graph to `{}`: {}", - dep_graph.display(), err)); - return; - } + // generate the data in a memory buffer + let mut wr = Cursor::new(Vec::new()); + match encode(hcx, &mut Encoder::new(&mut wr)) { + Ok(()) => { } + Err(err) => { + tcx.sess.err( + &format!("could not encode dep-graph to `{}`: {}", + path_buf.display(), err)); + return; + } + } + + // write the data out + let data = wr.into_inner(); + match + File::create(&path_buf) + .and_then(|mut file| file.write_all(&data)) + { + Ok(_) => { } + Err(err) => { + tcx.sess.err( + &format!("failed to write dep-graph to `{}`: {}", + path_buf.display(), err)); + return; } } } -pub fn encode_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>, - encoder: &mut Encoder) - -> io::Result<()> +pub fn encode_dep_graph<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>, + encoder: &mut Encoder) + -> io::Result<()> { - // Here we take advantage of how RBML allows us to skip around - // and encode the depgraph as a two-part structure: - // - // ``` - // [SerializedDepGraph] // tag 0 - // [DefIdDirectory] // tag 1 - // ``` - // - // Then later we can load the directory by skipping to find tag 1. - + let tcx = hcx.tcx; let query = tcx.dep_graph.query(); let mut builder = DefIdDirectoryBuilder::new(tcx); - // Create hashes for things we can persist. + // Create hashes for inputs. let hashes = query.nodes() .into_iter() - .filter_map(|dep_node| match dep_node { - DepNode::Hir(def_id) => { - assert!(def_id.is_local()); - builder.add(def_id) - .map(|index| { - // FIXME(#32753) -- should we use a distinct hash here - let hash = tcx.calculate_item_hash(def_id); - SerializedHash { index: index, hash: hash } - }) - } - _ => None + .filter_map(|dep_node| { + hcx.hash(dep_node) + .map(|hash| { + let node = builder.map(dep_node); + SerializedHash { node: node, hash: hash } + }) }) .collect(); - // Create the serialized dep-graph, dropping nodes that are - // from other crates or from inlined items. - // - // FIXME(#32015) fix handling of other crates + // Create the serialized dep-graph. 
let graph = SerializedDepGraph { nodes: query.nodes().into_iter() - .flat_map(|node| builder.map(node)) + .map(|node| builder.map(node)) .collect(), edges: query.edges().into_iter() - .flat_map(|(source_node, target_node)| { - builder.map(source_node) - .and_then(|source| { - builder.map(target_node) - .map(|target| (source, target)) - }) + .map(|(source_node, target_node)| { + let source = builder.map(source_node); + let target = builder.map(target_node); + (source, target) }) .collect(), hashes: hashes, @@ -134,3 +132,63 @@ pub fn encode_dep_graph<'tcx>(tcx: &ty::TyCtxt<'tcx>, Ok(()) } +pub fn encode_metadata_hashes<'a, 'tcx>(hcx: &mut HashContext<'a, 'tcx>, + encoder: &mut Encoder) + -> io::Result<()> +{ + let tcx = hcx.tcx; + let query = tcx.dep_graph.query(); + + let serialized_hashes = { + // Identify the `MetaData(X)` nodes where `X` is local. These are + // the metadata items we export. Downstream crates will want to + // see a hash that tells them whether we might have changed the + // metadata for a given item since they last compiled. + let meta_data_def_ids = + query.nodes() + .into_iter() + .filter_map(|dep_node| match dep_node { + DepNode::MetaData(def_id) if def_id.is_local() => Some(def_id), + _ => None, + }); + + // To create the hash for each item `X`, we don't hash the raw + // bytes of the metadata (though in principle we + // could). Instead, we walk the predecessors of `MetaData(X)` + // from the dep-graph. This corresponds to all the inputs that + // were read to construct the metadata. To create the hash for + // the metadata, we hash (the hash of) all of those inputs. + let hashes = + meta_data_def_ids + .map(|def_id| { + assert!(def_id.is_local()); + let dep_node = DepNode::MetaData(def_id); + let mut state = SipHasher::new(); + debug!("save: computing metadata hash for {:?}", dep_node); + for node in query.transitive_predecessors(dep_node) { + if let Some(hash) = hcx.hash(node) { + debug!("save: predecessor {:?} has hash {}", node, hash); + state.write_u64(hash.to_le()); + } else { + debug!("save: predecessor {:?} cannot be hashed", node); + } + } + let hash = state.finish(); + debug!("save: metadata hash for {:?} is {}", dep_node, hash); + SerializedMetadataHash { + def_index: def_id.index, + hash: hash, + } + }); + + // Collect these up into a vector. + SerializedMetadataHashes { + hashes: hashes.collect() + } + }; + + // Encode everything. + try!(serialized_hashes.encode(encoder)); + + Ok(()) +} diff --git a/src/librustc_incremental/persist/serialize.rs b/src/librustc_incremental/persist/serialize.rs deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/src/librustc_incremental/persist/util.rs b/src/librustc_incremental/persist/util.rs index 9b4e5997ef..a77a9607e7 100644 --- a/src/librustc_incremental/persist/util.rs +++ b/src/librustc_incremental/persist/util.rs @@ -8,15 +8,27 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
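As the comments in `encode_metadata_hashes` explain, the exported hash for an item `X` is not a hash of its metadata bytes but a `SipHasher` combination of the hashes of every input reachable from `MetaData(X)` in the dep-graph. The core of that computation, sketched with a plain iterator of predecessor hashes standing in for `query.transitive_predecessors` plus `hcx.hash`:

    use std::hash::{Hasher, SipHasher};

    // Combine the (already computed) hashes of all inputs an exported item
    // depends on into one stable hash for downstream crates to compare.
    fn metadata_hash<I>(predecessor_hashes: I) -> u64
        where I: IntoIterator<Item = u64>
    {
        let mut state = SipHasher::new();
        for h in predecessor_hashes {
            state.write_u64(h.to_le());   // fixed byte order, as above
        }
        state.finish()
    }
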
-use rustc::ty; +use rustc::middle::cstore::LOCAL_CRATE; +use rustc::ty::TyCtxt; + use std::fs; -use std::path::PathBuf; +use std::io; +use std::path::{Path, PathBuf}; +use syntax::ast; + +pub fn dep_graph_path(tcx: TyCtxt) -> Option { + path(tcx, LOCAL_CRATE, "local") +} -pub fn dep_graph_path<'tcx>(tcx: &ty::TyCtxt<'tcx>) -> Option { +pub fn metadata_hash_path(tcx: TyCtxt, cnum: ast::CrateNum) -> Option { + path(tcx, cnum, "metadata") +} + +fn path(tcx: TyCtxt, cnum: ast::CrateNum, suffix: &str) -> Option { // For now, just save/load dep-graph from // directory/dep_graph.rbml tcx.sess.opts.incremental.as_ref().and_then(|incr_dir| { - match fs::create_dir_all(&incr_dir){ + match create_dir_racy(&incr_dir) { Ok(()) => {} Err(err) => { tcx.sess.err( @@ -26,7 +38,34 @@ pub fn dep_graph_path<'tcx>(tcx: &ty::TyCtxt<'tcx>) -> Option { } } - Some(incr_dir.join("dep_graph.rbml")) + let crate_name = tcx.crate_name(cnum); + let crate_disambiguator = tcx.crate_disambiguator(cnum); + let file_name = format!("{}-{}.{}.bin", + crate_name, + crate_disambiguator, + suffix); + Some(incr_dir.join(file_name)) }) } +// Like std::fs::create_dir_all, except handles concurrent calls among multiple +// threads or processes. +fn create_dir_racy(path: &Path) -> io::Result<()> { + match fs::create_dir(path) { + Ok(()) => return Ok(()), + Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => return Ok(()), + Err(ref e) if e.kind() == io::ErrorKind::NotFound => {} + Err(e) => return Err(e), + } + match path.parent() { + Some(p) => try!(create_dir_racy(p)), + None => return Err(io::Error::new(io::ErrorKind::Other, + "failed to create whole tree")), + } + match fs::create_dir(path) { + Ok(()) => Ok(()), + Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(()), + Err(e) => Err(e), + } +} + diff --git a/src/librustc_lint/Cargo.toml b/src/librustc_lint/Cargo.toml index 3f0cd397e7..7674cc529b 100644 --- a/src/librustc_lint/Cargo.toml +++ b/src/librustc_lint/Cargo.toml @@ -7,6 +7,7 @@ version = "0.0.0" name = "rustc_lint" path = "lib.rs" crate-type = ["dylib"] +test = false [dependencies] log = { path = "../liblog" } diff --git a/src/librustc_lint/bad_style.rs b/src/librustc_lint/bad_style.rs index f4fb226d35..27a6e433c7 100644 --- a/src/librustc_lint/bad_style.rs +++ b/src/librustc_lint/bad_style.rs @@ -277,7 +277,7 @@ impl LateLintPass for NonSnakeCase { if let &PatKind::Ident(_, ref path1, _) = &p.node { let def = cx.tcx.def_map.borrow().get(&p.id).map(|d| d.full_def()); if let Some(Def::Local(..)) = def { - self.check_snake_case(cx, "variable", &path1.node.name.as_str(), Some(p.span)); + self.check_snake_case(cx, "variable", &path1.node.as_str(), Some(p.span)); } } } @@ -363,7 +363,7 @@ impl LateLintPass for NonUpperCaseGlobals { match (&p.node, cx.tcx.def_map.borrow().get(&p.id).map(|d| d.full_def())) { (&PatKind::Ident(_, ref path1, _), Some(Def::Const(..))) => { NonUpperCaseGlobals::check_upper_case(cx, "constant in pattern", - path1.node.name, p.span); + path1.node, p.span); } _ => {} } diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs index 5e3a47701e..49b59aea46 100644 --- a/src/librustc_lint/builtin.rs +++ b/src/librustc_lint/builtin.rs @@ -29,10 +29,9 @@ //! a `pub fn new()`. 
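The new `path` helper in persist/util.rs above gives every piece of incremental state its own file, named after the crate, its disambiguator, and a suffix (`local` for the dep-graph, `metadata` for the exported hashes), while `create_dir_racy` tolerates another rustc process creating the incremental directory at the same time. The naming scheme in isolation (the helper name and example disambiguator here are illustrative):

    use std::path::{Path, PathBuf};

    // e.g. ("mycrate", "8787e43b", "metadata") -> incr_dir/mycrate-8787e43b.metadata.bin
    fn incr_comp_file(incr_dir: &Path,
                      crate_name: &str,
                      crate_disambiguator: &str,
                      suffix: &str) -> PathBuf {
        incr_dir.join(format!("{}-{}.{}.bin", crate_name, crate_disambiguator, suffix))
    }
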
use rustc::hir::def::Def; -use middle::cstore::CrateStore; use rustc::hir::def_id::DefId; use middle::stability; -use rustc::{cfg, infer}; +use rustc::cfg; use rustc::ty::subst::Substs; use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::adjustment; @@ -173,7 +172,7 @@ impl LateLintPass for NonShorthandFieldPatterns { }); for fieldpat in field_pats { if let PatKind::Ident(_, ident, None) = fieldpat.node.pat.node { - if ident.node.unhygienic_name == fieldpat.node.name { + if ident.node.unhygienize() == fieldpat.node.name { cx.span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, &format!("the `{}:` in this pattern is redundant and can \ be removed", ident.node)) @@ -497,10 +496,10 @@ impl LateLintPass for MissingCopyImplementations { let parameter_environment = cx.tcx.empty_parameter_environment(); // FIXME (@jroesch) should probably inver this so that the parameter env still impls this // method - if !ty.moves_by_default(¶meter_environment, item.span) { + if !ty.moves_by_default(cx.tcx, ¶meter_environment, item.span) { return; } - if parameter_environment.can_type_implement_copy(ty, item.span).is_ok() { + if parameter_environment.can_type_implement_copy(cx.tcx, ty, item.span).is_ok() { cx.span_lint(MISSING_COPY_IMPLEMENTATIONS, item.span, "type could implement `Copy`; consider adding `impl \ @@ -764,8 +763,8 @@ impl LateLintPass for UnconditionalRecursion { for call in &self_call_spans { db.span_note(*call, "recursive call site"); } - db.fileline_help(sp, "a `loop` may express intention \ - better if this is on purpose"); + db.help("a `loop` may express intention \ + better if this is on purpose"); } db.emit(); } @@ -776,7 +775,7 @@ impl LateLintPass for UnconditionalRecursion { // Functions for identifying if the given Expr NodeId `id` // represents a call to the function `fn_id`/method `method`. - fn expr_refers_to_this_fn(tcx: &TyCtxt, + fn expr_refers_to_this_fn(tcx: TyCtxt, fn_id: ast::NodeId, id: ast::NodeId) -> bool { match tcx.map.get(id) { @@ -792,9 +791,9 @@ impl LateLintPass for UnconditionalRecursion { } // Check if the expression `id` performs a call to `method`. - fn expr_refers_to_this_method(tcx: &TyCtxt, - method: &ty::Method, - id: ast::NodeId) -> bool { + fn expr_refers_to_this_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + method: &ty::Method, + id: ast::NodeId) -> bool { // Check for method calls and overloaded operators. let opt_m = tcx.tables.borrow().method_map.get(&ty::MethodCall::expr(id)).cloned(); if let Some(m) = opt_m { @@ -823,11 +822,7 @@ impl LateLintPass for UnconditionalRecursion { hir_map::NodeExpr(&hir::Expr { node: hir::ExprCall(ref callee, _), .. }) => { match tcx.def_map.borrow().get(&callee.id).map(|d| d.full_def()) { Some(Def::Method(def_id)) => { - let item_substs = - tcx.tables.borrow().item_substs - .get(&callee.id) - .cloned() - .unwrap_or_else(|| ty::ItemSubsts::empty()); + let item_substs = tcx.node_id_item_substs(callee.id); method_call_refers_to_method( tcx, method, def_id, &item_substs.substs, id) } @@ -840,11 +835,11 @@ impl LateLintPass for UnconditionalRecursion { // Check if the method call to the method with the ID `callee_id` // and instantiated with `callee_substs` refers to method `method`. 
- fn method_call_refers_to_method<'tcx>(tcx: &TyCtxt<'tcx>, - method: &ty::Method, - callee_id: DefId, - callee_substs: &Substs<'tcx>, - expr_id: ast::NodeId) -> bool { + fn method_call_refers_to_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + method: &ty::Method, + callee_id: DefId, + callee_substs: &Substs<'tcx>, + expr_id: ast::NodeId) -> bool { let callee_item = tcx.impl_or_trait_item(callee_id); match callee_item.container() { @@ -869,39 +864,37 @@ impl LateLintPass for UnconditionalRecursion { // checking, so it's always local let node_id = tcx.map.as_local_node_id(method.def_id).unwrap(); - let param_env = ty::ParameterEnvironment::for_item(tcx, node_id); - let infcx = infer::new_infer_ctxt(tcx, - &tcx.tables, - Some(param_env), - ProjectionMode::AnyFinal); - let mut selcx = traits::SelectionContext::new(&infcx); - match selcx.select(&obligation) { - // The method comes from a `T: Trait` bound. - // If `T` is `Self`, then this call is inside - // a default method definition. - Ok(Some(traits::VtableParam(_))) => { - let self_ty = callee_substs.self_ty(); - let on_self = self_ty.map_or(false, |t| t.is_self()); - // We can only be recurring in a default - // method if we're being called literally - // on the `Self` type. - on_self && callee_id == method.def_id - } + let param_env = Some(ty::ParameterEnvironment::for_item(tcx, node_id)); + tcx.infer_ctxt(None, param_env, ProjectionMode::AnyFinal).enter(|infcx| { + let mut selcx = traits::SelectionContext::new(&infcx); + match selcx.select(&obligation) { + // The method comes from a `T: Trait` bound. + // If `T` is `Self`, then this call is inside + // a default method definition. + Ok(Some(traits::VtableParam(_))) => { + let self_ty = callee_substs.self_ty(); + let on_self = self_ty.map_or(false, |t| t.is_self()); + // We can only be recurring in a default + // method if we're being called literally + // on the `Self` type. + on_self && callee_id == method.def_id + } - // The `impl` is known, so we check that with a - // special case: - Ok(Some(traits::VtableImpl(vtable_impl))) => { - let container = ty::ImplContainer(vtable_impl.impl_def_id); - // It matches if it comes from the same impl, - // and has the same method name. - container == method.container - && callee_item.name() == method.name - } + // The `impl` is known, so we check that with a + // special case: + Ok(Some(traits::VtableImpl(vtable_impl))) => { + let container = ty::ImplContainer(vtable_impl.impl_def_id); + // It matches if it comes from the same impl, + // and has the same method name. + container == method.container + && callee_item.name() == method.name + } - // There's no way to know if this call is - // recursive, so we assume it's not. - _ => return false - } + // There's no way to know if this call is + // recursive, so we assume it's not. 
+ _ => false + } + }) } } } diff --git a/src/librustc_lint/lib.rs b/src/librustc_lint/lib.rs index e7c9097a56..9fca6d3d20 100644 --- a/src/librustc_lint/lib.rs +++ b/src/librustc_lint/lib.rs @@ -190,6 +190,18 @@ pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) { id: LintId::of(ILLEGAL_STRUCT_OR_ENUM_CONSTANT_PATTERN), reference: "RFC 1445 ", }, + FutureIncompatibleInfo { + id: LintId::of(UNSIZED_IN_TUPLE), + reference: "issue #33242 ", + }, + FutureIncompatibleInfo { + id: LintId::of(OBJECT_UNSAFE_FRAGMENT), + reference: "issue #33243 ", + }, + FutureIncompatibleInfo { + id: LintId::of(HR_LIFETIME_IN_ASSOC_TYPE), + reference: "issue #33685 ", + }, ]); // We have one lint pass defined specially diff --git a/src/librustc_lint/types.rs b/src/librustc_lint/types.rs index 891731cb29..892924db6f 100644 --- a/src/librustc_lint/types.rs +++ b/src/librustc_lint/types.rs @@ -11,7 +11,6 @@ #![allow(non_snake_case)] use rustc::hir::def_id::DefId; -use rustc::infer; use rustc::ty::subst::Substs; use rustc::ty::{self, Ty, TyCtxt}; use middle::const_val::ConstVal; @@ -26,7 +25,7 @@ use std::{i8, i16, i32, i64, u8, u16, u32, u64, f32, f64}; use syntax::ast; use syntax::abi::Abi; -use syntax::attr::{self, AttrMetaMethods}; +use syntax::attr; use syntax::codemap::{self, Span}; use rustc::hir; @@ -295,8 +294,10 @@ impl LateLintPass for TypeLimits { } } - fn check_limits(tcx: &TyCtxt, binop: hir::BinOp, - l: &hir::Expr, r: &hir::Expr) -> bool { + fn check_limits<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + binop: hir::BinOp, + l: &hir::Expr, + r: &hir::Expr) -> bool { let (lit, expr, swap) = match (&l.node, &r.node) { (&hir::ExprLit(_), _) => (l, r, true), (_, &hir::ExprLit(_)) => (r, l, false), @@ -376,10 +377,10 @@ enum FfiResult { /// to function pointers and references, but could be /// expanded to cover NonZero raw pointers and newtypes. /// FIXME: This duplicates code in trans. -fn is_repr_nullable_ptr<'tcx>(tcx: &TyCtxt<'tcx>, - def: ty::AdtDef<'tcx>, - substs: &Substs<'tcx>) - -> bool { +fn is_repr_nullable_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + def: ty::AdtDef<'tcx>, + substs: &Substs<'tcx>) + -> bool { if def.variants.len() == 2 { let data_idx; @@ -410,7 +411,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { ty: Ty<'tcx>) -> FfiResult { use self::FfiResult::*; - let cx = &self.cx.tcx; + let cx = self.cx.tcx; // Protect against infinite recursion, for example // `struct S(*mut S);`. @@ -439,7 +440,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { } for field in &def.struct_variant().fields { - let field_ty = infer::normalize_associated_type(cx, &field.ty(cx, substs)); + let field_ty = cx.normalize_associated_type(&field.ty(cx, substs)); let r = self.check_type_for_ffi(cache, field_ty); match r { FfiSafe => {} @@ -494,7 +495,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { // Check the contained variants. 
for variant in &def.variants { for field in &variant.fields { - let arg = infer::normalize_associated_type(cx, &field.ty(cx, substs)); + let arg = cx.normalize_associated_type(&field.ty(cx, substs)); let r = self.check_type_for_ffi(cache, arg); match r { FfiSafe => {} @@ -596,7 +597,7 @@ impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> { fn check_type_for_ffi_and_report_errors(&mut self, sp: Span, ty: Ty<'tcx>) { // it is only OK to use this function because extern fns cannot have // any generic types right now: - let ty = infer::normalize_associated_type(self.cx.tcx, &ty); + let ty = self.cx.tcx.normalize_associated_type(&ty); match self.check_type_for_ffi(&mut FnvHashSet(), ty) { FfiResult::FfiSafe => {} diff --git a/src/librustc_llvm/build.rs b/src/librustc_llvm/build.rs index dcfb518ba7..0c6db2cb8b 100644 --- a/src/librustc_llvm/build.rs +++ b/src/librustc_llvm/build.rs @@ -100,6 +100,13 @@ fn main() { } cfg.flag(flag); } + + for component in &components[..] { + let mut flag = String::from("-DLLVM_COMPONENT_"); + flag.push_str(&component.to_uppercase()); + cfg.flag(&flag); + } + cfg.file("../rustllvm/ExecutionEngineWrapper.cpp") .file("../rustllvm/PassWrapper.cpp") .file("../rustllvm/RustWrapper.cpp") diff --git a/src/librustc_llvm/lib.rs b/src/librustc_llvm/lib.rs index 4df2da801f..ea0d8eae75 100644 --- a/src/librustc_llvm/lib.rs +++ b/src/librustc_llvm/lib.rs @@ -44,6 +44,7 @@ pub use self::FileType::*; pub use self::MetadataType::*; pub use self::AsmDialect::*; pub use self::CodeGenOptLevel::*; +pub use self::CodeGenOptSize::*; pub use self::RelocMode::*; pub use self::CodeGenModel::*; pub use self::DiagnosticKind::*; @@ -97,7 +98,7 @@ pub enum Visibility { // DLLExportLinkage, GhostLinkage and LinkOnceODRAutoHideLinkage. // LinkerPrivateLinkage and LinkerPrivateWeakLinkage are not included either; // they've been removed in upstream LLVM commit r203866. 
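The build-script loop added to librustc_llvm/build.rs above turns each enabled LLVM component into a preprocessor define passed when compiling the rustllvm C++ wrappers, so the C++ side can test for a component with `#ifdef`. Its effect on a single component name, shown in isolation:

    // "x86" -> "-DLLVM_COMPONENT_X86", "aarch64" -> "-DLLVM_COMPONENT_AARCH64", ...
    fn component_define(component: &str) -> String {
        let mut flag = String::from("-DLLVM_COMPONENT_");
        flag.push_str(&component.to_uppercase());
        flag
    }
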
-#[derive(Copy, Clone)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)] pub enum Linkage { ExternalLinkage = 0, AvailableExternallyLinkage = 1, @@ -375,6 +376,14 @@ pub enum CodeGenOptLevel { CodeGenLevelAggressive = 3, } +#[derive(Copy, Clone, PartialEq)] +#[repr(C)] +pub enum CodeGenOptSize { + CodeGenOptSizeNone = 0, + CodeGenOptSizeDefault = 1, + CodeGenOptSizeAggressive = 2, +} + #[derive(Copy, Clone, PartialEq)] #[repr(C)] pub enum RelocMode { @@ -2013,6 +2022,9 @@ extern { pub fn LLVMRustFindAndCreatePass(Pass: *const c_char) -> PassRef; pub fn LLVMRustAddPass(PM: PassManagerRef, Pass: PassRef); + pub fn LLVMRustHasFeature(T: TargetMachineRef, + s: *const c_char) -> bool; + pub fn LLVMRustCreateTargetMachine(Triple: *const c_char, CPU: *const c_char, Features: *const c_char, diff --git a/src/librustc_metadata/astencode.rs b/src/librustc_metadata/astencode.rs index 6fd9f27f0f..ad0e8e1c37 100644 --- a/src/librustc_metadata/astencode.rs +++ b/src/librustc_metadata/astencode.rs @@ -49,18 +49,18 @@ use std::fmt::Debug; use rbml::reader; use rbml::writer::Encoder; use rbml; -use serialize; -use serialize::{Decodable, Decoder, DecoderHelpers, Encodable}; -use serialize::EncoderHelpers; +use rustc_serialize as serialize; +use rustc_serialize::{Decodable, Decoder, DecoderHelpers}; +use rustc_serialize::{Encodable, EncoderHelpers}; #[cfg(test)] use std::io::Cursor; #[cfg(test)] use syntax::parse; #[cfg(test)] use syntax::ast::NodeId; #[cfg(test)] use rustc::hir::print as pprust; -#[cfg(test)] use rustc::hir::lowering::{lower_item, LoweringContext}; +#[cfg(test)] use rustc::hir::lowering::{LoweringContext, DummyResolver}; struct DecodeContext<'a, 'b, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: &'b cstore::crate_metadata, from_id_range: IdRange, to_id_range: IdRange, @@ -122,13 +122,13 @@ impl<'a, 'b, 'c, 'tcx> ast_map::FoldOps for &'a DecodeContext<'b, 'c, 'tcx> { /// Decodes an item from its AST in the cdata's metadata and adds it to the /// ast-map. 
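`CodeGenOptSize`, like `CodeGenOptLevel` before it, is a `#[repr(C)]` enum with explicit discriminants so the value passed across the FFI boundary matches the corresponding C++ enum in the wrapper code. The general shape of such a declaration (the names below are illustrative, not part of the real bindings):

    #[derive(Copy, Clone, PartialEq)]
    #[repr(C)]
    pub enum ExampleOptSize {
        None = 0,
        Default = 1,
        Aggressive = 2,
    }

    extern {
        // hypothetical wrapper entry point taking the enum by value
        pub fn ExampleSetOptSize(level: ExampleOptSize);
    }
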
-pub fn decode_inlined_item<'tcx>(cdata: &cstore::crate_metadata, - tcx: &TyCtxt<'tcx>, - parent_def_path: ast_map::DefPath, - parent_did: DefId, - ast_doc: rbml::Doc, - orig_did: DefId) - -> &'tcx InlinedItem { +pub fn decode_inlined_item<'a, 'tcx>(cdata: &cstore::crate_metadata, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + parent_def_path: ast_map::DefPath, + parent_did: DefId, + ast_doc: rbml::Doc, + orig_did: DefId) + -> &'tcx InlinedItem { debug!("> Decoding inlined fn: {:?}", tcx.item_path_str(orig_did)); let mut ast_dsr = reader::Decoder::new(ast_doc); let from_id_range = Decodable::decode(&mut ast_dsr).unwrap(); @@ -362,11 +362,8 @@ impl tr for Def { match *self { Def::Fn(did) => Def::Fn(did.tr(dcx)), Def::Method(did) => Def::Method(did.tr(dcx)), - Def::SelfTy(opt_did, impl_ids) => { Def::SelfTy(opt_did.map(|did| did.tr(dcx)), - impl_ids.map(|(nid1, nid2)| { - (dcx.tr_id(nid1), - dcx.tr_id(nid2)) - })) } + Def::SelfTy(opt_did, impl_id) => { Def::SelfTy(opt_did.map(|did| did.tr(dcx)), + impl_id.map(|id| dcx.tr_id(id))) } Def::Mod(did) => { Def::Mod(did.tr(dcx)) } Def::ForeignMod(did) => { Def::ForeignMod(did.tr(dcx)) } Def::Static(did, m) => { Def::Static(did.tr(dcx), m) } @@ -445,7 +442,7 @@ fn encode_method_callee<'a, 'tcx>(ecx: &e::EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, autoderef: u32, method: &ty::MethodCallee<'tcx>) { - use serialize::Encoder; + use rustc_serialize::Encoder; rbml_w.emit_struct("MethodCallee", 4, |rbml_w| { rbml_w.emit_struct_field("autoderef", 0, |rbml_w| { @@ -561,7 +558,7 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { } fn emit_upvar_capture(&mut self, ecx: &e::EncodeContext, capture: &ty::UpvarCapture) { - use serialize::Encoder; + use rustc_serialize::Encoder; self.emit_enum("UpvarCapture", |this| { match *capture { @@ -589,7 +586,7 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { fn emit_auto_adjustment<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, adj: &adjustment::AutoAdjustment<'tcx>) { - use serialize::Encoder; + use rustc_serialize::Encoder; self.emit_enum("AutoAdjustment", |this| { match *adj { @@ -621,7 +618,7 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { fn emit_autoref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, autoref: &adjustment::AutoRef<'tcx>) { - use serialize::Encoder; + use rustc_serialize::Encoder; self.emit_enum("AutoRef", |this| { match autoref { @@ -643,7 +640,7 @@ impl<'a, 'tcx> rbml_writer_helpers<'tcx> for Encoder<'a> { fn emit_auto_deref_ref<'b>(&mut self, ecx: &e::EncodeContext<'b, 'tcx>, auto_deref_ref: &adjustment::AutoDerefRef<'tcx>) { - use serialize::Encoder; + use rustc_serialize::Encoder; self.emit_struct("AutoDerefRef", 2, |this| { this.emit_struct_field("autoderefs", 0, |this| auto_deref_ref.autoderefs.encode(this)); @@ -861,21 +858,19 @@ trait rbml_decoder_decoder_helpers<'tcx> { // Versions of the type reading functions that don't need the full // DecodeContext. 
- fn read_ty_nodcx(&mut self, - tcx: &TyCtxt<'tcx>, cdata: &cstore::crate_metadata) -> Ty<'tcx>; - fn read_tys_nodcx(&mut self, - tcx: &TyCtxt<'tcx>, - cdata: &cstore::crate_metadata) -> Vec>; - fn read_substs_nodcx(&mut self, tcx: &TyCtxt<'tcx>, - cdata: &cstore::crate_metadata) - -> subst::Substs<'tcx>; + fn read_ty_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + cdata: &cstore::crate_metadata) -> Ty<'tcx>; + fn read_tys_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + cdata: &cstore::crate_metadata) -> Vec>; + fn read_substs_nodcx<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + cdata: &cstore::crate_metadata) + -> subst::Substs<'tcx>; } impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> { - fn read_ty_nodcx(&mut self, - tcx: &TyCtxt<'tcx>, - cdata: &cstore::crate_metadata) - -> Ty<'tcx> { + fn read_ty_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>, + cdata: &cstore::crate_metadata) + -> Ty<'tcx> { self.read_opaque(|_, doc| { Ok( tydecode::TyDecoder::with_doc(tcx, cdata.cnum, doc, @@ -884,19 +879,17 @@ impl<'a, 'tcx> rbml_decoder_decoder_helpers<'tcx> for reader::Decoder<'a> { }).unwrap() } - fn read_tys_nodcx(&mut self, - tcx: &TyCtxt<'tcx>, - cdata: &cstore::crate_metadata) -> Vec> { + fn read_tys_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>, + cdata: &cstore::crate_metadata) -> Vec> { self.read_to_vec(|this| Ok(this.read_ty_nodcx(tcx, cdata)) ) .unwrap() .into_iter() .collect() } - fn read_substs_nodcx(&mut self, - tcx: &TyCtxt<'tcx>, - cdata: &cstore::crate_metadata) - -> subst::Substs<'tcx> + fn read_substs_nodcx<'b>(&mut self, tcx: TyCtxt<'b, 'tcx, 'tcx>, + cdata: &cstore::crate_metadata) + -> subst::Substs<'tcx> { self.read_opaque(|_, doc| { Ok( @@ -1153,7 +1146,7 @@ fn decode_side_tables(dcx: &DecodeContext, } c::tag_table_item_subst => { let item_substs = ty::ItemSubsts { - substs: val_dsr.read_substs(dcx) + substs: dcx.tcx.mk_substs(val_dsr.read_substs(dcx)) }; dcx.tcx.tables.borrow_mut().item_substs.insert( id, item_substs); @@ -1325,6 +1318,14 @@ fn mk_ctxt() -> parse::ParseSess { parse::ParseSess::new() } +#[cfg(test)] +fn with_testing_context T>(f: F) -> T { + let assigner = FakeNodeIdAssigner; + let mut resolver = DummyResolver; + let mut lcx = LoweringContext::testing_context(&assigner, &mut resolver); + f(&mut lcx) +} + #[cfg(test)] fn roundtrip(in_item: hir::Item) { let mut wr = Cursor::new(Vec::new()); @@ -1338,34 +1339,34 @@ fn roundtrip(in_item: hir::Item) { #[test] fn test_basic() { let cx = mk_ctxt(); - let fnia = FakeNodeIdAssigner; - let lcx = LoweringContext::new(&fnia, None); - roundtrip(lower_item(&lcx, "e_item!(&cx, - fn foo() {} - ).unwrap())); + with_testing_context(|lcx| { + roundtrip(lcx.lower_item("e_item!(&cx, + fn foo() {} + ).unwrap())); + }); } #[test] fn test_smalltalk() { let cx = mk_ctxt(); - let fnia = FakeNodeIdAssigner; - let lcx = LoweringContext::new(&fnia, None); - roundtrip(lower_item(&lcx, "e_item!(&cx, - fn foo() -> isize { 3 + 4 } // first smalltalk program ever executed. - ).unwrap())); + with_testing_context(|lcx| { + roundtrip(lcx.lower_item("e_item!(&cx, + fn foo() -> isize { 3 + 4 } // first smalltalk program ever executed. 
+ ).unwrap())); + }); } #[test] fn test_more() { let cx = mk_ctxt(); - let fnia = FakeNodeIdAssigner; - let lcx = LoweringContext::new(&fnia, None); - roundtrip(lower_item(&lcx, "e_item!(&cx, - fn foo(x: usize, y: usize) -> usize { - let z = x + y; - return z; - } - ).unwrap())); + with_testing_context(|lcx| { + roundtrip(lcx.lower_item("e_item!(&cx, + fn foo(x: usize, y: usize) -> usize { + let z = x + y; + return z; + } + ).unwrap())); + }); } #[test] @@ -1377,21 +1378,22 @@ fn test_simplification() { return alist {eq_fn: eq_int, data: Vec::new()}; } ).unwrap(); - let fnia = FakeNodeIdAssigner; - let lcx = LoweringContext::new(&fnia, None); - let hir_item = lower_item(&lcx, &item); - let item_in = InlinedItemRef::Item(&hir_item); - let item_out = simplify_ast(item_in); - let item_exp = InlinedItem::Item(P(lower_item(&lcx, "e_item!(&cx, - fn new_int_alist() -> alist { - return alist {eq_fn: eq_int, data: Vec::new()}; + let cx = mk_ctxt(); + with_testing_context(|lcx| { + let hir_item = lcx.lower_item(&item); + let item_in = InlinedItemRef::Item(&hir_item); + let item_out = simplify_ast(item_in); + let item_exp = InlinedItem::Item(P(lcx.lower_item("e_item!(&cx, + fn new_int_alist() -> alist { + return alist {eq_fn: eq_int, data: Vec::new()}; + } + ).unwrap()))); + match (item_out, item_exp) { + (InlinedItem::Item(item_out), InlinedItem::Item(item_exp)) => { + assert!(pprust::item_to_string(&item_out) == + pprust::item_to_string(&item_exp)); + } + _ => bug!() } - ).unwrap()))); - match (item_out, item_exp) { - (InlinedItem::Item(item_out), InlinedItem::Item(item_exp)) => { - assert!(pprust::item_to_string(&item_out) == - pprust::item_to_string(&item_exp)); - } - _ => bug!() - } + }); } diff --git a/src/librustc_metadata/common.rs b/src/librustc_metadata/common.rs index ea4e257542..b3ca399132 100644 --- a/src/librustc_metadata/common.rs +++ b/src/librustc_metadata/common.rs @@ -250,3 +250,9 @@ pub fn rustc_version() -> String { option_env!("CFG_VERSION").unwrap_or("unknown version") ) } + +pub const tag_panic_strategy: usize = 0x114; + +// NB: increment this if you change the format of metadata such that +// rustc_version can't be found. +pub const metadata_encoding_version : &'static [u8] = &[b'r', b'u', b's', b't', 0, 0, 0, 2]; diff --git a/src/librustc_metadata/creader.rs b/src/librustc_metadata/creader.rs index df9072835b..90f4ebc1a1 100644 --- a/src/librustc_metadata/creader.rs +++ b/src/librustc_metadata/creader.rs @@ -12,14 +12,14 @@ //! 
Validates all used crates and extern libraries and loads their metadata -use common::rustc_version; use cstore::{self, CStore, CrateSource, MetadataBlob}; use decoder; use loader::{self, CratePaths}; use rustc::hir::svh::Svh; -use rustc::dep_graph::DepNode; +use rustc::dep_graph::{DepGraph, DepNode}; use rustc::session::{config, Session}; +use rustc::session::config::PanicStrategy; use rustc::session::search_paths::PathKind; use rustc::middle::cstore::{CrateStore, validate_crate_name, ExternCrate}; use rustc::util::nodemap::FnvHashMap; @@ -37,15 +37,15 @@ use syntax::parse; use syntax::attr; use syntax::attr::AttrMetaMethods; use syntax::parse::token::InternedString; -use rustc::hir::intravisit::Visitor; -use rustc::hir; +use syntax::visit; use log; -pub struct LocalCrateReader<'a, 'b:'a> { +struct LocalCrateReader<'a> { sess: &'a Session, cstore: &'a CStore, creader: CrateReader<'a>, - ast_map: &'a hir_map::Map<'b>, + krate: &'a ast::Crate, + definitions: &'a hir_map::Definitions, } pub struct CrateReader<'a> { @@ -56,9 +56,10 @@ pub struct CrateReader<'a> { local_crate_name: String, } -impl<'a, 'b, 'hir> Visitor<'hir> for LocalCrateReader<'a, 'b> { - fn visit_item(&mut self, a: &'hir hir::Item) { +impl<'a, 'ast> visit::Visitor<'ast> for LocalCrateReader<'a> { + fn visit_item(&mut self, a: &'ast ast::Item) { self.process_item(a); + visit::walk_item(self, a); } } @@ -80,11 +81,8 @@ fn dump_crates(cstore: &CStore) { fn should_link(i: &ast::Item) -> bool { !attr::contains_name(&i.attrs, "no_link") } -// Dup for the hir -fn should_link_hir(i: &hir::Item) -> bool { - !attr::contains_name(&i.attrs, "no_link") -} +#[derive(Debug)] struct CrateInfo { ident: String, name: String, @@ -144,6 +142,11 @@ impl PMDSource { } } +enum LoadResult { + Previous(ast::CrateNum), + Loaded(loader::Library), +} + impl<'a> CrateReader<'a> { pub fn new(sess: &'a Session, cstore: &'a CStore, @@ -181,31 +184,6 @@ impl<'a> CrateReader<'a> { } } - // Dup of the above, but for the hir - fn extract_crate_info_hir(&self, i: &hir::Item) -> Option { - match i.node { - hir::ItemExternCrate(ref path_opt) => { - debug!("resolving extern crate stmt. 
ident: {} path_opt: {:?}", - i.name, path_opt); - let name = match *path_opt { - Some(name) => { - validate_crate_name(Some(self.sess), &name.as_str(), - Some(i.span)); - name.to_string() - } - None => i.name.to_string(), - }; - Some(CrateInfo { - ident: i.name.to_string(), - name: name, - id: i.id, - should_link: should_link_hir(i), - }) - } - _ => None - } - } - fn existing_match(&self, name: &str, hash: Option<&Svh>, kind: PathKind) -> Option { let mut ret = None; @@ -255,25 +233,6 @@ impl<'a> CrateReader<'a> { return ret; } - fn verify_rustc_version(&self, - name: &str, - span: Span, - metadata: &MetadataBlob) { - let crate_rustc_version = decoder::crate_rustc_version(metadata.as_slice()); - if crate_rustc_version != Some(rustc_version()) { - let mut err = struct_span_fatal!(self.sess, span, E0514, - "the crate `{}` has been compiled with {}, which is \ - incompatible with this version of rustc", - name, - crate_rustc_version - .as_ref().map(|s| &**s) - .unwrap_or("an old version of rustc")); - err.fileline_help(span, "consider removing the compiled binaries and recompiling \ - with your current version of rustc"); - err.emit(); - } - } - fn verify_no_symbol_conflicts(&self, span: Span, metadata: &MetadataBlob) { @@ -315,7 +274,6 @@ impl<'a> CrateReader<'a> { explicitly_linked: bool) -> (ast::CrateNum, Rc, cstore::CrateSource) { - self.verify_rustc_version(name, span, &lib.metadata); self.verify_no_symbol_conflicts(span, &lib.metadata); // Claim this crate number and cache it @@ -345,6 +303,7 @@ impl<'a> CrateReader<'a> { extern_crate: Cell::new(None), index: decoder::load_index(metadata.as_slice()), xref_index: decoder::load_xrefs(metadata.as_slice()), + key_map: decoder::load_key_map(metadata.as_slice()), data: metadata, cnum_map: RefCell::new(cnum_map), cnum: cnum, @@ -383,12 +342,8 @@ impl<'a> CrateReader<'a> { kind: PathKind, explicitly_linked: bool) -> (ast::CrateNum, Rc, cstore::CrateSource) { - enum LookupResult { - Previous(ast::CrateNum), - Loaded(loader::Library), - } let result = match self.existing_match(name, hash, kind) { - Some(cnum) => LookupResult::Previous(cnum), + Some(cnum) => LoadResult::Previous(cnum), None => { let mut load_ctxt = loader::Context { sess: self.sess, @@ -403,42 +358,62 @@ impl<'a> CrateReader<'a> { rejected_via_hash: vec!(), rejected_via_triple: vec!(), rejected_via_kind: vec!(), + rejected_via_version: vec!(), should_match_name: true, }; - let library = load_ctxt.load_library_crate(); - - // In the case that we're loading a crate, but not matching - // against a hash, we could load a crate which has the same hash - // as an already loaded crate. If this is the case prevent - // duplicates by just using the first crate. 
- let meta_hash = decoder::get_crate_hash(library.metadata - .as_slice()); - let mut result = LookupResult::Loaded(library); - self.cstore.iter_crate_data(|cnum, data| { - if data.name() == name && meta_hash == data.hash() { - assert!(hash.is_none()); - result = LookupResult::Previous(cnum); - } - }); - result + match self.load(&mut load_ctxt) { + Some(result) => result, + None => load_ctxt.report_load_errs(), + } } }; match result { - LookupResult::Previous(cnum) => { + LoadResult::Previous(cnum) => { let data = self.cstore.get_crate_data(cnum); if explicitly_linked && !data.explicitly_linked.get() { data.explicitly_linked.set(explicitly_linked); } (cnum, data, self.cstore.used_crate_source(cnum)) } - LookupResult::Loaded(library) => { + LoadResult::Loaded(library) => { self.register_crate(root, ident, name, span, library, explicitly_linked) } } } + fn load(&mut self, loader: &mut loader::Context) -> Option { + let library = match loader.maybe_load_library_crate() { + Some(lib) => lib, + None => return None, + }; + + // In the case that we're loading a crate, but not matching + // against a hash, we could load a crate which has the same hash + // as an already loaded crate. If this is the case prevent + // duplicates by just using the first crate. + // + // Note that we only do this for target triple crates, though, as we + // don't want to match a host crate against an equivalent target one + // already loaded. + if loader.triple == self.sess.opts.target_triple { + let meta_hash = decoder::get_crate_hash(library.metadata.as_slice()); + let meta_name = decoder::get_crate_name(library.metadata.as_slice()) + .to_string(); + let mut result = LoadResult::Loaded(library); + self.cstore.iter_crate_data(|cnum, data| { + if data.name() == meta_name && meta_hash == data.hash() { + assert!(loader.hash.is_none()); + result = LoadResult::Previous(cnum); + } + }); + Some(result) + } else { + Some(LoadResult::Loaded(library)) + } + } + fn update_extern_crate(&mut self, cnum: ast::CrateNum, mut extern_crate: ExternCrate) @@ -511,37 +486,49 @@ impl<'a> CrateReader<'a> { rejected_via_hash: vec!(), rejected_via_triple: vec!(), rejected_via_kind: vec!(), + rejected_via_version: vec!(), should_match_name: true, }; - let library = match load_ctxt.maybe_load_library_crate() { - Some(l) => l, - None if is_cross => { - // Try loading from target crates. This will abort later if we - // try to load a plugin registrar function, - target_only = true; - should_link = info.should_link; - - load_ctxt.target = &self.sess.target.target; - load_ctxt.triple = target_triple; - load_ctxt.filesearch = self.sess.target_filesearch(PathKind::Crate); - load_ctxt.load_library_crate() + let library = self.load(&mut load_ctxt).or_else(|| { + if !is_cross { + return None } - None => { load_ctxt.report_load_errs(); }, + // Try loading from target crates. 
This will abort later if we + // try to load a plugin registrar function, + target_only = true; + should_link = info.should_link; + + load_ctxt.target = &self.sess.target.target; + load_ctxt.triple = target_triple; + load_ctxt.filesearch = self.sess.target_filesearch(PathKind::Crate); + + self.load(&mut load_ctxt) + }); + let library = match library { + Some(l) => l, + None => load_ctxt.report_load_errs(), }; - let dylib = library.dylib.clone(); - let register = should_link && self.existing_match(&info.name, - None, - PathKind::Crate).is_none(); - let metadata = if register { - // Register crate now to avoid double-reading metadata - let (_, cmd, _) = self.register_crate(&None, &info.ident, - &info.name, span, library, - true); - PMDSource::Registered(cmd) - } else { - // Not registering the crate; just hold on to the metadata - PMDSource::Owned(library.metadata) + let (dylib, metadata) = match library { + LoadResult::Previous(cnum) => { + let dylib = self.cstore.opt_used_crate_source(cnum).unwrap().dylib; + let data = self.cstore.get_crate_data(cnum); + (dylib, PMDSource::Registered(data)) + } + LoadResult::Loaded(library) => { + let dylib = library.dylib.clone(); + let metadata = if should_link { + // Register crate now to avoid double-reading metadata + let (_, cmd, _) = self.register_crate(&None, &info.ident, + &info.name, span, + library, true); + PMDSource::Registered(cmd) + } else { + // Not registering the crate; just hold on to the metadata + PMDSource::Owned(library.metadata) + }; + (dylib, metadata) + } }; ExtensionCrate { @@ -657,6 +644,85 @@ impl<'a> CrateReader<'a> { } } + fn inject_panic_runtime(&mut self, krate: &ast::Crate) { + // If we're only compiling an rlib, then there's no need to select a + // panic runtime, so we just skip this section entirely. + let any_non_rlib = self.sess.crate_types.borrow().iter().any(|ct| { + *ct != config::CrateTypeRlib + }); + if !any_non_rlib { + info!("panic runtime injection skipped, only generating rlib"); + return + } + + // If we need a panic runtime, we try to find an existing one here. At + // the same time we perform some general validation of the DAG we've got + // going such as ensuring everything has a compatible panic strategy. + // + // The logic for finding the panic runtime here is pretty much the same + // as the allocator case with the only addition that the panic strategy + // compilation mode also comes into play. + let desired_strategy = self.sess.opts.cg.panic.clone(); + let mut runtime_found = false; + let mut needs_panic_runtime = attr::contains_name(&krate.attrs, + "needs_panic_runtime"); + self.cstore.iter_crate_data(|cnum, data| { + needs_panic_runtime = needs_panic_runtime || data.needs_panic_runtime(); + if data.is_panic_runtime() { + // Inject a dependency from all #![needs_panic_runtime] to this + // #![panic_runtime] crate. + self.inject_dependency_if(cnum, "a panic runtime", + &|data| data.needs_panic_runtime()); + runtime_found = runtime_found || data.explicitly_linked.get(); + } + }); + + // If an explicitly linked and matching panic runtime was found, or if + // we just don't need one at all, then we're done here and there's + // nothing else to do. + if !needs_panic_runtime || runtime_found { + return + } + + // By this point we know that we (a) need a panic runtime and (b) no + // panic runtime was explicitly linked. Here we just load an appropriate + // default runtime for our panic strategy and then inject the + // dependencies. 
+ // + // We may resolve to an already loaded crate (as the crate may not have + // been explicitly linked prior to this) and we may re-inject + // dependencies again, but both of those situations are fine. + // + // Also note that we have yet to perform validation of the crate graph + // in terms of everyone has a compatible panic runtime format, that's + // performed later as part of the `dependency_format` module. + let name = match desired_strategy { + PanicStrategy::Unwind => "panic_unwind", + PanicStrategy::Abort => "panic_abort", + }; + info!("panic runtime not found -- loading {}", name); + + let (cnum, data, _) = self.resolve_crate(&None, name, name, None, + codemap::DUMMY_SP, + PathKind::Crate, false); + + // Sanity check the loaded crate to ensure it is indeed a panic runtime + // and the panic strategy is indeed what we thought it was. + if !data.is_panic_runtime() { + self.sess.err(&format!("the crate `{}` is not a panic runtime", + name)); + } + if data.panic_strategy() != desired_strategy { + self.sess.err(&format!("the crate `{}` does not have the panic \ + strategy `{}`", + name, desired_strategy.desc())); + } + + self.sess.injected_panic_runtime.set(Some(cnum)); + self.inject_dependency_if(cnum, "a panic runtime", + &|data| data.needs_panic_runtime()); + } + fn inject_allocator_crate(&mut self) { // Make sure that we actually need an allocator, if none of our // dependencies need one then we definitely don't! @@ -668,8 +734,9 @@ impl<'a> CrateReader<'a> { self.cstore.iter_crate_data(|cnum, data| { needs_allocator = needs_allocator || data.needs_allocator(); if data.is_allocator() { - debug!("{} required by rlib and is an allocator", data.name()); - self.inject_allocator_dependency(cnum); + info!("{} required by rlib and is an allocator", data.name()); + self.inject_dependency_if(cnum, "an allocator", + &|data| data.needs_allocator()); found_required_allocator = found_required_allocator || data.explicitly_linked.get(); } @@ -689,6 +756,7 @@ impl<'a> CrateReader<'a> { match *ct { config::CrateTypeExecutable => need_exe_alloc = true, config::CrateTypeDylib | + config::CrateTypeCdylib | config::CrateTypeStaticlib => need_lib_alloc = true, config::CrateTypeRlib => {} } @@ -719,87 +787,99 @@ impl<'a> CrateReader<'a> { codemap::DUMMY_SP, PathKind::Crate, false); - // To ensure that the `-Z allocation-crate=foo` option isn't abused, and - // to ensure that the allocator is indeed an allocator, we verify that - // the crate loaded here is indeed tagged #![allocator]. + // Sanity check the crate we loaded to ensure that it is indeed an + // allocator. if !data.is_allocator() { self.sess.err(&format!("the allocator crate `{}` is not tagged \ with #![allocator]", data.name())); } self.sess.injected_allocator.set(Some(cnum)); - self.inject_allocator_dependency(cnum); + self.inject_dependency_if(cnum, "an allocator", + &|data| data.needs_allocator()); } - fn inject_allocator_dependency(&self, allocator: ast::CrateNum) { + fn inject_dependency_if(&self, + krate: ast::CrateNum, + what: &str, + needs_dep: &Fn(&cstore::crate_metadata) -> bool) { + // don't perform this validation if the session has errors, as one of + // those errors may indicate a circular dependency which could cause + // this to stack overflow. + if self.sess.has_errors() { + return + } + // Before we inject any dependencies, make sure we don't inject a - // circular dependency by validating that this allocator crate doesn't - // transitively depend on any `#![needs_allocator]` crates. 
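The new `inject_panic_runtime` pass above boils down to: skip entirely when only rlibs are produced, otherwise check whether any loaded crate both needs and explicitly provides a panic runtime, and if none was linked, load the default crate for the selected `-C panic` strategy and sanity-check that it really is a panic runtime with the expected strategy. The strategy-to-crate choice, the one policy decision in the pass, mirrors the match shown above (the `PanicStrategy` here is a simplified stand-in for `rustc::session::config::PanicStrategy`):

    enum PanicStrategy { Unwind, Abort }

    fn default_panic_runtime(strategy: PanicStrategy) -> &'static str {
        match strategy {
            PanicStrategy::Unwind => "panic_unwind",
            PanicStrategy::Abort  => "panic_abort",
        }
    }
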
- validate(self, allocator, allocator); - - // All crates tagged with `needs_allocator` do not explicitly depend on - // the allocator selected for this compile, but in order for this - // compilation to be successfully linked we need to inject a dependency - // (to order the crates on the command line correctly). - // - // Here we inject a dependency from all crates with #![needs_allocator] - // to the crate tagged with #![allocator] for this compilation unit. + // circular dependency by validating that this crate doesn't + // transitively depend on any crates satisfying `needs_dep`. + validate(self, krate, krate, what, needs_dep); + + // All crates satisfying `needs_dep` do not explicitly depend on the + // crate provided for this compile, but in order for this compilation to + // be successfully linked we need to inject a dependency (to order the + // crates on the command line correctly). self.cstore.iter_crate_data(|cnum, data| { - if !data.needs_allocator() { + if !needs_dep(data) { return } - info!("injecting a dep from {} to {}", cnum, allocator); + info!("injecting a dep from {} to {}", cnum, krate); let mut cnum_map = data.cnum_map.borrow_mut(); let remote_cnum = cnum_map.len() + 1; - let prev = cnum_map.insert(remote_cnum as ast::CrateNum, allocator); + let prev = cnum_map.insert(remote_cnum as ast::CrateNum, krate); assert!(prev.is_none()); }); - fn validate(me: &CrateReader, krate: ast::CrateNum, - allocator: ast::CrateNum) { + fn validate(me: &CrateReader, + krate: ast::CrateNum, + root: ast::CrateNum, + what: &str, + needs_dep: &Fn(&cstore::crate_metadata) -> bool) { let data = me.cstore.get_crate_data(krate); - if data.needs_allocator() { + if needs_dep(&data) { let krate_name = data.name(); - let data = me.cstore.get_crate_data(allocator); - let alloc_name = data.name(); - me.sess.err(&format!("the allocator crate `{}` cannot depend \ - on a crate that needs an allocator, but \ - it depends on `{}`", alloc_name, + let data = me.cstore.get_crate_data(root); + let root_name = data.name(); + me.sess.err(&format!("the crate `{}` cannot depend \ + on a crate that needs {}, but \ + it depends on `{}`", root_name, what, krate_name)); } for (_, &dep) in data.cnum_map.borrow().iter() { - validate(me, dep, allocator); + validate(me, dep, root, what, needs_dep); } } } } -impl<'a, 'b> LocalCrateReader<'a, 'b> { - pub fn new(sess: &'a Session, - cstore: &'a CStore, - map: &'a hir_map::Map<'b>, - local_crate_name: &str) - -> LocalCrateReader<'a, 'b> { +impl<'a> LocalCrateReader<'a> { + fn new(sess: &'a Session, + cstore: &'a CStore, + defs: &'a hir_map::Definitions, + krate: &'a ast::Crate, + local_crate_name: &str) + -> LocalCrateReader<'a> { LocalCrateReader { sess: sess, cstore: cstore, creader: CrateReader::new(sess, cstore, local_crate_name), - ast_map: map, + krate: krate, + definitions: defs, } } // Traverses an AST, reading all the information about use'd crates and // extern libraries necessary for later resolving, typechecking, linking, // etc. 
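Generalizing the old allocator-only check, `inject_dependency_if` first walks the dependency graph rooted at the injected crate to reject cycles (no crate satisfying `needs_dep` may be reachable from the crate being injected), then records an extra edge from every crate that needs the facility to the crate providing it. The cycle check is a plain recursive walk; roughly, with crate numbers and a dependency map standing in for the real cstore data:

    use std::collections::HashMap;

    // Returns true if `krate` or anything it transitively depends on
    // satisfies `needs_dep` -- the condition the validation above rejects.
    fn depends_on_needy_crate(krate: u32,
                              deps: &HashMap<u32, Vec<u32>>,
                              needs_dep: &Fn(u32) -> bool) -> bool {
        if needs_dep(krate) {
            return true;
        }
        deps.get(&krate)
            .map(|ds| ds.iter().any(|&d| depends_on_needy_crate(d, deps, needs_dep)))
            .unwrap_or(false)
    }
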
- pub fn read_crates(&mut self) { - let _task = self.ast_map.dep_graph.in_task(DepNode::CrateReader); - let krate = self.ast_map.krate(); + fn read_crates(&mut self, dep_graph: &DepGraph) { + let _task = dep_graph.in_task(DepNode::CrateReader); - self.process_crate(krate); - krate.visit_all_items(self); + self.process_crate(self.krate); + visit::walk_crate(self, self.krate); self.creader.inject_allocator_crate(); + self.creader.inject_panic_runtime(self.krate); if log_enabled!(log::INFO) { dump_crates(&self.cstore); @@ -811,34 +891,33 @@ impl<'a, 'b> LocalCrateReader<'a, 'b> { self.creader.register_statically_included_foreign_items(); } - fn process_crate(&self, c: &hir::Crate) { + fn process_crate(&self, c: &ast::Crate) { for a in c.attrs.iter().filter(|m| m.name() == "link_args") { - match a.value_str() { - Some(ref linkarg) => self.cstore.add_used_link_args(&linkarg), - None => { /* fallthrough */ } + if let Some(ref linkarg) = a.value_str() { + self.cstore.add_used_link_args(&linkarg); } } } - fn process_item(&mut self, i: &hir::Item) { + fn process_item(&mut self, i: &ast::Item) { match i.node { - hir::ItemExternCrate(_) => { - if !should_link_hir(i) { + ast::ItemKind::ExternCrate(_) => { + if !should_link(i) { return; } - match self.creader.extract_crate_info_hir(i) { + match self.creader.extract_crate_info(i) { Some(info) => { let (cnum, _, _) = self.creader.resolve_crate(&None, - &info.ident, - &info.name, - None, - i.span, - PathKind::Crate, - true); - let def_id = self.ast_map.local_def_id(i.id); + &info.ident, + &info.name, + None, + i.span, + PathKind::Crate, + true); - let len = self.ast_map.def_path(def_id).data.len(); + let def_id = self.definitions.opt_local_def_id(i.id).unwrap(); + let len = self.definitions.def_path(def_id.index).data.len(); self.creader.update_extern_crate(cnum, ExternCrate { @@ -852,12 +931,12 @@ impl<'a, 'b> LocalCrateReader<'a, 'b> { None => () } } - hir::ItemForeignMod(ref fm) => self.process_foreign_mod(i, fm), + ast::ItemKind::ForeignMod(ref fm) => self.process_foreign_mod(i, fm), _ => { } } } - fn process_foreign_mod(&mut self, i: &hir::Item, fm: &hir::ForeignMod) { + fn process_foreign_mod(&mut self, i: &ast::Item, fm: &ast::ForeignMod) { if fm.abi == Abi::Rust || fm.abi == Abi::RustIntrinsic || fm.abi == Abi::PlatformIntrinsic { return; } @@ -916,6 +995,17 @@ impl<'a, 'b> LocalCrateReader<'a, 'b> { } } +/// Traverses an AST, reading all the information about use'd crates and extern +/// libraries necessary for later resolving, typechecking, linking, etc. +pub fn read_local_crates(sess: & Session, + cstore: & CStore, + defs: & hir_map::Definitions, + krate: & ast::Crate, + local_crate_name: &str, + dep_graph: &DepGraph) { + LocalCrateReader::new(sess, cstore, defs, krate, local_crate_name).read_crates(dep_graph) +} + /// Imports the codemap from an external crate into the codemap of the crate /// currently being compiled (the "local crate"). /// diff --git a/src/librustc_metadata/csearch.rs b/src/librustc_metadata/csearch.rs index 8e7be0e3a0..d1f6f7e1ff 100644 --- a/src/librustc_metadata/csearch.rs +++ b/src/librustc_metadata/csearch.rs @@ -9,6 +9,7 @@ // except according to those terms. 
use cstore; +use common; use decoder; use encoder; use loader; @@ -20,10 +21,13 @@ use middle::lang_items; use rustc::ty::{self, Ty, TyCtxt, VariantKind}; use rustc::hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; +use rustc::dep_graph::DepNode; use rustc::hir::map as hir_map; +use rustc::hir::map::DefKey; use rustc::mir::repr::Mir; use rustc::mir::mir_map::MirMap; use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap}; +use rustc::session::config::PanicStrategy; use std::cell::RefCell; use std::rc::Rc; @@ -36,99 +40,112 @@ use rustc_back::target::Target; use rustc::hir; impl<'tcx> CrateStore<'tcx> for cstore::CStore { - fn stability(&self, def: DefId) -> Option - { + fn stability(&self, def: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_stability(&cdata, def.index) } - fn deprecation(&self, def: DefId) -> Option - { + fn deprecation(&self, def: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_deprecation(&cdata, def.index) } fn visibility(&self, def: DefId) -> ty::Visibility { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_visibility(&cdata, def.index) } - fn closure_kind(&self, _tcx: &TyCtxt<'tcx>, def_id: DefId) -> ty::ClosureKind + fn closure_kind(&self, def_id: DefId) -> ty::ClosureKind { assert!(!def_id.is_local()); + self.dep_graph.read(DepNode::MetaData(def_id)); let cdata = self.get_crate_data(def_id.krate); decoder::closure_kind(&cdata, def_id.index) } - fn closure_ty(&self, tcx: &TyCtxt<'tcx>, def_id: DefId) -> ty::ClosureTy<'tcx> - { + fn closure_ty<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> ty::ClosureTy<'tcx> { assert!(!def_id.is_local()); + self.dep_graph.read(DepNode::MetaData(def_id)); let cdata = self.get_crate_data(def_id.krate); decoder::closure_ty(&cdata, def_id.index, tcx) } fn item_variances(&self, def: DefId) -> ty::ItemVariances { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_item_variances(&cdata, def.index) } fn repr_attrs(&self, def: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_repr_attrs(&cdata, def.index) } - fn item_type(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> ty::TypeScheme<'tcx> + fn item_type<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::TypeScheme<'tcx> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_type(&cdata, def.index, tcx) } - fn item_predicates(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> ty::GenericPredicates<'tcx> + fn item_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::GenericPredicates<'tcx> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_predicates(&cdata, def.index, tcx) } - fn item_super_predicates(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> ty::GenericPredicates<'tcx> + fn item_super_predicates<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> ty::GenericPredicates<'tcx> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_super_predicates(&cdata, def.index, tcx) } fn item_attrs(&self, def_id: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(def_id)); let cdata = self.get_crate_data(def_id.krate); decoder::get_item_attrs(&cdata, def_id.index) } fn 
item_symbol(&self, def: DefId) -> String { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_symbol(&cdata, def.index) } - fn trait_def(&self, tcx: &TyCtxt<'tcx>, def: DefId) -> ty::TraitDef<'tcx> + fn trait_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::TraitDef<'tcx> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_trait_def(&cdata, def.index, tcx) } - fn adt_def(&self, tcx: &TyCtxt<'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx> + fn adt_def<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) -> ty::AdtDefMaster<'tcx> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_adt_def(&self.intr, &cdata, def.index, tcx) } fn method_arg_names(&self, did: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(did)); let cdata = self.get_crate_data(did.krate); decoder::get_method_arg_names(&cdata, did.index) } fn item_name(&self, def: DefId) -> ast::Name { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_item_name(&self.intr, &cdata, def.index) } @@ -136,6 +153,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { fn inherent_implementations_for_type(&self, def_id: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(def_id)); let mut result = vec![]; let cdata = self.get_crate_data(def_id.krate); decoder::each_inherent_implementation_for_type(&cdata, def_id.index, @@ -145,6 +163,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { fn implementations_of_trait(&self, def_id: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(def_id)); let mut result = vec![]; self.iter_crate_data(|_, cdata| { decoder::each_implementation_for_trait(cdata, def_id, &mut |iid| { @@ -154,9 +173,10 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { result } - fn provided_trait_methods(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Vec>> + fn provided_trait_methods<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Vec>> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_provided_trait_methods(self.intr.clone(), &cdata, def.index, tcx) } @@ -164,25 +184,29 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { fn trait_item_def_ids(&self, def: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_trait_item_def_ids(&cdata, def.index) } fn impl_items(&self, impl_def_id: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(impl_def_id)); let cdata = self.get_crate_data(impl_def_id.krate); decoder::get_impl_items(&cdata, impl_def_id.index) } fn impl_polarity(&self, def: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_impl_polarity(&cdata, def.index) } - fn impl_trait_ref(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Option> + fn impl_trait_ref<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_impl_trait(&cdata, def.index, tcx) } @@ -190,31 +214,36 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { fn custom_coerce_unsized_kind(&self, def: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_custom_coerce_unsized_kind(&cdata, def.index) } // FIXME: killme - fn associated_consts(&self, tcx: &TyCtxt<'tcx>, 
def: DefId) - -> Vec>> { + fn associated_consts<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Vec>> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_associated_consts(self.intr.clone(), &cdata, def.index, tcx) } fn impl_parent(&self, impl_def: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(impl_def)); let cdata = self.get_crate_data(impl_def.krate); decoder::get_parent_impl(&*cdata, impl_def.index) } - fn trait_of_item(&self, tcx: &TyCtxt<'tcx>, def_id: DefId) -> Option + fn trait_of_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(def_id)); let cdata = self.get_crate_data(def_id.krate); decoder::get_trait_of_item(&cdata, def_id.index, tcx) } - fn impl_or_trait_item(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Option> + fn impl_or_trait_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_impl_or_trait_item( self.intr.clone(), @@ -225,34 +254,40 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { fn is_const_fn(&self, did: DefId) -> bool { + self.dep_graph.read(DepNode::MetaData(did)); let cdata = self.get_crate_data(did.krate); decoder::is_const_fn(&cdata, did.index) } fn is_defaulted_trait(&self, trait_def_id: DefId) -> bool { + self.dep_graph.read(DepNode::MetaData(trait_def_id)); let cdata = self.get_crate_data(trait_def_id.krate); decoder::is_defaulted_trait(&cdata, trait_def_id.index) } fn is_impl(&self, did: DefId) -> bool { + self.dep_graph.read(DepNode::MetaData(did)); let cdata = self.get_crate_data(did.krate); decoder::is_impl(&cdata, did.index) } fn is_default_impl(&self, impl_did: DefId) -> bool { + self.dep_graph.read(DepNode::MetaData(impl_did)); let cdata = self.get_crate_data(impl_did.krate); decoder::is_default_impl(&cdata, impl_did.index) } - fn is_extern_item(&self, tcx: &TyCtxt<'tcx>, did: DefId) -> bool { + fn is_extern_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId) -> bool { + self.dep_graph.read(DepNode::MetaData(did)); let cdata = self.get_crate_data(did.krate); decoder::is_extern_item(&cdata, did.index, tcx) } fn is_static_method(&self, def: DefId) -> bool { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::is_static_method(&cdata, def.index) } @@ -263,6 +298,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { } fn is_typedef(&self, did: DefId) -> bool { + self.dep_graph.read(DepNode::MetaData(did)); let cdata = self.get_crate_data(did.krate); decoder::is_typedef(&cdata, did.index) } @@ -306,6 +342,15 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { self.get_crate_data(cnum).is_allocator() } + fn is_panic_runtime(&self, cnum: ast::CrateNum) -> bool + { + self.get_crate_data(cnum).is_panic_runtime() + } + + fn panic_strategy(&self, cnum: ast::CrateNum) -> PanicStrategy { + self.get_crate_data(cnum).panic_strategy() + } + fn crate_attrs(&self, cnum: ast::CrateNum) -> Vec { decoder::get_crate_attributes(self.get_crate_data(cnum).data()) @@ -365,44 +410,59 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { decoder::get_reachable_ids(&cdata) } + fn def_index_for_def_key(&self, + cnum: ast::CrateNum, + def: DefKey) + -> Option { + let cdata = self.get_crate_data(cnum); + cdata.key_map.get(&def).cloned() + } + /// Returns the `DefKey` for a given `DefId`. 
This indicates the /// parent `DefId` as well as some idea of what kind of data the /// `DefId` refers to. fn def_key(&self, def: DefId) -> hir_map::DefKey { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::def_key(&cdata, def.index) } fn relative_def_path(&self, def: DefId) -> hir_map::DefPath { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::def_path(&cdata, def.index) } fn variant_kind(&self, def_id: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(def_id)); let cdata = self.get_crate_data(def_id.krate); decoder::get_variant_kind(&cdata, def_id.index) } fn struct_ctor_def_id(&self, struct_def_id: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(struct_def_id)); let cdata = self.get_crate_data(struct_def_id.krate); decoder::get_struct_ctor_def_id(&cdata, struct_def_id.index) } fn tuple_struct_definition_if_ctor(&self, did: DefId) -> Option { + self.dep_graph.read(DepNode::MetaData(did)); let cdata = self.get_crate_data(did.krate); decoder::get_tuple_struct_definition_if_ctor(&cdata, did.index) } fn struct_field_names(&self, def: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::get_struct_field_names(&self.intr, &cdata, def.index) } fn item_children(&self, def_id: DefId) -> Vec { + self.dep_graph.read(DepNode::MetaData(def_id)); let mut result = vec![]; let crate_data = self.get_crate_data(def_id.krate); let get_crate_data = |cnum| self.get_crate_data(cnum); @@ -432,20 +492,23 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { result } - fn maybe_get_item_ast(&'tcx self, tcx: &TyCtxt<'tcx>, def: DefId) - -> FoundAst<'tcx> + fn maybe_get_item_ast<'a>(&'tcx self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> FoundAst<'tcx> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::maybe_get_item_ast(&cdata, tcx, def.index) } - fn maybe_get_item_mir(&self, tcx: &TyCtxt<'tcx>, def: DefId) - -> Option> { + fn maybe_get_item_mir<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) + -> Option> { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::maybe_get_item_mir(&cdata, tcx, def.index) } fn is_item_mir_available(&self, def: DefId) -> bool { + self.dep_graph.read(DepNode::MetaData(def)); let cdata = self.get_crate_data(def.krate); decoder::is_item_mir_available(&cdata, def.index) } @@ -476,11 +539,11 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { { loader::meta_section_name(target) } - fn encode_type(&self, - tcx: &TyCtxt<'tcx>, - ty: Ty<'tcx>, - def_id_to_string: fn(&TyCtxt<'tcx>, DefId) -> String) - -> Vec + fn encode_type<'a>(&self, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty: Ty<'tcx>, + def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String) + -> Vec { encoder::encoded_ty(tcx, ty, def_id_to_string) } @@ -500,14 +563,13 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { self.do_extern_mod_stmt_cnum(emod_id) } - fn encode_metadata(&self, - tcx: &TyCtxt<'tcx>, - reexports: &def::ExportMap, - item_symbols: &RefCell>, - link_meta: &LinkMeta, - reachable: &NodeSet, - mir_map: &MirMap<'tcx>, - krate: &hir::Crate) -> Vec + fn encode_metadata<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + reexports: &def::ExportMap, + item_symbols: &RefCell>, + link_meta: &LinkMeta, + reachable: &NodeSet, + mir_map: &MirMap<'tcx>, + krate: &hir::Crate) -> Vec { let ecx = encoder::EncodeContext { diag: tcx.sess.diagnostic(), @@ 
-526,7 +588,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { fn metadata_encoding_version(&self) -> &[u8] { - encoder::metadata_encoding_version + common::metadata_encoding_version } /// Returns a map from a sufficiently visible external item (i.e. an external item that is @@ -536,7 +598,7 @@ impl<'tcx> CrateStore<'tcx> for cstore::CStore { let mut visible_parent_map = self.visible_parent_map.borrow_mut(); if !visible_parent_map.is_empty() { return visible_parent_map; } - use rustc::middle::cstore::{CrateStore, ChildItem}; + use rustc::middle::cstore::ChildItem; use std::collections::vec_deque::VecDeque; use std::collections::hash_map::Entry; for cnum in 1 .. self.next_crate_num() { diff --git a/src/librustc_metadata/cstore.rs b/src/librustc_metadata/cstore.rs index d5a9adafe7..5464f7e295 100644 --- a/src/librustc_metadata/cstore.rs +++ b/src/librustc_metadata/cstore.rs @@ -15,14 +15,18 @@ pub use self::MetadataBlob::*; +use common; use creader; use decoder; use index; use loader; -use rustc::hir::def_id::DefId; +use rustc::dep_graph::DepGraph; +use rustc::hir::def_id::{DefIndex, DefId}; +use rustc::hir::map::DefKey; use rustc::hir::svh::Svh; use rustc::middle::cstore::{ExternCrate}; +use rustc::session::config::PanicStrategy; use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet, DefIdMap}; use std::cell::{RefCell, Ref, Cell}; @@ -77,6 +81,13 @@ pub struct crate_metadata { pub index: index::Index, pub xref_index: index::DenseIndex, + /// For each public item in this crate, we encode a key. When the + /// crate is loaded, we read all the keys and put them in this + /// hashmap, which gives the reverse mapping. This allows us to + /// quickly retrace a `DefPath`, which is needed for incremental + /// compilation support. + pub key_map: FnvHashMap, + /// Flag if this crate is required by an rlib version of this crate, or in /// other words whether it was explicitly linked to. 
An example of a crate /// where this is false is when an allocator crate is injected into the @@ -85,6 +96,7 @@ pub struct crate_metadata { } pub struct CStore { + pub dep_graph: DepGraph, metas: RefCell>>, /// Map from NodeId's of local extern crate statements to crate numbers extern_mod_crate_map: RefCell>, @@ -97,8 +109,10 @@ pub struct CStore { } impl CStore { - pub fn new(intr: Rc) -> CStore { + pub fn new(dep_graph: &DepGraph, + intr: Rc) -> CStore { CStore { + dep_graph: dep_graph.clone(), metas: RefCell::new(FnvHashMap()), extern_mod_crate_map: RefCell::new(FnvHashMap()), used_crate_sources: RefCell::new(Vec::new()), @@ -281,23 +295,42 @@ impl crate_metadata { let attrs = decoder::get_crate_attributes(self.data()); attr::contains_name(&attrs, "needs_allocator") } + + pub fn is_panic_runtime(&self) -> bool { + let attrs = decoder::get_crate_attributes(self.data()); + attr::contains_name(&attrs, "panic_runtime") + } + + pub fn needs_panic_runtime(&self) -> bool { + let attrs = decoder::get_crate_attributes(self.data()); + attr::contains_name(&attrs, "needs_panic_runtime") + } + + pub fn panic_strategy(&self) -> PanicStrategy { + decoder::get_panic_strategy(self.data()) + } } impl MetadataBlob { - pub fn as_slice<'a>(&'a self) -> &'a [u8] { - let slice = match *self { + pub fn as_slice_raw<'a>(&'a self) -> &'a [u8] { + match *self { MetadataVec(ref vec) => &vec[..], MetadataArchive(ref ar) => ar.as_slice(), - }; - if slice.len() < 4 { + } + } + + pub fn as_slice<'a>(&'a self) -> &'a [u8] { + let slice = self.as_slice_raw(); + let len_offset = 4 + common::metadata_encoding_version.len(); + if slice.len() < len_offset+4 { &[] // corrupt metadata } else { - let len = (((slice[0] as u32) << 24) | - ((slice[1] as u32) << 16) | - ((slice[2] as u32) << 8) | - ((slice[3] as u32) << 0)) as usize; - if len + 4 <= slice.len() { - &slice[4.. 
len + 4] + let len = (((slice[len_offset+0] as u32) << 24) | + ((slice[len_offset+1] as u32) << 16) | + ((slice[len_offset+2] as u32) << 8) | + ((slice[len_offset+3] as u32) << 0)) as usize; + if len <= slice.len() - 4 - len_offset { + &slice[len_offset + 4..len_offset + len + 4] } else { &[] // corrupt or old metadata } diff --git a/src/librustc_metadata/decoder.rs b/src/librustc_metadata/decoder.rs index 6a634d061f..b6f35074b7 100644 --- a/src/librustc_metadata/decoder.rs +++ b/src/librustc_metadata/decoder.rs @@ -17,6 +17,7 @@ use self::Family::*; use astencode::decode_inlined_item; use cstore::{self, crate_metadata}; use common::*; +use def_key; use encoder::def_to_u64; use index; use tls_context; @@ -24,8 +25,10 @@ use tydecode::TyDecoder; use rustc::hir::svh::Svh; use rustc::hir::map as hir_map; +use rustc::hir::map::DefKey; use rustc::util::nodemap::FnvHashMap; use rustc::hir; +use rustc::session::config::PanicStrategy; use middle::cstore::{LOCAL_CRATE, FoundAst, InlinedItem, LinkagePreference}; use middle::cstore::{DefLike, DlDef, DlField, DlImpl, tls}; @@ -42,14 +45,13 @@ use rustc::mir; use rustc::mir::visit::MutVisitor; use std::cell::Cell; -use std::io::prelude::*; use std::io; use std::rc::Rc; use std::str; use rbml::reader; use rbml; -use serialize::Decodable; +use rustc_serialize::Decodable; use syntax::attr; use syntax::parse::token::{self, IdentInterner}; use syntax::ast; @@ -70,7 +72,10 @@ impl crate_metadata { fn lookup_item(&self, item_id: DefIndex) -> rbml::Doc { match self.get_item(item_id) { - None => bug!("lookup_item: id not found: {:?}", item_id), + None => bug!("lookup_item: id not found: {:?} in crate {:?} with number {}", + item_id, + self.name, + self.cnum), Some(d) => d } } @@ -91,6 +96,29 @@ pub fn load_xrefs(data: &[u8]) -> index::DenseIndex { index::DenseIndex::from_buf(index.data, index.start, index.end) } +// Go through each item in the metadata and create a map from that +// item's def-key to the item's DefIndex. 
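// A minimal sketch (not part of the patch) of how the map built here is
// consumed once it is stored as `crate_metadata::key_map`; `cstore`, `cnum`
// and `key` stand for whatever crate store, crate number and DefKey the
// caller already has in hand:
//
//     let cdata = cstore.get_crate_data(cnum);
//     if let Some(index) = cdata.key_map.get(&key).cloned() {
//         // `index` is the DefIndex whose metadata entry encoded `key`, so
//         // the item can be named again as DefId { krate: cnum, index: index }.
//     }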
+pub fn load_key_map(data: &[u8]) -> FnvHashMap { + let root_doc = rbml::Doc::new(data); + let items_doc = reader::get_doc(root_doc, tag_items); + let items_data_doc = reader::get_doc(items_doc, tag_items_data); + reader::docs(items_data_doc) + .filter(|&(tag, _)| tag == tag_items_data_item) + .map(|(_, item_doc)| { + // load def-key from item + let key = item_def_key(item_doc); + + // load def-index from item; we only encode the full def-id, + // so just pull out the index + let def_id_doc = reader::get_doc(item_doc, tag_def_id); + let def_id = untranslated_def_id(def_id_doc); + assert!(def_id.is_local()); // local to the crate we are decoding, that is + + (key, def_id.index) + }) + .collect() +} + #[derive(Clone, Copy, Debug, PartialEq)] enum Family { ImmStatic, // c @@ -189,10 +217,14 @@ fn item_symbol(item: rbml::Doc) -> String { reader::get_doc(item, tag_items_data_item_symbol).as_str().to_string() } -fn translated_def_id(cdata: Cmd, d: rbml::Doc) -> DefId { +fn untranslated_def_id(d: rbml::Doc) -> DefId { let id = reader::doc_as_u64(d); let index = DefIndex::new((id & 0xFFFF_FFFF) as usize); - let def_id = DefId { krate: (id >> 32) as u32, index: index }; + DefId { krate: (id >> 32) as u32, index: index } +} + +fn translated_def_id(cdata: Cmd, d: rbml::Doc) -> DefId { + let def_id = untranslated_def_id(d); translate_def_id(cdata, def_id) } @@ -222,14 +254,15 @@ fn variant_disr_val(d: rbml::Doc) -> Option { }) } -fn doc_type<'tcx>(doc: rbml::Doc, tcx: &TyCtxt<'tcx>, cdata: Cmd) -> Ty<'tcx> { +fn doc_type<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd) -> Ty<'tcx> { let tp = reader::get_doc(doc, tag_items_data_item_type); TyDecoder::with_doc(tcx, cdata.cnum, tp, &mut |did| translate_def_id(cdata, did)) .parse_ty() } -fn maybe_doc_type<'tcx>(doc: rbml::Doc, tcx: &TyCtxt<'tcx>, cdata: Cmd) -> Option> { +fn maybe_doc_type<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd) + -> Option> { reader::maybe_get_doc(doc, tag_items_data_item_type).map(|tp| { TyDecoder::with_doc(tcx, cdata.cnum, tp, &mut |did| translate_def_id(cdata, did)) @@ -237,20 +270,20 @@ fn maybe_doc_type<'tcx>(doc: rbml::Doc, tcx: &TyCtxt<'tcx>, cdata: Cmd) -> Optio }) } -pub fn item_type<'tcx>(_item_id: DefId, item: rbml::Doc, - tcx: &TyCtxt<'tcx>, cdata: Cmd) -> Ty<'tcx> { +pub fn item_type<'a, 'tcx>(_item_id: DefId, item: rbml::Doc, + tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd) -> Ty<'tcx> { doc_type(item, tcx, cdata) } -fn doc_trait_ref<'tcx>(doc: rbml::Doc, tcx: &TyCtxt<'tcx>, cdata: Cmd) - -> ty::TraitRef<'tcx> { +fn doc_trait_ref<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd) + -> ty::TraitRef<'tcx> { TyDecoder::with_doc(tcx, cdata.cnum, doc, &mut |did| translate_def_id(cdata, did)) .parse_trait_ref() } -fn item_trait_ref<'tcx>(doc: rbml::Doc, tcx: &TyCtxt<'tcx>, cdata: Cmd) - -> ty::TraitRef<'tcx> { +fn item_trait_ref<'a, 'tcx>(doc: rbml::Doc, tcx: TyCtxt<'a, 'tcx, 'tcx>, cdata: Cmd) + -> ty::TraitRef<'tcx> { let tp = reader::get_doc(doc, tag_item_trait_ref); doc_trait_ref(tp, tcx, cdata) } @@ -349,9 +382,9 @@ fn parse_associated_type_names(item_doc: rbml::Doc) -> Vec { .collect() } -pub fn get_trait_def<'tcx>(cdata: Cmd, - item_id: DefIndex, - tcx: &TyCtxt<'tcx>) -> ty::TraitDef<'tcx> +pub fn get_trait_def<'a, 'tcx>(cdata: Cmd, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) -> ty::TraitDef<'tcx> { let item_doc = cdata.lookup_item(item_id); let generics = doc_generics(item_doc, tcx, cdata, tag_item_generics); @@ -366,10 +399,11 @@ pub fn 
get_trait_def<'tcx>(cdata: Cmd, associated_type_names) } -pub fn get_adt_def<'tcx>(intr: &IdentInterner, - cdata: Cmd, - item_id: DefIndex, - tcx: &TyCtxt<'tcx>) -> ty::AdtDefMaster<'tcx> +pub fn get_adt_def<'a, 'tcx>(intr: &IdentInterner, + cdata: Cmd, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::AdtDefMaster<'tcx> { fn expect_variant_kind(family: Family) -> ty::VariantKind { match family_to_variant_kind(family) { @@ -494,26 +528,26 @@ pub fn get_adt_def<'tcx>(intr: &IdentInterner, adt } -pub fn get_predicates<'tcx>(cdata: Cmd, - item_id: DefIndex, - tcx: &TyCtxt<'tcx>) - -> ty::GenericPredicates<'tcx> +pub fn get_predicates<'a, 'tcx>(cdata: Cmd, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::GenericPredicates<'tcx> { let item_doc = cdata.lookup_item(item_id); doc_predicates(item_doc, tcx, cdata, tag_item_generics) } -pub fn get_super_predicates<'tcx>(cdata: Cmd, - item_id: DefIndex, - tcx: &TyCtxt<'tcx>) - -> ty::GenericPredicates<'tcx> +pub fn get_super_predicates<'a, 'tcx>(cdata: Cmd, + item_id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::GenericPredicates<'tcx> { let item_doc = cdata.lookup_item(item_id); doc_predicates(item_doc, tcx, cdata, tag_item_super_predicates) } -pub fn get_type<'tcx>(cdata: Cmd, id: DefIndex, tcx: &TyCtxt<'tcx>) - -> ty::TypeScheme<'tcx> +pub fn get_type<'a, 'tcx>(cdata: Cmd, id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::TypeScheme<'tcx> { let item_doc = cdata.lookup_item(id); let t = item_type(DefId { krate: cdata.cnum, index: id }, item_doc, tcx, @@ -589,10 +623,10 @@ pub fn get_custom_coerce_unsized_kind<'tcx>( }) } -pub fn get_impl_trait<'tcx>(cdata: Cmd, - id: DefIndex, - tcx: &TyCtxt<'tcx>) - -> Option> +pub fn get_impl_trait<'a, 'tcx>(cdata: Cmd, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Option> { let item_doc = cdata.lookup_item(id); let fam = item_family(item_doc); @@ -774,8 +808,8 @@ pub fn get_item_name(intr: &IdentInterner, cdata: Cmd, id: DefIndex) -> ast::Nam item_name(intr, cdata.lookup_item(id)) } -pub fn maybe_get_item_ast<'tcx>(cdata: Cmd, tcx: &TyCtxt<'tcx>, id: DefIndex) - -> FoundAst<'tcx> { +pub fn maybe_get_item_ast<'a, 'tcx>(cdata: Cmd, tcx: TyCtxt<'a, 'tcx, 'tcx>, id: DefIndex) + -> FoundAst<'tcx> { debug!("Looking up item: {:?}", id); let item_doc = cdata.lookup_item(id); let item_did = item_def_id(item_doc, cdata); @@ -826,10 +860,10 @@ pub fn is_item_mir_available<'tcx>(cdata: Cmd, id: DefIndex) -> bool { false } -pub fn maybe_get_item_mir<'tcx>(cdata: Cmd, - tcx: &TyCtxt<'tcx>, - id: DefIndex) - -> Option> { +pub fn maybe_get_item_mir<'a, 'tcx>(cdata: Cmd, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: DefIndex) + -> Option> { let item_doc = cdata.lookup_item(id); return reader::maybe_get_doc(item_doc, tag_mir as usize).map(|mir_doc| { @@ -854,6 +888,9 @@ pub fn maybe_get_item_mir<'tcx>(cdata: Cmd, }; def_id_and_span_translator.visit_mir(&mut mir); + for promoted in &mut mir.promoted { + def_id_and_span_translator.visit_mir(promoted); + } mir }); @@ -939,11 +976,11 @@ pub fn is_static_method(cdata: Cmd, id: DefIndex) -> bool { } } -pub fn get_impl_or_trait_item<'tcx>(intr: Rc, - cdata: Cmd, - id: DefIndex, - tcx: &TyCtxt<'tcx>) - -> Option> { +pub fn get_impl_or_trait_item<'a, 'tcx>(intr: Rc, + cdata: Cmd, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Option> { let item_doc = cdata.lookup_item(id); let def_id = item_def_id(item_doc, cdata); @@ -981,7 +1018,7 @@ pub fn get_impl_or_trait_item<'tcx>(intr: Rc, let predicates = doc_predicates(item_doc, tcx, cdata, tag_method_ty_generics); 
let ity = tcx.lookup_item_type(def_id).ty; let fty = match ity.sty { - ty::TyFnDef(_, _, fty) => fty.clone(), + ty::TyFnDef(_, _, fty) => fty, _ => bug!( "the type {:?} of the method {:?} is not a function?", ity, name) @@ -1034,11 +1071,11 @@ pub fn get_item_variances(cdata: Cmd, id: DefIndex) -> ty::ItemVariances { Decodable::decode(&mut decoder).unwrap() } -pub fn get_provided_trait_methods<'tcx>(intr: Rc, - cdata: Cmd, - id: DefIndex, - tcx: &TyCtxt<'tcx>) - -> Vec>> { +pub fn get_provided_trait_methods<'a, 'tcx>(intr: Rc, + cdata: Cmd, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Vec>> { let item = cdata.lookup_item(id); reader::tagged_docs(item, tag_item_trait_item).filter_map(|mth_id| { @@ -1061,11 +1098,11 @@ pub fn get_provided_trait_methods<'tcx>(intr: Rc, }).collect() } -pub fn get_associated_consts<'tcx>(intr: Rc, - cdata: Cmd, - id: DefIndex, - tcx: &TyCtxt<'tcx>) - -> Vec>> { +pub fn get_associated_consts<'a, 'tcx>(intr: Rc, + cdata: Cmd, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Vec>> { let item = cdata.lookup_item(id); [tag_item_trait_item, tag_item_impl_item].iter().flat_map(|&tag| { @@ -1242,7 +1279,7 @@ pub fn get_crate_deps(data: &[u8]) -> Vec { reader::tagged_docs(depsdoc, tag_crate_dep).enumerate().map(|(crate_num, depdoc)| { let name = docstr(depdoc, tag_crate_dep_crate_name); - let hash = Svh::new(docstr(depdoc, tag_crate_dep_hash)); + let hash = Svh::new(reader::doc_as_u64(reader::get_doc(depdoc, tag_crate_dep_hash))); let doc = reader::get_doc(depdoc, tag_crate_dep_explicitly_linked); let explicitly_linked = reader::doc_as_u8(doc) != 0; CrateDep { @@ -1266,14 +1303,14 @@ fn list_crate_deps(data: &[u8], out: &mut io::Write) -> io::Result<()> { pub fn maybe_get_crate_hash(data: &[u8]) -> Option { let cratedoc = rbml::Doc::new(data); reader::maybe_get_doc(cratedoc, tag_crate_hash).map(|doc| { - Svh::new(doc.as_str_slice().to_string()) + Svh::new(reader::doc_as_u64(doc)) }) } pub fn get_crate_hash(data: &[u8]) -> Svh { let cratedoc = rbml::Doc::new(data); let hashdoc = reader::get_doc(cratedoc, tag_crate_hash); - Svh::new(hashdoc.as_str_slice().to_string()) + Svh::new(reader::doc_as_u64(hashdoc)) } pub fn maybe_get_crate_name(data: &[u8]) -> Option<&str> { @@ -1438,8 +1475,10 @@ pub fn each_implementation_for_trait(cdata: Cmd, } } -pub fn get_trait_of_item(cdata: Cmd, id: DefIndex, tcx: &TyCtxt) - -> Option { +pub fn get_trait_of_item<'a, 'tcx>(cdata: Cmd, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> Option { let item_doc = cdata.lookup_item(id); let parent_item_id = match item_parent_item(cdata, item_doc) { None => return None, @@ -1573,7 +1612,10 @@ pub fn is_const_fn(cdata: Cmd, id: DefIndex) -> bool { } } -pub fn is_extern_item(cdata: Cmd, id: DefIndex, tcx: &TyCtxt) -> bool { +pub fn is_extern_item<'a, 'tcx>(cdata: Cmd, + id: DefIndex, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> bool { let item_doc = match cdata.get_item(id) { Some(doc) => doc, None => return false, @@ -1608,11 +1650,11 @@ pub fn is_impl(cdata: Cmd, id: DefIndex) -> bool { } } -fn doc_generics<'tcx>(base_doc: rbml::Doc, - tcx: &TyCtxt<'tcx>, - cdata: Cmd, - tag: usize) - -> ty::Generics<'tcx> +fn doc_generics<'a, 'tcx>(base_doc: rbml::Doc, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + cdata: Cmd, + tag: usize) + -> ty::Generics<'tcx> { let doc = reader::get_doc(base_doc, tag); @@ -1656,10 +1698,10 @@ fn doc_generics<'tcx>(base_doc: rbml::Doc, ty::Generics { types: types, regions: regions } } -fn doc_predicate<'tcx>(cdata: Cmd, - doc: rbml::Doc, - tcx: &TyCtxt<'tcx>) - -> ty::Predicate<'tcx> 
+fn doc_predicate<'a, 'tcx>(cdata: Cmd, + doc: rbml::Doc, + tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::Predicate<'tcx> { let predicate_pos = cdata.xref_index.lookup( cdata.data(), reader::doc_as_u32(doc)).unwrap() as usize; @@ -1669,11 +1711,11 @@ fn doc_predicate<'tcx>(cdata: Cmd, ).parse_predicate() } -fn doc_predicates<'tcx>(base_doc: rbml::Doc, - tcx: &TyCtxt<'tcx>, - cdata: Cmd, - tag: usize) - -> ty::GenericPredicates<'tcx> +fn doc_predicates<'a, 'tcx>(base_doc: rbml::Doc, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + cdata: Cmd, + tag: usize) + -> ty::GenericPredicates<'tcx> { let doc = reader::get_doc(base_doc, tag); @@ -1725,8 +1767,8 @@ pub fn closure_kind(cdata: Cmd, closure_id: DefIndex) -> ty::ClosureKind { ty::ClosureKind::decode(&mut decoder).unwrap() } -pub fn closure_ty<'tcx>(cdata: Cmd, closure_id: DefIndex, tcx: &TyCtxt<'tcx>) - -> ty::ClosureTy<'tcx> { +pub fn closure_ty<'a, 'tcx>(cdata: Cmd, closure_id: DefIndex, tcx: TyCtxt<'a, 'tcx, 'tcx>) + -> ty::ClosureTy<'tcx> { let closure_doc = cdata.lookup_item(closure_id); let closure_ty_doc = reader::get_doc(closure_doc, tag_items_closure_ty); TyDecoder::with_doc(tcx, cdata.cnum, closure_ty_doc, &mut |did| translate_def_id(cdata, did)) @@ -1736,10 +1778,18 @@ pub fn closure_ty<'tcx>(cdata: Cmd, closure_id: DefIndex, tcx: &TyCtxt<'tcx>) pub fn def_key(cdata: Cmd, id: DefIndex) -> hir_map::DefKey { debug!("def_key: id={:?}", id); let item_doc = cdata.lookup_item(id); + item_def_key(item_doc) +} + +fn item_def_key(item_doc: rbml::Doc) -> hir_map::DefKey { match reader::maybe_get_doc(item_doc, tag_def_key) { Some(def_key_doc) => { let mut decoder = reader::Decoder::new(def_key_doc); - hir_map::DefKey::decode(&mut decoder).unwrap() + let simple_key = def_key::DefKey::decode(&mut decoder).unwrap(); + let name = reader::maybe_get_doc(item_doc, tag_paths_data_name).map(|name| { + token::intern(name.as_str_slice()) + }); + def_key::recover_def_key(simple_key, name) } None => { bug!("failed to find block with tag {:?} for item with family {:?}", @@ -1753,3 +1803,13 @@ pub fn def_path(cdata: Cmd, id: DefIndex) -> hir_map::DefPath { debug!("def_path(id={:?})", id); hir_map::DefPath::make(cdata.cnum, id, |parent| def_key(cdata, parent)) } + +pub fn get_panic_strategy(data: &[u8]) -> PanicStrategy { + let crate_doc = rbml::Doc::new(data); + let strat_doc = reader::get_doc(crate_doc, tag_panic_strategy); + match reader::doc_as_u8(strat_doc) { + b'U' => PanicStrategy::Unwind, + b'A' => PanicStrategy::Abort, + b => panic!("unknown panic strategy in metadata: {}", b), + } +} diff --git a/src/librustc_metadata/def_key.rs b/src/librustc_metadata/def_key.rs new file mode 100644 index 0000000000..05ad333ed3 --- /dev/null +++ b/src/librustc_metadata/def_key.rs @@ -0,0 +1,107 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
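// A rough sketch of the round trip this module implements (assuming `hir_key`
// is a hir_map::DefKey whose data carries a `name`, and that the same name is
// what the encoder stores separately under tag_paths_data_name):
//
//     let simple = simplify_def_key(hir_key);              // Name payload dropped
//     // ... `simple` is what gets serialized under tag_def_key ...
//     let restored = recover_def_key(simple, Some(name));  // Name reattached
//     // `restored` carries the same parent, data kind and disambiguator as
//     // the original `hir_key`.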
+ +use rustc::hir::def_id::DefIndex; +use rustc::hir::map as hir_map; +use syntax::ast::Name; + +#[derive(RustcEncodable, RustcDecodable)] +pub struct DefKey { + pub parent: Option, + pub disambiguated_data: DisambiguatedDefPathData, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub struct DisambiguatedDefPathData { + pub data: DefPathData, + pub disambiguator: u32, +} + +#[derive(RustcEncodable, RustcDecodable)] +pub enum DefPathData { + CrateRoot, + Misc, + Impl, + TypeNs, + ValueNs, + Module, + MacroDef, + ClosureExpr, + TypeParam, + LifetimeDef, + EnumVariant, + Field, + StructCtor, + Initializer, + Binding, +} + +pub fn simplify_def_key(key: hir_map::DefKey) -> DefKey { + let data = DisambiguatedDefPathData { + data: simplify_def_path_data(key.disambiguated_data.data), + disambiguator: key.disambiguated_data.disambiguator, + }; + DefKey { + parent: key.parent, + disambiguated_data: data, + } +} + +fn simplify_def_path_data(data: hir_map::DefPathData) -> DefPathData { + match data { + hir_map::DefPathData::CrateRoot => DefPathData::CrateRoot, + hir_map::DefPathData::InlinedRoot(_) => bug!("unexpected DefPathData"), + hir_map::DefPathData::Misc => DefPathData::Misc, + hir_map::DefPathData::Impl => DefPathData::Impl, + hir_map::DefPathData::TypeNs(_) => DefPathData::TypeNs, + hir_map::DefPathData::ValueNs(_) => DefPathData::ValueNs, + hir_map::DefPathData::Module(_) => DefPathData::Module, + hir_map::DefPathData::MacroDef(_) => DefPathData::MacroDef, + hir_map::DefPathData::ClosureExpr => DefPathData::ClosureExpr, + hir_map::DefPathData::TypeParam(_) => DefPathData::TypeParam, + hir_map::DefPathData::LifetimeDef(_) => DefPathData::LifetimeDef, + hir_map::DefPathData::EnumVariant(_) => DefPathData::EnumVariant, + hir_map::DefPathData::Field(_) => DefPathData::Field, + hir_map::DefPathData::StructCtor => DefPathData::StructCtor, + hir_map::DefPathData::Initializer => DefPathData::Initializer, + hir_map::DefPathData::Binding(_) => DefPathData::Binding, + } +} + +pub fn recover_def_key(key: DefKey, name: Option) -> hir_map::DefKey { + let data = hir_map::DisambiguatedDefPathData { + data: recover_def_path_data(key.disambiguated_data.data, name), + disambiguator: key.disambiguated_data.disambiguator, + }; + hir_map::DefKey { + parent: key.parent, + disambiguated_data: data, + } +} + +fn recover_def_path_data(data: DefPathData, name: Option) -> hir_map::DefPathData { + match data { + DefPathData::CrateRoot => hir_map::DefPathData::CrateRoot, + DefPathData::Misc => hir_map::DefPathData::Misc, + DefPathData::Impl => hir_map::DefPathData::Impl, + DefPathData::TypeNs => hir_map::DefPathData::TypeNs(name.unwrap()), + DefPathData::ValueNs => hir_map::DefPathData::ValueNs(name.unwrap()), + DefPathData::Module => hir_map::DefPathData::Module(name.unwrap()), + DefPathData::MacroDef => hir_map::DefPathData::MacroDef(name.unwrap()), + DefPathData::ClosureExpr => hir_map::DefPathData::ClosureExpr, + DefPathData::TypeParam => hir_map::DefPathData::TypeParam(name.unwrap()), + DefPathData::LifetimeDef => hir_map::DefPathData::LifetimeDef(name.unwrap()), + DefPathData::EnumVariant => hir_map::DefPathData::EnumVariant(name.unwrap()), + DefPathData::Field => hir_map::DefPathData::Field(name.unwrap()), + DefPathData::StructCtor => hir_map::DefPathData::StructCtor, + DefPathData::Initializer => hir_map::DefPathData::Initializer, + DefPathData::Binding => hir_map::DefPathData::Binding(name.unwrap()), + } +} diff --git a/src/librustc_metadata/diagnostics.rs b/src/librustc_metadata/diagnostics.rs index 
8fa23de9a2..ae9f500c5d 100644 --- a/src/librustc_metadata/diagnostics.rs +++ b/src/librustc_metadata/diagnostics.rs @@ -26,6 +26,27 @@ name. Example: ``` "##, +E0455: r##" +Linking with `kind=framework` is only supported when targeting OS X, +as frameworks are specific to that operating system. + +Erroneous code example: + +```compile_fail" +#[link(name = "FooCoreServices", kind = "framework")] extern {} +// OS used to compile is Linux for example +``` + +To solve this error you can use conditional compilation: + +``` +#[cfg_attr(target="macos", link(name = "FooCoreServices", kind = "framework"))] +extern {} +``` + +See more: https://doc.rust-lang.org/book/conditional-compilation.html +"##, + E0458: r##" An unknown "kind" was specified for a link attribute. Erroneous code example: @@ -73,7 +94,6 @@ well, and you link to them the same way. } register_diagnostics! { - E0455, // native frameworks are only available on OSX targets E0456, // plugin `..` is not available for triple `..` E0457, // plugin `..` only found in rlib format, but must be available... E0514, // metadata version mismatch diff --git a/src/librustc_metadata/encoder.rs b/src/librustc_metadata/encoder.rs index 69f61cf97c..e862dbb173 100644 --- a/src/librustc_metadata/encoder.rs +++ b/src/librustc_metadata/encoder.rs @@ -17,14 +17,15 @@ use astencode::encode_inlined_item; use common::*; use cstore; use decoder; +use def_key; use tyencode; use index::{self, IndexData}; -use middle::cstore::{LOCAL_CRATE, CrateStore, InlinedItemRef, LinkMeta, tls}; +use middle::cstore::{LOCAL_CRATE, InlinedItemRef, LinkMeta, tls}; use rustc::hir::def; use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; use middle::dependency_format::Linkage; -use middle::stability; +use rustc::dep_graph::{DepGraph, DepNode, DepTask}; use rustc::ty::subst; use rustc::traits::specialization_graph; use rustc::ty::{self, Ty, TyCtxt}; @@ -32,10 +33,10 @@ use rustc::ty::util::IntTypeExt; use rustc::hir::svh::Svh; use rustc::mir::mir_map::MirMap; -use rustc::session::config; +use rustc::session::config::{self, PanicStrategy}; use rustc::util::nodemap::{FnvHashMap, NodeMap, NodeSet}; -use serialize::Encodable; +use rustc_serialize::Encodable; use std::cell::RefCell; use std::io::prelude::*; use std::io::{Cursor, SeekFrom}; @@ -45,7 +46,6 @@ use syntax::abi::Abi; use syntax::ast::{self, NodeId, Name, CRATE_NODE_ID, CrateNum}; use syntax::codemap::BytePos; use syntax::attr; -use syntax::attr::AttrMetaMethods; use syntax::errors::Handler; use syntax; use rbml::writer::Encoder; @@ -53,10 +53,11 @@ use rbml::writer::Encoder; use rustc::hir::{self, PatKind}; use rustc::hir::intravisit::Visitor; use rustc::hir::intravisit; +use rustc::hir::map::DefKey; pub struct EncodeContext<'a, 'tcx: 'a> { pub diag: &'a Handler, - pub tcx: &'a TyCtxt<'tcx>, + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, pub reexports: &'a def::ExportMap, pub item_symbols: &'a RefCell>, pub link_meta: &'a LinkMeta, @@ -76,15 +77,23 @@ impl<'a, 'tcx> EncodeContext<'a,'tcx> { #[derive(PartialEq, Eq, Hash)] pub enum XRef<'tcx> { Predicate(ty::Predicate<'tcx>) } -struct CrateIndex<'tcx> { +struct CrateIndex<'a, 'tcx> { + dep_graph: &'a DepGraph, items: IndexData, xrefs: FnvHashMap, u32>, // sequentially-assigned } -impl<'tcx> CrateIndex<'tcx> { - fn record(&mut self, id: DefId, rbml_w: &mut Encoder) { +impl<'a, 'tcx> CrateIndex<'a, 'tcx> { + /// Records that `id` is being emitted at the current offset. 
+ /// This data is later used to construct the item index in the + /// metadata so we can quickly find the data for a given item. + /// + /// Returns a dep-graph task that you should keep live as long as + /// the data for this item is being emitted. + fn record(&mut self, id: DefId, rbml_w: &mut Encoder) -> DepTask<'a> { let position = rbml_w.mark_stable_position(); self.items.record(id, position); + self.dep_graph.in_task(DepNode::MetaData(id)) } fn add_xref(&mut self, xref: XRef<'tcx>) -> u32 { @@ -101,6 +110,13 @@ fn encode_def_id(rbml_w: &mut Encoder, id: DefId) { rbml_w.wr_tagged_u64(tag_def_id, def_to_u64(id)); } +fn encode_def_key(rbml_w: &mut Encoder, key: DefKey) { + let simple_key = def_key::simplify_def_key(key); + rbml_w.start_tag(tag_def_key); + simple_key.encode(rbml_w); + rbml_w.end_tag(); +} + /// For every DefId that we create a metadata item for, we include a /// serialized copy of its DefKey, which allows us to recreate a path. fn encode_def_id_and_key(ecx: &EncodeContext, @@ -108,17 +124,8 @@ fn encode_def_id_and_key(ecx: &EncodeContext, def_id: DefId) { encode_def_id(rbml_w, def_id); - encode_def_key(ecx, rbml_w, def_id); -} - -fn encode_def_key(ecx: &EncodeContext, - rbml_w: &mut Encoder, - def_id: DefId) -{ - rbml_w.start_tag(tag_def_key); let def_key = ecx.tcx.map.def_key(def_id); - def_key.encode(rbml_w); - rbml_w.end_tag(); + encode_def_key(rbml_w, def_key); } fn encode_trait_ref<'a, 'tcx>(rbml_w: &mut Encoder, @@ -141,7 +148,7 @@ pub fn def_to_u64(did: DefId) -> u64 { (did.krate as u64) << 32 | (did.index.as_usize() as u64) } -pub fn def_to_string(_tcx: &TyCtxt, did: DefId) -> String { +pub fn def_to_string(_tcx: TyCtxt, did: DefId) -> String { format!("{}:{}", did.krate, did.index.as_usize()) } @@ -156,7 +163,7 @@ fn encode_item_variances(rbml_w: &mut Encoder, fn encode_bounds_and_type_for_item<'a, 'tcx>(rbml_w: &mut Encoder, ecx: &EncodeContext<'a, 'tcx>, - index: &mut CrateIndex<'tcx>, + index: &mut CrateIndex<'a, 'tcx>, id: NodeId) { encode_bounds_and_type(rbml_w, ecx, @@ -167,7 +174,7 @@ fn encode_bounds_and_type_for_item<'a, 'tcx>(rbml_w: &mut Encoder, fn encode_bounds_and_type<'a, 'tcx>(rbml_w: &mut Encoder, ecx: &EncodeContext<'a, 'tcx>, - index: &mut CrateIndex<'tcx>, + index: &mut CrateIndex<'a, 'tcx>, scheme: &ty::TypeScheme<'tcx>, predicates: &ty::GenericPredicates<'tcx>) { encode_generics(rbml_w, ecx, index, @@ -250,25 +257,21 @@ fn encode_enum_variant_info<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, did: DefId, vis: &hir::Visibility, - index: &mut CrateIndex<'tcx>) { + index: &mut CrateIndex<'a, 'tcx>) { debug!("encode_enum_variant_info(did={:?})", did); let repr_hints = ecx.tcx.lookup_repr_hints(did); let repr_type = ecx.tcx.enum_repr_type(repr_hints.get(0)); - let mut disr_val = repr_type.initial_discriminant(&ecx.tcx); + let mut disr_val = repr_type.initial_discriminant(ecx.tcx); let def = ecx.tcx.lookup_adt_def(did); for variant in &def.variants { let vid = variant.did; let variant_node_id = ecx.local_id(vid); - if let ty::VariantKind::Struct = variant.kind() { - // tuple-like enum variant fields aren't really items so - // don't try to encode them. 
- for field in &variant.fields { - encode_field(ecx, rbml_w, field, index); - } + for field in &variant.fields { + encode_field(ecx, rbml_w, field, index); } - index.record(vid, rbml_w); + let _task = index.record(vid, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, vid); encode_family(rbml_w, match variant.kind() { @@ -284,8 +287,8 @@ fn encode_enum_variant_info<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_attributes(rbml_w, &attrs); encode_repr_attrs(rbml_w, ecx, &attrs); - let stab = stability::lookup_stability(ecx.tcx, vid); - let depr = stability::lookup_deprecation(ecx.tcx, vid); + let stab = ecx.tcx.lookup_stability(vid); + let depr = ecx.tcx.lookup_deprecation(vid); encode_stability(rbml_w, stab); encode_deprecation(rbml_w, depr); @@ -299,6 +302,7 @@ fn encode_enum_variant_info<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_bounds_and_type_for_item(rbml_w, ecx, index, variant_node_id); rbml_w.end_tag(); + disr_val = disr_val.wrap_incr(); } } @@ -377,8 +381,8 @@ fn encode_info_for_mod(ecx: &EncodeContext, encode_visibility(rbml_w, vis); - let stab = stability::lookup_stability(ecx.tcx, ecx.tcx.map.local_def_id(id)); - let depr = stability::lookup_deprecation(ecx.tcx, ecx.tcx.map.local_def_id(id)); + let stab = ecx.tcx.lookup_stability(ecx.tcx.map.local_def_id(id)); + let depr = ecx.tcx.lookup_deprecation(ecx.tcx.map.local_def_id(id)); encode_stability(rbml_w, stab); encode_deprecation(rbml_w, depr); @@ -473,11 +477,11 @@ fn encode_item_sort(rbml_w: &mut Encoder, sort: char) { fn encode_field<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, field: ty::FieldDef<'tcx>, - index: &mut CrateIndex<'tcx>) { + index: &mut CrateIndex<'a, 'tcx>) { let nm = field.name; let id = ecx.local_id(field.did); - index.record(field.did, rbml_w); + let _task = index.record(field.did, rbml_w); rbml_w.start_tag(tag_items_data_item); debug!("encode_field: encoding {} {}", nm, id); encode_struct_field_family(rbml_w, field.vis); @@ -485,8 +489,8 @@ fn encode_field<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_bounds_and_type_for_item(rbml_w, ecx, index, id); encode_def_id_and_key(ecx, rbml_w, field.did); - let stab = stability::lookup_stability(ecx.tcx, field.did); - let depr = stability::lookup_deprecation(ecx.tcx, field.did); + let stab = ecx.tcx.lookup_stability(field.did); + let depr = ecx.tcx.lookup_deprecation(field.did); encode_stability(rbml_w, stab); encode_deprecation(rbml_w, depr); @@ -497,12 +501,12 @@ fn encode_info_for_struct_ctor<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, name: Name, struct_def: &hir::VariantData, - index: &mut CrateIndex<'tcx>, + index: &mut CrateIndex<'a, 'tcx>, struct_id: NodeId) { let ctor_id = struct_def.id(); let ctor_def_id = ecx.tcx.map.local_def_id(ctor_id); - index.record(ctor_def_id, rbml_w); + let _task = index.record(ctor_def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, ctor_def_id); encode_family(rbml_w, match *struct_def { @@ -518,8 +522,8 @@ fn encode_info_for_struct_ctor<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_symbol(ecx, rbml_w, ctor_id); } - let stab = stability::lookup_stability(ecx.tcx, ecx.tcx.map.local_def_id(ctor_id)); - let depr= stability::lookup_deprecation(ecx.tcx, ecx.tcx.map.local_def_id(ctor_id)); + let stab = ecx.tcx.lookup_stability(ecx.tcx.map.local_def_id(ctor_id)); + let depr= ecx.tcx.lookup_deprecation(ecx.tcx.map.local_def_id(ctor_id)); encode_stability(rbml_w, stab); encode_deprecation(rbml_w, depr); @@ -533,7 +537,7 
@@ fn encode_info_for_struct_ctor<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder, ecx: &EncodeContext<'a, 'tcx>, - index: &mut CrateIndex<'tcx>, + index: &mut CrateIndex<'a, 'tcx>, generics: &ty::Generics<'tcx>, predicates: &ty::GenericPredicates<'tcx>, tag: usize) @@ -578,7 +582,7 @@ fn encode_generics<'a, 'tcx>(rbml_w: &mut Encoder, fn encode_predicates_in_current_doc<'a,'tcx>(rbml_w: &mut Encoder, _ecx: &EncodeContext<'a,'tcx>, - index: &mut CrateIndex<'tcx>, + index: &mut CrateIndex<'a, 'tcx>, predicates: &ty::GenericPredicates<'tcx>) { for (space, _, predicate) in predicates.predicates.iter_enumerated() { @@ -595,7 +599,7 @@ fn encode_predicates_in_current_doc<'a,'tcx>(rbml_w: &mut Encoder, fn encode_predicates<'a,'tcx>(rbml_w: &mut Encoder, ecx: &EncodeContext<'a,'tcx>, - index: &mut CrateIndex<'tcx>, + index: &mut CrateIndex<'a, 'tcx>, predicates: &ty::GenericPredicates<'tcx>, tag: usize) { @@ -606,7 +610,7 @@ fn encode_predicates<'a,'tcx>(rbml_w: &mut Encoder, fn encode_method_ty_fields<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, - index: &mut CrateIndex<'tcx>, + index: &mut CrateIndex<'a, 'tcx>, method_ty: &ty::Method<'tcx>) { encode_def_id_and_key(ecx, rbml_w, method_ty.def_id); encode_name(rbml_w, method_ty.name); @@ -625,7 +629,7 @@ fn encode_method_ty_fields<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, fn encode_info_for_associated_const<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, - index: &mut CrateIndex<'tcx>, + index: &mut CrateIndex<'a, 'tcx>, associated_const: &ty::AssociatedConst, parent_id: NodeId, impl_item_opt: Option<&hir::ImplItem>) { @@ -633,7 +637,7 @@ fn encode_info_for_associated_const<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, associated_const.def_id, associated_const.name); - index.record(associated_const.def_id, rbml_w); + let _task = index.record(associated_const.def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, associated_const.def_id); @@ -647,8 +651,8 @@ fn encode_info_for_associated_const<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_bounds_and_type_for_item(rbml_w, ecx, index, ecx.local_id(associated_const.def_id)); - let stab = stability::lookup_stability(ecx.tcx, associated_const.def_id); - let depr = stability::lookup_deprecation(ecx.tcx, associated_const.def_id); + let stab = ecx.tcx.lookup_stability(associated_const.def_id); + let depr = ecx.tcx.lookup_deprecation(associated_const.def_id); encode_stability(rbml_w, stab); encode_deprecation(rbml_w, depr); @@ -667,7 +671,7 @@ fn encode_info_for_associated_const<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, - index: &mut CrateIndex<'tcx>, + index: &mut CrateIndex<'a, 'tcx>, m: &ty::Method<'tcx>, is_default_impl: bool, parent_id: NodeId, @@ -675,15 +679,15 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, debug!("encode_info_for_method: {:?} {:?}", m.def_id, m.name); - index.record(m.def_id, rbml_w); + let _task = index.record(m.def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_method_ty_fields(ecx, rbml_w, index, m); encode_parent_item(rbml_w, ecx.tcx.map.local_def_id(parent_id)); encode_item_sort(rbml_w, 'r'); - let stab = stability::lookup_stability(ecx.tcx, m.def_id); - let depr = stability::lookup_deprecation(ecx.tcx, m.def_id); + let stab = ecx.tcx.lookup_stability(m.def_id); + let depr = ecx.tcx.lookup_deprecation(m.def_id); encode_stability(rbml_w, stab); 
encode_deprecation(rbml_w, depr); @@ -719,7 +723,7 @@ fn encode_info_for_method<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, fn encode_info_for_associated_type<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, - index: &mut CrateIndex<'tcx>, + index: &mut CrateIndex<'a, 'tcx>, associated_type: &ty::AssociatedType<'tcx>, parent_id: NodeId, impl_item_opt: Option<&hir::ImplItem>) { @@ -727,7 +731,7 @@ fn encode_info_for_associated_type<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, associated_type.def_id, associated_type.name); - index.record(associated_type.def_id, rbml_w); + let _task = index.record(associated_type.def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, associated_type.def_id); @@ -737,8 +741,8 @@ fn encode_info_for_associated_type<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_parent_item(rbml_w, ecx.tcx.map.local_def_id(parent_id)); encode_item_sort(rbml_w, 't'); - let stab = stability::lookup_stability(ecx.tcx, associated_type.def_id); - let depr = stability::lookup_deprecation(ecx.tcx, associated_type.def_id); + let stab = ecx.tcx.lookup_stability(associated_type.def_id); + let depr = ecx.tcx.lookup_deprecation(associated_type.def_id); encode_stability(rbml_w, stab); encode_deprecation(rbml_w, depr); @@ -764,7 +768,7 @@ fn encode_method_argument_names(rbml_w: &mut Encoder, for arg in &decl.inputs { let tag = tag_method_argument_name; if let PatKind::Ident(_, ref path1, _) = arg.pat.node { - let name = path1.node.name.as_str(); + let name = path1.node.as_str(); rbml_w.wr_tagged_bytes(tag, name.as_bytes()); } else { rbml_w.wr_tagged_bytes(tag, &[]); @@ -865,7 +869,7 @@ fn encode_xrefs<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, item: &hir::Item, - index: &mut CrateIndex<'tcx>) { + index: &mut CrateIndex<'a, 'tcx>) { let tcx = ecx.tcx; debug!("encoding info for item at {}", @@ -873,12 +877,15 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, let vis = &item.vis; let def_id = ecx.tcx.map.local_def_id(item.id); - let stab = stability::lookup_stability(tcx, ecx.tcx.map.local_def_id(item.id)); - let depr = stability::lookup_deprecation(tcx, ecx.tcx.map.local_def_id(item.id)); + + let (stab, depr) = tcx.dep_graph.with_task(DepNode::MetaData(def_id), || { + (tcx.lookup_stability(ecx.tcx.map.local_def_id(item.id)), + tcx.lookup_deprecation(ecx.tcx.map.local_def_id(item.id))) + }); match item.node { hir::ItemStatic(_, m, _) => { - index.record(def_id, rbml_w); + let _task = index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); if m == hir::MutMutable { @@ -896,7 +903,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w.end_tag(); } hir::ItemConst(_, _) => { - index.record(def_id, rbml_w); + let _task = index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); encode_family(rbml_w, 'C'); @@ -911,7 +918,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w.end_tag(); } hir::ItemFn(ref decl, _, constness, _, ref generics, _) => { - index.record(def_id, rbml_w); + let _task = index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); encode_family(rbml_w, FN_FAMILY); @@ -935,7 +942,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w.end_tag(); } hir::ItemMod(ref m) => { - index.record(def_id, rbml_w); + let 
_task = index.record(def_id, rbml_w); encode_info_for_mod(ecx, rbml_w, m, @@ -945,7 +952,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, &item.vis); } hir::ItemForeignMod(ref fm) => { - index.record(def_id, rbml_w); + let _task = index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); encode_family(rbml_w, 'n'); @@ -962,7 +969,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w.end_tag(); } hir::ItemTy(..) => { - index.record(def_id, rbml_w); + let _task = index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); encode_family(rbml_w, 'y'); @@ -974,7 +981,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w.end_tag(); } hir::ItemEnum(ref enum_definition, _) => { - index.record(def_id, rbml_w); + let _task = index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); @@ -1005,12 +1012,12 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, index); } hir::ItemStruct(ref struct_def, _) => { + /* Index the class*/ + let _task = index.record(def_id, rbml_w); + let def = ecx.tcx.lookup_adt_def(def_id); let variant = def.struct_variant(); - /* Index the class*/ - index.record(def_id, rbml_w); - /* Now, make an item for the class itself */ rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); @@ -1058,7 +1065,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, } } hir::ItemDefaultImpl(unsafety, _) => { - index.record(def_id, rbml_w); + let _task = index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); encode_family(rbml_w, 'd'); @@ -1070,12 +1077,13 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w.end_tag(); } hir::ItemImpl(unsafety, polarity, _, _, _, ref ast_items) => { + let _task = index.record(def_id, rbml_w); + // We need to encode information about the default methods we // have inherited, so we drive this based on the impl structure. 
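// The `let _task = index.record(..)` bindings in this function follow the
// RAII pattern documented on `CrateIndex::record`: the returned DepTask has
// to stay live for the whole scope that emits the item's metadata, so that
// every tcx read made while encoding is charged to DepNode::MetaData(def_id).
// A minimal sketch (not part of the patch) of why the binding matters:
//
//     let _task = index.record(def_id, rbml_w); // task lives to end of scope
//     // ... reads here are attributed to the item's MetaData dep node ...
//
//     let _ = index.record(def_id, rbml_w);     // wrong: the task is dropped
//                                               // immediately, so later reads
//                                               // are no longer attributed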
let impl_items = tcx.impl_items.borrow(); let items = impl_items.get(&def_id).unwrap(); - index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); encode_family(rbml_w, 'i'); @@ -1172,7 +1180,7 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, } } hir::ItemTrait(_, _, _, ref ms) => { - index.record(def_id, rbml_w); + let _task = index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); encode_family(rbml_w, 'I'); @@ -1227,13 +1235,13 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, for (i, &item_def_id) in r.iter().enumerate() { assert_eq!(item_def_id.def_id().krate, LOCAL_CRATE); - index.record(item_def_id.def_id(), rbml_w); + let _task = index.record(item_def_id.def_id(), rbml_w); rbml_w.start_tag(tag_items_data_item); encode_parent_item(rbml_w, def_id); - let stab = stability::lookup_stability(tcx, item_def_id.def_id()); - let depr = stability::lookup_deprecation(tcx, item_def_id.def_id()); + let stab = tcx.lookup_stability(item_def_id.def_id()); + let depr = tcx.lookup_deprecation(item_def_id.def_id()); encode_stability(rbml_w, stab); encode_deprecation(rbml_w, depr); @@ -1338,12 +1346,12 @@ fn encode_info_for_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, fn encode_info_for_foreign_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder, nitem: &hir::ForeignItem, - index: &mut CrateIndex<'tcx>) { + index: &mut CrateIndex<'a, 'tcx>) { debug!("writing foreign item {}", ecx.tcx.node_path_str(nitem.id)); let def_id = ecx.tcx.map.local_def_id(nitem.id); let abi = ecx.tcx.map.get_foreign_abi(nitem.id); - index.record(def_id, rbml_w); + let _task = index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); encode_visibility(rbml_w, &nitem.vis); @@ -1359,8 +1367,8 @@ fn encode_info_for_foreign_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, encode_symbol(ecx, rbml_w, nitem.id); } encode_attributes(rbml_w, &nitem.attrs); - let stab = stability::lookup_stability(ecx.tcx, ecx.tcx.map.local_def_id(nitem.id)); - let depr = stability::lookup_deprecation(ecx.tcx, ecx.tcx.map.local_def_id(nitem.id)); + let stab = ecx.tcx.lookup_stability(ecx.tcx.map.local_def_id(nitem.id)); + let depr = ecx.tcx.lookup_deprecation(ecx.tcx.map.local_def_id(nitem.id)); encode_stability(rbml_w, stab); encode_deprecation(rbml_w, depr); encode_method_argument_names(rbml_w, &fndecl); @@ -1373,8 +1381,8 @@ fn encode_info_for_foreign_item<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, } encode_bounds_and_type_for_item(rbml_w, ecx, index, nitem.id); encode_attributes(rbml_w, &nitem.attrs); - let stab = stability::lookup_stability(ecx.tcx, ecx.tcx.map.local_def_id(nitem.id)); - let depr = stability::lookup_deprecation(ecx.tcx, ecx.tcx.map.local_def_id(nitem.id)); + let stab = ecx.tcx.lookup_stability(ecx.tcx.map.local_def_id(nitem.id)); + let depr = ecx.tcx.lookup_deprecation(ecx.tcx.map.local_def_id(nitem.id)); encode_stability(rbml_w, stab); encode_deprecation(rbml_w, depr); encode_symbol(ecx, rbml_w, nitem.id); @@ -1392,7 +1400,7 @@ fn my_visit_expr(expr: &hir::Expr, hir::ExprClosure(..) 
=> { let def_id = ecx.tcx.map.local_def_id(expr.id); - index.record(def_id, rbml_w); + let _task = index.record(def_id, rbml_w); rbml_w.start_tag(tag_items_data_item); encode_def_id_and_key(ecx, rbml_w, def_id); @@ -1416,8 +1424,8 @@ fn my_visit_expr(expr: &hir::Expr, struct EncodeVisitor<'a, 'b:'a, 'c:'a, 'tcx:'c> { rbml_w_for_visit_item: &'a mut Encoder<'b>, - ecx: &'a EncodeContext<'c,'tcx>, - index: &'a mut CrateIndex<'tcx>, + ecx: &'a EncodeContext<'c, 'tcx>, + index: &'a mut CrateIndex<'c, 'tcx>, } impl<'a, 'b, 'c, 'tcx> Visitor<'tcx> for EncodeVisitor<'a, 'b, 'c, 'tcx> { @@ -1437,23 +1445,26 @@ impl<'a, 'b, 'c, 'tcx> Visitor<'tcx> for EncodeVisitor<'a, 'b, 'c, 'tcx> { fn encode_info_for_items<'a, 'tcx>(ecx: &EncodeContext<'a, 'tcx>, rbml_w: &mut Encoder) - -> CrateIndex<'tcx> { + -> CrateIndex<'a, 'tcx> { let krate = ecx.tcx.map.krate(); let mut index = CrateIndex { + dep_graph: &ecx.tcx.dep_graph, items: IndexData::new(ecx.tcx.map.num_local_def_ids()), xrefs: FnvHashMap() }; rbml_w.start_tag(tag_items_data); - index.record(DefId::local(CRATE_DEF_INDEX), rbml_w); - encode_info_for_mod(ecx, - rbml_w, - &krate.module, - &[], - CRATE_NODE_ID, - syntax::parse::token::intern(&ecx.link_meta.crate_name), - &hir::Public); + { + let _task = index.record(DefId::local(CRATE_DEF_INDEX), rbml_w); + encode_info_for_mod(ecx, + rbml_w, + &krate.module, + &[], + CRATE_NODE_ID, + syntax::parse::token::intern(&ecx.link_meta.crate_name), + &hir::Public); + } krate.visit_all_items(&mut EncodeVisitor { index: &mut index, @@ -1699,7 +1710,7 @@ fn encode_struct_field_attrs(ecx: &EncodeContext, struct ImplVisitor<'a, 'tcx:'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, impls: FnvHashMap> } @@ -1782,14 +1793,14 @@ fn encode_crate_dep(rbml_w: &mut Encoder, rbml_w.start_tag(tag_crate_dep); rbml_w.wr_tagged_str(tag_crate_dep_crate_name, &dep.name()); let hash = decoder::get_crate_hash(dep.data()); - rbml_w.wr_tagged_str(tag_crate_dep_hash, hash.as_str()); + rbml_w.wr_tagged_u64(tag_crate_dep_hash, hash.as_u64()); rbml_w.wr_tagged_u8(tag_crate_dep_explicitly_linked, dep.explicitly_linked.get() as u8); rbml_w.end_tag(); } fn encode_hash(rbml_w: &mut Encoder, hash: &Svh) { - rbml_w.wr_tagged_str(tag_crate_hash, hash.as_str()); + rbml_w.wr_tagged_u64(tag_crate_hash, hash.as_u64()); } fn encode_rustc_version(rbml_w: &mut Encoder) { @@ -1829,9 +1840,16 @@ fn encode_dylib_dependency_formats(rbml_w: &mut Encoder, ecx: &EncodeContext) { } } -// NB: Increment this as you change the metadata encoding version. -#[allow(non_upper_case_globals)] -pub const metadata_encoding_version : &'static [u8] = &[b'r', b'u', b's', b't', 0, 0, 0, 2 ]; +fn encode_panic_strategy(rbml_w: &mut Encoder, ecx: &EncodeContext) { + match ecx.tcx.sess.opts.cg.panic { + PanicStrategy::Unwind => { + rbml_w.wr_tagged_u8(tag_panic_strategy, b'U'); + } + PanicStrategy::Abort => { + rbml_w.wr_tagged_u8(tag_panic_strategy, b'A'); + } + } +} pub fn encode_metadata(ecx: EncodeContext, krate: &hir::Crate) -> Vec { let mut wr = Cursor::new(Vec::new()); @@ -1866,12 +1884,25 @@ pub fn encode_metadata(ecx: EncodeContext, krate: &hir::Crate) -> Vec { // the length of the metadata to the start of the metadata. Later on this // will allow us to slice the metadata to the precise length that we just // generated regardless of trailing bytes that end up in it. 
- let len = v.len() as u32; - v.insert(0, (len >> 0) as u8); - v.insert(0, (len >> 8) as u8); - v.insert(0, (len >> 16) as u8); - v.insert(0, (len >> 24) as u8); - return v; + // + // We also need to store the metadata encoding version here, because + // rlibs don't have it. To get older versions of rustc to ignore + // this metadata, there are 4 zero bytes at the start, which are + // treated as a length of 0 by old compilers. + + let len = v.len(); + let mut result = vec![]; + result.push(0); + result.push(0); + result.push(0); + result.push(0); + result.extend(metadata_encoding_version.iter().cloned()); + result.push((len >> 24) as u8); + result.push((len >> 16) as u8); + result.push((len >> 8) as u8); + result.push((len >> 0) as u8); + result.extend(v); + result } fn encode_metadata_inner(rbml_w: &mut Encoder, @@ -1916,6 +1947,7 @@ fn encode_metadata_inner(rbml_w: &mut Encoder, encode_hash(rbml_w, &ecx.link_meta.crate_hash); encode_crate_disambiguator(rbml_w, &ecx.tcx.sess.crate_disambiguator.get().as_str()); encode_dylib_dependency_formats(rbml_w, &ecx); + encode_panic_strategy(rbml_w, &ecx); let mut i = rbml_w.writer.seek(SeekFrom::Current(0)).unwrap(); encode_attributes(rbml_w, &krate.attrs); @@ -2006,10 +2038,10 @@ fn encode_metadata_inner(rbml_w: &mut Encoder, } // Get the encoded string for a type -pub fn encoded_ty<'tcx>(tcx: &TyCtxt<'tcx>, - t: Ty<'tcx>, - def_id_to_string: fn(&TyCtxt<'tcx>, DefId) -> String) - -> Vec { +pub fn encoded_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + t: Ty<'tcx>, + def_id_to_string: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String) + -> Vec { let mut wr = Cursor::new(Vec::new()); tyencode::enc_ty(&mut wr, &tyencode::ctxt { diag: tcx.sess.diagnostic(), diff --git a/src/librustc_metadata/index.rs b/src/librustc_metadata/index.rs index 9c066f9f5f..b850073462 100644 --- a/src/librustc_metadata/index.rs +++ b/src/librustc_metadata/index.rs @@ -75,7 +75,7 @@ impl IndexData { pub fn record(&mut self, def_id: DefId, position: u64) { assert!(def_id.is_local()); - self.record_index(def_id.index, position) + self.record_index(def_id.index, position); } pub fn record_index(&mut self, item: DefIndex, position: u64) { diff --git a/src/librustc_metadata/lib.rs b/src/librustc_metadata/lib.rs index 139462d41b..f7ea60c407 100644 --- a/src/librustc_metadata/lib.rs +++ b/src/librustc_metadata/lib.rs @@ -31,7 +31,7 @@ extern crate flate; extern crate rbml; -extern crate serialize; +extern crate serialize as rustc_serialize; // used by deriving #[macro_use] extern crate rustc; @@ -48,6 +48,7 @@ pub mod diagnostics; pub mod astencode; pub mod common; +pub mod def_key; pub mod tyencode; pub mod tydecode; pub mod encoder; diff --git a/src/librustc_metadata/loader.rs b/src/librustc_metadata/loader.rs index c7cd8ae2dd..dc10391b6a 100644 --- a/src/librustc_metadata/loader.rs +++ b/src/librustc_metadata/loader.rs @@ -213,8 +213,8 @@ //! metadata::loader or metadata::creader for all the juicy details! 
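// A minimal sketch (not part of the patch; `version` stands in for
// common::metadata_encoding_version) of the blob layout that encode_metadata
// now produces and that MetadataBlob::as_slice unwraps again:
//
//     fn frame(version: &[u8], payload: &[u8]) -> Vec<u8> {
//         let mut out = vec![0, 0, 0, 0];          // old compilers see "length 0"
//         out.extend_from_slice(version);          // encoding-version marker
//         let len = payload.len() as u32;
//         out.extend_from_slice(&[(len >> 24) as u8, (len >> 16) as u8,
//                                 (len >> 8) as u8, len as u8]); // big-endian length
//         out.extend_from_slice(payload);          // the metadata itself
//         out
//     }
//
// as_slice() skips the 4 zero bytes plus the version marker, reads the
// big-endian length, and returns exactly that many payload bytes, ignoring
// any trailing padding in the containing archive.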
use cstore::{MetadataBlob, MetadataVec, MetadataArchive}; +use common::{metadata_encoding_version, rustc_version}; use decoder; -use encoder; use rustc::hir::svh::Svh; use rustc::session::Session; @@ -231,8 +231,8 @@ use rustc_back::target::Target; use std::cmp; use std::collections::HashMap; +use std::fmt; use std::fs; -use std::io::prelude::*; use std::io; use std::path::{Path, PathBuf}; use std::ptr; @@ -260,6 +260,7 @@ pub struct Context<'a> { pub rejected_via_hash: Vec, pub rejected_via_triple: Vec, pub rejected_via_kind: Vec, + pub rejected_via_version: Vec, pub should_match_name: bool, } @@ -283,6 +284,21 @@ pub struct CratePaths { pub const METADATA_FILENAME: &'static str = "rust.metadata.bin"; +#[derive(Copy, Clone, PartialEq)] +enum CrateFlavor { + Rlib, + Dylib +} + +impl fmt::Display for CrateFlavor { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + f.write_str(match *self { + CrateFlavor::Rlib => "rlib", + CrateFlavor::Dylib => "dylib" + }) + } +} + impl CratePaths { fn paths(&self) -> Vec { match (&self.dylib, &self.rlib) { @@ -321,6 +337,10 @@ impl<'a> Context<'a> { struct_span_err!(self.sess, self.span, E0462, "found staticlib `{}` instead of rlib or dylib{}", self.ident, add) + } else if !self.rejected_via_version.is_empty() { + struct_span_err!(self.sess, self.span, E0514, + "found crate `{}` compiled by an incompatible version of rustc{}", + self.ident, add) } else { struct_span_err!(self.sess, self.span, E0463, "can't find crate for `{}`{}", @@ -330,39 +350,42 @@ impl<'a> Context<'a> { if !self.rejected_via_triple.is_empty() { let mismatches = self.rejected_via_triple.iter(); for (i, &CrateMismatch{ ref path, ref got }) in mismatches.enumerate() { - err.fileline_note(self.span, - &format!("crate `{}`, path #{}, triple {}: {}", - self.ident, i+1, got, path.display())); + err.note(&format!("crate `{}`, path #{}, triple {}: {}", + self.ident, i+1, got, path.display())); } } if !self.rejected_via_hash.is_empty() { - err.span_note(self.span, "perhaps this crate needs \ - to be recompiled?"); + err.note("perhaps that crate needs to be recompiled?"); let mismatches = self.rejected_via_hash.iter(); for (i, &CrateMismatch{ ref path, .. }) in mismatches.enumerate() { - err.fileline_note(self.span, - &format!("crate `{}` path #{}: {}", - self.ident, i+1, path.display())); + err.note(&format!("crate `{}` path #{}: {}", + self.ident, i+1, path.display())); } match self.root { &None => {} &Some(ref r) => { for (i, path) in r.paths().iter().enumerate() { - err.fileline_note(self.span, - &format!("crate `{}` path #{}: {}", - r.ident, i+1, path.display())); + err.note(&format!("crate `{}` path #{}: {}", + r.ident, i+1, path.display())); } } } } if !self.rejected_via_kind.is_empty() { - err.fileline_help(self.span, "please recompile this crate using \ - --crate-type lib"); + err.help("please recompile that crate using --crate-type lib"); let mismatches = self.rejected_via_kind.iter(); for (i, &CrateMismatch { ref path, .. 
}) in mismatches.enumerate() { - err.fileline_note(self.span, - &format!("crate `{}` path #{}: {}", - self.ident, i+1, path.display())); + err.note(&format!("crate `{}` path #{}: {}", + self.ident, i+1, path.display())); + } + } + if !self.rejected_via_version.is_empty() { + err.help(&format!("please recompile that crate using this compiler ({})", + rustc_version())); + let mismatches = self.rejected_via_version.iter(); + for (i, &CrateMismatch { ref path, ref got }) in mismatches.enumerate() { + err.note(&format!("crate `{}` path #{}: {} compiled by {:?}", + self.ident, i+1, path.display(), got)); } } @@ -454,20 +477,17 @@ impl<'a> Context<'a> { // A Library candidate is created if the metadata for the set of // libraries corresponds to the crate id and hash criteria that this // search is being performed for. - let mut libraries = Vec::new(); + let mut libraries = HashMap::new(); for (_hash, (rlibs, dylibs)) in candidates { - let mut metadata = None; - let rlib = self.extract_one(rlibs, "rlib", &mut metadata); - let dylib = self.extract_one(dylibs, "dylib", &mut metadata); - match metadata { - Some(metadata) => { - libraries.push(Library { - dylib: dylib, - rlib: rlib, - metadata: metadata, - }) - } - None => {} + let mut slot = None; + let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); + let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot); + if let Some((h, m)) = slot { + libraries.insert(h, Library { + dylib: dylib, + rlib: rlib, + metadata: m, + }); } } @@ -476,13 +496,13 @@ impl<'a> Context<'a> { // libraries or not. match libraries.len() { 0 => None, - 1 => Some(libraries.into_iter().next().unwrap()), + 1 => Some(libraries.into_iter().next().unwrap().1), _ => { let mut err = struct_span_err!(self.sess, self.span, E0464, "multiple matching crates for `{}`", self.crate_name); err.note("candidates:"); - for lib in &libraries { + for (_, lib) in libraries { match lib.dylib { Some((ref p, _)) => { err.note(&format!("path: {}", @@ -515,14 +535,14 @@ impl<'a> Context<'a> { // read the metadata from it if `*slot` is `None`. If the metadata couldn't // be read, it is assumed that the file isn't a valid rust library (no // errors are emitted). - fn extract_one(&mut self, m: HashMap, flavor: &str, - slot: &mut Option) -> Option<(PathBuf, PathKind)> { - let mut ret = None::<(PathBuf, PathKind)>; + fn extract_one(&mut self, m: HashMap, flavor: CrateFlavor, + slot: &mut Option<(Svh, MetadataBlob)>) -> Option<(PathBuf, PathKind)> { + let mut ret: Option<(PathBuf, PathKind)> = None; let mut error = 0; if slot.is_some() { // FIXME(#10786): for an optimization, we only read one of the - // library's metadata sections. In theory we should + // libraries' metadata sections. In theory we should // read both, but reading dylib metadata is quite // slow. if m.is_empty() { @@ -535,10 +555,10 @@ impl<'a> Context<'a> { let mut err: Option = None; for (lib, kind) in m { info!("{} reading metadata from: {}", flavor, lib.display()); - let metadata = match get_metadata_section(self.target, &lib) { + let (hash, metadata) = match get_metadata_section(self.target, flavor, &lib) { Ok(blob) => { - if self.crate_matches(blob.as_slice(), &lib) { - blob + if let Some(h) = self.crate_matches(blob.as_slice(), &lib) { + (h, blob) } else { info!("metadata mismatch"); continue @@ -549,12 +569,8 @@ impl<'a> Context<'a> { continue } }; - // If we've already found a candidate and we're not matching hashes, - // emit an error about duplicate candidates found. 
If we're matching - // based on a hash, however, then if we've gotten this far both - // candidates have the same hash, so they're not actually - // duplicates that we should warn about. - if ret.is_some() && self.hash.is_none() { + // If we see multiple hashes, emit an error about duplicate candidates. + if slot.as_ref().map_or(false, |s| s.0 != hash) { let mut e = struct_span_err!(self.sess, self.span, E0465, "multiple {} candidates for `{}` found", flavor, self.crate_name); @@ -567,7 +583,7 @@ impl<'a> Context<'a> { } err = Some(e); error = 1; - ret = None; + *slot = None; } if error > 0 { error += 1; @@ -576,7 +592,7 @@ impl<'a> Context<'a> { lib.display())); continue } - *slot = Some(metadata); + *slot = Some((hash, metadata)); ret = Some((lib, kind)); } @@ -588,22 +604,31 @@ impl<'a> Context<'a> { } } - fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> bool { + fn crate_matches(&mut self, crate_data: &[u8], libpath: &Path) -> Option { + let crate_rustc_version = decoder::crate_rustc_version(crate_data); + if crate_rustc_version != Some(rustc_version()) { + let message = crate_rustc_version.unwrap_or(format!("an unknown compiler")); + info!("Rejecting via version: expected {} got {}", rustc_version(), message); + self.rejected_via_version.push(CrateMismatch { + path: libpath.to_path_buf(), + got: message + }); + return None; + } + if self.should_match_name { match decoder::maybe_get_crate_name(crate_data) { Some(ref name) if self.crate_name == *name => {} - _ => { info!("Rejecting via crate name"); return false } + _ => { info!("Rejecting via crate name"); return None } } } let hash = match decoder::maybe_get_crate_hash(crate_data) { - Some(hash) => hash, None => { - info!("Rejecting via lack of crate hash"); - return false; - } + None => { info!("Rejecting via lack of crate hash"); return None; } + Some(h) => h, }; let triple = match decoder::get_crate_triple(crate_data) { - None => { debug!("triple not present"); return false } + None => { debug!("triple not present"); return None } Some(t) => t, }; if triple != self.triple { @@ -612,24 +637,21 @@ impl<'a> Context<'a> { path: libpath.to_path_buf(), got: triple.to_string() }); - return false; + return None; } - match self.hash { - None => true, - Some(myhash) => { - if *myhash != hash { - info!("Rejecting via hash: expected {} got {}", *myhash, hash); - self.rejected_via_hash.push(CrateMismatch { - path: libpath.to_path_buf(), - got: myhash.as_str().to_string() - }); - false - } else { - true - } + if let Some(myhash) = self.hash { + if *myhash != hash { + info!("Rejecting via hash: expected {} got {}", *myhash, hash); + self.rejected_via_hash.push(CrateMismatch { + path: libpath.to_path_buf(), + got: myhash.to_string() + }); + return None; } } + + Some(hash) } @@ -701,13 +723,13 @@ impl<'a> Context<'a> { }; // Extract the rlib/dylib pair. 
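The reworked `crate_matches` above returns `Option<Svh>` and rejects a candidate in a fixed order: wrong rustc version, wrong crate name, missing hash, wrong target triple, and finally a mismatch against an explicitly requested hash. A condensed sketch of that cascade, with plain `String`s standing in for `Svh`, the `should_match_name` flag and the `CrateMismatch` bookkeeping omitted, and the decoder queries replaced by pre-decoded fields:

```rust
// Simplified stand-ins for the decoded metadata and the loader's expectations.
struct Decoded {
    rustc_version: Option<String>,
    crate_name: Option<String>,
    crate_hash: Option<String>,
    triple: Option<String>,
}

struct Expectations {
    rustc_version: String,
    crate_name: String,
    triple: String,
    hash: Option<String>, // only enforced when an exact hash was requested
}

fn crate_matches(d: &Decoded, want: &Expectations) -> Option<String> {
    if d.rustc_version.as_ref() != Some(&want.rustc_version) {
        return None; // rejected_via_version
    }
    if d.crate_name.as_ref() != Some(&want.crate_name) {
        return None; // rejected via crate name
    }
    let hash = match d.crate_hash.clone() {
        None => return None, // rejected via lack of crate hash
        Some(h) => h,
    };
    if d.triple.as_ref() != Some(&want.triple) {
        return None; // rejected_via_triple
    }
    if let Some(ref myhash) = want.hash {
        if *myhash != hash {
            return None; // rejected_via_hash
        }
    }
    Some(hash) // this hash is what keys the candidate in the `libraries` map
}

fn main() {
    let d = Decoded {
        rustc_version: Some("rustc 1.10.0".to_string()),
        crate_name: Some("serde".to_string()),
        crate_hash: Some("abc123".to_string()),
        triple: Some("x86_64-unknown-linux-gnu".to_string()),
    };
    let want = Expectations {
        rustc_version: "rustc 1.10.0".to_string(),
        crate_name: "serde".to_string(),
        triple: "x86_64-unknown-linux-gnu".to_string(),
        hash: None,
    };
    assert_eq!(crate_matches(&d, &want), Some("abc123".to_string()));
}
```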
- let mut metadata = None; - let rlib = self.extract_one(rlibs, "rlib", &mut metadata); - let dylib = self.extract_one(dylibs, "dylib", &mut metadata); + let mut slot = None; + let rlib = self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot); + let dylib = self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot); if rlib.is_none() && dylib.is_none() { return None } - match metadata { - Some(metadata) => Some(Library { + match slot { + Some((_, metadata)) => Some(Library { dylib: dylib, rlib: rlib, metadata: metadata, @@ -745,22 +767,37 @@ impl ArchiveMetadata { pub fn as_slice<'a>(&'a self) -> &'a [u8] { unsafe { &*self.data } } } +fn verify_decompressed_encoding_version(blob: &MetadataBlob, filename: &Path) + -> Result<(), String> +{ + let data = blob.as_slice_raw(); + if data.len() < 4+metadata_encoding_version.len() || + !<[u8]>::eq(&data[..4], &[0, 0, 0, 0]) || + &data[4..4+metadata_encoding_version.len()] != metadata_encoding_version + { + Err((format!("incompatible metadata version found: '{}'", + filename.display()))) + } else { + Ok(()) + } +} + // Just a small wrapper to time how long reading metadata takes. -fn get_metadata_section(target: &Target, filename: &Path) +fn get_metadata_section(target: &Target, flavor: CrateFlavor, filename: &Path) -> Result { let start = Instant::now(); - let ret = get_metadata_section_imp(target, filename); + let ret = get_metadata_section_imp(target, flavor, filename); info!("reading {:?} => {:?}", filename.file_name().unwrap(), start.elapsed()); return ret } -fn get_metadata_section_imp(target: &Target, filename: &Path) +fn get_metadata_section_imp(target: &Target, flavor: CrateFlavor, filename: &Path) -> Result { if !filename.exists() { return Err(format!("no such file: '{}'", filename.display())); } - if filename.file_name().unwrap().to_str().unwrap().ends_with(".rlib") { + if flavor == CrateFlavor::Rlib { // Use ArchiveRO for speed here, it's backed by LLVM and uses mmap // internally to read the file. We also avoid even using a memcpy by // just keeping the archive along while the metadata is in use. 
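Conversely, `verify_decompressed_encoding_version` above accepts a decompressed blob only if its first four bytes are zero and the next bytes equal the version stamp. A matching check for the layout sketched earlier, again with a placeholder stamp and a made-up helper name:

```rust
// Same placeholder as in the framing sketch; the real constant is
// `common::metadata_encoding_version`.
const METADATA_ENCODING_VERSION: &'static [u8] = b"rust-meta-v1";

fn verify_frame(data: &[u8]) -> Result<(), String> {
    let vlen = METADATA_ENCODING_VERSION.len();
    if data.len() < 4 + vlen
        || &data[..4] != &[0u8, 0, 0, 0][..]
        || &data[4..4 + vlen] != METADATA_ENCODING_VERSION
    {
        Err("incompatible metadata version found".to_string())
    } else {
        Ok(())
    }
}

fn main() {
    let mut good = vec![0u8, 0, 0, 0];
    good.extend(METADATA_ENCODING_VERSION.iter().cloned());
    good.extend(b"payload".iter().cloned());
    assert!(verify_frame(&good).is_ok());
    assert!(verify_frame(b"definitely not rust metadata").is_err());
}
```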
@@ -775,7 +812,10 @@ fn get_metadata_section_imp(target: &Target, filename: &Path) return match ArchiveMetadata::new(archive).map(|ar| MetadataArchive(ar)) { None => Err(format!("failed to read rlib metadata: '{}'", filename.display())), - Some(blob) => Ok(blob) + Some(blob) => { + try!(verify_decompressed_encoding_version(&blob, filename)); + Ok(blob) + } }; } unsafe { @@ -804,12 +844,12 @@ fn get_metadata_section_imp(target: &Target, filename: &Path) let cbuf = llvm::LLVMGetSectionContents(si.llsi); let csz = llvm::LLVMGetSectionSize(si.llsi) as usize; let cvbuf: *const u8 = cbuf as *const u8; - let vlen = encoder::metadata_encoding_version.len(); + let vlen = metadata_encoding_version.len(); debug!("checking {} bytes of metadata-version stamp", vlen); let minsz = cmp::min(vlen, csz); let buf0 = slice::from_raw_parts(cvbuf, minsz); - let version_ok = buf0 == encoder::metadata_encoding_version; + let version_ok = buf0 == metadata_encoding_version; if !version_ok { return Err((format!("incompatible metadata version found: '{}'", filename.display()))); @@ -820,7 +860,11 @@ fn get_metadata_section_imp(target: &Target, filename: &Path) csz - vlen); let bytes = slice::from_raw_parts(cvbuf1, csz - vlen); match flate::inflate_bytes(bytes) { - Ok(inflated) => return Ok(MetadataVec(inflated)), + Ok(inflated) => { + let blob = MetadataVec(inflated); + try!(verify_decompressed_encoding_version(&blob, filename)); + return Ok(blob); + } Err(_) => {} } } @@ -864,7 +908,9 @@ pub fn read_meta_section_name(target: &Target) -> &'static str { // A diagnostic function for dumping crate metadata to an output stream pub fn list_file_metadata(target: &Target, path: &Path, out: &mut io::Write) -> io::Result<()> { - match get_metadata_section(target, path) { + let filename = path.file_name().unwrap().to_str().unwrap(); + let flavor = if filename.ends_with(".rlib") { CrateFlavor::Rlib } else { CrateFlavor::Dylib }; + match get_metadata_section(target, flavor, path) { Ok(bytes) => decoder::list_crate_metadata(bytes.as_slice(), out), Err(msg) => { write!(out, "{}\n", msg) diff --git a/src/librustc_metadata/tls_context.rs b/src/librustc_metadata/tls_context.rs index 782c7cba26..23142ca80e 100644 --- a/src/librustc_metadata/tls_context.rs +++ b/src/librustc_metadata/tls_context.rs @@ -25,8 +25,8 @@ use tyencode; impl<'a, 'tcx: 'a> tls::EncodingContext<'tcx> for encoder::EncodeContext<'a, 'tcx> { - fn tcx<'s>(&'s self) -> &'s TyCtxt<'tcx> { - &self.tcx + fn tcx<'s>(&'s self) -> TyCtxt<'s, 'tcx, 'tcx> { + self.tcx } fn encode_ty(&self, encoder: &mut OpaqueEncoder, t: ty::Ty<'tcx>) { @@ -40,13 +40,13 @@ impl<'a, 'tcx: 'a> tls::EncodingContext<'tcx> for encoder::EncodeContext<'a, 'tc pub struct DecodingContext<'a, 'tcx: 'a> { pub crate_metadata: Cmd<'a>, - pub tcx: &'a TyCtxt<'tcx>, + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, } impl<'a, 'tcx: 'a> tls::DecodingContext<'tcx> for DecodingContext<'a, 'tcx> { - fn tcx<'s>(&'s self) -> &'s TyCtxt<'tcx> { - &self.tcx + fn tcx<'s>(&'s self) -> TyCtxt<'s, 'tcx, 'tcx> { + self.tcx } fn decode_ty(&self, decoder: &mut OpaqueDecoder) -> ty::Ty<'tcx> { diff --git a/src/librustc_metadata/tydecode.rs b/src/librustc_metadata/tydecode.rs index 3004246d1d..c94af9c5b3 100644 --- a/src/librustc_metadata/tydecode.rs +++ b/src/librustc_metadata/tydecode.rs @@ -41,12 +41,12 @@ pub struct TyDecoder<'a, 'tcx: 'a> { data: &'a [u8], krate: ast::CrateNum, pos: usize, - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, conv_def_id: DefIdConvert<'a>, } impl<'a,'tcx> TyDecoder<'a,'tcx> { - pub fn 
with_doc(tcx: &'a TyCtxt<'tcx>, + pub fn with_doc(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: ast::CrateNum, doc: rbml::Doc<'a>, conv: DefIdConvert<'a>) @@ -57,7 +57,7 @@ impl<'a,'tcx> TyDecoder<'a,'tcx> { pub fn new(data: &'a [u8], crate_num: ast::CrateNum, pos: usize, - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, conv: DefIdConvert<'a>) -> TyDecoder<'a, 'tcx> { TyDecoder { @@ -502,15 +502,15 @@ impl<'a,'tcx> TyDecoder<'a,'tcx> { } } - pub fn parse_bare_fn_ty(&mut self) -> ty::BareFnTy<'tcx> { + pub fn parse_bare_fn_ty(&mut self) -> &'tcx ty::BareFnTy<'tcx> { let unsafety = parse_unsafety(self.next()); let abi = self.parse_abi_set(); let sig = self.parse_sig(); - ty::BareFnTy { + self.tcx.mk_bare_fn(ty::BareFnTy { unsafety: unsafety, abi: abi, sig: sig - } + }) } fn parse_sig(&mut self) -> ty::PolyFnSig<'tcx> { @@ -553,6 +553,18 @@ impl<'a,'tcx> TyDecoder<'a,'tcx> { assert_eq!(self.next(), '|'); ty::Predicate::ObjectSafe(def_id) } + 'c' => { + let def_id = self.parse_def(); + assert_eq!(self.next(), '|'); + let kind = match self.next() { + 'f' => ty::ClosureKind::Fn, + 'm' => ty::ClosureKind::FnMut, + 'o' => ty::ClosureKind::FnOnce, + c => bug!("Encountered invalid character in metadata: {}", c) + }; + assert_eq!(self.next(), '|'); + ty::Predicate::ClosureKind(def_id, kind) + } c => bug!("Encountered invalid character in metadata: {}", c) } } diff --git a/src/librustc_metadata/tyencode.rs b/src/librustc_metadata/tyencode.rs index f49c2e22c6..343c452f89 100644 --- a/src/librustc_metadata/tyencode.rs +++ b/src/librustc_metadata/tyencode.rs @@ -37,9 +37,9 @@ use encoder; pub struct ctxt<'a, 'tcx: 'a> { pub diag: &'a Handler, // Def -> str Callback: - pub ds: fn(&TyCtxt<'tcx>, DefId) -> String, + pub ds: for<'b> fn(TyCtxt<'b, 'tcx, 'tcx>, DefId) -> String, // The type context. - pub tcx: &'a TyCtxt<'tcx>, + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, pub abbrevs: &'a abbrev_map<'tcx> } @@ -110,7 +110,7 @@ pub fn enc_ty<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, t: Ty<'tcx enc_existential_bounds(w, cx, bounds); write!(w, "]"); } - ty::TyTuple(ref ts) => { + ty::TyTuple(ts) => { write!(w, "T["); for t in ts { enc_ty(w, cx, *t); } write!(w, "]"); @@ -156,10 +156,10 @@ pub fn enc_ty<'a, 'tcx>(w: &mut Cursor>, cx: &ctxt<'a, 'tcx>, t: Ty<'tcx enc_substs(w, cx, substs); write!(w, "]"); } - ty::TyClosure(def, ref substs) => { + ty::TyClosure(def, substs) => { write!(w, "k[{}|", (cx.ds)(cx.tcx, def)); - enc_substs(w, cx, &substs.func_substs); - for ty in &substs.upvar_tys { + enc_substs(w, cx, substs.func_substs); + for ty in substs.upvar_tys { enc_ty(w, cx, ty); } write!(w, "."); @@ -449,6 +449,9 @@ pub fn enc_predicate<'a, 'tcx>(w: &mut Cursor>, p: &ty::Predicate<'tcx>) { match *p { + ty::Predicate::Rfc1592(..) 
=> { + bug!("RFC1592 predicate in metadata `{:?}`", p); + } ty::Predicate::Trait(ref trait_ref) => { write!(w, "t"); enc_trait_ref(w, cx, trait_ref.0.trait_ref); @@ -479,6 +482,14 @@ pub fn enc_predicate<'a, 'tcx>(w: &mut Cursor>, ty::Predicate::ObjectSafe(trait_def_id) => { write!(w, "O{}|", (cx.ds)(cx.tcx, trait_def_id)); } + ty::Predicate::ClosureKind(closure_def_id, kind) => { + let kind_char = match kind { + ty::ClosureKind::Fn => 'f', + ty::ClosureKind::FnMut => 'm', + ty::ClosureKind::FnOnce => 'o', + }; + write!(w, "c{}|{}|", (cx.ds)(cx.tcx, closure_def_id), kind_char); + } } } diff --git a/src/librustc_mir/Cargo.toml b/src/librustc_mir/Cargo.toml index 1c41ca6f41..77dccb7e0d 100644 --- a/src/librustc_mir/Cargo.toml +++ b/src/librustc_mir/Cargo.toml @@ -16,4 +16,5 @@ rustc_back = { path = "../librustc_back" } rustc_const_eval = { path = "../librustc_const_eval" } rustc_const_math = { path = "../librustc_const_math" } rustc_data_structures = { path = "../librustc_data_structures" } +rustc_bitflags = { path = "../librustc_bitflags" } syntax = { path = "../libsyntax" } diff --git a/src/librustc_mir/build/block.rs b/src/librustc_mir/build/block.rs index 8c98408e23..c1626b93f0 100644 --- a/src/librustc_mir/build/block.rs +++ b/src/librustc_mir/build/block.rs @@ -13,9 +13,11 @@ use hair::*; use rustc::mir::repr::*; use rustc::hir; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { pub fn ast_block(&mut self, destination: &Lvalue<'tcx>, + // FIXME(#32959): temporary measure for the issue + dest_is_unit: bool, mut block: BasicBlock, ast_block: &'tcx hir::Block) -> BlockAnd<()> { @@ -44,11 +46,7 @@ impl<'a,'tcx> Builder<'a,'tcx> { StmtKind::Expr { scope, expr } => { unpack!(block = this.in_scope(scope, block, |this, _| { let expr = this.hir.mirror(expr); - let expr_span = expr.span; - let temp = this.temp(expr.ty.clone()); - unpack!(block = this.into(&temp, block, expr)); - unpack!(block = this.build_drop(block, expr_span, temp)); - block.unit() + this.stmt_expr(block, expr) })); } StmtKind::Let { remainder_scope, init_scope, pattern, initializer } => { @@ -70,7 +68,7 @@ impl<'a,'tcx> Builder<'a,'tcx> { // of the block. if let Some(expr) = expr { unpack!(block = this.into(destination, block, expr)); - } else { + } else if dest_is_unit { // FIXME(#31472) let scope_id = this.innermost_scope_id(); this.cfg.push_assign_unit(block, scope_id, span, destination); diff --git a/src/librustc_mir/build/expr/as_constant.rs b/src/librustc_mir/build/expr/as_constant.rs index d97245a5fc..a08d14d9e2 100644 --- a/src/librustc_mir/build/expr/as_constant.rs +++ b/src/librustc_mir/build/expr/as_constant.rs @@ -14,7 +14,7 @@ use build::Builder; use hair::*; use rustc::mir::repr::*; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr`, yielding a compile-time constant. Assumes that /// `expr` is a valid compile-time constant! pub fn as_constant(&mut self, expr: M) -> Constant<'tcx> diff --git a/src/librustc_mir/build/expr/as_lvalue.rs b/src/librustc_mir/build/expr/as_lvalue.rs index 0c9323f4af..15ea3f0e6e 100644 --- a/src/librustc_mir/build/expr/as_lvalue.rs +++ b/src/librustc_mir/build/expr/as_lvalue.rs @@ -15,7 +15,7 @@ use build::expr::category::Category; use hair::*; use rustc::mir::repr::*; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr`, yielding an lvalue that we can move from etc. 
pub fn as_lvalue(&mut self, block: BasicBlock, diff --git a/src/librustc_mir/build/expr/as_operand.rs b/src/librustc_mir/build/expr/as_operand.rs index 661d01ce98..a059f2bdde 100644 --- a/src/librustc_mir/build/expr/as_operand.rs +++ b/src/librustc_mir/build/expr/as_operand.rs @@ -15,7 +15,7 @@ use build::expr::category::Category; use hair::*; use rustc::mir::repr::*; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr` into a value that can be used as an operand. /// If `expr` is an lvalue like `x`, this will introduce a /// temporary `tmp = x`, so that we capture the value of `x` at diff --git a/src/librustc_mir/build/expr/as_rvalue.rs b/src/librustc_mir/build/expr/as_rvalue.rs index b340d933e6..2a73346240 100644 --- a/src/librustc_mir/build/expr/as_rvalue.rs +++ b/src/librustc_mir/build/expr/as_rvalue.rs @@ -17,7 +17,7 @@ use build::expr::category::{Category, RvalueFunc}; use hair::*; use rustc::mir::repr::*; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr`, yielding an rvalue. pub fn as_rvalue(&mut self, block: BasicBlock, expr: M) -> BlockAnd> where M: Mirror<'tcx, Output = Expr<'tcx>> @@ -87,12 +87,9 @@ impl<'a,'tcx> Builder<'a,'tcx> { } ExprKind::Cast { source } => { let source = this.hir.mirror(source); - if source.ty == expr.ty { - this.expr_as_rvalue(block, source) - } else { - let source = unpack!(block = this.as_operand(block, source)); - block.and(Rvalue::Cast(CastKind::Misc, source, expr.ty)) - } + + let source = unpack!(block = this.as_operand(block, source)); + block.and(Rvalue::Cast(CastKind::Misc, source, expr.ty)) } ExprKind::ReifyFnPointer { source } => { let source = unpack!(block = this.as_operand(block, source)); @@ -189,6 +186,11 @@ impl<'a,'tcx> Builder<'a,'tcx> { block.and(Rvalue::Aggregate(AggregateKind::Adt(adt_def, variant_index, substs), fields)) } + ExprKind::Assign { .. } | + ExprKind::AssignOp { .. } => { + block = unpack!(this.stmt_expr(block, expr)); + block.and(this.unit_rvalue()) + } ExprKind::Literal { .. } | ExprKind::Block { .. } | ExprKind::Match { .. } | @@ -201,8 +203,6 @@ impl<'a,'tcx> Builder<'a,'tcx> { ExprKind::Index { .. } | ExprKind::VarRef { .. } | ExprKind::SelfRef | - ExprKind::Assign { .. } | - ExprKind::AssignOp { .. } | ExprKind::Break { .. } | ExprKind::Continue { .. } | ExprKind::Return { .. } | diff --git a/src/librustc_mir/build/expr/as_temp.rs b/src/librustc_mir/build/expr/as_temp.rs index a2f7d2c9d7..f33d3dd519 100644 --- a/src/librustc_mir/build/expr/as_temp.rs +++ b/src/librustc_mir/build/expr/as_temp.rs @@ -15,7 +15,7 @@ use build::expr::category::Category; use hair::*; use rustc::mir::repr::*; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr` into a fresh temporary. This is used when building /// up rvalues so as to freeze the value that will be consumed. pub fn as_temp(&mut self, block: BasicBlock, expr: M) -> BlockAnd> @@ -35,13 +35,8 @@ impl<'a,'tcx> Builder<'a,'tcx> { let expr_ty = expr.ty.clone(); let temp = this.temp(expr_ty.clone()); - let temp_lifetime = match expr.temp_lifetime { - Some(t) => t, - None => { - span_bug!(expr.span, "no temp_lifetime for expr"); - } - }; - this.schedule_drop(expr.span, temp_lifetime, &temp, expr_ty); + let temp_lifetime = expr.temp_lifetime; + let expr_span = expr.span; // Careful here not to cause an infinite cycle. 
If we always // called `into`, then for lvalues like `x.f`, it would @@ -52,7 +47,6 @@ impl<'a,'tcx> Builder<'a,'tcx> { // course) `as_temp`. match Category::of(&expr.kind).unwrap() { Category::Lvalue => { - let expr_span = expr.span; let lvalue = unpack!(block = this.as_lvalue(block, expr)); let rvalue = Rvalue::Use(Operand::Consume(lvalue)); let scope_id = this.innermost_scope_id(); @@ -63,6 +57,13 @@ impl<'a,'tcx> Builder<'a,'tcx> { } } + // In constants, temp_lifetime is None. We should not need to drop + // anything because no values with a destructor can be created in + // a constant at this time, even if the type may need dropping. + if let Some(temp_lifetime) = temp_lifetime { + this.schedule_drop(expr_span, temp_lifetime, &temp, expr_ty); + } + block.and(temp) } } diff --git a/src/librustc_mir/build/expr/into.rs b/src/librustc_mir/build/expr/into.rs index 231d7da10a..41610c9037 100644 --- a/src/librustc_mir/build/expr/into.rs +++ b/src/librustc_mir/build/expr/into.rs @@ -12,14 +12,11 @@ use build::{BlockAnd, BlockAndExtension, Builder}; use build::expr::category::{Category, RvalueFunc}; -use build::scope::LoopScope; use hair::*; -use rustc::middle::region::CodeExtent; use rustc::ty; use rustc::mir::repr::*; -use syntax::codemap::Span; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Compile `expr`, storing the result into `destination`, which /// is assumed to be uninitialized. pub fn into_expr(&mut self, @@ -43,7 +40,7 @@ impl<'a,'tcx> Builder<'a,'tcx> { this.in_scope(extent, block, |this, _| this.into(destination, block, value)) } ExprKind::Block { body: ast_block } => { - this.ast_block(destination, block, ast_block) + this.ast_block(destination, expr.ty.is_nil(), block, ast_block) } ExprKind::Match { discriminant, arms } => { this.match_expr(destination, expr_span, block, discriminant, arms) @@ -207,64 +204,6 @@ impl<'a,'tcx> Builder<'a,'tcx> { } exit_block.unit() } - ExprKind::Assign { lhs, rhs } => { - // Note: we evaluate assignments right-to-left. This - // is better for borrowck interaction with overloaded - // operators like x[j] = x[i]. - let lhs = this.hir.mirror(lhs); - let lhs_span = lhs.span; - let rhs = unpack!(block = this.as_operand(block, rhs)); - let lhs = unpack!(block = this.as_lvalue(block, lhs)); - unpack!(block = this.build_drop(block, lhs_span, lhs.clone())); - this.cfg.push_assign(block, scope_id, expr_span, &lhs, Rvalue::Use(rhs)); - block.unit() - } - ExprKind::AssignOp { op, lhs, rhs } => { - // FIXME(#28160) there is an interesting semantics - // question raised here -- should we "freeze" the - // value of the lhs here? I'm inclined to think not, - // since it seems closer to the semantics of the - // overloaded version, which takes `&mut self`. This - // only affects weird things like `x += {x += 1; x}` - // -- is that equal to `x + (x + 1)` or `2*(x+1)`? - - // As above, RTL. - let rhs = unpack!(block = this.as_operand(block, rhs)); - let lhs = unpack!(block = this.as_lvalue(block, lhs)); - - // we don't have to drop prior contents or anything - // because AssignOp is only legal for Copy types - // (overloaded ops should be desugared into a call). 
- this.cfg.push_assign(block, scope_id, expr_span, &lhs, - Rvalue::BinaryOp(op, - Operand::Consume(lhs.clone()), - rhs)); - - block.unit() - } - ExprKind::Continue { label } => { - this.break_or_continue(expr_span, label, block, - |loop_scope| loop_scope.continue_block) - } - ExprKind::Break { label } => { - this.break_or_continue(expr_span, label, block, |loop_scope| { - loop_scope.might_break = true; - loop_scope.break_block - }) - } - ExprKind::Return { value } => { - block = match value { - Some(value) => unpack!(this.into(&Lvalue::ReturnPointer, block, value)), - None => { - this.cfg.push_assign_unit(block, scope_id, - expr_span, &Lvalue::ReturnPointer); - block - } - }; - let extent = this.extent_of_return_scope(); - this.exit_scope(expr_span, extent, block, END_BLOCK); - this.cfg.start_new_block().unit() - } ExprKind::Call { ty, fun, args } => { let diverges = match ty.sty { ty::TyFnDef(_, _, ref f) | ty::TyFnPtr(ref f) => { @@ -293,6 +232,15 @@ impl<'a,'tcx> Builder<'a,'tcx> { success.unit() } + // These cases don't actually need a destination + ExprKind::Assign { .. } | + ExprKind::AssignOp { .. } | + ExprKind::Continue { .. } | + ExprKind::Break { .. } | + ExprKind::Return {.. } => { + this.stmt_expr(block, expr) + } + // these are the cases that are more naturally handled by some other mode ExprKind::Unary { .. } | ExprKind::Binary { .. } | @@ -326,20 +274,4 @@ impl<'a,'tcx> Builder<'a,'tcx> { } } } - - fn break_or_continue(&mut self, - span: Span, - label: Option, - block: BasicBlock, - exit_selector: F) - -> BlockAnd<()> - where F: FnOnce(&mut LoopScope) -> BasicBlock - { - let (exit_block, extent) = { - let loop_scope = self.find_loop_scope(span, label); - (exit_selector(loop_scope), loop_scope.extent) - }; - self.exit_scope(span, extent, block, exit_block); - self.cfg.start_new_block().unit() - } } diff --git a/src/librustc_mir/build/expr/mod.rs b/src/librustc_mir/build/expr/mod.rs index 0f168f307a..17b34f4586 100644 --- a/src/librustc_mir/build/expr/mod.rs +++ b/src/librustc_mir/build/expr/mod.rs @@ -77,3 +77,4 @@ mod as_operand; mod as_temp; mod category; mod into; +mod stmt; diff --git a/src/librustc_mir/build/expr/stmt.rs b/src/librustc_mir/build/expr/stmt.rs new file mode 100644 index 0000000000..9629396f48 --- /dev/null +++ b/src/librustc_mir/build/expr/stmt.rs @@ -0,0 +1,135 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use build::{BlockAnd, BlockAndExtension, Builder}; +use build::scope::LoopScope; +use hair::*; +use rustc::middle::region::CodeExtent; +use rustc::mir::repr::*; +use syntax::codemap::Span; + +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { + + pub fn stmt_expr(&mut self, mut block: BasicBlock, expr: Expr<'tcx>) -> BlockAnd<()> { + let this = self; + let expr_span = expr.span; + let scope_id = this.innermost_scope_id(); + // Handle a number of expressions that don't need a destination at all. This + // avoids needing a mountain of temporary `()` variables. 
+ match expr.kind { + ExprKind::Scope { extent, value } => { + let value = this.hir.mirror(value); + this.in_scope(extent, block, |this, _| this.stmt_expr(block, value)) + } + ExprKind::Assign { lhs, rhs } => { + let lhs = this.hir.mirror(lhs); + let rhs = this.hir.mirror(rhs); + let scope_id = this.innermost_scope_id(); + let lhs_span = lhs.span; + + let lhs_ty = lhs.ty; + let rhs_ty = rhs.ty; + + let lhs_needs_drop = this.hir.needs_drop(lhs_ty); + let rhs_needs_drop = this.hir.needs_drop(rhs_ty); + + // Note: we evaluate assignments right-to-left. This + // is better for borrowck interaction with overloaded + // operators like x[j] = x[i]. + + // Generate better code for things that don't need to be + // dropped. + let rhs = if lhs_needs_drop || rhs_needs_drop { + let op = unpack!(block = this.as_operand(block, rhs)); + Rvalue::Use(op) + } else { + unpack!(block = this.as_rvalue(block, rhs)) + }; + + let lhs = unpack!(block = this.as_lvalue(block, lhs)); + unpack!(block = this.build_drop(block, lhs_span, lhs.clone(), lhs_ty)); + this.cfg.push_assign(block, scope_id, expr_span, &lhs, rhs); + block.unit() + } + ExprKind::AssignOp { op, lhs, rhs } => { + // FIXME(#28160) there is an interesting semantics + // question raised here -- should we "freeze" the + // value of the lhs here? I'm inclined to think not, + // since it seems closer to the semantics of the + // overloaded version, which takes `&mut self`. This + // only affects weird things like `x += {x += 1; x}` + // -- is that equal to `x + (x + 1)` or `2*(x+1)`? + + // As above, RTL. + let rhs = unpack!(block = this.as_operand(block, rhs)); + let lhs = unpack!(block = this.as_lvalue(block, lhs)); + + // we don't have to drop prior contents or anything + // because AssignOp is only legal for Copy types + // (overloaded ops should be desugared into a call). 
+ this.cfg.push_assign(block, scope_id, expr_span, &lhs, + Rvalue::BinaryOp(op, + Operand::Consume(lhs.clone()), + rhs)); + + block.unit() + } + ExprKind::Continue { label } => { + this.break_or_continue(expr_span, label, block, + |loop_scope| loop_scope.continue_block) + } + ExprKind::Break { label } => { + this.break_or_continue(expr_span, label, block, |loop_scope| { + loop_scope.might_break = true; + loop_scope.break_block + }) + } + ExprKind::Return { value } => { + block = match value { + Some(value) => unpack!(this.into(&Lvalue::ReturnPointer, block, value)), + None => { + this.cfg.push_assign_unit(block, scope_id, + expr_span, &Lvalue::ReturnPointer); + block + } + }; + let extent = this.extent_of_return_scope(); + let return_block = this.return_block(); + this.exit_scope(expr_span, extent, block, return_block); + this.cfg.start_new_block().unit() + } + _ => { + let expr_span = expr.span; + let expr_ty = expr.ty; + let temp = this.temp(expr.ty.clone()); + unpack!(block = this.into(&temp, block, expr)); + unpack!(block = this.build_drop(block, expr_span, temp, expr_ty)); + block.unit() + } + } + } + + fn break_or_continue(&mut self, + span: Span, + label: Option, + block: BasicBlock, + exit_selector: F) + -> BlockAnd<()> + where F: FnOnce(&mut LoopScope) -> BasicBlock + { + let (exit_block, extent) = { + let loop_scope = self.find_loop_scope(span, label); + (exit_selector(loop_scope), loop_scope.extent) + }; + self.exit_scope(span, extent, block, exit_block); + self.cfg.start_new_block().unit() + } + +} diff --git a/src/librustc_mir/build/into.rs b/src/librustc_mir/build/into.rs index 77d9d92632..17ccb701c2 100644 --- a/src/librustc_mir/build/into.rs +++ b/src/librustc_mir/build/into.rs @@ -19,14 +19,14 @@ use hair::*; use rustc::mir::repr::*; pub trait EvalInto<'tcx> { - fn eval_into<'a>(self, - builder: &mut Builder<'a, 'tcx>, - destination: &Lvalue<'tcx>, - block: BasicBlock) - -> BlockAnd<()>; + fn eval_into<'a, 'gcx>(self, + builder: &mut Builder<'a, 'gcx, 'tcx>, + destination: &Lvalue<'tcx>, + block: BasicBlock) + -> BlockAnd<()>; } -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { pub fn into(&mut self, destination: &Lvalue<'tcx>, block: BasicBlock, @@ -39,22 +39,22 @@ impl<'a,'tcx> Builder<'a,'tcx> { } impl<'tcx> EvalInto<'tcx> for ExprRef<'tcx> { - fn eval_into<'a>(self, - builder: &mut Builder<'a, 'tcx>, - destination: &Lvalue<'tcx>, - block: BasicBlock) - -> BlockAnd<()> { + fn eval_into<'a, 'gcx>(self, + builder: &mut Builder<'a, 'gcx, 'tcx>, + destination: &Lvalue<'tcx>, + block: BasicBlock) + -> BlockAnd<()> { let expr = builder.hir.mirror(self); builder.into_expr(destination, block, expr) } } impl<'tcx> EvalInto<'tcx> for Expr<'tcx> { - fn eval_into<'a>(self, - builder: &mut Builder<'a, 'tcx>, - destination: &Lvalue<'tcx>, - block: BasicBlock) - -> BlockAnd<()> { + fn eval_into<'a, 'gcx>(self, + builder: &mut Builder<'a, 'gcx, 'tcx>, + destination: &Lvalue<'tcx>, + block: BasicBlock) + -> BlockAnd<()> { builder.into_expr(destination, block, self) } } diff --git a/src/librustc_mir/build/matches/mod.rs b/src/librustc_mir/build/matches/mod.rs index cabf5c9554..c1a0e1f9a6 100644 --- a/src/librustc_mir/build/matches/mod.rs +++ b/src/librustc_mir/build/matches/mod.rs @@ -27,7 +27,7 @@ mod simplify; mod test; mod util; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { pub fn match_expr(&mut self, destination: &Lvalue<'tcx>, span: Span, @@ -37,25 +37,28 @@ impl<'a,'tcx> Builder<'a,'tcx> { -> BlockAnd<()> { let 
discriminant_lvalue = unpack!(block = self.as_lvalue(block, discriminant)); - // Before we do anything, create uninitialized variables with - // suitable extent for all of the bindings in this match. It's - // easiest to do this up front because some of these arms may - // be unreachable or reachable multiple times. - let var_scope_id = self.innermost_scope_id(); - for arm in &arms { - self.declare_bindings(var_scope_id, &arm.patterns[0]); - } - let mut arm_blocks = ArmBlocks { blocks: arms.iter() .map(|_| self.cfg.start_new_block()) .collect(), }; - let arm_bodies: Vec> = - arms.iter() - .map(|arm| arm.body.clone()) - .collect(); + // Get the body expressions and their scopes, while declaring bindings. + let arm_bodies: Vec<_> = arms.iter().enumerate().map(|(i, arm)| { + // Assume that all expressions are wrapped in Scope. + let body = self.hir.mirror(arm.body.clone()); + match body.kind { + ExprKind::Scope { extent, value } => { + let scope_id = self.push_scope(extent, arm_blocks.blocks[i]); + self.declare_bindings(scope_id, &arm.patterns[0]); + (extent, self.scopes.pop().unwrap(), value) + } + _ => { + span_bug!(body.span, "arm body is not wrapped in Scope {:?}", + body.kind); + } + } + }).collect(); // assemble a list of candidates: there is one candidate per // pattern, which means there may be more than one candidate @@ -95,11 +98,15 @@ impl<'a,'tcx> Builder<'a,'tcx> { // all the arm blocks will rejoin here let end_block = self.cfg.start_new_block(); - for (arm_index, arm_body) in arm_bodies.into_iter().enumerate() { + let scope_id = self.innermost_scope_id(); + for (arm_index, (extent, scope, body)) in arm_bodies.into_iter().enumerate() { let mut arm_block = arm_blocks.blocks[arm_index]; - unpack!(arm_block = self.into(destination, arm_block, arm_body)); + // Re-enter the scope we created the bindings in. + self.scopes.push(scope); + unpack!(arm_block = self.into(destination, arm_block, body)); + unpack!(arm_block = self.pop_scope(extent, arm_block)); self.cfg.terminate(arm_block, - var_scope_id, + scope_id, span, TerminatorKind::Goto { target: end_block }); } @@ -297,7 +304,7 @@ pub struct Test<'tcx> { /////////////////////////////////////////////////////////////////////////// // Main matching algorithm -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// The main match algorithm. It begins with a set of candidates /// `candidates` and has the job of generating code to determine /// which of these candidates, if any, is the correct one. The @@ -434,7 +441,7 @@ impl<'a,'tcx> Builder<'a,'tcx> { /// But there may also be candidates that the test just doesn't /// apply to. 
For example, consider the case of #29740: /// - /// ```rust + /// ```rust,ignore /// match x { /// "foo" => ..., /// "bar" => ..., diff --git a/src/librustc_mir/build/matches/simplify.rs b/src/librustc_mir/build/matches/simplify.rs index a3337badf8..c707bb8a27 100644 --- a/src/librustc_mir/build/matches/simplify.rs +++ b/src/librustc_mir/build/matches/simplify.rs @@ -29,7 +29,7 @@ use rustc::mir::repr::*; use std::mem; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { pub fn simplify_candidate<'pat>(&mut self, mut block: BasicBlock, candidate: &mut Candidate<'pat, 'tcx>) diff --git a/src/librustc_mir/build/matches/test.rs b/src/librustc_mir/build/matches/test.rs index f70d4321a4..e53584a3f8 100644 --- a/src/librustc_mir/build/matches/test.rs +++ b/src/librustc_mir/build/matches/test.rs @@ -24,7 +24,7 @@ use rustc::ty::{self, Ty}; use rustc::mir::repr::*; use syntax::codemap::Span; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Identifies what test is needed to decide if `match_pair` is applicable. /// /// It is a bug to call this with a simplifyable pattern. diff --git a/src/librustc_mir/build/matches/util.rs b/src/librustc_mir/build/matches/util.rs index 101d759430..5eb58f7612 100644 --- a/src/librustc_mir/build/matches/util.rs +++ b/src/librustc_mir/build/matches/util.rs @@ -14,7 +14,7 @@ use hair::*; use rustc::mir::repr::*; use std::u32; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { pub fn field_match_pairs<'pat>(&mut self, lvalue: Lvalue<'tcx>, subpatterns: &'pat [FieldPattern<'tcx>]) @@ -32,14 +32,18 @@ impl<'a,'tcx> Builder<'a,'tcx> { /// this function converts the prefix (`x`, `y`) and suffix (`z`) into /// distinct match pairs: /// + /// ```rust,ignore /// lv[0 of 3] @ x // see ProjectionElem::ConstantIndex (and its Debug impl) /// lv[1 of 3] @ y // to explain the `[x of y]` notation /// lv[-1 of 3] @ z + /// ``` /// /// If a slice like `s` is present, then the function also creates /// a temporary like: /// + /// ```rust,ignore /// tmp0 = lv[2..-1] // using the special Rvalue::Slice + /// ``` /// /// and creates a match pair `tmp0 @ s` pub fn prefix_suffix_slice<'pat>(&mut self, diff --git a/src/librustc_mir/build/misc.rs b/src/librustc_mir/build/misc.rs index 86f15a6319..7317c6f9b3 100644 --- a/src/librustc_mir/build/misc.rs +++ b/src/librustc_mir/build/misc.rs @@ -17,7 +17,7 @@ use rustc::mir::repr::*; use std::u32; use syntax::codemap::Span; -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { /// Add a new temporary value of type `ty` storing the result of /// evaluating `expr`. /// @@ -46,6 +46,10 @@ impl<'a,'tcx> Builder<'a,'tcx> { Operand::Constant(constant) } + pub fn unit_rvalue(&mut self) -> Rvalue<'tcx> { + Rvalue::Aggregate(AggregateKind::Tuple, vec![]) + } + pub fn push_usize(&mut self, block: BasicBlock, scope_id: ScopeId, diff --git a/src/librustc_mir/build/mod.rs b/src/librustc_mir/build/mod.rs index 2e5b6a952b..d75cf3b758 100644 --- a/src/librustc_mir/build/mod.rs +++ b/src/librustc_mir/build/mod.rs @@ -9,38 +9,41 @@ // except according to those terms. 
use hair::cx::Cx; -use rustc::middle::region::{CodeExtent, CodeExtentData}; -use rustc::ty::{FnOutput, Ty}; +use rustc::middle::region::{CodeExtent, CodeExtentData, ROOT_CODE_EXTENT}; +use rustc::ty::{self, Ty}; use rustc::mir::repr::*; use rustc_data_structures::fnv::FnvHashMap; use rustc::hir; +use rustc::hir::pat_util::pat_is_binding; use std::ops::{Index, IndexMut}; +use syntax::abi::Abi; use syntax::ast; use syntax::codemap::Span; +use syntax::parse::token::keywords; -pub struct Builder<'a, 'tcx: 'a> { - hir: Cx<'a, 'tcx>, +pub struct Builder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + hir: Cx<'a, 'gcx, 'tcx>, cfg: CFG<'tcx>, fn_span: Span, - // the current set of scopes, updated as we traverse; - // see the `scope` module for more details + /// the current set of scopes, updated as we traverse; + /// see the `scope` module for more details scopes: Vec>, - // for each scope, a span of blocks that defines it; - // we track these for use in region and borrow checking, - // but these are liable to get out of date once optimization - // begins. They are also hopefully temporary, and will be - // no longer needed when we adopt graph-based regions. + /// for each scope, a span of blocks that defines it; + /// we track these for use in region and borrow checking, + /// but these are liable to get out of date once optimization + /// begins. They are also hopefully temporary, and will be + /// no longer needed when we adopt graph-based regions. scope_auxiliary: ScopeAuxiliaryVec, - // the current set of loops; see the `scope` module for more - // details + /// the current set of loops; see the `scope` module for more + /// details loop_scopes: Vec, - // the vector of all scopes that we have created thus far; - // we track this for debuginfo later + /// the vector of all scopes that we have created thus far; + /// we track this for debuginfo later scope_datas: Vec, var_decls: Vec>, @@ -48,9 +51,11 @@ pub struct Builder<'a, 'tcx: 'a> { temp_decls: Vec>, unit_temp: Option>, - // cached block with a RESUME terminator; we create this at the - // first panic + /// cached block with the RESUME terminator; this is created + /// when first set of cleanups are built. cached_resume_block: Option, + /// cached block with the RETURN terminator + cached_return_block: Option, } struct CFG<'tcx> { @@ -79,7 +84,7 @@ pub struct ScopeAuxiliary { pub postdoms: Vec, } -#[derive(Copy, Clone, PartialEq, Eq, Hash)] +#[derive(Copy, Clone, PartialEq, Eq, Debug, Hash)] pub struct Location { /// the location is within this block pub block: BasicBlock, @@ -155,126 +160,201 @@ macro_rules! 
unpack { /////////////////////////////////////////////////////////////////////////// /// the main entry point for building MIR for a function -pub fn construct<'a,'tcx>(hir: Cx<'a,'tcx>, - span: Span, - fn_id: ast::NodeId, - body_id: ast::NodeId, - implicit_arguments: Vec>, - explicit_arguments: Vec<(Ty<'tcx>, &'tcx hir::Pat)>, - return_ty: FnOutput<'tcx>, - ast_block: &'tcx hir::Block) - -> (Mir<'tcx>, ScopeAuxiliaryVec) { +pub fn construct_fn<'a, 'gcx, 'tcx, A>(hir: Cx<'a, 'gcx, 'tcx>, + fn_id: ast::NodeId, + arguments: A, + return_ty: ty::FnOutput<'gcx>, + ast_block: &'gcx hir::Block) + -> (Mir<'tcx>, ScopeAuxiliaryVec) + where A: Iterator, Option<&'gcx hir::Pat>)> +{ let tcx = hir.tcx(); - let cfg = CFG { basic_blocks: vec![] }; - - let mut builder = Builder { - hir: hir, - cfg: cfg, - fn_span: span, - scopes: vec![], - scope_datas: vec![], - scope_auxiliary: ScopeAuxiliaryVec { vec: vec![] }, - loop_scopes: vec![], - temp_decls: vec![], - var_decls: vec![], - var_indices: FnvHashMap(), - unit_temp: None, - cached_resume_block: None, - }; - - assert_eq!(builder.cfg.start_new_block(), START_BLOCK); - assert_eq!(builder.cfg.start_new_block(), END_BLOCK); - + let span = tcx.map.span(fn_id); + let mut builder = Builder::new(hir, span); - let mut arg_decls = None; // assigned to `Some` in closures below + let body_id = ast_block.id; let call_site_extent = tcx.region_maps.lookup_code_extent( CodeExtentData::CallSiteScope { fn_id: fn_id, body_id: body_id }); - let _ = builder.in_scope(call_site_extent, START_BLOCK, |builder, call_site_scope_id| { - let mut block = START_BLOCK; - let arg_extent = - tcx.region_maps.lookup_code_extent( - CodeExtentData::ParameterScope { fn_id: fn_id, body_id: body_id }); - unpack!(block = builder.in_scope(arg_extent, block, |builder, arg_scope_id| { - arg_decls = Some(unpack!(block = builder.args_and_body(block, - implicit_arguments, - explicit_arguments, - arg_scope_id, - ast_block))); - block.unit() + let arg_extent = + tcx.region_maps.lookup_code_extent( + CodeExtentData::ParameterScope { fn_id: fn_id, body_id: body_id }); + let mut block = START_BLOCK; + let mut arg_decls = unpack!(block = builder.in_scope(call_site_extent, block, + |builder, call_site_scope_id| { + let arg_decls = unpack!(block = builder.in_scope(arg_extent, block, + |builder, arg_scope_id| { + builder.args_and_body(block, return_ty, arguments, arg_scope_id, ast_block) })); + let return_block = builder.return_block(); builder.cfg.terminate(block, call_site_scope_id, span, - TerminatorKind::Goto { target: END_BLOCK }); - builder.cfg.terminate(END_BLOCK, call_site_scope_id, span, + TerminatorKind::Goto { target: return_block }); + builder.cfg.terminate(return_block, call_site_scope_id, span, TerminatorKind::Return); + return_block.and(arg_decls) + })); + assert_eq!(block, builder.return_block()); + + match tcx.node_id_to_type(fn_id).sty { + ty::TyFnDef(_, _, f) if f.abi == Abi::RustCall => { + // RustCall pseudo-ABI untuples the last argument. + if let Some(arg_decl) = arg_decls.last_mut() { + arg_decl.spread = true; + } + } + _ => {} + } - END_BLOCK.unit() + // Gather the upvars of a closure, if any. + let upvar_decls: Vec<_> = tcx.with_freevars(fn_id, |freevars| { + freevars.iter().map(|fv| { + let by_ref = tcx.upvar_capture(ty::UpvarId { + var_id: fv.def.var_id(), + closure_expr_id: fn_id + }).map_or(false, |capture| match capture { + ty::UpvarCapture::ByValue => false, + ty::UpvarCapture::ByRef(..) 
=> true + }); + let mut decl = UpvarDecl { + debug_name: keywords::Invalid.name(), + by_ref: by_ref + }; + if let Some(hir::map::NodeLocal(pat)) = tcx.map.find(fv.def.var_id()) { + if let hir::PatKind::Ident(_, ref ident, _) = pat.node { + decl.debug_name = ident.node; + } + } + decl + }).collect() }); - assert!( - builder.cfg.basic_blocks - .iter() - .enumerate() - .all(|(index, block)| { - if block.terminator.is_none() { - bug!("no terminator on block {:?} in fn {:?}", - index, fn_id) - } - true - })); - - ( - Mir { - basic_blocks: builder.cfg.basic_blocks, - scopes: builder.scope_datas, - var_decls: builder.var_decls, - arg_decls: arg_decls.take().expect("args never built?"), - temp_decls: builder.temp_decls, - return_ty: return_ty, - span: span - }, - builder.scope_auxiliary, - ) + builder.finish(upvar_decls, arg_decls, return_ty) +} + +pub fn construct_const<'a, 'gcx, 'tcx>(hir: Cx<'a, 'gcx, 'tcx>, + item_id: ast::NodeId, + ast_expr: &'tcx hir::Expr) + -> (Mir<'tcx>, ScopeAuxiliaryVec) { + let tcx = hir.tcx(); + let span = tcx.map.span(item_id); + let mut builder = Builder::new(hir, span); + + let extent = ROOT_CODE_EXTENT; + let mut block = START_BLOCK; + let _ = builder.in_scope(extent, block, |builder, call_site_scope_id| { + let expr = builder.hir.mirror(ast_expr); + unpack!(block = builder.into(&Lvalue::ReturnPointer, block, expr)); + + let return_block = builder.return_block(); + builder.cfg.terminate(block, call_site_scope_id, span, + TerminatorKind::Goto { target: return_block }); + builder.cfg.terminate(return_block, call_site_scope_id, span, + TerminatorKind::Return); + + return_block.unit() + }); + + let ty = tcx.expr_ty_adjusted(ast_expr); + builder.finish(vec![], vec![], ty::FnConverging(ty)) } -impl<'a,'tcx> Builder<'a,'tcx> { - fn args_and_body(&mut self, - mut block: BasicBlock, - implicit_arguments: Vec>, - explicit_arguments: Vec<(Ty<'tcx>, &'tcx hir::Pat)>, - argument_scope_id: ScopeId, - ast_block: &'tcx hir::Block) - -> BlockAnd>> +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { + fn new(hir: Cx<'a, 'gcx, 'tcx>, span: Span) -> Builder<'a, 'gcx, 'tcx> { + let mut builder = Builder { + hir: hir, + cfg: CFG { basic_blocks: vec![] }, + fn_span: span, + scopes: vec![], + scope_datas: vec![], + scope_auxiliary: ScopeAuxiliaryVec { vec: vec![] }, + loop_scopes: vec![], + temp_decls: vec![], + var_decls: vec![], + var_indices: FnvHashMap(), + unit_temp: None, + cached_resume_block: None, + cached_return_block: None + }; + + assert_eq!(builder.cfg.start_new_block(), START_BLOCK); + + builder + } + + fn finish(self, + upvar_decls: Vec, + arg_decls: Vec>, + return_ty: ty::FnOutput<'tcx>) + -> (Mir<'tcx>, ScopeAuxiliaryVec) { + for (index, block) in self.cfg.basic_blocks.iter().enumerate() { + if block.terminator.is_none() { + span_bug!(self.fn_span, "no terminator on block {:?}", index); + } + } + + (Mir { + basic_blocks: self.cfg.basic_blocks, + scopes: self.scope_datas, + promoted: vec![], + var_decls: self.var_decls, + arg_decls: arg_decls, + temp_decls: self.temp_decls, + upvar_decls: upvar_decls, + return_ty: return_ty, + span: self.fn_span + }, self.scope_auxiliary) + } + + fn args_and_body(&mut self, + mut block: BasicBlock, + return_ty: ty::FnOutput<'tcx>, + arguments: A, + argument_scope_id: ScopeId, + ast_block: &'gcx hir::Block) + -> BlockAnd>> + where A: Iterator, Option<&'gcx hir::Pat>)> { // to start, translate the argument patterns and collect the argument types. 
- let implicits = implicit_arguments.into_iter().map(|ty| (ty, None)); - let explicits = explicit_arguments.into_iter().map(|(ty, pat)| (ty, Some(pat))); - let arg_decls = - implicits - .chain(explicits) - .enumerate() - .map(|(index, (ty, pattern))| { - let lvalue = Lvalue::Arg(index as u32); - if let Some(pattern) = pattern { - let pattern = self.hir.irrefutable_pat(pattern); - unpack!(block = self.lvalue_into_pattern(block, - argument_scope_id, - pattern, - &lvalue)); - } - - // Make sure we drop (parts of) the argument even when not matched on. - let argument_extent = self.scope_auxiliary[argument_scope_id].extent; - self.schedule_drop(pattern.as_ref().map_or(ast_block.span, |pat| pat.span), - argument_extent, &lvalue, ty); + let arg_decls = arguments.enumerate().map(|(index, (ty, pattern))| { + let lvalue = Lvalue::Arg(index as u32); + if let Some(pattern) = pattern { + let pattern = self.hir.irrefutable_pat(pattern); + unpack!(block = self.lvalue_into_pattern(block, + argument_scope_id, + pattern, + &lvalue)); + } - ArgDecl { ty: ty, spread: false } - }) - .collect(); + // Make sure we drop (parts of) the argument even when not matched on. + let argument_extent = self.scope_auxiliary[argument_scope_id].extent; + self.schedule_drop(pattern.as_ref().map_or(ast_block.span, |pat| pat.span), + argument_extent, &lvalue, ty); + + let mut name = keywords::Invalid.name(); + if let Some(pat) = pattern { + if let hir::PatKind::Ident(_, ref ident, _) = pat.node { + if pat_is_binding(&self.hir.tcx().def_map.borrow(), pat) { + name = ident.node; + } + } + } + ArgDecl { + ty: ty, + spread: false, + debug_name: name + } + }).collect(); + + // FIXME(#32959): temporary hack for the issue at hand + let return_is_unit = if let ty::FnConverging(t) = return_ty { + t.is_nil() + } else { + false + }; // start the first basic block and translate the body - unpack!(block = self.ast_block(&Lvalue::ReturnPointer, block, ast_block)); + unpack!(block = self.ast_block(&Lvalue::ReturnPointer, return_is_unit, block, ast_block)); block.and(arg_decls) } @@ -290,6 +370,17 @@ impl<'a,'tcx> Builder<'a,'tcx> { } } } + + fn return_block(&mut self) -> BasicBlock { + match self.cached_return_block { + Some(rb) => rb, + None => { + let rb = self.cfg.start_new_block(); + self.cached_return_block = Some(rb); + rb + } + } + } } /////////////////////////////////////////////////////////////////////////// diff --git a/src/librustc_mir/build/scope.rs b/src/librustc_mir/build/scope.rs index bda9cf058f..071c8d618c 100644 --- a/src/librustc_mir/build/scope.rs +++ b/src/librustc_mir/build/scope.rs @@ -47,7 +47,7 @@ set of scheduled drops up front, and so whenever we exit from the scope we only drop the values scheduled thus far. For example, consider the scope S corresponding to this loop: -``` +```rust,ignore loop { let x = ...; if cond { break; } @@ -206,7 +206,7 @@ impl<'tcx> Scope<'tcx> { } } -impl<'a,'tcx> Builder<'a,'tcx> { +impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> { // Adding and removing scopes // ========================== /// Start a loop scope, which tracks where `continue` and `break` @@ -218,7 +218,7 @@ impl<'a,'tcx> Builder<'a,'tcx> { break_block: BasicBlock, f: F) -> bool - where F: FnOnce(&mut Builder<'a, 'tcx>) + where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) { let extent = self.extent_of_innermost_scope(); let loop_scope = LoopScope { @@ -237,7 +237,7 @@ impl<'a,'tcx> Builder<'a,'tcx> { /// Convenience wrapper that pushes a scope and then executes `f` /// to build its contents, popping the scope afterwards. 
pub fn in_scope(&mut self, extent: CodeExtent, mut block: BasicBlock, f: F) -> BlockAnd - where F: FnOnce(&mut Builder<'a, 'tcx>, ScopeId) -> BlockAnd + where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>, ScopeId) -> BlockAnd { debug!("in_scope(extent={:?}, block={:?})", extent, block); let id = self.push_scope(extent, block); @@ -255,7 +255,9 @@ impl<'a,'tcx> Builder<'a,'tcx> { debug!("push_scope({:?})", extent); let parent_id = self.scopes.last().map(|s| s.id); let id = ScopeId::new(self.scope_datas.len()); + let tcx = self.hir.tcx(); self.scope_datas.push(ScopeData { + span: extent.span(&tcx.region_maps, &tcx.map).unwrap_or(DUMMY_SP), parent_scope: parent_id, }); self.scopes.push(Scope { @@ -495,8 +497,11 @@ impl<'a,'tcx> Builder<'a,'tcx> { pub fn build_drop(&mut self, block: BasicBlock, span: Span, - value: Lvalue<'tcx>) - -> BlockAnd<()> { + value: Lvalue<'tcx>, + ty: Ty<'tcx>) -> BlockAnd<()> { + if !self.hir.needs_drop(ty) { + return block.unit(); + } let scope_id = self.innermost_scope_id(); let next_target = self.cfg.start_new_block(); let diverge_target = self.diverge_cleanup(); @@ -657,12 +662,12 @@ fn build_scope_drops<'tcx>(cfg: &mut CFG<'tcx>, block.unit() } -fn build_diverge_scope<'tcx>(tcx: &TyCtxt<'tcx>, - cfg: &mut CFG<'tcx>, - unit_temp: &Lvalue<'tcx>, - scope: &mut Scope<'tcx>, - mut target: BasicBlock) - -> BasicBlock +fn build_diverge_scope<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + cfg: &mut CFG<'tcx>, + unit_temp: &Lvalue<'tcx>, + scope: &mut Scope<'tcx>, + mut target: BasicBlock) + -> BasicBlock { // Build up the drops in **reverse** order. The end result will // look like: @@ -716,11 +721,11 @@ fn build_diverge_scope<'tcx>(tcx: &TyCtxt<'tcx>, target } -fn build_free<'tcx>(tcx: &TyCtxt<'tcx>, - unit_temp: &Lvalue<'tcx>, - data: &FreeData<'tcx>, - target: BasicBlock) - -> TerminatorKind<'tcx> { +fn build_free<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + unit_temp: &Lvalue<'tcx>, + data: &FreeData<'tcx>, + target: BasicBlock) + -> TerminatorKind<'tcx> { let free_func = tcx.lang_items.require(lang_items::BoxFreeFnLangItem) .unwrap_or_else(|e| tcx.sess.fatal(&e)); let substs = tcx.mk_substs(Substs::new( diff --git a/src/librustc_mir/diagnostics.rs b/src/librustc_mir/diagnostics.rs new file mode 100644 index 0000000000..65d51d2052 --- /dev/null +++ b/src/librustc_mir/diagnostics.rs @@ -0,0 +1,387 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(non_snake_case)] + +register_long_diagnostics! { + +E0010: r##" +The value of statics and constants must be known at compile time, and they live +for the entire lifetime of a program. Creating a boxed value allocates memory on +the heap at runtime, and therefore cannot be done at compile time. Erroneous +code example: + +```compile_fail +#![feature(box_syntax)] + +const CON : Box = box 0; +``` +"##, + +E0013: r##" +Static and const variables can refer to other const variables. But a const +variable cannot refer to a static variable. 
For example, `Y` cannot refer to +`X` here: + +```compile_fail +static X: i32 = 42; +const Y: i32 = X; +``` + +To fix this, the value can be extracted as a const and then used: + +``` +const A: i32 = 42; +static X: i32 = A; +const Y: i32 = A; +``` +"##, + +// FIXME(#24111) Change the language here when const fn stabilizes +E0015: r##" +The only functions that can be called in static or constant expressions are +`const` functions, and struct/enum constructors. `const` functions are only +available on a nightly compiler. Rust currently does not support more general +compile-time function execution. + +``` +const FOO: Option = Some(1); // enum constructor +struct Bar {x: u8} +const BAR: Bar = Bar {x: 1}; // struct constructor +``` + +See [RFC 911] for more details on the design of `const fn`s. + +[RFC 911]: https://github.com/rust-lang/rfcs/blob/master/text/0911-const-fn.md +"##, + +E0016: r##" +Blocks in constants may only contain items (such as constant, function +definition, etc...) and a tail expression. Erroneous code example: + +```compile_fail +const FOO: i32 = { let x = 0; x }; // 'x' isn't an item! +``` + +To avoid it, you have to replace the non-item object: + +``` +const FOO: i32 = { const X : i32 = 0; X }; +``` +"##, + +E0017: r##" +References in statics and constants may only refer to immutable values. +Erroneous code example: + +```compile_fail +static X: i32 = 1; +const C: i32 = 2; + +// these three are not allowed: +const CR: &'static mut i32 = &mut C; +static STATIC_REF: &'static mut i32 = &mut X; +static CONST_REF: &'static mut i32 = &mut C; +``` + +Statics are shared everywhere, and if they refer to mutable data one might +violate memory safety since holding multiple mutable references to shared data +is not allowed. + +If you really want global mutable state, try using `static mut` or a global +`UnsafeCell`. +"##, + +E0018: r##" + +The value of static and constant integers must be known at compile time. You +can't cast a pointer to an integer because the address of a pointer can +vary. + +For example, if you write: + +```compile_fail +static MY_STATIC: u32 = 42; +static MY_STATIC_ADDR: usize = &MY_STATIC as *const _ as usize; +static WHAT: usize = (MY_STATIC_ADDR^17) + MY_STATIC_ADDR; +``` + +Then `MY_STATIC_ADDR` would contain the address of `MY_STATIC`. However, +the address can change when the program is linked, as well as change +between different executions due to ASLR, and many linkers would +not be able to calculate the value of `WHAT`. + +On the other hand, static and constant pointers can point either to +a known numeric address or to the address of a symbol. + +``` +static MY_STATIC_ADDR: &'static u32 = &MY_STATIC; +// ... and also +static MY_STATIC_ADDR2: *const u32 = &MY_STATIC; + +const CONST_ADDR: *const u8 = 0x5f3759df as *const u8; +``` + +This does not pose a problem by itself because they can't be +accessed directly. +"##, + +E0019: r##" +A function call isn't allowed in the const's initialization expression +because the expression's value must be known at compile-time. Erroneous code +example: + +```compile_fail +enum Test { + V1 +} + +impl Test { + fn test(&self) -> i32 { + 12 + } +} + +fn main() { + const FOO: Test = Test::V1; + + const A: i32 = FOO.test(); // You can't call Test::func() here ! +} +``` + +Remember: you can't use a function call inside a const's initialization +expression! 
+However, you can use it anywhere else:
+
+```
+fn main() {
+    const FOO: Test = Test::V1;
+
+    FOO.test(); // here is good
+    let x = FOO.test(); // or even here!
+}
+```
+"##,
+
+E0022: r##"
+Constant functions are not allowed to mutate anything. Thus, binding to an
+argument with a mutable pattern is not allowed. For example,
+
+```compile_fail
+const fn foo(mut x: u8) {
+    // do stuff
+}
+```
+
+is incorrect because the function body may not mutate `x`.
+
+Remove any mutable bindings from the argument list to fix this error. In case
+you need to mutate the argument, try lazily initializing a global variable
+instead of using a `const fn`, or refactoring the code to a functional style to
+avoid mutation if possible.
+"##,
+
+E0394: r##"
+From [RFC 246]:
+
+ > It is invalid for a static to reference another static by value. It is
+ > required that all references be borrowed.
+
+[RFC 246]: https://github.com/rust-lang/rfcs/pull/246
+"##,
+
+
+E0395: r##"
+The value assigned to a constant scalar must be known at compile time,
+which is not the case when comparing raw pointers.
+
+Erroneous code example:
+
+```compile_fail
+static FOO: i32 = 42;
+static BAR: i32 = 42;
+
+static BAZ: bool = { (&FOO as *const i32) == (&BAR as *const i32) };
+// error: raw pointers cannot be compared in statics!
+```
+
+The addresses assigned by the linker to `FOO` and `BAR` may or may not
+be identical, so the value of `BAZ` can't be determined.
+
+If you want to do the comparison, please do it at run-time.
+
+For example:
+
+```
+static FOO: i32 = 42;
+static BAR: i32 = 42;
+
+let baz: bool = { (&FOO as *const i32) == (&BAR as *const i32) };
+// baz isn't a constant expression so it's ok
+```
+"##,
+
+E0396: r##"
+The value behind a raw pointer can't be determined at compile-time
+(or even link-time), which means it can't be used in a constant
+expression. Erroneous code example:
+
+```compile_fail
+const REG_ADDR: *const u8 = 0x5f3759df as *const u8;
+
+const VALUE: u8 = unsafe { *REG_ADDR };
+// error: raw pointers cannot be dereferenced in constants
+```
+
+A possible fix is to dereference your pointer at some point in run-time.
+
+For example:
+
+```
+const REG_ADDR: *const u8 = 0x5f3759df as *const u8;
+
+let reg_value = unsafe { *REG_ADDR };
+```
+"##,
+
+E0492: r##"
+A borrow of a constant containing interior mutability was attempted. Erroneous
+code example:
+
+```compile_fail
+use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT};
+
+const A: AtomicUsize = ATOMIC_USIZE_INIT;
+static B: &'static AtomicUsize = &A;
+// error: cannot borrow a constant which contains interior mutability, create a
+// static instead
+```
+
+A `const` represents a constant value that should never change. If one takes
+a `&` reference to the constant, then one is taking a pointer to some memory
+location containing the value. Normally this is perfectly fine: most values
+can't be changed via a shared `&` pointer, but interior mutability would allow
+it. That is, a constant value could be mutated. On the other hand, a `static` is
+explicitly a single memory location, which can be mutated at will.
+
+So, in order to solve this error, either use statics which are `Sync`:
+
+```
+use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT};
+
+static A: AtomicUsize = ATOMIC_USIZE_INIT;
+static B: &'static AtomicUsize = &A; // ok!
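+
+// For illustration only (an assumed usage, not required by the fix): `B` is a
+// reference to `A`, so an update made through `B` is observable through `A`.
+use std::sync::atomic::Ordering;
+
+B.fetch_add(1, Ordering::SeqCst);
+assert_eq!(A.load(Ordering::SeqCst), 1);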
+```
+
+You can also have this error while using a cell type:
+
+```compile_fail
+#![feature(const_fn)]
+
+use std::cell::Cell;
+
+const A: Cell<usize> = Cell::new(1);
+const B: &'static Cell<usize> = &A;
+// error: cannot borrow a constant which contains interior mutability, create
+// a static instead
+
+// or:
+struct C { a: Cell<usize> }
+
+const D: C = C { a: Cell::new(1) };
+const E: &'static Cell<usize> = &D.a; // error
+
+// or:
+const F: &'static C = &D; // error
+```
+
+This is because cell types do operations that are not thread-safe. Due to this,
+they don't implement `Sync` and thus can't be placed in statics. In this
+case, `StaticMutex` would work just fine, but it isn't stable yet:
+https://doc.rust-lang.org/nightly/std/sync/struct.StaticMutex.html
+
+However, if you still wish to use these types, you can achieve this with an
+unsafe wrapper:
+
+```
+#![feature(const_fn)]
+
+use std::cell::Cell;
+use std::marker::Sync;
+
+struct NotThreadSafe<T> {
+    value: Cell<T>,
+}
+
+unsafe impl<T> Sync for NotThreadSafe<T> {}
+
+static A: NotThreadSafe<usize> = NotThreadSafe { value : Cell::new(1) };
+static B: &'static NotThreadSafe<usize> = &A; // ok!
+```
+
+Remember this solution is unsafe! You will have to ensure that accesses to the
+cell are synchronized.
+"##,
+
+E0493: r##"
+A value of a type that has a destructor was used to initialize a constant or a
+static, which is not allowed. Erroneous code example:
+
+```compile_fail
+struct Foo {
+    a: u32
+}
+
+impl Drop for Foo {
+    fn drop(&mut self) {}
+}
+
+const F : Foo = Foo { a : 0 };
+// error: constants are not allowed to have destructors
+static S : Foo = Foo { a : 0 };
+// error: destructors in statics are an unstable feature
+```
+
+To solve this issue, use a type that does not have a destructor (that is, a
+type that does not implement `Drop`).
+"##,
+
+E0494: r##"
+A reference to the interior of a static was assigned to another const/static.
+Erroneous code example:
+
+```compile_fail
+struct Foo {
+    a: u32
+}
+
+static S : Foo = Foo { a : 0 };
+static A : &'static u32 = &S.a;
+// error: cannot refer to the interior of another static, use a
+// constant instead
+```
+
+The "base" variable has to be a const if you want another static/const variable
+to refer to one of its fields. Example:
+
+```
+struct Foo {
+    a: u32
+}
+
+const S : Foo = Foo { a : 0 };
+static A : &'static u32 = &S.a; // ok!
+```
+"##,
+
+}
+
+register_diagnostics! {
+    E0526, // shuffle indices are not constant
+}
diff --git a/src/librustc_mir/graphviz.rs b/src/librustc_mir/graphviz.rs
index 069bd7826b..6a34d9ff0b 100644
--- a/src/librustc_mir/graphviz.rs
+++ b/src/librustc_mir/graphviz.rs
@@ -10,13 +10,15 @@
 use dot;
 use rustc::mir::repr::*;
-use rustc::ty;
+use rustc::ty::{self, TyCtxt};
 use std::fmt::Debug;
 use std::io::{self, Write};
 use syntax::ast::NodeId;
 
 /// Write a graphviz DOT graph of a list of MIRs.
-pub fn write_mir_graphviz<'a, 't, W, I>(tcx: &ty::TyCtxt<'t>, iter: I, w: &mut W) -> io::Result<()>
+pub fn write_mir_graphviz<'a, 'b, 'tcx, W, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
+                                              iter: I, w: &mut W)
+                                              -> io::Result<()>
     where W: Write, I: Iterator)>
 {
     for (&nodeid, mir) in iter {
         writeln!(w, "digraph Mir_{} {{", nodeid)?;
@@ -116,8 +118,11 @@ fn write_edges<W: Write>(source: BasicBlock, mir: &Mir, w: &mut W) -> io::Result<()>
 /// Write the graphviz DOT label for the overall graph. This is essentially a block of text that
 /// will appear below the graph, showing the type of the `fn` this MIR represents and the types of
 /// all the variables and temporaries.
-fn write_graph_label(tcx: &ty::TyCtxt, nid: NodeId, mir: &Mir, w: &mut W) --> io::Result<()> { +fn write_graph_label<'a, 'tcx, W: Write>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + nid: NodeId, + mir: &Mir, + w: &mut W) + -> io::Result<()> { write!(w, " label= Mirror<'tcx> for &'tcx hir::Block { type Output = Block<'tcx>; - fn make_mirror<'a>(self, cx: &mut Cx<'a, 'tcx>) -> Block<'tcx> { + fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Block<'tcx> { // We have to eagerly translate the "spine" of the statements // in order to get the lexical scoping correctly. let stmts = mirror_stmts(cx, self.id, &*self.stmts); @@ -31,10 +31,10 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Block { } } -fn mirror_stmts<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, - block_id: ast::NodeId, - stmts: &'tcx [hir::Stmt]) - -> Vec> +fn mirror_stmts<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, + block_id: ast::NodeId, + stmts: &'tcx [hir::Stmt]) + -> Vec> { let mut result = vec![]; for (index, stmt) in stmts.iter().enumerate() { @@ -74,7 +74,9 @@ fn mirror_stmts<'a,'tcx:'a>(cx: &mut Cx<'a,'tcx>, return result; } -pub fn to_expr_ref<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, block: &'tcx hir::Block) -> ExprRef<'tcx> { +pub fn to_expr_ref<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, + block: &'tcx hir::Block) + -> ExprRef<'tcx> { let block_ty = cx.tcx.node_id_to_type(block.id); let temp_lifetime = cx.tcx.region_maps.temporary_scope(block.id); let expr = Expr { diff --git a/src/librustc_mir/hair/cx/expr.rs b/src/librustc_mir/hair/cx/expr.rs index 12dcb32da3..1e7164a62c 100644 --- a/src/librustc_mir/hair/cx/expr.rs +++ b/src/librustc_mir/hair/cx/expr.rs @@ -21,6 +21,7 @@ use rustc_const_eval as const_eval; use rustc::middle::region::CodeExtent; use rustc::hir::pat_util; use rustc::ty::{self, VariantDef, Ty}; +use rustc::ty::cast::CastKind as TyCastKind; use rustc::mir::repr::*; use rustc::hir; use syntax::ptr::P; @@ -28,399 +29,13 @@ use syntax::ptr::P; impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr { type Output = Expr<'tcx>; - fn make_mirror<'a>(self, cx: &mut Cx<'a, 'tcx>) -> Expr<'tcx> { - debug!("Expr::make_mirror(): id={}, span={:?}", self.id, self.span); - - let expr_ty = cx.tcx.expr_ty(self); // note: no adjustments (yet)! + fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Expr<'tcx> { let temp_lifetime = cx.tcx.region_maps.temporary_scope(self.id); let expr_extent = cx.tcx.region_maps.node_extent(self.id); - let kind = match self.node { - // Here comes the interesting stuff: - hir::ExprMethodCall(_, _, ref args) => { - // Rewrite a.b(c) into UFCS form like Trait::b(a, c) - let expr = method_callee(cx, self, ty::MethodCall::expr(self.id)); - let args = args.iter() - .map(|e| e.to_ref()) - .collect(); - ExprKind::Call { - ty: expr.ty, - fun: expr.to_ref(), - args: args, - } - } - - hir::ExprCall(ref fun, ref args) => { - if cx.tcx.is_method_call(self.id) { - // The callee is something implementing Fn, FnMut, or FnOnce. - // Find the actual method implementation being called and - // build the appropriate UFCS call expression with the - // callee-object as self parameter. 
- - // rewrite f(u, v) into FnOnce::call_once(f, (u, v)) - - let method = method_callee(cx, self, ty::MethodCall::expr(self.id)); - - let sig = match method.ty.sty { - ty::TyFnDef(_, _, fn_ty) => &fn_ty.sig, - _ => span_bug!(self.span, "type of method is not an fn") - }; - - let sig = cx.tcx.no_late_bound_regions(sig).unwrap_or_else(|| { - span_bug!(self.span, "method call has late-bound regions") - }); - - assert_eq!(sig.inputs.len(), 2); - - let tupled_args = Expr { - ty: sig.inputs[1], - temp_lifetime: temp_lifetime, - span: self.span, - kind: ExprKind::Tuple { - fields: args.iter().map(ToRef::to_ref).collect() - } - }; - - ExprKind::Call { - ty: method.ty, - fun: method.to_ref(), - args: vec![fun.to_ref(), tupled_args.to_ref()] - } - } else { - let adt_data = if let hir::ExprPath(..) = fun.node { - // Tuple-like ADTs are represented as ExprCall. We convert them here. - expr_ty.ty_adt_def().and_then(|adt_def|{ - match cx.tcx.def_map.borrow()[&fun.id].full_def() { - Def::Variant(_, variant_id) => { - Some((adt_def, adt_def.variant_index_with_id(variant_id))) - }, - Def::Struct(..) => { - Some((adt_def, 0)) - }, - _ => None - } - }) - } else { None }; - if let Some((adt_def, index)) = adt_data { - let substs = cx.tcx.mk_substs(cx.tcx.node_id_item_substs(fun.id).substs); - let field_refs = args.iter().enumerate().map(|(idx, e)| FieldExprRef { - name: Field::new(idx), - expr: e.to_ref() - }).collect(); - ExprKind::Adt { - adt_def: adt_def, - substs: substs, - variant_index: index, - fields: field_refs, - base: None - } - } else { - ExprKind::Call { - ty: cx.tcx.node_id_to_type(fun.id), - fun: fun.to_ref(), - args: args.to_ref(), - } - } - } - } - - hir::ExprAddrOf(mutbl, ref expr) => { - let region = match expr_ty.sty { - ty::TyRef(r, _) => r, - _ => span_bug!(expr.span, "type of & not region"), - }; - ExprKind::Borrow { - region: *region, - borrow_kind: to_borrow_kind(mutbl), - arg: expr.to_ref(), - } - } - - hir::ExprBlock(ref blk) => { - ExprKind::Block { body: &blk } - } - - hir::ExprAssign(ref lhs, ref rhs) => { - ExprKind::Assign { - lhs: lhs.to_ref(), - rhs: rhs.to_ref(), - } - } - - hir::ExprAssignOp(op, ref lhs, ref rhs) => { - if cx.tcx.is_method_call(self.id) { - let pass_args = if op.node.is_by_value() { - PassArgs::ByValue - } else { - PassArgs::ByRef - }; - overloaded_operator(cx, self, ty::MethodCall::expr(self.id), - pass_args, lhs.to_ref(), vec![rhs]) - } else { - ExprKind::AssignOp { - op: bin_op(op.node), - lhs: lhs.to_ref(), - rhs: rhs.to_ref(), - } - } - } - - hir::ExprLit(..) 
=> ExprKind::Literal { - literal: cx.const_eval_literal(self) - }, - - hir::ExprBinary(op, ref lhs, ref rhs) => { - if cx.tcx.is_method_call(self.id) { - let pass_args = if op.node.is_by_value() { - PassArgs::ByValue - } else { - PassArgs::ByRef - }; - overloaded_operator(cx, self, ty::MethodCall::expr(self.id), - pass_args, lhs.to_ref(), vec![rhs]) - } else { - // FIXME overflow - match op.node { - hir::BinOp_::BiAnd => { - ExprKind::LogicalOp { - op: LogicalOp::And, - lhs: lhs.to_ref(), - rhs: rhs.to_ref(), - } - } - hir::BinOp_::BiOr => { - ExprKind::LogicalOp { - op: LogicalOp::Or, - lhs: lhs.to_ref(), - rhs: rhs.to_ref(), - } - } - _ => { - let op = bin_op(op.node); - ExprKind::Binary { - op: op, - lhs: lhs.to_ref(), - rhs: rhs.to_ref(), - } - } - } - } - } - - hir::ExprIndex(ref lhs, ref index) => { - if cx.tcx.is_method_call(self.id) { - overloaded_lvalue(cx, self, ty::MethodCall::expr(self.id), - PassArgs::ByValue, lhs.to_ref(), vec![index]) - } else { - ExprKind::Index { - lhs: lhs.to_ref(), - index: index.to_ref(), - } - } - } - - hir::ExprUnary(hir::UnOp::UnDeref, ref arg) => { - if cx.tcx.is_method_call(self.id) { - overloaded_lvalue(cx, self, ty::MethodCall::expr(self.id), - PassArgs::ByValue, arg.to_ref(), vec![]) - } else { - ExprKind::Deref { arg: arg.to_ref() } - } - } - - hir::ExprUnary(hir::UnOp::UnNot, ref arg) => { - if cx.tcx.is_method_call(self.id) { - overloaded_operator(cx, self, ty::MethodCall::expr(self.id), - PassArgs::ByValue, arg.to_ref(), vec![]) - } else { - ExprKind::Unary { - op: UnOp::Not, - arg: arg.to_ref(), - } - } - } - - hir::ExprUnary(hir::UnOp::UnNeg, ref arg) => { - if cx.tcx.is_method_call(self.id) { - overloaded_operator(cx, self, ty::MethodCall::expr(self.id), - PassArgs::ByValue, arg.to_ref(), vec![]) - } else { - // FIXME runtime-overflow - if let hir::ExprLit(_) = arg.node { - ExprKind::Literal { - literal: cx.const_eval_literal(self), - } - } else { - ExprKind::Unary { - op: UnOp::Neg, - arg: arg.to_ref(), - } - } - } - } - - hir::ExprStruct(_, ref fields, ref base) => { - match expr_ty.sty { - ty::TyStruct(adt, substs) => { - let field_refs = field_refs(&adt.variants[0], fields); - ExprKind::Adt { - adt_def: adt, - variant_index: 0, - substs: substs, - fields: field_refs, - base: base.as_ref().map(|base| { - FruInfo { - base: base.to_ref(), - field_types: cx.tcx.tables - .borrow() - .fru_field_types[&self.id] - .clone() - } - }) - } - } - ty::TyEnum(adt, substs) => { - match cx.tcx.def_map.borrow()[&self.id].full_def() { - Def::Variant(enum_id, variant_id) => { - debug_assert!(adt.did == enum_id); - assert!(base.is_none()); - - let index = adt.variant_index_with_id(variant_id); - let field_refs = field_refs(&adt.variants[index], fields); - ExprKind::Adt { - adt_def: adt, - variant_index: index, - substs: substs, - fields: field_refs, - base: None - } - } - ref def => { - span_bug!( - self.span, - "unexpected def: {:?}", - def); - } - } - } - _ => { - span_bug!( - self.span, - "unexpected type for struct literal: {:?}", - expr_ty); - } - } - } - - hir::ExprClosure(..) 
=> { - let closure_ty = cx.tcx.expr_ty(self); - let (def_id, substs) = match closure_ty.sty { - ty::TyClosure(def_id, ref substs) => (def_id, substs), - _ => { - span_bug!(self.span, - "closure expr w/o closure type: {:?}", - closure_ty); - } - }; - let upvars = cx.tcx.with_freevars(self.id, |freevars| { - freevars.iter() - .enumerate() - .map(|(i, fv)| capture_freevar(cx, self, fv, substs.upvar_tys[i])) - .collect() - }); - ExprKind::Closure { - closure_id: def_id, - substs: &substs, - upvars: upvars, - } - } - - hir::ExprPath(..) => { - convert_path_expr(cx, self) - } - - hir::ExprInlineAsm(ref asm, ref outputs, ref inputs) => { - ExprKind::InlineAsm { - asm: asm, - outputs: outputs.to_ref(), - inputs: inputs.to_ref() - } - } - - // Now comes the rote stuff: - - hir::ExprRepeat(ref v, ref c) => ExprKind::Repeat { - value: v.to_ref(), - count: TypedConstVal { - ty: cx.tcx.expr_ty(c), - span: c.span, - value: match const_eval::eval_const_expr(cx.tcx, c) { - ConstVal::Integral(ConstInt::Usize(u)) => u, - other => bug!("constant evaluation of repeat count yielded {:?}", other), - }, - } - }, - hir::ExprRet(ref v) => - ExprKind::Return { value: v.to_ref() }, - hir::ExprBreak(label) => - ExprKind::Break { label: label.map(|_| loop_label(cx, self)) }, - hir::ExprAgain(label) => - ExprKind::Continue { label: label.map(|_| loop_label(cx, self)) }, - hir::ExprMatch(ref discr, ref arms, _) => - ExprKind::Match { discriminant: discr.to_ref(), - arms: arms.iter().map(|a| convert_arm(cx, a)).collect() }, - hir::ExprIf(ref cond, ref then, ref otherwise) => - ExprKind::If { condition: cond.to_ref(), - then: block::to_expr_ref(cx, then), - otherwise: otherwise.to_ref() }, - hir::ExprWhile(ref cond, ref body, _) => - ExprKind::Loop { condition: Some(cond.to_ref()), - body: block::to_expr_ref(cx, body) }, - hir::ExprLoop(ref body, _) => - ExprKind::Loop { condition: None, - body: block::to_expr_ref(cx, body) }, - hir::ExprField(ref source, name) => { - let index = match cx.tcx.expr_ty_adjusted(source).sty { - ty::TyStruct(adt_def, _) => - adt_def.variants[0].index_of_field_named(name.node), - ref ty => - span_bug!( - self.span, - "field of non-struct: {:?}", - ty), - }; - let index = index.unwrap_or_else(|| { - span_bug!( - self.span, - "no index found for field `{}`", - name.node) - }); - ExprKind::Field { lhs: source.to_ref(), name: Field::new(index) } - } - hir::ExprTupField(ref source, index) => - ExprKind::Field { lhs: source.to_ref(), - name: Field::new(index.node as usize) }, - hir::ExprCast(ref source, _) => - ExprKind::Cast { source: source.to_ref() }, - hir::ExprType(ref source, _) => - return source.make_mirror(cx), - hir::ExprBox(ref value) => - ExprKind::Box { - value: value.to_ref(), - value_extents: cx.tcx.region_maps.node_extent(value.id) - }, - hir::ExprVec(ref fields) => - ExprKind::Vec { fields: fields.to_ref() }, - hir::ExprTup(ref fields) => - ExprKind::Tuple { fields: fields.to_ref() }, - }; + debug!("Expr::make_mirror(): id={}, span={:?}", self.id, self.span); - let mut expr = Expr { - temp_lifetime: temp_lifetime, - ty: expr_ty, - span: self.span, - kind: kind, - }; + let mut expr = make_mirror_unadjusted(cx, self); debug!("make_mirror: unadjusted-expr={:?} applying adjustments={:?}", expr, cx.tcx.tables.borrow().adjustments.get(&self.id)); @@ -587,10 +202,430 @@ impl<'tcx> Mirror<'tcx> for &'tcx hir::Expr { } } -fn method_callee<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, - expr: &hir::Expr, - method_call: ty::MethodCall) - -> Expr<'tcx> { +fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: 
&mut Cx<'a, 'gcx, 'tcx>, + expr: &'tcx hir::Expr) + -> Expr<'tcx> { + let expr_ty = cx.tcx.expr_ty(expr); + let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id); + + let kind = match expr.node { + // Here comes the interesting stuff: + hir::ExprMethodCall(_, _, ref args) => { + // Rewrite a.b(c) into UFCS form like Trait::b(a, c) + let expr = method_callee(cx, expr, ty::MethodCall::expr(expr.id)); + let args = args.iter() + .map(|e| e.to_ref()) + .collect(); + ExprKind::Call { + ty: expr.ty, + fun: expr.to_ref(), + args: args, + } + } + + hir::ExprCall(ref fun, ref args) => { + if cx.tcx.is_method_call(expr.id) { + // The callee is something implementing Fn, FnMut, or FnOnce. + // Find the actual method implementation being called and + // build the appropriate UFCS call expression with the + // callee-object as expr parameter. + + // rewrite f(u, v) into FnOnce::call_once(f, (u, v)) + + let method = method_callee(cx, expr, ty::MethodCall::expr(expr.id)); + + let sig = match method.ty.sty { + ty::TyFnDef(_, _, fn_ty) => &fn_ty.sig, + _ => span_bug!(expr.span, "type of method is not an fn") + }; + + let sig = cx.tcx.no_late_bound_regions(sig).unwrap_or_else(|| { + span_bug!(expr.span, "method call has late-bound regions") + }); + + assert_eq!(sig.inputs.len(), 2); + + let tupled_args = Expr { + ty: sig.inputs[1], + temp_lifetime: temp_lifetime, + span: expr.span, + kind: ExprKind::Tuple { + fields: args.iter().map(ToRef::to_ref).collect() + } + }; + + ExprKind::Call { + ty: method.ty, + fun: method.to_ref(), + args: vec![fun.to_ref(), tupled_args.to_ref()] + } + } else { + let adt_data = if let hir::ExprPath(..) = fun.node { + // Tuple-like ADTs are represented as ExprCall. We convert them here. + expr_ty.ty_adt_def().and_then(|adt_def|{ + match cx.tcx.def_map.borrow()[&fun.id].full_def() { + Def::Variant(_, variant_id) => { + Some((adt_def, adt_def.variant_index_with_id(variant_id))) + }, + Def::Struct(..) => { + Some((adt_def, 0)) + }, + _ => None + } + }) + } else { None }; + if let Some((adt_def, index)) = adt_data { + let substs = cx.tcx.node_id_item_substs(fun.id).substs; + let field_refs = args.iter().enumerate().map(|(idx, e)| FieldExprRef { + name: Field::new(idx), + expr: e.to_ref() + }).collect(); + ExprKind::Adt { + adt_def: adt_def, + substs: substs, + variant_index: index, + fields: field_refs, + base: None + } + } else { + ExprKind::Call { + ty: cx.tcx.node_id_to_type(fun.id), + fun: fun.to_ref(), + args: args.to_ref(), + } + } + } + } + + hir::ExprAddrOf(mutbl, ref expr) => { + let region = match expr_ty.sty { + ty::TyRef(r, _) => r, + _ => span_bug!(expr.span, "type of & not region"), + }; + ExprKind::Borrow { + region: *region, + borrow_kind: to_borrow_kind(mutbl), + arg: expr.to_ref(), + } + } + + hir::ExprBlock(ref blk) => { + ExprKind::Block { body: &blk } + } + + hir::ExprAssign(ref lhs, ref rhs) => { + ExprKind::Assign { + lhs: lhs.to_ref(), + rhs: rhs.to_ref(), + } + } + + hir::ExprAssignOp(op, ref lhs, ref rhs) => { + if cx.tcx.is_method_call(expr.id) { + let pass_args = if op.node.is_by_value() { + PassArgs::ByValue + } else { + PassArgs::ByRef + }; + overloaded_operator(cx, expr, ty::MethodCall::expr(expr.id), + pass_args, lhs.to_ref(), vec![rhs]) + } else { + ExprKind::AssignOp { + op: bin_op(op.node), + lhs: lhs.to_ref(), + rhs: rhs.to_ref(), + } + } + } + + hir::ExprLit(..) 
=> ExprKind::Literal { + literal: cx.const_eval_literal(expr) + }, + + hir::ExprBinary(op, ref lhs, ref rhs) => { + if cx.tcx.is_method_call(expr.id) { + let pass_args = if op.node.is_by_value() { + PassArgs::ByValue + } else { + PassArgs::ByRef + }; + overloaded_operator(cx, expr, ty::MethodCall::expr(expr.id), + pass_args, lhs.to_ref(), vec![rhs]) + } else { + // FIXME overflow + match (op.node, cx.constness) { + // FIXME(eddyb) use logical ops in constants when + // they can handle that kind of control-flow. + (hir::BinOp_::BiAnd, hir::Constness::Const) => { + ExprKind::Binary { + op: BinOp::BitAnd, + lhs: lhs.to_ref(), + rhs: rhs.to_ref(), + } + } + (hir::BinOp_::BiOr, hir::Constness::Const) => { + ExprKind::Binary { + op: BinOp::BitOr, + lhs: lhs.to_ref(), + rhs: rhs.to_ref(), + } + } + + (hir::BinOp_::BiAnd, hir::Constness::NotConst) => { + ExprKind::LogicalOp { + op: LogicalOp::And, + lhs: lhs.to_ref(), + rhs: rhs.to_ref(), + } + } + (hir::BinOp_::BiOr, hir::Constness::NotConst) => { + ExprKind::LogicalOp { + op: LogicalOp::Or, + lhs: lhs.to_ref(), + rhs: rhs.to_ref(), + } + } + + _ => { + let op = bin_op(op.node); + ExprKind::Binary { + op: op, + lhs: lhs.to_ref(), + rhs: rhs.to_ref(), + } + } + } + } + } + + hir::ExprIndex(ref lhs, ref index) => { + if cx.tcx.is_method_call(expr.id) { + overloaded_lvalue(cx, expr, ty::MethodCall::expr(expr.id), + PassArgs::ByValue, lhs.to_ref(), vec![index]) + } else { + ExprKind::Index { + lhs: lhs.to_ref(), + index: index.to_ref(), + } + } + } + + hir::ExprUnary(hir::UnOp::UnDeref, ref arg) => { + if cx.tcx.is_method_call(expr.id) { + overloaded_lvalue(cx, expr, ty::MethodCall::expr(expr.id), + PassArgs::ByValue, arg.to_ref(), vec![]) + } else { + ExprKind::Deref { arg: arg.to_ref() } + } + } + + hir::ExprUnary(hir::UnOp::UnNot, ref arg) => { + if cx.tcx.is_method_call(expr.id) { + overloaded_operator(cx, expr, ty::MethodCall::expr(expr.id), + PassArgs::ByValue, arg.to_ref(), vec![]) + } else { + ExprKind::Unary { + op: UnOp::Not, + arg: arg.to_ref(), + } + } + } + + hir::ExprUnary(hir::UnOp::UnNeg, ref arg) => { + if cx.tcx.is_method_call(expr.id) { + overloaded_operator(cx, expr, ty::MethodCall::expr(expr.id), + PassArgs::ByValue, arg.to_ref(), vec![]) + } else { + // FIXME runtime-overflow + if let hir::ExprLit(_) = arg.node { + ExprKind::Literal { + literal: cx.const_eval_literal(expr), + } + } else { + ExprKind::Unary { + op: UnOp::Neg, + arg: arg.to_ref(), + } + } + } + } + + hir::ExprStruct(_, ref fields, ref base) => { + match expr_ty.sty { + ty::TyStruct(adt, substs) => { + let field_refs = field_refs(&adt.variants[0], fields); + ExprKind::Adt { + adt_def: adt, + variant_index: 0, + substs: substs, + fields: field_refs, + base: base.as_ref().map(|base| { + FruInfo { + base: base.to_ref(), + field_types: cx.tcx.tables + .borrow() + .fru_field_types[&expr.id] + .clone() + } + }) + } + } + ty::TyEnum(adt, substs) => { + match cx.tcx.def_map.borrow()[&expr.id].full_def() { + Def::Variant(enum_id, variant_id) => { + debug_assert!(adt.did == enum_id); + assert!(base.is_none()); + + let index = adt.variant_index_with_id(variant_id); + let field_refs = field_refs(&adt.variants[index], fields); + ExprKind::Adt { + adt_def: adt, + variant_index: index, + substs: substs, + fields: field_refs, + base: None + } + } + ref def => { + span_bug!( + expr.span, + "unexpected def: {:?}", + def); + } + } + } + _ => { + span_bug!( + expr.span, + "unexpected type for struct literal: {:?}", + expr_ty); + } + } + } + + hir::ExprClosure(..) 
=> { + let closure_ty = cx.tcx.expr_ty(expr); + let (def_id, substs) = match closure_ty.sty { + ty::TyClosure(def_id, substs) => (def_id, substs), + _ => { + span_bug!(expr.span, + "closure expr w/o closure type: {:?}", + closure_ty); + } + }; + let upvars = cx.tcx.with_freevars(expr.id, |freevars| { + freevars.iter() + .enumerate() + .map(|(i, fv)| capture_freevar(cx, expr, fv, substs.upvar_tys[i])) + .collect() + }); + ExprKind::Closure { + closure_id: def_id, + substs: substs, + upvars: upvars, + } + } + + hir::ExprPath(..) => { + convert_path_expr(cx, expr) + } + + hir::ExprInlineAsm(ref asm, ref outputs, ref inputs) => { + ExprKind::InlineAsm { + asm: asm, + outputs: outputs.to_ref(), + inputs: inputs.to_ref() + } + } + + // Now comes the rote stuff: + + hir::ExprRepeat(ref v, ref c) => ExprKind::Repeat { + value: v.to_ref(), + count: TypedConstVal { + ty: cx.tcx.expr_ty(c), + span: c.span, + value: match const_eval::eval_const_expr(cx.tcx.global_tcx(), c) { + ConstVal::Integral(ConstInt::Usize(u)) => u, + other => bug!("constant evaluation of repeat count yielded {:?}", other), + }, + } + }, + hir::ExprRet(ref v) => + ExprKind::Return { value: v.to_ref() }, + hir::ExprBreak(label) => + ExprKind::Break { label: label.map(|_| loop_label(cx, expr)) }, + hir::ExprAgain(label) => + ExprKind::Continue { label: label.map(|_| loop_label(cx, expr)) }, + hir::ExprMatch(ref discr, ref arms, _) => + ExprKind::Match { discriminant: discr.to_ref(), + arms: arms.iter().map(|a| convert_arm(cx, a)).collect() }, + hir::ExprIf(ref cond, ref then, ref otherwise) => + ExprKind::If { condition: cond.to_ref(), + then: block::to_expr_ref(cx, then), + otherwise: otherwise.to_ref() }, + hir::ExprWhile(ref cond, ref body, _) => + ExprKind::Loop { condition: Some(cond.to_ref()), + body: block::to_expr_ref(cx, body) }, + hir::ExprLoop(ref body, _) => + ExprKind::Loop { condition: None, + body: block::to_expr_ref(cx, body) }, + hir::ExprField(ref source, name) => { + let index = match cx.tcx.expr_ty_adjusted(source).sty { + ty::TyStruct(adt_def, _) => + adt_def.variants[0].index_of_field_named(name.node), + ref ty => + span_bug!( + expr.span, + "field of non-struct: {:?}", + ty), + }; + let index = index.unwrap_or_else(|| { + span_bug!( + expr.span, + "no index found for field `{}`", + name.node) + }); + ExprKind::Field { lhs: source.to_ref(), name: Field::new(index) } + } + hir::ExprTupField(ref source, index) => + ExprKind::Field { lhs: source.to_ref(), + name: Field::new(index.node as usize) }, + hir::ExprCast(ref source, _) => { + // Check to see if this cast is a "coercion cast", where the cast is actually done + // using a coercion (or is a no-op). + if let Some(&TyCastKind::CoercionCast) = cx.tcx.cast_kinds.borrow().get(&source.id) { + // Skip the actual cast itexpr, as it's now a no-op. 
+ return source.make_mirror(cx); + } else { + ExprKind::Cast { source: source.to_ref() } + } + } + hir::ExprType(ref source, _) => + return source.make_mirror(cx), + hir::ExprBox(ref value) => + ExprKind::Box { + value: value.to_ref(), + value_extents: cx.tcx.region_maps.node_extent(value.id) + }, + hir::ExprVec(ref fields) => + ExprKind::Vec { fields: fields.to_ref() }, + hir::ExprTup(ref fields) => + ExprKind::Tuple { fields: fields.to_ref() }, + }; + + Expr { + temp_lifetime: temp_lifetime, + ty: expr_ty, + span: expr.span, + kind: kind, + } +} + +fn method_callee<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, + expr: &hir::Expr, + method_call: ty::MethodCall) + -> Expr<'tcx> { let tables = cx.tcx.tables.borrow(); let callee = &tables.method_map[&method_call]; let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id); @@ -614,7 +649,8 @@ fn to_borrow_kind(m: hir::Mutability) -> BorrowKind { } } -fn convert_arm<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, arm: &'tcx hir::Arm) -> Arm<'tcx> { +fn convert_arm<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, + arm: &'tcx hir::Arm) -> Arm<'tcx> { let mut map; let opt_map = if arm.pats.len() == 1 { None @@ -633,8 +669,10 @@ fn convert_arm<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, arm: &'tcx hir::Arm) -> Arm< } } -fn convert_path_expr<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, expr: &'tcx hir::Expr) -> ExprKind<'tcx> { - let substs = cx.tcx.mk_substs(cx.tcx.node_id_item_substs(expr.id).substs); +fn convert_path_expr<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, + expr: &'tcx hir::Expr) + -> ExprKind<'tcx> { + let substs = cx.tcx.node_id_item_substs(expr.id).substs; // Otherwise there may be def_map borrow conflicts let def = cx.tcx.def_map.borrow()[&expr.id].full_def(); let def_id = match def { @@ -676,7 +714,8 @@ fn convert_path_expr<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, expr: &'tcx hir::Expr) Def::Const(def_id) | Def::AssociatedConst(def_id) => { let substs = Some(cx.tcx.node_id_item_substs(expr.id).substs); - if let Some((e, _)) = const_eval::lookup_const_by_id(cx.tcx, def_id, substs) { + let tcx = cx.tcx.global_tcx(); + if let Some((e, _)) = const_eval::lookup_const_by_id(tcx, def_id, substs) { // FIXME ConstVal can't be yet used with adjustments, as they would be lost. 
if !cx.tcx.tables.borrow().adjustments.contains_key(&e.id) { if let Some(v) = cx.try_const_eval_literal(e) { @@ -705,10 +744,10 @@ fn convert_path_expr<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, expr: &'tcx hir::Expr) } } -fn convert_var<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, - expr: &'tcx hir::Expr, - def: Def) - -> ExprKind<'tcx> { +fn convert_var<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, + expr: &'tcx hir::Expr, + def: Def) + -> ExprKind<'tcx> { let temp_lifetime = cx.tcx.region_maps.temporary_scope(expr.id); match def { @@ -725,7 +764,7 @@ fn convert_var<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, let body_id = match cx.tcx.map.find(closure_expr_id) { Some(map::NodeExpr(expr)) => { match expr.node { - hir::ExprClosure(_, _, ref body) => body.id, + hir::ExprClosure(_, _, ref body, _) => body.id, _ => { span_bug!(expr.span, "closure expr is not a closure expr"); } @@ -870,13 +909,13 @@ enum PassArgs { ByRef, } -fn overloaded_operator<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, - expr: &'tcx hir::Expr, - method_call: ty::MethodCall, - pass_args: PassArgs, - receiver: ExprRef<'tcx>, - args: Vec<&'tcx P>) - -> ExprKind<'tcx> { +fn overloaded_operator<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, + expr: &'tcx hir::Expr, + method_call: ty::MethodCall, + pass_args: PassArgs, + receiver: ExprRef<'tcx>, + args: Vec<&'tcx P>) + -> ExprKind<'tcx> { // the receiver has all the adjustments that are needed, so we can // just push a reference to it let mut argrefs = vec![receiver]; @@ -921,13 +960,13 @@ fn overloaded_operator<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, } } -fn overloaded_lvalue<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, - expr: &'tcx hir::Expr, - method_call: ty::MethodCall, - pass_args: PassArgs, - receiver: ExprRef<'tcx>, - args: Vec<&'tcx P>) - -> ExprKind<'tcx> { +fn overloaded_lvalue<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, + expr: &'tcx hir::Expr, + method_call: ty::MethodCall, + pass_args: PassArgs, + receiver: ExprRef<'tcx>, + args: Vec<&'tcx P>) + -> ExprKind<'tcx> { // For an overloaded *x or x[y] expression of type T, the method // call returns an &T and we must add the deref so that the types // line up (this is because `*x` and `x[y]` represent lvalues): @@ -956,11 +995,11 @@ fn overloaded_lvalue<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, ExprKind::Deref { arg: ref_expr.to_ref() } } -fn capture_freevar<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, - closure_expr: &'tcx hir::Expr, - freevar: &hir::Freevar, - freevar_ty: Ty<'tcx>) - -> ExprRef<'tcx> { +fn capture_freevar<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, + closure_expr: &'tcx hir::Expr, + freevar: &hir::Freevar, + freevar_ty: Ty<'tcx>) + -> ExprRef<'tcx> { let id_var = freevar.def.var_id(); let upvar_id = ty::UpvarId { var_id: id_var, @@ -997,7 +1036,8 @@ fn capture_freevar<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, } } -fn loop_label<'a, 'tcx: 'a>(cx: &mut Cx<'a, 'tcx>, expr: &'tcx hir::Expr) -> CodeExtent { +fn loop_label<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>, + expr: &'tcx hir::Expr) -> CodeExtent { match cx.tcx.def_map.borrow().get(&expr.id).map(|d| d.full_def()) { Some(Def::Label(loop_id)) => cx.tcx.region_maps.node_extent(loop_id), d => { diff --git a/src/librustc_mir/hair/cx/mod.rs b/src/librustc_mir/hair/cx/mod.rs index c3a5fbd967..fad6cfb7ae 100644 --- a/src/librustc_mir/hair/cx/mod.rs +++ b/src/librustc_mir/hair/cx/mod.rs @@ -29,21 +29,25 @@ use rustc::hir; use rustc_const_math::{ConstInt, ConstUsize}; #[derive(Copy, Clone)] -pub struct Cx<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, - infcx: &'a InferCtxt<'a, 'tcx>, +pub struct Cx<'a, 'gcx: 
'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx>, + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, + constness: hir::Constness } -impl<'a,'tcx> Cx<'a,'tcx> { - pub fn new(infcx: &'a InferCtxt<'a, 'tcx>) -> Cx<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { + pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, + constness: hir::Constness) + -> Cx<'a, 'gcx, 'tcx> { Cx { tcx: infcx.tcx, infcx: infcx, + constness: constness, } } } -impl<'a,'tcx:'a> Cx<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> { /// Normalizes `ast` into the appropriate `mirror` type. pub fn mirror>(&mut self, ast: M) -> M::Output { ast.make_mirror(self) @@ -81,12 +85,15 @@ impl<'a,'tcx:'a> Cx<'a, 'tcx> { } pub fn const_eval_literal(&mut self, e: &hir::Expr) -> Literal<'tcx> { - Literal::Value { value: const_eval::eval_const_expr(self.tcx, e) } + Literal::Value { + value: const_eval::eval_const_expr(self.tcx.global_tcx(), e) + } } pub fn try_const_eval_literal(&mut self, e: &hir::Expr) -> Option> { let hint = const_eval::EvalHint::ExprTypeChecked; - const_eval::eval_const_expr_partial(self.tcx, e, hint, None).ok().and_then(|v| { + let tcx = self.tcx.global_tcx(); + const_eval::eval_const_expr_partial(tcx, e, hint, None).ok().and_then(|v| { match v { // All of these contain local IDs, unsuitable for storing in MIR. ConstVal::Struct(_) | ConstVal::Tuple(_) | @@ -126,21 +133,25 @@ impl<'a,'tcx:'a> Cx<'a, 'tcx> { bug!("found no method `{}` in `{:?}`", method_name, trait_def_id); } - pub fn num_variants(&mut self, adt_def: ty::AdtDef<'tcx>) -> usize { + pub fn num_variants(&mut self, adt_def: ty::AdtDef) -> usize { adt_def.variants.len() } - pub fn all_fields(&mut self, adt_def: ty::AdtDef<'tcx>, variant_index: usize) -> Vec { + pub fn all_fields(&mut self, adt_def: ty::AdtDef, variant_index: usize) -> Vec { (0..adt_def.variants[variant_index].fields.len()) .map(Field::new) .collect() } pub fn needs_drop(&mut self, ty: Ty<'tcx>) -> bool { + let ty = self.tcx.lift_to_global(&ty).unwrap_or_else(|| { + bug!("MIR: Cx::needs_drop({}) got \ + type with inference types/regions", ty); + }); self.tcx.type_needs_drop_given_env(ty, &self.infcx.parameter_environment) } - pub fn tcx(&self) -> &'a TyCtxt<'tcx> { + pub fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.tcx } } diff --git a/src/librustc_mir/hair/cx/pattern.rs b/src/librustc_mir/hair/cx/pattern.rs index 990ba9e786..0118b97dd7 100644 --- a/src/librustc_mir/hair/cx/pattern.rs +++ b/src/librustc_mir/hair/cx/pattern.rs @@ -34,12 +34,12 @@ use syntax::ptr::P; /// _ => { ... 
} /// } /// ``` -struct PatCx<'patcx, 'cx: 'patcx, 'tcx: 'cx> { - cx: &'patcx mut Cx<'cx, 'tcx>, +struct PatCx<'patcx, 'cx: 'patcx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { + cx: &'patcx mut Cx<'cx, 'gcx, 'tcx>, binding_map: Option<&'patcx FnvHashMap>, } -impl<'cx, 'tcx> Cx<'cx, 'tcx> { +impl<'cx, 'gcx, 'tcx> Cx<'cx, 'gcx, 'tcx> { pub fn irrefutable_pat(&mut self, pat: &hir::Pat) -> Pattern<'tcx> { PatCx::new(self, None).to_pattern(pat) } @@ -52,10 +52,10 @@ impl<'cx, 'tcx> Cx<'cx, 'tcx> { } } -impl<'patcx, 'cx, 'tcx> PatCx<'patcx, 'cx, 'tcx> { - fn new(cx: &'patcx mut Cx<'cx, 'tcx>, +impl<'patcx, 'cx, 'gcx, 'tcx> PatCx<'patcx, 'cx, 'gcx, 'tcx> { + fn new(cx: &'patcx mut Cx<'cx, 'gcx, 'tcx>, binding_map: Option<&'patcx FnvHashMap>) - -> PatCx<'patcx, 'cx, 'tcx> { + -> PatCx<'patcx, 'cx, 'gcx, 'tcx> { PatCx { cx: cx, binding_map: binding_map, @@ -69,14 +69,14 @@ impl<'patcx, 'cx, 'tcx> PatCx<'patcx, 'cx, 'tcx> { PatKind::Wild => PatternKind::Wild, PatKind::Lit(ref value) => { - let value = const_eval::eval_const_expr(self.cx.tcx, value); + let value = const_eval::eval_const_expr(self.cx.tcx.global_tcx(), value); PatternKind::Constant { value: value } } PatKind::Range(ref lo, ref hi) => { - let lo = const_eval::eval_const_expr(self.cx.tcx, lo); + let lo = const_eval::eval_const_expr(self.cx.tcx.global_tcx(), lo); let lo = Literal::Value { value: lo }; - let hi = const_eval::eval_const_expr(self.cx.tcx, hi); + let hi = const_eval::eval_const_expr(self.cx.tcx.global_tcx(), hi); let hi = Literal::Value { value: hi }; PatternKind::Range { lo: lo, hi: hi } }, @@ -87,10 +87,11 @@ impl<'patcx, 'cx, 'tcx> PatCx<'patcx, 'cx, 'tcx> { let def = self.cx.tcx.def_map.borrow().get(&pat.id).unwrap().full_def(); match def { Def::Const(def_id) | Def::AssociatedConst(def_id) => { + let tcx = self.cx.tcx.global_tcx(); let substs = Some(self.cx.tcx.node_id_item_substs(pat.id).substs); - match const_eval::lookup_const_by_id(self.cx.tcx, def_id, substs) { + match const_eval::lookup_const_by_id(tcx, def_id, substs) { Some((const_expr, _const_ty)) => { - match const_eval::const_expr_to_pat(self.cx.tcx, + match const_eval::const_expr_to_pat(tcx, const_expr, pat.id, pat.span) { @@ -165,7 +166,7 @@ impl<'patcx, 'cx, 'tcx> PatCx<'patcx, 'cx, 'tcx> { { let id = match self.binding_map { None => pat.id, - Some(ref map) => map[&ident.node.name], + Some(ref map) => map[&ident.node], }; let var_ty = self.cx.tcx.node_id_to_type(pat.id); let region = match var_ty.sty { @@ -196,7 +197,7 @@ impl<'patcx, 'cx, 'tcx> PatCx<'patcx, 'cx, 'tcx> { PatternKind::Binding { mutability: mutability, mode: mode, - name: ident.node.name, + name: ident.node, var: id, ty: var_ty, subpattern: self.to_opt_pattern(sub), diff --git a/src/librustc_mir/hair/mod.rs b/src/librustc_mir/hair/mod.rs index 51f2cc2687..020fbb6fcd 100644 --- a/src/librustc_mir/hair/mod.rs +++ b/src/librustc_mir/hair/mod.rs @@ -222,7 +222,7 @@ pub enum ExprKind<'tcx> { }, Closure { closure_id: DefId, - substs: &'tcx ClosureSubsts<'tcx>, + substs: ClosureSubsts<'tcx>, upvars: Vec>, }, Literal { @@ -358,13 +358,13 @@ pub struct FieldPattern<'tcx> { pub trait Mirror<'tcx> { type Output; - fn make_mirror<'a>(self, cx: &mut Cx<'a, 'tcx>) -> Self::Output; + fn make_mirror<'a, 'gcx>(self, cx: &mut Cx<'a, 'gcx, 'tcx>) -> Self::Output; } impl<'tcx> Mirror<'tcx> for Expr<'tcx> { type Output = Expr<'tcx>; - fn make_mirror<'a>(self, _: &mut Cx<'a, 'tcx>) -> Expr<'tcx> { + fn make_mirror<'a, 'gcx>(self, _: &mut Cx<'a, 'gcx, 'tcx>) -> Expr<'tcx> { self } } @@ -372,7 +372,7 @@ impl<'tcx> Mirror<'tcx> for 
Expr<'tcx> { impl<'tcx> Mirror<'tcx> for ExprRef<'tcx> { type Output = Expr<'tcx>; - fn make_mirror<'a>(self, hir: &mut Cx<'a, 'tcx>) -> Expr<'tcx> { + fn make_mirror<'a, 'gcx>(self, hir: &mut Cx<'a, 'gcx, 'tcx>) -> Expr<'tcx> { match self { ExprRef::Hair(h) => h.make_mirror(hir), ExprRef::Mirror(m) => *m, @@ -383,7 +383,7 @@ impl<'tcx> Mirror<'tcx> for ExprRef<'tcx> { impl<'tcx> Mirror<'tcx> for Stmt<'tcx> { type Output = Stmt<'tcx>; - fn make_mirror<'a>(self, _: &mut Cx<'a, 'tcx>) -> Stmt<'tcx> { + fn make_mirror<'a, 'gcx>(self, _: &mut Cx<'a, 'gcx, 'tcx>) -> Stmt<'tcx> { self } } @@ -391,7 +391,7 @@ impl<'tcx> Mirror<'tcx> for Stmt<'tcx> { impl<'tcx> Mirror<'tcx> for StmtRef<'tcx> { type Output = Stmt<'tcx>; - fn make_mirror<'a>(self, _: &mut Cx<'a,'tcx>) -> Stmt<'tcx> { + fn make_mirror<'a, 'gcx>(self, _: &mut Cx<'a, 'gcx, 'tcx>) -> Stmt<'tcx> { match self { StmtRef::Mirror(m) => *m, } @@ -401,7 +401,7 @@ impl<'tcx> Mirror<'tcx> for StmtRef<'tcx> { impl<'tcx> Mirror<'tcx> for Block<'tcx> { type Output = Block<'tcx>; - fn make_mirror<'a>(self, _: &mut Cx<'a, 'tcx>) -> Block<'tcx> { + fn make_mirror<'a, 'gcx>(self, _: &mut Cx<'a, 'gcx, 'tcx>) -> Block<'tcx> { self } } diff --git a/src/librustc_mir/lib.rs b/src/librustc_mir/lib.rs index ced73f34e0..79d11e78bd 100644 --- a/src/librustc_mir/lib.rs +++ b/src/librustc_mir/lib.rs @@ -20,7 +20,9 @@ Rust MIR: a lowered representation of Rust. Also: an experiment! #![cfg_attr(not(stage0), deny(warnings))] #![unstable(feature = "rustc_private", issue = "27812")] +#![feature(associated_consts)] #![feature(box_patterns)] +#![feature(rustc_diagnostic_macros)] #![feature(rustc_private)] #![feature(staged_api)] #![feature(question_mark)] @@ -31,10 +33,16 @@ extern crate graphviz as dot; extern crate rustc; extern crate rustc_data_structures; extern crate rustc_back; +#[macro_use] +#[no_link] +extern crate rustc_bitflags; +#[macro_use] extern crate syntax; extern crate rustc_const_math; extern crate rustc_const_eval; +pub mod diagnostics; + pub mod build; pub mod graphviz; mod hair; diff --git a/src/librustc_mir/mir_map.rs b/src/librustc_mir/mir_map.rs index 40334f652e..73cfdeda74 100644 --- a/src/librustc_mir/mir_map.rs +++ b/src/librustc_mir/mir_map.rs @@ -16,33 +16,34 @@ //! - `#[rustc_mir(graphviz="file.gv")]` //! - `#[rustc_mir(pretty="file.mir")]` -extern crate syntax; - use build; use rustc::dep_graph::DepNode; use rustc::mir::repr::Mir; +use rustc::mir::transform::MirSource; +use rustc::mir::visit::MutVisitor; use pretty; use hair::cx::Cx; use rustc::mir::mir_map::MirMap; -use rustc::infer; +use rustc::infer::InferCtxtBuilder; use rustc::traits::ProjectionMode; use rustc::ty::{self, Ty, TyCtxt}; -use rustc::util::common::ErrorReported; +use rustc::ty::subst::Substs; use rustc::util::nodemap::NodeMap; use rustc::hir; -use rustc::hir::intravisit::{self, Visitor}; -use syntax::abi::Abi; +use rustc::hir::intravisit::{self, FnKind, Visitor}; +use rustc::hir::map::blocks::FnLikeNode; use syntax::ast; -use syntax::attr::AttrMetaMethods; use syntax::codemap::Span; -pub fn build_mir_for_crate<'tcx>(tcx: &TyCtxt<'tcx>) -> MirMap<'tcx> { +use std::mem; + +pub fn build_mir_for_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> MirMap<'tcx> { let mut map = MirMap { map: NodeMap(), }; { - let mut dump = OuterDump { + let mut dump = BuildMir { tcx: tcx, map: &mut map, }; @@ -51,163 +52,223 @@ pub fn build_mir_for_crate<'tcx>(tcx: &TyCtxt<'tcx>) -> MirMap<'tcx> { map } +/// A pass to lift all the types and substitutions in a Mir +/// to the global tcx. 
Sadly, we don't have a "folder" that +/// can change 'tcx so we have to transmute afterwards. +struct GlobalizeMir<'a, 'gcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'gcx>, + span: Span +} + +impl<'a, 'gcx: 'tcx, 'tcx> MutVisitor<'tcx> for GlobalizeMir<'a, 'gcx> { + fn visit_ty(&mut self, ty: &mut Ty<'tcx>) { + if let Some(lifted) = self.tcx.lift(ty) { + *ty = lifted; + } else { + span_bug!(self.span, + "found type `{:?}` with inference types/regions in MIR", + ty); + } + } + + fn visit_substs(&mut self, substs: &mut &'tcx Substs<'tcx>) { + if let Some(lifted) = self.tcx.lift(substs) { + *substs = lifted; + } else { + span_bug!(self.span, + "found substs `{:?}` with inference types/regions in MIR", + substs); + } + } +} + /////////////////////////////////////////////////////////////////////////// -// OuterDump -- walks a crate, looking for fn items and methods to build MIR from +// BuildMir -- walks a crate, looking for fn items and methods to build MIR from -struct OuterDump<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, +struct BuildMir<'a, 'tcx: 'a> { + tcx: TyCtxt<'a, 'tcx, 'tcx>, map: &'a mut MirMap<'tcx>, } -impl<'a, 'tcx> OuterDump<'a, 'tcx> { - fn visit_mir(&mut self, attributes: &'a [ast::Attribute], mut walk_op: OP) - where OP: for<'m> FnMut(&mut InnerDump<'a, 'm, 'tcx>) - { - let mut closure_dump = InnerDump { - tcx: self.tcx, - attr: None, - map: &mut *self.map, - }; - for attr in attributes { - if attr.check_name("rustc_mir") { - closure_dump.attr = Some(attr); - } +/// Helper type of a temporary returned by BuildMir::cx(...). +/// Necessary because we can't write the following bound: +/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Cx<'b, 'gcx, 'tcx>). +struct CxBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + src: MirSource, + infcx: InferCtxtBuilder<'a, 'gcx, 'tcx>, + map: &'a mut MirMap<'gcx>, +} + +impl<'a, 'gcx, 'tcx> BuildMir<'a, 'gcx> { + fn cx<'b>(&'b mut self, src: MirSource) -> CxBuilder<'b, 'gcx, 'tcx> { + let param_env = ty::ParameterEnvironment::for_item(self.tcx, src.item_id()); + CxBuilder { + src: src, + infcx: self.tcx.infer_ctxt(None, Some(param_env), ProjectionMode::AnyFinal), + map: self.map } - walk_op(&mut closure_dump); } } +impl<'a, 'gcx, 'tcx> CxBuilder<'a, 'gcx, 'tcx> { + fn build(&'tcx mut self, f: F) + where F: for<'b> FnOnce(Cx<'b, 'gcx, 'tcx>) -> (Mir<'tcx>, build::ScopeAuxiliaryVec) + { + let src = self.src; + let mir = self.infcx.enter(|infcx| { + let constness = match src { + MirSource::Const(_) | + MirSource::Static(..) => hir::Constness::Const, + MirSource::Fn(id) => { + let fn_like = FnLikeNode::from_node(infcx.tcx.map.get(id)); + match fn_like.map(|f| f.kind()) { + Some(FnKind::ItemFn(_, _, _, c, _, _, _)) => c, + Some(FnKind::Method(_, m, _, _)) => m.constness, + _ => hir::Constness::NotConst + } + } + MirSource::Promoted(..) => bug!() + }; + let (mut mir, scope_auxiliary) = f(Cx::new(&infcx, constness)); -impl<'a, 'tcx> Visitor<'tcx> for OuterDump<'a, 'tcx> { - fn visit_item(&mut self, item: &'tcx hir::Item) { - self.visit_mir(&item.attrs, |c| intravisit::walk_item(c, item)); - intravisit::walk_item(self, item); + // Convert the Mir to global types. 
+ let mut globalizer = GlobalizeMir { + tcx: infcx.tcx.global_tcx(), + span: mir.span + }; + globalizer.visit_mir(&mut mir); + let mir = unsafe { + mem::transmute::>(mir) + }; + + pretty::dump_mir(infcx.tcx.global_tcx(), "mir_map", &0, + src, &mir, Some(&scope_auxiliary)); + + mir + }); + + assert!(self.map.map.insert(src.item_id(), mir).is_none()) } +} - fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) { - match trait_item.node { - hir::MethodTraitItem(_, Some(_)) => { - self.visit_mir(&trait_item.attrs, |c| intravisit::walk_trait_item(c, trait_item)); - } - hir::MethodTraitItem(_, None) | - hir::ConstTraitItem(..) | - hir::TypeTraitItem(..) => {} +impl<'a, 'gcx> BuildMir<'a, 'gcx> { + fn build_const_integer(&mut self, expr: &'gcx hir::Expr) { + // FIXME(eddyb) Closures should have separate + // function definition IDs and expression IDs. + // Type-checking should not let closures get + // this far in an integer constant position. + if let hir::ExprClosure(..) = expr.node { + return; } - intravisit::walk_trait_item(self, trait_item); + self.cx(MirSource::Const(expr.id)).build(|cx| { + build::construct_const(cx, expr.id, expr) + }); } +} - fn visit_impl_item(&mut self, impl_item: &'tcx hir::ImplItem) { - match impl_item.node { - hir::ImplItemKind::Method(..) => { - self.visit_mir(&impl_item.attrs, |c| intravisit::walk_impl_item(c, impl_item)); +impl<'a, 'tcx> Visitor<'tcx> for BuildMir<'a, 'tcx> { + // Const and static items. + fn visit_item(&mut self, item: &'tcx hir::Item) { + match item.node { + hir::ItemConst(_, ref expr) => { + self.cx(MirSource::Const(item.id)).build(|cx| { + build::construct_const(cx, item.id, expr) + }); } - hir::ImplItemKind::Const(..) | hir::ImplItemKind::Type(..) => {} + hir::ItemStatic(_, m, ref expr) => { + self.cx(MirSource::Static(item.id, m)).build(|cx| { + build::construct_const(cx, item.id, expr) + }); + } + _ => {} } - intravisit::walk_impl_item(self, impl_item); + intravisit::walk_item(self, item); } -} -/////////////////////////////////////////////////////////////////////////// -// InnerDump -- dumps MIR for a single fn and its contained closures + // Trait associated const defaults. + fn visit_trait_item(&mut self, item: &'tcx hir::TraitItem) { + if let hir::ConstTraitItem(_, Some(ref expr)) = item.node { + self.cx(MirSource::Const(item.id)).build(|cx| { + build::construct_const(cx, item.id, expr) + }); + } + intravisit::walk_trait_item(self, item); + } -struct InnerDump<'a, 'm, 'tcx: 'a + 'm> { - tcx: &'a TyCtxt<'tcx>, - map: &'m mut MirMap<'tcx>, - attr: Option<&'a ast::Attribute>, -} + // Impl associated const. + fn visit_impl_item(&mut self, item: &'tcx hir::ImplItem) { + if let hir::ImplItemKind::Const(_, ref expr) = item.node { + self.cx(MirSource::Const(item.id)).build(|cx| { + build::construct_const(cx, item.id, expr) + }); + } + intravisit::walk_impl_item(self, item); + } -impl<'a, 'm, 'tcx> Visitor<'tcx> for InnerDump<'a,'m,'tcx> { - fn visit_trait_item(&mut self, _: &'tcx hir::TraitItem) { - // ignore methods; the outer dump will call us for them independently + // Repeat counts, i.e. [expr; constant]. + fn visit_expr(&mut self, expr: &'tcx hir::Expr) { + if let hir::ExprRepeat(_, ref count) = expr.node { + self.build_const_integer(count); + } + intravisit::walk_expr(self, expr); } - fn visit_impl_item(&mut self, _: &'tcx hir::ImplItem) { - // ignore methods; the outer dump will call us for them independently + // Array lengths, i.e. [T; constant]. 
+ fn visit_ty(&mut self, ty: &'tcx hir::Ty) { + if let hir::TyFixedLengthVec(_, ref length) = ty.node { + self.build_const_integer(length); + } + intravisit::walk_ty(self, ty); + } + + // Enum variant discriminant values. + fn visit_variant(&mut self, v: &'tcx hir::Variant, + g: &'tcx hir::Generics, item_id: ast::NodeId) { + if let Some(ref expr) = v.node.disr_expr { + self.build_const_integer(expr); + } + intravisit::walk_variant(self, v, g, item_id); } fn visit_fn(&mut self, - fk: intravisit::FnKind<'tcx>, + fk: FnKind<'tcx>, decl: &'tcx hir::FnDecl, body: &'tcx hir::Block, span: Span, id: ast::NodeId) { - let implicit_arg_tys = if let intravisit::FnKind::Closure(..) = fk { - vec![closure_self_ty(&self.tcx, id, body.id)] + // fetch the fully liberated fn signature (that is, all bound + // types/lifetimes replaced) + let fn_sig = match self.tcx.tables.borrow().liberated_fn_sigs.get(&id) { + Some(f) => f.clone(), + None => { + span_bug!(span, "no liberated fn sig for {:?}", id); + } + }; + + let implicit_argument = if let FnKind::Closure(..) = fk { + Some((closure_self_ty(self.tcx, id, body.id), None)) } else { - vec![] + None }; - let param_env = ty::ParameterEnvironment::for_item(self.tcx, id); - let infcx = infer::new_infer_ctxt(self.tcx, - &self.tcx.tables, - Some(param_env), - ProjectionMode::AnyFinal); + let explicit_arguments = + decl.inputs + .iter() + .enumerate() + .map(|(index, arg)| { + (fn_sig.inputs[index], Some(&*arg.pat)) + }); - match build_mir(Cx::new(&infcx), implicit_arg_tys, id, span, decl, body) { - Ok(mir) => assert!(self.map.map.insert(id, mir).is_none()), - Err(ErrorReported) => {} - } + let arguments = implicit_argument.into_iter().chain(explicit_arguments); + self.cx(MirSource::Fn(id)).build(|cx| { + build::construct_fn(cx, id, arguments, fn_sig.output, body) + }); intravisit::walk_fn(self, fk, decl, body, span); } } -fn build_mir<'a,'tcx:'a>(cx: Cx<'a,'tcx>, - implicit_arg_tys: Vec>, - fn_id: ast::NodeId, - span: Span, - decl: &'tcx hir::FnDecl, - body: &'tcx hir::Block) - -> Result, ErrorReported> { - // fetch the fully liberated fn signature (that is, all bound - // types/lifetimes replaced) - let fn_sig = match cx.tcx().tables.borrow().liberated_fn_sigs.get(&fn_id) { - Some(f) => f.clone(), - None => { - span_bug!(span, "no liberated fn sig for {:?}", fn_id); - } - }; - - let arguments = - decl.inputs - .iter() - .enumerate() - .map(|(index, arg)| { - (fn_sig.inputs[index], &*arg.pat) - }) - .collect(); - - let (mut mir, scope_auxiliary) = - build::construct(cx, - span, - fn_id, - body.id, - implicit_arg_tys, - arguments, - fn_sig.output, - body); - - match cx.tcx().node_id_to_type(fn_id).sty { - ty::TyFnDef(_, _, f) if f.abi == Abi::RustCall => { - // RustCall pseudo-ABI untuples the last argument. - if let Some(arg_decl) = mir.arg_decls.last_mut() { - arg_decl.spread = true; - } - } - _ => {} - } - - pretty::dump_mir(cx.tcx(), - "mir_map", - &0, - fn_id, - &mir, - Some(&scope_auxiliary)); - - Ok(mir) -} - -fn closure_self_ty<'a, 'tcx>(tcx: &TyCtxt<'tcx>, +fn closure_self_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, closure_expr_id: ast::NodeId, body_id: ast::NodeId) -> Ty<'tcx> { diff --git a/src/librustc_mir/pretty.rs b/src/librustc_mir/pretty.rs index d0c66b340d..fb29cbd5fa 100644 --- a/src/librustc_mir/pretty.rs +++ b/src/librustc_mir/pretty.rs @@ -9,7 +9,9 @@ // except according to those terms. 
use build::{Location, ScopeAuxiliaryVec}; +use rustc::hir; use rustc::mir::repr::*; +use rustc::mir::transform::MirSource; use rustc::ty::{self, TyCtxt}; use rustc_data_structures::fnv::FnvHashMap; use std::fmt::Display; @@ -19,11 +21,13 @@ use syntax::ast::NodeId; use syntax::codemap::Span; const INDENT: &'static str = " "; +/// Alignment for lining up comments following MIR statements +const ALIGN: usize = 40; /// If the session is properly configured, dumps a human-readable /// representation of the mir into: /// -/// ``` +/// ```text /// rustc.node.. /// ``` /// @@ -34,16 +38,17 @@ const INDENT: &'static str = " "; /// - `substring1&substring2,...` -- `&`-separated list of substrings /// that can appear in the pass-name or the `item_path_str` for the given /// node-id. If any one of the substrings match, the data is dumped out. -pub fn dump_mir<'a, 'tcx>(tcx: &TyCtxt<'tcx>, +pub fn dump_mir<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, pass_name: &str, disambiguator: &Display, - node_id: NodeId, + src: MirSource, mir: &Mir<'tcx>, auxiliary: Option<&ScopeAuxiliaryVec>) { let filters = match tcx.sess.opts.debugging_opts.dump_mir { None => return, Some(ref filters) => filters, }; + let node_id = src.item_id(); let node_path = tcx.item_path_str(tcx.map.local_def_id(node_id)); let is_matched = filters.split("&") @@ -64,20 +69,34 @@ pub fn dump_mir<'a, 'tcx>(tcx: &TyCtxt<'tcx>, try!(writeln!(file, "// pass_name = {}", pass_name)); try!(writeln!(file, "// disambiguator = {}", disambiguator)); try!(writeln!(file, "")); - try!(write_mir_fn(tcx, node_id, mir, &mut file, auxiliary)); + try!(write_mir_fn(tcx, src, mir, &mut file, auxiliary)); Ok(()) }); } /// Write out a human-readable textual representation for the given MIR. -pub fn write_mir_pretty<'a, 'tcx, I>(tcx: &TyCtxt<'tcx>, - iter: I, - w: &mut Write) - -> io::Result<()> +pub fn write_mir_pretty<'a, 'b, 'tcx, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>, + iter: I, + w: &mut Write) + -> io::Result<()> where I: Iterator)>, 'tcx: 'a { - for (&node_id, mir) in iter { - write_mir_fn(tcx, node_id, mir, w, None)?; + let mut first = true; + for (&id, mir) in iter { + if first { + first = false; + } else { + // Put empty lines between all items + writeln!(w, "")?; + } + + let src = MirSource::from_node(tcx, id); + write_mir_fn(tcx, src, mir, w, None)?; + + for (i, mir) in mir.promoted.iter().enumerate() { + writeln!(w, "")?; + write_mir_fn(tcx, MirSource::Promoted(id, i), mir, w, None)?; + } } Ok(()) } @@ -87,12 +106,12 @@ enum Annotation { ExitScope(ScopeId), } -pub fn write_mir_fn<'tcx>(tcx: &TyCtxt<'tcx>, - node_id: NodeId, - mir: &Mir<'tcx>, - w: &mut Write, - auxiliary: Option<&ScopeAuxiliaryVec>) - -> io::Result<()> { +pub fn write_mir_fn<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + src: MirSource, + mir: &Mir<'tcx>, + w: &mut Write, + auxiliary: Option<&ScopeAuxiliaryVec>) + -> io::Result<()> { // compute scope/entry exit annotations let mut annotations = FnvHashMap(); if let Some(auxiliary) = auxiliary { @@ -111,7 +130,7 @@ pub fn write_mir_fn<'tcx>(tcx: &TyCtxt<'tcx>, } } - write_mir_intro(tcx, node_id, mir, w)?; + write_mir_intro(tcx, src, mir, w)?; for block in mir.all_basic_blocks() { write_basic_block(tcx, block, mir, w, &annotations)?; } @@ -123,14 +142,17 @@ pub fn write_mir_fn<'tcx>(tcx: &TyCtxt<'tcx>, .or_insert(vec![]) .push(ScopeId::new(index)); } - write_scope_tree(tcx, mir, auxiliary, &scope_tree, w, None, 1)?; + + writeln!(w, "{}scope tree:", INDENT)?; + write_scope_tree(tcx, mir, auxiliary, &scope_tree, w, None, 1, false)?; + writeln!(w, "")?; 
writeln!(w, "}}")?; Ok(()) } /// Write out a human-readable textual representation for the given basic block. -fn write_basic_block(tcx: &TyCtxt, +fn write_basic_block(tcx: TyCtxt, block: BasicBlock, mir: &Mir, w: &mut Write, @@ -139,7 +161,7 @@ fn write_basic_block(tcx: &TyCtxt, let data = mir.basic_block_data(block); // Basic block label at the top. - writeln!(w, "\n{}{:?}: {{", INDENT, block)?; + writeln!(w, "{}{:?}: {{", INDENT, block)?; // List of statements in the middle. let mut current_location = Location { block: block, statement_index: 0 }; @@ -157,48 +179,61 @@ fn write_basic_block(tcx: &TyCtxt, } } - writeln!(w, "{0}{0}{1:?}; // {2}", - INDENT, - statement, + let indented_mir = format!("{0}{0}{1:?};", INDENT, statement); + writeln!(w, "{0:1$} // {2}", + indented_mir, + ALIGN, comment(tcx, statement.scope, statement.span))?; current_location.statement_index += 1; } // Terminator at the bottom. - writeln!(w, "{0}{0}{1:?}; // {2}", - INDENT, - data.terminator().kind, + let indented_terminator = format!("{0}{0}{1:?};", INDENT, data.terminator().kind); + writeln!(w, "{0:1$} // {2}", + indented_terminator, + ALIGN, comment(tcx, data.terminator().scope, data.terminator().span))?; - writeln!(w, "{}}}", INDENT) + writeln!(w, "{}}}\n", INDENT) } -fn comment(tcx: &TyCtxt, - scope: ScopeId, - span: Span) - -> String { - format!("Scope({}) at {}", scope.index(), tcx.sess.codemap().span_to_string(span)) +fn comment(tcx: TyCtxt, scope: ScopeId, span: Span) -> String { + format!("scope {} at {}", scope.index(), tcx.sess.codemap().span_to_string(span)) } -fn write_scope_tree(tcx: &TyCtxt, +fn write_scope_tree(tcx: TyCtxt, mir: &Mir, auxiliary: Option<&ScopeAuxiliaryVec>, scope_tree: &FnvHashMap, Vec>, w: &mut Write, parent: Option, - depth: usize) + depth: usize, + same_line: bool) -> io::Result<()> { - for &child in scope_tree.get(&parent).unwrap_or(&vec![]) { - let indent = depth * INDENT.len(); + let indent = if same_line { + 0 + } else { + depth * INDENT.len() + }; + + let children = match scope_tree.get(&parent) { + Some(childs) => childs, + None => return Ok(()), + }; + + for (index, &child) in children.iter().enumerate() { + if index == 0 && same_line { + // We know we're going to output a scope, so prefix it with a space to separate it from + // the previous scopes on this line + write!(w, " ")?; + } + let data = &mir.scopes[child]; assert_eq!(data.parent_scope, parent); - writeln!(w, "{0:1$}Scope({2}) {{", "", indent, child.index())?; + write!(w, "{0:1$}{2}", "", indent, child.index())?; let indent = indent + INDENT.len(); - if let Some(parent) = parent { - writeln!(w, "{0:1$}Parent: Scope({2})", "", indent, parent.index())?; - } if let Some(auxiliary) = auxiliary { let extent = auxiliary[child].extent; @@ -206,43 +241,88 @@ fn write_scope_tree(tcx: &TyCtxt, writeln!(w, "{0:1$}Extent: {2:?}", "", indent, data)?; } - write_scope_tree(tcx, mir, auxiliary, scope_tree, w, - Some(child), depth + 1)?; + let child_count = scope_tree.get(&Some(child)).map(Vec::len).unwrap_or(0); + if child_count < 2 { + // Skip the braces when there's no or only a single subscope + write_scope_tree(tcx, mir, auxiliary, scope_tree, w, + Some(child), depth, true)?; + } else { + // 2 or more child scopes? Put them in braces and on new lines. 
+ writeln!(w, " {{")?; + write_scope_tree(tcx, mir, auxiliary, scope_tree, w, + Some(child), depth + 1, false)?; + + write!(w, "\n{0:1$}}}", "", depth * INDENT.len())?; + } + + if !same_line && index + 1 < children.len() { + writeln!(w, "")?; + } } + Ok(()) } /// Write out a human-readable textual representation of the MIR's `fn` type and the types of its /// local variables (both user-defined bindings and compiler temporaries). -fn write_mir_intro(tcx: &TyCtxt, nid: NodeId, mir: &Mir, w: &mut Write) - -> io::Result<()> { - write!(w, "fn {}(", tcx.node_path_str(nid))?; - - // fn argument types. - for (i, arg) in mir.arg_decls.iter().enumerate() { - if i > 0 { - write!(w, ", ")?; - } - write!(w, "{:?}: {}", Lvalue::Arg(i as u32), arg.ty)?; +fn write_mir_intro<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + src: MirSource, + mir: &Mir, + w: &mut Write) + -> io::Result<()> { + match src { + MirSource::Fn(_) => write!(w, "fn")?, + MirSource::Const(_) => write!(w, "const")?, + MirSource::Static(_, hir::MutImmutable) => write!(w, "static")?, + MirSource::Static(_, hir::MutMutable) => write!(w, "static mut")?, + MirSource::Promoted(_, i) => write!(w, "promoted{} in", i)? } - write!(w, ") -> ")?; + write!(w, " {}", tcx.node_path_str(src.item_id()))?; + + if let MirSource::Fn(_) = src { + write!(w, "(")?; + + // fn argument types. + for (i, arg) in mir.arg_decls.iter().enumerate() { + if i > 0 { + write!(w, ", ")?; + } + write!(w, "{:?}: {}", Lvalue::Arg(i as u32), arg.ty)?; + } - // fn return type. - match mir.return_ty { - ty::FnOutput::FnConverging(ty) => write!(w, "{}", ty)?, - ty::FnOutput::FnDiverging => write!(w, "!")?, + write!(w, ") -> ")?; + + // fn return type. + match mir.return_ty { + ty::FnOutput::FnConverging(ty) => write!(w, "{}", ty)?, + ty::FnOutput::FnDiverging => write!(w, "!")?, + } + } else { + assert!(mir.arg_decls.is_empty()); + write!(w, ": {} =", mir.return_ty.unwrap())?; } writeln!(w, " {{")?; // User variable types (including the user's name in a comment). for (i, var) in mir.var_decls.iter().enumerate() { - write!(w, "{}let ", INDENT)?; - if var.mutability == Mutability::Mut { - write!(w, "mut ")?; - } - writeln!(w, "{:?}: {}; // {}", Lvalue::Var(i as u32), var.ty, var.name)?; + let mut_str = if var.mutability == Mutability::Mut { + "mut " + } else { + "" + }; + + let indented_var = format!("{}let {}{:?}: {};", + INDENT, + mut_str, + Lvalue::Var(i as u32), + var.ty); + writeln!(w, "{0:1$} // \"{2}\" in {3}", + indented_var, + ALIGN, + var.name, + comment(tcx, var.scope, var.span))?; } // Compiler-introduced temporary types. @@ -250,5 +330,10 @@ fn write_mir_intro(tcx: &TyCtxt, nid: NodeId, mir: &Mir, w: &mut Write) writeln!(w, "{}let mut {:?}: {};", INDENT, Lvalue::Temp(i as u32), temp.ty)?; } + // Wrote any declaration? Add an empty line before the first block is printed. + if !mir.var_decls.is_empty() || !mir.temp_decls.is_empty() { + writeln!(w, "")?; + } + Ok(()) } diff --git a/src/librustc_mir/transform/break_cleanup_edges.rs b/src/librustc_mir/transform/break_cleanup_edges.rs new file mode 100644 index 0000000000..0eb6223a71 --- /dev/null +++ b/src/librustc_mir/transform/break_cleanup_edges.rs @@ -0,0 +1,111 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
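Before the body of the new pass, a small sketch (editor's illustration, not part of the new file) of the property it is built around: an edge is critical when it is neither the only edge leaving its source block nor the only edge entering its target. Assuming plain per-block successor and predecessor counts, the test could be written as:

    // Hypothetical helper; the pass below keeps these counts in plain Vec<u32>s.
    fn is_critical_edge(succ_count: &[u32], pred_count: &[u32], from: usize, to: usize) -> bool {
        succ_count[from] > 1 && pred_count[to] > 1
    }

The pass below only needs the predecessor half of this test: an invoke-style terminator (a Call or Drop with a cleanup edge) always ends its block, so every successor of it that has more than one predecessor gets a fresh Goto block inserted in front of it.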
+ +use rustc::ty::TyCtxt; +use rustc::mir::repr::*; +use rustc::mir::transform::{MirPass, MirSource, Pass}; + +use rustc_data_structures::bitvec::BitVector; + +use pretty; + +use traversal; + +pub struct BreakCleanupEdges; + +/** + * Breaks outgoing critical edges for call terminators in the MIR. + * + * Critical edges are edges that are neither the only edge leaving a + * block, nor the only edge entering one. + * + * When you want something to happen "along" an edge, you can either + * do at the end of the predecessor block, or at the start of the + * successor block. Critical edges have to be broken in order to prevent + * "edge actions" from affecting other edges. We need this for calls that are + * translated to LLVM invoke instructions, because invoke is a block terminator + * in LLVM so we can't insert any code to handle the call's result into the + * block that performs the call. + * + * This function will break those edges by inserting new blocks along them. + * + * NOTE: Simplify CFG will happily undo most of the work this pass does. + * + */ + +impl<'tcx> MirPass<'tcx> for BreakCleanupEdges { + fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mut Mir<'tcx>) { + let mut pred_count = vec![0u32; mir.basic_blocks.len()]; + + // Build the precedecessor map for the MIR + for (_, data) in traversal::preorder(mir) { + if let Some(ref term) = data.terminator { + for &tgt in term.successors().iter() { + pred_count[tgt.index()] += 1; + } + } + } + + let cleanup_map : BitVector = mir.basic_blocks + .iter().map(|bb| bb.is_cleanup).collect(); + + // We need a place to store the new blocks generated + let mut new_blocks = Vec::new(); + + let bbs = mir.all_basic_blocks(); + let cur_len = mir.basic_blocks.len(); + + for &bb in &bbs { + let data = mir.basic_block_data_mut(bb); + + if let Some(ref mut term) = data.terminator { + if term_is_invoke(term) { + let term_span = term.span; + let term_scope = term.scope; + let succs = term.successors_mut(); + for tgt in succs { + let num_preds = pred_count[tgt.index()]; + if num_preds > 1 { + // It's a critical edge, break it + let goto = Terminator { + span: term_span, + scope: term_scope, + kind: TerminatorKind::Goto { target: *tgt } + }; + let mut data = BasicBlockData::new(Some(goto)); + data.is_cleanup = cleanup_map.contains(tgt.index()); + + // Get the index it will be when inserted into the MIR + let idx = cur_len + new_blocks.len(); + new_blocks.push(data); + *tgt = BasicBlock::new(idx); + } + } + } + } + } + + pretty::dump_mir(tcx, "break_cleanup_edges", &0, src, mir, None); + debug!("Broke {} N edges", new_blocks.len()); + + mir.basic_blocks.extend_from_slice(&new_blocks); + } +} + +impl Pass for BreakCleanupEdges {} + +// Returns true if the terminator is a call that would use an invoke in LLVM. +fn term_is_invoke(term: &Terminator) -> bool { + match term.kind { + TerminatorKind::Call { cleanup: Some(_), .. } | + TerminatorKind::Drop { unwind: Some(_), .. } => true, + _ => false + } +} diff --git a/src/librustc_mir/transform/break_critical_edges.rs b/src/librustc_mir/transform/break_critical_edges.rs deleted file mode 100644 index e1fb5dfd43..0000000000 --- a/src/librustc_mir/transform/break_critical_edges.rs +++ /dev/null @@ -1,117 +0,0 @@ -// Copyright 2016 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. 
This file may not be copied, modified, or distributed -// except according to those terms. - -use rustc::ty::TyCtxt; -use rustc::mir::repr::*; -use rustc::mir::transform::{MirPass, Pass}; -use syntax::ast::NodeId; - -use rustc_data_structures::bitvec::BitVector; - -use traversal; - -pub struct BreakCriticalEdges; - -/** - * Breaks critical edges in the MIR. - * - * Critical edges are edges that are neither the only edge leaving a - * block, nor the only edge entering one. - * - * When you want something to happen "along" an edge, you can either - * do at the end of the predecessor block, or at the start of the - * successor block. Critical edges have to be broken in order to prevent - * "edge actions" from affecting other edges. - * - * This function will break those edges by inserting new blocks along them. - * - * A special case is Drop and Call terminators with unwind/cleanup successors, - * They use `invoke` in LLVM, which terminates a block, meaning that code cannot - * be inserted after them, so even if an edge is the only edge leaving a block - * like that, we still insert blocks if the edge is one of many entering the - * target. - * - * NOTE: Simplify CFG will happily undo most of the work this pass does. - * - */ - -impl<'tcx> MirPass<'tcx> for BreakCriticalEdges { - fn run_pass(&mut self, _: &TyCtxt<'tcx>, _: NodeId, mir: &mut Mir<'tcx>) { - break_critical_edges(mir); - } -} - -impl Pass for BreakCriticalEdges {} - -fn break_critical_edges(mir: &mut Mir) { - let mut pred_count = vec![0u32; mir.basic_blocks.len()]; - - // Build the precedecessor map for the MIR - for (_, data) in traversal::preorder(mir) { - if let Some(ref term) = data.terminator { - for &tgt in term.successors().iter() { - pred_count[tgt.index()] += 1; - } - } - } - - let cleanup_map : BitVector = mir.basic_blocks - .iter().map(|bb| bb.is_cleanup).collect(); - - // We need a place to store the new blocks generated - let mut new_blocks = Vec::new(); - - let bbs = mir.all_basic_blocks(); - let cur_len = mir.basic_blocks.len(); - - for &bb in &bbs { - let data = mir.basic_block_data_mut(bb); - - if let Some(ref mut term) = data.terminator { - let is_invoke = term_is_invoke(term); - let term_span = term.span; - let term_scope = term.scope; - let succs = term.successors_mut(); - if succs.len() > 1 || (succs.len() > 0 && is_invoke) { - for tgt in succs { - let num_preds = pred_count[tgt.index()]; - if num_preds > 1 { - // It's a critical edge, break it - let goto = Terminator { - span: term_span, - scope: term_scope, - kind: TerminatorKind::Goto { target: *tgt } - }; - let mut data = BasicBlockData::new(Some(goto)); - data.is_cleanup = cleanup_map.contains(tgt.index()); - - // Get the index it will be when inserted into the MIR - let idx = cur_len + new_blocks.len(); - new_blocks.push(data); - *tgt = BasicBlock::new(idx); - } - } - } - } - } - - debug!("Broke {} N edges", new_blocks.len()); - - mir.basic_blocks.extend_from_slice(&new_blocks); -} - -// Returns true if the terminator would use an invoke in LLVM. -fn term_is_invoke(term: &Terminator) -> bool { - match term.kind { - TerminatorKind::Call { cleanup: Some(_), .. } | - TerminatorKind::Drop { unwind: Some(_), .. 
} => true, - _ => false - } -} diff --git a/src/librustc_mir/transform/erase_regions.rs b/src/librustc_mir/transform/erase_regions.rs index 12bfa3aebc..485ca3ea84 100644 --- a/src/librustc_mir/transform/erase_regions.rs +++ b/src/librustc_mir/transform/erase_regions.rs @@ -16,15 +16,14 @@ use rustc::ty::subst::Substs; use rustc::ty::{Ty, TyCtxt}; use rustc::mir::repr::*; use rustc::mir::visit::MutVisitor; -use rustc::mir::transform::{MirPass, Pass}; -use syntax::ast::NodeId; +use rustc::mir::transform::{MirPass, MirSource, Pass}; struct EraseRegionsVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, } impl<'a, 'tcx> EraseRegionsVisitor<'a, 'tcx> { - pub fn new(tcx: &'a TyCtxt<'tcx>) -> Self { + pub fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Self { EraseRegionsVisitor { tcx: tcx } @@ -38,7 +37,7 @@ impl<'a, 'tcx> MutVisitor<'tcx> for EraseRegionsVisitor<'a, 'tcx> { } fn visit_substs(&mut self, substs: &mut &'tcx Substs<'tcx>) { - *substs = self.tcx.mk_substs(self.tcx.erase_regions(*substs)); + *substs = self.tcx.erase_regions(&{*substs}); } } @@ -47,7 +46,8 @@ pub struct EraseRegions; impl Pass for EraseRegions {} impl<'tcx> MirPass<'tcx> for EraseRegions { - fn run_pass(&mut self, tcx: &TyCtxt<'tcx>, _: NodeId, mir: &mut Mir<'tcx>) { + fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + _: MirSource, mir: &mut Mir<'tcx>) { EraseRegionsVisitor::new(tcx).visit_mir(mir); } } diff --git a/src/librustc_mir/transform/mod.rs b/src/librustc_mir/transform/mod.rs index a52a8edc21..0dcb7ef84d 100644 --- a/src/librustc_mir/transform/mod.rs +++ b/src/librustc_mir/transform/mod.rs @@ -13,4 +13,6 @@ pub mod simplify_cfg; pub mod erase_regions; pub mod no_landing_pads; pub mod type_check; -pub mod break_critical_edges; +pub mod break_cleanup_edges; +pub mod promote_consts; +pub mod qualify_consts; diff --git a/src/librustc_mir/transform/no_landing_pads.rs b/src/librustc_mir/transform/no_landing_pads.rs index edfe75b843..de05032fa5 100644 --- a/src/librustc_mir/transform/no_landing_pads.rs +++ b/src/librustc_mir/transform/no_landing_pads.rs @@ -14,8 +14,7 @@ use rustc::ty::TyCtxt; use rustc::mir::repr::*; use rustc::mir::visit::MutVisitor; -use rustc::mir::transform::{Pass, MirPass}; -use syntax::ast::NodeId; +use rustc::mir::transform::{Pass, MirPass, MirSource}; pub struct NoLandingPads; @@ -42,7 +41,8 @@ impl<'tcx> MutVisitor<'tcx> for NoLandingPads { } impl<'tcx> MirPass<'tcx> for NoLandingPads { - fn run_pass(&mut self, tcx: &TyCtxt<'tcx>, _: NodeId, mir: &mut Mir<'tcx>) { + fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + _: MirSource, mir: &mut Mir<'tcx>) { if tcx.sess.no_landing_pads() { self.visit_mir(mir); } diff --git a/src/librustc_mir/transform/promote_consts.rs b/src/librustc_mir/transform/promote_consts.rs new file mode 100644 index 0000000000..431568b004 --- /dev/null +++ b/src/librustc_mir/transform/promote_consts.rs @@ -0,0 +1,412 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! A pass that promotes borrows of constant rvalues. +//! +//! The rvalues considered constant are trees of temps, +//! each with exactly one initialization, and holding +//! a constant value with no interior mutability. +//! 
They are placed into a new MIR constant body in +//! `promoted` and the borrow rvalue is replaced with +//! a `Literal::Promoted` using the index into `promoted` +//! of that constant MIR. +//! +//! This pass assumes that every use is dominated by an +//! initialization and can otherwise silence errors, if +//! move analysis runs after promotion on broken MIR. + +use rustc::mir::repr::*; +use rustc::mir::visit::{LvalueContext, MutVisitor, Visitor}; +use rustc::ty::{self, TyCtxt}; +use syntax::codemap::Span; + +use build::Location; +use traversal::ReversePostorder; + +use std::mem; + +/// State of a temporary during collection and promotion. +#[derive(Copy, Clone, PartialEq, Eq, Debug)] +pub enum TempState { + /// No references to this temp. + Undefined, + /// One direct assignment and any number of direct uses. + /// A borrow of this temp is promotable if the assigned + /// value is qualified as constant. + Defined { + location: Location, + uses: usize + }, + /// Any other combination of assignments/uses. + Unpromotable, + /// This temp was part of an rvalue which got extracted + /// during promotion and needs cleanup. + PromotedOut +} + +impl TempState { + pub fn is_promotable(&self) -> bool { + if let TempState::Defined { uses, .. } = *self { + uses > 0 + } else { + false + } + } +} + +/// A "root candidate" for promotion, which will become the +/// returned value in a promoted MIR, unless it's a subset +/// of a larger candidate. +pub enum Candidate { + /// Borrow of a constant temporary. + Ref(Location), + + /// Array of indices found in the third argument of + /// a call to one of the simd_shuffleN intrinsics. + ShuffleIndices(BasicBlock) +} + +struct TempCollector { + temps: Vec, + location: Location, + span: Span +} + +impl<'tcx> Visitor<'tcx> for TempCollector { + fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: LvalueContext) { + self.super_lvalue(lvalue, context); + if let Lvalue::Temp(index) = *lvalue { + // Ignore drops, if the temp gets promoted, + // then it's constant and thus drop is noop. + if let LvalueContext::Drop = context { + return; + } + + let temp = &mut self.temps[index as usize]; + if *temp == TempState::Undefined { + match context { + LvalueContext::Store | + LvalueContext::Call => { + *temp = TempState::Defined { + location: self.location, + uses: 0 + }; + return; + } + _ => { /* mark as unpromotable below */ } + } + } else if let TempState::Defined { ref mut uses, .. 
} = *temp { + match context { + LvalueContext::Borrow {..} | + LvalueContext::Consume | + LvalueContext::Inspect => { + *uses += 1; + return; + } + _ => { /* mark as unpromotable below */ } + } + } + *temp = TempState::Unpromotable; + } + } + + fn visit_statement(&mut self, bb: BasicBlock, statement: &Statement<'tcx>) { + assert_eq!(self.location.block, bb); + self.span = statement.span; + self.super_statement(bb, statement); + self.location.statement_index += 1; + } + + fn visit_terminator(&mut self, bb: BasicBlock, terminator: &Terminator<'tcx>) { + self.span = terminator.span; + self.super_terminator(bb, terminator); + } + + fn visit_basic_block_data(&mut self, bb: BasicBlock, data: &BasicBlockData<'tcx>) { + self.location.statement_index = 0; + self.location.block = bb; + self.super_basic_block_data(bb, data); + } +} + +pub fn collect_temps(mir: &Mir, rpo: &mut ReversePostorder) -> Vec { + let mut collector = TempCollector { + temps: vec![TempState::Undefined; mir.temp_decls.len()], + location: Location { + block: START_BLOCK, + statement_index: 0 + }, + span: mir.span + }; + for (bb, data) in rpo { + collector.visit_basic_block_data(bb, data); + } + collector.temps +} + +struct Promoter<'a, 'tcx: 'a> { + source: &'a mut Mir<'tcx>, + promoted: Mir<'tcx>, + temps: &'a mut Vec, + + /// If true, all nested temps are also kept in the + /// source MIR, not moved to the promoted MIR. + keep_original: bool +} + +impl<'a, 'tcx> Promoter<'a, 'tcx> { + fn new_block(&mut self) -> BasicBlock { + let index = self.promoted.basic_blocks.len(); + self.promoted.basic_blocks.push(BasicBlockData { + statements: vec![], + terminator: Some(Terminator { + span: self.promoted.span, + scope: ScopeId::new(0), + kind: TerminatorKind::Return + }), + is_cleanup: false + }); + BasicBlock::new(index) + } + + fn assign(&mut self, dest: Lvalue<'tcx>, rvalue: Rvalue<'tcx>, span: Span) { + let data = self.promoted.basic_blocks.last_mut().unwrap(); + data.statements.push(Statement { + span: span, + scope: ScopeId::new(0), + kind: StatementKind::Assign(dest, rvalue) + }); + } + + /// Copy the initialization of this temp to the + /// promoted MIR, recursing through temps. + fn promote_temp(&mut self, index: u32) -> u32 { + let index = index as usize; + let old_keep_original = self.keep_original; + let (bb, stmt_idx) = match self.temps[index] { + TempState::Defined { + location: Location { block, statement_index }, + uses + } if uses > 0 => { + if uses > 1 { + self.keep_original = true; + } + (block, statement_index) + } + temp => { + span_bug!(self.promoted.span, "tmp{} not promotable: {:?}", + index, temp); + } + }; + if !self.keep_original { + self.temps[index] = TempState::PromotedOut; + } + + let no_stmts = self.source[bb].statements.len(); + + // First, take the Rvalue or Call out of the source MIR, + // or duplicate it, depending on keep_original. 
+ let (mut rvalue, mut call) = (None, None); + let span = if stmt_idx < no_stmts { + let statement = &mut self.source[bb].statements[stmt_idx]; + let StatementKind::Assign(_, ref mut rhs) = statement.kind; + if self.keep_original { + rvalue = Some(rhs.clone()); + } else { + let unit = Rvalue::Aggregate(AggregateKind::Tuple, vec![]); + rvalue = Some(mem::replace(rhs, unit)); + } + statement.span + } else if self.keep_original { + let terminator = self.source[bb].terminator().clone(); + call = Some(terminator.kind); + terminator.span + } else { + let terminator = self.source[bb].terminator_mut(); + let target = match terminator.kind { + TerminatorKind::Call { + destination: ref mut dest @ Some(_), + ref mut cleanup, .. + } => { + // No cleanup necessary. + cleanup.take(); + + // We'll put a new destination in later. + dest.take().unwrap().1 + } + ref kind => { + span_bug!(terminator.span, "{:?} not promotable", kind); + } + }; + call = Some(mem::replace(&mut terminator.kind, TerminatorKind::Goto { + target: target + })); + terminator.span + }; + + // Then, recurse for components in the Rvalue or Call. + if stmt_idx < no_stmts { + self.visit_rvalue(rvalue.as_mut().unwrap()); + } else { + self.visit_terminator_kind(bb, call.as_mut().unwrap()); + } + + let new_index = self.promoted.temp_decls.len() as u32; + let new_temp = Lvalue::Temp(new_index); + self.promoted.temp_decls.push(TempDecl { + ty: self.source.temp_decls[index].ty + }); + + // Inject the Rvalue or Call into the promoted MIR. + if stmt_idx < no_stmts { + self.assign(new_temp, rvalue.unwrap(), span); + } else { + let last = self.promoted.basic_blocks.len() - 1; + let new_target = self.new_block(); + let mut call = call.unwrap(); + match call { + TerminatorKind::Call { ref mut destination, ..} => { + *destination = Some((new_temp, new_target)); + } + _ => bug!() + } + let terminator = &mut self.promoted.basic_blocks[last].terminator_mut(); + terminator.span = span; + terminator.kind = call; + } + + // Restore the old duplication state. + self.keep_original = old_keep_original; + + new_index + } + + fn promote_candidate(mut self, candidate: Candidate) { + let span = self.promoted.span; + let new_operand = Operand::Constant(Constant { + span: span, + ty: self.promoted.return_ty.unwrap(), + literal: Literal::Promoted { + index: self.source.promoted.len() + } + }); + let mut rvalue = match candidate { + Candidate::Ref(Location { block: bb, statement_index: stmt_idx }) => { + match self.source[bb].statements[stmt_idx].kind { + StatementKind::Assign(_, ref mut rvalue) => { + mem::replace(rvalue, Rvalue::Use(new_operand)) + } + } + } + Candidate::ShuffleIndices(bb) => { + match self.source[bb].terminator_mut().kind { + TerminatorKind::Call { ref mut args, .. } => { + Rvalue::Use(mem::replace(&mut args[2], new_operand)) + } + _ => bug!() + } + } + }; + self.visit_rvalue(&mut rvalue); + self.assign(Lvalue::ReturnPointer, rvalue, span); + self.source.promoted.push(self.promoted); + } +} + +/// Replaces all temporaries with their promoted counterparts. +impl<'a, 'tcx> MutVisitor<'tcx> for Promoter<'a, 'tcx> { + fn visit_lvalue(&mut self, lvalue: &mut Lvalue<'tcx>, context: LvalueContext) { + if let Lvalue::Temp(ref mut index) = *lvalue { + *index = self.promote_temp(*index); + } + self.super_lvalue(lvalue, context); + } +} + +pub fn promote_candidates<'a, 'tcx>(mir: &mut Mir<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + mut temps: Vec, + candidates: Vec) { + // Visit candidates in reverse, in case they're nested. 
+ for candidate in candidates.into_iter().rev() { + let (span, ty) = match candidate { + Candidate::Ref(Location { block: bb, statement_index: stmt_idx }) => { + let statement = &mir[bb].statements[stmt_idx]; + let StatementKind::Assign(ref dest, _) = statement.kind; + if let Lvalue::Temp(index) = *dest { + if temps[index as usize] == TempState::PromotedOut { + // Already promoted. + continue; + } + } + (statement.span, mir.lvalue_ty(tcx, dest).to_ty(tcx)) + } + Candidate::ShuffleIndices(bb) => { + let terminator = mir[bb].terminator(); + let ty = match terminator.kind { + TerminatorKind::Call { ref args, .. } => { + mir.operand_ty(tcx, &args[2]) + } + _ => { + span_bug!(terminator.span, + "expected simd_shuffleN call to promote"); + } + }; + (terminator.span, ty) + } + }; + + let mut promoter = Promoter { + source: mir, + promoted: Mir { + basic_blocks: vec![], + scopes: vec![ScopeData { + span: span, + parent_scope: None + }], + promoted: vec![], + return_ty: ty::FnConverging(ty), + var_decls: vec![], + arg_decls: vec![], + temp_decls: vec![], + upvar_decls: vec![], + span: span + }, + temps: &mut temps, + keep_original: false + }; + assert_eq!(promoter.new_block(), START_BLOCK); + promoter.promote_candidate(candidate); + } + + // Eliminate assignments to, and drops of promoted temps. + let promoted = |index: u32| temps[index as usize] == TempState::PromotedOut; + for block in &mut mir.basic_blocks { + block.statements.retain(|statement| { + match statement.kind { + StatementKind::Assign(Lvalue::Temp(index), _) => { + !promoted(index) + } + _ => true + } + }); + let terminator = block.terminator_mut(); + match terminator.kind { + TerminatorKind::Drop { value: Lvalue::Temp(index), target, .. } => { + if promoted(index) { + terminator.kind = TerminatorKind::Goto { + target: target + }; + } + } + _ => {} + } + } +} diff --git a/src/librustc_mir/transform/qualify_consts.rs b/src/librustc_mir/transform/qualify_consts.rs new file mode 100644 index 0000000000..2e4400c834 --- /dev/null +++ b/src/librustc_mir/transform/qualify_consts.rs @@ -0,0 +1,1048 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! A pass that qualifies constness of temporaries in constants, +//! static initializers and functions and also drives promotion. +//! +//! The Qualif flags below can be used to also provide better +//! diagnostics as to why a constant rvalue wasn't promoted. + +use rustc_data_structures::bitvec::BitVector; +use rustc::hir; +use rustc::hir::def_id::DefId; +use rustc::hir::intravisit::FnKind; +use rustc::hir::map::blocks::FnLikeNode; +use rustc::traits::{self, ProjectionMode}; +use rustc::ty::{self, TyCtxt, Ty}; +use rustc::ty::cast::CastTy; +use rustc::mir::repr::*; +use rustc::mir::mir_map::MirMap; +use rustc::mir::transform::{Pass, MirMapPass, MirSource}; +use rustc::mir::visit::{LvalueContext, Visitor}; +use rustc::util::nodemap::DefIdMap; +use syntax::abi::Abi; +use syntax::codemap::Span; +use syntax::feature_gate::UnstableFeatures; + +use std::collections::hash_map::Entry; +use std::fmt; + +use build::Location; +use traversal::{self, ReversePostorder}; + +use super::promote_consts::{self, Candidate, TempState}; + +bitflags! 
{ + flags Qualif: u8 { + // Const item's qualification while recursing. + // Recursive consts are an error. + const RECURSIVE = 1 << 0, + + // Constant containing interior mutability (UnsafeCell). + const MUTABLE_INTERIOR = 1 << 1, + + // Constant containing an ADT that implements Drop. + const NEEDS_DROP = 1 << 2, + + // Function argument. + const FN_ARGUMENT = 1 << 3, + + // Static lvalue or move from a static. + const STATIC = 1 << 4, + + // Reference to a static. + const STATIC_REF = 1 << 5, + + // Not constant at all - non-`const fn` calls, asm!, + // pointer comparisons, ptr-to-int casts, etc. + const NOT_CONST = 1 << 6, + + // Refers to temporaries which cannot be promoted as + // promote_consts decided they weren't simple enough. + const NOT_PROMOTABLE = 1 << 7, + + // Borrows of temporaries can be promoted only + // if they have none of the above qualifications. + const NEVER_PROMOTE = !0, + + // Const items can only have MUTABLE_INTERIOR + // and NOT_PROMOTABLE without producing an error. + const CONST_ERROR = !Qualif::MUTABLE_INTERIOR.bits & + !Qualif::NOT_PROMOTABLE.bits + } +} + +impl<'a, 'tcx> Qualif { + /// Remove flags which are impossible for the given type. + fn restrict(&mut self, ty: Ty<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: &ty::ParameterEnvironment<'tcx>) { + if !ty.type_contents(tcx).interior_unsafe() { + *self = *self - Qualif::MUTABLE_INTERIOR; + } + if !tcx.type_needs_drop_given_env(ty, param_env) { + *self = *self - Qualif::NEEDS_DROP; + } + } +} + +/// What kind of item we are in. +#[derive(Copy, Clone, PartialEq, Eq)] +enum Mode { + Const, + Static, + StaticMut, + ConstFn, + Fn +} + +impl fmt::Display for Mode { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Mode::Const => write!(f, "constant"), + Mode::Static | Mode::StaticMut => write!(f, "static"), + Mode::ConstFn => write!(f, "constant function"), + Mode::Fn => write!(f, "function") + } + } +} + +fn is_const_fn(tcx: TyCtxt, def_id: DefId) -> bool { + if let Some(node_id) = tcx.map.as_local_node_id(def_id) { + let fn_like = FnLikeNode::from_node(tcx.map.get(node_id)); + match fn_like.map(|f| f.kind()) { + Some(FnKind::ItemFn(_, _, _, c, _, _, _)) => { + c == hir::Constness::Const + } + Some(FnKind::Method(_, m, _, _)) => { + m.constness == hir::Constness::Const + } + _ => false + } + } else { + tcx.sess.cstore.is_const_fn(def_id) + } +} + +struct Qualifier<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + mode: Mode, + span: Span, + def_id: DefId, + mir: &'a Mir<'tcx>, + rpo: ReversePostorder<'a, 'tcx>, + tcx: TyCtxt<'a, 'gcx, 'tcx>, + param_env: ty::ParameterEnvironment<'tcx>, + qualif_map: &'a mut DefIdMap, + mir_map: Option<&'a MirMap<'tcx>>, + temp_qualif: Vec>, + return_qualif: Option, + qualif: Qualif, + const_fn_arg_vars: BitVector, + location: Location, + temp_promotion_state: Vec, + promotion_candidates: Vec +} + +impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> { + fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_env: ty::ParameterEnvironment<'tcx>, + qualif_map: &'a mut DefIdMap, + mir_map: Option<&'a MirMap<'tcx>>, + def_id: DefId, + mir: &'a Mir<'tcx>, + mode: Mode) + -> Qualifier<'a, 'tcx, 'tcx> { + let mut rpo = traversal::reverse_postorder(mir); + let temps = promote_consts::collect_temps(mir, &mut rpo); + rpo.reset(); + Qualifier { + mode: mode, + span: mir.span, + def_id: def_id, + mir: mir, + rpo: rpo, + tcx: tcx, + param_env: param_env, + qualif_map: qualif_map, + mir_map: mir_map, + temp_qualif: vec![None; mir.temp_decls.len()], + return_qualif: None, + qualif: Qualif::empty(), 
+ const_fn_arg_vars: BitVector::new(mir.var_decls.len()), + location: Location { + block: START_BLOCK, + statement_index: 0 + }, + temp_promotion_state: temps, + promotion_candidates: vec![] + } + } + + // FIXME(eddyb) we could split the errors into meaningful + // categories, but enabling full miri would make that + // slightly pointless (even with feature-gating). + fn not_const(&mut self) { + self.add(Qualif::NOT_CONST); + if self.mode != Mode::Fn { + span_err!(self.tcx.sess, self.span, E0019, + "{} contains unimplemented expression type", self.mode); + } + } + + /// Error about extra statements in a constant. + fn statement_like(&mut self) { + self.add(Qualif::NOT_CONST); + if self.mode != Mode::Fn { + span_err!(self.tcx.sess, self.span, E0016, + "blocks in {}s are limited to items and tail expressions", + self.mode); + } + } + + /// Add the given qualification to self.qualif. + fn add(&mut self, qualif: Qualif) { + self.qualif = self.qualif | qualif; + } + + /// Add the given type's qualification to self.qualif. + fn add_type(&mut self, ty: Ty<'tcx>) { + self.add(Qualif::MUTABLE_INTERIOR | Qualif::NEEDS_DROP); + self.qualif.restrict(ty, self.tcx, &self.param_env); + } + + /// Within the provided closure, self.qualif will start + /// out empty, and its value after the closure returns will + /// be combined with the value before the call to nest. + fn nest(&mut self, f: F) { + let original = self.qualif; + self.qualif = Qualif::empty(); + f(self); + self.add(original); + } + + /// Check for NEEDS_DROP (from an ADT or const fn call) and + /// error, unless we're in a function, or the feature-gate + /// for globals with destructors is enabled. + fn deny_drop(&self) { + if self.mode == Mode::Fn || !self.qualif.intersects(Qualif::NEEDS_DROP) { + return; + } + + // Static and const fn's allow destructors, but they're feature-gated. + let msg = if self.mode != Mode::Const { + // Feature-gate for globals with destructors is enabled. + if self.tcx.sess.features.borrow().drop_types_in_const { + return; + } + + // This comes from a macro that has #[allow_internal_unstable]. + if self.tcx.sess.codemap().span_allows_unstable(self.span) { + return; + } + + format!("destructors in {}s are an unstable feature", + self.mode) + } else { + format!("{}s are not allowed to have destructors", + self.mode) + }; + + let mut err = + struct_span_err!(self.tcx.sess, self.span, E0493, "{}", msg); + if self.mode != Mode::Const { + help!(&mut err, + "in Nightly builds, add `#![feature(drop_types_in_const)]` \ + to the crate attributes to enable"); + } + err.emit(); + } + + /// Check if an Lvalue with the current qualifications could + /// be consumed, by either an operand or a Deref projection. + fn try_consume(&mut self) -> bool { + if self.qualif.intersects(Qualif::STATIC) && self.mode != Mode::Fn { + let msg = if self.mode == Mode::Static || + self.mode == Mode::StaticMut { + "cannot refer to other statics by value, use the \ + address-of operator or a constant instead" + } else { + "cannot refer to statics by value, use a constant instead" + }; + span_err!(self.tcx.sess, self.span, E0394, "{}", msg); + + // Replace STATIC with NOT_CONST to avoid further errors. + self.qualif = self.qualif - Qualif::STATIC; + self.add(Qualif::NOT_CONST); + + false + } else { + true + } + } + + /// Assign the current qualification to the given destination. 
+ fn assign(&mut self, dest: &Lvalue<'tcx>) { + let qualif = self.qualif; + let span = self.span; + let store = |slot: &mut Option| { + if slot.is_some() { + span_bug!(span, "multiple assignments to {:?}", dest); + } + *slot = Some(qualif); + }; + + // Only handle promotable temps in non-const functions. + if self.mode == Mode::Fn { + if let Lvalue::Temp(index) = *dest { + if self.temp_promotion_state[index as usize].is_promotable() { + store(&mut self.temp_qualif[index as usize]); + } + } + return; + } + + match *dest { + Lvalue::Temp(index) => store(&mut self.temp_qualif[index as usize]), + Lvalue::ReturnPointer => store(&mut self.return_qualif), + + Lvalue::Projection(box Projection { + base: Lvalue::Temp(index), + elem: ProjectionElem::Deref + }) if self.mir.temp_decls[index as usize].ty.is_unique() + && self.temp_qualif[index as usize].map_or(false, |qualif| { + qualif.intersects(Qualif::NOT_CONST) + }) => { + // Part of `box expr`, we should've errored + // already for the Box allocation Rvalue. + } + + // This must be an explicit assignment. + _ => { + // Catch more errors in the destination. + self.visit_lvalue(dest, LvalueContext::Store); + self.statement_like(); + } + } + } + + /// Returns true if the block ends in a bounds check branch, i.e.: + /// len = Len(array); + /// cond = Lt(idx, len); + /// if cond { + /// ... + /// } else { + /// loc = (...); + /// loc_ref = &loc; + /// panic_bounds_check(loc_ref, idx, len); + /// } + fn is_bounds_check(&self, bb: BasicBlock, + cond_op: &Operand<'tcx>, + if_else: BasicBlock) -> bool { + use rustc::mir::repr::Lvalue::*; + use rustc::mir::repr::Operand::Consume; + use rustc::mir::repr::Rvalue::*; + use rustc::mir::repr::StatementKind::*; + use rustc::mir::repr::TerminatorKind::*; + + let stmts = &self.mir[bb].statements; + let stmts_panic = &self.mir[if_else].statements; + if stmts.len() < 2 || stmts_panic.len() != 2 { + return false; + } + + let all = (&stmts[stmts.len() - 2].kind, + &stmts[stmts.len() - 1].kind, + cond_op, + &stmts_panic[0].kind, + &stmts_panic[1].kind, + &self.mir[if_else].terminator().kind); + match all { + (&Assign(Temp(len), Len(_)), + &Assign(Temp(cond), BinaryOp(BinOp::Lt, ref idx, Consume(Temp(len2)))), + /* if */ &Consume(Temp(cond2)), /* {...} else */ + &Assign(Temp(loc), Aggregate(..)), + &Assign(Temp(loc_ref), Ref(_, _, Temp(loc2))), + &Call { + func: Operand::Constant(Constant { + literal: Literal::Item { def_id, .. }, .. + }), + ref args, + destination: None, + .. + }) => { + len == len2 && cond == cond2 && loc == loc2 && + args[0] == Consume(Temp(loc_ref)) && + args[1] == *idx && + args[2] == Consume(Temp(len)) && + Some(def_id) == self.tcx.lang_items.panic_bounds_check_fn() + } + _ => false + } + } + + /// Qualify a whole const, static initializer or const fn. + fn qualify_const(&mut self) -> Qualif { + let mir = self.mir; + + let mut seen_blocks = BitVector::new(mir.basic_blocks.len()); + let mut bb = START_BLOCK; + loop { + seen_blocks.insert(bb.index()); + + self.visit_basic_block_data(bb, &mir[bb]); + + let target = match mir[bb].terminator().kind { + TerminatorKind::Goto { target } | + // Drops are considered noops. + TerminatorKind::Drop { target, .. } | + TerminatorKind::Call { destination: Some((_, target)), .. } => { + Some(target) + } + + // Non-terminating calls cannot produce any value. + TerminatorKind::Call { destination: None, .. } => { + return Qualif::empty(); + } + + // Need to allow bounds checking branches. 
+ TerminatorKind::If { ref cond, targets: (if_true, if_else) } => { + if self.is_bounds_check(bb, cond, if_else) { + Some(if_true) + } else { + None + } + } + + TerminatorKind::Switch {..} | + TerminatorKind::SwitchInt {..} | + TerminatorKind::Resume => None, + + TerminatorKind::Return => { + // Check for unused values. This usually means + // there are extra statements in the AST. + for i in 0..mir.temp_decls.len() { + if self.temp_qualif[i].is_none() { + continue; + } + + let state = self.temp_promotion_state[i]; + if let TempState::Defined { location, uses: 0 } = state { + let data = &mir[location.block]; + let stmt_idx = location.statement_index; + + // Get the span for the initialization. + if stmt_idx < data.statements.len() { + self.span = data.statements[stmt_idx].span; + } else { + self.span = data.terminator().span; + } + + // Treat this as a statement in the AST. + self.statement_like(); + } + } + + // Make sure there are no extra unassigned variables. + self.qualif = Qualif::NOT_CONST; + for index in 0..mir.var_decls.len() { + if !self.const_fn_arg_vars.contains(index) { + self.assign(&Lvalue::Var(index as u32)); + } + } + + break; + } + }; + + match target { + // No loops allowed. + Some(target) if !seen_blocks.contains(target.index()) => { + bb = target; + } + _ => { + self.not_const(); + break; + } + } + } + + let return_ty = mir.return_ty.unwrap(); + self.qualif = self.return_qualif.unwrap_or(Qualif::NOT_CONST); + + match self.mode { + Mode::StaticMut => { + // Check for destructors in static mut. + self.add_type(return_ty); + self.deny_drop(); + } + _ => { + // Account for errors in consts by using the + // conservative type qualification instead. + if self.qualif.intersects(Qualif::CONST_ERROR) { + self.qualif = Qualif::empty(); + self.add_type(return_ty); + } + } + } + self.qualif + } +} + +/// Accumulates an Rvalue or Call's effects in self.qualif. +/// For functions (constant or not), it also records +/// candidates for promotion in promotion_candidates. +impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> { + fn visit_lvalue(&mut self, lvalue: &Lvalue<'tcx>, context: LvalueContext) { + match *lvalue { + Lvalue::Arg(_) => { + self.add(Qualif::FN_ARGUMENT); + } + Lvalue::Var(_) => { + self.add(Qualif::NOT_CONST); + } + Lvalue::Temp(index) => { + if !self.temp_promotion_state[index as usize].is_promotable() { + self.add(Qualif::NOT_PROMOTABLE); + } + + if let Some(qualif) = self.temp_qualif[index as usize] { + self.add(qualif); + } else { + self.not_const(); + } + } + Lvalue::Static(_) => { + self.add(Qualif::STATIC); + if self.mode == Mode::Const || self.mode == Mode::ConstFn { + span_err!(self.tcx.sess, self.span, E0013, + "{}s cannot refer to statics, use \ + a constant instead", self.mode); + } + } + Lvalue::ReturnPointer => { + self.not_const(); + } + Lvalue::Projection(ref proj) => { + self.nest(|this| { + this.super_lvalue(lvalue, context); + match proj.elem { + ProjectionElem::Deref => { + if !this.try_consume() { + return; + } + + if this.qualif.intersects(Qualif::STATIC_REF) { + this.qualif = this.qualif - Qualif::STATIC_REF; + this.add(Qualif::STATIC); + } + + let base_ty = this.mir.lvalue_ty(this.tcx, &proj.base) + .to_ty(this.tcx); + if let ty::TyRawPtr(_) = base_ty.sty { + this.add(Qualif::NOT_CONST); + if this.mode != Mode::Fn { + span_err!(this.tcx.sess, this.span, E0396, + "raw pointers cannot be dereferenced in {}s", + this.mode); + } + } + } + + ProjectionElem::Field(..) 
| + ProjectionElem::Index(_) => { + if this.mode != Mode::Fn && + this.qualif.intersects(Qualif::STATIC) { + span_err!(this.tcx.sess, this.span, E0494, + "cannot refer to the interior of another \ + static, use a constant instead"); + } + let ty = this.mir.lvalue_ty(this.tcx, lvalue) + .to_ty(this.tcx); + this.qualif.restrict(ty, this.tcx, &this.param_env); + } + + ProjectionElem::ConstantIndex {..} | + ProjectionElem::Downcast(..) => { + this.not_const() + } + } + }); + } + } + } + + fn visit_operand(&mut self, operand: &Operand<'tcx>) { + match *operand { + Operand::Consume(_) => { + self.nest(|this| { + this.super_operand(operand); + this.try_consume(); + }); + } + Operand::Constant(ref constant) => { + // Only functions and methods can have these types. + if let ty::TyFnDef(..) = constant.ty.sty { + return; + } + + if let Literal::Item { def_id, substs } = constant.literal { + // Don't peek inside generic (associated) constants. + if !substs.types.is_empty() { + self.add_type(constant.ty); + } else { + let qualif = qualify_const_item_cached(self.tcx, + self.qualif_map, + self.mir_map, + def_id); + self.add(qualif); + } + + // FIXME(eddyb) check recursive constants here, + // instead of rustc_passes::static_recursion. + if self.qualif.intersects(Qualif::RECURSIVE) { + span_bug!(constant.span, + "recursive constant wasn't caught earlier"); + } + + // Let `const fn` transitively have destructors, + // but they do get stopped in `const` or `static`. + if self.mode != Mode::ConstFn { + self.deny_drop(); + } + } + } + } + } + + fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>) { + // Recurse through operands and lvalues. + self.super_rvalue(rvalue); + + match *rvalue { + Rvalue::Use(_) | + Rvalue::Repeat(..) | + Rvalue::UnaryOp(..) | + Rvalue::Cast(CastKind::ReifyFnPointer, _, _) | + Rvalue::Cast(CastKind::UnsafeFnPointer, _, _) | + Rvalue::Cast(CastKind::Unsize, _, _) => {} + + Rvalue::Len(_) => { + // Static lvalues in consts would have errored already, + // don't treat length checks as reads from statics. + self.qualif = self.qualif - Qualif::STATIC; + } + + Rvalue::Ref(_, kind, ref lvalue) => { + // Static lvalues in consts would have errored already, + // only keep track of references to them here. + if self.qualif.intersects(Qualif::STATIC) { + self.qualif = self.qualif - Qualif::STATIC; + self.add(Qualif::STATIC_REF); + } + + let ty = self.mir.lvalue_ty(self.tcx, lvalue).to_ty(self.tcx); + if kind == BorrowKind::Mut { + // In theory, any zero-sized value could be borrowed + // mutably without consequences. However, only &mut [] + // is allowed right now, and only in functions. + let allow = if self.mode == Mode::StaticMut { + // Inside a `static mut`, &mut [...] is also allowed. + match ty.sty { + ty::TyArray(..) | ty::TySlice(_) => { + // Mutating can expose drops, be conservative. + self.add_type(ty); + self.deny_drop(); + true + } + _ => false + } + } else if let ty::TyArray(_, 0) = ty.sty { + self.mode == Mode::Fn + } else { + false + }; + + if !allow { + self.add(Qualif::NOT_CONST); + if self.mode != Mode::Fn { + span_err!(self.tcx.sess, self.span, E0017, + "references in {}s may only refer \ + to immutable values", self.mode); + } + } + } else { + // Constants cannot be borrowed if they contain interior mutability as + // it means that our "silent insertion of statics" could change + // initializer values (very bad). + if self.qualif.intersects(Qualif::MUTABLE_INTERIOR) { + // Replace MUTABLE_INTERIOR with NOT_CONST to avoid + // duplicate errors (from reborrowing, for example). 
+ self.qualif = self.qualif - Qualif::MUTABLE_INTERIOR; + self.add(Qualif::NOT_CONST); + if self.mode != Mode::Fn { + span_err!(self.tcx.sess, self.span, E0492, + "cannot borrow a constant which contains \ + interior mutability, create a static instead"); + } + } + } + + // We might have a candidate for promotion. + let candidate = Candidate::Ref(self.location); + if self.mode == Mode::Fn || self.mode == Mode::ConstFn { + if !self.qualif.intersects(Qualif::NEVER_PROMOTE) { + // We can only promote direct borrows of temps. + if let Lvalue::Temp(_) = *lvalue { + self.promotion_candidates.push(candidate); + } + } + } + } + + Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => { + let operand_ty = self.mir.operand_ty(self.tcx, operand); + let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast"); + let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast"); + match (cast_in, cast_out) { + (CastTy::Ptr(_), CastTy::Int(_)) | + (CastTy::FnPtr, CastTy::Int(_)) => { + self.add(Qualif::NOT_CONST); + if self.mode != Mode::Fn { + span_err!(self.tcx.sess, self.span, E0018, + "raw pointers cannot be cast to integers in {}s", + self.mode); + } + } + _ => {} + } + } + + Rvalue::BinaryOp(op, ref lhs, _) => { + if let ty::TyRawPtr(_) = self.mir.operand_ty(self.tcx, lhs).sty { + assert!(op == BinOp::Eq || op == BinOp::Ne || + op == BinOp::Le || op == BinOp::Lt || + op == BinOp::Ge || op == BinOp::Gt); + + self.add(Qualif::NOT_CONST); + if self.mode != Mode::Fn { + span_err!(self.tcx.sess, self.span, E0395, + "raw pointers cannot be compared in {}s", + self.mode); + } + } + } + + Rvalue::Box(_) => { + self.add(Qualif::NOT_CONST); + if self.mode != Mode::Fn { + span_err!(self.tcx.sess, self.span, E0010, + "allocations are not allowed in {}s", self.mode); + } + } + + Rvalue::Aggregate(ref kind, _) => { + if let AggregateKind::Adt(def, _, _) = *kind { + if def.has_dtor() { + self.add(Qualif::NEEDS_DROP); + self.deny_drop(); + } + + if Some(def.did) == self.tcx.lang_items.unsafe_cell_type() { + let ty = self.mir.rvalue_ty(self.tcx, rvalue).unwrap(); + self.add_type(ty); + assert!(self.qualif.intersects(Qualif::MUTABLE_INTERIOR)); + // Even if the value inside may not need dropping, + // mutating it would change that. + if !self.qualif.intersects(Qualif::NOT_CONST) { + self.deny_drop(); + } + } + } + } + + Rvalue::Slice {..} | + Rvalue::InlineAsm {..} => { + self.not_const(); + } + } + } + + fn visit_terminator_kind(&mut self, bb: BasicBlock, kind: &TerminatorKind<'tcx>) { + if let TerminatorKind::Call { ref func, ref args, ref destination, .. } = *kind { + self.visit_operand(func); + + let fn_ty = self.mir.operand_ty(self.tcx, func); + let (is_shuffle, is_const_fn) = match fn_ty.sty { + ty::TyFnDef(def_id, _, f) => { + (f.abi == Abi::PlatformIntrinsic && + self.tcx.item_name(def_id).as_str().starts_with("simd_shuffle"), + is_const_fn(self.tcx, def_id)) + } + _ => (false, false) + }; + + for (i, arg) in args.iter().enumerate() { + self.nest(|this| { + this.visit_operand(arg); + if is_shuffle && i == 2 && this.mode == Mode::Fn { + let candidate = Candidate::ShuffleIndices(bb); + if !this.qualif.intersects(Qualif::NEVER_PROMOTE) { + this.promotion_candidates.push(candidate); + } else { + span_err!(this.tcx.sess, this.span, E0526, + "shuffle indices are not constant"); + } + } + }); + } + + // Const fn calls. 
+ if is_const_fn { + // We are in a const or static initializer, + if self.mode != Mode::Fn && + + // feature-gate is not enabled, + !self.tcx.sess.features.borrow().const_fn && + + // this doesn't come from a crate with the feature-gate enabled, + self.def_id.is_local() && + + // this doesn't come from a macro that has #[allow_internal_unstable] + !self.tcx.sess.codemap().span_allows_unstable(self.span) + { + let mut err = self.tcx.sess.struct_span_err(self.span, + "const fns are an unstable feature"); + help!(&mut err, + "in Nightly builds, add `#![feature(const_fn)]` \ + to the crate attributes to enable"); + err.emit(); + } + } else { + self.qualif = Qualif::NOT_CONST; + if self.mode != Mode::Fn { + // FIXME(#24111) Remove this check when const fn stabilizes + let (msg, note) = if let UnstableFeatures::Disallow = + self.tcx.sess.opts.unstable_features { + (format!("calls in {}s are limited to \ + struct and enum constructors", + self.mode), + Some("a limited form of compile-time function \ + evaluation is available on a nightly \ + compiler via `const fn`")) + } else { + (format!("calls in {}s are limited \ + to constant functions, \ + struct and enum constructors", + self.mode), + None) + }; + let mut err = struct_span_err!(self.tcx.sess, self.span, E0015, "{}", msg); + if let Some(note) = note { + err.span_note(self.span, note); + } + err.emit(); + } + } + + if let Some((ref dest, _)) = *destination { + // Avoid propagating irrelevant callee/argument qualifications. + if self.qualif.intersects(Qualif::CONST_ERROR) { + self.qualif = Qualif::NOT_CONST; + } else { + // Be conservative about the returned value of a const fn. + let tcx = self.tcx; + let ty = self.mir.lvalue_ty(tcx, dest).to_ty(tcx); + self.qualif = Qualif::empty(); + self.add_type(ty); + + // Let `const fn` transitively have destructors, + // but they do get stopped in `const` or `static`. + if self.mode != Mode::ConstFn { + self.deny_drop(); + } + } + self.assign(dest); + } + } else { + // Qualify any operands inside other terminators. + self.super_terminator_kind(bb, kind); + } + } + + fn visit_assign(&mut self, _: BasicBlock, dest: &Lvalue<'tcx>, rvalue: &Rvalue<'tcx>) { + self.visit_rvalue(rvalue); + + // Check the allowed const fn argument forms. + if let (Mode::ConstFn, &Lvalue::Var(index)) = (self.mode, dest) { + if self.const_fn_arg_vars.insert(index as usize) { + // Direct use of an argument is permitted. + if let Rvalue::Use(Operand::Consume(Lvalue::Arg(_))) = *rvalue { + return; + } + + // Avoid a generic error for other uses of arguments. 
+ if self.qualif.intersects(Qualif::FN_ARGUMENT) { + let decl = &self.mir.var_decls[index as usize]; + span_err!(self.tcx.sess, decl.span, E0022, + "arguments of constant functions can only \ + be immutable by-value bindings"); + return; + } + } + } + + self.assign(dest); + } + + fn visit_statement(&mut self, bb: BasicBlock, statement: &Statement<'tcx>) { + assert_eq!(self.location.block, bb); + self.span = statement.span; + self.nest(|this| this.super_statement(bb, statement)); + self.location.statement_index += 1; + } + + fn visit_terminator(&mut self, bb: BasicBlock, terminator: &Terminator<'tcx>) { + assert_eq!(self.location.block, bb); + self.span = terminator.span; + self.nest(|this| this.super_terminator(bb, terminator)); + } + + fn visit_basic_block_data(&mut self, bb: BasicBlock, data: &BasicBlockData<'tcx>) { + self.location.statement_index = 0; + self.location.block = bb; + self.super_basic_block_data(bb, data); + } +} + +fn qualify_const_item_cached<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + qualif_map: &mut DefIdMap, + mir_map: Option<&MirMap<'tcx>>, + def_id: DefId) + -> Qualif { + match qualif_map.entry(def_id) { + Entry::Occupied(entry) => return *entry.get(), + Entry::Vacant(entry) => { + // Guard against `const` recursion. + entry.insert(Qualif::RECURSIVE); + } + } + + let extern_mir; + let param_env_and_mir = if def_id.is_local() { + let node_id = tcx.map.as_local_node_id(def_id).unwrap(); + mir_map.and_then(|map| map.map.get(&node_id)).map(|mir| { + (ty::ParameterEnvironment::for_item(tcx, node_id), mir) + }) + } else if let Some(mir) = tcx.sess.cstore.maybe_get_item_mir(tcx, def_id) { + // These should only be monomorphic constants. + extern_mir = mir; + Some((tcx.empty_parameter_environment(), &extern_mir)) + } else { + None + }; + + let (param_env, mir) = param_env_and_mir.unwrap_or_else(|| { + bug!("missing constant MIR for {}", tcx.item_path_str(def_id)) + }); + + let mut qualifier = Qualifier::new(tcx, param_env, qualif_map, mir_map, + def_id, mir, Mode::Const); + let qualif = qualifier.qualify_const(); + qualifier.qualif_map.insert(def_id, qualif); + qualif +} + +pub struct QualifyAndPromoteConstants; + +impl Pass for QualifyAndPromoteConstants {} + +impl<'tcx> MirMapPass<'tcx> for QualifyAndPromoteConstants { + fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, map: &mut MirMap<'tcx>) { + let mut qualif_map = DefIdMap(); + + // First, visit `const` items, potentially recursing, to get + // accurate MUTABLE_INTERIOR and NEEDS_DROP qualifications. + for &id in map.map.keys() { + let def_id = tcx.map.local_def_id(id); + let _task = tcx.dep_graph.in_task(self.dep_node(def_id)); + let src = MirSource::from_node(tcx, id); + if let MirSource::Const(_) = src { + qualify_const_item_cached(tcx, &mut qualif_map, Some(map), def_id); + } + } + + // Then, handle everything else, without recursing, + // as the MIR map is not shared, since promotion + // in functions (including `const fn`) mutates it. + for (&id, mir) in &mut map.map { + let def_id = tcx.map.local_def_id(id); + let _task = tcx.dep_graph.in_task(self.dep_node(def_id)); + let src = MirSource::from_node(tcx, id); + let mode = match src { + MirSource::Fn(_) => { + if is_const_fn(tcx, def_id) { + Mode::ConstFn + } else { + Mode::Fn + } + } + MirSource::Const(_) => continue, + MirSource::Static(_, hir::MutImmutable) => Mode::Static, + MirSource::Static(_, hir::MutMutable) => Mode::StaticMut, + MirSource::Promoted(..) 
=> bug!() + }; + let param_env = ty::ParameterEnvironment::for_item(tcx, id); + + if mode == Mode::Fn || mode == Mode::ConstFn { + // This is ugly because Qualifier holds onto mir, + // which can't be mutated until its scope ends. + let (temps, candidates) = { + let mut qualifier = Qualifier::new(tcx, param_env, &mut qualif_map, + None, def_id, mir, mode); + if mode == Mode::ConstFn { + // Enforce a constant-like CFG for `const fn`. + qualifier.qualify_const(); + } else { + while let Some((bb, data)) = qualifier.rpo.next() { + qualifier.visit_basic_block_data(bb, data); + } + } + + (qualifier.temp_promotion_state, + qualifier.promotion_candidates) + }; + + // Do the actual promotion, now that we know what's viable. + promote_consts::promote_candidates(mir, tcx, temps, candidates); + } else { + let mut qualifier = Qualifier::new(tcx, param_env, &mut qualif_map, + None, def_id, mir, mode); + qualifier.qualify_const(); + } + + // Statics must be Sync. + if mode == Mode::Static { + let ty = mir.return_ty.unwrap(); + tcx.infer_ctxt(None, None, ProjectionMode::AnyFinal).enter(|infcx| { + let cause = traits::ObligationCause::new(mir.span, id, traits::SharedStatic); + let mut fulfillment_cx = traits::FulfillmentContext::new(); + fulfillment_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause); + if let Err(err) = fulfillment_cx.select_all_or_error(&infcx) { + infcx.report_fulfillment_errors(&err); + } + + if let Err(errors) = fulfillment_cx.select_rfc1592_obligations(&infcx) { + infcx.report_fulfillment_errors_as_warnings(&errors, id); + } + }); + } + } + } +} diff --git a/src/librustc_mir/transform/remove_dead_blocks.rs b/src/librustc_mir/transform/remove_dead_blocks.rs index dc1ddad124..44f3ce7361 100644 --- a/src/librustc_mir/transform/remove_dead_blocks.rs +++ b/src/librustc_mir/transform/remove_dead_blocks.rs @@ -35,17 +35,16 @@ use rustc_data_structures::bitvec::BitVector; use rustc::ty::TyCtxt; use rustc::mir::repr::*; -use rustc::mir::transform::{Pass, MirPass}; -use syntax::ast::NodeId; +use rustc::mir::transform::{Pass, MirPass, MirSource}; pub struct RemoveDeadBlocks; impl<'tcx> MirPass<'tcx> for RemoveDeadBlocks { - fn run_pass(&mut self, _: &TyCtxt<'tcx>, _: NodeId, mir: &mut Mir<'tcx>) { + fn run_pass<'a>(&mut self, _: TyCtxt<'a, 'tcx, 'tcx>, + _: MirSource, mir: &mut Mir<'tcx>) { let mut seen = BitVector::new(mir.basic_blocks.len()); - // These blocks are always required. + // This block is always required. seen.insert(START_BLOCK.index()); - seen.insert(END_BLOCK.index()); let mut worklist = Vec::with_capacity(4); worklist.push(START_BLOCK); diff --git a/src/librustc_mir/transform/simplify_cfg.rs b/src/librustc_mir/transform/simplify_cfg.rs index 00b8f5c093..526157a49c 100644 --- a/src/librustc_mir/transform/simplify_cfg.rs +++ b/src/librustc_mir/transform/simplify_cfg.rs @@ -8,74 +8,155 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
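To illustrate the BoundSync obligation registered in the pass above (editor's example, not taken from the patch): an immutable static whose type is not Sync is rejected at this point, since statics are visible to every thread.

    use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT};

    static BAD: *const u32 = 0 as *const u32;     // rejected: `*const u32` is not Sync
    static GOOD: AtomicUsize = ATOMIC_USIZE_INIT; // accepted: AtomicUsize is Sync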
+use rustc_data_structures::bitvec::BitVector; use rustc::middle::const_val::ConstVal; use rustc::ty::TyCtxt; use rustc::mir::repr::*; -use rustc::mir::transform::{MirPass, Pass}; +use rustc::mir::transform::{MirPass, MirSource, Pass}; use pretty; -use syntax::ast::NodeId; +use std::mem; use super::remove_dead_blocks::RemoveDeadBlocks; +use traversal; + pub struct SimplifyCfg; impl SimplifyCfg { pub fn new() -> SimplifyCfg { SimplifyCfg } +} + +impl<'tcx> MirPass<'tcx> for SimplifyCfg { + fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, src: MirSource, mir: &mut Mir<'tcx>) { + simplify_branches(mir); + RemoveDeadBlocks.run_pass(tcx, src, mir); + merge_consecutive_blocks(mir); + RemoveDeadBlocks.run_pass(tcx, src, mir); + pretty::dump_mir(tcx, "simplify_cfg", &0, src, mir, None); + + // FIXME: Should probably be moved into some kind of pass manager + mir.basic_blocks.shrink_to_fit(); + } +} + +impl Pass for SimplifyCfg {} + +fn merge_consecutive_blocks(mir: &mut Mir) { + // Build the precedecessor map for the MIR + let mut pred_count = vec![0u32; mir.basic_blocks.len()]; + for (_, data) in traversal::preorder(mir) { + if let Some(ref term) = data.terminator { + for &tgt in term.successors().iter() { + pred_count[tgt.index()] += 1; + } + } + } + + loop { + let mut changed = false; + let mut seen = BitVector::new(mir.basic_blocks.len()); + let mut worklist = vec![START_BLOCK]; + while let Some(bb) = worklist.pop() { + // Temporarily take ownership of the terminator we're modifying to keep borrowck happy + let mut terminator = mir.basic_block_data_mut(bb).terminator.take() + .expect("invalid terminator state"); + + // See if we can merge the target block into this one + loop { + let mut inner_change = false; - fn remove_goto_chains(&self, mir: &mut Mir) -> bool { - // Find the target at the end of the jump chain, return None if there is a loop - fn final_target(mir: &Mir, mut target: BasicBlock) -> Option { - // Keep track of already seen blocks to detect loops - let mut seen: Vec = Vec::with_capacity(8); - - while mir.basic_block_data(target).statements.is_empty() { - // NB -- terminator may have been swapped with `None` - // below, in which case we have a cycle and just want - // to stop - if let Some(ref terminator) = mir.basic_block_data(target).terminator { - match terminator.kind { - TerminatorKind::Goto { target: next } => { - if seen.contains(&next) { - return None; + if let TerminatorKind::Goto { target } = terminator.kind { + // Don't bother trying to merge a block into itself + if target == bb { + break; + } + + let num_insts = mir.basic_block_data(target).statements.len(); + match mir.basic_block_data(target).terminator().kind { + TerminatorKind::Goto { target: new_target } if num_insts == 0 => { + inner_change = true; + terminator.kind = TerminatorKind::Goto { target: new_target }; + pred_count[target.index()] -= 1; + pred_count[new_target.index()] += 1; + } + _ if pred_count[target.index()] == 1 => { + inner_change = true; + let mut stmts = Vec::new(); + { + let target_data = mir.basic_block_data_mut(target); + mem::swap(&mut stmts, &mut target_data.statements); + mem::swap(&mut terminator, target_data.terminator_mut()); } - seen.push(next); - target = next; + + mir.basic_block_data_mut(bb).statements.append(&mut stmts); } - _ => break + _ => {} + }; + } + + for target in terminator.successors_mut() { + let new_target = match final_target(mir, *target) { + Some(new_target) => new_target, + None if mir.basic_block_data(bb).statements.is_empty() => bb, + None => continue + 
}; + if *target != new_target { + inner_change = true; + pred_count[target.index()] -= 1; + pred_count[new_target.index()] += 1; + *target = new_target; } - } else { - break + } + + changed |= inner_change; + if !inner_change { + break; } } - Some(target) + mir.basic_block_data_mut(bb).terminator = Some(terminator); + + for succ in mir.basic_block_data(bb).terminator().successors().iter() { + if seen.insert(succ.index()) { + worklist.push(*succ); + } + } } - let mut changed = false; - for bb in mir.all_basic_blocks() { - // Temporarily take ownership of the terminator we're modifying to keep borrowck happy - let mut terminator = mir.basic_block_data_mut(bb).terminator.take() - .expect("invalid terminator state"); - - debug!("remove_goto_chains: bb={:?} terminator={:?}", bb, terminator); - - for target in terminator.successors_mut() { - let new_target = match final_target(mir, *target) { - Some(new_target) => new_target, - None if mir.basic_block_data(bb).statements.is_empty() => bb, - None => continue - }; - changed |= *target != new_target; - *target = new_target; + if !changed { + break; + } + } +} + +// Find the target at the end of the jump chain, return None if there is a loop +fn final_target(mir: &Mir, mut target: BasicBlock) -> Option { + // Keep track of already seen blocks to detect loops + let mut seen: Vec = Vec::with_capacity(8); + + while mir.basic_block_data(target).statements.is_empty() { + // NB -- terminator may have been swapped with `None` in + // merge_consecutive_blocks, in which case we have a cycle and just want + // to stop + match mir.basic_block_data(target).terminator { + Some(Terminator { kind: TerminatorKind::Goto { target: next }, .. }) => { + if seen.contains(&next) { + return None; + } + seen.push(next); + target = next; } - mir.basic_block_data_mut(bb).terminator = Some(terminator); + _ => break } - changed } - fn simplify_branches(&self, mir: &mut Mir) -> bool { + Some(target) +} + +fn simplify_branches(mir: &mut Mir) { + loop { let mut changed = false; for bb in mir.all_basic_blocks() { @@ -107,24 +188,8 @@ impl SimplifyCfg { } } - changed - } -} - -impl<'tcx> MirPass<'tcx> for SimplifyCfg { - fn run_pass(&mut self, tcx: &TyCtxt<'tcx>, id: NodeId, mir: &mut Mir<'tcx>) { - let mut counter = 0; - let mut changed = true; - while changed { - pretty::dump_mir(tcx, "simplify_cfg", &counter, id, mir, None); - counter += 1; - changed = self.simplify_branches(mir); - changed |= self.remove_goto_chains(mir); - RemoveDeadBlocks.run_pass(tcx, id, mir); + if !changed { + break; } - // FIXME: Should probably be moved into some kind of pass manager - mir.basic_blocks.shrink_to_fit(); } } - -impl Pass for SimplifyCfg {} diff --git a/src/librustc_mir/transform/type_check.rs b/src/librustc_mir/transform/type_check.rs index 11ac1fa8f8..40157aa934 100644 --- a/src/librustc_mir/transform/type_check.rs +++ b/src/librustc_mir/transform/type_check.rs @@ -12,16 +12,16 @@ #![allow(unreachable_code)] use rustc::dep_graph::DepNode; +use rustc::hir::def_id::DefId; use rustc::infer::{self, InferCtxt, InferOk}; use rustc::traits::{self, ProjectionMode}; use rustc::ty::fold::TypeFoldable; use rustc::ty::{self, Ty, TyCtxt}; use rustc::mir::repr::*; use rustc::mir::tcx::LvalueTy; -use rustc::mir::transform::{MirPass, Pass}; +use rustc::mir::transform::{MirPass, MirSource, Pass}; use rustc::mir::visit::{self, Visitor}; use std::fmt; -use syntax::ast::NodeId; use syntax::codemap::{Span, DUMMY_SP}; macro_rules! 
span_mirbug { @@ -54,14 +54,14 @@ enum FieldAccessError { /// The sanitize_XYZ methods here take an MIR object and compute its /// type, calling `span_mirbug` and returning an error type if there /// is a problem. -struct TypeVerifier<'a, 'b: 'a, 'tcx: 'b> { - cx: &'a mut TypeChecker<'b, 'tcx>, +struct TypeVerifier<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> { + cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>, mir: &'a Mir<'tcx>, last_span: Span, errors_reported: bool } -impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { +impl<'a, 'b, 'gcx, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'gcx, 'tcx> { fn visit_span(&mut self, span: &Span) { if *span != DUMMY_SP { self.last_span = *span; @@ -104,8 +104,8 @@ impl<'a, 'b, 'tcx> Visitor<'tcx> for TypeVerifier<'a, 'b, 'tcx> { } } -impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { - fn new(cx: &'a mut TypeChecker<'b, 'tcx>, mir: &'a Mir<'tcx>) -> Self { +impl<'a, 'b, 'gcx, 'tcx> TypeVerifier<'a, 'b, 'gcx, 'tcx> { + fn new(cx: &'a mut TypeChecker<'b, 'gcx, 'tcx>, mir: &'a Mir<'tcx>) -> Self { TypeVerifier { cx: cx, mir: mir, @@ -114,11 +114,11 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { } } - fn tcx(&self) -> &'a TyCtxt<'tcx> { + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.cx.infcx.tcx } - fn infcx(&self) -> &'a InferCtxt<'a, 'tcx> { + fn infcx(&self) -> &'a InferCtxt<'a, 'gcx, 'tcx> { self.cx.infcx } @@ -237,7 +237,7 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { let fty = self.sanitize_type(lvalue, fty); match self.field_ty(lvalue, base, field) { Ok(ty) => { - if let Err(terr) = self.cx.mk_eqty(span, ty, fty) { + if let Err(terr) = self.cx.eq_types(span, ty, fty) { span_mirbug!( self, lvalue, "bad field access ({:?}: {:?}): {:?}", ty, fty, terr); @@ -276,8 +276,8 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { if adt_def.is_univariant() => { (&adt_def.variants[0], substs) } - ty::TyTuple(ref tys) | ty::TyClosure(_, box ty::ClosureSubsts { - upvar_tys: ref tys, .. + ty::TyTuple(tys) | ty::TyClosure(_, ty::ClosureSubsts { + upvar_tys: tys, .. }) => { return match tys.get(field.index()) { Some(&ty) => Ok(ty), @@ -318,14 +318,14 @@ impl<'a, 'b, 'tcx> TypeVerifier<'a, 'b, 'tcx> { } } -pub struct TypeChecker<'a, 'tcx: 'a> { - infcx: &'a InferCtxt<'a, 'tcx>, +pub struct TypeChecker<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + infcx: &'a InferCtxt<'a, 'gcx, 'tcx>, fulfillment_cx: traits::FulfillmentContext<'tcx>, last_span: Span } -impl<'a, 'tcx> TypeChecker<'a, 'tcx> { - fn new(infcx: &'a InferCtxt<'a, 'tcx>) -> Self { +impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> { + fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>) -> Self { TypeChecker { infcx: infcx, fulfillment_cx: traits::FulfillmentContext::new(), @@ -333,25 +333,23 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } } - fn mk_subty(&self, span: Span, sup: Ty<'tcx>, sub: Ty<'tcx>) - -> infer::UnitResult<'tcx> + fn sub_types(&self, span: Span, sup: Ty<'tcx>, sub: Ty<'tcx>) + -> infer::UnitResult<'tcx> { - infer::mk_subty(self.infcx, false, infer::TypeOrigin::Misc(span), - sup, sub) + self.infcx.sub_types(false, infer::TypeOrigin::Misc(span), sup, sub) // FIXME(#32730) propagate obligations .map(|InferOk { obligations, .. 
}| assert!(obligations.is_empty())) } - fn mk_eqty(&self, span: Span, a: Ty<'tcx>, b: Ty<'tcx>) + fn eq_types(&self, span: Span, a: Ty<'tcx>, b: Ty<'tcx>) -> infer::UnitResult<'tcx> { - infer::mk_eqty(self.infcx, false, infer::TypeOrigin::Misc(span), - a, b) + self.infcx.eq_types(false, infer::TypeOrigin::Misc(span), a, b) // FIXME(#32730) propagate obligations .map(|InferOk { obligations, .. }| assert!(obligations.is_empty())) } - fn tcx(&self) -> &'a TyCtxt<'tcx> { + fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> { self.infcx.tcx } @@ -363,7 +361,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { let lv_ty = mir.lvalue_ty(tcx, lv).to_ty(tcx); let rv_ty = mir.rvalue_ty(tcx, rv); if let Some(rv_ty) = rv_ty { - if let Err(terr) = self.mk_subty(self.last_span, rv_ty, lv_ty) { + if let Err(terr) = self.sub_types(self.last_span, rv_ty, lv_ty) { span_mirbug!(self, stmt, "bad assignment ({:?} = {:?}): {:?}", lv_ty, rv_ty, terr); } @@ -399,7 +397,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } TerminatorKind::SwitchInt { ref discr, switch_ty, .. } => { let discr_ty = mir.lvalue_ty(tcx, discr).to_ty(tcx); - if let Err(terr) = self.mk_subty(self.last_span, discr_ty, switch_ty) { + if let Err(terr) = self.sub_types(self.last_span, discr_ty, switch_ty) { span_mirbug!(self, term, "bad SwitchInt ({:?} on {:?}): {:?}", switch_ty, discr_ty, terr); } @@ -456,7 +454,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } (&Some((ref dest, _)), ty::FnConverging(ty)) => { let dest_ty = mir.lvalue_ty(tcx, dest).to_ty(tcx); - if let Err(terr) = self.mk_subty(self.last_span, ty, dest_ty) { + if let Err(terr) = self.sub_types(self.last_span, ty, dest_ty) { span_mirbug!(self, term, "call dest mismatch ({:?} <- {:?}): {:?}", dest_ty, ty, terr); @@ -482,7 +480,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } for (n, (fn_arg, op_arg)) in sig.inputs.iter().zip(args).enumerate() { let op_arg_ty = mir.operand_ty(self.tcx(), op_arg); - if let Err(terr) = self.mk_subty(self.last_span, op_arg_ty, fn_arg) { + if let Err(terr) = self.sub_types(self.last_span, op_arg_ty, fn_arg) { span_mirbug!(self, term, "bad arg #{:?} ({:?} <- {:?}): {:?}", n, fn_arg, op_arg_ty, terr); } @@ -537,7 +535,7 @@ impl<'a, 'tcx> TypeChecker<'a, 'tcx> { } }; - if let Err(terr) = self.mk_subty(self.last_span, arg_ty, pointee_ty) { + if let Err(terr) = self.sub_types(self.last_span, arg_ty, pointee_ty) { span_mirbug!(self, term, "bad box_free arg ({:?} <- {:?}): {:?}", pointee_ty, arg_ty, terr); } @@ -578,31 +576,32 @@ impl TypeckMir { } impl<'tcx> MirPass<'tcx> for TypeckMir { - fn run_pass(&mut self, tcx: &TyCtxt<'tcx>, id: NodeId, mir: &mut Mir<'tcx>) { + fn run_pass<'a>(&mut self, tcx: TyCtxt<'a, 'tcx, 'tcx>, + src: MirSource, mir: &mut Mir<'tcx>) { if tcx.sess.err_count() > 0 { // compiling a broken program can obviously result in a // broken MIR, so try not to report duplicate errors. 
return; } - let def_id = tcx.map.local_def_id(id); - let _task = tcx.dep_graph.in_task(DepNode::MirTypeck(def_id)); - let param_env = ty::ParameterEnvironment::for_item(tcx, id); - let infcx = infer::new_infer_ctxt(tcx, - &tcx.tables, - Some(param_env), - ProjectionMode::AnyFinal); - let mut checker = TypeChecker::new(&infcx); - { - let mut verifier = TypeVerifier::new(&mut checker, mir); - verifier.visit_mir(mir); - if verifier.errors_reported { - // don't do further checks to avoid ICEs - return; + let param_env = ty::ParameterEnvironment::for_item(tcx, src.item_id()); + tcx.infer_ctxt(None, Some(param_env), ProjectionMode::AnyFinal).enter(|infcx| { + let mut checker = TypeChecker::new(&infcx); + { + let mut verifier = TypeVerifier::new(&mut checker, mir); + verifier.visit_mir(mir); + if verifier.errors_reported { + // don't do further checks to avoid ICEs + return; + } } - } - checker.typeck_mir(mir); - checker.verify_obligations(mir); + checker.typeck_mir(mir); + checker.verify_obligations(mir); + }); } } -impl Pass for TypeckMir {} +impl Pass for TypeckMir { + fn dep_node(&self, def_id: DefId) -> DepNode { + DepNode::MirTypeck(def_id) + } +} diff --git a/src/librustc_mir/traversal.rs b/src/librustc_mir/traversal.rs index 8b6821136f..c58b5c8772 100644 --- a/src/librustc_mir/traversal.rs +++ b/src/librustc_mir/traversal.rs @@ -19,6 +19,8 @@ use rustc::mir::repr::*; /// Preorder traversal is when each node is visited before an of it's /// successors /// +/// ```text +/// /// A /// / \ /// / \ @@ -26,6 +28,7 @@ use rustc::mir::repr::*; /// \ / /// \ / /// D +/// ``` /// /// A preorder traversal of this graph is either `A B D C` or `A C D B` #[derive(Clone)] @@ -80,6 +83,9 @@ impl<'a, 'tcx> Iterator for Preorder<'a, 'tcx> { /// Postorder traversal is when each node is visited after all of it's /// successors, except when the successor is only reachable by a back-edge /// +/// +/// ```text +/// /// A /// / \ /// / \ @@ -87,6 +93,7 @@ impl<'a, 'tcx> Iterator for Preorder<'a, 'tcx> { /// \ / /// \ / /// D +/// ``` /// /// A Postorder traversal of this graph is `D B C A` or `D C B A` pub struct Postorder<'a, 'tcx: 'a> { @@ -215,6 +222,8 @@ impl<'a, 'tcx> Iterator for Postorder<'a, 'tcx> { /// This is different to a preorder traversal and represents a natural /// linearisation of control-flow. /// +/// ```text +/// /// A /// / \ /// / \ @@ -222,6 +231,7 @@ impl<'a, 'tcx> Iterator for Postorder<'a, 'tcx> { /// \ / /// \ / /// D +/// ``` /// /// A reverse postorder traversal of this graph is either `A B C D` or `A C B D` /// Note that for a graph containing no loops (i.e. A DAG), this is equivalent to diff --git a/src/librustc_passes/Cargo.toml b/src/librustc_passes/Cargo.toml index fa6bd3dfb6..0c85ffd2e9 100644 --- a/src/librustc_passes/Cargo.toml +++ b/src/librustc_passes/Cargo.toml @@ -12,4 +12,5 @@ crate-type = ["dylib"] log = { path = "../liblog" } rustc = { path = "../librustc" } rustc_const_eval = { path = "../librustc_const_eval" } +rustc_const_math = { path = "../librustc_const_math" } syntax = { path = "../libsyntax" } diff --git a/src/librustc_passes/const_fn.rs b/src/librustc_passes/const_fn.rs deleted file mode 100644 index 97a4c14863..0000000000 --- a/src/librustc_passes/const_fn.rs +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. 
This file may not be copied, modified, or distributed -// except according to those terms. - -//! Verifies that const fn arguments are immutable by value bindings -//! and the const fn body doesn't contain any statements - -use rustc::session::{Session, CompileResult}; - -use syntax::ast::{self, PatKind}; -use syntax::visit::{self, Visitor, FnKind}; -use syntax::codemap::Span; - -pub fn check_crate(sess: &Session, krate: &ast::Crate) -> CompileResult { - sess.track_errors(|| { - visit::walk_crate(&mut CheckConstFn{ sess: sess }, krate); - }) -} - -struct CheckConstFn<'a> { - sess: &'a Session, -} - -struct CheckBlock<'a> { - sess: &'a Session, - kind: &'static str, -} - -impl<'a, 'v> Visitor<'v> for CheckBlock<'a> { - fn visit_block(&mut self, block: &'v ast::Block) { - check_block(&self.sess, block, self.kind); - CheckConstFn{ sess: self.sess}.visit_block(block); - } - fn visit_expr(&mut self, e: &'v ast::Expr) { - if let ast::ExprKind::Closure(..) = e.node { - CheckConstFn{ sess: self.sess}.visit_expr(e); - } else { - visit::walk_expr(self, e); - } - } - fn visit_item(&mut self, _i: &'v ast::Item) { bug!("should be handled in CheckConstFn") } - fn visit_fn(&mut self, - _fk: FnKind<'v>, - _fd: &'v ast::FnDecl, - _b: &'v ast::Block, - _s: Span, - _fn_id: ast::NodeId) { bug!("should be handled in CheckConstFn") } -} - -fn check_block(sess: &Session, b: &ast::Block, kind: &'static str) { - // Check all statements in the block - for stmt in &b.stmts { - let span = match stmt.node { - ast::StmtKind::Decl(ref decl, _) => { - match decl.node { - ast::DeclKind::Local(_) => decl.span, - - // Item statements are allowed - ast::DeclKind::Item(_) => continue, - } - } - ast::StmtKind::Expr(ref expr, _) => expr.span, - ast::StmtKind::Semi(ref semi, _) => semi.span, - ast::StmtKind::Mac(..) => bug!(), - }; - span_err!(sess, span, E0016, - "blocks in {}s are limited to items and tail expressions", kind); - } -} - -impl<'a, 'v> Visitor<'v> for CheckConstFn<'a> { - fn visit_item(&mut self, i: &'v ast::Item) { - visit::walk_item(self, i); - match i.node { - ast::ItemKind::Const(_, ref e) => { - CheckBlock{ sess: self.sess, kind: "constant"}.visit_expr(e) - }, - ast::ItemKind::Static(_, _, ref e) => { - CheckBlock{ sess: self.sess, kind: "static"}.visit_expr(e) - }, - _ => {}, - } - } - - fn visit_fn(&mut self, - fk: FnKind<'v>, - fd: &'v ast::FnDecl, - b: &'v ast::Block, - s: Span, - _fn_id: ast::NodeId) { - visit::walk_fn(self, fk, fd, b, s); - match fk { - FnKind::ItemFn(_, _, _, ast::Constness::Const, _, _) => {}, - FnKind::Method(_, m, _) if m.constness == ast::Constness::Const => {}, - _ => return, - } - - // Ensure the arguments are simple, not mutable/by-ref or patterns. 
- for arg in &fd.inputs { - match arg.pat.node { - PatKind::Wild => {} - PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable), _, None) => {} - _ => { - span_err!(self.sess, arg.pat.span, E0022, - "arguments of constant functions can only \ - be immutable by-value bindings"); - } - } - } - check_block(&self.sess, b, "const function"); - } -} diff --git a/src/librustc_passes/consts.rs b/src/librustc_passes/consts.rs index 4659bb389d..b1bb48aace 100644 --- a/src/librustc_passes/consts.rs +++ b/src/librustc_passes/consts.rs @@ -28,16 +28,18 @@ use rustc::dep_graph::DepNode; use rustc::ty::cast::{CastKind}; use rustc_const_eval::{ConstEvalErr, lookup_const_fn_by_id, compare_lit_exprs}; use rustc_const_eval::{eval_const_expr_partial, lookup_const_by_id}; -use rustc_const_eval::ErrKind::{IndexOpFeatureGated, UnimplementedConstVal}; +use rustc_const_eval::ErrKind::{IndexOpFeatureGated, UnimplementedConstVal, MiscCatchAll, Math}; +use rustc_const_eval::ErrKind::{ErroneousReferencedConstant, MiscBinaryOp, NonConstPath}; +use rustc_const_eval::ErrKind::UnresolvedPath; use rustc_const_eval::EvalHint::ExprTypeChecked; +use rustc_const_math::{ConstMathErr, Op}; use rustc::hir::def::Def; use rustc::hir::def_id::DefId; use rustc::middle::expr_use_visitor as euv; -use rustc::infer; use rustc::middle::mem_categorization as mc; use rustc::middle::mem_categorization::Categorization; use rustc::ty::{self, Ty, TyCtxt}; -use rustc::traits::{self, ProjectionMode}; +use rustc::traits::ProjectionMode; use rustc::util::nodemap::NodeMap; use rustc::middle::const_qualif::ConstQualif; use rustc::lint::builtin::CONST_ERR; @@ -45,7 +47,6 @@ use rustc::lint::builtin::CONST_ERR; use rustc::hir::{self, PatKind}; use syntax::ast; use syntax::codemap::Span; -use syntax::feature_gate::UnstableFeatures; use rustc::hir::intravisit::{self, FnKind, Visitor}; use std::collections::hash_map::Entry; @@ -66,15 +67,15 @@ enum Mode { } struct CheckCrateVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, mode: Mode, qualif: ConstQualif, rvalue_borrows: NodeMap } -impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> { +impl<'a, 'gcx> CheckCrateVisitor<'a, 'gcx> { fn with_mode(&mut self, mode: Mode, f: F) -> R where - F: FnOnce(&mut CheckCrateVisitor<'a, 'tcx>) -> R, + F: FnOnce(&mut CheckCrateVisitor<'a, 'gcx>) -> R, { let (old_mode, old_qualif) = (self.mode, self.qualif); self.mode = mode; @@ -85,20 +86,17 @@ impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> { r } - fn with_euv<'b, F, R>(&'b mut self, item_id: Option, f: F) -> R where - F: for<'t> FnOnce(&mut euv::ExprUseVisitor<'b, 't, 'b, 'tcx>) -> R, + fn with_euv(&mut self, item_id: Option, f: F) -> R where + F: for<'b, 'tcx> FnOnce(&mut euv::ExprUseVisitor<'b, 'gcx, 'tcx>) -> R, { let param_env = match item_id { Some(item_id) => ty::ParameterEnvironment::for_item(self.tcx, item_id), None => self.tcx.empty_parameter_environment() }; - let infcx = infer::new_infer_ctxt(self.tcx, - &self.tcx.tables, - Some(param_env), - ProjectionMode::AnyFinal); - - f(&mut euv::ExprUseVisitor::new(self, &infcx)) + self.tcx.infer_ctxt(None, Some(param_env), ProjectionMode::AnyFinal).enter(|infcx| { + f(&mut euv::ExprUseVisitor::new(self, &infcx)) + }) } fn global_expr(&mut self, mode: Mode, expr: &hir::Expr) -> ConstQualif { @@ -114,6 +112,7 @@ impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> { match err.kind { UnimplementedConstVal(_) => {}, IndexOpFeatureGated => {}, + ErroneousReferencedConstant(_) => {}, _ => self.tcx.sess.add_lint(CONST_ERR, expr.id, expr.span, 
format!("constant evaluation error: {}. This will \ become a HARD ERROR in the future", @@ -176,32 +175,11 @@ impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> { /// Returns true if the call is to a const fn or method. fn handle_const_fn_call(&mut self, - expr: &hir::Expr, + _expr: &hir::Expr, def_id: DefId, - ret_ty: Ty<'tcx>) + ret_ty: Ty<'gcx>) -> bool { if let Some(fn_like) = lookup_const_fn_by_id(self.tcx, def_id) { - if - // we are in a static/const initializer - self.mode != Mode::Var && - - // feature-gate is not enabled - !self.tcx.sess.features.borrow().const_fn && - - // this doesn't come from a macro that has #[allow_internal_unstable] - !self.tcx.sess.codemap().span_allows_unstable(expr.span) - { - let mut err = self.tcx.sess.struct_span_err( - expr.span, - "const fns are an unstable feature"); - fileline_help!( - &mut err, - expr.span, - "in Nightly builds, add `#![feature(const_fn)]` to the crate \ - attributes to enable"); - err.emit(); - } - let qualif = self.fn_like(fn_like.kind(), fn_like.decl(), fn_like.body(), @@ -242,39 +220,6 @@ impl<'a, 'tcx> CheckCrateVisitor<'a, 'tcx> { Mode::Var => bug!(), } } - - fn check_static_mut_type(&self, e: &hir::Expr) { - let node_ty = self.tcx.node_id_to_type(e.id); - let tcontents = node_ty.type_contents(self.tcx); - - let suffix = if tcontents.has_dtor() { - "destructors" - } else if tcontents.owns_owned() { - "boxes" - } else { - return - }; - - span_err!(self.tcx.sess, e.span, E0397, - "mutable statics are not allowed to have {}", suffix); - } - - fn check_static_type(&self, e: &hir::Expr) { - let ty = self.tcx.node_id_to_type(e.id); - let infcx = infer::new_infer_ctxt(self.tcx, - &self.tcx.tables, - None, - ProjectionMode::AnyFinal); - let cause = traits::ObligationCause::new(e.span, e.id, traits::SharedStatic); - let mut fulfillment_cx = traits::FulfillmentContext::new(); - fulfillment_cx.register_builtin_bound(&infcx, ty, ty::BoundSync, cause); - match fulfillment_cx.select_all_or_error(&infcx) { - Ok(()) => { }, - Err(ref errors) => { - traits::report_fulfillment_errors(&infcx, errors); - } - } - } } impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { @@ -283,11 +228,9 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { assert_eq!(self.mode, Mode::Var); match i.node { hir::ItemStatic(_, hir::MutImmutable, ref expr) => { - self.check_static_type(&expr); self.global_expr(Mode::Static, &expr); } hir::ItemStatic(_, hir::MutMutable, ref expr) => { - self.check_static_mut_type(&expr); self.global_expr(Mode::StaticMut, &expr); } hir::ItemConst(_, ref expr) => { @@ -354,8 +297,9 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { "lower range bound must be less than or equal to upper"); } None => { - self.tcx.sess.delay_span_bug(start.span, - "non-constant path in constant expr"); + span_err!(self.tcx.sess, p.span, E0014, + "paths in {}s may only refer to constants", + self.msg()); } } } @@ -378,8 +322,6 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { hir::StmtSemi(_, _) => {}, } self.add_qualif(ConstQualif::NOT_CONST); - // anything else should have been caught by check_const_fn - assert_eq!(self.mode, Mode::Var); } intravisit::walk_block(self, block); } @@ -435,29 +377,6 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { } intravisit::walk_expr(self, ex); } - // Division by zero and overflow checking. 
- hir::ExprBinary(op, _, _) => { - intravisit::walk_expr(self, ex); - let div_or_rem = op.node == hir::BiDiv || op.node == hir::BiRem; - match node_ty.sty { - ty::TyUint(_) | ty::TyInt(_) if div_or_rem => { - if !self.qualif.intersects(ConstQualif::NOT_CONST) { - match eval_const_expr_partial( - self.tcx, ex, ExprTypeChecked, None) { - Ok(_) => {} - Err(ConstEvalErr { kind: UnimplementedConstVal(_), ..}) | - Err(ConstEvalErr { kind: IndexOpFeatureGated, ..}) => {}, - Err(msg) => { - self.tcx.sess.add_lint(CONST_ERR, ex.id, - msg.span, - msg.description().into_owned()) - } - } - } - } - _ => {} - } - } _ => intravisit::walk_expr(self, ex) } @@ -472,11 +391,6 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { let tc = node_ty.type_contents(self.tcx); if self.qualif.intersects(ConstQualif::MUTABLE_MEM) && tc.interior_unsafe() { outer = outer | ConstQualif::NOT_CONST; - if self.mode != Mode::Var { - span_err!(self.tcx.sess, ex.span, E0492, - "cannot borrow a constant which contains \ - interior mutability, create a static instead"); - } } // If the reference has to be 'static, avoid in-place initialization // as that will end up pointing to the stack instead. @@ -491,10 +405,6 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { if self.mode == Mode::Var { outer = outer | ConstQualif::NOT_CONST; self.add_qualif(ConstQualif::MUTABLE_MEM); - } else { - span_err!(self.tcx.sess, ex.span, E0017, - "references in {}s may only refer \ - to immutable values", self.msg()) } } if !self.qualif.intersects(ConstQualif::NON_STATIC_BORROWS) { @@ -503,6 +413,27 @@ impl<'a, 'tcx, 'v> Visitor<'v> for CheckCrateVisitor<'a, 'tcx> { } None => {} } + + if self.mode == Mode::Var && !self.qualif.intersects(ConstQualif::NOT_CONST) { + match eval_const_expr_partial(self.tcx, ex, ExprTypeChecked, None) { + Ok(_) => {} + Err(ConstEvalErr { kind: UnimplementedConstVal(_), ..}) | + Err(ConstEvalErr { kind: MiscCatchAll, ..}) | + Err(ConstEvalErr { kind: MiscBinaryOp, ..}) | + Err(ConstEvalErr { kind: NonConstPath, ..}) | + Err(ConstEvalErr { kind: UnresolvedPath, ..}) | + Err(ConstEvalErr { kind: ErroneousReferencedConstant(_), ..}) | + Err(ConstEvalErr { kind: Math(ConstMathErr::Overflow(Op::Shr)), ..}) | + Err(ConstEvalErr { kind: Math(ConstMathErr::Overflow(Op::Shl)), ..}) | + Err(ConstEvalErr { kind: IndexOpFeatureGated, ..}) => {}, + Err(msg) => { + self.tcx.sess.add_lint(CONST_ERR, ex.id, + msg.span, + msg.description().into_owned()) + } + } + } + self.tcx.const_qualif_map.borrow_mut().insert(ex.id, self.qualif); // Don't propagate certain flags. self.qualif = outer | (self.qualif - ConstQualif::HAS_STATIC_BORROWS); @@ -521,11 +452,6 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, ty::TyStruct(def, _) | ty::TyEnum(def, _) if def.has_dtor() => { v.add_qualif(ConstQualif::NEEDS_DROP); - if v.mode != Mode::Var { - span_err!(v.tcx.sess, e.span, E0493, - "{}s are not allowed to have destructors", - v.msg()); - } } _ => {} } @@ -536,17 +462,9 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, hir::ExprBinary(..) | hir::ExprIndex(..) 
if v.tcx.tables.borrow().method_map.contains_key(&method_call) => { v.add_qualif(ConstQualif::NOT_CONST); - if v.mode != Mode::Var { - span_err!(v.tcx.sess, e.span, E0011, - "user-defined operators are not allowed in {}s", v.msg()); - } } hir::ExprBox(_) => { v.add_qualif(ConstQualif::NOT_CONST); - if v.mode != Mode::Var { - span_err!(v.tcx.sess, e.span, E0010, - "allocations are not allowed in {}s", v.msg()); - } } hir::ExprUnary(op, ref inner) => { match v.tcx.node_id_to_type(inner.id).sty { @@ -554,10 +472,6 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, assert!(op == hir::UnDeref); v.add_qualif(ConstQualif::NOT_CONST); - if v.mode != Mode::Var { - span_err!(v.tcx.sess, e.span, E0396, - "raw pointers cannot be dereferenced in {}s", v.msg()); - } } _ => {} } @@ -570,10 +484,6 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, op.node == hir::BiGe || op.node == hir::BiGt); v.add_qualif(ConstQualif::NOT_CONST); - if v.mode != Mode::Var { - span_err!(v.tcx.sess, e.span, E0395, - "raw pointers cannot be compared in {}s", v.msg()); - } } _ => {} } @@ -584,10 +494,6 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, None => span_bug!(e.span, "no kind for cast"), Some(&CastKind::PtrAddrCast) | Some(&CastKind::FnPtrAddrCast) => { v.add_qualif(ConstQualif::NOT_CONST); - if v.mode != Mode::Var { - span_err!(v.tcx.sess, e.span, E0018, - "raw pointers cannot be cast to integers in {}s", v.msg()); - } } _ => {} } @@ -612,11 +518,7 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, Some(Def::Static(..)) => { match v.mode { Mode::Static | Mode::StaticMut => {} - Mode::Const | Mode::ConstFn => { - span_err!(v.tcx.sess, e.span, E0013, - "{}s cannot refer to other statics, insert \ - an intermediate constant instead", v.msg()); - } + Mode::Const | Mode::ConstFn => {} Mode::Var => v.add_qualif(ConstQualif::NOT_CONST) } } @@ -632,14 +534,8 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, // Sadly, we can't determine whether the types are zero-sized. v.add_qualif(ConstQualif::NOT_CONST | ConstQualif::NON_ZERO_SIZED); } - def => { + _ => { v.add_qualif(ConstQualif::NOT_CONST); - if v.mode != Mode::Var { - debug!("(checking const) found bad def: {:?}", def); - span_err!(v.tcx.sess, e.span, E0014, - "paths in {}s may only refer to constants \ - or functions", v.msg()); - } } } } @@ -677,29 +573,6 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, }; if !is_const { v.add_qualif(ConstQualif::NOT_CONST); - if v.mode != Mode::Var { - // FIXME(#24111) Remove this check when const fn stabilizes - let (msg, note) = - if let UnstableFeatures::Disallow = v.tcx.sess.opts.unstable_features { - (format!("function calls in {}s are limited to \ - struct and enum constructors", - v.msg()), - Some("a limited form of compile-time function \ - evaluation is available on a nightly \ - compiler via `const fn`")) - } else { - (format!("function calls in {}s are limited \ - to constant functions, \ - struct and enum constructors", - v.msg()), - None) - }; - let mut err = struct_span_err!(v.tcx.sess, e.span, E0015, "{}", msg); - if let Some(note) = note { - err.span_note(e.span, note); - } - err.emit(); - } } } hir::ExprMethodCall(..) => { @@ -710,11 +583,6 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, }; if !is_const { v.add_qualif(ConstQualif::NOT_CONST); - if v.mode != Mode::Var { - span_err!(v.tcx.sess, e.span, E0378, - "method calls in {}s are limited to \ - constant inherent methods", v.msg()); - } } } hir::ExprStruct(..) 
=> { @@ -769,10 +637,6 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, hir::ExprAssignOp(..) | hir::ExprInlineAsm(..) => { v.add_qualif(ConstQualif::NOT_CONST); - if v.mode != Mode::Var { - span_err!(v.tcx.sess, e.span, E0019, - "{} contains unimplemented expression type", v.msg()); - } } } } @@ -792,17 +656,12 @@ fn check_adjustments<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Exp v.tcx.is_overloaded_autoderef(e.id, autoderef) }) { v.add_qualif(ConstQualif::NOT_CONST); - if v.mode != Mode::Var { - span_err!(v.tcx.sess, e.span, E0400, - "user-defined dereference operators are not allowed in {}s", - v.msg()); - } } } } } -pub fn check_crate(tcx: &TyCtxt) { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { tcx.visit_all_items_in_krate(DepNode::CheckConst, &mut CheckCrateVisitor { tcx: tcx, mode: Mode::Var, @@ -812,24 +671,16 @@ pub fn check_crate(tcx: &TyCtxt) { tcx.sess.abort_if_errors(); } -impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'gcx> { fn consume(&mut self, _consume_id: ast::NodeId, - consume_span: Span, + _consume_span: Span, cmt: mc::cmt, _mode: euv::ConsumeMode) { let mut cur = &cmt; loop { match cur.cat { Categorization::StaticItem => { - if self.mode != Mode::Var { - // statics cannot be consumed by value at any time, that would imply - // that they're an initializer (what a const is for) or kept in sync - // over time (not feasible), so deny it outright. - span_err!(self.tcx.sess, consume_span, E0394, - "cannot refer to other statics by value, use the \ - address-of operator or a constant instead"); - } break; } Categorization::Deref(ref cmt, _, _) | @@ -844,7 +695,7 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> { } fn borrow(&mut self, borrow_id: ast::NodeId, - borrow_span: Span, + _borrow_span: Span, cmt: mc::cmt<'tcx>, _loan_region: ty::Region, bk: ty::BorrowKind, @@ -862,7 +713,6 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> { } let mut cur = &cmt; - let mut is_interior = false; loop { match cur.cat { Categorization::Rvalue(..) => { @@ -887,20 +737,11 @@ impl<'a, 'tcx> euv::Delegate<'tcx> for CheckCrateVisitor<'a, 'tcx> { break; } Categorization::StaticItem => { - if is_interior && self.mode != Mode::Var { - // Borrowed statics can specifically *only* have their address taken, - // not any number of other borrows such as borrowing fields, reading - // elements of an array, etc. - span_err!(self.tcx.sess, borrow_span, E0494, - "cannot refer to the interior of another \ - static, use a constant instead"); - } break; } Categorization::Deref(ref cmt, _, _) | Categorization::Downcast(ref cmt, _) | Categorization::Interior(ref cmt, _) => { - is_interior = true; cur = cmt; } diff --git a/src/librustc_passes/diagnostics.rs b/src/librustc_passes/diagnostics.rs index c89e9bb195..77f896e011 100644 --- a/src/librustc_passes/diagnostics.rs +++ b/src/librustc_passes/diagnostics.rs @@ -12,70 +12,6 @@ register_long_diagnostics! { -E0010: r##" -The value of statics and constants must be known at compile time, and they live -for the entire lifetime of a program. Creating a boxed value allocates memory on -the heap at runtime, and therefore cannot be done at compile time. Erroneous -code example: - -```compile_fail -#![feature(box_syntax)] - -const CON : Box = box 0; -``` -"##, - -E0011: r##" -Initializers for constants and statics are evaluated at compile time. 
-User-defined operators rely on user-defined functions, which cannot be evaluated -at compile time. - -Erroneous code example: - -```compile_fail -use std::ops::Index; - -struct Foo { a: u8 } - -impl Index for Foo { - type Output = u8; - - fn index<'a>(&'a self, idx: u8) -> &'a u8 { &self.a } -} - -const a: Foo = Foo { a: 0u8 }; -const b: u8 = a[0]; // Index trait is defined by the user, bad! -``` - -Only operators on builtin types are allowed. - -Example: - -``` -const a: &'static [i32] = &[1, 2, 3]; -const b: i32 = a[0]; // Ok! -``` -"##, - -E0013: r##" -Static and const variables can refer to other const variables. But a const -variable cannot refer to a static variable. For example, `Y` cannot refer to -`X` here: - -```compile_fail -static X: i32 = 42; -const Y: i32 = X; -``` - -To fix this, the value can be extracted as a const and then used: - -``` -const A: i32 = 42; -static X: i32 = A; -const Y: i32 = A; -``` -"##, - E0014: r##" Constants can only be initialized by a constant value or, in a future version of Rust, a call to a const function. This error indicates the use @@ -95,149 +31,6 @@ const FOO2: i32 = { 0 }; // but brackets are useless here ``` "##, -// FIXME(#24111) Change the language here when const fn stabilizes -E0015: r##" -The only functions that can be called in static or constant expressions are -`const` functions, and struct/enum constructors. `const` functions are only -available on a nightly compiler. Rust currently does not support more general -compile-time function execution. - -``` -const FOO: Option = Some(1); // enum constructor -struct Bar {x: u8} -const BAR: Bar = Bar {x: 1}; // struct constructor -``` - -See [RFC 911] for more details on the design of `const fn`s. - -[RFC 911]: https://github.com/rust-lang/rfcs/blob/master/text/0911-const-fn.md -"##, - -E0016: r##" -Blocks in constants may only contain items (such as constant, function -definition, etc...) and a tail expression. Erroneous code example: - -```compile_fail -const FOO: i32 = { let x = 0; x }; // 'x' isn't an item! -``` - -To avoid it, you have to replace the non-item object: - -``` -const FOO: i32 = { const X : i32 = 0; X }; -``` -"##, - -E0017: r##" -References in statics and constants may only refer to immutable values. -Erroneous code example: - -```compile_fail -static X: i32 = 1; -const C: i32 = 2; - -// these three are not allowed: -const CR: &'static mut i32 = &mut C; -static STATIC_REF: &'static mut i32 = &mut X; -static CONST_REF: &'static mut i32 = &mut C; -``` - -Statics are shared everywhere, and if they refer to mutable data one might -violate memory safety since holding multiple mutable references to shared data -is not allowed. - -If you really want global mutable state, try using `static mut` or a global -`UnsafeCell`. -"##, - -E0018: r##" - -The value of static and constant integers must be known at compile time. You -can't cast a pointer to an integer because the address of a pointer can -vary. - -For example, if you write: - -```compile_fail -static MY_STATIC: u32 = 42; -static MY_STATIC_ADDR: usize = &MY_STATIC as *const _ as usize; -static WHAT: usize = (MY_STATIC_ADDR^17) + MY_STATIC_ADDR; -``` - -Then `MY_STATIC_ADDR` would contain the address of `MY_STATIC`. However, -the address can change when the program is linked, as well as change -between different executions due to ASLR, and many linkers would -not be able to calculate the value of `WHAT`. 
- -On the other hand, static and constant pointers can point either to -a known numeric address or to the address of a symbol. - -``` -static MY_STATIC_ADDR: &'static u32 = &MY_STATIC; -// ... and also -static MY_STATIC_ADDR2: *const u32 = &MY_STATIC; - -const CONST_ADDR: *const u8 = 0x5f3759df as *const u8; -``` - -This does not pose a problem by itself because they can't be -accessed directly. -"##, - -E0019: r##" -A function call isn't allowed in the const's initialization expression -because the expression's value must be known at compile-time. Erroneous code -example: - -```compile_fail -enum Test { - V1 -} - -impl Test { - fn test(&self) -> i32 { - 12 - } -} - -fn main() { - const FOO: Test = Test::V1; - - const A: i32 = FOO.test(); // You can't call Test::func() here ! -} -``` - -Remember: you can't use a function call inside a const's initialization -expression! However, you can totally use it anywhere else: - -``` -fn main() { - const FOO: Test = Test::V1; - - FOO.func(); // here is good - let x = FOO.func(); // or even here! -} -``` -"##, - -E0022: r##" -Constant functions are not allowed to mutate anything. Thus, binding to an -argument with a mutable pattern is not allowed. For example, - -```compile_fail -const fn foo(mut x: u8) { - // do stuff -} -``` - -Is incorrect because the function body may not mutate `x`. - -Remove any mutable bindings from the argument list to fix this error. In case -you need to mutate the argument, try lazily initializing a global variable -instead of using a `const fn`, or refactoring the code to a functional style to -avoid mutation if possible. -"##, - E0030: r##" When matching against a range, the compiler verifies that the range is non-empty. Range patterns include both end-points, so this is equivalent to @@ -325,281 +118,6 @@ fn some_func() { ``` "##, -E0378: r##" -Method calls that aren't calls to inherent `const` methods are disallowed -in statics, constants, and constant functions. - -For example: - -```compile_fail -const BAZ: i32 = Foo(25).bar(); // error, `bar` isn't `const` - -struct Foo(i32); - -impl Foo { - const fn foo(&self) -> i32 { - self.bar() // error, `bar` isn't `const` - } - - fn bar(&self) -> i32 { self.0 } -} -``` - -For more information about `const fn`'s, see [RFC 911]. - -[RFC 911]: https://github.com/rust-lang/rfcs/blob/master/text/0911-const-fn.md -"##, - -E0394: r##" -From [RFC 246]: - - > It is invalid for a static to reference another static by value. It is - > required that all references be borrowed. - -[RFC 246]: https://github.com/rust-lang/rfcs/pull/246 -"##, - - -E0395: r##" -The value assigned to a constant scalar must be known at compile time, -which is not the case when comparing raw pointers. - -Erroneous code example: - -```compile_fail -static FOO: i32 = 42; -static BAR: i32 = 42; - -static BAZ: bool = { (&FOO as *const i32) == (&BAR as *const i32) }; -// error: raw pointers cannot be compared in statics! -``` - -The address assigned by the linker to `FOO` and `BAR` may or may not -be identical, so the value of `BAZ` can't be determined. - -If you want to do the comparison, please do it at run-time. - -For example: - -``` -static FOO: i32 = 42; -static BAR: i32 = 42; - -let baz: bool = { (&FOO as *const i32) == (&BAR as *const i32) }; -// baz isn't a constant expression so it's ok -``` -"##, - -E0396: r##" -The value behind a raw pointer can't be determined at compile-time -(or even link-time), which means it can't be used in a constant -expression. 
Erroneous code example: - -```compile_fail -const REG_ADDR: *const u8 = 0x5f3759df as *const u8; - -const VALUE: u8 = unsafe { *REG_ADDR }; -// error: raw pointers cannot be dereferenced in constants -``` - -A possible fix is to dereference your pointer at some point in run-time. - -For example: - -``` -const REG_ADDR: *const u8 = 0x5f3759df as *const u8; - -let reg_value = unsafe { *REG_ADDR }; -``` -"##, - -E0397: r##" -It is not allowed for a mutable static to allocate or have destructors. For -example: - -```compile_fail -// error: mutable statics are not allowed to have boxes -static mut FOO: Option> = None; - -// error: mutable statics are not allowed to have destructors -static mut BAR: Option> = None; -``` -"##, - -E0400: r##" -A user-defined dereference was attempted in an invalid context. Erroneous -code example: - -```compile_fail -use std::ops::Deref; - -struct A; - -impl Deref for A { - type Target = str; - - fn deref(&self)-> &str { "foo" } -} - -const S: &'static str = &A; -// error: user-defined dereference operators are not allowed in constants - -fn main() { - let foo = S; -} -``` - -You cannot directly use a dereference operation whilst initializing a constant -or a static. To fix this error, restructure your code to avoid this dereference, -perhaps moving it inline: - -``` -use std::ops::Deref; - -struct A; - -impl Deref for A { - type Target = str; - - fn deref(&self)-> &str { "foo" } -} - -fn main() { - let foo : &str = &A; -} -``` -"##, - -E0492: r##" -A borrow of a constant containing interior mutability was attempted. Erroneous -code example: - -```compile_fail -use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT}; - -const A: AtomicUsize = ATOMIC_USIZE_INIT; -static B: &'static AtomicUsize = &A; -// error: cannot borrow a constant which contains interior mutability, create a -// static instead -``` - -A `const` represents a constant value that should never change. If one takes -a `&` reference to the constant, then one is taking a pointer to some memory -location containing the value. Normally this is perfectly fine: most values -can't be changed via a shared `&` pointer, but interior mutability would allow -it. That is, a constant value could be mutated. On the other hand, a `static` is -explicitly a single memory location, which can be mutated at will. - -So, in order to solve this error, either use statics which are `Sync`: - -``` -use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT}; - -static A: AtomicUsize = ATOMIC_USIZE_INIT; -static B: &'static AtomicUsize = &A; // ok! -``` - -You can also have this error while using a cell type: - -```compile_fail -#![feature(const_fn)] - -use std::cell::Cell; - -const A: Cell = Cell::new(1); -const B: &'static Cell = &A; -// error: cannot borrow a constant which contains interior mutability, create -// a static instead - -// or: -struct C { a: Cell } - -const D: C = C { a: Cell::new(1) }; -const E: &'static Cell = &D.a; // error - -// or: -const F: &'static C = &D; // error -``` - -This is because cell types do operations that are not thread-safe. Due to this, -they don't implement Sync and thus can't be placed in statics. 
In this -case, `StaticMutex` would work just fine, but it isn't stable yet: -https://doc.rust-lang.org/nightly/std/sync/struct.StaticMutex.html - -However, if you still wish to use these types, you can achieve this by an unsafe -wrapper: - -``` -#![feature(const_fn)] - -use std::cell::Cell; -use std::marker::Sync; - -struct NotThreadSafe { - value: Cell, -} - -unsafe impl Sync for NotThreadSafe {} - -static A: NotThreadSafe = NotThreadSafe { value : Cell::new(1) }; -static B: &'static NotThreadSafe = &A; // ok! -``` - -Remember this solution is unsafe! You will have to ensure that accesses to the -cell are synchronized. -"##, - -E0493: r##" -A type with a destructor was assigned to an invalid type of variable. Erroneous -code example: - -```compile_fail -struct Foo { - a: u32 -} - -impl Drop for Foo { - fn drop(&mut self) {} -} - -const F : Foo = Foo { a : 0 }; -// error: constants are not allowed to have destructors -static S : Foo = Foo { a : 0 }; -// error: statics are not allowed to have destructors -``` - -To solve this issue, please use a type which does allow the usage of type with -destructors. -"##, - -E0494: r##" -A reference of an interior static was assigned to another const/static. -Erroneous code example: - -```compile_fail -struct Foo { - a: u32 -} - -static S : Foo = Foo { a : 0 }; -static A : &'static u32 = &S.a; -// error: cannot refer to the interior of another static, use a -// constant instead -``` - -The "base" variable has to be a const if you want another static/const variable -to refer to one of its fields. Example: - -``` -struct Foo { - a: u32 -} - -const S : Foo = Foo { a : 0 }; -static A : &'static u32 = &S.a; // ok! -``` -"##, - } register_diagnostics! { diff --git a/src/librustc_passes/lib.rs b/src/librustc_passes/lib.rs index 53ae1b30f7..67a9c2fd17 100644 --- a/src/librustc_passes/lib.rs +++ b/src/librustc_passes/lib.rs @@ -30,13 +30,13 @@ extern crate core; #[macro_use] extern crate rustc; extern crate rustc_const_eval; +extern crate rustc_const_math; #[macro_use] extern crate log; #[macro_use] extern crate syntax; pub mod diagnostics; -pub mod const_fn; pub mod consts; pub mod loops; pub mod no_asm; diff --git a/src/librustc_passes/loops.rs b/src/librustc_passes/loops.rs index 9a58a704c5..2174d1cf9b 100644 --- a/src/librustc_passes/loops.rs +++ b/src/librustc_passes/loops.rs @@ -48,7 +48,7 @@ impl<'a, 'v> Visitor<'v> for CheckLoopVisitor<'a> { hir::ExprLoop(ref b, _) => { self.with_context(Loop, |v| v.visit_block(&b)); } - hir::ExprClosure(_, _, ref b) => { + hir::ExprClosure(_, _, ref b, _) => { self.with_context(Closure, |v| v.visit_block(&b)); } hir::ExprBreak(_) => self.require_loop("break", e.span), diff --git a/src/librustc_passes/rvalues.rs b/src/librustc_passes/rvalues.rs index 754d5ef8f5..137a50642f 100644 --- a/src/librustc_passes/rvalues.rs +++ b/src/librustc_passes/rvalues.rs @@ -13,7 +13,6 @@ use rustc::dep_graph::DepNode; use rustc::middle::expr_use_visitor as euv; -use rustc::infer; use rustc::middle::mem_categorization as mc; use rustc::ty::{self, TyCtxt, ParameterEnvironment}; use rustc::traits::ProjectionMode; @@ -23,13 +22,13 @@ use rustc::hir::intravisit; use syntax::ast; use syntax::codemap::Span; -pub fn check_crate(tcx: &TyCtxt) { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut rvcx = RvalueContext { tcx: tcx }; tcx.visit_all_items_in_krate(DepNode::RvalueCheck, &mut rvcx); } struct RvalueContext<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, } impl<'a, 'tcx, 'v> intravisit::Visitor<'v> 
for RvalueContext<'a, 'tcx> { @@ -39,37 +38,38 @@ impl<'a, 'tcx, 'v> intravisit::Visitor<'v> for RvalueContext<'a, 'tcx> { b: &'v hir::Block, s: Span, fn_id: ast::NodeId) { - { - // FIXME (@jroesch) change this to be an inference context - let param_env = ParameterEnvironment::for_item(self.tcx, fn_id); - let infcx = infer::new_infer_ctxt(self.tcx, - &self.tcx.tables, - Some(param_env.clone()), - ProjectionMode::AnyFinal); - let mut delegate = RvalueContextDelegate { tcx: self.tcx, param_env: ¶m_env }; + // FIXME (@jroesch) change this to be an inference context + let param_env = ParameterEnvironment::for_item(self.tcx, fn_id); + self.tcx.infer_ctxt(None, Some(param_env.clone()), + ProjectionMode::AnyFinal).enter(|infcx| { + let mut delegate = RvalueContextDelegate { + tcx: infcx.tcx, + param_env: ¶m_env + }; let mut euv = euv::ExprUseVisitor::new(&mut delegate, &infcx); euv.walk_fn(fd, b); - } + }); intravisit::walk_fn(self, fk, fd, b, s) } } -struct RvalueContextDelegate<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, - param_env: &'a ty::ParameterEnvironment<'a,'tcx>, +struct RvalueContextDelegate<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + tcx: TyCtxt<'a, 'gcx, 'tcx>, + param_env: &'a ty::ParameterEnvironment<'gcx>, } -impl<'a, 'tcx> euv::Delegate<'tcx> for RvalueContextDelegate<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> euv::Delegate<'tcx> for RvalueContextDelegate<'a, 'gcx, 'tcx> { fn consume(&mut self, _: ast::NodeId, span: Span, cmt: mc::cmt<'tcx>, _: euv::ConsumeMode) { debug!("consume; cmt: {:?}; type: {:?}", *cmt, cmt.ty); - if !cmt.ty.is_sized(self.param_env, span) { + let ty = self.tcx.lift_to_global(&cmt.ty).unwrap(); + if !ty.is_sized(self.tcx.global_tcx(), self.param_env, span) { span_err!(self.tcx.sess, span, E0161, "cannot move a value of type {0}: the size of {0} cannot be statically determined", - cmt.ty); + ty); } } diff --git a/src/librustc_plugin/load.rs b/src/librustc_plugin/load.rs index ac40215bbb..036e46c380 100644 --- a/src/librustc_plugin/load.rs +++ b/src/librustc_plugin/load.rs @@ -51,27 +51,32 @@ pub fn load_plugins(sess: &Session, addl_plugins: Option>) -> Vec { let mut loader = PluginLoader::new(sess, cstore, crate_name); - for attr in &krate.attrs { - if !attr.check_name("plugin") { - continue; - } - - let plugins = match attr.meta_item_list() { - Some(xs) => xs, - None => { - call_malformed_plugin_attribute(sess, attr.span); + // do not report any error now. since crate attributes are + // not touched by expansion, every use of plugin without + // the feature enabled will result in an error later... 
+ if sess.features.borrow().plugin { + for attr in &krate.attrs { + if !attr.check_name("plugin") { continue; } - }; - for plugin in plugins { - if plugin.value_str().is_some() { - call_malformed_plugin_attribute(sess, attr.span); - continue; + let plugins = match attr.meta_item_list() { + Some(xs) => xs, + None => { + call_malformed_plugin_attribute(sess, attr.span); + continue; + } + }; + + for plugin in plugins { + if plugin.value_str().is_some() { + call_malformed_plugin_attribute(sess, attr.span); + continue; + } + + let args = plugin.meta_item_list().map(ToOwned::to_owned).unwrap_or_default(); + loader.load_plugin(plugin.span, &plugin.name(), args); } - - let args = plugin.meta_item_list().map(ToOwned::to_owned).unwrap_or_default(); - loader.load_plugin(plugin.span, &plugin.name(), args); } } diff --git a/src/librustc_plugin/registry.rs b/src/librustc_plugin/registry.rs index 3cfd6a76dd..dc5a38bb76 100644 --- a/src/librustc_plugin/registry.rs +++ b/src/librustc_plugin/registry.rs @@ -92,8 +92,11 @@ impl<'a> Registry<'a> { /// ```no_run /// #![plugin(my_plugin_name(... args ...))] /// ``` - pub fn args<'b>(&'b self) -> &'b Vec> { - self.args_hidden.as_ref().expect("args not set") + /// + /// Returns empty slice in case the plugin was loaded + /// with `--extra-plugins` + pub fn args<'b>(&'b self) -> &'b [P] { + self.args_hidden.as_ref().map(|v| &v[..]).unwrap_or(&[]) } /// Register a syntax extension of any kind. diff --git a/src/librustc_privacy/lib.rs b/src/librustc_privacy/lib.rs index a6ce4cc3ee..f1e744098b 100644 --- a/src/librustc_privacy/lib.rs +++ b/src/librustc_privacy/lib.rs @@ -34,7 +34,6 @@ use rustc::hir::intravisit::{self, Visitor}; use rustc::dep_graph::DepNode; use rustc::lint; -use rustc::middle::cstore::CrateStore; use rustc::hir::def::{self, Def}; use rustc::hir::def_id::DefId; use rustc::middle::privacy::{AccessLevel, AccessLevels}; @@ -59,7 +58,7 @@ type CheckResult = Option<(Span, String, Option<(Span, String)>)>; //////////////////////////////////////////////////////////////////////////////// struct EmbargoVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, export_map: &'a def::ExportMap, // Accessibility levels for reachable nodes @@ -129,7 +128,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for EmbargoVisitor<'a, 'tcx> { /// We want to visit items in the context of their containing /// module and so forth, so supply a crate for doing a deep walk. fn visit_nested_item(&mut self, item: hir::ItemId) { - self.visit_item(self.tcx.map.expect_item(item.id)) + let tcx = self.tcx; + self.visit_item(tcx.map.expect_item(item.id)) } fn visit_item(&mut self, item: &hir::Item) { @@ -376,7 +376,7 @@ impl<'b, 'a, 'tcx: 'a, 'v> Visitor<'v> for ReachEverythingInTheInterfaceVisitor< //////////////////////////////////////////////////////////////////////////////// struct PrivacyVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, curitem: ast::NodeId, in_foreign: bool, } @@ -418,7 +418,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> { /// We want to visit items in the context of their containing /// module and so forth, so supply a crate for doing a deep walk. 
fn visit_nested_item(&mut self, item: hir::ItemId) { - self.visit_item(self.tcx.map.expect_item(item.id)) + let tcx = self.tcx; + self.visit_item(tcx.map.expect_item(item.id)) } fn visit_item(&mut self, item: &hir::Item) { @@ -525,7 +526,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivacyVisitor<'a, 'tcx> { //////////////////////////////////////////////////////////////////////////////// struct SanePrivacyVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, } impl<'a, 'tcx, 'v> Visitor<'v> for SanePrivacyVisitor<'a, 'tcx> { @@ -596,7 +597,7 @@ impl<'a, 'tcx> SanePrivacyVisitor<'a, 'tcx> { /////////////////////////////////////////////////////////////////////////////// struct ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, access_levels: &'a AccessLevels, in_variant: bool, // set of errors produced by this obsolete visitor @@ -680,7 +681,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> /// We want to visit items in the context of their containing /// module and so forth, so supply a crate for doing a deep walk. fn visit_nested_item(&mut self, item: hir::ItemId) { - self.visit_item(self.tcx.map.expect_item(item.id)) + let tcx = self.tcx; + self.visit_item(tcx.map.expect_item(item.id)) } fn visit_item(&mut self, item: &hir::Item) { @@ -829,8 +831,8 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> } } hir::ImplItemKind::Method(ref sig, _) => { - if sig.explicit_self.node == hir::SelfStatic && - self.item_is_public(&impl_item.id, &impl_item.vis) { + if !sig.decl.has_self() && + self.item_is_public(&impl_item.id, &impl_item.vis) { found_pub_static = true; intravisit::walk_impl_item(self, impl_item); } @@ -935,7 +937,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for ObsoleteVisiblePrivateTypesVisitor<'a, 'tcx> /////////////////////////////////////////////////////////////////////////////// struct SearchInterfaceForPrivateItemsVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, /// The visitor checks that each component type is at least this visible required_visibility: ty::Visibility, /// The visibility of the least visible component that has been visited @@ -944,7 +946,7 @@ struct SearchInterfaceForPrivateItemsVisitor<'a, 'tcx: 'a> { } impl<'a, 'tcx: 'a> SearchInterfaceForPrivateItemsVisitor<'a, 'tcx> { - fn new(tcx: &'a TyCtxt<'tcx>, old_error_set: &'a NodeSet) -> Self { + fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>, old_error_set: &'a NodeSet) -> Self { SearchInterfaceForPrivateItemsVisitor { tcx: tcx, min_visibility: ty::Visibility::Public, @@ -1016,14 +1018,15 @@ impl<'a, 'tcx: 'a, 'v> Visitor<'v> for SearchInterfaceForPrivateItemsVisitor<'a, let item = self.tcx.map.expect_item(node_id); let vis = match self.substituted_alias_visibility(item, path) { Some(vis) => vis, - None => ty::Visibility::from_hir(&item.vis, node_id, &self.tcx), + None => ty::Visibility::from_hir(&item.vis, node_id, self.tcx), }; if !vis.is_at_least(self.min_visibility, &self.tcx.map) { self.min_visibility = vis; } if !vis.is_at_least(self.required_visibility, &self.tcx.map) { - if self.old_error_set.contains(&ty.id) { + if self.tcx.sess.features.borrow().pub_restricted || + self.old_error_set.contains(&ty.id) { span_err!(self.tcx.sess, ty.span, E0446, "private type in public interface"); } else { @@ -1047,13 +1050,14 @@ impl<'a, 'tcx: 'a, 'v> Visitor<'v> for SearchInterfaceForPrivateItemsVisitor<'a, let def_id = self.tcx.trait_ref_to_def_id(trait_ref); if let 
Some(node_id) = self.tcx.map.as_local_node_id(def_id) { let item = self.tcx.map.expect_item(node_id); - let vis = ty::Visibility::from_hir(&item.vis, node_id, &self.tcx); + let vis = ty::Visibility::from_hir(&item.vis, node_id, self.tcx); if !vis.is_at_least(self.min_visibility, &self.tcx.map) { self.min_visibility = vis; } if !vis.is_at_least(self.required_visibility, &self.tcx.map) { - if self.old_error_set.contains(&trait_ref.ref_id) { + if self.tcx.sess.features.borrow().pub_restricted || + self.old_error_set.contains(&trait_ref.ref_id) { span_err!(self.tcx.sess, trait_ref.path.span, E0445, "private trait in public interface"); } else { @@ -1078,7 +1082,7 @@ impl<'a, 'tcx: 'a, 'v> Visitor<'v> for SearchInterfaceForPrivateItemsVisitor<'a, } struct PrivateItemsInPublicInterfacesVisitor<'a, 'tcx: 'a> { - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, old_error_set: &'a NodeSet, } @@ -1105,7 +1109,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivateItemsInPublicInterfacesVisitor<'a, 'tc }; let mut check = SearchInterfaceForPrivateItemsVisitor::new(self.tcx, self.old_error_set); - let item_visibility = ty::Visibility::from_hir(&item.vis, item.id, &self.tcx); + let item_visibility = ty::Visibility::from_hir(&item.vis, item.id, self.tcx); match item.node { // Crates are always public @@ -1124,7 +1128,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivateItemsInPublicInterfacesVisitor<'a, 'tc hir::ItemForeignMod(ref foreign_mod) => { for foreign_item in &foreign_mod.items { check.required_visibility = - ty::Visibility::from_hir(&foreign_item.vis, item.id, &self.tcx); + ty::Visibility::from_hir(&foreign_item.vis, item.id, self.tcx); check.visit_foreign_item(foreign_item); } } @@ -1134,7 +1138,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivateItemsInPublicInterfacesVisitor<'a, 'tc check.visit_generics(generics); for field in struct_def.fields() { - let field_visibility = ty::Visibility::from_hir(&field.vis, item.id, &self.tcx); + let field_visibility = ty::Visibility::from_hir(&field.vis, item.id, self.tcx); check.required_visibility = min(item_visibility, field_visibility); check.visit_struct_field(field); } @@ -1150,7 +1154,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivateItemsInPublicInterfacesVisitor<'a, 'tc for impl_item in impl_items { let impl_item_vis = - ty::Visibility::from_hir(&impl_item.vis, item.id, &self.tcx); + ty::Visibility::from_hir(&impl_item.vis, item.id, self.tcx); check.required_visibility = min(impl_item_vis, ty_vis); check.visit_impl_item(impl_item); } @@ -1169,7 +1173,9 @@ impl<'a, 'tcx, 'v> Visitor<'v> for PrivateItemsInPublicInterfacesVisitor<'a, 'tc } } -pub fn check_crate(tcx: &TyCtxt, export_map: &def::ExportMap) -> AccessLevels { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + export_map: &def::ExportMap) + -> AccessLevels { let _task = tcx.dep_graph.in_task(DepNode::Privacy); let krate = tcx.map.krate(); diff --git a/src/librustc_resolve/Cargo.toml b/src/librustc_resolve/Cargo.toml index cf477c2a95..a63460d912 100644 --- a/src/librustc_resolve/Cargo.toml +++ b/src/librustc_resolve/Cargo.toml @@ -7,10 +7,10 @@ version = "0.0.0" name = "rustc_resolve" path = "lib.rs" crate-type = ["dylib"] +test = false [dependencies] log = { path = "../liblog" } syntax = { path = "../libsyntax" } rustc = { path = "../librustc" } -rustc_bitflags = { path = "../librustc_bitflags" } arena = { path = "../libarena" } diff --git a/src/librustc_resolve/build_reduced_graph.rs b/src/librustc_resolve/build_reduced_graph.rs index 003450cd6f..f56b22f924 100644 --- 
a/src/librustc_resolve/build_reduced_graph.rs +++ b/src/librustc_resolve/build_reduced_graph.rs @@ -13,7 +13,6 @@ //! Here we build the "reduced graph": the graph of the module tree without //! any imports resolved. -use DefModifiers; use resolve_imports::ImportDirectiveSubclass::{self, GlobImport}; use Module; use Namespace::{self, TypeNS, ValueNS}; @@ -22,52 +21,48 @@ use ParentLink::{ModuleParentLink, BlockParentLink}; use Resolver; use {resolve_error, resolve_struct_error, ResolutionError}; -use rustc::middle::cstore::{CrateStore, ChildItem, DlDef}; +use rustc::middle::cstore::{ChildItem, DlDef}; use rustc::lint; use rustc::hir::def::*; use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; use rustc::ty::{self, VariantKind}; -use syntax::ast::Name; +use syntax::ast::{Name, NodeId}; use syntax::attr::AttrMetaMethods; -use syntax::parse::token::{special_idents, SELF_KEYWORD_NAME, SUPER_KEYWORD_NAME}; +use syntax::parse::token::{self, keywords}; use syntax::codemap::{Span, DUMMY_SP}; -use rustc::hir; -use rustc::hir::{Block, DeclItem}; -use rustc::hir::{ForeignItem, ForeignItemFn, ForeignItemStatic}; -use rustc::hir::{Item, ItemConst, ItemEnum, ItemExternCrate, ItemFn}; -use rustc::hir::{ItemForeignMod, ItemImpl, ItemMod, ItemStatic, ItemDefaultImpl}; -use rustc::hir::{ItemStruct, ItemTrait, ItemTy, ItemUse}; -use rustc::hir::{PathListIdent, PathListMod, StmtDecl}; -use rustc::hir::{Variant, ViewPathGlob, ViewPathList, ViewPathSimple}; -use rustc::hir::intravisit::{self, Visitor}; +use syntax::ast::{Block, Crate, DeclKind}; +use syntax::ast::{ForeignItem, ForeignItemKind, Item, ItemKind}; +use syntax::ast::{Mutability, PathListItemKind}; +use syntax::ast::{SelfKind, Stmt, StmtKind, TraitItemKind}; +use syntax::ast::{Variant, ViewPath, ViewPathGlob, ViewPathList, ViewPathSimple}; +use syntax::visit::{self, Visitor}; trait ToNameBinding<'a> { fn to_name_binding(self) -> NameBinding<'a>; } -impl<'a> ToNameBinding<'a> for (Module<'a>, Span) { +impl<'a> ToNameBinding<'a> for (Module<'a>, Span, ty::Visibility) { fn to_name_binding(self) -> NameBinding<'a> { - NameBinding::create_from_module(self.0, Some(self.1)) + NameBinding { kind: NameBindingKind::Module(self.0), span: self.1, vis: self.2 } } } -impl<'a> ToNameBinding<'a> for (Def, Span, DefModifiers) { +impl<'a> ToNameBinding<'a> for (Def, Span, ty::Visibility) { fn to_name_binding(self) -> NameBinding<'a> { - let kind = NameBindingKind::Def(self.0); - NameBinding { modifiers: self.2, kind: kind, span: Some(self.1) } + NameBinding { kind: NameBindingKind::Def(self.0), span: self.1, vis: self.2 } } } -impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { +impl<'b> Resolver<'b> { /// Constructs the reduced graph for the entire crate. - pub fn build_reduced_graph(&mut self, krate: &hir::Crate) { + pub fn build_reduced_graph(&mut self, krate: &Crate) { let mut visitor = BuildReducedGraphVisitor { parent: self.graph_root, resolver: self, }; - intravisit::walk_crate(&mut visitor, krate); + visit::walk_crate(&mut visitor, krate); } /// Defines `name` in namespace `ns` of module `parent` to be `def` if it is not yet defined. 
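The pair of `ToNameBinding` impls above lets a single generic `define` accept either a module-backed or a def-backed binding as a plain tuple. A minimal sketch of that conversion-trait pattern, with made-up types standing in for the resolver's real `NameBinding` and `define`:

```
// Sketch only: stand-in types, not the resolver's real data structures.
#[derive(Debug)]
struct Binding {
    kind: &'static str,
    public: bool,
}

trait ToBinding {
    fn to_binding(self) -> Binding;
}

// "Module-like" binding built from a (name, visibility) tuple.
impl ToBinding for (&'static str, bool) {
    fn to_binding(self) -> Binding {
        Binding { kind: self.0, public: self.1 }
    }
}

// "Def-like" binding built from an (id, visibility) tuple.
impl ToBinding for (u32, bool) {
    fn to_binding(self) -> Binding {
        Binding { kind: "def", public: self.1 }
    }
}

// One generic entry point, mirroring how `define` takes `T: ToNameBinding`.
fn define<T: ToBinding>(t: T) -> Binding {
    t.to_binding()
}

fn main() {
    println!("{:?}", define(("module", true)));
    println!("{:?}", define((42u32, false)));
}
```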
@@ -87,9 +82,9 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { } fn block_needs_anonymous_module(&mut self, block: &Block) -> bool { - fn is_item(statement: &hir::Stmt) -> bool { - if let StmtDecl(ref declaration, _) = statement.node { - if let DeclItem(_) = declaration.node { + fn is_item(statement: &Stmt) -> bool { + if let StmtKind::Decl(ref declaration, _) = statement.node { + if let DeclKind::Item(_) = declaration.node { return true; } } @@ -100,27 +95,51 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { block.stmts.iter().any(is_item) } + fn sanity_check_import(&self, view_path: &ViewPath, id: NodeId) { + let path = match view_path.node { + ViewPathSimple(_, ref path) | + ViewPathGlob (ref path) | + ViewPathList(ref path, _) => path + }; + + // Check for type parameters + let found_param = path.segments.iter().any(|segment| { + !segment.parameters.types().is_empty() || + !segment.parameters.lifetimes().is_empty() || + !segment.parameters.bindings().is_empty() + }); + if found_param { + self.session.span_err(path.span, "type or lifetime parameters in import path"); + } + + // Checking for special identifiers in path + // prevent `self` or `super` at beginning of global path + if path.global && path.segments.len() > 0 { + let first = path.segments[0].identifier.name; + if first == keywords::Super.name() || first == keywords::SelfValue.name() { + self.session.add_lint( + lint::builtin::SUPER_OR_SELF_IN_GLOBAL_PATH, id, path.span, + format!("expected identifier, found keyword `{}`", first) + ); + } + } + } + /// Constructs the reduced graph for one item. fn build_reduced_graph_for_item(&mut self, item: &Item, parent_ref: &mut Module<'b>) { let parent = *parent_ref; - let name = item.name; + let name = item.ident.name; let sp = item.span; - let is_public = item.vis == hir::Public; - let modifiers = if is_public { - DefModifiers::PUBLIC - } else { - DefModifiers::empty() - } | DefModifiers::IMPORTABLE; + self.current_module = parent; + let vis = self.resolve_visibility(&item.vis); match item.node { - ItemUse(ref view_path) => { + ItemKind::Use(ref view_path) => { // Extract and intern the module part of the path. For // globs and lists, the path is found directly in the AST; // for simple paths we have to munge the path a little. - let is_global; let module_path: Vec = match view_path.node { ViewPathSimple(_, ref full_path) => { - is_global = full_path.global; full_path.segments .split_last() .unwrap() @@ -132,7 +151,6 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { ViewPathGlob(ref module_ident_path) | ViewPathList(ref module_ident_path, _) => { - is_global = module_ident_path.global; module_ident_path.segments .iter() .map(|seg| seg.identifier.name) @@ -140,22 +158,10 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { } }; - // Checking for special identifiers in path - // prevent `self` or `super` at beginning of global path - if is_global && (module_path.first() == Some(&SELF_KEYWORD_NAME) || - module_path.first() == Some(&SUPER_KEYWORD_NAME)) { - self.session.add_lint( - lint::builtin::SUPER_OR_SELF_IN_GLOBAL_PATH, - item.id, - item.span, - format!("expected identifier, found keyword `{}`", - module_path.first().unwrap().as_str())); - } + self.sanity_check_import(view_path, item.id); // Build up the import directives. 
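The new `sanity_check_import` gathers the keyword check that used to sit inline in the `Use` arm and adds a rejection of type and lifetime parameters in `use` paths. Purely illustrative input follows (kept as an `ignore` block, matching the convention of the error-code docs later in this patch), showing the path shape the keyword lint is aimed at; the lint name and message are the ones used in the hunk above:

```ignore
use ::super::some_module::SomeType;
// lint SUPER_OR_SELF_IN_GLOBAL_PATH:
//   expected identifier, found keyword `super`
```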
- let is_prelude = item.attrs.iter().any(|attr| { - attr.name() == special_idents::prelude_import.name.as_str() - }); + let is_prelude = item.attrs.iter().any(|attr| attr.name() == "prelude_import"); match view_path.node { ViewPathSimple(binding, ref full_path) => { @@ -166,25 +172,20 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { ResolutionError::SelfImportsOnlyAllowedWithin); } - let subclass = ImportDirectiveSubclass::single(binding, source_name); + let subclass = ImportDirectiveSubclass::single(binding.name, source_name); + let span = view_path.span; + parent.add_import_directive(module_path, subclass, span, item.id, vis); self.unresolved_imports += 1; - parent.add_import_directive(module_path, - subclass, - view_path.span, - item.id, - is_public, - is_prelude); } ViewPathList(_, ref source_items) => { // Make sure there's at most one `mod` import in the list. - let mod_spans = source_items.iter() - .filter_map(|item| { - match item.node { - PathListMod { .. } => Some(item.span), - _ => None, - } - }) - .collect::>(); + let mod_spans = source_items.iter().filter_map(|item| { + match item.node { + PathListItemKind::Mod { .. } => Some(item.span), + _ => None, + } + }).collect::>(); + if mod_spans.len() > 1 { let mut e = resolve_struct_error(self, mod_spans[0], @@ -197,9 +198,9 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { for source_item in source_items { let (module_path, name, rename) = match source_item.node { - PathListIdent { name, rename, .. } => - (module_path.clone(), name, rename.unwrap_or(name)), - PathListMod { rename, .. } => { + PathListItemKind::Ident { name, rename, .. } => + (module_path.clone(), name.name, rename.unwrap_or(name).name), + PathListItemKind::Mod { rename, .. } => { let name = match module_path.last() { Some(name) => *name, None => { @@ -213,33 +214,26 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { } }; let module_path = module_path.split_last().unwrap().1; - let rename = rename.unwrap_or(name); + let rename = rename.map(|i| i.name).unwrap_or(name); (module_path.to_vec(), name, rename) } }; let subclass = ImportDirectiveSubclass::single(rename, name); + let (span, id) = (source_item.span, source_item.node.id()); + parent.add_import_directive(module_path, subclass, span, id, vis); self.unresolved_imports += 1; - parent.add_import_directive(module_path, - subclass, - source_item.span, - source_item.node.id(), - is_public, - is_prelude); } } ViewPathGlob(_) => { + let subclass = GlobImport { is_prelude: is_prelude }; + let span = view_path.span; + parent.add_import_directive(module_path, subclass, span, item.id, vis); self.unresolved_imports += 1; - parent.add_import_directive(module_path, - GlobImport, - view_path.span, - item.id, - is_public, - is_prelude); } } } - ItemExternCrate(_) => { + ItemKind::ExternCrate(_) => { // n.b. we don't need to look at the path option here, because cstore already // did if let Some(crate_id) = self.session.cstore.extern_mod_stmt_cnum(item.id) { @@ -249,111 +243,110 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { }; let parent_link = ModuleParentLink(parent, name); let def = Def::Mod(def_id); - let module = self.new_extern_crate_module(parent_link, def, is_public, item.id); - self.define(parent, name, TypeNS, (module, sp)); + let module = self.new_extern_crate_module(parent_link, def, item.id); + self.define(parent, name, TypeNS, (module, sp, vis)); self.build_reduced_graph_for_external_crate(module); } } - ItemMod(..) => { + ItemKind::Mod(..) 
=> { let parent_link = ModuleParentLink(parent, name); - let def = Def::Mod(self.ast_map.local_def_id(item.id)); - let module = self.new_module(parent_link, Some(def), false, is_public); - self.define(parent, name, TypeNS, (module, sp)); - parent.module_children.borrow_mut().insert(item.id, module); + let def = Def::Mod(self.definitions.local_def_id(item.id)); + let module = self.new_module(parent_link, Some(def), false); + self.define(parent, name, TypeNS, (module, sp, vis)); + self.module_map.insert(item.id, module); *parent_ref = module; } - ItemForeignMod(..) => {} + ItemKind::ForeignMod(..) => {} // These items live in the value namespace. - ItemStatic(_, m, _) => { - let mutbl = m == hir::MutMutable; - let def = Def::Static(self.ast_map.local_def_id(item.id), mutbl); - self.define(parent, name, ValueNS, (def, sp, modifiers)); + ItemKind::Static(_, m, _) => { + let mutbl = m == Mutability::Mutable; + let def = Def::Static(self.definitions.local_def_id(item.id), mutbl); + self.define(parent, name, ValueNS, (def, sp, vis)); } - ItemConst(_, _) => { - let def = Def::Const(self.ast_map.local_def_id(item.id)); - self.define(parent, name, ValueNS, (def, sp, modifiers)); + ItemKind::Const(_, _) => { + let def = Def::Const(self.definitions.local_def_id(item.id)); + self.define(parent, name, ValueNS, (def, sp, vis)); } - ItemFn(_, _, _, _, _, _) => { - let def = Def::Fn(self.ast_map.local_def_id(item.id)); - self.define(parent, name, ValueNS, (def, sp, modifiers)); + ItemKind::Fn(_, _, _, _, _, _) => { + let def = Def::Fn(self.definitions.local_def_id(item.id)); + self.define(parent, name, ValueNS, (def, sp, vis)); } // These items live in the type namespace. - ItemTy(..) => { - let def = Def::TyAlias(self.ast_map.local_def_id(item.id)); - self.define(parent, name, TypeNS, (def, sp, modifiers)); + ItemKind::Ty(..) => { + let def = Def::TyAlias(self.definitions.local_def_id(item.id)); + self.define(parent, name, TypeNS, (def, sp, vis)); } - ItemEnum(ref enum_definition, _) => { + ItemKind::Enum(ref enum_definition, _) => { let parent_link = ModuleParentLink(parent, name); - let def = Def::Enum(self.ast_map.local_def_id(item.id)); - let module = self.new_module(parent_link, Some(def), false, is_public); - self.define(parent, name, TypeNS, (module, sp)); - - let variant_modifiers = if is_public { - DefModifiers::empty() - } else { - DefModifiers::PRIVATE_VARIANT - }; + let def = Def::Enum(self.definitions.local_def_id(item.id)); + let module = self.new_module(parent_link, Some(def), false); + self.define(parent, name, TypeNS, (module, sp, vis)); + for variant in &(*enum_definition).variants { - let item_def_id = self.ast_map.local_def_id(item.id); - self.build_reduced_graph_for_variant(variant, item_def_id, - module, variant_modifiers); + let item_def_id = self.definitions.local_def_id(item.id); + self.build_reduced_graph_for_variant(variant, item_def_id, module, vis); } } // These items live in both the type and value namespaces. - ItemStruct(ref struct_def, _) => { + ItemKind::Struct(ref struct_def, _) => { // Define a name in the type namespace. 
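The struct-handling arm that starts here goes on, in the lines that follow, to define tuple and unit-like struct names in the value namespace as well as the type namespace. A small runnable illustration in ordinary user code of why both entries are needed:

```
// Unit-like and tuple struct names double as values (their constructors),
// which is why the resolver gives them a ValueNS entry in addition to TypeNS.
struct Unit;
struct Pair(u32, u32);

fn main() {
    let _u: Unit = Unit;  // `Unit` used as a type and as a value
    let p = Pair(1, 2);   // `Pair` used as a constructor, i.e. a value
    println!("{}", p.0 + p.1);
}
```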
- let def = Def::Struct(self.ast_map.local_def_id(item.id)); - self.define(parent, name, TypeNS, (def, sp, modifiers)); + let def = Def::Struct(self.definitions.local_def_id(item.id)); + self.define(parent, name, TypeNS, (def, sp, vis)); // If this is a newtype or unit-like struct, define a name // in the value namespace as well if !struct_def.is_struct() { - let def = Def::Struct(self.ast_map.local_def_id(struct_def.id())); - self.define(parent, name, ValueNS, (def, sp, modifiers)); + let def = Def::Struct(self.definitions.local_def_id(struct_def.id())); + self.define(parent, name, ValueNS, (def, sp, vis)); } // Record the def ID and fields of this struct. - let field_names = struct_def.fields() - .iter() - .map(|f| f.name) - .collect(); - let item_def_id = self.ast_map.local_def_id(item.id); + let field_names = struct_def.fields().iter().enumerate().map(|(index, field)| { + self.resolve_visibility(&field.vis); + field.ident.map(|ident| ident.name) + .unwrap_or_else(|| token::intern(&index.to_string())) + }).collect(); + let item_def_id = self.definitions.local_def_id(item.id); self.structs.insert(item_def_id, field_names); } - ItemDefaultImpl(_, _) | ItemImpl(..) => {} + ItemKind::DefaultImpl(_, _) | ItemKind::Impl(..) => {} - ItemTrait(_, _, _, ref items) => { - let def_id = self.ast_map.local_def_id(item.id); + ItemKind::Trait(_, _, _, ref items) => { + let def_id = self.definitions.local_def_id(item.id); // Add all the items within to a new module. let parent_link = ModuleParentLink(parent, name); let def = Def::Trait(def_id); - let module_parent = self.new_module(parent_link, Some(def), false, is_public); - self.define(parent, name, TypeNS, (module_parent, sp)); + let module_parent = self.new_module(parent_link, Some(def), false); + self.define(parent, name, TypeNS, (module_parent, sp, vis)); // Add the names of all the items to the trait info. for item in items { - let item_def_id = self.ast_map.local_def_id(item.id); + let item_def_id = self.definitions.local_def_id(item.id); + let mut is_static_method = false; let (def, ns) = match item.node { - hir::ConstTraitItem(..) => (Def::AssociatedConst(item_def_id), ValueNS), - hir::MethodTraitItem(..) => (Def::Method(item_def_id), ValueNS), - hir::TypeTraitItem(..) => (Def::AssociatedTy(def_id, item_def_id), TypeNS), + TraitItemKind::Const(..) => (Def::AssociatedConst(item_def_id), ValueNS), + TraitItemKind::Method(ref sig, _) => { + is_static_method = sig.explicit_self.node == SelfKind::Static; + (Def::Method(item_def_id), ValueNS) + } + TraitItemKind::Type(..) 
=> (Def::AssociatedTy(def_id, item_def_id), TypeNS), }; - let modifiers = DefModifiers::PUBLIC; // NB: not DefModifiers::IMPORTABLE - self.define(module_parent, item.name, ns, (def, item.span, modifiers)); + self.define(module_parent, item.ident.name, ns, (def, item.span, vis)); - self.trait_item_map.insert((item.name, def_id), item_def_id); + self.trait_item_map.insert((item.ident.name, def_id), is_static_method); } } + ItemKind::Mac(_) => panic!("unexpanded macro in resolve!"), } } @@ -363,44 +356,38 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { variant: &Variant, item_id: DefId, parent: Module<'b>, - variant_modifiers: DefModifiers) { - let name = variant.node.name; + vis: ty::Visibility) { + let name = variant.node.name.name; if variant.node.data.is_struct() { // Not adding fields for variants as they are not accessed with a self receiver - let variant_def_id = self.ast_map.local_def_id(variant.node.data.id()); + let variant_def_id = self.definitions.local_def_id(variant.node.data.id()); self.structs.insert(variant_def_id, Vec::new()); } // Variants are always treated as importable to allow them to be glob used. // All variants are defined in both type and value namespaces as future-proofing. - let modifiers = DefModifiers::PUBLIC | DefModifiers::IMPORTABLE | variant_modifiers; - let def = Def::Variant(item_id, self.ast_map.local_def_id(variant.node.data.id())); - - self.define(parent, name, ValueNS, (def, variant.span, modifiers)); - self.define(parent, name, TypeNS, (def, variant.span, modifiers)); + let def = Def::Variant(item_id, self.definitions.local_def_id(variant.node.data.id())); + self.define(parent, name, ValueNS, (def, variant.span, vis)); + self.define(parent, name, TypeNS, (def, variant.span, vis)); } /// Constructs the reduced graph for one foreign item. fn build_reduced_graph_for_foreign_item(&mut self, foreign_item: &ForeignItem, parent: Module<'b>) { - let name = foreign_item.name; - let is_public = foreign_item.vis == hir::Public; - let modifiers = if is_public { - DefModifiers::PUBLIC - } else { - DefModifiers::empty() - } | DefModifiers::IMPORTABLE; + let name = foreign_item.ident.name; let def = match foreign_item.node { - ForeignItemFn(..) => { - Def::Fn(self.ast_map.local_def_id(foreign_item.id)) + ForeignItemKind::Fn(..) 
=> { + Def::Fn(self.definitions.local_def_id(foreign_item.id)) } - ForeignItemStatic(_, m) => { - Def::Static(self.ast_map.local_def_id(foreign_item.id), m) + ForeignItemKind::Static(_, m) => { + Def::Static(self.definitions.local_def_id(foreign_item.id), m) } }; - self.define(parent, name, ValueNS, (def, foreign_item.span, modifiers)); + self.current_module = parent; + let vis = self.resolve_visibility(&foreign_item.vis); + self.define(parent, name, ValueNS, (def, foreign_item.span, vis)); } fn build_reduced_graph_for_block(&mut self, block: &Block, parent: &mut Module<'b>) { @@ -412,8 +399,8 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { block_id); let parent_link = BlockParentLink(parent, block_id); - let new_module = self.new_module(parent_link, None, false, false); - parent.module_children.borrow_mut().insert(block_id, new_module); + let new_module = self.new_module(parent_link, None, false); + self.module_map.insert(block_id, new_module); *parent = new_module; } } @@ -434,32 +421,22 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { } let name = xcdef.name; - let is_public = xcdef.vis == ty::Visibility::Public || parent.is_trait(); - - let mut modifiers = DefModifiers::empty(); - if is_public { - modifiers = modifiers | DefModifiers::PUBLIC; - } - if parent.is_normal() { - modifiers = modifiers | DefModifiers::IMPORTABLE; - } + let vis = if parent.is_trait() { ty::Visibility::Public } else { xcdef.vis }; match def { Def::Mod(_) | Def::ForeignMod(_) | Def::Enum(..) => { - debug!("(building reduced graph for external crate) building module {} {}", - name, - is_public); + debug!("(building reduced graph for external crate) building module {} {:?}", + name, vis); let parent_link = ModuleParentLink(parent, name); - let module = self.new_module(parent_link, Some(def), true, is_public); - self.try_define(parent, name, TypeNS, (module, DUMMY_SP)); + let module = self.new_module(parent_link, Some(def), true); + self.try_define(parent, name, TypeNS, (module, DUMMY_SP, vis)); } Def::Variant(_, variant_id) => { debug!("(building reduced graph for external crate) building variant {}", name); // Variants are always treated as importable to allow them to be glob used. // All variants are defined in both type and value namespaces as future-proofing. - let modifiers = DefModifiers::PUBLIC | DefModifiers::IMPORTABLE; - self.try_define(parent, name, TypeNS, (def, DUMMY_SP, modifiers)); - self.try_define(parent, name, ValueNS, (def, DUMMY_SP, modifiers)); + self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis)); + self.try_define(parent, name, ValueNS, (def, DUMMY_SP, vis)); if self.session.cstore.variant_kind(variant_id) == Some(VariantKind::Struct) { // Not adding fields for variants as they are not accessed with a self receiver self.structs.insert(variant_id, Vec::new()); @@ -472,7 +449,7 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { Def::Method(..) 
=> { debug!("(building reduced graph for external crate) building value (fn/static) {}", name); - self.try_define(parent, name, ValueNS, (def, DUMMY_SP, modifiers)); + self.try_define(parent, name, ValueNS, (def, DUMMY_SP, vis)); } Def::Trait(def_id) => { debug!("(building reduced graph for external crate) building type {}", name); @@ -489,25 +466,25 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { '{}'", trait_item_name); - self.trait_item_map.insert((trait_item_name, def_id), trait_item_def.def_id()); + self.trait_item_map.insert((trait_item_name, def_id), false); } let parent_link = ModuleParentLink(parent, name); - let module = self.new_module(parent_link, Some(def), true, is_public); - self.try_define(parent, name, TypeNS, (module, DUMMY_SP)); + let module = self.new_module(parent_link, Some(def), true); + self.try_define(parent, name, TypeNS, (module, DUMMY_SP, vis)); } Def::TyAlias(..) | Def::AssociatedTy(..) => { debug!("(building reduced graph for external crate) building type {}", name); - self.try_define(parent, name, TypeNS, (def, DUMMY_SP, modifiers)); + self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis)); } Def::Struct(def_id) if self.session.cstore.tuple_struct_definition_if_ctor(def_id).is_none() => { debug!("(building reduced graph for external crate) building type and value for {}", name); - self.try_define(parent, name, TypeNS, (def, DUMMY_SP, modifiers)); + self.try_define(parent, name, TypeNS, (def, DUMMY_SP, vis)); if let Some(ctor_def_id) = self.session.cstore.struct_ctor_def_id(def_id) { let def = Def::Struct(ctor_def_id); - self.try_define(parent, name, ValueNS, (def, DUMMY_SP, modifiers)); + self.try_define(parent, name, ValueNS, (def, DUMMY_SP, vis)); } // Record the def ID and fields of this struct. @@ -547,20 +524,16 @@ impl<'b, 'tcx:'b> Resolver<'b, 'tcx> { } } -struct BuildReducedGraphVisitor<'a, 'b: 'a, 'tcx: 'b> { - resolver: &'a mut Resolver<'b, 'tcx>, +struct BuildReducedGraphVisitor<'a, 'b: 'a> { + resolver: &'a mut Resolver<'b>, parent: Module<'b>, } -impl<'a, 'b, 'v, 'tcx> Visitor<'v> for BuildReducedGraphVisitor<'a, 'b, 'tcx> { - fn visit_nested_item(&mut self, item: hir::ItemId) { - self.visit_item(self.resolver.ast_map.expect_item(item.id)) - } - +impl<'a, 'b, 'v> Visitor<'v> for BuildReducedGraphVisitor<'a, 'b> { fn visit_item(&mut self, item: &Item) { let old_parent = self.parent; self.resolver.build_reduced_graph_for_item(item, &mut self.parent); - intravisit::walk_item(self, item); + visit::walk_item(self, item); self.parent = old_parent; } @@ -571,7 +544,7 @@ impl<'a, 'b, 'v, 'tcx> Visitor<'v> for BuildReducedGraphVisitor<'a, 'b, 'tcx> { fn visit_block(&mut self, block: &Block) { let old_parent = self.parent; self.resolver.build_reduced_graph_for_block(block, &mut self.parent); - intravisit::walk_block(self, block); + visit::walk_block(self, block); self.parent = old_parent; } } diff --git a/src/librustc_resolve/check_unused.rs b/src/librustc_resolve/check_unused.rs index 9135b65673..64347d7b84 100644 --- a/src/librustc_resolve/check_unused.rs +++ b/src/librustc_resolve/check_unused.rs @@ -16,6 +16,8 @@ // resolve data structures and because it finalises the privacy information for // `use` directives. // +// Unused trait imports can't be checked until the method resolution. We save +// candidates here, and do the acutal check in librustc_typeck/check_unused.rs. 
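The comment just above explains that the resolver's unused-import pass only records candidate trait imports and defers the real decision to typeck. A runnable example in plain user code of an import that name resolution alone cannot prove used:

```
use std::io::Read;

fn main() {
    let mut buf = String::new();
    // `read_to_string` is a `Read` trait method; without the `use` above this
    // call would not type-check, yet nothing else in the file names `Read`,
    // so the resolver alone cannot tell whether the import is used.
    std::io::Cursor::new(b"deferred check".to_vec())
        .read_to_string(&mut buf)
        .unwrap();
    println!("{}", buf);
}
```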
use std::ops::{Deref, DerefMut}; @@ -23,58 +25,65 @@ use Resolver; use Namespace::{TypeNS, ValueNS}; use rustc::lint; -use syntax::ast; +use syntax::ast::{self, ViewPathGlob, ViewPathList, ViewPathSimple}; +use syntax::visit::{self, Visitor}; use syntax::codemap::{Span, DUMMY_SP}; -use rustc::hir; -use rustc::hir::{ViewPathGlob, ViewPathList, ViewPathSimple}; -use rustc::hir::intravisit::Visitor; -struct UnusedImportCheckVisitor<'a, 'b: 'a, 'tcx: 'b> { - resolver: &'a mut Resolver<'b, 'tcx>, +struct UnusedImportCheckVisitor<'a, 'b: 'a> { + resolver: &'a mut Resolver<'b>, } // Deref and DerefMut impls allow treating UnusedImportCheckVisitor as Resolver. -impl<'a, 'b, 'tcx:'b> Deref for UnusedImportCheckVisitor<'a, 'b, 'tcx> { - type Target = Resolver<'b, 'tcx>; +impl<'a, 'b> Deref for UnusedImportCheckVisitor<'a, 'b> { + type Target = Resolver<'b>; - fn deref<'c>(&'c self) -> &'c Resolver<'b, 'tcx> { + fn deref<'c>(&'c self) -> &'c Resolver<'b> { &*self.resolver } } -impl<'a, 'b, 'tcx:'b> DerefMut for UnusedImportCheckVisitor<'a, 'b, 'tcx> { - fn deref_mut<'c>(&'c mut self) -> &'c mut Resolver<'b, 'tcx> { +impl<'a, 'b> DerefMut for UnusedImportCheckVisitor<'a, 'b> { + fn deref_mut<'c>(&'c mut self) -> &'c mut Resolver<'b> { &mut *self.resolver } } -impl<'a, 'b, 'tcx> UnusedImportCheckVisitor<'a, 'b, 'tcx> { +impl<'a, 'b> UnusedImportCheckVisitor<'a, 'b> { // We have information about whether `use` (import) directives are actually // used now. If an import is not used at all, we signal a lint error. fn check_import(&mut self, id: ast::NodeId, span: Span) { if !self.used_imports.contains(&(id, TypeNS)) && !self.used_imports.contains(&(id, ValueNS)) { + if self.maybe_unused_trait_imports.contains(&id) { + // Check later. + return; + } self.session.add_lint(lint::builtin::UNUSED_IMPORTS, id, span, "unused import".to_string()); + } else { + // This trait import is definitely used, in a way other than + // method resolution. + self.maybe_unused_trait_imports.remove(&id); } } } -impl<'a, 'b, 'v, 'tcx> Visitor<'v> for UnusedImportCheckVisitor<'a, 'b, 'tcx> { - fn visit_item(&mut self, item: &hir::Item) { +impl<'a, 'b, 'v> Visitor<'v> for UnusedImportCheckVisitor<'a, 'b> { + fn visit_item(&mut self, item: &ast::Item) { + visit::walk_item(self, item); // Ignore is_public import statements because there's no way to be sure // whether they're used or not. Also ignore imports with a dummy span // because this means that they were generated in some fashion by the // compiler and we don't need to consider them. 
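Earlier in this hunk, the commentary notes that the `Deref`/`DerefMut` impls let `UnusedImportCheckVisitor` be treated as the `Resolver` it wraps, so fields like `used_imports` and `maybe_unused_trait_imports` are reachable without spelling out `self.resolver.` each time. A minimal sketch of the same pattern with stand-in types:

```
use std::ops::{Deref, DerefMut};

struct Resolver { used_imports: Vec<u32> }

struct UnusedCheck<'a> { resolver: &'a mut Resolver }

impl<'a> Deref for UnusedCheck<'a> {
    type Target = Resolver;
    fn deref(&self) -> &Resolver { &*self.resolver }
}

impl<'a> DerefMut for UnusedCheck<'a> {
    fn deref_mut(&mut self) -> &mut Resolver { &mut *self.resolver }
}

fn main() {
    let mut resolver = Resolver { used_imports: vec![3] };
    let mut check = UnusedCheck { resolver: &mut resolver };
    check.used_imports.push(5); // field access reaches the Resolver via DerefMut
    println!("{:?}", check.used_imports);
}
```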
- if item.vis == hir::Public || item.span.source_equal(&DUMMY_SP) { + if item.vis == ast::Visibility::Public || item.span.source_equal(&DUMMY_SP) { return; } match item.node { - hir::ItemExternCrate(_) => { + ast::ItemKind::ExternCrate(_) => { if let Some(crate_num) = self.session.cstore.extern_mod_stmt_cnum(item.id) { if !self.used_crates.contains(&crate_num) { self.session.add_lint(lint::builtin::UNUSED_EXTERN_CRATES, @@ -84,7 +93,7 @@ impl<'a, 'b, 'v, 'tcx> Visitor<'v> for UnusedImportCheckVisitor<'a, 'b, 'tcx> { } } } - hir::ItemUse(ref p) => { + ast::ItemKind::Use(ref p) => { match p.node { ViewPathSimple(_, _) => { self.check_import(item.id, p.span) @@ -105,7 +114,7 @@ impl<'a, 'b, 'v, 'tcx> Visitor<'v> for UnusedImportCheckVisitor<'a, 'b, 'tcx> { } } -pub fn check_crate(resolver: &mut Resolver, krate: &hir::Crate) { +pub fn check_crate(resolver: &mut Resolver, krate: &ast::Crate) { let mut visitor = UnusedImportCheckVisitor { resolver: resolver }; - krate.visit_all_items(&mut visitor); + visit::walk_crate(&mut visitor, krate); } diff --git a/src/librustc_resolve/diagnostics.rs b/src/librustc_resolve/diagnostics.rs index 8a196768ae..3514862205 100644 --- a/src/librustc_resolve/diagnostics.rs +++ b/src/librustc_resolve/diagnostics.rs @@ -497,6 +497,91 @@ impl Bar { ``` "##, +E0408: r##" +An "or" pattern was used where the variable bindings are not consistently bound +across patterns. + +Example of erroneous code: + +```compile_fail +match x { + Some(y) | None => { /* use y */ } // error: variable `y` from pattern #1 is + // not bound in pattern #2 + _ => () +} +``` + +Here, `y` is bound to the contents of the `Some` and can be used within the +block corresponding to the match arm. However, in case `x` is `None`, we have +not specified what `y` is, and the block will use a nonexistent variable. + +To fix this error, either split into multiple match arms: + +``` +let x = Some(1); +match x { + Some(y) => { /* use y */ } + None => { /* ... */ } +} +``` + +or, bind the variable to a field of the same type in all sub-patterns of the +or pattern: + +``` +let x = (0, 2); +match x { + (0, y) | (y, 0) => { /* use y */} + _ => {} +} +``` + +In this example, if `x` matches the pattern `(0, _)`, the second field is set +to `y`. If it matches `(_, 0)`, the first field is set to `y`; so in all +cases `y` is set to some value. +"##, + +E0409: r##" +An "or" pattern was used where the variable bindings are not consistently bound +across patterns. + +Example of erroneous code: + +```compile_fail +let x = (0, 2); +match x { + (0, ref y) | (y, 0) => { /* use y */} // error: variable `y` is bound with + // different mode in pattern #2 + // than in pattern #1 + _ => () +} +``` + +Here, `y` is bound by-value in one case and by-reference in the other. + +To fix this error, just use the same mode in both cases. +Generally using `ref` or `ref mut` where not already used will fix this: + +```ignore +let x = (0, 2); +match x { + (0, ref y) | (ref y, 0) => { /* use y */} + _ => () +} +``` + +Alternatively, split the pattern: + +``` +let x = (0, 2); +match x { + (y, 0) => { /* use y */ } + (0, ref y) => { /* use y */} + _ => () +} +``` +"##, + E0411: r##" The `Self` keyword was used outside an impl or a trait. Erroneous code example: @@ -623,6 +708,69 @@ let Foo = 12i32; // ok! The goal here is to avoid a conflict of names. "##, +E0414: r##" +A variable binding in an irrefutable pattern is shadowing the name of a +constant. 
Example of erroneous code: + +```compile_fail +const FOO: u8 = 7; + +let FOO = 5; // error: variable bindings cannot shadow constants + +// or + +fn bar(FOO: u8) { // error: variable bindings cannot shadow constants + +} + +// or + +for FOO in bar { + +} +``` + +Introducing a new variable in Rust is done through a pattern. Thus you can have +`let` bindings like `let (a, b) = ...`. However, patterns also allow constants +in them, e.g. if you want to match over a constant: + +```ignore +const FOO: u8 = 1; + +match (x,y) { + (3, 4) => { .. }, // it is (3,4) + (FOO, 1) => { .. }, // it is (1,1) + (foo, 1) => { .. }, // it is (anything, 1) + // call the value in the first slot "foo" + _ => { .. } // it is anything +} +``` + +Here, the second arm matches the value of `x` against the constant `FOO`, +whereas the third arm will accept any value of `x` and call it `foo`. + +This works for `match`, however in cases where an irrefutable pattern is +required, constants can't be used. An irrefutable pattern is one which always +matches, whose purpose is only to bind variable names to values. These are +required by let, for, and function argument patterns. + +Refutable patterns in such a situation do not make sense, for example: + +```ignore +let Some(x) = foo; // what if foo is None, instead? + +let (1, x) = foo; // what if foo.0 is not 1? + +let (SOME_CONST, x) = foo; // what if foo.0 is not SOME_CONST? + +let SOME_CONST = foo; // what if foo is not SOME_CONST? +``` + +Thus, an irrefutable variable binding can't contain a constant. + +To fix this error, just give the marked variable a different name. +"##, + E0415: r##" More than one function parameter have the same name. Example of erroneous code: @@ -916,11 +1064,14 @@ An import was unresolved. Erroneous code example: use something::Foo; // error: unresolved import `something::Foo`. ``` -Please verify you didn't misspell the import name or the import does exist -in the module from where you tried to import it. Example: +Paths in `use` statements are relative to the crate root. To import items +relative to the current and parent modules, use the `self::` and `super::` +prefixes, respectively. Also verify that you didn't misspell the import +name and that the import exists in the module from where you tried to +import it. Example: ```ignore -use something::Foo; // ok! +use self::something::Foo; // ok! mod something { pub struct Foo; @@ -928,7 +1079,7 @@ mod something { ``` Or, if you tried to use a module from an external crate, you may have missed -the `extern crate` declaration: +the `extern crate` declaration (which is usually placed in the crate root): ```ignore extern crate homura; // Required to use the `homura` crate @@ -948,6 +1099,51 @@ use something_which_doesnt_exist; Please verify you didn't misspell the import's name. "##, +E0434: r##" +This error indicates that a variable usage inside an inner function is invalid +because the variable comes from a dynamic environment. Inner functions do not +have access to their containing environment. + +Example of erroneous code: + +```compile_fail +fn foo() { + let y = 5; + fn bar() -> u32 { + y // error: can't capture dynamic environment in a fn item; use the + // || { ... } closure form instead. + } +} +``` + +Functions do not capture local variables. 
To fix this error, you can replace the +function with a closure: + +``` +fn foo() { + let y = 5; + let bar = || { + y + }; +} +``` + +or replace the captured variable with a constant or a static item: + +``` +fn foo() { + static mut X: u32 = 4; + const Y: u32 = 5; + fn bar() -> u32 { + unsafe { + X = 3; + } + Y + } +} +``` +"##, + E0435: r##" A non-constant value was used to initialise a constant. Example of erroneous code: @@ -1034,15 +1230,10 @@ register_diagnostics! { // E0258, E0402, // cannot use an outer type parameter in this context E0406, // undeclared associated type - E0408, // variable from pattern #1 is not bound in pattern # - E0409, // variable is bound with different mode in pattern # than in - // pattern #1 - E0410, // variable from pattern is not bound in pattern 1 - E0414, // only irrefutable patterns allowed here +// E0410, merged into 408 E0418, // is not an enum variant, struct or const E0420, // is not an associated const E0421, // unresolved associated const E0427, // cannot use `ref` binding mode with ... E0429, // `self` imports are only allowed within a { } list - E0434, // can't capture dynamic environment in a fn item } diff --git a/src/librustc_resolve/lib.rs b/src/librustc_resolve/lib.rs index a0c4d636fd..61ed88ec17 100644 --- a/src/librustc_resolve/lib.rs +++ b/src/librustc_resolve/lib.rs @@ -29,9 +29,6 @@ extern crate log; extern crate syntax; extern crate arena; #[macro_use] -#[no_link] -extern crate rustc_bitflags; -#[macro_use] extern crate rustc; use self::PatternBindingMode::*; @@ -45,45 +42,32 @@ use self::ModulePrefixResult::*; use self::AssocItemResolveResult::*; use self::BareIdentifierPatternResolution::*; use self::ParentLink::*; -use self::FallbackChecks::*; -use rustc::dep_graph::DepNode; -use rustc::hir::map as hir_map; +use rustc::hir::map::Definitions; +use rustc::hir::{self, PrimTy, TyBool, TyChar, TyFloat, TyInt, TyUint, TyStr}; use rustc::session::Session; use rustc::lint; -use rustc::middle::cstore::CrateStore; use rustc::hir::def::*; use rustc::hir::def_id::DefId; -use rustc::hir::pat_util::pat_bindings; +use rustc::ty; use rustc::ty::subst::{ParamSpace, FnSpace, TypeSpace}; -use rustc::hir::{Freevar, FreevarMap, TraitMap, GlobMap}; -use rustc::util::nodemap::{NodeMap, FnvHashMap, FnvHashSet}; +use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap}; +use rustc::util::nodemap::{NodeMap, NodeSet, FnvHashMap, FnvHashSet}; +use syntax::ext::mtwt; use syntax::ast::{self, FloatTy}; use syntax::ast::{CRATE_NODE_ID, Name, NodeId, CrateNum, IntTy, UintTy}; -use syntax::attr::AttrMetaMethods; -use syntax::codemap::{self, Span, Pos}; +use syntax::codemap::{self, Span}; use syntax::errors::DiagnosticBuilder; -use syntax::parse::token::{self, special_names, special_idents}; +use syntax::parse::token::{self, keywords}; use syntax::util::lev_distance::find_best_match_for_name; -use rustc::hir::intravisit::{self, FnKind, Visitor}; -use rustc::hir; -use rustc::hir::{Arm, BindByRef, BindByValue, BindingMode, Block}; -use rustc::hir::Crate; -use rustc::hir::{Expr, ExprAgain, ExprBreak, ExprCall, ExprField}; -use rustc::hir::{ExprLoop, ExprWhile, ExprMethodCall}; -use rustc::hir::{ExprPath, ExprStruct, FnDecl}; -use rustc::hir::{ForeignItemFn, ForeignItemStatic, Generics}; -use rustc::hir::{ImplItem, Item, ItemConst, ItemEnum, ItemExternCrate}; -use rustc::hir::{ItemFn, ItemForeignMod, ItemImpl, ItemMod, ItemStatic, ItemDefaultImpl}; -use rustc::hir::{ItemStruct, ItemTrait, ItemTy, ItemUse}; -use rustc::hir::Local; -use rustc::hir::{Pat, 
PatKind, Path, PrimTy}; -use rustc::hir::{PathSegment, PathParameters}; -use rustc::hir::HirVec; -use rustc::hir::{TraitRef, Ty, TyBool, TyChar, TyFloat, TyInt}; -use rustc::hir::{TyRptr, TyStr, TyUint, TyPath, TyPtr}; +use syntax::visit::{self, FnKind, Visitor}; +use syntax::ast::{Arm, BindingMode, Block, Crate, Expr, ExprKind}; +use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, Generics}; +use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind}; +use syntax::ast::{Local, Pat, PatKind, Path}; +use syntax::ast::{PathSegment, PathParameters, TraitItemKind, TraitRef, Ty, TyKind}; use std::collections::{HashMap, HashSet}; use std::cell::{Cell, RefCell}; @@ -100,17 +84,6 @@ mod check_unused; mod build_reduced_graph; mod resolve_imports; -// Perform the callback, not walking deeper if the return is true -macro_rules! execute_callback { - ($node: expr, $walker: expr) => ( - if let Some(ref callback) = $walker.callback { - if callback($node, &mut $walker.resolved) { - return; - } - } - ) -} - enum SuggestionType { Macro(String), Function(token::InternedString), @@ -142,26 +115,24 @@ enum ResolutionError<'a> { TypeNotMemberOfTrait(Name, &'a str), /// error E0438: const is not a member of trait ConstNotMemberOfTrait(Name, &'a str), - /// error E0408: variable `{}` from pattern #1 is not bound in pattern - VariableNotBoundInPattern(Name, usize), + /// error E0408: variable `{}` from pattern #{} is not bound in pattern #{} + VariableNotBoundInPattern(Name, usize, usize), /// error E0409: variable is bound with different mode in pattern #{} than in pattern #1 VariableBoundWithDifferentMode(Name, usize), - /// error E0410: variable from pattern is not bound in pattern #1 - VariableNotBoundInParentPattern(Name, usize), /// error E0411: use of `Self` outside of an impl or trait SelfUsedOutsideImplOrTrait, /// error E0412: use of undeclared UseOfUndeclared(&'a str, &'a str, SuggestedCandidates), - /// error E0413: declaration shadows an enum variant or unit-like struct in scope + /// error E0413: cannot be named the same as an enum variant or unit-like struct in scope DeclarationShadowsEnumVariantOrUnitLikeStruct(Name), /// error E0414: only irrefutable patterns allowed here - OnlyIrrefutablePatternsAllowedHere(DefId, Name), + ConstantForIrrefutableBinding(Name), /// error E0415: identifier is bound more than once in this parameter list IdentifierBoundMoreThanOnceInParameterList(&'a str), /// error E0416: identifier is bound more than once in the same pattern IdentifierBoundMoreThanOnceInSamePattern(&'a str), /// error E0417: static variables cannot be referenced in a pattern - StaticVariableReference, + StaticVariableReference(&'a NameBinding<'a>), /// error E0418: is not an enum variant, struct or const NotAnEnumVariantStructOrConst(&'a str), /// error E0419: unresolved enum variant, struct or const @@ -177,7 +148,7 @@ enum ResolutionError<'a> { /// error E0424: `self` is not available in a static method SelfNotAvailableInStaticMethod, /// error E0425: unresolved name - UnresolvedName(&'a str, &'a str, UnresolvedNameContext), + UnresolvedName(&'a str, &'a str, UnresolvedNameContext<'a>), /// error E0426: use of undeclared label UndeclaredLabel(&'a str), /// error E0427: cannot use `ref` binding mode with ... @@ -200,12 +171,12 @@ enum ResolutionError<'a> { /// Context of where `ResolutionError::UnresolvedName` arose. 
#[derive(Clone, PartialEq, Eq, Debug)] -enum UnresolvedNameContext { - /// `PathIsMod(id)` indicates that a given path, used in +enum UnresolvedNameContext<'a> { + /// `PathIsMod(parent)` indicates that a given path, used in /// expression context, actually resolved to a module rather than - /// a value. The `id` attached to the variant is the node id of - /// the erroneous path expression. - PathIsMod(ast::NodeId), + /// a value. The optional expression attached to the variant is the + /// the parent of the erroneous path expression. + PathIsMod(Option<&'a Expr>), /// `Other` means we have no extra information about the context /// of the unresolved name error. (Maybe we could eliminate all @@ -213,27 +184,29 @@ enum UnresolvedNameContext { Other, } -fn resolve_error<'b, 'a: 'b, 'tcx: 'a>(resolver: &'b Resolver<'a, 'tcx>, - span: syntax::codemap::Span, - resolution_error: ResolutionError<'b>) { +fn resolve_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>, + span: syntax::codemap::Span, + resolution_error: ResolutionError<'c>) { resolve_struct_error(resolver, span, resolution_error).emit(); } -fn resolve_struct_error<'b, 'a: 'b, 'tcx: 'a>(resolver: &'b Resolver<'a, 'tcx>, - span: syntax::codemap::Span, - resolution_error: ResolutionError<'b>) - -> DiagnosticBuilder<'a> { +fn resolve_struct_error<'b, 'a: 'b, 'c>(resolver: &'b Resolver<'a>, + span: syntax::codemap::Span, + resolution_error: ResolutionError<'c>) + -> DiagnosticBuilder<'a> { if !resolver.emit_errors { return resolver.session.diagnostic().struct_dummy(); } match resolution_error { ResolutionError::TypeParametersFromOuterFunction => { - struct_span_err!(resolver.session, - span, - E0401, - "can't use type parameters from outer function; try using a local \ - type parameter instead") + let mut err = struct_span_err!(resolver.session, + span, + E0401, + "can't use type parameters from outer function; \ + try using a local type parameter instead"); + err.span_label(span, &format!("use of type variable from outer function")); + err } ResolutionError::OuterTypeParameterContext => { struct_span_err!(resolver.session, @@ -258,7 +231,8 @@ fn resolve_struct_error<'b, 'a: 'b, 'tcx: 'a>(resolver: &'b Resolver<'a, 'tcx>, E0405, "trait `{}` is not in scope", name); - show_candidates(&mut err, span, &candidates); + show_candidates(&mut err, &candidates); + err.span_label(span, &format!("`{}` is not in scope", name)); err } ResolutionError::UndeclaredAssociatedType => { @@ -288,13 +262,14 @@ fn resolve_struct_error<'b, 'a: 'b, 'tcx: 'a>(resolver: &'b Resolver<'a, 'tcx>, const_, trait_) } - ResolutionError::VariableNotBoundInPattern(variable_name, pattern_number) => { + ResolutionError::VariableNotBoundInPattern(variable_name, from, to) => { struct_span_err!(resolver.session, span, E0408, - "variable `{}` from pattern #1 is not bound in pattern #{}", + "variable `{}` from pattern #{} is not bound in pattern #{}", variable_name, - pattern_number) + from, + to) } ResolutionError::VariableBoundWithDifferentMode(variable_name, pattern_number) => { struct_span_err!(resolver.session, @@ -305,19 +280,13 @@ fn resolve_struct_error<'b, 'a: 'b, 'tcx: 'a>(resolver: &'b Resolver<'a, 'tcx>, variable_name, pattern_number) } - ResolutionError::VariableNotBoundInParentPattern(variable_name, pattern_number) => { - struct_span_err!(resolver.session, - span, - E0410, - "variable `{}` from pattern #{} is not bound in pattern #1", - variable_name, - pattern_number) - } ResolutionError::SelfUsedOutsideImplOrTrait => { - struct_span_err!(resolver.session, - span, - 
E0411, - "use of `Self` outside of an impl or trait") + let mut err = struct_span_err!(resolver.session, + span, + E0411, + "use of `Self` outside of an impl or trait"); + err.span_label(span, &format!("used outside of impl or trait")); + err } ResolutionError::UseOfUndeclared(kind, name, candidates) => { let mut err = struct_span_err!(resolver.session, @@ -326,56 +295,66 @@ fn resolve_struct_error<'b, 'a: 'b, 'tcx: 'a>(resolver: &'b Resolver<'a, 'tcx>, "{} `{}` is undefined or not in scope", kind, name); - show_candidates(&mut err, span, &candidates); + show_candidates(&mut err, &candidates); + err.span_label(span, &format!("undefined or not in scope")); err } ResolutionError::DeclarationShadowsEnumVariantOrUnitLikeStruct(name) => { - struct_span_err!(resolver.session, + let mut err = struct_span_err!(resolver.session, span, E0413, - "declaration of `{}` shadows an enum variant \ + "`{}` cannot be named the same as an enum variant \ or unit-like struct in scope", - name) + name); + err.span_label(span, + &format!("has same name as enum variant or unit-like struct")); + err } - ResolutionError::OnlyIrrefutablePatternsAllowedHere(did, name) => { + ResolutionError::ConstantForIrrefutableBinding(name) => { let mut err = struct_span_err!(resolver.session, span, E0414, - "only irrefutable patterns allowed here"); - err.span_note(span, - "there already is a constant in scope sharing the same \ - name as this pattern"); - if let Some(sp) = resolver.ast_map.span_if_local(did) { - err.span_note(sp, "constant defined here"); - } + "let variables cannot be named the same as const variables"); + err.span_label(span, + &format!("cannot be named the same as a const variable")); if let Some(binding) = resolver.current_module .resolve_name_in_lexical_scope(name, ValueNS) { - if binding.is_import() { - err.span_note(binding.span.unwrap(), "constant imported here"); - } + let participle = if binding.is_import() { "imported" } else { "defined" }; + err.span_label(binding.span, &format!("a constant `{}` is {} here", + name, participle)); } err } ResolutionError::IdentifierBoundMoreThanOnceInParameterList(identifier) => { - struct_span_err!(resolver.session, + let mut err = struct_span_err!(resolver.session, span, E0415, "identifier `{}` is bound more than once in this parameter list", - identifier) + identifier); + err.span_label(span, &format!("used as parameter more than once")); + err } ResolutionError::IdentifierBoundMoreThanOnceInSamePattern(identifier) => { - struct_span_err!(resolver.session, + let mut err = struct_span_err!(resolver.session, span, E0416, "identifier `{}` is bound more than once in the same pattern", - identifier) + identifier); + err.span_label(span, &format!("used in a pattern more than once")); + err } - ResolutionError::StaticVariableReference => { - struct_span_err!(resolver.session, - span, - E0417, - "static variables cannot be referenced in a pattern, use a \ - `const` instead") + ResolutionError::StaticVariableReference(binding) => { + let mut err = struct_span_err!(resolver.session, + span, + E0417, + "static variables cannot be referenced in a \ + pattern, use a `const` instead"); + err.span_label(span, &format!("static variable used in pattern")); + if binding.span != codemap::DUMMY_SP { + let participle = if binding.is_import() { "imported" } else { "defined" }; + err.span_label(binding.span, &format!("static variable {} here", participle)); + } + err } ResolutionError::NotAnEnumVariantStructOrConst(name) => { struct_span_err!(resolver.session, @@ -437,39 +416,25 @@ fn 
resolve_struct_error<'b, 'a: 'b, 'tcx: 'a>(resolver: &'b Resolver<'a, 'tcx>, match context { UnresolvedNameContext::Other => { } // no help available - UnresolvedNameContext::PathIsMod(id) => { - let mut help_msg = String::new(); - let parent_id = resolver.ast_map.get_parent_node(id); - if let Some(hir_map::Node::NodeExpr(e)) = resolver.ast_map.find(parent_id) { - match e.node { - ExprField(_, ident) => { - help_msg = format!("To reference an item from the \ - `{module}` module, use \ - `{module}::{ident}`", - module = path, - ident = ident.node); - } - ExprMethodCall(ident, _, _) => { - help_msg = format!("To call a function from the \ - `{module}` module, use \ - `{module}::{ident}(..)`", - module = path, - ident = ident.node); - } - ExprCall(_, _) => { - help_msg = format!("No function corresponds to `{module}(..)`", - module = path); - } - _ => { } // no help available + UnresolvedNameContext::PathIsMod(parent) => { + err.help(&match parent.map(|parent| &parent.node) { + Some(&ExprKind::Field(_, ident)) => { + format!("To reference an item from the `{module}` module, \ + use `{module}::{ident}`", + module = path, + ident = ident.node) } - } else { - help_msg = format!("Module `{module}` cannot be the value of an expression", - module = path); - } - - if !help_msg.is_empty() { - err.fileline_help(span, &help_msg); - } + Some(&ExprKind::MethodCall(ident, _, _)) => { + format!("To call a function from the `{module}` module, \ + use `{module}::{ident}(..)`", + module = path, + ident = ident.node) + } + _ => { + format!("Module `{module}` cannot be used as an expression", + module = path) + } + }); } } err @@ -557,27 +522,20 @@ pub enum Namespace { ValueNS, } -impl<'a, 'v, 'tcx> Visitor<'v> for Resolver<'a, 'tcx> { - fn visit_nested_item(&mut self, item: hir::ItemId) { - self.visit_item(self.ast_map.expect_item(item.id)) - } +impl<'a, 'v> Visitor<'v> for Resolver<'a> { fn visit_item(&mut self, item: &Item) { - execute_callback!(hir_map::Node::NodeItem(item), self); self.resolve_item(item); } fn visit_arm(&mut self, arm: &Arm) { self.resolve_arm(arm); } fn visit_block(&mut self, block: &Block) { - execute_callback!(hir_map::Node::NodeBlock(block), self); self.resolve_block(block); } fn visit_expr(&mut self, expr: &Expr) { - execute_callback!(hir_map::Node::NodeExpr(expr), self); - self.resolve_expr(expr); + self.resolve_expr(expr, None); } fn visit_local(&mut self, local: &Local) { - execute_callback!(hir_map::Node::NodeLocal(&local.pat), self); self.resolve_local(local); } fn visit_ty(&mut self, ty: &Ty) { @@ -586,7 +544,7 @@ impl<'a, 'v, 'tcx> Visitor<'v> for Resolver<'a, 'tcx> { fn visit_generics(&mut self, generics: &Generics) { self.resolve_generics(generics); } - fn visit_poly_trait_ref(&mut self, tref: &hir::PolyTraitRef, m: &hir::TraitBoundModifier) { + fn visit_poly_trait_ref(&mut self, tref: &ast::PolyTraitRef, m: &ast::TraitBoundModifier) { match self.resolve_trait_reference(tref.trait_ref.ref_id, &tref.trait_ref.path, 0) { Ok(def) => self.record_def(tref.trait_ref.ref_id, def), Err(_) => { @@ -594,13 +552,12 @@ impl<'a, 'v, 'tcx> Visitor<'v> for Resolver<'a, 'tcx> { self.record_def(tref.trait_ref.ref_id, err_path_resolution()) } } - intravisit::walk_poly_trait_ref(self, tref, m); + visit::walk_poly_trait_ref(self, tref, m); } fn visit_variant(&mut self, - variant: &hir::Variant, + variant: &ast::Variant, generics: &Generics, item_id: ast::NodeId) { - execute_callback!(hir_map::Node::NodeVariant(variant), self); if let Some(ref dis_expr) = variant.node.disr_expr { // resolve the 
discriminator expr as a constant self.with_constant_rib(|this| { @@ -608,23 +565,22 @@ impl<'a, 'v, 'tcx> Visitor<'v> for Resolver<'a, 'tcx> { }); } - // `intravisit::walk_variant` without the discriminant expression. + // `visit::walk_variant` without the discriminant expression. self.visit_variant_data(&variant.node.data, variant.node.name, generics, item_id, variant.span); } - fn visit_foreign_item(&mut self, foreign_item: &hir::ForeignItem) { - execute_callback!(hir_map::Node::NodeForeignItem(foreign_item), self); + fn visit_foreign_item(&mut self, foreign_item: &ForeignItem) { let type_parameters = match foreign_item.node { - ForeignItemFn(_, ref generics) => { + ForeignItemKind::Fn(_, ref generics) => { HasTypeParameters(generics, FnSpace, ItemRibKind) } - ForeignItemStatic(..) => NoTypeParameters, + ForeignItemKind::Static(..) => NoTypeParameters, }; self.with_type_parameter_rib(type_parameters, |this| { - intravisit::walk_foreign_item(this, foreign_item); + visit::walk_foreign_item(this, foreign_item); }); } fn visit_fn(&mut self, @@ -634,16 +590,15 @@ impl<'a, 'v, 'tcx> Visitor<'v> for Resolver<'a, 'tcx> { _: Span, node_id: NodeId) { let rib_kind = match function_kind { - FnKind::ItemFn(_, generics, _, _, _, _, _) => { + FnKind::ItemFn(_, generics, _, _, _, _) => { self.visit_generics(generics); ItemRibKind } - FnKind::Method(_, sig, _, _) => { + FnKind::Method(_, sig, _) => { self.visit_generics(&sig.generics); - self.visit_explicit_self(&sig.explicit_self); MethodRibKind } - FnKind::Closure(_) => ClosureRibKind(node_id), + FnKind::Closure => ClosureRibKind(node_id), }; self.resolve_function(rib_kind, declaration, block); } @@ -678,24 +633,22 @@ impl ResolveResult { enum FallbackSuggestion { NoSuggestion, Field, - Method, TraitItem, - StaticMethod(String), TraitMethod(String), } #[derive(Copy, Clone)] -enum TypeParameters<'tcx, 'a> { +enum TypeParameters<'a, 'b> { NoTypeParameters, HasTypeParameters(// Type parameters. - &'a Generics, + &'b Generics, // Identifies the things that these parameters // were declared on (type, fn, etc) ParamSpace, // The kind of the rib used for type parameters. - RibKind<'tcx>), + RibKind<'a>), } // The rib kind controls the translation of local @@ -796,10 +749,6 @@ impl<'a> LexicalScopeBinding<'a> { } } - fn def(self) -> Def { - self.local_def().def - } - fn module(self) -> Option> { match self { LexicalScopeBinding::Item(binding) => binding.module(), @@ -820,7 +769,6 @@ enum ParentLink<'a> { pub struct ModuleS<'a> { parent_link: ParentLink<'a>, def: Option, - is_public: bool, // If the module is an extern crate, `def` is root of the external crate and `extern_crate_id` // is the NodeId of the local `extern crate` item (otherwise, `extern_crate_id` is None). @@ -829,29 +777,13 @@ pub struct ModuleS<'a> { resolutions: RefCell>>>, unresolved_imports: RefCell>>, - // The module children of this node, including normal modules and anonymous modules. - // Anonymous children are pseudo-modules that are implicitly created around items - // contained within blocks. - // - // For example, if we have this: - // - // fn f() { - // fn g() { - // ... - // } - // } - // - // There will be an anonymous module created around `g` with the ID of the - // entry block for `f`. - module_children: RefCell>>, - prelude: RefCell>>, glob_importers: RefCell, &'a ImportDirective<'a>)>>, globs: RefCell>>, // Used to memoize the traits in this module for faster searches through all traits in scope. 
- traits: RefCell]>>>, + traits: RefCell)]>>>, // Whether this module is populated. If not populated, any attempt to // access the children must be preceded with a @@ -867,16 +799,13 @@ impl<'a> ModuleS<'a> { fn new(parent_link: ParentLink<'a>, def: Option, external: bool, - is_public: bool, arenas: &'a ResolverArenas<'a>) -> Self { ModuleS { parent_link: parent_link, def: def, - is_public: is_public, extern_crate_id: None, resolutions: RefCell::new(HashMap::new()), unresolved_imports: RefCell::new(Vec::new()), - module_children: RefCell::new(NodeMap()), prelude: RefCell::new(None), glob_importers: RefCell::new(Vec::new()), globs: RefCell::new((Vec::new())), @@ -896,9 +825,10 @@ impl<'a> ModuleS<'a> { self.def.as_ref().map(Def::def_id) } + // `self` resolves to the first module ancestor that `is_normal`. fn is_normal(&self) -> bool { match self.def { - Some(Def::Mod(_)) | Some(Def::ForeignMod(_)) => true, + Some(Def::Mod(_)) => true, _ => false, } } @@ -909,50 +839,20 @@ impl<'a> ModuleS<'a> { _ => false, } } - - fn is_ancestor_of(&self, module: Module<'a>) -> bool { - if self.def_id() == module.def_id() { return true } - match module.parent_link { - ParentLink::BlockParentLink(parent, _) | - ParentLink::ModuleParentLink(parent, _) => self.is_ancestor_of(parent), - _ => false, - } - } } impl<'a> fmt::Debug for ModuleS<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, - "{:?}, {}", - self.def, - if self.is_public { - "public" - } else { - "private" - }) - } -} - -bitflags! { - #[derive(Debug)] - flags DefModifiers: u8 { - // Enum variants are always considered `PUBLIC`, this is needed for `use Enum::Variant` - // or `use Enum::*` to work on private enums. - const PUBLIC = 1 << 0, - const IMPORTABLE = 1 << 1, - // Variants are considered `PUBLIC`, but some of them live in private enums. - // We need to track them to prohibit reexports like `pub use PrivEnum::Variant`. - const PRIVATE_VARIANT = 1 << 2, - const GLOB_IMPORTED = 1 << 3, + write!(f, "{:?}", self.def) } } // Records a possibly-private value, type, or module definition. 
#[derive(Clone, Debug)] pub struct NameBinding<'a> { - modifiers: DefModifiers, kind: NameBindingKind<'a>, - span: Option, + span: Span, + vis: ty::Visibility, } #[derive(Clone, Debug)] @@ -961,7 +861,7 @@ enum NameBindingKind<'a> { Module(Module<'a>), Import { binding: &'a NameBinding<'a>, - id: NodeId, + directive: &'a ImportDirective<'a>, // Some(error) if using this imported name causes the import to be a privacy error privacy_error: Option>>, }, @@ -971,16 +871,6 @@ enum NameBindingKind<'a> { struct PrivacyError<'a>(Span, Name, &'a NameBinding<'a>); impl<'a> NameBinding<'a> { - fn create_from_module(module: Module<'a>, span: Option) -> Self { - let modifiers = if module.is_public { - DefModifiers::PUBLIC - } else { - DefModifiers::empty() - } | DefModifiers::IMPORTABLE; - - NameBinding { modifiers: modifiers, kind: NameBindingKind::Module(module), span: span } - } - fn module(&self) -> Option> { match self.kind { NameBindingKind::Module(module) => Some(module), @@ -997,12 +887,20 @@ impl<'a> NameBinding<'a> { } } - fn defined_with(&self, modifiers: DefModifiers) -> bool { - self.modifiers.contains(modifiers) + fn is_pseudo_public(&self) -> bool { + self.pseudo_vis() == ty::Visibility::Public + } + + // We sometimes need to treat variants as `pub` for backwards compatibility + fn pseudo_vis(&self) -> ty::Visibility { + if self.is_variant() { ty::Visibility::Public } else { self.vis } } - fn is_public(&self) -> bool { - self.defined_with(DefModifiers::PUBLIC) + fn is_variant(&self) -> bool { + match self.kind { + NameBindingKind::Def(Def::Variant(..)) => true, + _ => false, + } } fn is_extern_crate(&self) -> bool { @@ -1015,6 +913,20 @@ impl<'a> NameBinding<'a> { _ => false, } } + + fn is_glob_import(&self) -> bool { + match self.kind { + NameBindingKind::Import { directive, .. } => directive.is_glob(), + _ => false, + } + } + + fn is_importable(&self) -> bool { + match self.def().unwrap() { + Def::AssociatedConst(..) | Def::Method(..) | Def::AssociatedTy(..) => false, + _ => true, + } + } } /// Interns the names of the primitive types. @@ -1051,14 +963,14 @@ impl PrimitiveTypeTable { } /// The main resolver class. -pub struct Resolver<'a, 'tcx: 'a> { +pub struct Resolver<'a> { session: &'a Session, - ast_map: &'a hir_map::Map<'tcx>, + definitions: &'a mut Definitions, graph_root: Module<'a>, - trait_item_map: FnvHashMap<(Name, DefId), DefId>, + trait_item_map: FnvHashMap<(Name, DefId), bool /* is static method? */>, structs: FnvHashMap>, @@ -1087,30 +999,42 @@ pub struct Resolver<'a, 'tcx: 'a> { // The idents for the primitive types. primitive_type_table: PrimitiveTypeTable, - def_map: RefCell, - freevars: FreevarMap, + pub def_map: DefMap, + pub freevars: FreevarMap, freevars_seen: NodeMap>, - export_map: ExportMap, - trait_map: TraitMap, + pub export_map: ExportMap, + pub trait_map: TraitMap, + + // A map from nodes to modules, both normal (`mod`) modules and anonymous modules. + // Anonymous modules are pseudo-modules that are implicitly created around items + // contained within blocks. + // + // For example, if we have this: + // + // fn f() { + // fn g() { + // ... + // } + // } + // + // There will be an anonymous module created around `g` with the ID of the + // entry block for `f`. + module_map: NodeMap>, // Whether or not to print error messages. 
Can be set to true // when getting additional info for error message suggestions, // so as to avoid printing duplicate errors emit_errors: bool, - make_glob_map: bool, + pub make_glob_map: bool, // Maps imports to the names of items actually imported (this actually maps // all imports, but only glob imports are actually interesting). - glob_map: GlobMap, + pub glob_map: GlobMap, used_imports: HashSet<(NodeId, Namespace)>, used_crates: HashSet, + pub maybe_unused_trait_imports: NodeSet, - // Callback function for intercepting walks - callback: Option bool>>, - // The intention is that the callback modifies this flag. - // Once set, the resolver falls out of the walk, preserving the ribs. - resolved: bool, privacy_errors: Vec>, arenas: &'a ResolverArenas<'a>, @@ -1118,6 +1042,7 @@ pub struct Resolver<'a, 'tcx: 'a> { struct ResolverArenas<'a> { modules: arena::TypedArena>, + local_modules: RefCell>>, name_bindings: arena::TypedArena>, import_directives: arena::TypedArena>, name_resolutions: arena::TypedArena>>, @@ -1125,7 +1050,14 @@ struct ResolverArenas<'a> { impl<'a> ResolverArenas<'a> { fn alloc_module(&'a self, module: ModuleS<'a>) -> Module<'a> { - self.modules.alloc(module) + let module = self.modules.alloc(module); + if module.def_id().map(|def_id| def_id.is_local()).unwrap_or(true) { + self.local_modules.borrow_mut().push(module); + } + module + } + fn local_modules(&'a self) -> ::std::cell::Ref<'a, Vec>> { + self.local_modules.borrow() } fn alloc_name_binding(&'a self, name_binding: NameBinding<'a>) -> &'a NameBinding<'a> { self.name_bindings.alloc(name_binding) @@ -1139,27 +1071,83 @@ impl<'a> ResolverArenas<'a> { } } -#[derive(PartialEq)] -enum FallbackChecks { - Everything, - OnlyTraitAndStatics, +impl<'a> ty::NodeIdTree for Resolver<'a> { + fn is_descendant_of(&self, node: NodeId, ancestor: NodeId) -> bool { + let ancestor = self.definitions.local_def_id(ancestor); + let mut module = *self.module_map.get(&node).unwrap(); + while module.def_id() != Some(ancestor) { + let module_parent = match self.get_nearest_normal_module_parent(module) { + Some(parent) => parent, + None => return false, + }; + module = module_parent; + } + true + } } -impl<'a, 'tcx> Resolver<'a, 'tcx> { +impl<'a> hir::lowering::Resolver for Resolver<'a> { + fn resolve_generated_global_path(&mut self, path: &hir::Path, is_value: bool) -> Def { + let namespace = if is_value { ValueNS } else { TypeNS }; + match self.resolve_crate_relative_path(path.span, &path.segments, namespace) { + Ok(binding) => binding.def().unwrap(), + Err(true) => Def::Err, + Err(false) => { + let path_name = &format!("{}", path); + let error = + ResolutionError::UnresolvedName(path_name, "", UnresolvedNameContext::Other); + resolve_error(self, path.span, error); + Def::Err + } + } + } + + fn get_resolution(&mut self, id: NodeId) -> Option { + self.def_map.get(&id).cloned() + } + + fn record_resolution(&mut self, id: NodeId, def: Def) { + self.def_map.insert(id, PathResolution { base_def: def, depth: 0 }); + } + + fn definitions(&mut self) -> Option<&mut Definitions> { + Some(self.definitions) + } +} + +trait Named { + fn name(&self) -> Name; +} + +impl Named for ast::PathSegment { + fn name(&self) -> Name { + self.identifier.name + } +} + +impl Named for hir::PathSegment { + fn name(&self) -> Name { + self.name + } +} + +impl<'a> Resolver<'a> { fn new(session: &'a Session, - ast_map: &'a hir_map::Map<'tcx>, + definitions: &'a mut Definitions, make_glob_map: MakeGlobMap, arenas: &'a ResolverArenas<'a>) - -> Resolver<'a, 'tcx> { - let 
root_def_id = ast_map.local_def_id(CRATE_NODE_ID); + -> Resolver<'a> { + let root_def_id = definitions.local_def_id(CRATE_NODE_ID); let graph_root = - ModuleS::new(NoParentLink, Some(Def::Mod(root_def_id)), false, true, arenas); + ModuleS::new(NoParentLink, Some(Def::Mod(root_def_id)), false, arenas); let graph_root = arenas.alloc_module(graph_root); + let mut module_map = NodeMap(); + module_map.insert(CRATE_NODE_ID, graph_root); Resolver { session: session, - ast_map: ast_map, + definitions: definitions, // The outermost module has def ID 0; this is not reflected in the // AST. @@ -1180,20 +1168,21 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { primitive_type_table: PrimitiveTypeTable::new(), - def_map: RefCell::new(NodeMap()), + def_map: NodeMap(), freevars: NodeMap(), freevars_seen: NodeMap(), export_map: NodeMap(), trait_map: NodeMap(), - used_imports: HashSet::new(), - used_crates: HashSet::new(), + module_map: module_map, emit_errors: true, make_glob_map: make_glob_map == MakeGlobMap::Yes, glob_map: NodeMap(), - callback: None, - resolved: false, + used_imports: HashSet::new(), + used_crates: HashSet::new(), + maybe_unused_trait_imports: NodeSet(), + privacy_errors: Vec::new(), arenas: arenas, @@ -1203,27 +1192,21 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { fn arenas() -> ResolverArenas<'a> { ResolverArenas { modules: arena::TypedArena::new(), + local_modules: RefCell::new(Vec::new()), name_bindings: arena::TypedArena::new(), import_directives: arena::TypedArena::new(), name_resolutions: arena::TypedArena::new(), } } - fn new_module(&self, - parent_link: ParentLink<'a>, - def: Option, - external: bool, - is_public: bool) -> Module<'a> { - self.arenas.alloc_module(ModuleS::new(parent_link, def, external, is_public, self.arenas)) + fn new_module(&self, parent_link: ParentLink<'a>, def: Option, external: bool) + -> Module<'a> { + self.arenas.alloc_module(ModuleS::new(parent_link, def, external, self.arenas)) } - fn new_extern_crate_module(&self, - parent_link: ParentLink<'a>, - def: Def, - is_public: bool, - local_node_id: NodeId) + fn new_extern_crate_module(&self, parent_link: ParentLink<'a>, def: Def, local_node_id: NodeId) -> Module<'a> { - let mut module = ModuleS::new(parent_link, Some(def), false, is_public, self.arenas); + let mut module = ModuleS::new(parent_link, Some(def), false, self.arenas); module.extern_crate_id = Some(local_node_id); self.arenas.modules.alloc(module) } @@ -1233,18 +1216,18 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } #[inline] - fn record_use(&mut self, name: Name, ns: Namespace, binding: &'a NameBinding<'a>) { + fn record_use(&mut self, name: Name, binding: &'a NameBinding<'a>) { // track extern crates for unused_extern_crate lint if let Some(DefId { krate, .. }) = binding.module().and_then(ModuleS::def_id) { self.used_crates.insert(krate); } - let (import_id, privacy_error) = match binding.kind { - NameBindingKind::Import { id, ref privacy_error, .. } => (id, privacy_error), + let (directive, privacy_error) = match binding.kind { + NameBindingKind::Import { directive, ref privacy_error, .. 
} => + (directive, privacy_error), _ => return, }; - self.used_imports.insert((import_id, ns)); if let Some(error) = privacy_error.as_ref() { self.privacy_errors.push((**error).clone()); } @@ -1252,22 +1235,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { if !self.make_glob_map { return; } - if self.glob_map.contains_key(&import_id) { - self.glob_map.get_mut(&import_id).unwrap().insert(name); + if self.glob_map.contains_key(&directive.id) { + self.glob_map.get_mut(&directive.id).unwrap().insert(name); return; } let mut new_set = FnvHashSet(); new_set.insert(name); - self.glob_map.insert(import_id, new_set); - } - - fn get_trait_name(&self, did: DefId) -> Name { - if let Some(node_id) = self.ast_map.as_local_node_id(did) { - self.ast_map.expect_item(node_id).name - } else { - self.session.cstore.item_name(did) - } + self.glob_map.insert(directive.id, new_set); } /// Resolves the given module path from the given root `module_`. @@ -1302,10 +1277,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { Failed(None) => { let segment_name = name.as_str(); let module_name = module_to_string(search_module); - let mut span = span; let msg = if "???" == &module_name { - span.hi = span.lo + Pos::from_usize(segment_name.len()); - match search_parent_externals(name, &self.current_module) { Some(module) => { let path_str = names_to_string(module_path); @@ -1339,7 +1311,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // Check to see whether there are type bindings, and, if // so, whether there is a module within. if let Some(module_def) = binding.module() { - self.check_privacy(search_module, name, binding, span); + self.check_privacy(name, binding, span); search_module = module_def; } else { let msg = format!("Not a module `{}`", name); @@ -1395,7 +1367,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // This is not a crate-relative path. We resolve the // first component of the path in the current lexical // scope and then proceed to resolve below that. - let ident = hir::Ident::from_name(module_path[0]); + let ident = ast::Ident::with_empty_ctxt(module_path[0]); match self.resolve_ident_in_lexical_scope(ident, TypeNS, true) .and_then(LexicalScopeBinding::module) { None => return Failed(None), @@ -1437,11 +1409,11 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { /// Invariant: This must only be called during main resolution, not during /// import resolution. fn resolve_ident_in_lexical_scope(&mut self, - ident: hir::Ident, + ident: ast::Ident, ns: Namespace, record_used: bool) -> Option> { - let name = match ns { ValueNS => ident.name, TypeNS => ident.unhygienic_name }; + let name = match ns { ValueNS => mtwt::resolve(ident), TypeNS => ident.name }; // Walk backwards up the ribs in scope. for i in (0 .. self.get_ribs(ns).len()).rev() { @@ -1454,7 +1426,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } if let ModuleRibKind(module) = self.get_ribs(ns)[i].kind { - let name = ident.unhygienic_name; + let name = ident.name; let item = self.resolve_name_in_module(module, name, ns, true, record_used); if let Success(binding) = item { // The ident resolves to an item. @@ -1470,7 +1442,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } /// Returns the nearest normal module parent of the given module. 
- fn get_nearest_normal_module_parent(&mut self, module_: Module<'a>) -> Option> { + fn get_nearest_normal_module_parent(&self, module_: Module<'a>) -> Option> { let mut module_ = module_; loop { match module_.parent_link { @@ -1489,7 +1461,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { /// Returns the nearest normal module parent of the given module, or the /// module itself if it is a normal module. - fn get_nearest_normal_module_parent_or_self(&mut self, module_: Module<'a>) -> Module<'a> { + fn get_nearest_normal_module_parent_or_self(&self, module_: Module<'a>) -> Module<'a> { if module_.is_normal() { return module_; } @@ -1555,7 +1527,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { false => module.resolve_name(name, namespace, false), }.and_then(|binding| { if record_used { - self.record_use(name, namespace, binding); + if let NameBindingKind::Import { directive, .. } = binding.kind { + self.used_imports.insert((directive.id, namespace)); + } + self.record_use(name, binding); } Success(binding) }) @@ -1582,7 +1557,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { fn with_scope(&mut self, id: NodeId, f: F) where F: FnOnce(&mut Resolver) { - if let Some(module) = self.current_module.module_children.borrow().get(&id) { + let module = self.module_map.get(&id).cloned(); // clones a reference + if let Some(module) = module { // Move down in the graph. let orig_module = ::std::mem::replace(&mut self.current_module, module); self.value_ribs.push(Rib::new(ModuleRibKind(module))); @@ -1619,77 +1595,76 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None } - fn resolve_crate(&mut self, krate: &hir::Crate) { + fn resolve_crate(&mut self, krate: &Crate) { debug!("(resolving crate) starting"); - - intravisit::walk_crate(self, krate); + self.current_module = self.graph_root; + visit::walk_crate(self, krate); } fn resolve_item(&mut self, item: &Item) { - let name = item.name; + let name = item.ident.name; debug!("(resolving item) resolving {}", name); match item.node { - ItemEnum(_, ref generics) | - ItemTy(_, ref generics) | - ItemStruct(_, ref generics) => { + ItemKind::Enum(_, ref generics) | + ItemKind::Ty(_, ref generics) | + ItemKind::Struct(_, ref generics) => { self.with_type_parameter_rib(HasTypeParameters(generics, TypeSpace, ItemRibKind), - |this| intravisit::walk_item(this, item)); + |this| visit::walk_item(this, item)); } - ItemFn(_, _, _, _, ref generics, _) => { + ItemKind::Fn(_, _, _, _, ref generics, _) => { self.with_type_parameter_rib(HasTypeParameters(generics, FnSpace, ItemRibKind), - |this| intravisit::walk_item(this, item)); + |this| visit::walk_item(this, item)); } - ItemDefaultImpl(_, ref trait_ref) => { + ItemKind::DefaultImpl(_, ref trait_ref) => { self.with_optional_trait_ref(Some(trait_ref), |_, _| {}); } - ItemImpl(_, _, ref generics, ref opt_trait_ref, ref self_type, ref impl_items) => { + ItemKind::Impl(_, _, ref generics, ref opt_trait_ref, ref self_type, ref impl_items) => self.resolve_implementation(generics, opt_trait_ref, &self_type, item.id, - impl_items); - } + impl_items), - ItemTrait(_, ref generics, ref bounds, ref trait_items) => { + ItemKind::Trait(_, ref generics, ref bounds, ref trait_items) => { // Create a new rib for the trait-wide type parameters. 
self.with_type_parameter_rib(HasTypeParameters(generics, TypeSpace, ItemRibKind), |this| { - let local_def_id = this.ast_map.local_def_id(item.id); + let local_def_id = this.definitions.local_def_id(item.id); this.with_self_rib(Def::SelfTy(Some(local_def_id), None), |this| { this.visit_generics(generics); walk_list!(this, visit_ty_param_bound, bounds); for trait_item in trait_items { match trait_item.node { - hir::ConstTraitItem(_, ref default) => { + TraitItemKind::Const(_, ref default) => { // Only impose the restrictions of // ConstRibKind if there's an actual constant // expression in a provided default. if default.is_some() { this.with_constant_rib(|this| { - intravisit::walk_trait_item(this, trait_item) + visit::walk_trait_item(this, trait_item) }); } else { - intravisit::walk_trait_item(this, trait_item) + visit::walk_trait_item(this, trait_item) } } - hir::MethodTraitItem(ref sig, _) => { + TraitItemKind::Method(ref sig, _) => { let type_parameters = HasTypeParameters(&sig.generics, FnSpace, MethodRibKind); this.with_type_parameter_rib(type_parameters, |this| { - intravisit::walk_trait_item(this, trait_item) + visit::walk_trait_item(this, trait_item) }); } - hir::TypeTraitItem(..) => { + TraitItemKind::Type(..) => { this.with_type_parameter_rib(NoTypeParameters, |this| { - intravisit::walk_trait_item(this, trait_item) + visit::walk_trait_item(this, trait_item) }); } }; @@ -1698,28 +1673,30 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }); } - ItemMod(_) | ItemForeignMod(_) => { + ItemKind::Mod(_) | ItemKind::ForeignMod(_) => { self.with_scope(item.id, |this| { - intravisit::walk_item(this, item); + visit::walk_item(this, item); }); } - ItemConst(..) | ItemStatic(..) => { + ItemKind::Const(..) | ItemKind::Static(..) => { self.with_constant_rib(|this| { - intravisit::walk_item(this, item); + visit::walk_item(this, item); }); } - ItemUse(ref view_path) => { + ItemKind::Use(ref view_path) => { match view_path.node { - hir::ViewPathList(ref prefix, ref items) => { + ast::ViewPathList(ref prefix, ref items) => { // Resolve prefix of an import with empty braces (issue #28388) if items.is_empty() && !prefix.segments.is_empty() { match self.resolve_crate_relative_path(prefix.span, &prefix.segments, TypeNS) { - Ok(def) => - self.record_def(item.id, PathResolution::new(def, 0)), + Ok(binding) => { + let def = binding.def().unwrap(); + self.record_def(item.id, PathResolution::new(def, 0)); + } Err(true) => self.record_def(item.id, err_path_resolution()), Err(false) => { resolve_error(self, @@ -1735,9 +1712,11 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - ItemExternCrate(_) => { + ItemKind::ExternCrate(_) => { // do nothing, these are just around to be encoded } + + ItemKind::Mac(_) => panic!("unexpanded macro in resolve!"), } } @@ -1749,7 +1728,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let mut function_type_rib = Rib::new(rib_kind); let mut seen_bindings = HashSet::new(); for (index, type_parameter) in generics.ty_params.iter().enumerate() { - let name = type_parameter.name; + let name = type_parameter.ident.name; debug!("with_type_parameter_rib: {}", type_parameter.id); if seen_bindings.contains(&name) { @@ -1760,7 +1739,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { seen_bindings.insert(name); // plain insert (no renaming) - let def_id = self.ast_map.local_def_id(type_parameter.id); + let def_id = self.definitions.local_def_id(type_parameter.id); let def = Def::TyParam(space, index as u32, def_id, name); function_type_rib.bindings.insert(name, def); } @@ -1774,13 +1753,8 @@ impl<'a, 'tcx> Resolver<'a, 
'tcx> { f(self); - match type_parameters { - HasTypeParameters(..) => { - if !self.resolved { - self.type_ribs.pop(); - } - } - NoTypeParameters => {} + if let HasTypeParameters(..) = type_parameters { + self.type_ribs.pop(); } } @@ -1789,9 +1763,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { { self.label_ribs.push(Rib::new(NormalRibKind)); f(self); - if !self.resolved { - self.label_ribs.pop(); - } + self.label_ribs.pop(); } fn with_constant_rib(&mut self, f: F) @@ -1800,10 +1772,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.value_ribs.push(Rib::new(ConstantItemRibKind)); self.type_ribs.push(Rib::new(ConstantItemRibKind)); f(self); - if !self.resolved { - self.type_ribs.pop(); - self.value_ribs.pop(); - } + self.type_ribs.pop(); + self.value_ribs.pop(); } fn resolve_function(&mut self, rib_kind: RibKind<'a>, declaration: &FnDecl, block: &Block) { @@ -1822,17 +1792,15 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { debug!("(resolving function) recorded argument"); } - intravisit::walk_fn_ret_ty(self, &declaration.output); + visit::walk_fn_ret_ty(self, &declaration.output); // Resolve the function body. self.visit_block(block); debug!("(resolving function) leaving function"); - if !self.resolved { - self.label_ribs.pop(); - self.value_ribs.pop(); - } + self.label_ribs.pop(); + self.value_ribs.pop(); } fn resolve_trait_reference(&mut self, @@ -1852,11 +1820,23 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { path_depth))); // If it's a typedef, give a note - if let Def::TyAlias(did) = path_res.base_def { - err.fileline_note(trait_path.span, - "`type` aliases cannot be used for traits"); - if let Some(sp) = self.ast_map.span_if_local(did) { - err.span_note(sp, "type defined here"); + if let Def::TyAlias(..) = path_res.base_def { + let trait_name = trait_path.segments.last().unwrap().identifier.name; + err.span_label(trait_path.span, + &format!("`{}` is not a trait", trait_name)); + + let definition_site = { + let segments = &trait_path.segments; + if trait_path.global { + self.resolve_crate_relative_path(trait_path.span, segments, TypeNS) + } else { + self.resolve_module_relative_path(trait_path.span, segments, TypeNS) + }.map(|binding| binding.span).unwrap_or(codemap::DUMMY_SP) + }; + + if definition_site != codemap::DUMMY_SP { + err.span_label(definition_site, + &format!("type aliases cannot be used for traits")); } } err.emit(); @@ -1892,9 +1872,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { fn resolve_generics(&mut self, generics: &Generics) { for predicate in &generics.where_clause.predicates { match predicate { - &hir::WherePredicate::BoundPredicate(_) | - &hir::WherePredicate::RegionPredicate(_) => {} - &hir::WherePredicate::EqPredicate(ref eq_pred) => { + &ast::WherePredicate::BoundPredicate(_) | + &ast::WherePredicate::RegionPredicate(_) => {} + &ast::WherePredicate::EqPredicate(ref eq_pred) => { self.resolve_path(eq_pred.id, &eq_pred.path, 0, TypeNS).and_then(|path_res| { if let PathResolution { base_def: Def::TyParam(..), .. 
} = path_res { Ok(self.record_def(eq_pred.id, path_res)) @@ -1910,7 +1890,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } } - intravisit::walk_generics(self, generics); + visit::walk_generics(self, generics); } fn with_current_self_type(&mut self, self_type: &Ty, f: F) -> T @@ -1939,7 +1919,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } else { self.record_def(trait_ref.ref_id, err_path_resolution()); } - intravisit::walk_trait_ref(self, trait_ref); + visit::walk_trait_ref(self, trait_ref); } let original_trait_ref = replace(&mut self.current_trait_ref, new_val); let result = f(self, new_id); @@ -1953,13 +1933,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let mut self_type_rib = Rib::new(NormalRibKind); // plain insert (no renaming, types are not currently hygienic....) - let name = special_names::type_self; - self_type_rib.bindings.insert(name, self_def); + self_type_rib.bindings.insert(keywords::SelfType.name(), self_def); self.type_ribs.push(self_type_rib); f(self); - if !self.resolved { - self.type_ribs.pop(); - } + self.type_ribs.pop(); } fn resolve_implementation(&mut self, @@ -1981,24 +1958,23 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // Resolve the self type. this.visit_ty(self_type); - this.with_self_rib(Def::SelfTy(trait_id, Some((item_id, self_type.id))), |this| { + this.with_self_rib(Def::SelfTy(trait_id, Some(item_id)), |this| { this.with_current_self_type(self_type, |this| { for impl_item in impl_items { + this.resolve_visibility(&impl_item.vis); match impl_item.node { - hir::ImplItemKind::Const(..) => { + ImplItemKind::Const(..) => { // If this is a trait impl, ensure the const // exists in trait - this.check_trait_item(impl_item.name, + this.check_trait_item(impl_item.ident.name, impl_item.span, |n, s| ResolutionError::ConstNotMemberOfTrait(n, s)); - this.with_constant_rib(|this| { - intravisit::walk_impl_item(this, impl_item); - }); + visit::walk_impl_item(this, impl_item); } - hir::ImplItemKind::Method(ref sig, _) => { + ImplItemKind::Method(ref sig, _) => { // If this is a trait impl, ensure the method // exists in trait - this.check_trait_item(impl_item.name, + this.check_trait_item(impl_item.ident.name, impl_item.span, |n, s| ResolutionError::MethodNotMemberOfTrait(n, s)); @@ -2009,18 +1985,19 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { FnSpace, MethodRibKind); this.with_type_parameter_rib(type_parameters, |this| { - intravisit::walk_impl_item(this, impl_item); + visit::walk_impl_item(this, impl_item); }); } - hir::ImplItemKind::Type(ref ty) => { + ImplItemKind::Type(ref ty) => { // If this is a trait impl, ensure the type // exists in trait - this.check_trait_item(impl_item.name, + this.check_trait_item(impl_item.ident.name, impl_item.span, |n, s| ResolutionError::TypeNotMemberOfTrait(n, s)); this.visit_ty(ty); } + ImplItemKind::Macro(_) => panic!("unexpanded macro in resolve!"), } } }); @@ -2058,16 +2035,22 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // that expands into an or-pattern where one 'x' was from the // user and one 'x' came from the macro. fn binding_mode_map(&mut self, pat: &Pat) -> BindingMap { - let mut result = HashMap::new(); - pat_bindings(&self.def_map, pat, |binding_mode, _id, sp, path1| { - let name = path1.node; - result.insert(name, - BindingInfo { - span: sp, - binding_mode: binding_mode, - }); + let mut binding_map = HashMap::new(); + + pat.walk(&mut |pat| { + if let PatKind::Ident(binding_mode, ident, ref sub_pat) = pat.node { + if sub_pat.is_some() || match self.def_map.get(&pat.id) { + Some(&PathResolution { base_def: Def::Local(..), .. 
}) => true, + _ => false, + } { + let binding_info = BindingInfo { span: ident.span, binding_mode: binding_mode }; + binding_map.insert(mtwt::resolve(ident.node), binding_info); + } + } + true }); - return result; + + binding_map } // check that all of the arms in an or-pattern have exactly the @@ -2085,7 +2068,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None => { resolve_error(self, p.span, - ResolutionError::VariableNotBoundInPattern(key, i + 1)); + ResolutionError::VariableNotBoundInPattern(key, 1, i + 1)); } Some(binding_i) => { if binding_0.binding_mode != binding_i.binding_mode { @@ -2102,7 +2085,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { if !map_0.contains_key(&key) { resolve_error(self, binding.span, - ResolutionError::VariableNotBoundInParentPattern(key, i + 1)); + ResolutionError::VariableNotBoundInPattern(key, i + 1, 1)); } } } @@ -2123,17 +2106,14 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { walk_list!(self, visit_expr, &arm.guard); self.visit_expr(&arm.body); - if !self.resolved { - self.value_ribs.pop(); - } + self.value_ribs.pop(); } fn resolve_block(&mut self, block: &Block) { debug!("(resolving block) entering block"); // Move down in the graph, if there's an anonymous module rooted here. let orig_module = self.current_module; - let anonymous_module = - orig_module.module_children.borrow().get(&block.id).map(|module| *module); + let anonymous_module = self.module_map.get(&block.id).cloned(); // clones a reference if let Some(anonymous_module) = anonymous_module { debug!("(resolving block) found anonymous module, moving down"); @@ -2145,22 +2125,20 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } // Descend into the block. - intravisit::walk_block(self, block); + visit::walk_block(self, block); // Move back up. - if !self.resolved { - self.current_module = orig_module; - self.value_ribs.pop(); - if let Some(_) = anonymous_module { - self.type_ribs.pop(); - } + self.current_module = orig_module; + self.value_ribs.pop(); + if let Some(_) = anonymous_module { + self.type_ribs.pop(); } debug!("(resolving block) leaving block"); } fn resolve_type(&mut self, ty: &Ty) { match ty.node { - TyPath(ref maybe_qself, ref path) => { + TyKind::Path(ref maybe_qself, ref path) => { let resolution = match self.resolve_possibly_assoc_item(ty.id, maybe_qself.as_ref(), path, @@ -2168,7 +2146,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // `::a::b::c` is resolved by typeck alone. TypecheckRequired => { // Resolve embedded types. - intravisit::walk_ty(self, ty); + visit::walk_ty(self, ty); return; } ResolveAttempt(resolution) => resolution, @@ -2194,11 +2172,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { "type name" }; - let self_type_name = special_idents::type_self.name; let is_invalid_self_type_name = path.segments.len() > 0 && maybe_qself.is_none() && path.segments[0].identifier.name == - self_type_name; + keywords::SelfType.name(); if is_invalid_self_type_name { resolve_error(self, ty.span, @@ -2238,7 +2215,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { _ => {} } // Resolve embedded types. 
- intravisit::walk_ty(self, ty); + visit::walk_ty(self, ty); } fn resolve_pattern(&mut self, @@ -2248,7 +2225,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // pattern that binds them bindings_list: &mut HashMap) { let pat_id = pattern.id; - pattern.walk(|pattern| { + pattern.walk(&mut |pattern| { match pattern.node { PatKind::Ident(binding_mode, ref path1, ref at_rhs) => { // The meaning of PatKind::Ident with no type parameters @@ -2262,7 +2239,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let const_ok = mode == RefutableMode && at_rhs.is_none(); let ident = path1.node; - let renamed = ident.name; + let renamed = mtwt::resolve(ident); match self.resolve_bare_identifier_pattern(ident, pattern.span) { FoundStructOrEnumVariant(def) if const_ok => { @@ -2297,19 +2274,18 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { depth: 0, }); } - FoundConst(def, name) => { + FoundConst(_, name) => { resolve_error( self, pattern.span, - ResolutionError::OnlyIrrefutablePatternsAllowedHere(def.def_id(), - name) + ResolutionError::ConstantForIrrefutableBinding(name) ); self.record_def(pattern.id, err_path_resolution()); } BareIdentifierPatternUnresolved => { debug!("(resolving pattern) binding `{}`", renamed); - let def_id = self.ast_map.local_def_id(pattern.id); + let def_id = self.definitions.local_def_id(pattern.id); let def = Def::Local(def_id, pattern.id); // Record the definition so that later passes @@ -2383,9 +2359,15 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.record_def(pattern.id, path_res); } Def::Static(..) => { - resolve_error(&self, - path.span, - ResolutionError::StaticVariableReference); + let segments = &path.segments; + let binding = if path.global { + self.resolve_crate_relative_path(path.span, segments, ValueNS) + } else { + self.resolve_module_relative_path(path.span, segments, ValueNS) + }.unwrap(); + + let error = ResolutionError::StaticVariableReference(binding); + resolve_error(self, path.span, error); self.record_def(pattern.id, err_path_resolution()); } _ => { @@ -2418,7 +2400,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } } - } else { + } else if let Err(false) = self.resolve_path(pat_id, &path, 0, ValueNS) { resolve_error( self, path.span, @@ -2427,7 +2409,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { ); self.record_def(pattern.id, err_path_resolution()); } - intravisit::walk_path(self, path); + visit::walk_path(self, path); } PatKind::QPath(ref qself, ref path) => { @@ -2447,7 +2429,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { .name; let traits = self.get_traits_containing_item(const_name); self.trait_map.insert(pattern.id, traits); - intravisit::walk_pat(self, pattern); + visit::walk_pat(self, pattern); return true; } ResolveAttempt(resolution) => resolution, @@ -2481,7 +2463,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { .as_str())); self.record_def(pattern.id, err_path_resolution()); } - intravisit::walk_pat(self, pattern); + visit::walk_pat(self, pattern); } PatKind::Struct(ref path, _, _) => { @@ -2500,11 +2482,11 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { self.record_def(pattern.id, err_path_resolution()); } } - intravisit::walk_path(self, path); + visit::walk_path(self, path); } PatKind::Lit(_) | PatKind::Range(..) 
=> { - intravisit::walk_pat(self, pattern); + visit::walk_pat(self, pattern); } _ => { @@ -2515,18 +2497,20 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { }); } - fn resolve_bare_identifier_pattern(&mut self, ident: hir::Ident, span: Span) + fn resolve_bare_identifier_pattern(&mut self, ident: ast::Ident, span: Span) -> BareIdentifierPatternResolution { - match self.resolve_ident_in_lexical_scope(ident, ValueNS, true) - .map(LexicalScopeBinding::def) { - Some(def @ Def::Variant(..)) | Some(def @ Def::Struct(..)) => { - FoundStructOrEnumVariant(def) - } - Some(def @ Def::Const(..)) | Some(def @ Def::AssociatedConst(..)) => { - FoundConst(def, ident.unhygienic_name) - } - Some(Def::Static(..)) => { - resolve_error(self, span, ResolutionError::StaticVariableReference); + let binding = match self.resolve_ident_in_lexical_scope(ident, ValueNS, true) { + Some(LexicalScopeBinding::Item(binding)) => binding, + _ => return BareIdentifierPatternUnresolved, + }; + let def = binding.def().unwrap(); + + match def { + Def::Variant(..) | Def::Struct(..) => FoundStructOrEnumVariant(def), + Def::Const(..) | Def::AssociatedConst(..) => FoundConst(def, ident.name), + Def::Static(..) => { + let error = ResolutionError::StaticVariableReference(binding); + resolve_error(self, span, error); BareIdentifierPatternUnresolved } _ => BareIdentifierPatternUnresolved, @@ -2536,7 +2520,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { /// Handles paths that may refer to associated items fn resolve_possibly_assoc_item(&mut self, id: NodeId, - maybe_qself: Option<&hir::QSelf>, + maybe_qself: Option<&ast::QSelf>, path: &Path, namespace: Namespace) -> AssocItemResolveResult { @@ -2578,14 +2562,16 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { /// returned value. See `hir::def::PathResolution` for more info. fn resolve_path(&mut self, id: NodeId, path: &Path, path_depth: usize, namespace: Namespace) -> Result { + debug!("resolve_path(id={:?} path={:?}, path_depth={:?})", id, path, path_depth); + let span = path.span; let segments = &path.segments[..path.segments.len() - path_depth]; let mk_res = |def| PathResolution::new(def, path_depth); if path.global { - let def = self.resolve_crate_relative_path(span, segments, namespace); - return def.map(mk_res); + let binding = self.resolve_crate_relative_path(span, segments, namespace); + return binding.map(|binding| mk_res(binding.def().unwrap())); } // Try to find a path to an item in a module. 
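A minimal standalone sketch of the path-splitting step that `resolve_path` in the hunk above performs before the final name lookup: a qualified path such as `a::b::c` is first split into a module path (`a::b`), which is resolved to a module, and a final segment (`c`), which is then looked up in that module. The helper below uses hypothetical names and plain string segments; it is an illustration of the idea, not rustc's own API.

// Illustration only; `split_path` is a hypothetical helper, not part of the resolver.
fn split_path<'a>(segments: &'a [&'a str]) -> (&'a [&'a str], &'a str) {
    // Mirrors `segments.split_last()` in resolve_module_relative_path:
    // everything but the last segment names the containing module,
    // the last segment is the name resolved inside that module.
    let (last, module_path) = segments.split_last().expect("a path has at least one segment");
    (module_path, *last)
}

fn main() {
    let segments = ["a", "b", "c"];
    let (module_path, name) = split_path(&segments);
    assert_eq!(module_path, &["a", "b"]);
    assert_eq!(name, "c");
    // The resolver would now resolve `a::b` to a module and look up `c` there,
    // falling back to lexical-scope resolution for single-segment paths.
    println!("resolve `{}` in module `{}`", name, module_path.join("::"));
}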
@@ -2597,7 +2583,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { None | Some(LocalDef{def: Def::Mod(..), ..}) if namespace == TypeNS => this.primitive_type_table .primitive_types - .get(&last_ident.unhygienic_name) + .get(&last_ident.name) .map_or(def, |prim_ty| Some(LocalDef::from_def(Def::PrimTy(*prim_ty)))), _ => def } @@ -2621,9 +2607,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } let unqualified_def = resolve_identifier_with_fallback(self, false); - let def = self.resolve_module_relative_path(span, segments, namespace); - match (def, unqualified_def) { - (Ok(d), Some(ref ud)) if d == ud.def => { + let qualified_binding = self.resolve_module_relative_path(span, segments, namespace); + match (qualified_binding, unqualified_def) { + (Ok(binding), Some(ref ud)) if binding.def().unwrap() == ud.def => { self.session .add_lint(lint::builtin::UNUSED_QUALIFICATIONS, id, @@ -2633,16 +2619,16 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { _ => {} } - def.map(mk_res) + qualified_binding.map(|binding| mk_res(binding.def().unwrap())) } // Resolve a single identifier fn resolve_identifier(&mut self, - identifier: hir::Ident, + identifier: ast::Ident, namespace: Namespace, record_used: bool) -> Option { - if identifier.name == special_idents::invalid.name { + if identifier.name == keywords::Invalid.name() { return Some(LocalDef::from_def(Def::Err)); } @@ -2670,7 +2656,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } ClosureRibKind(function_id) => { let prev_def = def; - let node_def_id = self.ast_map.local_def_id(node_id); + let node_def_id = self.definitions.local_def_id(node_id); let seen = self.freevars_seen .entry(function_id) @@ -2742,9 +2728,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // resolve a "module-relative" path, e.g. a::b::c fn resolve_module_relative_path(&mut self, span: Span, - segments: &[hir::PathSegment], + segments: &[ast::PathSegment], namespace: Namespace) - -> Result { + -> Result<&'a NameBinding<'a>, + bool /* true if an error was reported */> { let module_path = segments.split_last() .unwrap() .1 @@ -2776,25 +2763,19 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let name = segments.last().unwrap().identifier.name; let result = self.resolve_name_in_module(containing_module, name, namespace, false, true); result.success().map(|binding| { - self.check_privacy(containing_module, name, binding, span); - binding.def().unwrap() + self.check_privacy(name, binding, span); + binding }).ok_or(false) } /// Invariant: This must be called only during main resolution, not during /// import resolution. 
- fn resolve_crate_relative_path(&mut self, - span: Span, - segments: &[hir::PathSegment], - namespace: Namespace) - -> Result { - let module_path = segments.split_last() - .unwrap() - .1 - .iter() - .map(|ps| ps.identifier.name) - .collect::>(); - + fn resolve_crate_relative_path(&mut self, span: Span, segments: &[T], namespace: Namespace) + -> Result<&'a NameBinding<'a>, + bool /* true if an error was reported */> + where T: Named, + { + let module_path = segments.split_last().unwrap().1.iter().map(T::name).collect::>(); let root_module = self.graph_root; let containing_module; @@ -2823,11 +2804,11 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - let name = segments.last().unwrap().identifier.name; + let name = segments.last().unwrap().name(); let result = self.resolve_name_in_module(containing_module, name, namespace, false, true); result.success().map(|binding| { - self.check_privacy(containing_module, name, binding, span); - binding.def().unwrap() + self.check_privacy(name, binding, span); + binding }).ok_or(false) } @@ -2841,13 +2822,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } fn find_fallback_in_self_type(&mut self, name: Name) -> FallbackSuggestion { - fn extract_path_and_node_id(t: &Ty, - allow: FallbackChecks) - -> Option<(Path, NodeId, FallbackChecks)> { + fn extract_node_id(t: &Ty) -> Option { match t.node { - TyPath(None, ref path) => Some((path.clone(), t.id, allow)), - TyPtr(ref mut_ty) => extract_path_and_node_id(&mut_ty.ty, OnlyTraitAndStatics), - TyRptr(_, ref mut_ty) => extract_path_and_node_id(&mut_ty.ty, allow), + TyKind::Path(None, _) => Some(t.id), + TyKind::Rptr(_, ref mut_ty) => extract_node_id(&mut_ty.ty), // This doesn't handle the remaining `Ty` variants as they are not // that commonly the self_type, it might be interesting to provide // support for those in future. @@ -2855,53 +2833,9 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - fn get_module<'a, 'tcx>(this: &mut Resolver<'a, 'tcx>, - span: Span, - name_path: &[ast::Name]) - -> Option> { - let last_name = name_path.last().unwrap(); - - if name_path.len() == 1 { - match this.primitive_type_table.primitive_types.get(last_name) { - Some(_) => None, - None => this.current_module.resolve_name_in_lexical_scope(*last_name, TypeNS) - .and_then(NameBinding::module) - } - } else { - this.resolve_module_path(&name_path, UseLexicalScope, span).success() - } - } - - fn is_static_method(this: &Resolver, did: DefId) -> bool { - if let Some(node_id) = this.ast_map.as_local_node_id(did) { - let sig = match this.ast_map.get(node_id) { - hir_map::NodeTraitItem(trait_item) => match trait_item.node { - hir::MethodTraitItem(ref sig, _) => sig, - _ => return false, - }, - hir_map::NodeImplItem(impl_item) => match impl_item.node { - hir::ImplItemKind::Method(ref sig, _) => sig, - _ => return false, - }, - _ => return false, - }; - sig.explicit_self.node == hir::SelfStatic - } else { - this.session.cstore.is_static_method(did) - } - } - - let (path, node_id, allowed) = match self.current_self_type { - Some(ref ty) => match extract_path_and_node_id(ty, Everything) { - Some(x) => x, - None => return NoSuggestion, - }, - None => return NoSuggestion, - }; - - if allowed == Everything { + if let Some(node_id) = self.current_self_type.as_ref().and_then(extract_node_id) { // Look for a field with the same name in the current self_type. 
- match self.def_map.borrow().get(&node_id).map(|d| d.full_def()) { + match self.def_map.get(&node_id).map(|d| d.full_def()) { Some(Def::Enum(did)) | Some(Def::TyAlias(did)) | Some(Def::Struct(did)) | @@ -2917,28 +2851,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - let name_path = path.segments.iter().map(|seg| seg.identifier.name).collect::>(); - - // Look for a method in the current self type's impl module. - if let Some(module) = get_module(self, path.span, &name_path) { - if let Some(binding) = module.resolve_name_in_lexical_scope(name, ValueNS) { - if let Some(Def::Method(did)) = binding.def() { - if is_static_method(self, did) { - return StaticMethod(path_names_to_string(&path, 0)); - } - if self.current_trait_ref.is_some() { - return TraitItem; - } else if allowed == Everything { - return Method; - } - } - } - } - // Look for a method in the current trait. if let Some((trait_did, ref trait_ref)) = self.current_trait_ref { - if let Some(&did) = self.trait_item_map.get(&(name, trait_did)) { - if is_static_method(self, did) { + if let Some(&is_static_method) = self.trait_item_map.get(&(name, trait_did)) { + if is_static_method { return TraitMethod(path_names_to_string(&trait_ref.path, 0)); } else { return TraitItem; @@ -2967,7 +2883,19 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } SuggestionType::NotFound } - fn resolve_expr(&mut self, expr: &Expr) { + fn resolve_labeled_block(&mut self, label: Option, id: NodeId, block: &Block) { + if let Some(label) = label { + let (label, def) = (mtwt::resolve(label), Def::Label(id)); + self.with_label_rib(|this| { + this.label_ribs.last_mut().unwrap().bindings.insert(label, def); + this.visit_block(block); + }); + } else { + self.visit_block(block); + } + } + + fn resolve_expr(&mut self, expr: &Expr, parent: Option<&Expr>) { // First, record candidate traits for this expression if it could // result in the invocation of a method call. @@ -2975,7 +2903,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // Next, resolve the node. match expr.node { - ExprPath(ref maybe_qself, ref path) => { + ExprKind::Path(ref maybe_qself, ref path) => { let resolution = match self.resolve_possibly_assoc_item(expr.id, maybe_qself.as_ref(), path, @@ -2985,7 +2913,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let method_name = path.segments.last().unwrap().identifier.name; let traits = self.get_traits_containing_item(method_name); self.trait_map.insert(expr.id, traits); - intravisit::walk_expr(self, expr); + visit::walk_expr(self, expr); return; } ResolveAttempt(resolution) => resolution, @@ -3011,7 +2939,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let msg = format!("did you mean to write: `{} {{ /* fields */ }}`?", path_name); if self.emit_errors { - err.fileline_help(expr.span, &msg); + err.help(&msg); } else { err.span_help(expr.span, &msg); } @@ -3053,7 +2981,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { path_name); if self.emit_errors { - err.fileline_help(expr.span, &msg); + err.help(&msg); } else { err.span_help(expr.span, &msg); } @@ -3073,7 +3001,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { false // Stop advancing }); - if method_scope && special_names::self_.as_str() == &path_name[..] { + if method_scope && + &path_name[..] 
== keywords::SelfValue.name().as_str() { resolve_error(self, expr.span, ResolutionError::SelfNotAvailableInStaticMethod); @@ -3092,10 +3021,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } Field => format!("`self.{}`", path_name), - Method | TraitItem => format!("to call `self.{}`", path_name), - TraitMethod(path_str) | - StaticMethod(path_str) => + TraitMethod(path_str) => format!("to call `{}::{}`", path_str, path_name), }; @@ -3113,7 +3040,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { UseLexicalScope, expr.span) { Success(_) => { - context = UnresolvedNameContext::PathIsMod(expr.id); + context = UnresolvedNameContext::PathIsMod(parent); }, _ => {}, }; @@ -3128,10 +3055,10 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - intravisit::walk_expr(self, expr); + visit::walk_expr(self, expr); } - ExprStruct(ref path, _, _) => { + ExprKind::Struct(ref path, _, _) => { // Resolve the path to the structure it goes to. We don't // check to ensure that the path is actually a structure; that // is checked later during typeck. @@ -3150,24 +3077,24 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - intravisit::walk_expr(self, expr); + visit::walk_expr(self, expr); } - ExprLoop(_, Some(label)) | ExprWhile(_, _, Some(label)) => { + ExprKind::Loop(_, Some(label)) | ExprKind::While(_, _, Some(label)) => { self.with_label_rib(|this| { let def = Def::Label(expr.id); { let rib = this.label_ribs.last_mut().unwrap(); - rib.bindings.insert(label.name, def); + rib.bindings.insert(mtwt::resolve(label), def); } - intravisit::walk_expr(this, expr); + visit::walk_expr(this, expr); }) } - ExprBreak(Some(label)) | ExprAgain(Some(label)) => { - match self.search_label(label.node.name) { + ExprKind::Break(Some(label)) | ExprKind::Again(Some(label)) => { + match self.search_label(mtwt::resolve(label.node)) { None => { self.record_def(expr.id, err_path_resolution()); resolve_error(self, @@ -3188,26 +3115,71 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } + ExprKind::IfLet(ref pattern, ref subexpression, ref if_block, ref optional_else) => { + self.visit_expr(subexpression); + + self.value_ribs.push(Rib::new(NormalRibKind)); + self.resolve_pattern(pattern, RefutableMode, &mut HashMap::new()); + self.visit_block(if_block); + self.value_ribs.pop(); + + optional_else.as_ref().map(|expr| self.visit_expr(expr)); + } + + ExprKind::WhileLet(ref pattern, ref subexpression, ref block, label) => { + self.visit_expr(subexpression); + self.value_ribs.push(Rib::new(NormalRibKind)); + self.resolve_pattern(pattern, RefutableMode, &mut HashMap::new()); + + self.resolve_labeled_block(label, expr.id, block); + + self.value_ribs.pop(); + } + + ExprKind::ForLoop(ref pattern, ref subexpression, ref block, label) => { + self.visit_expr(subexpression); + self.value_ribs.push(Rib::new(NormalRibKind)); + self.resolve_pattern(pattern, LocalIrrefutableMode, &mut HashMap::new()); + + self.resolve_labeled_block(label, expr.id, block); + + self.value_ribs.pop(); + } + + ExprKind::Field(ref subexpression, _) => { + self.resolve_expr(subexpression, Some(expr)); + } + ExprKind::MethodCall(_, ref types, ref arguments) => { + let mut arguments = arguments.iter(); + self.resolve_expr(arguments.next().unwrap(), Some(expr)); + for argument in arguments { + self.resolve_expr(argument, None); + } + for ty in types.iter() { + self.visit_ty(ty); + } + } + _ => { - intravisit::walk_expr(self, expr); + visit::walk_expr(self, expr); } } } fn record_candidate_traits_for_expr_if_necessary(&mut self, expr: &Expr) { match expr.node { - ExprField(_, name) => { + ExprKind::Field(_, name) => { // 
FIXME(#6890): Even though you can't treat a method like a // field, we need to add any trait methods we find that match // the field name so that we can do some nice error reporting // later on in typeck. - let traits = self.get_traits_containing_item(name.node); + let traits = self.get_traits_containing_item(name.node.name); self.trait_map.insert(expr.id, traits); } - ExprMethodCall(name, _, _) => { + ExprKind::MethodCall(name, _, _) => { debug!("(recording candidate traits for expr) recording traits for {}", expr.id); - let traits = self.get_traits_containing_item(name.node); + let traits = self.get_traits_containing_item(name.node.name); self.trait_map.insert(expr.id, traits); } _ => { @@ -3216,21 +3188,27 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - fn get_traits_containing_item(&mut self, name: Name) -> Vec { + fn get_traits_containing_item(&mut self, name: Name) -> Vec { debug!("(getting traits containing item) looking for '{}'", name); - fn add_trait_info(found_traits: &mut Vec, trait_def_id: DefId, name: Name) { + fn add_trait_info(found_traits: &mut Vec, + trait_def_id: DefId, + import_id: Option, + name: Name) { debug!("(adding trait info) found trait {:?} for method '{}'", trait_def_id, name); - found_traits.push(trait_def_id); + found_traits.push(TraitCandidate { + def_id: trait_def_id, + import_id: import_id, + }); } let mut found_traits = Vec::new(); // Look for the current trait. if let Some((trait_def_id, _)) = self.current_trait_ref { if self.trait_item_map.contains_key(&(name, trait_def_id)) { - add_trait_info(&mut found_traits, trait_def_id, name); + add_trait_info(&mut found_traits, trait_def_id, None, name); } } @@ -3241,21 +3219,26 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { let mut traits = module.traits.borrow_mut(); if traits.is_none() { let mut collected_traits = Vec::new(); - module.for_each_child(|_, ns, binding| { + module.for_each_child(|name, ns, binding| { if ns != TypeNS { return } if let Some(Def::Trait(_)) = binding.def() { - collected_traits.push(binding); + collected_traits.push((name, binding)); } }); *traits = Some(collected_traits.into_boxed_slice()); } - for binding in traits.as_ref().unwrap().iter() { + for &(trait_name, binding) in traits.as_ref().unwrap().iter() { let trait_def_id = binding.def().unwrap().def_id(); if self.trait_item_map.contains_key(&(name, trait_def_id)) { - add_trait_info(&mut found_traits, trait_def_id, name); - let trait_name = self.get_trait_name(trait_def_id); - self.record_use(trait_name, TypeNS, binding); + let mut import_id = None; + if let NameBindingKind::Import { directive, .. 
} = binding.kind { + let id = directive.id; + self.maybe_unused_trait_imports.insert(id); + import_id = Some(id); + } + add_trait_info(&mut found_traits, trait_def_id, import_id, name); + self.record_use(trait_name, binding); } } }; @@ -3306,16 +3289,15 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { if let Some(def) = name_binding.def() { if name == lookup_name && ns == namespace && filter_fn(def) { // create the path - let ident = hir::Ident::from_name(name); + let ident = ast::Ident::with_empty_ctxt(name); let params = PathParameters::none(); let segment = PathSegment { identifier: ident, parameters: params, }; - let span = name_binding.span.unwrap_or(syntax::codemap::DUMMY_SP); + let span = name_binding.span; let mut segms = path_segments.clone(); segms.push(segment); - let segms = HirVec::from_vec(segms); let path = Path { span: span, global: true, @@ -3328,7 +3310,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { // only if both the module is public and the entity is // declared as public (due to pruning, we don't explore // outside crate private modules => no need to check this) - if !in_module_is_extern || name_binding.is_public() { + if !in_module_is_extern || name_binding.vis == ty::Visibility::Public { lookup_results.push(path); } } @@ -3341,7 +3323,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { NoParentLink => path_segments.clone(), ModuleParentLink(_, name) => { let mut paths = path_segments.clone(); - let ident = hir::Ident::from_name(name); + let ident = ast::Ident::with_empty_ctxt(name); let params = PathParameters::none(); let segm = PathSegment { identifier: ident, @@ -3353,7 +3335,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { _ => bug!(), }; - if !in_module_is_extern || name_binding.is_public() { + if !in_module_is_extern || name_binding.vis == ty::Visibility::Public { // add the module to the lookup let is_extern = in_module_is_extern || name_binding.is_extern_crate(); worklist.push((module, path_segments, is_extern)); @@ -3370,12 +3352,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { fn record_def(&mut self, node_id: NodeId, resolution: PathResolution) { debug!("(recording def) recording {:?} for {}", resolution, node_id); - if let Some(prev_res) = self.def_map.borrow_mut().insert(node_id, resolution) { - let span = self.ast_map.opt_span(node_id).unwrap_or(codemap::DUMMY_SP); - span_bug!(span, - "path resolved multiple times ({:?} before, {:?} now)", - prev_res, - resolution); + if let Some(prev_res) = self.def_map.insert(node_id, resolution) { + panic!("path resolved multiple times ({:?} before, {:?} now)", prev_res, resolution); } } @@ -3384,8 +3362,8 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { pat_binding_mode: BindingMode, descr: &str) { match pat_binding_mode { - BindByValue(_) => {} - BindByRef(..) => { + BindingMode::ByValue(_) => {} + BindingMode::ByRef(..) 
=> { resolve_error(self, pat.span, ResolutionError::CannotUseRefBindingModeWith(descr)); @@ -3393,16 +3371,52 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { } } - fn is_visible(&self, binding: &'a NameBinding<'a>, parent: Module<'a>) -> bool { - binding.is_public() || parent.is_ancestor_of(self.current_module) + fn resolve_visibility(&mut self, vis: &ast::Visibility) -> ty::Visibility { + let (path, id) = match *vis { + ast::Visibility::Public => return ty::Visibility::Public, + ast::Visibility::Crate(_) => return ty::Visibility::Restricted(ast::CRATE_NODE_ID), + ast::Visibility::Restricted { ref path, id } => (path, id), + ast::Visibility::Inherited => { + let current_module = + self.get_nearest_normal_module_parent_or_self(self.current_module); + let id = + self.definitions.as_local_node_id(current_module.def_id().unwrap()).unwrap(); + return ty::Visibility::Restricted(id); + } + }; + + let segments: Vec<_> = path.segments.iter().map(|seg| seg.identifier.name).collect(); + let vis = match self.resolve_module_path(&segments, DontUseLexicalScope, path.span) { + Success(module) => { + let def = module.def.unwrap(); + let path_resolution = PathResolution { base_def: def, depth: 0 }; + self.def_map.insert(id, path_resolution); + ty::Visibility::Restricted(self.definitions.as_local_node_id(def.def_id()).unwrap()) + } + Failed(Some((span, msg))) => { + self.session.span_err(span, &format!("failed to resolve module path. {}", msg)); + ty::Visibility::Public + } + _ => { + self.session.span_err(path.span, "unresolved module path"); + ty::Visibility::Public + } + }; + if !self.is_accessible(vis) { + let msg = format!("visibilities can only be restricted to ancestor modules"); + self.session.span_err(path.span, &msg); + } + vis } - fn check_privacy(&mut self, - module: Module<'a>, - name: Name, - binding: &'a NameBinding<'a>, - span: Span) { - if !self.is_visible(binding, module) { + fn is_accessible(&self, vis: ty::Visibility) -> bool { + let current_module = self.get_nearest_normal_module_parent_or_self(self.current_module); + let node_id = self.definitions.as_local_node_id(current_module.def_id().unwrap()).unwrap(); + vis.is_accessible_from(node_id, self) + } + + fn check_privacy(&mut self, name: Name, binding: &'a NameBinding<'a>, span: Span) { + if !self.is_accessible(binding.vis) { self.privacy_errors.push(PrivacyError(span, name, binding)); } } @@ -3431,7 +3445,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { binding: &NameBinding, old_binding: &NameBinding) { // Error on the second of two conflicting names - if old_binding.span.unwrap().lo > binding.span.unwrap().lo { + if old_binding.span.lo > binding.span.lo { return self.report_conflict(parent, name, ns, old_binding, binding); } @@ -3447,7 +3461,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { false => ("defined", "definition"), }; - let span = binding.span.unwrap(); + let span = binding.span; let msg = { let kind = match (ns, old_binding.module()) { (ValueNS, _) => "a value", @@ -3468,13 +3482,16 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> { _ => match (old_binding.is_import(), binding.is_import()) { (false, false) => struct_span_err!(self.session, span, E0428, "{}", msg), (true, true) => struct_span_err!(self.session, span, E0252, "{}", msg), - _ => struct_span_err!(self.session, span, E0255, "{}", msg), + _ => { + let mut e = struct_span_err!(self.session, span, E0255, "{}", msg); + e.span_label(span, &format!("`{}` was already imported", name)); + e + } }, }; - let span = old_binding.span.unwrap(); - if span != codemap::DUMMY_SP { - err.span_note(span, 
&format!("previous {} of `{}` here", noun, name)); + if old_binding.span != codemap::DUMMY_SP { + err.span_label(old_binding.span, &format!("previous {} of `{}` here", noun, name)); } err.emit(); } @@ -3506,7 +3523,6 @@ fn path_names_to_string(path: &Path, depth: usize) -> String { /// entities with that name in all crates. This method allows outputting the /// results of this search in a programmer-friendly way fn show_candidates(session: &mut DiagnosticBuilder, - span: syntax::codemap::Span, candidates: &SuggestedCandidates) { let paths = &candidates.candidates; @@ -3526,26 +3542,23 @@ fn show_candidates(session: &mut DiagnosticBuilder, // behave differently based on how many candidates we have: if !paths.is_empty() { if paths.len() == 1 { - session.fileline_help( - span, + session.help( &format!("you can import it into scope: `use {};`.", &path_strings[0]), ); } else { - session.fileline_help(span, "you can import several candidates \ + session.help("you can import several candidates \ into scope (`use ...;`):"); let count = path_strings.len() as isize - MAX_CANDIDATES as isize + 1; for (idx, path_string) in path_strings.iter().enumerate() { if idx == MAX_CANDIDATES - 1 && count > 1 { - session.fileline_help( - span, + session.help( &format!(" and {} other candidates", count).to_string(), ); break; } else { - session.fileline_help( - span, + session.help( &format!(" `{}`", path_string).to_string(), ); } @@ -3554,8 +3567,7 @@ fn show_candidates(session: &mut DiagnosticBuilder, } } else { // nothing found: - session.fileline_help( - span, + session.help( &format!("no candidates by the name of `{}` found in your \ project; maybe you misspelled the name or forgot to import \ an external crate?", candidates.name.to_string()), @@ -3576,7 +3588,7 @@ fn module_to_string(module: Module) -> String { } BlockParentLink(ref module, _) => { // danger, shouldn't be ident? - names.push(special_idents::opaque.name); + names.push(token::intern("")); collect_mod(names, module); } } @@ -3597,14 +3609,6 @@ fn err_path_resolution() -> PathResolution { } -pub struct CrateMap { - pub def_map: RefCell, - pub freevars: FreevarMap, - pub export_map: ExportMap, - pub trait_map: TraitMap, - pub glob_map: Option, -} - #[derive(PartialEq,Copy, Clone)] pub enum MakeGlobMap { Yes, @@ -3612,10 +3616,7 @@ pub enum MakeGlobMap { } /// Entry point to crate resolution. -pub fn resolve_crate<'a, 'tcx>(session: &'a Session, - ast_map: &'a hir_map::Map<'tcx>, - make_glob_map: MakeGlobMap) - -> CrateMap { +pub fn resolve_crate<'a, 'b>(resolver: &'b mut Resolver<'a>, krate: &'b Crate) { // Currently, we ignore the name resolution data structures for // the purposes of dependency tracking. Instead we will run name // resolution and include its output in the hash of each item, @@ -3623,54 +3624,25 @@ pub fn resolve_crate<'a, 'tcx>(session: &'a Session, // reflects not just its contents but the results of name // resolution on those contents. Hopefully we'll push this back at // some point. 
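[Editor's note] The show_candidates rewrite above switches from span-anchored fileline_help notes to plain help notes, but keeps the truncation rule: print at most MAX_CANDIDATES suggestions and fold the rest into an "and N other candidates" line. Below is a small standalone sketch of that truncation logic; the MAX_CANDIDATES value and the returned Vec<String> are illustrative choices, the real code emits each line through the diagnostic builder instead.

// Standalone sketch of the candidate-truncation logic in show_candidates.
const MAX_CANDIDATES: usize = 5; // illustrative limit, not taken from the patch

fn candidate_help_lines(path_strings: &[String]) -> Vec<String> {
    let mut lines = Vec::new();
    if path_strings.is_empty() {
        lines.push("no candidates found".to_string());
        return lines;
    }
    if path_strings.len() == 1 {
        lines.push(format!("you can import it into scope: `use {};`.", path_strings[0]));
        return lines;
    }
    lines.push("you can import several candidates into scope (`use ...;`):".to_string());
    // How many candidates would remain hidden if we stop one short of the cap.
    let count = path_strings.len() as isize - MAX_CANDIDATES as isize + 1;
    for (idx, path_string) in path_strings.iter().enumerate() {
        if idx == MAX_CANDIDATES - 1 && count > 1 {
            lines.push(format!("  and {} other candidates", count));
            break;
        } else {
            lines.push(format!("  `{}`", path_string));
        }
    }
    lines
}

fn main() {
    let paths: Vec<String> = (1..8).map(|i| format!("a::b::name{}", i)).collect();
    for line in candidate_help_lines(&paths) {
        println!("{}", line);
    }
}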
- let _task = ast_map.dep_graph.in_task(DepNode::Resolve); - - let krate = ast_map.krate(); - let arenas = Resolver::arenas(); - let mut resolver = create_resolver(session, ast_map, krate, make_glob_map, &arenas, None); + let _ignore = resolver.session.dep_graph.in_ignore(); + resolver.build_reduced_graph(krate); + resolve_imports::resolve_imports(resolver); resolver.resolve_crate(krate); - check_unused::check_crate(&mut resolver, krate); + check_unused::check_crate(resolver, krate); resolver.report_privacy_errors(); - - CrateMap { - def_map: resolver.def_map, - freevars: resolver.freevars, - export_map: resolver.export_map, - trait_map: resolver.trait_map, - glob_map: if resolver.make_glob_map { - Some(resolver.glob_map) - } else { - None - }, - } } -/// Builds a name resolution walker to be used within this module, -/// or used externally, with an optional callback function. -/// -/// The callback takes a &mut bool which allows callbacks to end a -/// walk when set to true, passing through the rest of the walk, while -/// preserving the ribs + current module. This allows resolve_path -/// calls to be made with the correct scope info. The node in the -/// callback corresponds to the current node in the walk. -fn create_resolver<'a, 'tcx>(session: &'a Session, - ast_map: &'a hir_map::Map<'tcx>, - krate: &'a Crate, - make_glob_map: MakeGlobMap, - arenas: &'a ResolverArenas<'a>, - callback: Option bool>>) - -> Resolver<'a, 'tcx> { - let mut resolver = Resolver::new(session, ast_map, make_glob_map, arenas); - - resolver.callback = callback; - - resolver.build_reduced_graph(krate); - - resolve_imports::resolve_imports(&mut resolver); - - resolver +pub fn with_resolver<'a, T, F>(session: &'a Session, + definitions: &'a mut Definitions, + make_glob_map: MakeGlobMap, + f: F) -> T + where F: for<'b> FnOnce(Resolver<'b>) -> T, +{ + let arenas = Resolver::arenas(); + let resolver = Resolver::new(session, definitions, make_glob_map, &arenas); + f(resolver) } __build_diagnostic_array! { librustc_resolve, DIAGNOSTICS } diff --git a/src/librustc_resolve/resolve_imports.rs b/src/librustc_resolve/resolve_imports.rs index 1404b8cf3a..9bd16117f9 100644 --- a/src/librustc_resolve/resolve_imports.rs +++ b/src/librustc_resolve/resolve_imports.rs @@ -10,7 +10,6 @@ use self::ImportDirectiveSubclass::*; -use DefModifiers; use Module; use Namespace::{self, TypeNS, ValueNS}; use {NameBinding, NameBindingKind, PrivacyError}; @@ -21,15 +20,14 @@ use UseLexicalScopeFlag::DontUseLexicalScope; use {names_to_string, module_to_string}; use {resolve_error, ResolutionError}; +use rustc::ty; use rustc::lint; use rustc::hir::def::*; use syntax::ast::{NodeId, Name}; -use syntax::attr::AttrMetaMethods; -use syntax::codemap::Span; +use syntax::codemap::{Span, DUMMY_SP}; use syntax::util::lev_distance::find_best_match_for_name; -use std::mem::replace; use std::cell::{Cell, RefCell}; /// Contains data for specific types of import directives. @@ -41,7 +39,7 @@ pub enum ImportDirectiveSubclass { type_determined: Cell, value_determined: Cell, }, - GlobImport, + GlobImport { is_prelude: bool }, } impl ImportDirectiveSubclass { @@ -58,38 +56,33 @@ impl ImportDirectiveSubclass { /// One import directive. 
#[derive(Debug,Clone)] pub struct ImportDirective<'a> { + pub id: NodeId, module_path: Vec, target_module: Cell>>, // the resolution of `module_path` subclass: ImportDirectiveSubclass, span: Span, - id: NodeId, - is_public: bool, // see note in ImportResolutionPerNamespace about how to use this - is_prelude: bool, + vis: ty::Visibility, // see note in ImportResolutionPerNamespace about how to use this } impl<'a> ImportDirective<'a> { // Given the binding to which this directive resolves in a particular namespace, // this returns the binding for the name this directive defines in that namespace. - fn import(&self, binding: &'a NameBinding<'a>, privacy_error: Option>>) + fn import(&'a self, binding: &'a NameBinding<'a>, privacy_error: Option>>) -> NameBinding<'a> { - let mut modifiers = match self.is_public { - true => DefModifiers::PUBLIC | DefModifiers::IMPORTABLE, - false => DefModifiers::empty(), - }; - if let GlobImport = self.subclass { - modifiers = modifiers | DefModifiers::GLOB_IMPORTED; - } - NameBinding { kind: NameBindingKind::Import { binding: binding, - id: self.id, + directive: self, privacy_error: privacy_error, }, - span: Some(self.span), - modifiers: modifiers, + span: self.span, + vis: self.vis, } } + + pub fn is_glob(&self) -> bool { + match self.subclass { ImportDirectiveSubclass::GlobImport { .. } => true, _ => false } + } } #[derive(Clone, Default)] @@ -142,9 +135,9 @@ impl<'a> SingleImports<'a> { impl<'a> NameResolution<'a> { fn try_define(&mut self, binding: &'a NameBinding<'a>) -> Result<(), &'a NameBinding<'a>> { if let Some(old_binding) = self.binding { - if binding.defined_with(DefModifiers::GLOB_IMPORTED) { + if binding.is_glob_import() { self.duplicate_globs.push(binding); - } else if old_binding.defined_with(DefModifiers::GLOB_IMPORTED) { + } else if old_binding.is_glob_import() { self.duplicate_globs.push(old_binding); self.binding = Some(binding); } else { @@ -161,7 +154,7 @@ impl<'a> NameResolution<'a> { fn binding(&self) -> Option<&'a NameBinding<'a>> { self.binding.and_then(|binding| match self.single_imports { SingleImports::None => Some(binding), - _ if !binding.defined_with(DefModifiers::GLOB_IMPORTED) => Some(binding), + _ if !binding.is_glob_import() => Some(binding), _ => None, // The binding could be shadowed by a single import, so it is not known. }) } @@ -171,7 +164,7 @@ impl<'a> NameResolution<'a> { fn try_result(&self, ns: Namespace, allow_private_imports: bool) -> Option>> { match self.binding { - Some(binding) if !binding.defined_with(DefModifiers::GLOB_IMPORTED) => + Some(binding) if !binding.is_glob_import() => return Some(Success(binding)), _ => {} // Items and single imports are not shadowable }; @@ -184,8 +177,8 @@ impl<'a> NameResolution<'a> { // If (1) we don't allow private imports, (2) no public single import can define // the name, and (3) no public glob has defined the name, the resolution depends // on whether more globs can define the name. - if !allow_private_imports && !directive.is_public && - !self.binding.map(NameBinding::is_public).unwrap_or(false) { + if !allow_private_imports && directive.vis != ty::Visibility::Public && + !self.binding.map(NameBinding::is_pseudo_public).unwrap_or(false) { return None; } @@ -195,7 +188,7 @@ impl<'a> NameResolution<'a> { }; let name = match directive.subclass { SingleImport { source, .. } => source, - GlobImport => unreachable!(), + GlobImport { .. 
} => unreachable!(), }; match target_module.resolve_name(name, ns, false) { Failed(_) => {} @@ -243,14 +236,15 @@ impl<'a> ::ModuleS<'a> { if let Some(result) = resolution.try_result(ns, allow_private_imports) { // If the resolution doesn't depend on glob definability, check privacy and return. return result.and_then(|binding| { - let allowed = allow_private_imports || !binding.is_import() || binding.is_public(); + let allowed = allow_private_imports || !binding.is_import() || + binding.is_pseudo_public(); if allowed { Success(binding) } else { Failed(None) } }); } // Check if the globs are determined for directive in self.globs.borrow().iter() { - if !allow_private_imports && !directive.is_public { continue } + if !allow_private_imports && directive.vis != ty::Visibility::Public { continue } match directive.target_module.get() { None => return Indeterminate, Some(target_module) => match target_module.resolve_name(name, ns, false) { @@ -285,16 +279,14 @@ impl<'a> ::ModuleS<'a> { subclass: ImportDirectiveSubclass, span: Span, id: NodeId, - is_public: bool, - is_prelude: bool) { + vis: ty::Visibility) { let directive = self.arenas.alloc_import_directive(ImportDirective { module_path: module_path, target_module: Cell::new(None), subclass: subclass, span: span, id: id, - is_public: is_public, - is_prelude: is_prelude, + vis: vis, }); self.unresolved_imports.borrow_mut().push(directive); @@ -307,8 +299,8 @@ impl<'a> ::ModuleS<'a> { } // We don't add prelude imports to the globs since they only affect lexical scopes, // which are not relevant to import resolution. - GlobImport if directive.is_prelude => {} - GlobImport => self.globs.borrow_mut().push(directive), + GlobImport { is_prelude: true } => {} + GlobImport { .. } => self.globs.borrow_mut().push(directive), } } @@ -337,7 +329,7 @@ impl<'a> ::ModuleS<'a> { } fn define_in_glob_importers(&self, name: Name, ns: Namespace, binding: &'a NameBinding<'a>) { - if !binding.defined_with(DefModifiers::PUBLIC | DefModifiers::IMPORTABLE) { return } + if !binding.is_importable() || !binding.is_pseudo_public() { return } for &(importer, directive) in self.glob_importers.borrow_mut().iter() { let _ = importer.try_define_child(name, ns, directive.import(binding, None)); } @@ -352,11 +344,11 @@ struct ImportResolvingError<'a> { help: String, } -struct ImportResolver<'a, 'b: 'a, 'tcx: 'b> { - resolver: &'a mut Resolver<'b, 'tcx>, +struct ImportResolver<'a, 'b: 'a> { + resolver: &'a mut Resolver<'b>, } -impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { +impl<'a, 'b:'a> ImportResolver<'a, 'b> { // Import resolution // // This is a fixed-point algorithm. We resolve imports until our efforts @@ -377,11 +369,17 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { i, self.resolver.unresolved_imports); - self.resolve_imports_for_module_subtree(self.resolver.graph_root, &mut errors); + // Attempt to resolve imports in all local modules. + for module in self.resolver.arenas.local_modules().iter() { + self.resolver.current_module = module; + self.resolve_imports_in_current_module(&mut errors); + } if self.resolver.unresolved_imports == 0 { debug!("(resolving imports) success"); - self.finalize_resolutions(self.resolver.graph_root, false); + for module in self.resolver.arenas.local_modules().iter() { + self.finalize_resolutions_in(module, false); + } break; } @@ -391,7 +389,9 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { // to avoid generating multiple errors on the same import. 
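[Editor's note] As the comments above say, import resolution is a fixed-point algorithm: the resolver keeps re-running a pass (now a flat walk over arenas.local_modules() instead of the old recursive subtree traversal) until nothing is left unresolved or a pass makes no progress, and only then reports errors once. A minimal standalone model of that fixed-point loop, with a toy Import type and a flat name set that are far simpler than the resolver's real data structures:

use std::collections::HashSet;

// Toy import: bring `source` into scope under the name `target`.
struct Import { target: String, source: String }

fn resolve_to_fixed_point(defined: &mut HashSet<String>, mut pending: Vec<Import>) -> Vec<Import> {
    let mut pass = 0;
    loop {
        pass += 1;
        println!("(resolving imports) pass {}, {} unresolved", pass, pending.len());
        let before = pending.len();
        // One pass: resolve every import whose source name is already defined.
        pending.retain(|imp| {
            if defined.contains(&imp.source) {
                defined.insert(imp.target.clone());
                false // resolved, drop it from the pending list
            } else {
                true  // still indeterminate, try again next pass
            }
        });
        if pending.is_empty() {
            println!("(resolving imports) success");
            break;
        }
        if pending.len() == before {
            // No progress this pass: everything left is unresolved or blocked by
            // another unresolved import, so bail out and report errors once.
            break;
        }
    }
    pending
}

fn main() {
    let mut defined = HashSet::new();
    defined.insert("std::io::Read".to_string());
    let pending = vec![
        Import { target: "Read".to_string(), source: "std::io::Read".to_string() },
        Import { target: "MyRead".to_string(), source: "Read".to_string() },
        Import { target: "Missing".to_string(), source: "does::not::exist".to_string() },
    ];
    for imp in resolve_to_fixed_point(&mut defined, pending) {
        println!("unresolved import of `{}`", imp.source);
    }
}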
// Imports that are still indeterminate at this point are actually blocked // by errored imports, so there is no point reporting them. - self.finalize_resolutions(self.resolver.graph_root, errors.len() == 0); + for module in self.resolver.arenas.local_modules().iter() { + self.finalize_resolutions_in(module, errors.len() == 0); + } for e in errors { self.import_resolving_error(e) } @@ -410,9 +410,9 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { // resolution for it so that later resolve stages won't complain. if let SingleImport { target, .. } = e.import_directive.subclass { let dummy_binding = self.resolver.arenas.alloc_name_binding(NameBinding { - modifiers: DefModifiers::GLOB_IMPORTED, kind: NameBindingKind::Def(Def::Err), - span: None, + span: DUMMY_SP, + vis: ty::Visibility::Public, }); let dummy_binding = e.import_directive.import(dummy_binding, None); @@ -428,22 +428,6 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { ResolutionError::UnresolvedImport(Some((&path, &e.help)))); } - /// Attempts to resolve imports for the given module and all of its - /// submodules. - fn resolve_imports_for_module_subtree(&mut self, - module_: Module<'b>, - errors: &mut Vec>) { - debug!("(resolving imports for module subtree) resolving {}", - module_to_string(&module_)); - let orig_module = replace(&mut self.resolver.current_module, module_); - self.resolve_imports_in_current_module(errors); - self.resolver.current_module = orig_module; - - for (_, child_module) in module_.module_children.borrow().iter() { - self.resolve_imports_for_module_subtree(child_module, errors); - } - } - /// Attempts to resolve imports for the given module only. fn resolve_imports_in_current_module(&mut self, errors: &mut Vec>) { let mut imports = Vec::new(); @@ -499,7 +483,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { let (source, target, value_determined, type_determined) = match directive.subclass { SingleImport { source, target, ref value_determined, ref type_determined } => (source, target, value_determined, type_determined), - GlobImport => return self.resolve_glob_import(target_module, directive), + GlobImport { .. } => return self.resolve_glob_import(target_module, directive), }; // We need to resolve both namespaces for this to succeed. @@ -516,12 +500,12 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { determined.set(true); if let Success(binding) = *result { - if !binding.defined_with(DefModifiers::IMPORTABLE) { + if !binding.is_importable() { let msg = format!("`{}` is not directly importable", target); span_err!(self.resolver.session, directive.span, E0253, "{}", &msg); } - let privacy_error = if !self.resolver.is_visible(binding, target_module) { + let privacy_error = if !self.resolver.is_accessible(binding.vis) { Some(Box::new(PrivacyError(directive.span, source, binding))) } else { None @@ -545,6 +529,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { (&Failed(_), &Failed(_)) => { let resolutions = target_module.resolutions.borrow(); let names = resolutions.iter().filter_map(|(&(ref name, _), resolution)| { + if *name == source { return None; } // Never suggest the same name match *resolution.borrow() { NameResolution { binding: Some(_), .. } => Some(name), NameResolution { single_imports: SingleImports::None, .. } => None, @@ -555,18 +540,21 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { Some(name) => format!(". 
Did you mean to use `{}`?", name), None => "".to_owned(), }; - let msg = format!("There is no `{}` in `{}`{}", - source, - module_to_string(target_module), lev_suggestion); + let module_str = module_to_string(target_module); + let msg = if &module_str == "???" { + format!("There is no `{}` in the crate root{}", source, lev_suggestion) + } else { + format!("There is no `{}` in `{}`{}", source, module_str, lev_suggestion) + }; return Failed(Some((directive.span, msg))); } _ => (), } match (&value_result, &type_result) { - (&Success(name_binding), _) if !name_binding.is_import() && - directive.is_public && - !name_binding.is_public() => { + (&Success(binding), _) if !binding.pseudo_vis() + .is_at_least(directive.vis, self.resolver) && + self.resolver.is_accessible(binding.vis) => { let msg = format!("`{}` is private, and cannot be reexported", source); let note_msg = format!("consider marking `{}` as `pub` in the imported module", source); @@ -575,10 +563,10 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { .emit(); } - (_, &Success(name_binding)) if !name_binding.is_import() && - directive.is_public && - !name_binding.is_public() => { - if name_binding.is_extern_crate() { + (_, &Success(binding)) if !binding.pseudo_vis() + .is_at_least(directive.vis, self.resolver) && + self.resolver.is_accessible(binding.vis) => { + if binding.is_extern_crate() { let msg = format!("extern crate `{}` is private, and cannot be reexported \ (error E0364), consider declaring with `pub`", source); @@ -620,7 +608,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { None => value_result.success().and_then(NameBinding::def).unwrap(), }; let path_resolution = PathResolution { base_def: def, depth: 0 }; - self.resolver.def_map.borrow_mut().insert(directive.id, path_resolution); + self.resolver.def_map.insert(directive.id, path_resolution); debug!("(resolving single import) successfully resolved import"); return Success(()); @@ -644,7 +632,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { } self.resolver.populate_module_if_necessary(target_module); - if directive.is_prelude { + if let GlobImport { is_prelude: true } = directive.subclass { *module_.prelude.borrow_mut() = Some(target_module); return Success(()); } @@ -658,27 +646,24 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { resolution.borrow().binding().map(|binding| (*name, binding)) }).collect::>(); for ((name, ns), binding) in bindings { - if binding.defined_with(DefModifiers::IMPORTABLE | DefModifiers::PUBLIC) { + if binding.is_importable() && binding.is_pseudo_public() { let _ = module_.try_define_child(name, ns, directive.import(binding, None)); } } // Record the destination of this import if let Some(did) = target_module.def_id() { - self.resolver.def_map.borrow_mut().insert(directive.id, - PathResolution { - base_def: Def::Mod(did), - depth: 0, - }); + let resolution = PathResolution { base_def: Def::Mod(did), depth: 0 }; + self.resolver.def_map.insert(directive.id, resolution); } debug!("(resolving glob import) successfully resolved import"); return Success(()); } - // Miscellaneous post-processing, including recording reexports, recording shadowed traits, - // reporting conflicts, reporting the PRIVATE_IN_PUBLIC lint, and reporting unresolved imports. - fn finalize_resolutions(&mut self, module: Module<'b>, report_unresolved_imports: bool) { + // Miscellaneous post-processing, including recording reexports, reporting conflicts, + // reporting the PRIVATE_IN_PUBLIC lint, and reporting unresolved imports. 
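[Editor's note] The re-export checks above compare visibilities with pseudo_vis().is_at_least(directive.vis, ..) and is_accessible(binding.vis) instead of the old is_public flag, so a `pub use` of a less visible item can be rejected without treating visibility as a simple boolean. Below is a simplified standalone model of that ordering, using a toy module tree of integer ids in place of ty::Visibility and the resolver's node-id machinery:

use std::collections::HashMap;

// Toy visibility: fully public, or restricted to a module and its descendants.
#[derive(Clone, Copy, PartialEq, Debug)]
enum Visibility {
    Public,
    Restricted(u32), // module id
}

// Toy module tree: child module id -> parent module id (0 is the crate root).
struct ModuleTree { parent: HashMap<u32, u32> }

impl ModuleTree {
    fn is_descendant_of(&self, mut module: u32, ancestor: u32) -> bool {
        loop {
            if module == ancestor { return true; }
            match self.parent.get(&module) {
                Some(&p) => module = p,
                None => return false,
            }
        }
    }
}

impl Visibility {
    // Can an item with this visibility be named from `module`?
    fn is_accessible_from(self, module: u32, tree: &ModuleTree) -> bool {
        match self {
            Visibility::Public => true,
            Visibility::Restricted(scope) => tree.is_descendant_of(module, scope),
        }
    }

    // Is this visibility at least as permissive as `other`? In the patch this is
    // what rejects re-exports that would widen an item's visibility.
    fn is_at_least(self, other: Visibility, tree: &ModuleTree) -> bool {
        match other {
            Visibility::Public => self == Visibility::Public,
            Visibility::Restricted(scope) => self.is_accessible_from(scope, tree),
        }
    }
}

fn main() {
    // root (0) -> a (1) -> a::b (2)
    let parent: HashMap<u32, u32> = vec![(1, 0), (2, 1)].into_iter().collect();
    let tree = ModuleTree { parent };
    let private_to_a = Visibility::Restricted(1);
    assert!(private_to_a.is_accessible_from(2, &tree));            // a::b can see it
    assert!(!private_to_a.is_accessible_from(0, &tree));           // the crate root cannot
    assert!(Visibility::Public.is_at_least(private_to_a, &tree));  // pub is always enough
    assert!(!private_to_a.is_at_least(Visibility::Public, &tree)); // `pub use` of it would widen
    println!("ok");
}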
+ fn finalize_resolutions_in(&mut self, module: Module<'b>, report_unresolved_imports: bool) { // Since import resolution is finished, globs will not define any more names. *module.globs.borrow_mut() = Vec::new(); @@ -694,27 +679,28 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { None => continue, }; - if binding.is_public() && (binding.is_import() || binding.is_extern_crate()) { + if binding.vis == ty::Visibility::Public && + (binding.is_import() || binding.is_extern_crate()) { if let Some(def) = binding.def() { reexports.push(Export { name: name, def_id: def.def_id() }); } } - if let NameBindingKind::Import { binding: orig_binding, id, .. } = binding.kind { - if ns == TypeNS && binding.is_public() && - orig_binding.defined_with(DefModifiers::PRIVATE_VARIANT) { + if let NameBindingKind::Import { binding: orig_binding, directive, .. } = binding.kind { + if ns == TypeNS && orig_binding.is_variant() && + !orig_binding.vis.is_at_least(binding.vis, self.resolver) { let msg = format!("variant `{}` is private, and cannot be reexported \ (error E0364), consider declaring its enum as `pub`", name); let lint = lint::builtin::PRIVATE_IN_PUBLIC; - self.resolver.session.add_lint(lint, id, binding.span.unwrap(), msg); + self.resolver.session.add_lint(lint, directive.id, binding.span, msg); } } } if reexports.len() > 0 { if let Some(def_id) = module.def_id() { - let node_id = self.resolver.ast_map.as_local_node_id(def_id).unwrap(); + let node_id = self.resolver.definitions.as_local_node_id(def_id).unwrap(); self.resolver.export_map.insert(node_id, reexports); } } @@ -725,10 +711,6 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> { break; } } - - for (_, child) in module.module_children.borrow().iter() { - self.finalize_resolutions(child, report_unresolved_imports); - } } } @@ -746,7 +728,7 @@ fn import_path_to_string(names: &[Name], subclass: &ImportDirectiveSubclass) -> fn import_directive_subclass_to_string(subclass: &ImportDirectiveSubclass) -> String { match *subclass { SingleImport { source, .. } => source.to_string(), - GlobImport => "*".to_string(), + GlobImport { .. 
} => "*".to_string(), } } diff --git a/src/librustc_save_analysis/Cargo.toml b/src/librustc_save_analysis/Cargo.toml index 52fa919347..c786b4d711 100644 --- a/src/librustc_save_analysis/Cargo.toml +++ b/src/librustc_save_analysis/Cargo.toml @@ -12,3 +12,4 @@ crate-type = ["dylib"] log = { path = "../liblog" } rustc = { path = "../librustc" } syntax = { path = "../libsyntax" } +serialize = { path = "../libserialize" } diff --git a/src/librustc_save_analysis/csv_dumper.rs b/src/librustc_save_analysis/csv_dumper.rs index 0e02830db7..e7cc534c5b 100644 --- a/src/librustc_save_analysis/csv_dumper.rs +++ b/src/librustc_save_analysis/csv_dumper.rs @@ -10,26 +10,20 @@ use std::io::Write; -use rustc::hir::def_id::{DefId, DefIndex}; -use syntax::codemap::Span; - -use super::data::*; +use super::external_data::*; use super::dump::Dump; -use super::span_utils::SpanUtils; -pub struct CsvDumper<'a, 'b, W: 'b> { - output: &'b mut W, - dump_spans: bool, - span: SpanUtils<'a> +pub struct CsvDumper<'b, W: 'b> { + output: &'b mut W } -impl<'a, 'b, W: Write> CsvDumper<'a, 'b, W> { - pub fn new(writer: &'b mut W, span: SpanUtils<'a>) -> CsvDumper<'a, 'b, W> { - CsvDumper { output: writer, dump_spans: false, span: span } +impl<'b, W: Write> CsvDumper<'b, W> { + pub fn new(writer: &'b mut W) -> CsvDumper<'b, W> { + CsvDumper { output: writer } } - fn record(&mut self, kind: &str, span: Span, values: String) { - let span_str = self.span.extent_str(span); + fn record(&mut self, kind: &str, span: SpanData, values: String) { + let span_str = span_extent_str(span); if let Err(_) = write!(self.output, "{},{}{}\n", kind, span_str, values) { error!("Error writing output"); } @@ -40,36 +34,23 @@ impl<'a, 'b, W: Write> CsvDumper<'a, 'b, W> { error!("Error writing output '{}'", info); } } - - pub fn dump_span(&mut self, kind: &str, span: Span) { - assert!(self.dump_spans); - let result = format!("span,kind,{},{},text,\"{}\"\n", - kind, - self.span.extent_str(span), - escape(self.span.snippet(span))); - self.record_raw(&result); - } } -impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { - fn crate_prelude(&mut self, span: Span, data: CratePreludeData) { - let crate_root = data.crate_root.unwrap_or("".to_owned()); - +impl<'b, W: Write + 'b> Dump for CsvDumper<'b, W> { + fn crate_prelude(&mut self, data: CratePreludeData) { let values = make_values_str(&[ ("name", &data.crate_name), - ("crate_root", &crate_root) + ("crate_root", &data.crate_root) ]); - self.record("crate", span, values); + self.record("crate", data.span, values); for c in data.external_crates { let num = c.num.to_string(); - let lo_loc = self.span.sess.codemap().lookup_char_pos(span.lo); - let file_name = SpanUtils::make_path_string(&lo_loc.file.name); let values = make_values_str(&[ ("name", &c.name), ("crate", &num), - ("file_name", &file_name) + ("file_name", &c.file_name) ]); self.record_raw(&format!("external_crate{}\n", values)); @@ -78,14 +59,9 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record_raw("end_external_crates\n"); } - fn enum_data(&mut self, span: Span, data: EnumData) { - if self.dump_spans { - self.dump_span("enum", span); - return; - } - - let id = data.id.to_string(); - let scope = data.scope.to_string(); + fn enum_data(&mut self, data: EnumData) { + let id = data.id.index.as_u32().to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("qualname", &data.qualname), @@ -96,15 +72,10 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { 
self.record("enum", data.span, values); } - fn extern_crate(&mut self, span: Span, data: ExternCrateData) { - if self.dump_spans { - self.dump_span("extern_crate", span); - return; - } - - let id = data.id.to_string(); + fn extern_crate(&mut self, data: ExternCrateData) { + let id = data.id.index.as_u32().to_string(); let crate_num = data.crate_num.to_string(); - let scope = data.scope.to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("name", &data.name), @@ -116,21 +87,16 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("extern_crate", data.span, values); } - fn impl_data(&mut self, span: Span, data: ImplData) { - if self.dump_spans { - self.dump_span("impl", span); - return; - } - + fn impl_data(&mut self, data: ImplData) { let self_ref = data.self_ref.unwrap_or(null_def_id()); let trait_ref = data.trait_ref.unwrap_or(null_def_id()); - let id = data.id.to_string(); + let id = data.id.index.as_u32().to_string(); let ref_id = self_ref.index.as_usize().to_string(); let ref_id_crate = self_ref.krate.to_string(); let trait_id = trait_ref.index.as_usize().to_string(); let trait_id_crate = trait_ref.krate.to_string(); - let scope = data.scope.to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("refid", &ref_id), @@ -144,14 +110,10 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { } fn inheritance(&mut self, data: InheritanceData) { - if self.dump_spans { - return; - } - let base_id = data.base_id.index.as_usize().to_string(); let base_crate = data.base_id.krate.to_string(); - let deriv_id = data.deriv_id.to_string(); - let deriv_crate = 0.to_string(); + let deriv_id = data.deriv_id.index.as_u32().to_string(); + let deriv_crate = data.deriv_id.krate.to_string(); let values = make_values_str(&[ ("base", &base_id), ("basecrate", &base_crate), @@ -162,19 +124,14 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("inheritance", data.span, values); } - fn function(&mut self, span: Span, data: FunctionData) { - if self.dump_spans { - self.dump_span("function", span); - return; - } - + fn function(&mut self, data: FunctionData) { let (decl_id, decl_crate) = match data.declaration { Some(id) => (id.index.as_usize().to_string(), id.krate.to_string()), None => (String::new(), String::new()) }; - let id = data.id.to_string(); - let scope = data.scope.to_string(); + let id = data.id.index.as_u32().to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("qualname", &data.qualname), @@ -186,15 +143,10 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("function", data.span, values); } - fn function_ref(&mut self, span: Span, data: FunctionRefData) { - if self.dump_spans { - self.dump_span("fn_ref", span); - return; - } - + fn function_ref(&mut self, data: FunctionRefData) { let ref_id = data.ref_id.index.as_usize().to_string(); let ref_crate = data.ref_id.krate.to_string(); - let scope = data.scope.to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("refid", &ref_id), ("refidcrate", &ref_crate), @@ -205,16 +157,11 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("fn_ref", data.span, values); } - fn function_call(&mut self, span: Span, data: FunctionCallData) { - if self.dump_spans { - self.dump_span("fn_call", span); - return; - } - + fn function_call(&mut self, data: 
FunctionCallData) { let ref_id = data.ref_id.index.as_usize().to_string(); let ref_crate = data.ref_id.krate.to_string(); let qualname = String::new(); - let scope = data.scope.to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("refid", &ref_id), ("refidcrate", &ref_crate), @@ -225,29 +172,19 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("fn_call", data.span, values); } - fn method(&mut self, span: Span, data: MethodData) { - if self.dump_spans { - self.dump_span("method_decl", span); - return; - } - - let id = data.id.to_string(); - let scope = data.scope.to_string(); + fn method(&mut self, data: MethodData) { + let id = data.id.index.as_u32().to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("qualname", &data.qualname), ("scopeid", &scope) ]); - self.record("method_decl", span, values); + self.record("method_decl", data.span, values); } - fn method_call(&mut self, span: Span, data: MethodCallData) { - if self.dump_spans { - self.dump_span("method_call", span); - return; - } - + fn method_call(&mut self, data: MethodCallData) { let (dcn, dck) = match data.decl_id { Some(declid) => (declid.index.as_usize().to_string(), declid.krate.to_string()), None => (String::new(), String::new()), @@ -257,7 +194,7 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { let def_id = ref_id.index.as_usize().to_string(); let def_crate = ref_id.krate.to_string(); - let scope = data.scope.to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("refid", &def_id), ("refidcrate", &def_crate), @@ -269,12 +206,7 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("method_call", data.span, values); } - fn macro_data(&mut self, span: Span, data: MacroData) { - if self.dump_spans { - self.dump_span("macro", span); - return; - } - + fn macro_data(&mut self, data: MacroData) { let values = make_values_str(&[ ("name", &data.name), ("qualname", &data.qualname) @@ -283,13 +215,8 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("macro", data.span, values); } - fn macro_use(&mut self, span: Span, data: MacroUseData) { - if self.dump_spans { - self.dump_span("macro_use", span); - return; - } - - let scope = data.scope.to_string(); + fn macro_use(&mut self, data: MacroUseData) { + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("callee_name", &data.name), ("qualname", &data.qualname), @@ -300,12 +227,8 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { } fn mod_data(&mut self, data: ModData) { - if self.dump_spans { - return; - } - - let id = data.id.to_string(); - let scope = data.scope.to_string(); + let id = data.id.index.as_u32().to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("qualname", &data.qualname), @@ -316,18 +239,13 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("module", data.span, values); } - fn mod_ref(&mut self, span: Span, data: ModRefData) { - if self.dump_spans { - self.dump_span("mod_ref", span); - return; - } - + fn mod_ref(&mut self, data: ModRefData) { let (ref_id, ref_crate) = match data.ref_id { Some(rid) => (rid.index.as_usize().to_string(), rid.krate.to_string()), None => (0.to_string(), 0.to_string()) }; - let scope = data.scope.to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ 
("refid", &ref_id), ("refidcrate", &ref_crate), @@ -338,15 +256,10 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("mod_ref", data.span, values); } - fn struct_data(&mut self, span: Span, data: StructData) { - if self.dump_spans { - self.dump_span("struct", span); - return; - } - - let id = data.id.to_string(); - let ctor_id = data.ctor_id.to_string(); - let scope = data.scope.to_string(); + fn struct_data(&mut self, data: StructData) { + let id = data.id.index.as_u32().to_string(); + let ctor_id = data.ctor_id.index.as_u32().to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("ctor_id", &ctor_id), @@ -358,14 +271,9 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("struct", data.span, values); } - fn struct_variant(&mut self, span: Span, data: StructVariantData) { - if self.dump_spans { - self.dump_span("variant_struct", span); - return; - } - - let id = data.id.to_string(); - let scope = data.scope.to_string(); + fn struct_variant(&mut self, data: StructVariantData) { + let id = data.id.index.as_u32().to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("ctor_id", &id), @@ -378,14 +286,9 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("variant_struct", data.span, values); } - fn trait_data(&mut self, span: Span, data: TraitData) { - if self.dump_spans { - self.dump_span("trait", span); - return; - } - - let id = data.id.to_string(); - let scope = data.scope.to_string(); + fn trait_data(&mut self, data: TraitData) { + let id = data.id.index.as_u32().to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("qualname", &data.qualname), @@ -396,14 +299,9 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("trait", data.span, values); } - fn tuple_variant(&mut self, span: Span, data: TupleVariantData) { - if self.dump_spans { - self.dump_span("variant", span); - return; - } - - let id = data.id.to_string(); - let scope = data.scope.to_string(); + fn tuple_variant(&mut self, data: TupleVariantData) { + let id = data.id.index.as_u32().to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("name", &data.name), @@ -416,18 +314,13 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("variant", data.span, values); } - fn type_ref(&mut self, span: Span, data: TypeRefData) { - if self.dump_spans { - self.dump_span("type_ref", span); - return; - } - + fn type_ref(&mut self, data: TypeRefData) { let (ref_id, ref_crate) = match data.ref_id { Some(id) => (id.index.as_usize().to_string(), id.krate.to_string()), None => (0.to_string(), 0.to_string()) }; - let scope = data.scope.to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("refid", &ref_id), ("refidcrate", &ref_crate), @@ -438,13 +331,8 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("type_ref", data.span, values); } - fn typedef(&mut self, span: Span, data: TypedefData) { - if self.dump_spans { - self.dump_span("typedef", span); - return; - } - - let id = data.id.to_string(); + fn typedef(&mut self, data: TypeDefData) { + let id = data.id.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("qualname", &data.qualname), @@ -454,18 +342,13 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { 
self.record("typedef", data.span, values); } - fn use_data(&mut self, span: Span, data: UseData) { - if self.dump_spans { - self.dump_span("use_alias", span); - return; - } - + fn use_data(&mut self, data: UseData) { let mod_id = data.mod_id.unwrap_or(null_def_id()); - let id = data.id.to_string(); + let id = data.id.index.as_u32().to_string(); let ref_id = mod_id.index.as_usize().to_string(); let ref_crate = mod_id.krate.to_string(); - let scope = data.scope.to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("refid", &ref_id), @@ -477,16 +360,11 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("use_alias", data.span, values); } - fn use_glob(&mut self, span: Span, data: UseGlobData) { - if self.dump_spans { - self.dump_span("use_glob", span); - return; - } - + fn use_glob(&mut self, data: UseGlobData) { let names = data.names.join(", "); - let id = data.id.to_string(); - let scope = data.scope.to_string(); + let id = data.id.index.as_u32().to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("value", &names), @@ -496,14 +374,9 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("use_glob", data.span, values); } - fn variable(&mut self, span: Span, data: VariableData) { - if self.dump_spans { - self.dump_span("variable", span); - return; - } - - let id = data.id.to_string(); - let scope = data.scope.to_string(); + fn variable(&mut self, data: VariableData) { + let id = data.id.index.as_u32().to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("id", &id), ("name", &data.name), @@ -516,15 +389,10 @@ impl<'a, 'b, W: Write + 'b> Dump for CsvDumper<'a, 'b, W> { self.record("variable", data.span, values); } - fn variable_ref(&mut self, span: Span, data: VariableRefData) { - if self.dump_spans { - self.dump_span("var_ref", span); - return; - } - + fn variable_ref(&mut self, data: VariableRefData) { let ref_id = data.ref_id.index.as_usize().to_string(); let ref_crate = data.ref_id.krate.to_string(); - let scope = data.scope.to_string(); + let scope = data.scope.index.as_u32().to_string(); let values = make_values_str(&[ ("refid", &ref_id), ("refidcrate", &ref_crate), @@ -558,9 +426,9 @@ fn make_values_str(pairs: &[(&'static str, &str)]) -> String { }) } -fn null_def_id() -> DefId { - DefId { - krate: 0, - index: DefIndex::new(0), - } +fn span_extent_str(span: SpanData) -> String { + format!("file_name,\"{}\",file_line,{},file_col,{},byte_start,{}\ + file_line_end,{},file_col_end,{},byte_end,{}", + span.file_name, span.line_start, span.column_start, span.byte_start, + span.line_end, span.column_end, span.byte_end) } diff --git a/src/librustc_save_analysis/data.rs b/src/librustc_save_analysis/data.rs index 7f2f2618c3..2275a1c5c6 100644 --- a/src/librustc_save_analysis/data.rs +++ b/src/librustc_save_analysis/data.rs @@ -13,21 +13,19 @@ //! The `Dump` trait can be used together with `DumpVisitor` in order to //! retrieve the data from a crate. -use std::hash::Hasher; - use rustc::hir::def_id::DefId; -use rustc::ty; use syntax::ast::{CrateNum, NodeId}; use syntax::codemap::Span; pub struct CrateData { pub name: String, pub number: u32, + pub span: Span, } /// Data for any entity in the Rust language. The actual data contained varies /// with the kind of entity being queried. See the nested structs for details. 
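[Editor's note] With the switch to external_data, CsvDumper no longer looks spans up through SpanUtils; each record arrives with a pre-resolved SpanData, and span_extent_str flattens it into the key,value prefix of every CSV line. A cut-down standalone sketch of that layout follows; this SpanData is only a stand-in carrying the fields the formatter uses, not the full external_data type:

// Cut-down stand-in for save-analysis' external SpanData.
struct SpanData {
    file_name: String,
    byte_start: u32,
    byte_end: u32,
    line_start: usize,
    line_end: usize,
    column_start: usize,
    column_end: usize,
}

// Mirrors the key,value extent prefix emitted in front of every CSV record.
fn span_extent_str(span: &SpanData) -> String {
    format!("file_name,\"{}\",file_line,{},file_col,{},byte_start,{},\
             file_line_end,{},file_col_end,{},byte_end,{}",
            span.file_name, span.line_start, span.column_start, span.byte_start,
            span.line_end, span.column_end, span.byte_end)
}

fn main() {
    let span = SpanData {
        file_name: "src/lib.rs".to_string(),
        byte_start: 120, byte_end: 135,
        line_start: 4, line_end: 4,
        column_start: 8, column_end: 23,
    };
    // A record line roughly as CsvDumper::record writes it: kind, extent, then values.
    println!("function,{}{}", span_extent_str(&span), ",id,7,qualname,\"::foo\"");
}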
-#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub enum Data { /// Data for Enums. EnumData(EnumData), @@ -64,7 +62,7 @@ pub enum Data { /// Data for a tuple variant. TupleVariantData(TupleVariantData), /// Data for a typedef. - TypeDefData(TypedefData), + TypeDefData(TypeDefData), /// Data for a reference to a type or trait. TypeRefData(TypeRefData), /// Data for a use statement. @@ -79,24 +77,27 @@ pub enum Data { } /// Data for the prelude of a crate. -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct CratePreludeData { pub crate_name: String, - pub crate_root: Option, - pub external_crates: Vec + pub crate_root: String, + pub external_crates: Vec, + pub span: Span, } /// Data for external crates in the prelude of a crate. -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct ExternalCrateData { pub name: String, - pub num: CrateNum + pub num: CrateNum, + pub file_name: String, } /// Data for enum declarations. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, RustcEncodable)] pub struct EnumData { pub id: NodeId, + pub name: String, pub value: String, pub qualname: String, pub span: Span, @@ -104,7 +105,7 @@ pub struct EnumData { } /// Data for extern crates. -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct ExternCrateData { pub id: NodeId, pub name: String, @@ -115,7 +116,7 @@ pub struct ExternCrateData { } /// Data about a function call. -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct FunctionCallData { pub span: Span, pub scope: NodeId, @@ -123,7 +124,7 @@ pub struct FunctionCallData { } /// Data for all kinds of functions and methods. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, RustcEncodable)] pub struct FunctionData { pub id: NodeId, pub name: String, @@ -131,17 +132,18 @@ pub struct FunctionData { pub declaration: Option, pub span: Span, pub scope: NodeId, + pub value: String, } /// Data about a function call. -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct FunctionRefData { pub span: Span, pub scope: NodeId, pub ref_id: DefId, } -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct ImplData { pub id: NodeId, pub span: Span, @@ -150,7 +152,7 @@ pub struct ImplData { pub self_ref: Option, } -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] // FIXME: this struct should not exist. However, removing it requires heavy // refactoring of dump_visitor.rs. See PR 31838 for more info. pub struct ImplData2 { @@ -164,7 +166,7 @@ pub struct ImplData2 { pub self_ref: Option, } -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct InheritanceData { pub span: Span, pub base_id: DefId, @@ -172,7 +174,7 @@ pub struct InheritanceData { } /// Data about a macro declaration. -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct MacroData { pub span: Span, pub name: String, @@ -180,7 +182,7 @@ pub struct MacroData { } /// Data about a macro use. -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct MacroUseData { pub span: Span, pub name: String, @@ -193,7 +195,7 @@ pub struct MacroUseData { } /// Data about a method call. -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct MethodCallData { pub span: Span, pub scope: NodeId, @@ -202,16 +204,18 @@ pub struct MethodCallData { } /// Data for method declarations (methods with a body are treated as functions). -#[derive(Clone, Debug)] +#[derive(Clone, Debug, RustcEncodable)] pub struct MethodData { pub id: NodeId, + pub name: String, pub qualname: String, pub span: Span, pub scope: NodeId, + pub value: String, } /// Data for modules. 
-#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct ModData { pub id: NodeId, pub name: String, @@ -222,7 +226,7 @@ pub struct ModData { } /// Data for a reference to a module. -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct ModRefData { pub span: Span, pub scope: NodeId, @@ -230,9 +234,10 @@ pub struct ModRefData { pub qualname: String } -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct StructData { pub span: Span, + pub name: String, pub id: NodeId, pub ctor_id: NodeId, pub qualname: String, @@ -240,9 +245,10 @@ pub struct StructData { pub value: String } -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct StructVariantData { pub span: Span, + pub name: String, pub id: NodeId, pub qualname: String, pub type_value: String, @@ -250,16 +256,17 @@ pub struct StructVariantData { pub scope: NodeId } -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct TraitData { pub span: Span, pub id: NodeId, + pub name: String, pub qualname: String, pub scope: NodeId, pub value: String } -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct TupleVariantData { pub span: Span, pub id: NodeId, @@ -271,16 +278,17 @@ pub struct TupleVariantData { } /// Data for a typedef. -#[derive(Debug)] -pub struct TypedefData { +#[derive(Debug, RustcEncodable)] +pub struct TypeDefData { pub id: NodeId, + pub name: String, pub span: Span, pub qualname: String, pub value: String, } /// Data for a reference to a type or trait. -#[derive(Clone, Debug)] +#[derive(Clone, Debug, RustcEncodable)] pub struct TypeRefData { pub span: Span, pub scope: NodeId, @@ -288,7 +296,7 @@ pub struct TypeRefData { pub qualname: String, } -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct UseData { pub id: NodeId, pub span: Span, @@ -297,7 +305,7 @@ pub struct UseData { pub scope: NodeId } -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct UseGlobData { pub id: NodeId, pub span: Span, @@ -306,7 +314,7 @@ pub struct UseGlobData { } /// Data for local and global variables (consts and statics). -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct VariableData { pub id: NodeId, pub name: String, @@ -319,65 +327,10 @@ pub struct VariableData { /// Data for the use of some item (e.g., the use of a local variable, which /// will refer to that variables declaration (by ref_id)). -#[derive(Debug)] +#[derive(Debug, RustcEncodable)] pub struct VariableRefData { pub name: String, pub span: Span, pub scope: NodeId, pub ref_id: DefId, } - -// Emitted ids are used to cross-reference items across crates. DefIds and -// NodeIds do not usually correspond in any way. The strategy is to use the -// index from the DefId as a crate-local id. However, within a crate, DefId -// indices and NodeIds can overlap. So, we must adjust the NodeIds. If an -// item can be identified by a DefId as well as a NodeId, then we use the -// DefId index as the id. If it can't, then we have to use the NodeId, but -// need to adjust it so it will not clash with any possible DefId index. -pub fn normalize_node_id<'a>(tcx: &ty::TyCtxt<'a>, id: NodeId) -> usize { - match tcx.map.opt_local_def_id(id) { - Some(id) => id.index.as_usize(), - None => id as usize + tcx.map.num_local_def_ids() - } -} - -// Macro to implement a normalize() function (see below for usage) -macro_rules! 
impl_normalize { - ($($t:ty => $($field:ident),*);*) => { - $( - impl $t { - pub fn normalize<'a>(mut self, tcx: &ty::TyCtxt<'a>) -> $t { - $( - self.$field = normalize_node_id(tcx, self.$field) as u32; - )* - self - } - } - )* - } -} - -impl_normalize! { - EnumData => id, scope; - ExternCrateData => id, scope; - FunctionCallData => scope; - FunctionData => id, scope; - FunctionRefData => scope; - ImplData => id, scope; - InheritanceData => deriv_id; - MacroUseData => scope; - MethodCallData => scope; - MethodData => id, scope; - ModData => id, scope; - ModRefData => scope; - StructData => ctor_id, id, scope; - StructVariantData => id, scope; - TupleVariantData => id, scope; - TraitData => id, scope; - TypedefData => id; - TypeRefData => scope; - UseData => id, scope; - UseGlobData => id, scope; - VariableData => id; - VariableRefData => scope -} diff --git a/src/librustc_save_analysis/dump.rs b/src/librustc_save_analysis/dump.rs index b0cc7926f4..18241b394c 100644 --- a/src/librustc_save_analysis/dump.rs +++ b/src/librustc_save_analysis/dump.rs @@ -8,33 +8,31 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use syntax::codemap::Span; - -use super::data::*; +use super::external_data::*; pub trait Dump { - fn crate_prelude(&mut self, _: Span, _: CratePreludeData) {} - fn enum_data(&mut self, _: Span, _: EnumData) {} - fn extern_crate(&mut self, _: Span, _: ExternCrateData) {} - fn impl_data(&mut self, _: Span, _: ImplData) {} - fn inheritance(&mut self, _: InheritanceData) {} - fn function(&mut self, _: Span, _: FunctionData) {} - fn function_ref(&mut self, _: Span, _: FunctionRefData) {} - fn function_call(&mut self, _: Span, _: FunctionCallData) {} - fn method(&mut self, _: Span, _: MethodData) {} - fn method_call(&mut self, _: Span, _: MethodCallData) {} - fn macro_data(&mut self, _: Span, _: MacroData) {} - fn macro_use(&mut self, _: Span, _: MacroUseData) {} - fn mod_data(&mut self, _: ModData) {} - fn mod_ref(&mut self, _: Span, _: ModRefData) {} - fn struct_data(&mut self, _: Span, _: StructData) {} - fn struct_variant(&mut self, _: Span, _: StructVariantData) {} - fn trait_data(&mut self, _: Span, _: TraitData) {} - fn tuple_variant(&mut self, _: Span, _: TupleVariantData) {} - fn type_ref(&mut self, _: Span, _: TypeRefData) {} - fn typedef(&mut self, _: Span, _: TypedefData) {} - fn use_data(&mut self, _: Span, _: UseData) {} - fn use_glob(&mut self, _: Span, _: UseGlobData) {} - fn variable(&mut self, _: Span, _: VariableData) {} - fn variable_ref(&mut self, _: Span, _: VariableRefData) {} + fn crate_prelude(&mut self, CratePreludeData) {} + fn enum_data(&mut self, EnumData) {} + fn extern_crate(&mut self, ExternCrateData) {} + fn impl_data(&mut self, ImplData) {} + fn inheritance(&mut self, InheritanceData) {} + fn function(&mut self, FunctionData) {} + fn function_ref(&mut self, FunctionRefData) {} + fn function_call(&mut self, FunctionCallData) {} + fn method(&mut self, MethodData) {} + fn method_call(&mut self, MethodCallData) {} + fn macro_data(&mut self, MacroData) {} + fn macro_use(&mut self, MacroUseData) {} + fn mod_data(&mut self, ModData) {} + fn mod_ref(&mut self, ModRefData) {} + fn struct_data(&mut self, StructData) {} + fn struct_variant(&mut self, StructVariantData) {} + fn trait_data(&mut self, TraitData) {} + fn tuple_variant(&mut self, TupleVariantData) {} + fn type_ref(&mut self, TypeRefData) {} + fn typedef(&mut self, TypeDefData) {} + fn use_data(&mut self, UseData) {} + fn use_glob(&mut self, 
UseGlobData) {} + fn variable(&mut self, VariableData) {} + fn variable_ref(&mut self, VariableRefData) {} } diff --git a/src/librustc_save_analysis/dump_visitor.rs b/src/librustc_save_analysis/dump_visitor.rs index 3784c95fe2..4d79ddfe8c 100644 --- a/src/librustc_save_analysis/dump_visitor.rs +++ b/src/librustc_save_analysis/dump_visitor.rs @@ -39,14 +39,13 @@ use syntax::ast::{self, NodeId, PatKind}; use syntax::codemap::*; use syntax::parse::token::{self, keywords}; use syntax::visit::{self, Visitor}; -use syntax::print::pprust::{path_to_string, ty_to_string}; +use syntax::print::pprust::{path_to_string, ty_to_string, bounds_to_string, generics_to_string}; use syntax::ptr::P; -use rustc::hir::lowering::{lower_expr, LoweringContext}; - use super::{escape, generated_code, SaveContext, PathCollector}; use super::data::*; use super::dump::Dump; +use super::external_data::Lower; use super::span_utils::SpanUtils; use super::recorder; @@ -60,12 +59,12 @@ macro_rules! down_cast_data { }; } -pub struct DumpVisitor<'l, 'tcx: 'l, D: 'l> { +pub struct DumpVisitor<'l, 'tcx: 'l, 'll, D: 'll> { save_ctxt: SaveContext<'l, 'tcx>, sess: &'l Session, - tcx: &'l TyCtxt<'tcx>, + tcx: TyCtxt<'l, 'tcx, 'tcx>, analysis: &'l ty::CrateAnalysis<'l>, - dumper: &'l mut D, + dumper: &'ll mut D, span: SpanUtils<'l>, @@ -77,22 +76,19 @@ pub struct DumpVisitor<'l, 'tcx: 'l, D: 'l> { // one macro use per unique callsite span. mac_defs: HashSet, mac_uses: HashSet, - } -impl <'l, 'tcx, D> DumpVisitor<'l, 'tcx, D> -where D: Dump -{ - pub fn new(tcx: &'l TyCtxt<'tcx>, - lcx: &'l LoweringContext<'l>, +impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> { + pub fn new(tcx: TyCtxt<'l, 'tcx, 'tcx>, + save_ctxt: SaveContext<'l, 'tcx>, analysis: &'l ty::CrateAnalysis<'l>, - dumper: &'l mut D) - -> DumpVisitor<'l, 'tcx, D> { + dumper: &'ll mut D) + -> DumpVisitor<'l, 'tcx, 'll, D> { let span_utils = SpanUtils::new(&tcx.sess); DumpVisitor { sess: &tcx.sess, tcx: tcx, - save_ctxt: SaveContext::from_span_utils(tcx, lcx, span_utils.clone()), + save_ctxt: save_ctxt, analysis: analysis, dumper: dumper, span: span_utils.clone(), @@ -103,7 +99,7 @@ where D: Dump } fn nest(&mut self, scope_id: NodeId, f: F) - where F: FnOnce(&mut DumpVisitor<'l, 'tcx, D>) + where F: FnOnce(&mut DumpVisitor<'l, 'tcx, 'll, D>) { let parent_scope = self.cur_scope; self.cur_scope = scope_id; @@ -122,20 +118,23 @@ where D: Dump // Info about all the external crates referenced from this crate. let external_crates = self.save_ctxt.get_external_crates().into_iter().map(|c| { + let lo_loc = self.span.sess.codemap().lookup_char_pos(c.span.lo); ExternalCrateData { name: c.name, - num: c.number + num: c.number, + file_name: SpanUtils::make_path_string(&lo_loc.file.name), } }).collect(); // The current crate. let data = CratePreludeData { crate_name: name.into(), - crate_root: crate_root, - external_crates: external_crates + crate_root: crate_root.unwrap_or("".to_owned()), + external_crates: external_crates, + span: krate.span, }; - self.dumper.crate_prelude(krate.span, data); + self.dumper.crate_prelude(data.lower(self.tcx)); } // Return all non-empty prefixes of a path. 
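[Editor's note] From here on the visitor replaces the old .normalize(&self.tcx) calls with .lower(self.tcx), converting the AST-side records into the external_data variants the new CsvDumper consumes (raw spans become SpanData, node ids become DefId-based ids). The external_data module itself is not part of this excerpt, so the following is only a guessed-at sketch of the Lower pattern with toy types:

// Toy stand-ins: an "internal" record holding a raw byte span, and an
// "external" record holding a resolved, file-relative span.
struct Span { lo: u32, hi: u32 }
struct SpanData { file_name: String, byte_start: u32, byte_end: u32 }

struct ModRefData { span: Span, qualname: String }
struct ExternalModRefData { span: SpanData, qualname: String }

// Stand-in for the context used to resolve spans (the real code passes TyCtxt
// and looks spans up in the codemap).
struct Ctxt { file_name: String }

trait Lower {
    type Target;
    fn lower(self, ctxt: &Ctxt) -> Self::Target;
}

impl Lower for ModRefData {
    type Target = ExternalModRefData;
    fn lower(self, ctxt: &Ctxt) -> ExternalModRefData {
        ExternalModRefData {
            span: SpanData {
                file_name: ctxt.file_name.clone(),
                byte_start: self.span.lo,
                byte_end: self.span.hi,
            },
            qualname: self.qualname,
        }
    }
}

fn main() {
    let ctxt = Ctxt { file_name: "src/lib.rs".to_string() };
    let raw = ModRefData { span: Span { lo: 10, hi: 20 }, qualname: "::foo::bar".to_string() };
    let lowered = raw.lower(&ctxt);
    println!("{} at {}:{}..{}", lowered.qualname,
             lowered.span.file_name, lowered.span.byte_start, lowered.span.byte_end);
}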
@@ -200,12 +199,12 @@ where D: Dump } else { qualname.clone() }; - self.dumper.mod_ref(path.span, ModRefData { + self.dumper.mod_ref(ModRefData { span: *span, qualname: qualname, scope: self.cur_scope, ref_id: None - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } @@ -225,12 +224,12 @@ where D: Dump } else { qualname.clone() }; - self.dumper.mod_ref(path.span, ModRefData { + self.dumper.mod_ref(ModRefData { span: *span, qualname: qualname, scope: self.cur_scope, ref_id: None - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } @@ -246,12 +245,12 @@ where D: Dump // write the trait part of the sub-path let (ref span, ref qualname) = sub_paths[len-2]; - self.dumper.type_ref(path.span, TypeRefData { + self.dumper.type_ref(TypeRefData { ref_id: None, span: *span, qualname: qualname.to_owned(), scope: 0 - }); + }.lower(self.tcx)); // write the other sub-paths if len <= 2 { @@ -259,12 +258,12 @@ where D: Dump } let sub_paths = &sub_paths[..len-2]; for &(ref span, ref qualname) in sub_paths { - self.dumper.mod_ref(path.span, ModRefData { + self.dumper.mod_ref(ModRefData { span: *span, qualname: qualname.to_owned(), scope: self.cur_scope, ref_id: None - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } @@ -301,24 +300,24 @@ where D: Dump match def { Def::Mod(_) | Def::ForeignMod(_) => { - self.dumper.mod_ref(span, ModRefData { + self.dumper.mod_ref(ModRefData { span: sub_span.expect("No span found for mod ref"), ref_id: Some(def_id), scope: scope, qualname: String::new() - }.normalize(&self.tcx)); + }.lower(self.tcx)); } Def::Struct(..) | Def::Enum(..) | Def::TyAlias(..) | Def::AssociatedTy(..) | Def::Trait(_) => { - self.dumper.type_ref(span, TypeRefData { + self.dumper.type_ref(TypeRefData { span: sub_span.expect("No span found for type ref"), ref_id: Some(def_id), scope: scope, qualname: String::new() - }.normalize(&self.tcx)); + }.lower(self.tcx)); } Def::Static(_, _) | Def::Const(_) | @@ -326,19 +325,19 @@ where D: Dump Def::Local(..) | Def::Variant(..) | Def::Upvar(..) => { - self.dumper.variable_ref(span, VariableRefData { + self.dumper.variable_ref(VariableRefData { span: sub_span.expect("No span found for var ref"), ref_id: def_id, scope: scope, name: String::new() - }.normalize(&self.tcx)); + }.lower(self.tcx)); } Def::Fn(..) => { - self.dumper.function_ref(span, FunctionRefData { + self.dumper.function_ref(FunctionRefData { span: sub_span.expect("No span found for fn ref"), ref_id: def_id, scope: scope - }.normalize(&self.tcx)); + }.lower(self.tcx)); } Def::SelfTy(..) | Def::Label(_) | @@ -365,7 +364,7 @@ where D: Dump // variable name, but who knows?) 
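[Editor's note] The process_path hunk above dispatches on the resolved Def to decide which record the dumper receives: modules become mod_ref, type-like defs become type_ref, value-like defs become variable_ref, and functions become fn_ref. A toy restatement of that dispatch, with a heavily pruned stand-in for rustc's Def enum:

// Heavily pruned stand-in for rustc::hir::def::Def.
enum Def {
    Mod,
    Struct,
    Enum,
    TyAlias,
    Trait,
    Static,
    Const,
    Local,
    Variant,
    Fn,
}

// Which save-analysis record kind a reference to this def produces.
fn record_kind(def: &Def) -> &'static str {
    match *def {
        Def::Mod => "mod_ref",
        Def::Struct | Def::Enum | Def::TyAlias | Def::Trait => "type_ref",
        Def::Static | Def::Const | Def::Local | Def::Variant => "var_ref",
        Def::Fn => "fn_ref",
    }
}

fn main() {
    for (name, def) in [("foo", Def::Fn), ("Bar", Def::Struct), ("x", Def::Local)] {
        println!("{} -> {}", name, record_kind(&def));
    }
}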
let sub_span = span_utils.span_for_last_ident(p.span); if !self.span.filter_generated(sub_span, p.span) { - self.dumper.variable(p.span, VariableData { + self.dumper.variable(VariableData { id: id, span: sub_span.expect("No span found for variable"), name: path_to_string(p), @@ -373,7 +372,7 @@ where D: Dump type_value: typ, value: String::new(), scope: 0 - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } } @@ -389,19 +388,24 @@ where D: Dump if let Some(method_data) = self.save_ctxt.get_method_data(id, name, span) { + let sig_str = ::make_signature(&sig.decl, &sig.generics); if body.is_some() { if !self.span.filter_generated(Some(method_data.span), span) { - self.dumper.function(span, method_data.clone().normalize(&self.tcx)); + let mut data = method_data.clone(); + data.value = sig_str; + self.dumper.function(data.lower(self.tcx)); } self.process_formals(&sig.decl.inputs, &method_data.qualname); } else { if !self.span.filter_generated(Some(method_data.span), span) { - self.dumper.method(span, MethodData { + self.dumper.method(MethodData { id: method_data.id, + name: method_data.name, span: method_data.span, scope: method_data.scope, qualname: method_data.qualname.clone(), - }.normalize(&self.tcx)); + value: sig_str, + }.lower(self.tcx)); } } self.process_generic_params(&sig.generics, span, &method_data.qualname, id); @@ -426,7 +430,7 @@ where D: Dump let trait_ref_data = self.save_ctxt.get_trait_ref_data(trait_ref, self.cur_scope); if let Some(trait_ref_data) = trait_ref_data { if !self.span.filter_generated(Some(trait_ref_data.span), trait_ref.path.span) { - self.dumper.type_ref(trait_ref.path.span, trait_ref_data.normalize(&self.tcx)); + self.dumper.type_ref(trait_ref_data.lower(self.tcx)); } visit::walk_path(self, &trait_ref.path); @@ -437,9 +441,8 @@ where D: Dump let field_data = self.save_ctxt.get_field_data(field, parent_id); if let Some(mut field_data) = field_data { if !self.span.filter_generated(Some(field_data.span), field.span) { - field_data.scope = normalize_node_id(&self.tcx, field_data.scope) as u32; field_data.value = String::new(); - self.dumper.variable(field.span, field_data.normalize(&self.tcx)); + self.dumper.variable(field_data.lower(self.tcx)); } } } @@ -457,18 +460,20 @@ where D: Dump let param_sub_spans = self.span.spans_for_ty_params(full_span, (generics.ty_params.len() as isize)); for (param, param_ss) in generics.ty_params.iter().zip(param_sub_spans) { + let name = escape(self.span.snippet(param_ss)); // Append $id to name to make sure each one is unique - let name = format!("{}::{}${}", - prefix, - escape(self.span.snippet(param_ss)), - id); + let qualname = format!("{}::{}${}", + prefix, + name, + id); if !self.span.filter_generated(Some(param_ss), full_span) { - self.dumper.typedef(full_span, TypedefData { + self.dumper.typedef(TypeDefData { span: param_ss, + name: name, id: param.id, - qualname: name, + qualname: qualname, value: String::new() - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } self.visit_generics(generics); @@ -482,7 +487,7 @@ where D: Dump if let Some(fn_data) = self.save_ctxt.get_item_data(item) { down_cast_data!(fn_data, FunctionData, item.span); if !self.span.filter_generated(Some(fn_data.span), item.span) { - self.dumper.function(item.span, fn_data.clone().normalize(&self.tcx)); + self.dumper.function(fn_data.clone().lower(self.tcx)); } self.process_formals(&decl.inputs, &fn_data.qualname); @@ -504,9 +509,7 @@ where D: Dump if let Some(var_data) = self.save_ctxt.get_item_data(item) { down_cast_data!(var_data, VariableData, 
item.span); if !self.span.filter_generated(Some(var_data.span), item.span) { - let mut var_data = var_data; - var_data.scope = normalize_node_id(&self.tcx, var_data.scope) as u32; - self.dumper.variable(item.span, var_data.normalize(&self.tcx)); + self.dumper.variable(var_data.lower(self.tcx)); } } self.visit_ty(&typ); @@ -524,15 +527,15 @@ where D: Dump let sub_span = self.span.sub_span_after_keyword(span, keywords::Const); if !self.span.filter_generated(sub_span, span) { - self.dumper.variable(span, VariableData { + self.dumper.variable(VariableData { span: sub_span.expect("No span found for variable"), id: id, name: name.to_string(), qualname: qualname, value: self.span.snippet(expr.span), type_value: ty_to_string(&typ), - scope: normalize_node_id(&self.tcx, self.cur_scope) as u32 - }.normalize(&self.tcx)); + scope: self.cur_scope + }.lower(self.tcx)); } // walk type and init value @@ -540,23 +543,38 @@ where D: Dump self.visit_expr(expr); } + // FIXME tuple structs should generate tuple-specific data. fn process_struct(&mut self, item: &ast::Item, def: &ast::VariantData, ty_params: &ast::Generics) { + let name = item.ident.to_string(); let qualname = format!("::{}", self.tcx.node_path_str(item.id)); - let val = self.span.snippet(item.span); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Struct); + let val = if let ast::ItemKind::Struct(ast::VariantData::Struct(ref fields, _), _) = + item.node { + let fields_str = fields.iter() + .enumerate() + .map(|(i, f)| f.ident.map(|i| i.to_string()) + .unwrap_or(i.to_string())) + .collect::>() + .join(", "); + format!("{} {{ {} }}", name, fields_str) + } else { + String::new() + }; + if !self.span.filter_generated(sub_span, item.span) { - self.dumper.struct_data(item.span, StructData { + self.dumper.struct_data(StructData { span: sub_span.expect("No span found for struct"), id: item.id, + name: name, ctor_id: def.id(), qualname: qualname.clone(), scope: self.cur_scope, value: val - }.normalize(&self.tcx)); + }.lower(self.tcx)); } @@ -579,44 +597,59 @@ where D: Dump Some(data) => data, }; down_cast_data!(enum_data, EnumData, item.span); - let normalized = enum_data.clone().normalize(&self.tcx); - if !self.span.filter_generated(Some(normalized.span), item.span) { - self.dumper.enum_data(item.span, normalized); + if !self.span.filter_generated(Some(enum_data.span), item.span) { + self.dumper.enum_data(enum_data.clone().lower(self.tcx)); } for variant in &enum_definition.variants { - let name = &variant.node.name.name.as_str(); + let name = variant.node.name.name.to_string(); let mut qualname = enum_data.qualname.clone(); qualname.push_str("::"); - qualname.push_str(name); - let val = self.span.snippet(variant.span); + qualname.push_str(&name); match variant.node.data { - ast::VariantData::Struct(..) 
=> { + ast::VariantData::Struct(ref fields, _) => { let sub_span = self.span.span_for_first_ident(variant.span); + let fields_str = fields.iter() + .enumerate() + .map(|(i, f)| f.ident.map(|i| i.to_string()) + .unwrap_or(i.to_string())) + .collect::>() + .join(", "); + let val = format!("{}::{} {{ {} }}", enum_data.name, name, fields_str); if !self.span.filter_generated(sub_span, variant.span) { - self.dumper.struct_variant(variant.span, StructVariantData { + self.dumper.struct_variant(StructVariantData { span: sub_span.expect("No span found for struct variant"), id: variant.node.data.id(), + name: name, qualname: qualname, type_value: enum_data.qualname.clone(), value: val, scope: enum_data.scope - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } - _ => { + ref v => { let sub_span = self.span.span_for_first_ident(variant.span); + let mut val = format!("{}::{}", enum_data.name, name); + if let &ast::VariantData::Tuple(ref fields, _) = v { + val.push('('); + val.push_str(&fields.iter() + .map(|f| ty_to_string(&f.ty)) + .collect::>() + .join(", ")); + val.push(')'); + } if !self.span.filter_generated(sub_span, variant.span) { - self.dumper.tuple_variant(variant.span, TupleVariantData { + self.dumper.tuple_variant(TupleVariantData { span: sub_span.expect("No span found for tuple variant"), id: variant.node.data.id(), - name: name.to_string(), + name: name, qualname: qualname, type_value: enum_data.qualname.clone(), value: val, scope: enum_data.scope - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } } @@ -642,25 +675,25 @@ where D: Dump if let Some(ref self_ref) = impl_data.self_ref { has_self_ref = true; if !self.span.filter_generated(Some(self_ref.span), item.span) { - self.dumper.type_ref(item.span, self_ref.clone().normalize(&self.tcx)); + self.dumper.type_ref(self_ref.clone().lower(self.tcx)); } } if let Some(ref trait_ref_data) = impl_data.trait_ref { if !self.span.filter_generated(Some(trait_ref_data.span), item.span) { - self.dumper.type_ref(item.span, trait_ref_data.clone().normalize(&self.tcx)); + self.dumper.type_ref(trait_ref_data.clone().lower(self.tcx)); } visit::walk_path(self, &trait_ref.as_ref().unwrap().path); } if !self.span.filter_generated(Some(impl_data.span), item.span) { - self.dumper.impl_data(item.span, ImplData { + self.dumper.impl_data(ImplData { id: impl_data.id, span: impl_data.span, scope: impl_data.scope, trait_ref: impl_data.trait_ref.map(|d| d.ref_id.unwrap()), self_ref: impl_data.self_ref.map(|d| d.ref_id.unwrap()) - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } if !has_self_ref { @@ -677,17 +710,26 @@ where D: Dump generics: &ast::Generics, trait_refs: &ast::TyParamBounds, methods: &[ast::TraitItem]) { + let name = item.ident.to_string(); let qualname = format!("::{}", self.tcx.node_path_str(item.id)); - let val = self.span.snippet(item.span); + let mut val = name.clone(); + if !generics.lifetimes.is_empty() || !generics.ty_params.is_empty() { + val.push_str(&generics_to_string(generics)); + } + if !trait_refs.is_empty() { + val.push_str(": "); + val.push_str(&bounds_to_string(trait_refs)); + } let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Trait); if !self.span.filter_generated(sub_span, item.span) { - self.dumper.trait_data(item.span, TraitData { + self.dumper.trait_data(TraitData { span: sub_span.expect("No span found for trait"), id: item.id, + name: name, qualname: qualname.clone(), scope: self.cur_scope, value: val - }.normalize(&self.tcx)); + }.lower(self.tcx)); } // super-traits @@ -705,12 +747,12 @@ where D: Dump if 
let Some(id) = self.lookup_type_ref(trait_ref.ref_id) { let sub_span = self.span.sub_span_for_type_name(trait_ref.path.span); if !self.span.filter_generated(sub_span, trait_ref.path.span) { - self.dumper.type_ref(trait_ref.path.span, TypeRefData { + self.dumper.type_ref(TypeRefData { span: sub_span.expect("No span found for trait ref"), ref_id: Some(id), scope: self.cur_scope, qualname: String::new() - }.normalize(&self.tcx)); + }.lower(self.tcx)); } if !self.span.filter_generated(sub_span, trait_ref.path.span) { @@ -719,7 +761,7 @@ where D: Dump span: sub_span, base_id: id, deriv_id: item.id - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } } @@ -736,7 +778,7 @@ where D: Dump if let Some(mod_data) = self.save_ctxt.get_item_data(item) { down_cast_data!(mod_data, ModData, item.span); if !self.span.filter_generated(Some(mod_data.span), item.span) { - self.dumper.mod_data(mod_data.normalize(&self.tcx)); + self.dumper.mod_data(mod_data.lower(self.tcx)); } } } @@ -762,47 +804,47 @@ where D: Dump if !self.span.filter_generated(Some(vrd.span), path.span) { match ref_kind { Some(recorder::TypeRef) => { - self.dumper.type_ref(path.span, TypeRefData { + self.dumper.type_ref(TypeRefData { span: vrd.span, ref_id: Some(vrd.ref_id), scope: vrd.scope, qualname: String::new() - }.normalize(&self.tcx)); + }.lower(self.tcx)); } Some(recorder::FnRef) => { - self.dumper.function_ref(path.span, FunctionRefData { + self.dumper.function_ref(FunctionRefData { span: vrd.span, ref_id: vrd.ref_id, scope: vrd.scope - }.normalize(&self.tcx)); + }.lower(self.tcx)); } Some(recorder::ModRef) => { - self.dumper.mod_ref(path.span, ModRefData { + self.dumper.mod_ref( ModRefData { span: vrd.span, ref_id: Some(vrd.ref_id), scope: vrd.scope, qualname: String::new() - }.normalize(&self.tcx)); + }.lower(self.tcx)); } Some(recorder::VarRef) | None - => self.dumper.variable_ref(path.span, vrd.normalize(&self.tcx)) + => self.dumper.variable_ref(vrd.lower(self.tcx)) } } } Data::TypeRefData(trd) => { if !self.span.filter_generated(Some(trd.span), path.span) { - self.dumper.type_ref(path.span, trd.normalize(&self.tcx)); + self.dumper.type_ref(trd.lower(self.tcx)); } } Data::MethodCallData(mcd) => { if !self.span.filter_generated(Some(mcd.span), path.span) { - self.dumper.method_call(path.span, mcd.normalize(&self.tcx)); + self.dumper.method_call(mcd.lower(self.tcx)); } } Data::FunctionCallData(fcd) => { if !self.span.filter_generated(Some(fcd.span), path.span) { - self.dumper.function_call(path.span, fcd.normalize(&self.tcx)); + self.dumper.function_call(fcd.lower(self.tcx)); } } _ => { @@ -844,7 +886,7 @@ where D: Dump if let Some(struct_lit_data) = self.save_ctxt.get_expr_data(ex) { down_cast_data!(struct_lit_data, TypeRefData, ex.span); if !self.span.filter_generated(Some(struct_lit_data.span), ex.span) { - self.dumper.type_ref(ex.span, struct_lit_data.normalize(&self.tcx)); + self.dumper.type_ref(struct_lit_data.lower(self.tcx)); } let scope = self.save_ctxt.enclosing_scope(ex.id); @@ -854,7 +896,7 @@ where D: Dump .get_field_ref_data(field, variant, scope) { if !self.span.filter_generated(Some(field_data.span), field.ident.span) { - self.dumper.variable_ref(field.ident.span, field_data.normalize(&self.tcx)); + self.dumper.variable_ref(field_data.lower(self.tcx)); } } @@ -869,7 +911,7 @@ where D: Dump if let Some(mcd) = self.save_ctxt.get_expr_data(ex) { down_cast_data!(mcd, MethodCallData, ex.span); if !self.span.filter_generated(Some(mcd.span), ex.span) { - self.dumper.method_call(ex.span, mcd.normalize(&self.tcx)); + 
self.dumper.method_call(mcd.lower(self.tcx)); } } @@ -889,12 +931,12 @@ where D: Dump let sub_span = self.span.span_for_first_ident(span); if let Some(f) = variant.find_field_named(field.ident.name) { if !self.span.filter_generated(sub_span, span) { - self.dumper.variable_ref(span, VariableRefData { + self.dumper.variable_ref(VariableRefData { span: sub_span.expect("No span fund for var ref"), ref_id: f.did, scope: self.cur_scope, name: String::new() - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } self.visit_pat(&field.pat); @@ -913,19 +955,21 @@ where D: Dump self.visit_pat(&p); for &(id, ref p, immut, _) in &collector.collected_paths { - let value = if immut == ast::Mutability::Immutable { + let mut value = if immut == ast::Mutability::Immutable { value.to_string() } else { "".to_string() }; let types = self.tcx.node_types(); let typ = types.get(&id).map(|t| t.to_string()).unwrap_or(String::new()); + value.push_str(": "); + value.push_str(&typ); // Get the span only for the name of the variable (I hope the path // is only ever a variable name, but who knows?). let sub_span = self.span.span_for_last_ident(p.span); // Rust uses the id of the pattern for var lookups, so we'll use it too. if !self.span.filter_generated(sub_span, p.span) { - self.dumper.variable(p.span, VariableData { + self.dumper.variable(VariableData { span: sub_span.expect("No span found for variable"), id: id, name: path_to_string(p), @@ -933,7 +977,7 @@ where D: Dump value: value, type_value: typ, scope: 0 - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } } @@ -959,30 +1003,30 @@ where D: Dump && !data.imported { self.mac_defs.insert(data.callee_span); if let Some(sub_span) = self.span.span_for_macro_def_name(data.callee_span) { - self.dumper.macro_data(data.callee_span, MacroData { + self.dumper.macro_data(MacroData { span: sub_span, name: data.name.clone(), qualname: qualname.clone() - }); + }.lower(self.tcx)); } } if !self.mac_uses.contains(&data.span) { self.mac_uses.insert(data.span); if let Some(sub_span) = self.span.span_for_macro_use_name(data.span) { - self.dumper.macro_use(data.span, MacroUseData { + self.dumper.macro_use(MacroUseData { span: sub_span, name: data.name, qualname: qualname, scope: data.scope, callee_span: data.callee_span, imported: data.imported - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } } } -impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { +impl<'v, 'l, 'tcx: 'l, 'll, D: Dump +'ll> Visitor<'v> for DumpVisitor<'l, 'tcx, 'll, D> { fn visit_item(&mut self, item: &ast::Item) { use syntax::ast::ItemKind::*; self.process_macro_use(item.span, item.id); @@ -1010,13 +1054,13 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { }; if !self.span.filter_generated(sub_span, path.span) { - self.dumper.use_data(path.span, UseData { + self.dumper.use_data(UseData { span: sub_span.expect("No span found for use"), id: item.id, mod_id: mod_id, - name: ident.name.to_string(), + name: ident.to_string(), scope: self.cur_scope - }.normalize(&self.tcx)); + }.lower(self.tcx)); } self.write_sub_paths_truncated(path, true); } @@ -1032,14 +1076,14 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { } let sub_span = self.span - .sub_span_of_token(path.span, token::BinOp(token::Star)); - if !self.span.filter_generated(sub_span, path.span) { - self.dumper.use_glob(path.span, UseGlobData { + .sub_span_of_token(item.span, token::BinOp(token::Star)); + if !self.span.filter_generated(sub_span, item.span) { + 
self.dumper.use_glob(UseGlobData { span: sub_span.expect("No span found for use glob"), id: item.id, names: names, scope: self.cur_scope - }.normalize(&self.tcx)); + }.lower(self.tcx)); } self.write_sub_paths(path, true); } @@ -1076,14 +1120,14 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { }; if !self.span.filter_generated(alias_span, item.span) { - self.dumper.extern_crate(item.span, ExternCrateData { + self.dumper.extern_crate(ExternCrateData { id: item.id, - name: item.ident.name.to_string(), + name: item.ident.to_string(), crate_num: cnum, location: location, span: alias_span.expect("No span found for extern crate"), scope: self.cur_scope, - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } Fn(ref decl, _, _, _, ref ty_params, ref body) => @@ -1112,12 +1156,13 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { let value = ty_to_string(&ty); let sub_span = self.span.sub_span_after_keyword(item.span, keywords::Type); if !self.span.filter_generated(sub_span, item.span) { - self.dumper.typedef(item.span, TypedefData { + self.dumper.typedef(TypeDefData { span: sub_span.expect("No span found for typedef"), + name: item.ident.to_string(), id: item.id, qualname: qualname.clone(), value: value - }.normalize(&self.tcx)); + }.lower(self.tcx)); } self.visit_ty(&ty); @@ -1192,12 +1237,12 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { if let Some(id) = self.lookup_type_ref(t.id) { let sub_span = self.span.sub_span_for_type_name(t.span); if !self.span.filter_generated(sub_span, t.span) { - self.dumper.type_ref(t.span, TypeRefData { + self.dumper.type_ref(TypeRefData { span: sub_span.expect("No span found for type ref"), ref_id: Some(id), scope: self.cur_scope, qualname: String::new() - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } @@ -1222,7 +1267,7 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { visit::walk_expr(self, ex); } ast::ExprKind::Struct(ref path, ref fields, ref base) => { - let hir_expr = lower_expr(self.save_ctxt.lcx, ex); + let hir_expr = self.save_ctxt.tcx.map.expect_expr(ex.id); let adt = self.tcx.expr_ty(&hir_expr).ty_adt_def().unwrap(); let def = self.tcx.resolve_expr(&hir_expr); self.process_struct_lit(ex, path, fields, adt.variant_of_def(def), base) @@ -1234,25 +1279,25 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { if let Some(field_data) = self.save_ctxt.get_expr_data(ex) { down_cast_data!(field_data, VariableRefData, ex.span); if !self.span.filter_generated(Some(field_data.span), ex.span) { - self.dumper.variable_ref(ex.span, field_data.normalize(&self.tcx)); + self.dumper.variable_ref(field_data.lower(self.tcx)); } } } ast::ExprKind::TupField(ref sub_ex, idx) => { self.visit_expr(&sub_ex); - let hir_node = lower_expr(self.save_ctxt.lcx, sub_ex); + let hir_node = self.save_ctxt.tcx.map.expect_expr(sub_ex.id); let ty = &self.tcx.expr_ty_adjusted(&hir_node).sty; match *ty { ty::TyStruct(def, _) => { let sub_span = self.span.sub_span_after_token(ex.span, token::Dot); if !self.span.filter_generated(sub_span, ex.span) { - self.dumper.variable_ref(ex.span, VariableRefData { + self.dumper.variable_ref(VariableRefData { span: sub_span.expect("No span found for var ref"), ref_id: def.struct_variant().fields[idx.node].did, scope: self.cur_scope, name: String::new() - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } ty::TyTuple(_) => {} @@ -1261,7 +1306,7 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, 
D> { ty), } } - ast::ExprKind::Closure(_, ref decl, ref body) => { + ast::ExprKind::Closure(_, ref decl, ref body, _fn_decl_span) => { let mut id = String::from("$"); id.push_str(&ex.id.to_string()); self.process_formals(&decl.inputs, &id); @@ -1280,13 +1325,13 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { } ast::ExprKind::ForLoop(ref pattern, ref subexpression, ref block, _) | ast::ExprKind::WhileLet(ref pattern, ref subexpression, ref block, _) => { - let value = self.span.snippet(mk_sp(ex.span.lo, subexpression.span.hi)); + let value = self.span.snippet(subexpression.span); self.process_var_decl(pattern, value); visit::walk_expr(self, subexpression); visit::walk_block(self, block); } ast::ExprKind::IfLet(ref pattern, ref subexpression, ref block, ref opt_else) => { - let value = self.span.snippet(mk_sp(ex.span.lo, subexpression.span.hi)); + let value = self.span.snippet(subexpression.span); self.process_var_decl(pattern, value); visit::walk_expr(self, subexpression); visit::walk_block(self, block); @@ -1337,7 +1382,7 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { assert!(p.segments.len() == 1, "qualified path for local variable def in arm"); if !self.span.filter_generated(Some(p.span), p.span) { - self.dumper.variable(p.span, VariableData { + self.dumper.variable(VariableData { span: p.span, id: id, name: path_to_string(p), @@ -1345,7 +1390,7 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { value: value, type_value: String::new(), scope: 0 - }.normalize(&self.tcx)); + }.lower(self.tcx)); } } Def::Variant(..) | Def::Enum(..) | @@ -1376,7 +1421,7 @@ impl<'l, 'tcx, 'v, D: Dump + 'l> Visitor<'v> for DumpVisitor<'l, 'tcx, D> { fn visit_local(&mut self, l: &ast::Local) { self.process_macro_use(l.span, l.id); - let value = self.span.snippet(l.span); + let value = l.init.as_ref().map(|i| self.span.snippet(i.span)).unwrap_or(String::new()); self.process_var_decl(&l.pat, value); // Just walk the initialiser and type (don't want to walk the pattern again). diff --git a/src/librustc_save_analysis/external_data.rs b/src/librustc_save_analysis/external_data.rs new file mode 100644 index 0000000000..dc51c41f29 --- /dev/null +++ b/src/librustc_save_analysis/external_data.rs @@ -0,0 +1,632 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use rustc::hir::def_id::{DefId, DefIndex}; +use rustc::hir::map::Map; +use rustc::ty::TyCtxt; +use syntax::ast::{CrateNum, NodeId}; +use syntax::codemap::{Span, CodeMap}; + +use super::data; + +// FIXME: this should be pub(crate), but the current snapshot doesn't allow it yet +pub trait Lower { + type Target; + fn lower(self, tcx: TyCtxt) -> Self::Target; +} + +fn make_def_id(id: NodeId, map: &Map) -> DefId { + map.opt_local_def_id(id).unwrap_or(null_def_id()) +} + +pub fn null_def_id() -> DefId { + DefId { krate: u32::max_value(), index: DefIndex::from_u32(u32::max_value()) } +} + +#[derive(Clone, Debug, RustcEncodable)] +pub struct SpanData { + pub file_name: String, + pub byte_start: u32, + pub byte_end: u32, + /// 1-based. + pub line_start: usize, + pub line_end: usize, + /// 1-based, character offset. 
+ pub column_start: usize, + pub column_end: usize, +} + +impl SpanData { + pub fn from_span(span: Span, cm: &CodeMap) -> SpanData { + let start = cm.lookup_char_pos(span.lo); + let end = cm.lookup_char_pos(span.hi); + + SpanData { + file_name: start.file.name.clone(), + byte_start: span.lo.0, + byte_end: span.hi.0, + line_start: start.line, + line_end: end.line, + column_start: start.col.0 + 1, + column_end: end.col.0 + 1, + } + } +} + +#[derive(Debug, RustcEncodable)] +pub struct CratePreludeData { + pub crate_name: String, + pub crate_root: String, + pub external_crates: Vec, + pub span: SpanData, +} + +impl Lower for data::CratePreludeData { + type Target = CratePreludeData; + + fn lower(self, tcx: TyCtxt) -> CratePreludeData { + CratePreludeData { + crate_name: self.crate_name, + crate_root: self.crate_root, + external_crates: self.external_crates, + span: SpanData::from_span(self.span, tcx.sess.codemap()), + } + } +} + +/// Data for enum declarations. +#[derive(Clone, Debug, RustcEncodable)] +pub struct EnumData { + pub id: DefId, + pub value: String, + pub name: String, + pub qualname: String, + pub span: SpanData, + pub scope: DefId, +} + +impl Lower for data::EnumData { + type Target = EnumData; + + fn lower(self, tcx: TyCtxt) -> EnumData { + EnumData { + id: make_def_id(self.id, &tcx.map), + name: self.name, + value: self.value, + qualname: self.qualname, + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + } + } +} + +/// Data for extern crates. +#[derive(Debug, RustcEncodable)] +pub struct ExternCrateData { + pub id: DefId, + pub name: String, + pub crate_num: CrateNum, + pub location: String, + pub span: SpanData, + pub scope: DefId, +} + +impl Lower for data::ExternCrateData { + type Target = ExternCrateData; + + fn lower(self, tcx: TyCtxt) -> ExternCrateData { + ExternCrateData { + id: make_def_id(self.id, &tcx.map), + name: self.name, + crate_num: self.crate_num, + location: self.location, + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + } + } +} + +/// Data about a function call. +#[derive(Debug, RustcEncodable)] +pub struct FunctionCallData { + pub span: SpanData, + pub scope: DefId, + pub ref_id: DefId, +} + +impl Lower for data::FunctionCallData { + type Target = FunctionCallData; + + fn lower(self, tcx: TyCtxt) -> FunctionCallData { + FunctionCallData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + ref_id: self.ref_id, + } + } +} + +/// Data for all kinds of functions and methods. +#[derive(Clone, Debug, RustcEncodable)] +pub struct FunctionData { + pub id: DefId, + pub name: String, + pub qualname: String, + pub declaration: Option, + pub span: SpanData, + pub scope: DefId, + pub value: String, +} + +impl Lower for data::FunctionData { + type Target = FunctionData; + + fn lower(self, tcx: TyCtxt) -> FunctionData { + FunctionData { + id: make_def_id(self.id, &tcx.map), + name: self.name, + qualname: self.qualname, + declaration: self.declaration, + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + value: self.value, + } + } +} + +/// Data about a function call. 
+#[derive(Debug, RustcEncodable)] +pub struct FunctionRefData { + pub span: SpanData, + pub scope: DefId, + pub ref_id: DefId, +} + +impl Lower for data::FunctionRefData { + type Target = FunctionRefData; + + fn lower(self, tcx: TyCtxt) -> FunctionRefData { + FunctionRefData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + ref_id: self.ref_id, + } + } +} +#[derive(Debug, RustcEncodable)] +pub struct ImplData { + pub id: DefId, + pub span: SpanData, + pub scope: DefId, + pub trait_ref: Option, + pub self_ref: Option, +} + +impl Lower for data::ImplData { + type Target = ImplData; + + fn lower(self, tcx: TyCtxt) -> ImplData { + ImplData { + id: make_def_id(self.id, &tcx.map), + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + trait_ref: self.trait_ref, + self_ref: self.self_ref, + } + } +} + +#[derive(Debug, RustcEncodable)] +pub struct InheritanceData { + pub span: SpanData, + pub base_id: DefId, + pub deriv_id: DefId +} + +impl Lower for data::InheritanceData { + type Target = InheritanceData; + + fn lower(self, tcx: TyCtxt) -> InheritanceData { + InheritanceData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + base_id: self.base_id, + deriv_id: make_def_id(self.deriv_id, &tcx.map) + } + } +} + +/// Data about a macro declaration. +#[derive(Debug, RustcEncodable)] +pub struct MacroData { + pub span: SpanData, + pub name: String, + pub qualname: String, +} + +impl Lower for data::MacroData { + type Target = MacroData; + + fn lower(self, tcx: TyCtxt) -> MacroData { + MacroData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + name: self.name, + qualname: self.qualname, + } + } +} + +/// Data about a macro use. +#[derive(Debug, RustcEncodable)] +pub struct MacroUseData { + pub span: SpanData, + pub name: String, + pub qualname: String, + // Because macro expansion happens before ref-ids are determined, + // we use the callee span to reference the associated macro definition. + pub callee_span: SpanData, + pub scope: DefId, +} + +impl Lower for data::MacroUseData { + type Target = MacroUseData; + + fn lower(self, tcx: TyCtxt) -> MacroUseData { + MacroUseData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + name: self.name, + qualname: self.qualname, + callee_span: SpanData::from_span(self.callee_span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + } + } +} + +/// Data about a method call. +#[derive(Debug, RustcEncodable)] +pub struct MethodCallData { + pub span: SpanData, + pub scope: DefId, + pub ref_id: Option, + pub decl_id: Option, +} + +impl Lower for data::MethodCallData { + type Target = MethodCallData; + + fn lower(self, tcx: TyCtxt) -> MethodCallData { + MethodCallData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + ref_id: self.ref_id, + decl_id: self.decl_id, + } + } +} + +/// Data for method declarations (methods with a body are treated as functions). 
+#[derive(Clone, Debug, RustcEncodable)] +pub struct MethodData { + pub id: DefId, + pub name: String, + pub qualname: String, + pub span: SpanData, + pub scope: DefId, + pub value: String, +} + +impl Lower for data::MethodData { + type Target = MethodData; + + fn lower(self, tcx: TyCtxt) -> MethodData { + MethodData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + name: self.name, + scope: make_def_id(self.scope, &tcx.map), + id: make_def_id(self.id, &tcx.map), + qualname: self.qualname, + value: self.value, + } + } +} + +/// Data for modules. +#[derive(Debug, RustcEncodable)] +pub struct ModData { + pub id: DefId, + pub name: String, + pub qualname: String, + pub span: SpanData, + pub scope: DefId, + pub filename: String, +} + +impl Lower for data::ModData { + type Target = ModData; + + fn lower(self, tcx: TyCtxt) -> ModData { + ModData { + id: make_def_id(self.id, &tcx.map), + name: self.name, + qualname: self.qualname, + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + filename: self.filename, + } + } +} + +/// Data for a reference to a module. +#[derive(Debug, RustcEncodable)] +pub struct ModRefData { + pub span: SpanData, + pub scope: DefId, + pub ref_id: Option, + pub qualname: String +} + +impl Lower for data::ModRefData { + type Target = ModRefData; + + fn lower(self, tcx: TyCtxt) -> ModRefData { + ModRefData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + ref_id: self.ref_id, + qualname: self.qualname, + } + } +} + +#[derive(Debug, RustcEncodable)] +pub struct StructData { + pub span: SpanData, + pub name: String, + pub id: DefId, + pub ctor_id: DefId, + pub qualname: String, + pub scope: DefId, + pub value: String +} + +impl Lower for data::StructData { + type Target = StructData; + + fn lower(self, tcx: TyCtxt) -> StructData { + StructData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + name: self.name, + id: make_def_id(self.id, &tcx.map), + ctor_id: make_def_id(self.ctor_id, &tcx.map), + qualname: self.qualname, + scope: make_def_id(self.scope, &tcx.map), + value: self.value + } + } +} + +#[derive(Debug, RustcEncodable)] +pub struct StructVariantData { + pub span: SpanData, + pub name: String, + pub id: DefId, + pub qualname: String, + pub type_value: String, + pub value: String, + pub scope: DefId +} + +impl Lower for data::StructVariantData { + type Target = StructVariantData; + + fn lower(self, tcx: TyCtxt) -> StructVariantData { + StructVariantData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + name: self.name, + id: make_def_id(self.id, &tcx.map), + qualname: self.qualname, + type_value: self.type_value, + value: self.value, + scope: make_def_id(self.scope, &tcx.map), + } + } +} + +#[derive(Debug, RustcEncodable)] +pub struct TraitData { + pub span: SpanData, + pub name: String, + pub id: DefId, + pub qualname: String, + pub scope: DefId, + pub value: String +} + +impl Lower for data::TraitData { + type Target = TraitData; + + fn lower(self, tcx: TyCtxt) -> TraitData { + TraitData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + name: self.name, + id: make_def_id(self.id, &tcx.map), + qualname: self.qualname, + scope: make_def_id(self.scope, &tcx.map), + value: self.value, + } + } +} + +#[derive(Debug, RustcEncodable)] +pub struct TupleVariantData { + pub span: SpanData, + pub id: DefId, + pub name: String, + pub qualname: String, + pub type_value: String, + pub value: String, + pub scope: DefId, 
+} + +impl Lower for data::TupleVariantData { + type Target = TupleVariantData; + + fn lower(self, tcx: TyCtxt) -> TupleVariantData { + TupleVariantData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + id: make_def_id(self.id, &tcx.map), + name: self.name, + qualname: self.qualname, + type_value: self.type_value, + value: self.value, + scope: make_def_id(self.scope, &tcx.map), + } + } +} + +/// Data for a typedef. +#[derive(Debug, RustcEncodable)] +pub struct TypeDefData { + pub id: DefId, + pub name: String, + pub span: SpanData, + pub qualname: String, + pub value: String, +} + +impl Lower for data::TypeDefData { + type Target = TypeDefData; + + fn lower(self, tcx: TyCtxt) -> TypeDefData { + TypeDefData { + id: make_def_id(self.id, &tcx.map), + name: self.name, + span: SpanData::from_span(self.span, tcx.sess.codemap()), + qualname: self.qualname, + value: self.value, + } + } +} + +/// Data for a reference to a type or trait. +#[derive(Clone, Debug, RustcEncodable)] +pub struct TypeRefData { + pub span: SpanData, + pub scope: DefId, + pub ref_id: Option, + pub qualname: String, +} + +impl Lower for data::TypeRefData { + type Target = TypeRefData; + + fn lower(self, tcx: TyCtxt) -> TypeRefData { + TypeRefData { + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + ref_id: self.ref_id, + qualname: self.qualname, + } + } +} + +#[derive(Debug, RustcEncodable)] +pub struct UseData { + pub id: DefId, + pub span: SpanData, + pub name: String, + pub mod_id: Option, + pub scope: DefId +} + +impl Lower for data::UseData { + type Target = UseData; + + fn lower(self, tcx: TyCtxt) -> UseData { + UseData { + id: make_def_id(self.id, &tcx.map), + span: SpanData::from_span(self.span, tcx.sess.codemap()), + name: self.name, + mod_id: self.mod_id, + scope: make_def_id(self.scope, &tcx.map), + } + } +} + +#[derive(Debug, RustcEncodable)] +pub struct UseGlobData { + pub id: DefId, + pub span: SpanData, + pub names: Vec, + pub scope: DefId +} + +impl Lower for data::UseGlobData { + type Target = UseGlobData; + + fn lower(self, tcx: TyCtxt) -> UseGlobData { + UseGlobData { + id: make_def_id(self.id, &tcx.map), + span: SpanData::from_span(self.span, tcx.sess.codemap()), + names: self.names, + scope: make_def_id(self.scope, &tcx.map), + } + } +} + +/// Data for local and global variables (consts and statics). +#[derive(Debug, RustcEncodable)] +pub struct VariableData { + pub id: DefId, + pub name: String, + pub qualname: String, + pub span: SpanData, + pub scope: DefId, + pub value: String, + pub type_value: String, +} + +impl Lower for data::VariableData { + type Target = VariableData; + + fn lower(self, tcx: TyCtxt) -> VariableData { + VariableData { + id: make_def_id(self.id, &tcx.map), + name: self.name, + qualname: self.qualname, + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + value: self.value, + type_value: self.type_value, + } + } +} + +/// Data for the use of some item (e.g., the use of a local variable, which +/// will refer to that variables declaration (by ref_id)). 
+#[derive(Debug, RustcEncodable)] +pub struct VariableRefData { + pub name: String, + pub span: SpanData, + pub scope: DefId, + pub ref_id: DefId, +} + +impl Lower for data::VariableRefData { + type Target = VariableRefData; + + fn lower(self, tcx: TyCtxt) -> VariableRefData { + VariableRefData { + name: self.name, + span: SpanData::from_span(self.span, tcx.sess.codemap()), + scope: make_def_id(self.scope, &tcx.map), + ref_id: self.ref_id, + } + } +} diff --git a/src/librustc_save_analysis/json_dumper.rs b/src/librustc_save_analysis/json_dumper.rs new file mode 100644 index 0000000000..610a9ac2ad --- /dev/null +++ b/src/librustc_save_analysis/json_dumper.rs @@ -0,0 +1,426 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::io::Write; + +use rustc::hir::def_id::DefId; +use rustc_serialize::json::as_json; + +use super::external_data::*; +use super::dump::Dump; + +pub struct JsonDumper<'b, W: Write + 'b> { + output: &'b mut W, + result: Analysis, +} + +impl<'b, W: Write> JsonDumper<'b, W> { + pub fn new(writer: &'b mut W) -> JsonDumper<'b, W> { + JsonDumper { output: writer, result: Analysis::new() } + } +} + +impl<'b, W: Write> Drop for JsonDumper<'b, W> { + fn drop(&mut self) { + if let Err(_) = write!(self.output, "{}", as_json(&self.result)) { + error!("Error writing output"); + } + } +} + +macro_rules! impl_fn { + ($fn_name: ident, $data_type: ident, $bucket: ident) => { + fn $fn_name(&mut self, data: $data_type) { + self.result.$bucket.push(From::from(data)); + } + } +} + +impl<'b, W: Write + 'b> Dump for JsonDumper<'b, W> { + fn crate_prelude(&mut self, data: CratePreludeData) { + self.result.prelude = Some(data) + } + + impl_fn!(extern_crate, ExternCrateData, imports); + impl_fn!(use_data, UseData, imports); + impl_fn!(use_glob, UseGlobData, imports); + + impl_fn!(enum_data, EnumData, defs); + impl_fn!(tuple_variant, TupleVariantData, defs); + impl_fn!(struct_variant, StructVariantData, defs); + impl_fn!(struct_data, StructData, defs); + impl_fn!(trait_data, TraitData, defs); + impl_fn!(function, FunctionData, defs); + impl_fn!(method, MethodData, defs); + impl_fn!(macro_data, MacroData, defs); + impl_fn!(mod_data, ModData, defs); + impl_fn!(typedef, TypeDefData, defs); + impl_fn!(variable, VariableData, defs); + + impl_fn!(function_ref, FunctionRefData, refs); + impl_fn!(function_call, FunctionCallData, refs); + impl_fn!(method_call, MethodCallData, refs); + impl_fn!(mod_ref, ModRefData, refs); + impl_fn!(type_ref, TypeRefData, refs); + impl_fn!(variable_ref, VariableRefData, refs); + + impl_fn!(macro_use, MacroUseData, macro_refs); + + // FIXME store this instead of throwing it away. + fn impl_data(&mut self, _data: ImplData) {} + fn inheritance(&mut self, _data: InheritanceData) {} +} + +// FIXME do we want to change ExternalData to this mode? It will break DXR. +// FIXME methods. The defs have information about possible overriding and the +// refs have decl information (e.g., a trait method where we know the required +// method, but not the supplied method). In both cases, we are currently +// ignoring it. 
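// [Editorial aside -- a standalone, hedged sketch, not part of the upstream
// patch or of json_dumper.rs. It illustrates the shape used by JsonDumper
// above: data is pushed into typed buckets while the crate is visited, and the
// whole result is serialised exactly once, when the dumper is dropped. The
// names MiniAnalysis and MiniDumper are hypothetical and exist only for this
// illustration; the real dumper serialises its Analysis struct (below) with
// rustc_serialize::json::as_json.]

use std::io::Write;

#[derive(Default, Debug)]
struct MiniAnalysis {
    defs: Vec<String>,
    refs: Vec<String>,
}

struct MiniDumper<'a, W: Write + 'a> {
    output: &'a mut W,
    result: MiniAnalysis,
}

impl<'a, W: Write> MiniDumper<'a, W> {
    // Mirrors the impl_fn!-generated methods above: each call only buffers data.
    fn def(&mut self, d: &str) { self.result.defs.push(d.to_owned()); }
    fn reference(&mut self, r: &str) { self.result.refs.push(r.to_owned()); }
}

impl<'a, W: Write> Drop for MiniDumper<'a, W> {
    // Flush-on-Drop, as in JsonDumper: a single write when the dumper goes away.
    fn drop(&mut self) {
        let _ = writeln!(self.output, "{:?}", self.result);
    }
}

fn main() {
    let mut out = Vec::new();
    {
        let mut dumper = MiniDumper { output: &mut out, result: MiniAnalysis::default() };
        dumper.def("fn foo");
        dumper.reference("foo");
    } // dumper dropped here; the buffered analysis is written in one go
    print!("{}", String::from_utf8(out).unwrap());
}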
+
+#[derive(Debug, RustcEncodable)]
+struct Analysis {
+    prelude: Option<CratePreludeData>,
+    imports: Vec<Import>,
+    defs: Vec<Def>,
+    refs: Vec<Ref>,
+    macro_refs: Vec<MacroRef>,
+}
+
+impl Analysis {
+    fn new() -> Analysis {
+        Analysis {
+            prelude: None,
+            imports: vec![],
+            defs: vec![],
+            refs: vec![],
+            macro_refs: vec![],
+        }
+    }
+}
+
+// DefId::index is a newtype and so the JSON serialisation is ugly. Therefore
+// we use our own Id which is the same, but without the newtype.
+#[derive(Debug, RustcEncodable)]
+struct Id {
+    krate: u32,
+    index: u32,
+}
+
+impl From<DefId> for Id {
+    fn from(id: DefId) -> Id {
+        Id {
+            krate: id.krate,
+            index: id.index.as_u32(),
+        }
+    }
+}
+
+#[derive(Debug, RustcEncodable)]
+struct Import {
+    kind: ImportKind,
+    id: Id,
+    span: SpanData,
+    name: String,
+    value: String,
+}
+
+#[derive(Debug, RustcEncodable)]
+enum ImportKind {
+    ExternCrate,
+    Use,
+    GlobUse,
+}
+
+impl From<ExternCrateData> for Import {
+    fn from(data: ExternCrateData) -> Import {
+        Import {
+            kind: ImportKind::ExternCrate,
+            id: From::from(data.id),
+            span: data.span,
+            name: data.name,
+            value: String::new(),
+        }
+    }
+}
+impl From<UseData> for Import {
+    fn from(data: UseData) -> Import {
+        Import {
+            kind: ImportKind::Use,
+            id: From::from(data.id),
+            span: data.span,
+            name: data.name,
+            value: String::new(),
+        }
+    }
+}
+impl From<UseGlobData> for Import {
+    fn from(data: UseGlobData) -> Import {
+        Import {
+            kind: ImportKind::GlobUse,
+            id: From::from(data.id),
+            span: data.span,
+            name: "*".to_owned(),
+            value: data.names.join(", "),
+        }
+    }
+}
+
+#[derive(Debug, RustcEncodable)]
+struct Def {
+    kind: DefKind,
+    id: Id,
+    span: SpanData,
+    name: String,
+    qualname: String,
+    value: String,
+}
+
+#[derive(Debug, RustcEncodable)]
+enum DefKind {
+    // value = variant names
+    Enum,
+    // value = enum name + variant name + types
+    Tuple,
+    // value = [enum name +] name + fields
+    Struct,
+    // value = signature
+    Trait,
+    // value = type + generics
+    Function,
+    // No id, no value.
+ Macro, + // value = file_name + Mod, + // value = aliased type + Type, + // value = type and init expression + Variable, +} + +impl From for Def { + fn from(data: EnumData) -> Def { + Def { + kind: DefKind::Enum, + id: From::from(data.id), + span: data.span, + name: data.name, + qualname: data.qualname, + value: data.value, + } + } +} + +impl From for Def { + fn from(data: TupleVariantData) -> Def { + Def { + kind: DefKind::Tuple, + id: From::from(data.id), + span: data.span, + name: data.name, + qualname: data.qualname, + value: data.value, + } + } +} +impl From for Def { + fn from(data: StructVariantData) -> Def { + Def { + kind: DefKind::Struct, + id: From::from(data.id), + span: data.span, + name: data.name, + qualname: data.qualname, + value: data.value, + } + } +} +impl From for Def { + fn from(data: StructData) -> Def { + Def { + kind: DefKind::Struct, + id: From::from(data.id), + span: data.span, + name: data.name, + qualname: data.qualname, + value: data.value, + } + } +} +impl From for Def { + fn from(data: TraitData) -> Def { + Def { + kind: DefKind::Trait, + id: From::from(data.id), + span: data.span, + name: data.name, + qualname: data.qualname, + value: data.value, + } + } +} +impl From for Def { + fn from(data: FunctionData) -> Def { + Def { + kind: DefKind::Function, + id: From::from(data.id), + span: data.span, + name: data.name, + qualname: data.qualname, + value: data.value, + } + } +} +impl From for Def { + fn from(data: MethodData) -> Def { + Def { + kind: DefKind::Function, + id: From::from(data.id), + span: data.span, + name: data.name, + qualname: data.qualname, + value: data.value, + } + } +} +impl From for Def { + fn from(data: MacroData) -> Def { + Def { + kind: DefKind::Macro, + id: From::from(null_def_id()), + span: data.span, + name: data.name, + qualname: data.qualname, + value: String::new(), + } + } +} +impl From for Def { + fn from(data:ModData) -> Def { + Def { + kind: DefKind::Mod, + id: From::from(data.id), + span: data.span, + name: data.name, + qualname: data.qualname, + value: data.filename, + } + } +} +impl From for Def { + fn from(data: TypeDefData) -> Def { + Def { + kind: DefKind::Type, + id: From::from(data.id), + span: data.span, + name: data.name, + qualname: data.qualname, + value: data.value, + } + } +} +impl From for Def { + fn from(data: VariableData) -> Def { + Def { + kind: DefKind::Variable, + id: From::from(data.id), + span: data.span, + name: data.name, + qualname: data.qualname, + value: data.value, + } + } +} + +#[derive(Debug, RustcEncodable)] +enum RefKind { + Function, + Mod, + Type, + Variable, +} + +#[derive(Debug, RustcEncodable)] +struct Ref { + kind: RefKind, + span: SpanData, + ref_id: Id, +} + +impl From for Ref { + fn from(data: FunctionRefData) -> Ref { + Ref { + kind: RefKind::Function, + span: data.span, + ref_id: From::from(data.ref_id), + } + } +} +impl From for Ref { + fn from(data: FunctionCallData) -> Ref { + Ref { + kind: RefKind::Function, + span: data.span, + ref_id: From::from(data.ref_id), + } + } +} +impl From for Ref { + fn from(data: MethodCallData) -> Ref { + Ref { + kind: RefKind::Function, + span: data.span, + ref_id: From::from(data.ref_id.or(data.decl_id).unwrap_or(null_def_id())), + } + } +} +impl From for Ref { + fn from(data: ModRefData) -> Ref { + Ref { + kind: RefKind::Mod, + span: data.span, + ref_id: From::from(data.ref_id.unwrap_or(null_def_id())), + } + } +} +impl From for Ref { + fn from(data: TypeRefData) -> Ref { + Ref { + kind: RefKind::Type, + span: data.span, + ref_id: 
From::from(data.ref_id.unwrap_or(null_def_id())), + } + } +} +impl From for Ref { + fn from(data: VariableRefData) -> Ref { + Ref { + kind: RefKind::Variable, + span: data.span, + ref_id: From::from(data.ref_id), + } + } +} + +#[derive(Debug, RustcEncodable)] +struct MacroRef { + span: SpanData, + qualname: String, + callee_span: SpanData, +} + +impl From for MacroRef { + fn from(data: MacroUseData) -> MacroRef { + MacroRef { + span: data.span, + qualname: data.qualname, + callee_span: data.callee_span, + } + } +} diff --git a/src/librustc_save_analysis/lib.rs b/src/librustc_save_analysis/lib.rs index 4596398c31..8c00a56999 100644 --- a/src/librustc_save_analysis/lib.rs +++ b/src/librustc_save_analysis/lib.rs @@ -26,8 +26,18 @@ #[macro_use] extern crate log; #[macro_use] extern crate syntax; +extern crate serialize as rustc_serialize; -use rustc::hir::{self, lowering}; +mod csv_dumper; +mod json_dumper; +mod data; +mod dump; +mod dump_visitor; +pub mod external_data; +#[macro_use] +pub mod span_utils; + +use rustc::hir; use rustc::hir::map::NodeItem; use rustc::hir::def::Def; use rustc::hir::def_id::DefId; @@ -42,16 +52,10 @@ use syntax::ast::{self, NodeId, PatKind}; use syntax::codemap::*; use syntax::parse::token::{self, keywords}; use syntax::visit::{self, Visitor}; -use syntax::print::pprust::ty_to_string; - -mod csv_dumper; -mod data; -mod dump; -mod dump_visitor; -#[macro_use] -pub mod span_utils; +use syntax::print::pprust::{ty_to_string, arg_to_string}; pub use self::csv_dumper::CsvDumper; +pub use self::json_dumper::JsonDumper; pub use self::data::*; pub use self::dump::Dump; pub use self::dump_visitor::DumpVisitor; @@ -71,9 +75,8 @@ pub mod recorder { } pub struct SaveContext<'l, 'tcx: 'l> { - tcx: &'l TyCtxt<'tcx>, - lcx: &'l lowering::LoweringContext<'l>, - span_utils: SpanUtils<'l>, + tcx: TyCtxt<'l, 'tcx, 'tcx>, + span_utils: SpanUtils<'tcx>, } macro_rules! option_try( @@ -81,20 +84,16 @@ macro_rules! option_try( ); impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { - pub fn new(tcx: &'l TyCtxt<'tcx>, - lcx: &'l lowering::LoweringContext<'l>) - -> SaveContext<'l, 'tcx> { + pub fn new(tcx: TyCtxt<'l, 'tcx, 'tcx>) -> SaveContext<'l, 'tcx> { let span_utils = SpanUtils::new(&tcx.sess); - SaveContext::from_span_utils(tcx, lcx, span_utils) + SaveContext::from_span_utils(tcx, span_utils) } - pub fn from_span_utils(tcx: &'l TyCtxt<'tcx>, - lcx: &'l lowering::LoweringContext<'l>, - span_utils: SpanUtils<'l>) + pub fn from_span_utils(tcx: TyCtxt<'l, 'tcx, 'tcx>, + span_utils: SpanUtils<'tcx>) -> SaveContext<'l, 'tcx> { SaveContext { tcx: tcx, - lcx: lcx, span_utils: span_utils, } } @@ -104,9 +103,17 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { let mut result = Vec::new(); for n in self.tcx.sess.cstore.crates() { + let span = match self.tcx.sess.cstore.extern_crate(n) { + Some(ref c) => c.span, + None => { + debug!("Skipping crate {}, no data", n); + continue; + } + }; result.push(CrateData { name: (&self.tcx.sess.cstore.crate_name(n)[..]).to_owned(), number: n, + span: span, }); } @@ -115,11 +122,13 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { pub fn get_item_data(&self, item: &ast::Item) -> Option { match item.node { - ast::ItemKind::Fn(..) 
=> { + ast::ItemKind::Fn(ref decl, _, _, _, ref generics, _) => { let name = self.tcx.node_path_str(item.id); let qualname = format!("::{}", name); let sub_span = self.span_utils.sub_span_after_keyword(item.span, keywords::Fn); filter!(self.span_utils, sub_span, item.span, None); + + Some(Data::FunctionData(FunctionData { id: item.id, name: name, @@ -127,6 +136,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { declaration: None, span: sub_span.unwrap(), scope: self.enclosing_scope(item.id), + value: make_signature(decl, generics), })) } ast::ItemKind::Static(ref typ, mt, ref expr) => { @@ -183,16 +193,22 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { filename: filename, })) } - ast::ItemKind::Enum(..) => { - let enum_name = format!("::{}", self.tcx.node_path_str(item.id)); - let val = self.span_utils.snippet(item.span); + ast::ItemKind::Enum(ref def, _) => { + let name = item.ident.to_string(); + let qualname = format!("::{}", self.tcx.node_path_str(item.id)); let sub_span = self.span_utils.sub_span_after_keyword(item.span, keywords::Enum); filter!(self.span_utils, sub_span, item.span, None); + let variants_str = def.variants.iter() + .map(|v| v.node.name.to_string()) + .collect::>() + .join(", "); + let val = format!("{}::{{{}}}", name, variants_str); Some(Data::EnumData(EnumData { id: item.id, + name: name, value: val, span: sub_span.unwrap(), - qualname: enum_name, + qualname: qualname, scope: self.enclosing_scope(item.id), })) } @@ -346,6 +362,8 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { declaration: decl_id, span: sub_span.unwrap(), scope: self.enclosing_scope(id), + // FIXME you get better data here by using the visitor. + value: String::new(), }) } @@ -367,14 +385,14 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { } pub fn get_expr_data(&self, expr: &ast::Expr) -> Option { - let hir_node = lowering::lower_expr(self.lcx, expr); + let hir_node = self.tcx.map.expect_expr(expr.id); let ty = self.tcx.expr_ty_adjusted_opt(&hir_node); if ty.is_none() || ty.unwrap().sty == ty::TyError { return None; } match expr.node { ast::ExprKind::Field(ref sub_ex, ident) => { - let hir_node = lowering::lower_expr(self.lcx, sub_ex); + let hir_node = self.tcx.map.expect_expr(sub_ex.id); match self.tcx.expr_ty_adjusted(&hir_node).sty { ty::TyStruct(def, _) => { let f = def.struct_variant().field_named(ident.node.name); @@ -394,7 +412,7 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { } } ast::ExprKind::Struct(ref path, _, _) => { - let hir_node = lowering::lower_expr(self.lcx, expr); + let hir_node = self.tcx.map.expect_expr(expr.id); match self.tcx.expr_ty_adjusted(&hir_node).sty { ty::TyStruct(def, _) => { let sub_span = self.span_utils.span_for_last_ident(path.span); @@ -630,6 +648,35 @@ impl<'l, 'tcx: 'l> SaveContext<'l, 'tcx> { } } +fn make_signature(decl: &ast::FnDecl, generics: &ast::Generics) -> String { + let mut sig = String::new(); + if !generics.lifetimes.is_empty() || !generics.ty_params.is_empty() { + sig.push('<'); + sig.push_str(&generics.lifetimes.iter() + .map(|l| l.lifetime.name.to_string()) + .collect::>() + .join(", ")); + if !generics.lifetimes.is_empty() { + sig.push_str(", "); + } + sig.push_str(&generics.ty_params.iter() + .map(|l| l.ident.to_string()) + .collect::>() + .join(", ")); + sig.push_str("> "); + } + sig.push('('); + sig.push_str(&decl.inputs.iter().map(arg_to_string).collect::>().join(", ")); + sig.push(')'); + match decl.output { + ast::FunctionRetTy::None(_) => sig.push_str(" -> !"), + ast::FunctionRetTy::Default(_) => {} + ast::FunctionRetTy::Ty(ref t) => 
sig.push_str(&format!(" -> {}", ty_to_string(t))), + } + + sig +} + // An AST visitor for collecting paths from patterns. struct PathCollector { // The Row field identifies the kind of pattern. @@ -677,12 +724,27 @@ impl<'v> Visitor<'v> for PathCollector { } } -pub fn process_crate<'l, 'tcx>(tcx: &'l TyCtxt<'tcx>, - lcx: &'l lowering::LoweringContext<'l>, +#[derive(Clone, Copy, Debug)] +pub enum Format { + Csv, + Json, +} + +impl Format { + fn extension(&self) -> &'static str { + match *self { + Format::Csv => ".csv", + Format::Json => ".json", + } + } +} + +pub fn process_crate<'l, 'tcx>(tcx: TyCtxt<'l, 'tcx, 'tcx>, krate: &ast::Crate, - analysis: &ty::CrateAnalysis, + analysis: &'l ty::CrateAnalysis<'l>, cratename: &str, - odir: Option<&Path>) { + odir: Option<&Path>, + format: Format) { let _ignore = tcx.dep_graph.in_ignore(); assert!(analysis.glob_map.is_some()); @@ -690,11 +752,11 @@ pub fn process_crate<'l, 'tcx>(tcx: &'l TyCtxt<'tcx>, info!("Dumping crate {}", cratename); // find a path to dump our data to - let mut root_path = match env::var_os("DXR_RUST_TEMP_FOLDER") { + let mut root_path = match env::var_os("RUST_SAVE_ANALYSIS_FOLDER") { Some(val) => PathBuf::from(val), None => match odir { - Some(val) => val.join("dxr"), - None => PathBuf::from("dxr-temp"), + Some(val) => val.join("save-analysis"), + None => PathBuf::from("save-analysis-temp"), }, }; @@ -718,21 +780,31 @@ pub fn process_crate<'l, 'tcx>(tcx: &'l TyCtxt<'tcx>, }; out_name.push_str(&cratename); out_name.push_str(&tcx.sess.opts.cg.extra_filename); - out_name.push_str(".csv"); + out_name.push_str(format.extension()); root_path.push(&out_name); let mut output_file = File::create(&root_path).unwrap_or_else(|e| { let disp = root_path.display(); tcx.sess.fatal(&format!("Could not open {}: {}", disp, e)); }); root_path.pop(); + let output = &mut output_file; - let utils = SpanUtils::new(&tcx.sess); - let mut dumper = CsvDumper::new(&mut output_file, utils); - let mut visitor = DumpVisitor::new(tcx, lcx, analysis, &mut dumper); - // FIXME: we don't write anything! + let save_ctxt = SaveContext::new(tcx); - visitor.dump_crate_info(cratename, krate); - visit::walk_crate(&mut visitor, krate); + macro_rules! dump { + ($new_dumper: expr) => {{ + let mut dumper = $new_dumper; + let mut visitor = DumpVisitor::new(tcx, save_ctxt, analysis, &mut dumper); + + visitor.dump_crate_info(cratename, krate); + visit::walk_crate(&mut visitor, krate); + }} + } + + match format { + Format::Csv => dump!(CsvDumper::new(output)), + Format::Json => dump!(JsonDumper::new(output)), + } } // Utility functions for the module. diff --git a/src/librustc_save_analysis/span_utils.rs b/src/librustc_save_analysis/span_utils.rs index f410d42817..3028fb1bfa 100644 --- a/src/librustc_save_analysis/span_utils.rs +++ b/src/librustc_save_analysis/span_utils.rs @@ -18,14 +18,14 @@ use std::path::Path; use syntax::ast; use syntax::codemap::*; -use syntax::parse::lexer; -use syntax::parse::lexer::{Reader, StringReader}; -use syntax::parse::token; -use syntax::parse::token::{keywords, Token}; +use syntax::parse::lexer::{self, Reader, StringReader}; +use syntax::parse::token::{self, keywords, Token}; #[derive(Clone)] pub struct SpanUtils<'a> { pub sess: &'a Session, + // FIXME given that we clone SpanUtils all over the place, this err_count is + // probably useless and any logic relying on it is bogus. pub err_count: Cell, } @@ -46,23 +46,6 @@ impl<'a> SpanUtils<'a> { } } - // Standard string for extents/location. 
- #[rustfmt_skip] - pub fn extent_str(&self, span: Span) -> String { - let lo_loc = self.sess.codemap().lookup_char_pos(span.lo); - let hi_loc = self.sess.codemap().lookup_char_pos(span.hi); - let lo_pos = self.sess.codemap().bytepos_to_file_charpos(span.lo); - let hi_pos = self.sess.codemap().bytepos_to_file_charpos(span.hi); - let lo_pos_byte = self.sess.codemap().lookup_byte_offset(span.lo).pos; - let hi_pos_byte = self.sess.codemap().lookup_byte_offset(span.hi).pos; - - format!("file_name,\"{}\",file_line,{},file_col,{},extent_start,{},extent_start_bytes,{},\ - file_line_end,{},file_col_end,{},extent_end,{},extent_end_bytes,{}", - SpanUtils::make_path_string(&lo_loc.file.name), - lo_loc.line, lo_loc.col.to_usize(), lo_pos.to_usize(), lo_pos_byte.to_usize(), - hi_loc.line, hi_loc.col.to_usize(), hi_pos.to_usize(), hi_pos_byte.to_usize()) - } - // sub_span starts at span.lo, so we need to adjust the positions etc. // If sub_span is None, we don't need to adjust. pub fn make_sub_span(&self, span: Span, sub_span: Option) -> Option { diff --git a/src/librustc_trans/Cargo.toml b/src/librustc_trans/Cargo.toml index ccb430fbb7..9a0580472b 100644 --- a/src/librustc_trans/Cargo.toml +++ b/src/librustc_trans/Cargo.toml @@ -7,6 +7,7 @@ version = "0.0.0" name = "rustc_trans" path = "lib.rs" crate-type = ["dylib"] +test = false [dependencies] arena = { path = "../libarena" } diff --git a/src/librustc_trans/_match.rs b/src/librustc_trans/_match.rs index d8b8f9f080..dbc277f243 100644 --- a/src/librustc_trans/_match.rs +++ b/src/librustc_trans/_match.rs @@ -189,12 +189,11 @@ use self::Opt::*; use self::FailureHandler::*; use llvm::{ValueRef, BasicBlockRef}; -use rustc_const_eval::check_match::{self, StaticInliner}; +use rustc_const_eval::check_match::{self, Constructor, StaticInliner}; use rustc_const_eval::{compare_lit_exprs, eval_const_expr}; use rustc::hir::def::{Def, DefMap}; use rustc::hir::def_id::DefId; use middle::expr_use_visitor as euv; -use rustc::infer; use middle::lang_items::StrEqFnLangItem; use middle::mem_categorization as mc; use middle::mem_categorization::Categorization; @@ -239,7 +238,7 @@ use syntax::ptr::P; struct ConstantExpr<'a>(&'a hir::Expr); impl<'a> ConstantExpr<'a> { - fn eq(self, other: ConstantExpr<'a>, tcx: &TyCtxt) -> bool { + fn eq<'b, 'tcx>(self, other: ConstantExpr<'a>, tcx: TyCtxt<'b, 'tcx, 'tcx>) -> bool { match compare_lit_exprs(tcx, self.0, other.0) { Some(result) => result == Ordering::Equal, None => bug!("compare_list_exprs: type mismatch"), @@ -259,8 +258,8 @@ enum Opt<'a, 'tcx> { DebugLoc), } -impl<'a, 'tcx> Opt<'a, 'tcx> { - fn eq(&self, other: &Opt<'a, 'tcx>, tcx: &TyCtxt<'tcx>) -> bool { +impl<'a, 'b, 'tcx> Opt<'a, 'tcx> { + fn eq(&self, other: &Opt<'a, 'tcx>, tcx: TyCtxt<'b, 'tcx, 'tcx>) -> bool { match (self, other) { (&ConstantValue(a, _), &ConstantValue(b, _)) => a.eq(b, tcx), (&ConstantRange(a1, a2, _), &ConstantRange(b1, b2, _)) => { @@ -483,7 +482,7 @@ fn expand_nested_bindings<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>, loop { pat = match pat.node { PatKind::Ident(_, ref path, Some(ref inner)) => { - bound_ptrs.push((path.node.name, val.val)); + bound_ptrs.push((path.node, val.val)); &inner }, _ => break @@ -521,7 +520,7 @@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, match this.node { PatKind::Ident(_, ref path, None) => { if pat_is_binding(&dm.borrow(), &this) { - bound_ptrs.push((path.node.name, val.val)); + bound_ptrs.push((path.node, val.val)); } } PatKind::Vec(ref before, Some(ref slice), ref after) => { @@ -529,7 +528,7 
@@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, let subslice_val = bind_subslice_pat( bcx, this.id, val, before.len(), after.len()); - bound_ptrs.push((path.node.name, subslice_val)); + bound_ptrs.push((path.node, subslice_val)); } } _ => {} @@ -609,19 +608,19 @@ fn enter_opt<'a, 'p, 'blk, 'tcx>( let _indenter = indenter(); let ctor = match opt { - &ConstantValue(ConstantExpr(expr), _) => check_match::ConstantValue( + &ConstantValue(ConstantExpr(expr), _) => Constructor::ConstantValue( eval_const_expr(bcx.tcx(), &expr) ), - &ConstantRange(ConstantExpr(lo), ConstantExpr(hi), _) => check_match::ConstantRange( + &ConstantRange(ConstantExpr(lo), ConstantExpr(hi), _) => Constructor::ConstantRange( eval_const_expr(bcx.tcx(), &lo), eval_const_expr(bcx.tcx(), &hi) ), &SliceLengthEqual(n, _) => - check_match::Slice(n), + Constructor::Slice(n), &SliceLengthGreaterOrEqual(before, after, _) => - check_match::SliceWithSubslice(before, after), + Constructor::SliceWithSubslice(before, after), &Variant(_, _, def_id, _) => - check_match::Constructor::Variant(def_id) + Constructor::Variant(def_id) }; let param_env = bcx.tcx().empty_parameter_environment(); @@ -789,7 +788,7 @@ fn any_region_pat(m: &[Match], col: usize) -> bool { any_pat!(m, col, PatKind::Ref(..)) } -fn any_irrefutable_adt_pat(tcx: &TyCtxt, m: &[Match], col: usize) -> bool { +fn any_irrefutable_adt_pat(tcx: TyCtxt, m: &[Match], col: usize) -> bool { m.iter().any(|br| { let pat = br.pats[col]; match pat.node { @@ -1229,7 +1228,7 @@ fn compile_submatch_continue<'a, 'p, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, Some(field_vals) => { let pats = enter_match(bcx, dm, m, col, val, |pats| check_match::specialize(&mcx, pats, - &check_match::Single, col, + &Constructor::Single, col, field_vals.len()) ); let mut vals: Vec<_> = field_vals.into_iter() @@ -1466,13 +1465,10 @@ fn is_discr_reassigned(bcx: Block, discr: &hir::Expr, body: &hir::Expr) -> bool field: field, reassigned: false }; - { - let infcx = infer::normalizing_infer_ctxt(bcx.tcx(), - &bcx.tcx().tables, - ProjectionMode::Any); + bcx.tcx().normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| { let mut visitor = euv::ExprUseVisitor::new(&mut rc, &infcx); visitor.walk_expr(body); - } + }); rc.reassigned } @@ -1533,7 +1529,7 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &hir::Pat, let llmatch; let trmode; - let moves_by_default = variable_ty.moves_by_default(¶m_env, span); + let moves_by_default = variable_ty.moves_by_default(tcx, ¶m_env, span); match bm { hir::BindByValue(_) if !moves_by_default || reassigned => { @@ -1806,7 +1802,7 @@ pub fn bind_irrefutable_pat<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // binding will live and place it into the appropriate // map. 
bcx = mk_binding_alloca( - bcx, pat.id, path1.node.name, cleanup_scope, (), + bcx, pat.id, path1.node, cleanup_scope, (), "_match::bind_irrefutable_pat", |(), bcx, Datum { val: llval, ty, kind: _ }| { match pat_binding_mode { diff --git a/src/librustc_trans/abi.rs b/src/librustc_trans/abi.rs index 9bbe0cb5f6..df3d2d149b 100644 --- a/src/librustc_trans/abi.rs +++ b/src/librustc_trans/abi.rs @@ -10,8 +10,8 @@ use llvm::{self, ValueRef}; use base; -use builder::Builder; -use common::{type_is_fat_ptr, BlockAndBuilder}; +use build::AllocaFcx; +use common::{type_is_fat_ptr, BlockAndBuilder, C_uint}; use context::CrateContext; use cabi_x86; use cabi_x86_64; @@ -22,7 +22,7 @@ use cabi_powerpc; use cabi_powerpc64; use cabi_mips; use cabi_asmjs; -use machine::{llalign_of_min, llsize_of, llsize_of_real}; +use machine::{llalign_of_min, llsize_of, llsize_of_real, llsize_of_store}; use type_::Type; use type_of; @@ -30,6 +30,7 @@ use rustc::hir; use rustc::ty::{self, Ty}; use libc::c_uint; +use std::cmp; pub use syntax::abi::Abi; pub use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA}; @@ -150,26 +151,63 @@ impl ArgType { /// lvalue for the original Rust type of this argument/return. /// Can be used for both storing formal arguments into Rust variables /// or results of call/invoke instructions into their destinations. - pub fn store(&self, b: &Builder, mut val: ValueRef, dst: ValueRef) { + pub fn store(&self, bcx: &BlockAndBuilder, mut val: ValueRef, dst: ValueRef) { if self.is_ignore() { return; } + let ccx = bcx.ccx(); if self.is_indirect() { - let llsz = llsize_of(b.ccx, self.ty); - let llalign = llalign_of_min(b.ccx, self.ty); - base::call_memcpy(b, dst, val, llsz, llalign as u32); + let llsz = llsize_of(ccx, self.ty); + let llalign = llalign_of_min(ccx, self.ty); + base::call_memcpy(bcx, dst, val, llsz, llalign as u32); } else if let Some(ty) = self.cast { - let cast_dst = b.pointercast(dst, ty.ptr_to()); - let store = b.store(val, cast_dst); - let llalign = llalign_of_min(b.ccx, self.ty); - unsafe { - llvm::LLVMSetAlignment(store, llalign); + // FIXME(eddyb): Figure out when the simpler Store is safe, clang + // uses it for i16 -> {i8, i8}, but not for i24 -> {i8, i8, i8}. + let can_store_through_cast_ptr = false; + if can_store_through_cast_ptr { + let cast_dst = bcx.pointercast(dst, ty.ptr_to()); + let store = bcx.store(val, cast_dst); + let llalign = llalign_of_min(ccx, self.ty); + unsafe { + llvm::LLVMSetAlignment(store, llalign); + } + } else { + // The actual return type is a struct, but the ABI + // adaptation code has cast it into some scalar type. The + // code that follows is the only reliable way I have + // found to do a transform like i64 -> {i32,i32}. + // Basically we dump the data onto the stack then memcpy it. + // + // Other approaches I tried: + // - Casting rust ret pointer to the foreign type and using Store + // is (a) unsafe if size of foreign type > size of rust type and + // (b) runs afoul of strict aliasing rules, yielding invalid + // assembly under -O (specifically, the store gets removed). + // - Truncating foreign type to correct integral type and then + // bitcasting to the struct type yields invalid cast errors. + + // We instead thus allocate some scratch space... + let llscratch = AllocaFcx(bcx.fcx(), ty, "abi_cast"); + base::Lifetime::Start.call(bcx, llscratch); + + // ...where we first store the value... + bcx.store(val, llscratch); + + // ...and then memcpy it to the intended destination. 
+ base::call_memcpy(bcx, + bcx.pointercast(dst, Type::i8p(ccx)), + bcx.pointercast(llscratch, Type::i8p(ccx)), + C_uint(ccx, llsize_of_store(ccx, self.ty)), + cmp::min(llalign_of_min(ccx, self.ty), + llalign_of_min(ccx, ty)) as u32); + + base::Lifetime::End.call(bcx, llscratch); } } else { - if self.original_ty == Type::i1(b.ccx) { - val = b.zext(val, Type::i8(b.ccx)); + if self.original_ty == Type::i1(ccx) { + val = bcx.zext(val, Type::i8(ccx)); } - b.store(val, dst); + bcx.store(val, dst); } } diff --git a/src/librustc_trans/adt.rs b/src/librustc_trans/adt.rs index 8922aa0618..a4792ea328 100644 --- a/src/librustc_trans/adt.rs +++ b/src/librustc_trans/adt.rs @@ -236,9 +236,9 @@ fn dtor_to_init_u8(dtor: bool) -> u8 { if dtor { DTOR_NEEDED } else { 0 } } -pub trait GetDtorType<'tcx> { fn dtor_type(&self) -> Ty<'tcx>; } -impl<'tcx> GetDtorType<'tcx> for TyCtxt<'tcx> { - fn dtor_type(&self) -> Ty<'tcx> { self.types.u8 } +pub trait GetDtorType<'tcx> { fn dtor_type(self) -> Ty<'tcx>; } +impl<'a, 'tcx> GetDtorType<'tcx> for TyCtxt<'a, 'tcx, 'tcx> { + fn dtor_type(self) -> Ty<'tcx> { self.types.u8 } } fn dtor_active(flag: u8) -> bool { @@ -306,10 +306,8 @@ fn represent_type_uncached<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, cx.tcx().item_path_str(def.did)); } - if cases.len() == 1 { + if cases.len() == 1 && hint == attr::ReprAny { // Equivalent to a struct/tuple/newtype. - // (Typechecking will reject discriminant-sizing attrs.) - assert_eq!(hint, attr::ReprAny); let mut ftys = cases[0].tys.clone(); if dtor { ftys.push(cx.tcx().dtor_type()); } return Univariant(mk_struct(cx, &ftys[..], false, t), @@ -442,9 +440,10 @@ struct Case<'tcx> { /// This represents the (GEP) indices to follow to get to the discriminant field pub type DiscrField = Vec; -fn find_discr_field_candidate<'tcx>(tcx: &TyCtxt<'tcx>, - ty: Ty<'tcx>, - mut path: DiscrField) -> Option { +fn find_discr_field_candidate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty: Ty<'tcx>, + mut path: DiscrField) + -> Option { match ty.sty { // Fat &T/&mut T/Box i.e. T is [T], str, or Trait ty::TyRef(_, ty::TypeAndMut { ty, .. 
}) | ty::TyBox(ty) if !type_is_sized(tcx, ty) => { @@ -544,10 +543,10 @@ impl<'tcx> Case<'tcx> { } } -fn get_cases<'tcx>(tcx: &TyCtxt<'tcx>, - adt: ty::AdtDef<'tcx>, - substs: &subst::Substs<'tcx>) - -> Vec> { +fn get_cases<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + adt: ty::AdtDef<'tcx>, + substs: &subst::Substs<'tcx>) + -> Vec> { adt.variants.iter().map(|vi| { let field_tys = vi.fields.iter().map(|field| { monomorphize::field_ty(tcx, substs, field) @@ -668,7 +667,7 @@ fn bounds_usable(cx: &CrateContext, ity: IntType, bounds: &IntBounds) -> bool { } } -pub fn ty_of_inttype<'tcx>(tcx: &TyCtxt<'tcx>, ity: IntType) -> Ty<'tcx> { +pub fn ty_of_inttype<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ity: IntType) -> Ty<'tcx> { match ity { attr::SignedInt(t) => tcx.mk_mach_int(t), attr::UnsignedInt(t) => tcx.mk_mach_uint(t) diff --git a/src/librustc_trans/back/archive.rs b/src/librustc_trans/back/archive.rs index 514fc52d00..aea61da18a 100644 --- a/src/librustc_trans/back/archive.rs +++ b/src/librustc_trans/back/archive.rs @@ -21,8 +21,6 @@ use std::process::{Command, Output, Stdio}; use std::ptr; use std::str; -use middle::cstore::CrateStore; - use libc; use llvm::archive_ro::{ArchiveRO, Child}; use llvm::{self, ArchiveKind}; diff --git a/src/librustc_trans/back/link.rs b/src/librustc_trans/back/link.rs index 4e77b2bc06..53cc031982 100644 --- a/src/librustc_trans/back/link.rs +++ b/src/librustc_trans/back/link.rs @@ -19,12 +19,13 @@ use session::config::{OutputFilenames, Input, OutputType}; use session::filesearch; use session::search_paths::PathKind; use session::Session; -use middle::cstore::{self, CrateStore, LinkMeta}; +use middle::cstore::{self, LinkMeta}; use middle::cstore::{LinkagePreference, NativeLibraryKind}; use middle::dependency_format::Linkage; use CrateTranslation; use util::common::time; use util::fs::fix_windows_verbatim_for_gcc; +use rustc::dep_graph::DepNode; use rustc::ty::TyCtxt; use rustc_back::tempdir::TempDir; @@ -124,9 +125,9 @@ pub fn find_crate_name(sess: Option<&Session>, } -pub fn build_link_meta(tcx: &TyCtxt, - name: &str) - -> LinkMeta { +pub fn build_link_meta<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + name: &str) + -> LinkMeta { let r = LinkMeta { crate_name: name.to_owned(), crate_hash: tcx.calculate_krate_hash(), @@ -183,6 +184,8 @@ pub fn link_binary(sess: &Session, trans: &CrateTranslation, outputs: &OutputFilenames, crate_name: &str) -> Vec { + let _task = sess.dep_graph.in_task(DepNode::LinkBinary); + let mut out_filenames = Vec::new(); for &crate_type in sess.crate_types.borrow().iter() { if invalid_output_for_target(sess, crate_type) { @@ -228,6 +231,7 @@ pub fn invalid_output_for_target(sess: &Session, crate_type: config::CrateType) -> bool { match (sess.target.target.options.dynamic_linking, sess.target.target.options.executables, crate_type) { + (false, _, config::CrateTypeCdylib) | (false, _, config::CrateTypeDylib) => true, (_, false, config::CrateTypeExecutable) => true, _ => false @@ -250,6 +254,7 @@ pub fn filename_for_input(sess: &Session, config::CrateTypeRlib => { outputs.out_directory.join(&format!("lib{}.rlib", libname)) } + config::CrateTypeCdylib | config::CrateTypeDylib => { let (prefix, suffix) = (&sess.target.target.options.dll_prefix, &sess.target.target.options.dll_suffix); @@ -278,9 +283,10 @@ pub fn each_linked_rlib(sess: &Session, f: &mut FnMut(ast::CrateNum, &Path)) { let crates = sess.cstore.used_crates(LinkagePreference::RequireStatic).into_iter(); let fmts = sess.dependency_formats.borrow(); - let fmts = 
fmts.get(&config::CrateTypeExecutable).or_else(|| { - fmts.get(&config::CrateTypeStaticlib) - }).unwrap_or_else(|| { + let fmts = fmts.get(&config::CrateTypeExecutable) + .or_else(|| fmts.get(&config::CrateTypeStaticlib)) + .or_else(|| fmts.get(&config::CrateTypeCdylib)); + let fmts = fmts.unwrap_or_else(|| { bug!("could not find formats for rlibs") }); for (cnum, path) in crates { @@ -335,13 +341,9 @@ fn link_binary_output(sess: &Session, config::CrateTypeStaticlib => { link_staticlib(sess, &objects, &out_filename, tmpdir.path()); } - config::CrateTypeExecutable => { - link_natively(sess, false, &objects, &out_filename, trans, outputs, - tmpdir.path()); - } - config::CrateTypeDylib => { - link_natively(sess, true, &objects, &out_filename, trans, outputs, - tmpdir.path()); + _ => { + link_natively(sess, crate_type, &objects, &out_filename, trans, + outputs, tmpdir.path()); } } @@ -609,13 +611,14 @@ fn link_staticlib(sess: &Session, objects: &[PathBuf], out_filename: &Path, // // This will invoke the system linker/cc to create the resulting file. This // links to all upstream files as well. -fn link_natively(sess: &Session, dylib: bool, - objects: &[PathBuf], out_filename: &Path, +fn link_natively(sess: &Session, + crate_type: config::CrateType, + objects: &[PathBuf], + out_filename: &Path, trans: &CrateTranslation, outputs: &OutputFilenames, tmpdir: &Path) { - info!("preparing dylib? ({}) from {:?} to {:?}", dylib, objects, - out_filename); + info!("preparing {:?} from {:?} to {:?}", crate_type, objects, out_filename); // The invocations of cc share some flags across platforms let (pname, mut cmd) = get_linker(sess); @@ -624,10 +627,10 @@ fn link_natively(sess: &Session, dylib: bool, let root = sess.target_filesearch(PathKind::Native).get_lib_path(); cmd.args(&sess.target.target.options.pre_link_args); - let pre_link_objects = if dylib { - &sess.target.target.options.pre_link_objects_dll - } else { + let pre_link_objects = if crate_type == config::CrateTypeExecutable { &sess.target.target.options.pre_link_objects_exe + } else { + &sess.target.target.options.pre_link_objects_dll }; for obj in pre_link_objects { cmd.arg(root.join(obj)); @@ -639,7 +642,7 @@ fn link_natively(sess: &Session, dylib: bool, } else { Box::new(GnuLinker { cmd: &mut cmd, sess: &sess }) as Box }; - link_args(&mut *linker, sess, dylib, tmpdir, + link_args(&mut *linker, sess, crate_type, tmpdir, objects, out_filename, trans, outputs); if !sess.target.target.options.no_compiler_rt { linker.link_staticlib("compiler-rt"); @@ -705,7 +708,7 @@ fn link_natively(sess: &Session, dylib: bool, fn link_args(cmd: &mut Linker, sess: &Session, - dylib: bool, + crate_type: config::CrateType, tmpdir: &Path, objects: &[PathBuf], out_filename: &Path, @@ -727,26 +730,28 @@ fn link_args(cmd: &mut Linker, // If we're building a dynamic library then some platforms need to make sure // that all symbols are exported correctly from the dynamic library. - if dylib { - cmd.export_symbols(sess, trans, tmpdir); + if crate_type != config::CrateTypeExecutable { + cmd.export_symbols(sess, trans, tmpdir, crate_type); } // When linking a dynamic library, we put the metadata into a section of the // executable. This metadata is in a separate object file from the main // object file, so we link that in here. - if dylib { + if crate_type == config::CrateTypeDylib { cmd.add_object(&outputs.with_extension("metadata.o")); } // Try to strip as much out of the generated object by removing unused // sections if possible. 
See more comments in linker.rs if !sess.opts.cg.link_dead_code { - cmd.gc_sections(dylib); + let keep_metadata = crate_type == config::CrateTypeDylib; + cmd.gc_sections(keep_metadata); } let used_link_args = sess.cstore.used_link_args(); - if !dylib && t.options.position_independent_executables { + if crate_type == config::CrateTypeExecutable && + t.options.position_independent_executables { let empty_vec = Vec::new(); let empty_str = String::new(); let args = sess.opts.cg.link_args.as_ref().unwrap_or(&empty_vec); @@ -801,12 +806,12 @@ fn link_args(cmd: &mut Linker, // in this DAG so far because they're only dylibs and dylibs can only depend // on other dylibs (e.g. other native deps). add_local_native_libraries(cmd, sess); - add_upstream_rust_crates(cmd, sess, dylib, tmpdir); + add_upstream_rust_crates(cmd, sess, crate_type, tmpdir); add_upstream_native_libraries(cmd, sess); // # Telling the linker what we're doing - if dylib { + if crate_type != config::CrateTypeExecutable { cmd.build_dylib(out_filename); } @@ -904,8 +909,10 @@ fn add_local_native_libraries(cmd: &mut Linker, sess: &Session) { // Rust crates are not considered at all when creating an rlib output. All // dependencies will be linked when producing the final output (instead of // the intermediate rlib version) -fn add_upstream_rust_crates(cmd: &mut Linker, sess: &Session, - dylib: bool, tmpdir: &Path) { +fn add_upstream_rust_crates(cmd: &mut Linker, + sess: &Session, + crate_type: config::CrateType, + tmpdir: &Path) { // All of the heavy lifting has previously been accomplished by the // dependency_format module of the compiler. This is just crawling the // output of that module, adding crates as necessary. @@ -915,11 +922,7 @@ fn add_upstream_rust_crates(cmd: &mut Linker, sess: &Session, // involves just passing the right -l flag. let formats = sess.dependency_formats.borrow(); - let data = if dylib { - formats.get(&config::CrateTypeDylib).unwrap() - } else { - formats.get(&config::CrateTypeExecutable).unwrap() - }; + let data = formats.get(&crate_type).unwrap(); // Invoke get_used_crates to ensure that we get a topological sorting of // crates. @@ -934,7 +937,8 @@ fn add_upstream_rust_crates(cmd: &mut Linker, sess: &Session, Linkage::NotLinked | Linkage::IncludedFromDylib => {} Linkage::Static => { - add_static_crate(cmd, sess, tmpdir, dylib, &src.rlib.unwrap().0) + add_static_crate(cmd, sess, tmpdir, crate_type, + &src.rlib.unwrap().0) } Linkage::Dynamic => { add_dynamic_crate(cmd, sess, &src.dylib.unwrap().0) @@ -979,9 +983,12 @@ fn add_upstream_rust_crates(cmd: &mut Linker, sess: &Session, // (aka we're making an executable), we can just pass the rlib blindly to // the linker (fast) because it's fine if it's not actually included as // we're at the end of the dependency chain. 
- fn add_static_crate(cmd: &mut Linker, sess: &Session, tmpdir: &Path, - dylib: bool, cratepath: &Path) { - if !sess.lto() && !dylib { + fn add_static_crate(cmd: &mut Linker, + sess: &Session, + tmpdir: &Path, + crate_type: config::CrateType, + cratepath: &Path) { + if !sess.lto() && crate_type != config::CrateTypeDylib { cmd.link_rlib(&fix_windows_verbatim_for_gcc(cratepath)); return } @@ -1017,7 +1024,7 @@ fn add_upstream_rust_crates(cmd: &mut Linker, sess: &Session, if any_objects { archive.build(); - if dylib { + if crate_type == config::CrateTypeDylib { cmd.link_whole_rlib(&fix_windows_verbatim_for_gcc(&dst)); } else { cmd.link_rlib(&fix_windows_verbatim_for_gcc(&dst)); diff --git a/src/librustc_trans/back/linker.rs b/src/librustc_trans/back/linker.rs index c02a482f81..50f6366e85 100644 --- a/src/librustc_trans/back/linker.rs +++ b/src/librustc_trans/back/linker.rs @@ -16,10 +16,9 @@ use std::path::{Path, PathBuf}; use std::process::Command; use back::archive; -use middle::cstore::CrateStore; use middle::dependency_format::Linkage; use session::Session; -use session::config::CrateTypeDylib; +use session::config::CrateType; use session::config; use syntax::ast; use CrateTranslation; @@ -43,7 +42,7 @@ pub trait Linker { fn framework_path(&mut self, path: &Path); fn output_filename(&mut self, path: &Path); fn add_object(&mut self, path: &Path); - fn gc_sections(&mut self, is_dylib: bool); + fn gc_sections(&mut self, keep_metadata: bool); fn position_independent_executable(&mut self); fn optimize(&mut self); fn debuginfo(&mut self); @@ -54,8 +53,11 @@ pub trait Linker { fn hint_dynamic(&mut self); fn whole_archives(&mut self); fn no_whole_archives(&mut self); - fn export_symbols(&mut self, sess: &Session, trans: &CrateTranslation, - tmpdir: &Path); + fn export_symbols(&mut self, + sess: &Session, + trans: &CrateTranslation, + tmpdir: &Path, + crate_type: CrateType); } pub struct GnuLinker<'a> { @@ -114,7 +116,7 @@ impl<'a> Linker for GnuLinker<'a> { } } - fn gc_sections(&mut self, is_dylib: bool) { + fn gc_sections(&mut self, keep_metadata: bool) { // The dead_strip option to the linker specifies that functions and data // unreachable by the entry point will be removed. This is quite useful // with Rust's compilation model of compiling libraries at a time into @@ -140,7 +142,7 @@ impl<'a> Linker for GnuLinker<'a> { // eliminate the metadata. If we're building an executable, however, // --gc-sections drops the size of hello world from 1.8MB to 597K, a 67% // reduction. - } else if !is_dylib { + } else if !keep_metadata { self.cmd.arg("-Wl,--gc-sections"); } } @@ -199,8 +201,46 @@ impl<'a> Linker for GnuLinker<'a> { self.cmd.arg("-Wl,-Bdynamic"); } - fn export_symbols(&mut self, _: &Session, _: &CrateTranslation, _: &Path) { - // noop, visibility in object files takes care of this + fn export_symbols(&mut self, + sess: &Session, + trans: &CrateTranslation, + tmpdir: &Path, + crate_type: CrateType) { + // If we're compiling a dylib, then we let symbol visibility in object + // files to take care of whether they're exported or not. + // + // If we're compiling a cdylib, however, we manually create a list of + // exported symbols to ensure we don't expose any more. The object files + // have far more public symbols than we actually want to export, so we + // hide them all here. 
+ if crate_type == CrateType::CrateTypeDylib { + return + } + + let path = tmpdir.join("list"); + let prefix = if self.sess.target.target.options.is_like_osx { + "_" + } else { + "" + }; + let res = (|| -> io::Result<()> { + let mut f = BufWriter::new(File::create(&path)?); + for sym in exported_symbols(sess, trans, crate_type) { + writeln!(f, "{}{}", prefix, sym)?; + } + Ok(()) + })(); + if let Err(e) = res { + sess.fatal(&format!("failed to write lib.def file: {}", e)); + } + let mut arg = OsString::new(); + if self.sess.target.target.options.is_like_osx { + arg.push("-Wl,-exported_symbols_list,"); + } else { + arg.push("-Wl,--retain-symbols-file="); + } + arg.push(&path); + self.cmd.arg(arg); } } @@ -221,7 +261,9 @@ impl<'a> Linker for MsvcLinker<'a> { self.cmd.arg(arg); } - fn gc_sections(&mut self, _is_dylib: bool) { self.cmd.arg("/OPT:REF,ICF"); } + fn gc_sections(&mut self, _keep_metadata: bool) { + self.cmd.arg("/OPT:REF,ICF"); + } fn link_dylib(&mut self, lib: &str) { self.cmd.arg(&format!("{}.lib", lib)); @@ -323,8 +365,11 @@ impl<'a> Linker for MsvcLinker<'a> { // crates. Upstream rlibs may be linked statically to this dynamic library, // in which case they may continue to transitively be used and hence need // their symbols exported. - fn export_symbols(&mut self, sess: &Session, trans: &CrateTranslation, - tmpdir: &Path) { + fn export_symbols(&mut self, + sess: &Session, + trans: &CrateTranslation, + tmpdir: &Path, + crate_type: CrateType) { let path = tmpdir.join("lib.def"); let res = (|| -> io::Result<()> { let mut f = BufWriter::new(File::create(&path)?); @@ -334,32 +379,10 @@ impl<'a> Linker for MsvcLinker<'a> { writeln!(f, "LIBRARY")?; writeln!(f, "EXPORTS")?; - // Write out all our local symbols - for sym in trans.reachable.iter() { + for sym in exported_symbols(sess, trans, crate_type) { writeln!(f, " {}", sym)?; } - // Take a look at how all upstream crates are linked into this - // dynamic library. For all statically linked libraries we take all - // their reachable symbols and emit them as well. - let cstore = &sess.cstore; - let formats = sess.dependency_formats.borrow(); - let symbols = formats[&CrateTypeDylib].iter(); - let symbols = symbols.enumerate().filter_map(|(i, f)| { - if *f == Linkage::Static { - Some((i + 1) as ast::CrateNum) - } else { - None - } - }).flat_map(|cnum| { - cstore.reachable_ids(cnum) - }).map(|did| { - cstore.item_symbol(did) - }); - for symbol in symbols { - writeln!(f, " {}", symbol)?; - } - Ok(()) })(); if let Err(e) = res { @@ -370,3 +393,36 @@ impl<'a> Linker for MsvcLinker<'a> { self.cmd.arg(&arg); } } + +fn exported_symbols(sess: &Session, + trans: &CrateTranslation, + crate_type: CrateType) -> Vec { + let mut symbols = trans.reachable.iter().cloned().collect::>(); + + // If we're producing anything other than a dylib then the `reachable` array + // above is the exhaustive set of symbols we should be exporting. + // + // For dylibs, however, we need to take a look at how all upstream crates + // are linked into this dynamic library. For all statically linked + // libraries we take all their reachable symbols and emit them as well. 
+ if crate_type != CrateType::CrateTypeDylib { + return symbols + } + + let cstore = &sess.cstore; + let formats = sess.dependency_formats.borrow(); + let upstream_symbols = formats[&crate_type].iter(); + symbols.extend(upstream_symbols.enumerate().filter_map(|(i, f)| { + if *f == Linkage::Static { + Some((i + 1) as ast::CrateNum) + } else { + None + } + }).flat_map(|cnum| { + cstore.reachable_ids(cnum) + }).map(|did| { + cstore.item_symbol(did) + })); + + return symbols +} diff --git a/src/librustc_trans/back/lto.rs b/src/librustc_trans/back/lto.rs index 649d37e802..31bc11fb21 100644 --- a/src/librustc_trans/back/lto.rs +++ b/src/librustc_trans/back/lto.rs @@ -30,7 +30,8 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, output_names: &config::OutputFilenames) { if sess.opts.cg.prefer_dynamic { sess.struct_err("cannot prefer dynamic linking when performing LTO") - .note("only 'staticlib' and 'bin' outputs are supported with LTO") + .note("only 'staticlib', 'bin', and 'cdylib' outputs are \ + supported with LTO") .emit(); sess.abort_if_errors(); } @@ -38,7 +39,9 @@ pub fn run(sess: &session::Session, llmod: ModuleRef, // Make sure we actually can run LTO for crate_type in sess.crate_types.borrow().iter() { match *crate_type { - config::CrateTypeExecutable | config::CrateTypeStaticlib => {} + config::CrateTypeExecutable | + config::CrateTypeCdylib | + config::CrateTypeStaticlib => {} _ => { sess.fatal("lto can only be run for executables and \ static library outputs"); diff --git a/src/librustc_trans/back/symbol_names.rs b/src/librustc_trans/back/symbol_names.rs index 8127d1c0e2..0cf82d66b2 100644 --- a/src/librustc_trans/back/symbol_names.rs +++ b/src/librustc_trans/back/symbol_names.rs @@ -103,7 +103,7 @@ use util::sha2::{Digest, Sha256}; use rustc::middle::cstore; use rustc::hir::def_id::DefId; -use rustc::ty::{self, TypeFoldable}; +use rustc::ty::{self, TyCtxt, TypeFoldable}; use rustc::ty::item_path::{ItemPathBuffer, RootMode}; use rustc::hir::map::definitions::{DefPath, DefPathData}; @@ -111,12 +111,12 @@ use std::fmt::Write; use syntax::parse::token::{self, InternedString}; use serialize::hex::ToHex; -pub fn def_id_to_string<'tcx>(tcx: &ty::TyCtxt<'tcx>, def_id: DefId) -> String { +pub fn def_id_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> String { let def_path = tcx.def_path(def_id); def_path_to_string(tcx, &def_path) } -pub fn def_path_to_string<'tcx>(tcx: &ty::TyCtxt<'tcx>, def_path: &DefPath) -> String { +pub fn def_path_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_path: &DefPath) -> String { let mut s = String::with_capacity(def_path.data.len() * 16); s.push_str(&tcx.crate_name(def_path.krate)); diff --git a/src/librustc_trans/back/write.rs b/src/librustc_trans/back/write.rs index e6a51eb7c8..cf81777be2 100644 --- a/src/librustc_trans/back/write.rs +++ b/src/librustc_trans/back/write.rs @@ -19,9 +19,9 @@ use llvm::SMDiagnosticRef; use {CrateTranslation, ModuleTranslation}; use util::common::time; use util::common::path2cstr; -use syntax::codemap; -use syntax::errors::{self, Handler, Level}; -use syntax::errors::emitter::Emitter; +use syntax::codemap::MultiSpan; +use syntax::errors::{self, Handler, Level, RenderSpan}; +use syntax::errors::emitter::CoreEmitter; use std::collections::HashMap; use std::ffi::{CStr, CString}; @@ -31,7 +31,7 @@ use std::str; use std::sync::{Arc, Mutex}; use std::sync::mpsc::channel; use std::thread; -use libc::{c_uint, c_int, c_void}; +use libc::{c_uint, c_void}; pub fn llvm_err(handler: &errors::Handler, msg: 
String) -> ! { match llvm::last_error() { @@ -84,13 +84,13 @@ impl SharedEmitter { for diag in &*buffer { match diag.code { Some(ref code) => { - handler.emit_with_code(None, + handler.emit_with_code(&MultiSpan::new(), &diag.msg, &code[..], diag.lvl); }, None => { - handler.emit(None, + handler.emit(&MultiSpan::new(), &diag.msg, diag.lvl); }, @@ -100,21 +100,20 @@ impl SharedEmitter { } } -impl Emitter for SharedEmitter { - fn emit(&mut self, sp: Option<&codemap::MultiSpan>, - msg: &str, code: Option<&str>, lvl: Level) { - assert!(sp.is_none(), "SharedEmitter doesn't support spans"); - +impl CoreEmitter for SharedEmitter { + fn emit_message(&mut self, + _rsp: &RenderSpan, + msg: &str, + code: Option<&str>, + lvl: Level, + _is_header: bool, + _show_snippet: bool) { self.buffer.lock().unwrap().push(Diagnostic { msg: msg.to_string(), code: code.map(|s| s.to_string()), lvl: lvl, }); } - - fn custom_emit(&mut self, _sp: &errors::RenderSpan, _msg: &str, _lvl: Level) { - bug!("SharedEmitter doesn't support custom_emit"); - } } @@ -140,6 +139,15 @@ fn get_llvm_opt_level(optimize: config::OptLevel) -> llvm::CodeGenOptLevel { config::OptLevel::Less => llvm::CodeGenLevelLess, config::OptLevel::Default => llvm::CodeGenLevelDefault, config::OptLevel::Aggressive => llvm::CodeGenLevelAggressive, + _ => llvm::CodeGenLevelDefault, + } +} + +fn get_llvm_opt_size(optimize: config::OptLevel) -> llvm::CodeGenOptSize { + match optimize { + config::OptLevel::Size => llvm::CodeGenOptSizeDefault, + config::OptLevel::SizeMin => llvm::CodeGenOptSizeAggressive, + _ => llvm::CodeGenOptSizeNone, } } @@ -237,6 +245,9 @@ pub struct ModuleConfig { /// absolutely no optimizations (used for the metadata module). opt_level: Option, + /// Some(level) to optimize binary size, or None to not affect program size. + opt_size: Option, + // Flags indicating which outputs to produce. emit_no_opt_bc: bool, emit_bc: bool, @@ -268,6 +279,7 @@ impl ModuleConfig { tm: tm, passes: passes, opt_level: None, + opt_size: None, emit_no_opt_bc: false, emit_bc: false, @@ -627,7 +639,8 @@ pub fn run_passes(sess: &Session, } // Sanity check - assert!(trans.modules.len() == sess.opts.cg.codegen_units); + assert!(trans.modules.len() == sess.opts.cg.codegen_units || + sess.opts.debugging_opts.incremental.is_some()); let tm = create_target_machine(sess); @@ -637,6 +650,7 @@ pub fn run_passes(sess: &Session, let mut metadata_config = ModuleConfig::new(tm, vec!()); modules_config.opt_level = Some(get_llvm_opt_level(sess.opts.optimize)); + modules_config.opt_size = Some(get_llvm_opt_size(sess.opts.optimize)); // Save all versions of the bytecode if we're saving our temporaries. 
if sess.opts.cg.save_temps { @@ -984,36 +998,6 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) { } } -pub unsafe fn configure_llvm(sess: &Session) { - let mut llvm_c_strs = Vec::new(); - let mut llvm_args = Vec::new(); - - { - let mut add = |arg: &str| { - let s = CString::new(arg).unwrap(); - llvm_args.push(s.as_ptr()); - llvm_c_strs.push(s); - }; - add("rustc"); // fake program name - if sess.time_llvm_passes() { add("-time-passes"); } - if sess.print_llvm_passes() { add("-debug-pass=Structure"); } - - // FIXME #21627 disable faulty FastISel on AArch64 (even for -O0) - if sess.target.target.arch == "aarch64" { add("-fast-isel=0"); } - - for arg in &sess.opts.cg.llvm_args { - add(&(*arg)); - } - } - - llvm::LLVMInitializePasses(); - - llvm::initialize_available_targets(); - - llvm::LLVMRustSetLLVMOptions(llvm_args.len() as c_int, - llvm_args.as_ptr()); -} - pub unsafe fn with_llvm_pmb(llmod: ModuleRef, config: &ModuleConfig, f: &mut FnMut(llvm::PassManagerBuilderRef)) { @@ -1021,13 +1005,19 @@ pub unsafe fn with_llvm_pmb(llmod: ModuleRef, // reasonable defaults and prepare it to actually populate the pass // manager. let builder = llvm::LLVMPassManagerBuilderCreate(); - let opt = config.opt_level.unwrap_or(llvm::CodeGenLevelNone); + let opt_level = config.opt_level.unwrap_or(llvm::CodeGenLevelNone); + let opt_size = config.opt_size.unwrap_or(llvm::CodeGenOptSizeNone); let inline_threshold = config.inline_threshold; - llvm::LLVMRustConfigurePassManagerBuilder(builder, opt, + llvm::LLVMRustConfigurePassManagerBuilder(builder, opt_level, config.merge_functions, config.vectorize_slp, config.vectorize_loop); + llvm::LLVMPassManagerBuilderSetSizeLevel(builder, opt_size as u32); + + if opt_size != llvm::CodeGenOptSizeNone { + llvm::LLVMPassManagerBuilderSetDisableUnrollLoops(builder, 1); + } llvm::LLVMRustAddBuilderLibraryInfo(builder, llmod, config.no_builtins); @@ -1035,22 +1025,28 @@ pub unsafe fn with_llvm_pmb(llmod: ModuleRef, // always-inline functions (but don't add lifetime intrinsics), at O1 we // inline with lifetime intrinsics, and O2+ we add an inliner with a // thresholds copied from clang. 
- match (opt, inline_threshold) { - (_, Some(t)) => { + match (opt_level, opt_size, inline_threshold) { + (_, _, Some(t)) => { llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, t as u32); } - (llvm::CodeGenLevelNone, _) => { + (llvm::CodeGenLevelAggressive, _, _) => { + llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 275); + } + (_, llvm::CodeGenOptSizeDefault, _) => { + llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 75); + } + (_, llvm::CodeGenOptSizeAggressive, _) => { + llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 25); + } + (llvm::CodeGenLevelNone, _, _) => { llvm::LLVMRustAddAlwaysInlinePass(builder, false); } - (llvm::CodeGenLevelLess, _) => { + (llvm::CodeGenLevelLess, _, _) => { llvm::LLVMRustAddAlwaysInlinePass(builder, true); } - (llvm::CodeGenLevelDefault, _) => { + (llvm::CodeGenLevelDefault, _, _) => { llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 225); } - (llvm::CodeGenLevelAggressive, _) => { - llvm::LLVMPassManagerBuilderUseInlinerWithThreshold(builder, 275); - } } f(builder); diff --git a/src/librustc_trans/base.rs b/src/librustc_trans/base.rs index 104a74a63c..d4f0786e9e 100644 --- a/src/librustc_trans/base.rs +++ b/src/librustc_trans/base.rs @@ -35,9 +35,7 @@ use lint; use llvm::{BasicBlockRef, Linkage, ValueRef, Vector, get_param}; use llvm; use rustc::cfg; -use middle::cstore::CrateStore; use rustc::hir::def_id::DefId; -use rustc::infer; use middle::lang_items::{LangItem, ExchangeMallocFnLangItem, StartFnLangItem}; use middle::weak_lang_items; use rustc::hir::pat_util::simple_name; @@ -49,6 +47,7 @@ use rustc::dep_graph::DepNode; use rustc::hir::map as hir_map; use rustc::util::common::time; use rustc::mir::mir_map::MirMap; +use rustc_data_structures::graph::OUTGOING; use session::config::{self, NoDebugInfo, FullDebugInfo}; use session::Session; use _match; @@ -61,7 +60,7 @@ use callee::{Callee, CallArgs, ArgExprs, ArgVals}; use cleanup::{self, CleanupMethods, DropHint}; use closure; use common::{Block, C_bool, C_bytes_in_context, C_i32, C_int, C_uint, C_integral}; -use collector::{self, TransItem, TransItemState, TransItemCollectionMode}; +use collector::{self, TransItemState, TransItemCollectionMode}; use common::{C_null, C_struct_in_context, C_u64, C_u8, C_undef}; use common::{CrateContext, DropFlagHintsMap, Field, FunctionContext}; use common::{Result, NodeIdAndSpan, VariantInfo}; @@ -69,7 +68,7 @@ use common::{node_id_type, fulfill_obligation}; use common::{type_is_immediate, type_is_zero_size, val_ty}; use common; use consts; -use context::SharedCrateContext; +use context::{SharedCrateContext, CrateContextList}; use controlflow; use datum; use debuginfo::{self, DebugLoc, ToDebugLoc}; @@ -82,7 +81,9 @@ use machine::{llalign_of_min, llsize_of, llsize_of_real}; use meth; use mir; use monomorphize::{self, Instance}; +use partitioning::{self, PartitioningStrategy, CodegenUnit}; use symbol_names_test; +use trans_item::TransItem; use tvec; use type_::Type; use type_of; @@ -616,7 +617,13 @@ pub fn coerce_unsized_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, (&ty::TyRawPtr(..), &ty::TyRawPtr(..)) => { let (base, info) = if common::type_is_fat_ptr(bcx.tcx(), src_ty) { // fat-ptr to fat-ptr unsize preserves the vtable - load_fat_ptr(bcx, src, src_ty) + // i.e. &'a fmt::Debug+Send => &'a fmt::Debug + // So we need to pointercast the base to ensure + // the types match up. 
+ let (base, info) = load_fat_ptr(bcx, src, src_ty); + let llcast_ty = type_of::fat_ptr_base_ty(bcx.ccx(), dst_ty); + let base = PointerCast(bcx, base, llcast_ty); + (base, info) } else { let base = load_ty(bcx, src, src_ty); unsize_thin_ptr(bcx, base, src_ty, dst_ty) @@ -664,7 +671,7 @@ pub fn coerce_unsized_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } } -pub fn custom_coerce_unsize_info<'ccx, 'tcx>(ccx: &CrateContext<'ccx, 'tcx>, +pub fn custom_coerce_unsize_info<'scx, 'tcx>(scx: &SharedCrateContext<'scx, 'tcx>, source_ty: Ty<'tcx>, target_ty: Ty<'tcx>) -> CustomCoerceUnsized { @@ -674,13 +681,13 @@ pub fn custom_coerce_unsize_info<'ccx, 'tcx>(ccx: &CrateContext<'ccx, 'tcx>, subst::VecPerParamSpace::empty()); let trait_ref = ty::Binder(ty::TraitRef { - def_id: ccx.tcx().lang_items.coerce_unsized_trait().unwrap(), - substs: ccx.tcx().mk_substs(trait_substs) + def_id: scx.tcx().lang_items.coerce_unsized_trait().unwrap(), + substs: scx.tcx().mk_substs(trait_substs) }); - match fulfill_obligation(ccx, DUMMY_SP, trait_ref) { + match fulfill_obligation(scx, DUMMY_SP, trait_ref) { traits::VtableImpl(traits::VtableImplData { impl_def_id, .. }) => { - ccx.tcx().custom_coerce_unsized_kind(impl_def_id) + scx.tcx().custom_coerce_unsized_kind(impl_def_id) } vtable => { bug!("invalid CoerceUnsized vtable: {:?}", vtable); @@ -1035,7 +1042,7 @@ pub fn with_cond<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, val: ValueRef, f: F) -> next_cx } -enum Lifetime { Start, End } +pub enum Lifetime { Start, End } // If LLVM lifetime intrinsic support is enabled (i.e. optimizations // on), and `ptr` is nonzero-sized, then extracts the size of `ptr` @@ -1072,24 +1079,25 @@ fn core_lifetime_emit<'blk, 'tcx, F>(ccx: &'blk CrateContext<'blk, 'tcx>, emit(ccx, size, lifetime_intrinsic) } -pub fn call_lifetime_start(cx: Block, ptr: ValueRef) { - core_lifetime_emit(cx.ccx(), ptr, Lifetime::Start, |ccx, size, lifetime_start| { - let ptr = PointerCast(cx, ptr, Type::i8p(ccx)); - Call(cx, - lifetime_start, - &[C_u64(ccx, size), ptr], - DebugLoc::None); - }) +impl Lifetime { + pub fn call(self, b: &Builder, ptr: ValueRef) { + core_lifetime_emit(b.ccx, ptr, self, |ccx, size, lifetime_intrinsic| { + let ptr = b.pointercast(ptr, Type::i8p(ccx)); + b.call(lifetime_intrinsic, &[C_u64(ccx, size), ptr], None); + }); + } } -pub fn call_lifetime_end(cx: Block, ptr: ValueRef) { - core_lifetime_emit(cx.ccx(), ptr, Lifetime::End, |ccx, size, lifetime_end| { - let ptr = PointerCast(cx, ptr, Type::i8p(ccx)); - Call(cx, - lifetime_end, - &[C_u64(ccx, size), ptr], - DebugLoc::None); - }) +pub fn call_lifetime_start(bcx: Block, ptr: ValueRef) { + if !bcx.unreachable.get() { + Lifetime::Start.call(&bcx.build(), ptr); + } +} + +pub fn call_lifetime_end(bcx: Block, ptr: ValueRef) { + if !bcx.unreachable.get() { + Lifetime::End.call(&bcx.build(), ptr); + } } // Generates code for resumption of unwind at the end of a landing pad. 
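The base.rs hunk above folds call_lifetime_start and call_lifetime_end into a single Lifetime enum whose call method emits the matching intrinsic, keeping the old entry points as thin wrappers that bail out on unreachable blocks. A minimal standalone sketch of that shape, using hypothetical Builder and Block stand-ins rather than rustc's real LLVM bindings:

// Sketch only: Builder, Block, and emit_lifetime_intrinsic are made-up
// stand-ins; the real code drives LLVM's lifetime.start/lifetime.end.
#[derive(Clone, Copy)]
enum Lifetime {
    Start,
    End,
}

struct Builder;

struct Block {
    unreachable: bool,
}

impl Builder {
    fn emit_lifetime_intrinsic(&self, which: Lifetime, ptr: usize) {
        let name = match which {
            Lifetime::Start => "llvm.lifetime.start",
            Lifetime::End => "llvm.lifetime.end",
        };
        println!("call {} on ptr {:#x}", name, ptr);
    }
}

impl Lifetime {
    // One code path for both intrinsics; the variant selects which one.
    fn call(self, b: &Builder, ptr: usize) {
        b.emit_lifetime_intrinsic(self, ptr);
    }
}

// The old free functions survive as wrappers that skip dead blocks,
// mirroring the bcx.unreachable.get() guard in the hunk above.
fn call_lifetime_start(bcx: &Block, b: &Builder, ptr: usize) {
    if !bcx.unreachable {
        Lifetime::Start.call(b, ptr);
    }
}

fn call_lifetime_end(bcx: &Block, b: &Builder, ptr: usize) {
    if !bcx.unreachable {
        Lifetime::End.call(b, ptr);
    }
}

fn main() {
    let b = Builder;
    let live = Block { unreachable: false };
    call_lifetime_start(&live, &b, 0x1000);
    call_lifetime_end(&live, &b, 0x1000);
}

Exposing the enum is what lets the abi.rs hunk earlier in this patch emit markers directly, as in base::Lifetime::Start.call(bcx, llscratch), without going through a Block.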
@@ -1275,7 +1283,7 @@ pub fn alloca(cx: Block, ty: Type, name: &str) -> ValueRef { return llvm::LLVMGetUndef(ty.ptr_to().to_ref()); } } - debuginfo::clear_source_location(cx.fcx); + DebugLoc::None.apply(cx.fcx); Alloca(cx, ty, name) } @@ -1309,7 +1317,9 @@ impl<'v> Visitor<'v> for FindNestedReturn { } } -fn build_cfg(tcx: &TyCtxt, id: ast::NodeId) -> (ast::NodeId, Option) { +fn build_cfg<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + id: ast::NodeId) + -> (ast::NodeId, Option) { let blk = match tcx.map.find(id) { Some(hir_map::NodeItem(i)) => { match i.node { @@ -1338,7 +1348,7 @@ fn build_cfg(tcx: &TyCtxt, id: ast::NodeId) -> (ast::NodeId, Option) { } Some(hir_map::NodeExpr(e)) => { match e.node { - hir::ExprClosure(_, _, ref blk) => blk, + hir::ExprClosure(_, _, ref blk, _) => blk, _ => bug!("unexpected expr variant in has_nested_returns"), } } @@ -1365,8 +1375,8 @@ fn build_cfg(tcx: &TyCtxt, id: ast::NodeId) -> (ast::NodeId, Option) { // part of a larger expression that may have already partially-filled the // return slot alloca. This can cause errors related to clean-up due to // the clobbering of the existing value in the return slot. -fn has_nested_returns(tcx: &TyCtxt, cfg: &cfg::CFG, blk_id: ast::NodeId) -> bool { - for index in cfg.graph.depth_traverse(cfg.entry) { +fn has_nested_returns(tcx: TyCtxt, cfg: &cfg::CFG, blk_id: ast::NodeId) -> bool { + for index in cfg.graph.depth_traverse(cfg.entry, OUTGOING) { let n = cfg.graph.node_data(index); match tcx.map.find(n.id()) { Some(hir_map::NodeExpr(ex)) => { @@ -1399,23 +1409,23 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> { pub fn new(ccx: &'blk CrateContext<'blk, 'tcx>, llfndecl: ValueRef, fn_ty: FnType, - def_id: Option, - param_substs: &'tcx Substs<'tcx>, + definition: Option<(Instance<'tcx>, &ty::FnSig<'tcx>, Abi)>, block_arena: &'blk TypedArena>) -> FunctionContext<'blk, 'tcx> { - common::validate_substs(param_substs); + let (param_substs, def_id) = match definition { + Some((instance, _, _)) => { + common::validate_substs(instance.substs); + (instance.substs, Some(instance.def)) + } + None => (ccx.tcx().mk_substs(Substs::empty()), None) + }; let inlined_did = def_id.and_then(|def_id| inline::get_local_instance(ccx, def_id)); let inlined_id = inlined_did.and_then(|id| ccx.tcx().map.as_local_node_id(id)); let local_id = def_id.and_then(|id| ccx.tcx().map.as_local_node_id(id)); - debug!("FunctionContext::new(path={}, def_id={:?}, param_substs={:?})", - inlined_id.map_or(String::new(), |id| ccx.tcx().node_path_str(id)), - def_id, - param_substs); - - let debug_context = debuginfo::create_function_debug_context(ccx, - inlined_id.unwrap_or(ast::DUMMY_NODE_ID), param_substs, llfndecl); + debug!("FunctionContext::new({})", + definition.map_or(String::new(), |d| d.0.to_string())); let cfg = inlined_id.map(|id| build_cfg(ccx.tcx(), id)); let nested_returns = if let Some((blk_id, Some(ref cfg))) = cfg { @@ -1427,10 +1437,11 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> { let check_attrs = |attrs: &[ast::Attribute]| { let default_to_mir = ccx.sess().opts.debugging_opts.orbit; let invert = if default_to_mir { "rustc_no_mir" } else { "rustc_mir" }; - default_to_mir ^ attrs.iter().any(|item| item.check_name(invert)) + (default_to_mir ^ attrs.iter().any(|item| item.check_name(invert)), + attrs.iter().any(|item| item.check_name("no_debug"))) }; - let use_mir = if let Some(id) = local_id { + let (use_mir, no_debug) = if let Some(id) = local_id { check_attrs(ccx.tcx().map.attrs(id)) } else if let Some(def_id) = def_id { 
check_attrs(&ccx.sess().cstore.item_attrs(def_id)) @@ -1444,6 +1455,13 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> { None }; + let debug_context = if let (false, Some(definition)) = (no_debug, definition) { + let (instance, sig, abi) = definition; + debuginfo::create_function_debug_context(ccx, instance, sig, abi, llfndecl) + } else { + debuginfo::empty_function_debug_context(ccx) + }; + FunctionContext { needs_ret_allocas: nested_returns && mir.is_none(), mir: mir, @@ -1643,8 +1661,8 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> { self.schedule_drop_mem(arg_scope_id, llarg, arg_ty, None); datum::Datum::new(llarg, - arg_ty, - datum::Lvalue::new("FunctionContext::bind_args")) + arg_ty, + datum::Lvalue::new("FunctionContext::bind_args")) } else { unpack_datum!(bcx, datum::lvalue_scratch_datum(bcx, arg_ty, "", uninit_reason, @@ -1730,7 +1748,7 @@ impl<'blk, 'tcx> FunctionContext<'blk, 'tcx> { self.build_return_block(ret_cx, ret_debug_loc); - debuginfo::clear_source_location(self); + DebugLoc::None.apply(self); self.cleanup(); } @@ -1809,32 +1827,36 @@ pub fn trans_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, decl: &hir::FnDecl, body: &hir::Block, llfndecl: ValueRef, - param_substs: &'tcx Substs<'tcx>, - def_id: DefId, + instance: Instance<'tcx>, inlined_id: ast::NodeId, - fn_ty: FnType, + sig: &ty::FnSig<'tcx>, abi: Abi, closure_env: closure::ClosureEnv) { ccx.stats().n_closures.set(ccx.stats().n_closures.get() + 1); - if collector::collecting_debug_information(ccx) { - ccx.record_translation_item_as_generated( - TransItem::Fn(Instance::new(def_id, param_substs))); + if collector::collecting_debug_information(ccx.shared()) { + ccx.record_translation_item_as_generated(TransItem::Fn(instance)); } let _icx = push_ctxt("trans_closure"); - attributes::emit_uwtable(llfndecl, true); + if !ccx.sess().no_landing_pads() { + attributes::emit_uwtable(llfndecl, true); + } - debug!("trans_closure(..., param_substs={:?})", param_substs); + debug!("trans_closure(..., {})", instance); + + let fn_ty = FnType::new(ccx, abi, sig, &[]); let (arena, fcx): (TypedArena<_>, FunctionContext); arena = TypedArena::new(); - fcx = FunctionContext::new(ccx, llfndecl, fn_ty, Some(def_id), param_substs, &arena); + fcx = FunctionContext::new(ccx, llfndecl, fn_ty, Some((instance, sig, abi)), &arena); if fcx.mir.is_some() { return mir::trans_mir(&fcx); } + debuginfo::fill_scope_map_for_function(&fcx, decl, body, inlined_id); + // cleanup scope for the incoming arguments let fn_cleanup_debug_loc = debuginfo::get_cleanup_debug_loc_for_ast_node( ccx, inlined_id, body.span, true); @@ -1889,10 +1911,8 @@ pub fn trans_closure<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, } } - let ret_debug_loc = DebugLoc::At(fn_cleanup_debug_loc.id, fn_cleanup_debug_loc.span); - // Insert the mandatory first few basic blocks before lltop. - fcx.finish(bcx, ret_debug_loc); + fcx.finish(bcx, fn_cleanup_debug_loc.debug_loc()); } /// Creates an LLVM function corresponding to a source language function. 
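The FunctionContext::new hunk above widens the per-function attribute check so it reports both whether to use MIR trans and whether #[no_debug] is present. A small self-contained sketch of that check, with plain strings standing in for ast::Attribute and the attribute names taken from the hunk:

// Sketch of the (use_mir, no_debug) attribute check; strings stand in for
// ast::Attribute and check_name.
fn check_attrs(attrs: &[&str], default_to_mir: bool) -> (bool, bool) {
    // Depending on the default, the opposite attribute flips the choice.
    let invert = if default_to_mir { "rustc_no_mir" } else { "rustc_mir" };
    let use_mir = default_to_mir ^ attrs.iter().any(|a| *a == invert);
    let no_debug = attrs.iter().any(|a| *a == "no_debug");
    (use_mir, no_debug)
}

fn main() {
    // With MIR trans on by default (-Z orbit), #[rustc_no_mir] opts out.
    assert_eq!(check_attrs(&["rustc_no_mir"], true), (false, false));
    // With MIR trans off by default, #[rustc_mir] opts in, and #[no_debug]
    // additionally suppresses per-function debuginfo.
    assert_eq!(check_attrs(&["rustc_mir", "no_debug"], false), (true, true));
}

The XOR is what lets a single attribute name per direction override whichever default the -Z orbit flag establishes.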
@@ -1905,25 +1925,23 @@ pub fn trans_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let _s = StatRecorder::new(ccx, ccx.tcx().node_path_str(id)); debug!("trans_fn(param_substs={:?})", param_substs); let _icx = push_ctxt("trans_fn"); - let fn_ty = ccx.tcx().node_id_to_type(id); - let fn_ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &fn_ty); - let sig = ccx.tcx().erase_late_bound_regions(fn_ty.fn_sig()); - let sig = infer::normalize_associated_type(ccx.tcx(), &sig); - let abi = fn_ty.fn_abi(); - let fn_ty = FnType::new(ccx, abi, &sig, &[]); let def_id = if let Some(&def_id) = ccx.external_srcs().borrow().get(&id) { def_id } else { ccx.tcx().map.local_def_id(id) }; + let fn_ty = ccx.tcx().lookup_item_type(def_id).ty; + let fn_ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &fn_ty); + let sig = ccx.tcx().erase_late_bound_regions(fn_ty.fn_sig()); + let sig = ccx.tcx().normalize_associated_type(&sig); + let abi = fn_ty.fn_abi(); trans_closure(ccx, decl, body, llfndecl, - param_substs, - def_id, + Instance::new(def_id, param_substs), id, - fn_ty, + &sig, abi, closure::ClosureEnv::NotClosure); } @@ -1939,7 +1957,7 @@ pub fn trans_named_tuple_constructor<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let ccx = bcx.fcx.ccx; let sig = ccx.tcx().erase_late_bound_regions(&ctor_ty.fn_sig()); - let sig = infer::normalize_associated_type(ccx.tcx(), &sig); + let sig = ccx.tcx().normalize_associated_type(&sig); let result_ty = sig.output.unwrap(); // Get location to store the result. If the user does not care about @@ -2009,14 +2027,12 @@ pub fn trans_ctor_shim<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let ctor_ty = monomorphize::apply_param_substs(ccx.tcx(), param_substs, &ctor_ty); let sig = ccx.tcx().erase_late_bound_regions(&ctor_ty.fn_sig()); - let sig = infer::normalize_associated_type(ccx.tcx(), &sig); + let sig = ccx.tcx().normalize_associated_type(&sig); let fn_ty = FnType::new(ccx, Abi::Rust, &sig, &[]); let (arena, fcx): (TypedArena<_>, FunctionContext); arena = TypedArena::new(); - fcx = FunctionContext::new(ccx, llfndecl, fn_ty, - Some(ccx.tcx().map.local_def_id(ctor_id)), - param_substs, &arena); + fcx = FunctionContext::new(ccx, llfndecl, fn_ty, None, &arena); let bcx = fcx.init(false, None); assert!(!fcx.needs_ret_allocas); @@ -2182,7 +2198,8 @@ pub fn update_linkage(ccx: &CrateContext, // `llval` is a translation of an item defined in a separate // compilation unit. This only makes sense if there are at least // two compilation units. - assert!(ccx.sess().opts.cg.codegen_units > 1); + assert!(ccx.sess().opts.cg.codegen_units > 1 || + ccx.sess().opts.debugging_opts.incremental.is_some()); // `llval` is a copy of something defined elsewhere, so use // `AvailableExternallyLinkage` to avoid duplicating code in the // output. @@ -2499,9 +2516,7 @@ pub fn write_metadata<'a, 'tcx>(cx: &SharedCrateContext<'a, 'tcx>, let llmeta = C_bytes_in_context(cx.metadata_llcx(), &compressed[..]); let llconst = C_struct_in_context(cx.metadata_llcx(), &[llmeta], false); - let name = format!("rust_metadata_{}_{}", - cx.link_meta().crate_name, - cx.link_meta().crate_hash); + let name = cx.metadata_symbol_name(); let buf = CString::new(name).unwrap(); let llglobal = unsafe { llvm::LLVMAddGlobal(cx.metadata_llmod(), val_ty(llconst).to_ref(), buf.as_ptr()) @@ -2518,7 +2533,7 @@ pub fn write_metadata<'a, 'tcx>(cx: &SharedCrateContext<'a, 'tcx>, /// Find any symbols that are defined in one compilation unit, but not declared /// in any other compilation unit. Give these symbols internal linkage. 
-fn internalize_symbols(cx: &SharedCrateContext, reachable: &HashSet<&str>) { +fn internalize_symbols(cx: &CrateContextList, reachable: &HashSet<&str>) { unsafe { let mut declared = HashSet::new(); @@ -2573,12 +2588,12 @@ fn internalize_symbols(cx: &SharedCrateContext, reachable: &HashSet<&str>) { // when using MSVC linker. We do this only for data, as linker can fix up // code references on its own. // See #26591, #27438 -fn create_imps(cx: &SharedCrateContext) { +fn create_imps(cx: &CrateContextList) { // The x86 ABI seems to require that leading underscores are added to symbol // names, so we need an extra underscore on 32-bit. There's also a leading // '\x01' here which disables LLVM's symbol mangling (e.g. no extra // underscores added in front). - let prefix = if cx.sess().target.target.target_pointer_width == "32" { + let prefix = if cx.shared().sess().target.target.target_pointer_width == "32" { "\x01__imp__" } else { "\x01__imp_" @@ -2655,10 +2670,10 @@ fn iter_functions(llmod: llvm::ModuleRef) -> ValueIter { /// /// This list is later used by linkers to determine the set of symbols needed to /// be exposed from a dynamic library and it's also encoded into the metadata. -pub fn filter_reachable_ids(ccx: &SharedCrateContext) -> NodeSet { - ccx.reachable().iter().map(|x| *x).filter(|id| { +pub fn filter_reachable_ids(scx: &SharedCrateContext) -> NodeSet { + scx.reachable().iter().map(|x| *x).filter(|id| { // First, only worry about nodes which have a symbol name - ccx.item_symbols().borrow().contains_key(id) + scx.item_symbols().borrow().contains_key(id) }).filter(|&id| { // Next, we want to ignore some FFI functions that are not exposed from // this crate. Reachable FFI functions can be lumped into two @@ -2673,19 +2688,19 @@ pub fn filter_reachable_ids(ccx: &SharedCrateContext) -> NodeSet { // // As a result, if this id is an FFI item (foreign item) then we only // let it through if it's included statically. - match ccx.tcx().map.get(id) { + match scx.tcx().map.get(id) { hir_map::NodeForeignItem(..) => { - ccx.sess().cstore.is_statically_included_foreign_item(id) + scx.sess().cstore.is_statically_included_foreign_item(id) } _ => true, } }).collect() } -pub fn trans_crate<'tcx>(tcx: &TyCtxt<'tcx>, - mir_map: &MirMap<'tcx>, - analysis: ty::CrateAnalysis) - -> CrateTranslation { +pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + mir_map: &MirMap<'tcx>, + analysis: ty::CrateAnalysis) + -> CrateTranslation { let _task = tcx.dep_graph.in_task(DepNode::TransCrate); // Be careful with this krate: obviously it gives access to the @@ -2708,32 +2723,9 @@ pub fn trans_crate<'tcx>(tcx: &TyCtxt<'tcx>, tcx.sess.opts.debug_assertions }; - // Before we touch LLVM, make sure that multithreading is enabled. - unsafe { - use std::sync::Once; - static INIT: Once = Once::new(); - static mut POISONED: bool = false; - INIT.call_once(|| { - if llvm::LLVMStartMultithreaded() != 1 { - // use an extra bool to make sure that all future usage of LLVM - // cannot proceed despite the Once not running more than once. 
- POISONED = true; - } + let link_meta = link::build_link_meta(tcx, name); - ::back::write::configure_llvm(&tcx.sess); - }); - - if POISONED { - bug!("couldn't enable multi-threaded LLVM"); - } - } - - let link_meta = link::build_link_meta(&tcx, name); - - let codegen_units = tcx.sess.opts.cg.codegen_units; - let shared_ccx = SharedCrateContext::new(&link_meta.crate_name, - codegen_units, - tcx, + let shared_ccx = SharedCrateContext::new(tcx, &mir_map, export_map, Sha256::new(), @@ -2742,9 +2734,15 @@ pub fn trans_crate<'tcx>(tcx: &TyCtxt<'tcx>, check_overflow, check_dropflag); + let codegen_units = collect_and_partition_translation_items(&shared_ccx); + let codegen_unit_count = codegen_units.len(); + assert!(tcx.sess.opts.cg.codegen_units == codegen_unit_count || + tcx.sess.opts.debugging_opts.incremental.is_some()); + + let crate_context_list = CrateContextList::new(&shared_ccx, codegen_units); + { - let ccx = shared_ccx.get_ccx(0); - collect_translation_items(&ccx); + let ccx = crate_context_list.get_ccx(0); // Translate all items. See `TransModVisitor` for // details on why we walk in this particular way. @@ -2754,12 +2752,12 @@ pub fn trans_crate<'tcx>(tcx: &TyCtxt<'tcx>, krate.visit_all_items(&mut TransModVisitor { ccx: &ccx }); } - collector::print_collection_results(&ccx); + collector::print_collection_results(ccx.shared()); symbol_names_test::report_symbol_names(&ccx); } - for ccx in shared_ccx.iter() { + for ccx in crate_context_list.iter() { if ccx.sess().opts.debuginfo != NoDebugInfo { debuginfo::finalize(&ccx); } @@ -2808,7 +2806,7 @@ pub fn trans_crate<'tcx>(tcx: &TyCtxt<'tcx>, } } - let modules = shared_ccx.iter() + let modules = crate_context_list.iter() .map(|ccx| ModuleTranslation { llcx: ccx.llcx(), llmod: ccx.llmod() }) .collect(); @@ -2820,28 +2818,35 @@ pub fn trans_crate<'tcx>(tcx: &TyCtxt<'tcx>, reachable_symbols.push("main".to_string()); } - // For the purposes of LTO, we add to the reachable set all of the upstream - // reachable extern fns. These functions are all part of the public ABI of - // the final product, so LTO needs to preserve them. - if sess.lto() { - for cnum in sess.cstore.crates() { - let syms = sess.cstore.reachable_ids(cnum); - reachable_symbols.extend(syms.into_iter().filter(|did| { - sess.cstore.is_extern_item(shared_ccx.tcx(), *did) - }).map(|did| { - sess.cstore.item_symbol(did) - })); - } + if sess.crate_types.borrow().contains(&config::CrateTypeDylib) { + reachable_symbols.push(shared_ccx.metadata_symbol_name()); + } + + // For the purposes of LTO or when creating a cdylib, we add to the + // reachable set all of the upstream reachable extern fns. These functions + // are all part of the public ABI of the final product, so we need to + // preserve them. + // + // Note that this happens even if LTO isn't requested or we're not creating + // a cdylib. In those cases, though, we're not even reading the + // `reachable_symbols` list later on so it should be ok. 
+ for cnum in sess.cstore.crates() { + let syms = sess.cstore.reachable_ids(cnum); + reachable_symbols.extend(syms.into_iter().filter(|did| { + sess.cstore.is_extern_item(shared_ccx.tcx(), *did) + }).map(|did| { + sess.cstore.item_symbol(did) + })); } - if codegen_units > 1 { - internalize_symbols(&shared_ccx, + if codegen_unit_count > 1 { + internalize_symbols(&crate_context_list, &reachable_symbols.iter().map(|x| &x[..]).collect()); } if sess.target.target.options.is_like_msvc && sess.crate_types.borrow().iter().any(|ct| *ct == config::CrateTypeRlib) { - create_imps(&shared_ccx); + create_imps(&crate_context_list); } let metadata_module = ModuleTranslation { @@ -2926,10 +2931,11 @@ impl<'a, 'tcx, 'v> Visitor<'v> for TransItemsWithinModVisitor<'a, 'tcx> { } } -fn collect_translation_items<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>) { - let time_passes = ccx.sess().time_passes(); +fn collect_and_partition_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>) + -> Vec> { + let time_passes = scx.sess().time_passes(); - let collection_mode = match ccx.sess().opts.debugging_opts.print_trans_items { + let collection_mode = match scx.sess().opts.debugging_opts.print_trans_items { Some(ref s) => { let mode_string = s.to_lowercase(); let mode_string = mode_string.trim(); @@ -2940,7 +2946,7 @@ fn collect_translation_items<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>) { let message = format!("Unknown codegen-item collection mode '{}'. \ Falling back to 'lazy' mode.", mode_string); - ccx.sess().warn(&message); + scx.sess().warn(&message); } TransItemCollectionMode::Lazy @@ -2949,24 +2955,81 @@ fn collect_translation_items<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>) { None => TransItemCollectionMode::Lazy }; - let items = time(time_passes, "translation item collection", || { - collector::collect_crate_translation_items(&ccx, collection_mode) + let (items, inlining_map) = time(time_passes, "translation item collection", || { + collector::collect_crate_translation_items(&scx, collection_mode) }); - if ccx.sess().opts.debugging_opts.print_trans_items.is_some() { - let mut item_keys: Vec<_> = items.iter() - .map(|i| i.to_string(ccx)) - .collect(); + let strategy = if scx.sess().opts.debugging_opts.incremental.is_some() { + PartitioningStrategy::PerModule + } else { + PartitioningStrategy::FixedUnitCount(scx.sess().opts.cg.codegen_units) + }; + + let codegen_units = time(time_passes, "codegen unit partitioning", || { + partitioning::partition(scx.tcx(), + items.iter().cloned(), + strategy, + &inlining_map) + }); + + if scx.sess().opts.debugging_opts.print_trans_items.is_some() { + let mut item_to_cgus = HashMap::new(); + + for cgu in &codegen_units { + for (&trans_item, &linkage) in &cgu.items { + item_to_cgus.entry(trans_item) + .or_insert(Vec::new()) + .push((cgu.name.clone(), linkage)); + } + } + + let mut item_keys: Vec<_> = items + .iter() + .map(|i| { + let mut output = i.to_string(scx.tcx()); + output.push_str(" @@"); + let mut empty = Vec::new(); + let mut cgus = item_to_cgus.get_mut(i).unwrap_or(&mut empty); + cgus.as_mut_slice().sort_by_key(|&(ref name, _)| name.clone()); + cgus.dedup(); + for &(ref cgu_name, linkage) in cgus.iter() { + output.push_str(" "); + output.push_str(&cgu_name[..]); + + let linkage_abbrev = match linkage { + llvm::ExternalLinkage => "External", + llvm::AvailableExternallyLinkage => "Available", + llvm::LinkOnceAnyLinkage => "OnceAny", + llvm::LinkOnceODRLinkage => "OnceODR", + llvm::WeakAnyLinkage => "WeakAny", + llvm::WeakODRLinkage => "WeakODR", + llvm::AppendingLinkage => 
"Appending", + llvm::InternalLinkage => "Internal", + llvm::PrivateLinkage => "Private", + llvm::ExternalWeakLinkage => "ExternalWeak", + llvm::CommonLinkage => "Common", + }; + + output.push_str("["); + output.push_str(linkage_abbrev); + output.push_str("]"); + } + output + }) + .collect(); + item_keys.sort(); for item in item_keys { println!("TRANS_ITEM {}", item); } - let mut ccx_map = ccx.translation_items().borrow_mut(); + let mut ccx_map = scx.translation_items().borrow_mut(); for cgi in items { ccx_map.insert(cgi, TransItemState::PredictedButNotGenerated); } } + + codegen_units } diff --git a/src/librustc_trans/builder.rs b/src/librustc_trans/builder.rs index 92fb342497..9f032cdbfe 100644 --- a/src/librustc_trans/builder.rs +++ b/src/librustc_trans/builder.rs @@ -165,8 +165,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { args: &[ValueRef], then: BasicBlockRef, catch: BasicBlockRef, - bundle: Option<&OperandBundleDef>) - -> ValueRef { + bundle: Option<&OperandBundleDef>) -> ValueRef { self.count_insn("invoke"); debug!("Invoke {:?} with args ({})", @@ -176,6 +175,8 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .collect::>() .join(", ")); + check_call("invoke", llfn, args); + let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(0 as *mut _); unsafe { @@ -856,28 +857,7 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { .collect::>() .join(", ")); - let mut fn_ty = val_ty(llfn); - // Strip off pointers - while fn_ty.kind() == llvm::TypeKind::Pointer { - fn_ty = fn_ty.element_type(); - } - - assert!(fn_ty.kind() == llvm::TypeKind::Function, - "builder::call not passed a function"); - - let param_tys = fn_ty.func_params(); - - let iter = param_tys.into_iter() - .zip(args.iter().map(|&v| val_ty(v))); - for (i, (expected_ty, actual_ty)) in iter.enumerate() { - if expected_ty != actual_ty { - bug!("Type mismatch in function call of {:?}. \ - Expected {:?} for param {}, got {:?}", - Value(llfn), - expected_ty, i, actual_ty); - - } - } + check_call("call", llfn, args); let bundle = bundle.as_ref().map(|b| b.raw()).unwrap_or(0 as *mut _); @@ -1121,3 +1101,30 @@ impl<'a, 'tcx> Builder<'a, 'tcx> { } } } + +fn check_call(typ: &str, llfn: ValueRef, args: &[ValueRef]) { + if cfg!(debug_assertions) { + let mut fn_ty = val_ty(llfn); + // Strip off pointers + while fn_ty.kind() == llvm::TypeKind::Pointer { + fn_ty = fn_ty.element_type(); + } + + assert!(fn_ty.kind() == llvm::TypeKind::Function, + "builder::{} not passed a function", typ); + + let param_tys = fn_ty.func_params(); + + let iter = param_tys.into_iter() + .zip(args.iter().map(|&v| val_ty(v))); + for (i, (expected_ty, actual_ty)) in iter.enumerate() { + if expected_ty != actual_ty { + bug!("Type mismatch in function call of {:?}. 
\ + Expected {:?} for param {}, got {:?}", + Value(llfn), + expected_ty, i, actual_ty); + + } + } + } +} diff --git a/src/librustc_trans/callee.rs b/src/librustc_trans/callee.rs index f86dd2d281..01e0d45348 100644 --- a/src/librustc_trans/callee.rs +++ b/src/librustc_trans/callee.rs @@ -22,9 +22,7 @@ use back::symbol_names; use llvm::{self, ValueRef, get_params}; use middle::cstore::LOCAL_CRATE; use rustc::hir::def_id::DefId; -use rustc::infer; use rustc::ty::subst; -use rustc::ty::subst::{Substs}; use rustc::traits; use rustc::hir::map as hir_map; use abi::{Abi, FnType}; @@ -36,8 +34,7 @@ use build::*; use cleanup; use cleanup::CleanupMethods; use closure; -use common::{self, Block, Result, CrateContext, FunctionContext}; -use common::{C_uint, C_undef}; +use common::{self, Block, Result, CrateContext, FunctionContext, C_undef}; use consts; use datum::*; use debuginfo::DebugLoc; @@ -46,7 +43,7 @@ use expr; use glue; use inline; use intrinsic; -use machine::{llalign_of_min, llsize_of_store}; +use machine::llalign_of_min; use meth; use monomorphize::{self, Instance}; use type_::Type; @@ -60,8 +57,6 @@ use syntax::codemap::DUMMY_SP; use syntax::errors; use syntax::ptr::P; -use std::cmp; - #[derive(Debug)] pub enum CalleeData { /// Constructor for enum variant/tuple-like-struct. @@ -102,7 +97,7 @@ impl<'tcx> Callee<'tcx> { /// Trait or impl method. pub fn method<'blk>(bcx: Block<'blk, 'tcx>, method: ty::MethodCallee<'tcx>) -> Callee<'tcx> { - let substs = bcx.tcx().mk_substs(bcx.fcx.monomorphize(&method.substs)); + let substs = bcx.fcx.monomorphize(&method.substs); Callee::def(bcx.ccx(), method.def_id, substs) } @@ -156,8 +151,8 @@ impl<'tcx> Callee<'tcx> { let method_item = tcx.impl_or_trait_item(def_id); let trait_id = method_item.container().id(); let trait_ref = ty::Binder(substs.to_trait_ref(tcx, trait_id)); - let trait_ref = infer::normalize_associated_type(tcx, &trait_ref); - match common::fulfill_obligation(ccx, DUMMY_SP, trait_ref) { + let trait_ref = tcx.normalize_associated_type(&trait_ref); + match common::fulfill_obligation(ccx.shared(), DUMMY_SP, trait_ref) { traits::VtableImpl(vtable_impl) => { let impl_did = vtable_impl.impl_def_id; let mname = tcx.item_name(def_id); @@ -184,19 +179,19 @@ impl<'tcx> Callee<'tcx> { let method_ty = def_ty(tcx, def_id, substs); let fn_ptr_ty = match method_ty.sty { - ty::TyFnDef(_, _, fty) => tcx.mk_ty(ty::TyFnPtr(fty)), + ty::TyFnDef(_, _, fty) => tcx.mk_fn_ptr(fty), _ => bug!("expected fn item type, found {}", method_ty) }; Callee::ptr(immediate_rvalue(llfn, fn_ptr_ty)) } - traits::VtableFnPointer(fn_ty) => { + traits::VtableFnPointer(vtable_fn_pointer) => { let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_id).unwrap(); - let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, fn_ty); + let llfn = trans_fn_pointer_shim(ccx, trait_closure_kind, vtable_fn_pointer.fn_ty); let method_ty = def_ty(tcx, def_id, substs); let fn_ptr_ty = match method_ty.sty { - ty::TyFnDef(_, _, fty) => tcx.mk_ty(ty::TyFnPtr(fty)), + ty::TyFnDef(_, _, fty) => tcx.mk_fn_ptr(fty), _ => bug!("expected fn item type, found {}", method_ty) }; @@ -204,8 +199,7 @@ impl<'tcx> Callee<'tcx> { } traits::VtableObject(ref data) => { Callee { - data: Virtual(traits::get_vtable_index_of_object_method( - tcx, data, def_id)), + data: Virtual(tcx.get_vtable_index_of_object_method(data, def_id)), ty: def_ty(tcx, def_id, substs) } } @@ -222,7 +216,7 @@ impl<'tcx> Callee<'tcx> { extra_args: &[Ty<'tcx>]) -> FnType { let abi = self.ty.fn_abi(); let sig = 
ccx.tcx().erase_late_bound_regions(self.ty.fn_sig()); - let sig = infer::normalize_associated_type(ccx.tcx(), &sig); + let sig = ccx.tcx().normalize_associated_type(&sig); let mut fn_ty = FnType::unadjusted(ccx, abi, &sig, extra_args); if let Virtual(_) = self.data { // Don't pass the vtable, it's not an argument of the virtual fn. @@ -255,7 +249,7 @@ impl<'tcx> Callee<'tcx> { pub fn reify<'a>(self, ccx: &CrateContext<'a, 'tcx>) -> Datum<'tcx, Rvalue> { let fn_ptr_ty = match self.ty.sty { - ty::TyFnDef(_, _, f) => ccx.tcx().mk_ty(ty::TyFnPtr(f)), + ty::TyFnDef(_, _, f) => ccx.tcx().mk_fn_ptr(f), _ => self.ty }; match self.data { @@ -278,10 +272,10 @@ impl<'tcx> Callee<'tcx> { } /// Given a DefId and some Substs, produces the monomorphic item type. -fn def_ty<'tcx>(tcx: &TyCtxt<'tcx>, - def_id: DefId, - substs: &'tcx subst::Substs<'tcx>) - -> Ty<'tcx> { +fn def_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + def_id: DefId, + substs: &'tcx subst::Substs<'tcx>) + -> Ty<'tcx> { let ty = tcx.lookup_item_type(def_id).ty; monomorphize::apply_param_substs(tcx, substs, &ty) } @@ -362,7 +356,7 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( } }; let sig = tcx.erase_late_bound_regions(sig); - let sig = infer::normalize_associated_type(ccx.tcx(), &sig); + let sig = ccx.tcx().normalize_associated_type(&sig); let tuple_input_ty = tcx.mk_tup(sig.inputs.to_vec()); let sig = ty::FnSig { inputs: vec![bare_fn_ty_maybe_ref, @@ -371,11 +365,11 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( variadic: false }; let fn_ty = FnType::new(ccx, Abi::RustCall, &sig, &[]); - let tuple_fn_ty = tcx.mk_fn_ptr(ty::BareFnTy { + let tuple_fn_ty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Normal, abi: Abi::RustCall, sig: ty::Binder(sig) - }); + })); debug!("tuple_fn_ty: {:?}", tuple_fn_ty); // @@ -386,10 +380,9 @@ pub fn trans_fn_pointer_shim<'a, 'tcx>( let llfn = declare::define_internal_fn(ccx, &function_name, tuple_fn_ty); // - let empty_substs = tcx.mk_substs(Substs::empty()); let (block_arena, fcx): (TypedArena<_>, FunctionContext); block_arena = TypedArena::new(); - fcx = FunctionContext::new(ccx, llfn, fn_ty, None, empty_substs, &block_arena); + fcx = FunctionContext::new(ccx, llfn, fn_ty, None, &block_arena); let mut bcx = fcx.init(false, None); let llargs = get_params(fcx.llfn); @@ -446,7 +439,7 @@ fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // def_id to the local id of the inlined copy. let def_id = inline::maybe_instantiate_inline(ccx, def_id); - fn is_named_tuple_constructor(tcx: &TyCtxt, def_id: DefId) -> bool { + fn is_named_tuple_constructor(tcx: TyCtxt, def_id: DefId) -> bool { let node_id = match tcx.map.as_local_node_id(def_id) { Some(n) => n, None => { return false; } @@ -480,7 +473,7 @@ fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let fn_ptr_ty = match fn_ty.sty { ty::TyFnDef(_, _, fty) => { // Create a fn pointer with the substituted signature. - tcx.mk_ty(ty::TyFnPtr(fty)) + tcx.mk_fn_ptr(fty) } _ => bug!("expected fn item type, found {}", fn_ty) }; @@ -491,14 +484,14 @@ fn get_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // Find the actual function pointer. let ty = ccx.tcx().lookup_item_type(def_id).ty; let fn_ptr_ty = match ty.sty { - ty::TyFnDef(_, _, fty) => { + ty::TyFnDef(_, _, ref fty) => { // Create a fn pointer with the normalized signature. 
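// The TyFnDef -> TyFnPtr conversions above have a direct surface-Rust
// analogue: a zero-sized fn item coerces to an ordinary function pointer.
// A minimal standalone sketch (plain Rust, no rustc-internal APIs):
fn square(x: u32) -> u32 { x * x }

fn main() {
    // `square` has its own unique fn-item type; the annotation forces the
    // coercion to a function pointer, the counterpart of tcx.mk_fn_ptr(fty).
    let as_ptr: fn(u32) -> u32 = square;
    assert_eq!(as_ptr(7), 49);
}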
- tcx.mk_fn_ptr(infer::normalize_associated_type(tcx, fty)) + tcx.mk_fn_ptr(tcx.normalize_associated_type(fty)) } _ => bug!("expected fn item type, found {}", ty) }; - let instance = Instance::mono(ccx.tcx(), def_id); + let instance = Instance::mono(ccx.shared(), def_id); if let Some(&llfn) = ccx.instances().borrow().get(&instance) { return immediate_rvalue(llfn, fn_ptr_ty); } @@ -625,7 +618,7 @@ fn trans_call_inner<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let abi = callee.ty.fn_abi(); let sig = callee.ty.fn_sig(); let output = bcx.tcx().erase_late_bound_regions(&sig.output()); - let output = infer::normalize_associated_type(bcx.tcx(), &output); + let output = bcx.tcx().normalize_associated_type(&output); let extra_args = match args { ArgExprs(args) if abi != Abi::RustCall => { @@ -712,49 +705,16 @@ fn trans_call_inner<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let (llret, mut bcx) = base::invoke(bcx, llfn, &llargs, debug_loc); if !bcx.unreachable.get() { fn_ty.apply_attrs_callsite(llret); - } - - // If the function we just called does not use an outpointer, - // store the result into the rust outpointer. Cast the outpointer - // type to match because some ABIs will use a different type than - // the Rust type. e.g., a {u32,u32} struct could be returned as - // u64. - if !fn_ty.ret.is_ignore() && !fn_ty.ret.is_indirect() { - if let Some(llforeign_ret_ty) = fn_ty.ret.cast { - let llrust_ret_ty = fn_ty.ret.original_ty; - let llretslot = opt_llretslot.unwrap(); - - // The actual return type is a struct, but the ABI - // adaptation code has cast it into some scalar type. The - // code that follows is the only reliable way I have - // found to do a transform like i64 -> {i32,i32}. - // Basically we dump the data onto the stack then memcpy it. - // - // Other approaches I tried: - // - Casting rust ret pointer to the foreign type and using Store - // is (a) unsafe if size of foreign type > size of rust type and - // (b) runs afoul of strict aliasing rules, yielding invalid - // assembly under -O (specifically, the store gets removed). - // - Truncating foreign type to correct integral type and then - // bitcasting to the struct type yields invalid cast errors. - let llscratch = base::alloca(bcx, llforeign_ret_ty, "__cast"); - base::call_lifetime_start(bcx, llscratch); - Store(bcx, llret, llscratch); - let llscratch_i8 = PointerCast(bcx, llscratch, Type::i8(ccx).ptr_to()); - let llretptr_i8 = PointerCast(bcx, llretslot, Type::i8(ccx).ptr_to()); - let llrust_size = llsize_of_store(ccx, llrust_ret_ty); - let llforeign_align = llalign_of_min(ccx, llforeign_ret_ty); - let llrust_align = llalign_of_min(ccx, llrust_ret_ty); - let llalign = cmp::min(llforeign_align, llrust_align); - debug!("llrust_size={}", llrust_size); - if !bcx.unreachable.get() { - base::call_memcpy(&B(bcx), llretptr_i8, llscratch_i8, - C_uint(ccx, llrust_size), llalign as u32); + // If the function we just called does not use an outpointer, + // store the result into the rust outpointer. Cast the outpointer + // type to match because some ABIs will use a different type than + // the Rust type. e.g., a {u32,u32} struct could be returned as + // u64. 
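// A standalone illustration of the situation described in the comment above:
// the callee may hand back a { u32, u32 } packed into one u64, so the caller
// has to reinterpret the scalar before it can store the Rust-level pair.
// Plain integer shifts stand in for the real cast-and-store machinery:
fn pack(lo: u32, hi: u32) -> u64 {
    (lo as u64) | ((hi as u64) << 32)
}

fn unpack(raw: u64) -> (u32, u32) {
    (raw as u32, (raw >> 32) as u32)
}

fn main() {
    let raw = pack(1, 2);            // what actually travels through the ABI
    assert_eq!(unpack(raw), (1, 2)); // what the Rust type of the call expects
}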
+ if !fn_ty.ret.is_indirect() { + if let Some(llretslot) = opt_llretslot { + fn_ty.ret.store(&bcx.build(), llret, llretslot); } - base::call_lifetime_end(bcx, llscratch); - } else if let Some(llretslot) = opt_llretslot { - base::store_ty(bcx, llret, llretslot, output.unwrap()); } } diff --git a/src/librustc_trans/cleanup.rs b/src/librustc_trans/cleanup.rs index 514e6bda59..3081f055bb 100644 --- a/src/librustc_trans/cleanup.rs +++ b/src/librustc_trans/cleanup.rs @@ -1176,7 +1176,7 @@ impl<'tcx> Cleanup<'tcx> for LifetimeEnd { } } -pub fn temporary_scope(tcx: &TyCtxt, +pub fn temporary_scope(tcx: TyCtxt, id: ast::NodeId) -> ScopeId { match tcx.region_maps.temporary_scope(id) { @@ -1191,7 +1191,7 @@ pub fn temporary_scope(tcx: &TyCtxt, } } -pub fn var_scope(tcx: &TyCtxt, +pub fn var_scope(tcx: TyCtxt, id: ast::NodeId) -> ScopeId { let r = AstScope(tcx.region_maps.var_scope(id).node_id(&tcx.region_maps)); diff --git a/src/librustc_trans/closure.rs b/src/librustc_trans/closure.rs index 6bd5fd355a..1c393f8091 100644 --- a/src/librustc_trans/closure.rs +++ b/src/librustc_trans/closure.rs @@ -12,8 +12,6 @@ use arena::TypedArena; use back::symbol_names; use llvm::{ValueRef, get_param, get_params}; use rustc::hir::def_id::DefId; -use rustc::infer; -use rustc::traits::ProjectionMode; use abi::{Abi, FnType}; use adt; use attributes; @@ -119,10 +117,10 @@ impl ClosureEnv { } } -fn get_self_type<'tcx>(tcx: &TyCtxt<'tcx>, - closure_id: DefId, - fn_ty: Ty<'tcx>) - -> Ty<'tcx> { +fn get_self_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + closure_id: DefId, + fn_ty: Ty<'tcx>) + -> Ty<'tcx> { match tcx.closure_kind(closure_id) { ty::ClosureKind::Fn => { tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic), fn_ty) @@ -138,13 +136,13 @@ fn get_self_type<'tcx>(tcx: &TyCtxt<'tcx>, /// necessary. If the ID does not correspond to a closure ID, returns None. fn get_or_create_closure_declaration<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, closure_id: DefId, - substs: &ty::ClosureSubsts<'tcx>) + substs: ty::ClosureSubsts<'tcx>) -> ValueRef { // Normalize type so differences in regions and typedefs don't cause // duplicate declarations let tcx = ccx.tcx(); - let substs = tcx.erase_regions(substs); - let instance = Instance::new(closure_id, &substs.func_substs); + let substs = tcx.erase_regions(&substs); + let instance = Instance::new(closure_id, substs.func_substs); if let Some(&llfn) = ccx.instances().borrow().get(&instance) { debug!("get_or_create_closure_declaration(): found closure {:?}: {:?}", @@ -155,12 +153,11 @@ fn get_or_create_closure_declaration<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let symbol = symbol_names::exported_name(ccx, &instance); // Compute the rust-call form of the closure call method. 
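// The "rust-call" form mentioned above passes the closure environment first
// and the remaining arguments bundled into a single tuple. A rough analogue
// with a hand-rolled trait (the real Fn* traits are unstable internals):
trait CallWithTuple<Args> {
    type Output;
    fn call_with_tuple(&self, args: Args) -> Self::Output;
}

impl<F: Fn(u32, u32) -> u32> CallWithTuple<(u32, u32)> for F {
    type Output = u32;
    fn call_with_tuple(&self, args: (u32, u32)) -> u32 {
        // `self` plays the role of the environment; the tuple is unpacked
        // into the individual parameters at the call site.
        self(args.0, args.1)
    }
}

fn main() {
    let add = |a: u32, b: u32| a + b;
    assert_eq!(add.call_with_tuple((2, 3)), 5);
}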
- let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables, ProjectionMode::Any); - let sig = &infcx.closure_type(closure_id, &substs).sig; + let sig = &tcx.closure_type(closure_id, substs).sig; let sig = tcx.erase_late_bound_regions(sig); - let sig = infer::normalize_associated_type(tcx, &sig); - let closure_type = tcx.mk_closure_from_closure_substs(closure_id, Box::new(substs)); - let function_type = tcx.mk_fn_ptr(ty::BareFnTy { + let sig = tcx.normalize_associated_type(&sig); + let closure_type = tcx.mk_closure_from_closure_substs(closure_id, substs); + let function_type = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Normal, abi: Abi::RustCall, sig: ty::Binder(ty::FnSig { @@ -169,7 +166,7 @@ fn get_or_create_closure_declaration<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, output: sig.output, variadic: false }) - }); + })); let llfn = declare::define_internal_fn(ccx, &symbol, function_type); // set an inline hint for all closures @@ -193,7 +190,7 @@ pub fn trans_closure_expr<'a, 'tcx>(dest: Dest<'a, 'tcx>, body: &hir::Block, id: ast::NodeId, closure_def_id: DefId, // (*) - closure_substs: &ty::ClosureSubsts<'tcx>) + closure_substs: ty::ClosureSubsts<'tcx>) -> Option> { // (*) Note that in the case of inlined functions, the `closure_def_id` will be the @@ -220,30 +217,26 @@ pub fn trans_closure_expr<'a, 'tcx>(dest: Dest<'a, 'tcx>, // this function (`trans_closure`) is invoked at the point // of the closure expression. - let infcx = infer::normalizing_infer_ctxt(ccx.tcx(), &ccx.tcx().tables, ProjectionMode::Any); - let function_type = infcx.closure_type(closure_def_id, closure_substs); - - let sig = tcx.erase_late_bound_regions(&function_type.sig); - let sig = infer::normalize_associated_type(ccx.tcx(), &sig); + let sig = &tcx.closure_type(closure_def_id, closure_substs).sig; + let sig = tcx.erase_late_bound_regions(sig); + let sig = tcx.normalize_associated_type(&sig); let closure_type = tcx.mk_closure_from_closure_substs(closure_def_id, - Box::new(closure_substs.clone())); + closure_substs); let sig = ty::FnSig { inputs: Some(get_self_type(tcx, closure_def_id, closure_type)) .into_iter().chain(sig.inputs).collect(), output: sig.output, variadic: false }; - let fn_ty = FnType::new(ccx, Abi::RustCall, &sig, &[]); trans_closure(ccx, decl, body, llfn, - param_substs, - closure_def_id, + Instance::new(closure_def_id, param_substs), id, - fn_ty, + &sig, Abi::RustCall, ClosureEnv::Closure(closure_def_id, id)); @@ -290,7 +283,7 @@ pub fn trans_closure_method<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>, -> ValueRef { // If this is a closure, redirect to it. - let llfn = get_or_create_closure_declaration(ccx, closure_def_id, &substs); + let llfn = get_or_create_closure_declaration(ccx, closure_def_id, substs); // If the closure is a Fn closure, but a FnOnce is needed (etc), // then adapt the self type @@ -346,21 +339,21 @@ fn trans_fn_once_adapter_shim<'a, 'tcx>( closure_def_id, substs, Value(llreffn)); let tcx = ccx.tcx(); - let infcx = infer::normalizing_infer_ctxt(ccx.tcx(), &ccx.tcx().tables, ProjectionMode::Any); // Find a version of the closure type. Substitute static for the // region since it doesn't really matter. - let closure_ty = tcx.mk_closure_from_closure_substs(closure_def_id, Box::new(substs.clone())); + let closure_ty = tcx.mk_closure_from_closure_substs(closure_def_id, substs); let ref_closure_ty = tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic), closure_ty); // Make a version with the type of by-ref closure. 
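// The adapter shim built below lets a by-ref closure (Fn) be driven through a
// by-value (FnOnce) entry point. The same adaptation works in surface Rust
// because Fn closures can also be called through a shared reference:
fn call_once_by_value<F: FnOnce() -> u32>(f: F) -> u32 {
    f()
}

fn main() {
    let closure = || 42;
    // Passing `&closure` consumes only the reference, not the closure itself,
    // which is essentially what the by-ref-to-by-value shim arranges.
    assert_eq!(call_once_by_value(&closure), 42);
    assert_eq!(call_once_by_value(closure), 42);
}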
- let ty::ClosureTy { unsafety, abi, mut sig } = infcx.closure_type(closure_def_id, &substs); + let ty::ClosureTy { unsafety, abi, mut sig } = + tcx.closure_type(closure_def_id, substs); sig.0.inputs.insert(0, ref_closure_ty); // sig has no self type as of yet - let llref_fn_ty = tcx.mk_fn_ptr(ty::BareFnTy { + let llref_fn_ty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { unsafety: unsafety, abi: abi, sig: sig.clone() - }); + })); debug!("trans_fn_once_adapter_shim: llref_fn_ty={:?}", llref_fn_ty); @@ -371,14 +364,14 @@ fn trans_fn_once_adapter_shim<'a, 'tcx>( sig.0.inputs[0] = closure_ty; let sig = tcx.erase_late_bound_regions(&sig); - let sig = infer::normalize_associated_type(ccx.tcx(), &sig); + let sig = tcx.normalize_associated_type(&sig); let fn_ty = FnType::new(ccx, abi, &sig, &[]); - let llonce_fn_ty = tcx.mk_fn_ptr(ty::BareFnTy { + let llonce_fn_ty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { unsafety: unsafety, abi: abi, sig: ty::Binder(sig) - }); + })); // Create the by-value helper. let function_name = @@ -387,7 +380,7 @@ fn trans_fn_once_adapter_shim<'a, 'tcx>( let (block_arena, fcx): (TypedArena<_>, FunctionContext); block_arena = TypedArena::new(); - fcx = FunctionContext::new(ccx, lloncefn, fn_ty, None, substs.func_substs, &block_arena); + fcx = FunctionContext::new(ccx, lloncefn, fn_ty, None, &block_arena); let mut bcx = fcx.init(false, None); diff --git a/src/librustc_trans/collector.rs b/src/librustc_trans/collector.rs index c72a1a6bef..d278c3c832 100644 --- a/src/librustc_trans/collector.rs +++ b/src/librustc_trans/collector.rs @@ -196,27 +196,24 @@ use rustc::hir::def_id::DefId; use rustc::middle::lang_items::{ExchangeFreeFnLangItem, ExchangeMallocFnLangItem}; use rustc::traits; use rustc::ty::subst::{self, Substs, Subst}; -use rustc::ty::{self, Ty, TypeFoldable}; +use rustc::ty::{self, TypeFoldable, TyCtxt}; use rustc::ty::adjustment::CustomCoerceUnsized; use rustc::mir::repr as mir; use rustc::mir::visit as mir_visit; use rustc::mir::visit::Visitor as MirVisitor; -use syntax::ast::{self, NodeId}; +use syntax::abi::Abi; use syntax::codemap::DUMMY_SP; use syntax::errors; -use syntax::parse::token; - use base::custom_coerce_unsize_info; -use context::CrateContext; -use common::{fulfill_obligation, normalize_and_test_predicates, - type_is_sized}; -use glue; +use context::SharedCrateContext; +use common::{fulfill_obligation, normalize_and_test_predicates, type_is_sized}; +use glue::{self, DropGlueKind}; use meth; use monomorphize::{self, Instance}; use util::nodemap::{FnvHashSet, FnvHashMap, DefIdMap}; -use std::hash::{Hash, Hasher}; +use trans_item::{TransItem, type_to_string, def_id_to_string}; #[derive(PartialEq, Eq, Hash, Clone, Copy, Debug)] pub enum TransItemCollectionMode { @@ -224,56 +221,81 @@ pub enum TransItemCollectionMode { Lazy } -#[derive(PartialEq, Eq, Clone, Copy, Debug)] -pub enum TransItem<'tcx> { - DropGlue(Ty<'tcx>), - Fn(Instance<'tcx>), - Static(NodeId) +/// Maps every translation item to all translation items it references in its +/// body. +pub struct InliningMap<'tcx> { + // Maps a source translation item to a range of target translation items + // that are potentially inlined by LLVM into the source. + // The two numbers in the tuple are the start (inclusive) and + // end index (exclusive) within the `targets` vecs. 
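// The index/targets layout of InliningMap is a flat-range trick: all targets
// live in one Vec and each source only stores a (start, end) pair, avoiding a
// per-entry allocation. A small standalone sketch with plain std types
// (HashMap instead of the rustc FnvHashMap, &str keys just for illustration):
use std::collections::HashMap;
use std::hash::Hash;

struct RangeMap<K, V> {
    index: HashMap<K, (usize, usize)>,
    targets: Vec<V>,
}

impl<K: Hash + Eq, V> RangeMap<K, V> {
    fn new() -> Self {
        RangeMap { index: HashMap::new(), targets: Vec::new() }
    }

    fn record<I: IntoIterator<Item = V>>(&mut self, key: K, values: I) {
        let start = self.targets.len();
        self.targets.extend(values);
        let end = self.targets.len();
        self.index.insert(key, (start, end));
    }

    fn get(&self, key: &K) -> &[V] {
        match self.index.get(key) {
            Some(&(start, end)) => &self.targets[start..end],
            None => &[],
        }
    }
}

fn main() {
    let mut map = RangeMap::new();
    map.record("caller", vec!["callee_a", "callee_b"]);
    assert_eq!(map.get(&"caller"), &["callee_a", "callee_b"][..]);
}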
+ index: FnvHashMap, (usize, usize)>, + targets: Vec>, } -impl<'tcx> Hash for TransItem<'tcx> { - fn hash(&self, s: &mut H) { - match *self { - TransItem::DropGlue(t) => { - 0u8.hash(s); - t.hash(s); - }, - TransItem::Fn(instance) => { - 1u8.hash(s); - instance.def.hash(s); - (instance.substs as *const _ as usize).hash(s); - } - TransItem::Static(node_id) => { - 2u8.hash(s); - node_id.hash(s); +impl<'tcx> InliningMap<'tcx> { + + fn new() -> InliningMap<'tcx> { + InliningMap { + index: FnvHashMap(), + targets: Vec::new(), + } + } + + fn record_inlining_canditates(&mut self, + source: TransItem<'tcx>, + targets: I) + where I: Iterator> + { + assert!(!self.index.contains_key(&source)); + + let start_index = self.targets.len(); + self.targets.extend(targets); + let end_index = self.targets.len(); + self.index.insert(source, (start_index, end_index)); + } + + // Internally iterate over all items referenced by `source` which will be + // made available for inlining. + pub fn with_inlining_candidates(&self, source: TransItem<'tcx>, mut f: F) + where F: FnMut(TransItem<'tcx>) { + if let Some(&(start_index, end_index)) = self.index.get(&source) + { + for candidate in &self.targets[start_index .. end_index] { + f(*candidate) } - }; + } } } -pub fn collect_crate_translation_items<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +pub fn collect_crate_translation_items<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, mode: TransItemCollectionMode) - -> FnvHashSet> { + -> (FnvHashSet>, + InliningMap<'tcx>) { // We are not tracking dependencies of this pass as it has to be re-executed // every time no matter what. - ccx.tcx().dep_graph.with_ignore(|| { - let roots = collect_roots(ccx, mode); + scx.tcx().dep_graph.with_ignore(|| { + let roots = collect_roots(scx, mode); debug!("Building translation item graph, beginning at roots"); let mut visited = FnvHashSet(); let mut recursion_depths = DefIdMap(); + let mut inlining_map = InliningMap::new(); for root in roots { - collect_items_rec(ccx, root, &mut visited, &mut recursion_depths); + collect_items_rec(scx, + root, + &mut visited, + &mut recursion_depths, + &mut inlining_map); } - visited + (visited, inlining_map) }) } // Find all non-generic items by walking the HIR. These items serve as roots to // start monomorphizing from. -fn collect_roots<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +fn collect_roots<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, mode: TransItemCollectionMode) -> Vec> { debug!("Collecting roots"); @@ -281,77 +303,121 @@ fn collect_roots<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, { let mut visitor = RootCollector { - ccx: ccx, + scx: scx, mode: mode, output: &mut roots, enclosing_item: None, }; - ccx.tcx().map.krate().visit_all_items(&mut visitor); + scx.tcx().map.krate().visit_all_items(&mut visitor); } roots } // Collect all monomorphized translation items reachable from `starting_point` -fn collect_items_rec<'a, 'tcx: 'a>(ccx: &CrateContext<'a, 'tcx>, +fn collect_items_rec<'a, 'tcx: 'a>(scx: &SharedCrateContext<'a, 'tcx>, starting_point: TransItem<'tcx>, visited: &mut FnvHashSet>, - recursion_depths: &mut DefIdMap) { + recursion_depths: &mut DefIdMap, + inlining_map: &mut InliningMap<'tcx>) { if !visited.insert(starting_point.clone()) { // We've been here already, no need to search again. 
return; } - debug!("BEGIN collect_items_rec({})", starting_point.to_string(ccx)); + debug!("BEGIN collect_items_rec({})", starting_point.to_string(scx.tcx())); let mut neighbors = Vec::new(); let recursion_depth_reset; match starting_point { TransItem::DropGlue(t) => { - find_drop_glue_neighbors(ccx, t, &mut neighbors); + find_drop_glue_neighbors(scx, t, &mut neighbors); recursion_depth_reset = None; } - TransItem::Static(_) => { + TransItem::Static(node_id) => { + let def_id = scx.tcx().map.local_def_id(node_id); + let ty = scx.tcx().lookup_item_type(def_id).ty; + let ty = glue::get_drop_glue_type(scx.tcx(), ty); + neighbors.push(TransItem::DropGlue(DropGlueKind::Ty(ty))); + recursion_depth_reset = None; + + // Scan the MIR in order to find function calls, closures, and + // drop-glue + let mir = errors::expect(scx.sess().diagnostic(), scx.get_mir(def_id), + || format!("Could not find MIR for static: {:?}", def_id)); + + let empty_substs = scx.empty_substs_for_def_id(def_id); + let mut visitor = MirNeighborCollector { + scx: scx, + mir: &mir, + output: &mut neighbors, + param_substs: empty_substs + }; + + visitor.visit_mir(&mir); + for promoted in &mir.promoted { + visitor.visit_mir(promoted); + } } TransItem::Fn(instance) => { // Keep track of the monomorphization recursion depth - recursion_depth_reset = Some(check_recursion_limit(ccx, + recursion_depth_reset = Some(check_recursion_limit(scx.tcx(), instance, recursion_depths)); // Scan the MIR in order to find function calls, closures, and // drop-glue - let mir = errors::expect(ccx.sess().diagnostic(), ccx.get_mir(instance.def), + let mir = errors::expect(scx.sess().diagnostic(), scx.get_mir(instance.def), || format!("Could not find MIR for function: {}", instance)); let mut visitor = MirNeighborCollector { - ccx: ccx, + scx: scx, mir: &mir, output: &mut neighbors, param_substs: instance.substs }; visitor.visit_mir(&mir); + for promoted in &mir.promoted { + visitor.visit_mir(promoted); + } } } + record_inlining_canditates(scx.tcx(), starting_point, &neighbors[..], inlining_map); + for neighbour in neighbors { - collect_items_rec(ccx, neighbour, visited, recursion_depths); + collect_items_rec(scx, neighbour, visited, recursion_depths, inlining_map); } if let Some((def_id, depth)) = recursion_depth_reset { recursion_depths.insert(def_id, depth); } - debug!("END collect_items_rec({})", starting_point.to_string(ccx)); + debug!("END collect_items_rec({})", starting_point.to_string(scx.tcx())); } -fn check_recursion_limit<'a, 'tcx: 'a>(ccx: &CrateContext<'a, 'tcx>, - instance: Instance<'tcx>, - recursion_depths: &mut DefIdMap) - -> (DefId, usize) { +fn record_inlining_canditates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + caller: TransItem<'tcx>, + callees: &[TransItem<'tcx>], + inlining_map: &mut InliningMap<'tcx>) { + let is_inlining_candidate = |trans_item: &TransItem<'tcx>| { + trans_item.is_from_extern_crate() || trans_item.requests_inline(tcx) + }; + + let inlining_candidates = callees.into_iter() + .map(|x| *x) + .filter(is_inlining_candidate); + + inlining_map.record_inlining_canditates(caller, inlining_candidates); +} + +fn check_recursion_limit<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + instance: Instance<'tcx>, + recursion_depths: &mut DefIdMap) + -> (DefId, usize) { let recursion_depth = recursion_depths.get(&instance.def) .map(|x| *x) .unwrap_or(0); @@ -360,13 +426,13 @@ fn check_recursion_limit<'a, 'tcx: 'a>(ccx: &CrateContext<'a, 'tcx>, // Code that needs to instantiate the same function recursively // more than the recursion limit 
is assumed to be causing an // infinite expansion. - if recursion_depth > ccx.sess().recursion_limit.get() { + if recursion_depth > tcx.sess.recursion_limit.get() { let error = format!("reached the recursion limit while instantiating `{}`", instance); - if let Some(node_id) = ccx.tcx().map.as_local_node_id(instance.def) { - ccx.sess().span_fatal(ccx.tcx().map.span(node_id), &error); + if let Some(node_id) = tcx.map.as_local_node_id(instance.def) { + tcx.sess.span_fatal(tcx.map.span(node_id), &error); } else { - ccx.sess().fatal(&error); + tcx.sess.fatal(&error); } } @@ -376,7 +442,7 @@ fn check_recursion_limit<'a, 'tcx: 'a>(ccx: &CrateContext<'a, 'tcx>, } struct MirNeighborCollector<'a, 'tcx: 'a> { - ccx: &'a CrateContext<'a, 'tcx>, + scx: &'a SharedCrateContext<'a, 'tcx>, mir: &'a mir::Mir<'tcx>, output: &'a mut Vec>, param_substs: &'tcx Substs<'tcx> @@ -390,8 +456,8 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { match *rvalue { mir::Rvalue::Aggregate(mir::AggregateKind::Closure(def_id, ref substs), _) => { - assert!(can_have_local_instance(self.ccx, def_id)); - let trans_item = create_fn_trans_item(self.ccx, + assert!(can_have_local_instance(self.scx.tcx(), def_id)); + let trans_item = create_fn_trans_item(self.scx.tcx(), def_id, substs.func_substs, self.param_substs); @@ -401,21 +467,21 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { // have to instantiate all methods of the trait being cast to, so we // can build the appropriate vtable. mir::Rvalue::Cast(mir::CastKind::Unsize, ref operand, target_ty) => { - let target_ty = monomorphize::apply_param_substs(self.ccx.tcx(), + let target_ty = monomorphize::apply_param_substs(self.scx.tcx(), self.param_substs, &target_ty); - let source_ty = self.mir.operand_ty(self.ccx.tcx(), operand); - let source_ty = monomorphize::apply_param_substs(self.ccx.tcx(), + let source_ty = self.mir.operand_ty(self.scx.tcx(), operand); + let source_ty = monomorphize::apply_param_substs(self.scx.tcx(), self.param_substs, &source_ty); - let (source_ty, target_ty) = find_vtable_types_for_unsizing(self.ccx, + let (source_ty, target_ty) = find_vtable_types_for_unsizing(self.scx, source_ty, target_ty); // This could also be a different Unsize instruction, like // from a fixed sized array to a slice. But we are only // interested in things that produce a vtable. 
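// In surface Rust the distinction drawn above looks like this; only the
// trait-object case manufactures a vtable (and therefore needs the vtable
// methods collected as translation items):
use std::fmt::Display;

fn main() {
    let value = 5u32;
    // &u32 -> &dyn Display: requires a <u32 as Display> vtable.
    let as_trait_object: &dyn Display = &value;
    println!("{}", as_trait_object);

    let array = [1u8, 2, 3];
    // &[u8; 3] -> &[u8]: only a length is attached, no vtable involved.
    let as_slice: &[u8] = &array;
    println!("{}", as_slice.len());
}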
if target_ty.is_trait() && !source_ty.is_trait() { - create_trans_items_for_vtable_methods(self.ccx, + create_trans_items_for_vtable_methods(self.scx, target_ty, source_ty, self.output); @@ -423,17 +489,18 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { } mir::Rvalue::Box(_) => { let exchange_malloc_fn_def_id = - self.ccx + self.scx .tcx() .lang_items .require(ExchangeMallocFnLangItem) - .unwrap_or_else(|e| self.ccx.sess().fatal(&e)); + .unwrap_or_else(|e| self.scx.sess().fatal(&e)); - assert!(can_have_local_instance(self.ccx, exchange_malloc_fn_def_id)); + assert!(can_have_local_instance(self.scx.tcx(), exchange_malloc_fn_def_id)); + let empty_substs = self.scx.empty_substs_for_def_id(exchange_malloc_fn_def_id); let exchange_malloc_fn_trans_item = - create_fn_trans_item(self.ccx, + create_fn_trans_item(self.scx.tcx(), exchange_malloc_fn_def_id, - &Substs::empty(), + empty_substs, self.param_substs); self.output.push(exchange_malloc_fn_trans_item); @@ -450,15 +517,15 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { debug!("visiting lvalue {:?}", *lvalue); if let mir_visit::LvalueContext::Drop = context { - let ty = self.mir.lvalue_ty(self.ccx.tcx(), lvalue) - .to_ty(self.ccx.tcx()); + let ty = self.mir.lvalue_ty(self.scx.tcx(), lvalue) + .to_ty(self.scx.tcx()); - let ty = monomorphize::apply_param_substs(self.ccx.tcx(), + let ty = monomorphize::apply_param_substs(self.scx.tcx(), self.param_substs, &ty); - let ty = self.ccx.tcx().erase_regions(&ty); - let ty = glue::get_drop_glue_type(self.ccx, ty); - self.output.push(TransItem::DropGlue(ty)); + let ty = self.scx.tcx().erase_regions(&ty); + let ty = glue::get_drop_glue_type(self.scx.tcx(), ty); + self.output.push(TransItem::DropGlue(DropGlueKind::Ty(ty))); } self.super_lvalue(lvalue, context); @@ -485,7 +552,7 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { // // Calling do_static_dispatch() here will map the def_id of // `std::cmp::partial_cmp` to the def_id of `i32::partial_cmp` - let dispatched = do_static_dispatch(self.ccx, + let dispatched = do_static_dispatch(self.scx, callee_def_id, callee_substs, self.param_substs); @@ -496,9 +563,9 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { // object shim or a closure that is handled differently), // we check if the callee is something that will actually // result in a translation item ... - if can_result_in_trans_item(self.ccx, callee_def_id) { + if can_result_in_trans_item(self.scx.tcx(), callee_def_id) { // ... and create one if it does. - let trans_item = create_fn_trans_item(self.ccx, + let trans_item = create_fn_trans_item(self.scx.tcx(), callee_def_id, callee_substs, self.param_substs); @@ -509,21 +576,21 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { self.super_operand(operand); - fn can_result_in_trans_item<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, + fn can_result_in_trans_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool { - if !match ccx.tcx().lookup_item_type(def_id).ty.sty { + if !match tcx.lookup_item_type(def_id).ty.sty { ty::TyFnDef(def_id, _, _) => { // Some constructors also have type TyFnDef but they are // always instantiated inline and don't result in // translation item. Same for FFI functions. 
- match ccx.tcx().map.get_if_local(def_id) { + match tcx.map.get_if_local(def_id) { Some(hir_map::NodeVariant(_)) | Some(hir_map::NodeStructCtor(_)) | Some(hir_map::NodeForeignItem(_)) => false, Some(_) => true, None => { - ccx.sess().cstore.variant_kind(def_id).is_none() + tcx.sess.cstore.variant_kind(def_id).is_none() } } } @@ -533,40 +600,92 @@ impl<'a, 'tcx> MirVisitor<'tcx> for MirNeighborCollector<'a, 'tcx> { return false; } - can_have_local_instance(ccx, def_id) + can_have_local_instance(tcx, def_id) + } + } + + // This takes care of the "drop_in_place" intrinsic for which we otherwise + // we would not register drop-glues. + fn visit_terminator_kind(&mut self, + block: mir::BasicBlock, + kind: &mir::TerminatorKind<'tcx>) { + let tcx = self.scx.tcx(); + match *kind { + mir::TerminatorKind::Call { + func: mir::Operand::Constant(ref constant), + ref args, + .. + } => { + match constant.ty.sty { + ty::TyFnDef(def_id, _, bare_fn_ty) + if is_drop_in_place_intrinsic(tcx, def_id, bare_fn_ty) => { + let operand_ty = self.mir.operand_ty(tcx, &args[0]); + if let ty::TyRawPtr(mt) = operand_ty.sty { + let operand_ty = monomorphize::apply_param_substs(tcx, + self.param_substs, + &mt.ty); + self.output.push(TransItem::DropGlue(DropGlueKind::Ty(operand_ty))); + } else { + bug!("Has the drop_in_place() intrinsic's signature changed?") + } + } + _ => { /* Nothing to do. */ } + } + } + _ => { /* Nothing to do. */ } + } + + self.super_terminator_kind(block, kind); + + fn is_drop_in_place_intrinsic<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + def_id: DefId, + bare_fn_ty: &ty::BareFnTy<'tcx>) + -> bool { + (bare_fn_ty.abi == Abi::RustIntrinsic || + bare_fn_ty.abi == Abi::PlatformIntrinsic) && + tcx.item_name(def_id).as_str() == "drop_in_place" } } } -fn can_have_local_instance<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +fn can_have_local_instance<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> bool { // Take a look if we have the definition available. If not, we // will not emit code for this item in the local crate, and thus // don't create a translation item for it. - def_id.is_local() || ccx.sess().cstore.is_item_mir_available(def_id) + def_id.is_local() || tcx.sess.cstore.is_item_mir_available(def_id) } -fn find_drop_glue_neighbors<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - ty: ty::Ty<'tcx>, - output: &mut Vec>) -{ - debug!("find_drop_glue_neighbors: {}", type_to_string(ccx, ty)); +fn find_drop_glue_neighbors<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, + dg: DropGlueKind<'tcx>, + output: &mut Vec>) { + let ty = match dg { + DropGlueKind::Ty(ty) => ty, + DropGlueKind::TyContents(_) => { + // We already collected the neighbors of this item via the + // DropGlueKind::Ty variant. + return + } + }; + + debug!("find_drop_glue_neighbors: {}", type_to_string(scx.tcx(), ty)); // Make sure the exchange_free_fn() lang-item gets translated if // there is a boxed value. 
if let ty::TyBox(_) = ty.sty { - let exchange_free_fn_def_id = ccx.tcx() + let exchange_free_fn_def_id = scx.tcx() .lang_items .require(ExchangeFreeFnLangItem) - .unwrap_or_else(|e| ccx.sess().fatal(&e)); + .unwrap_or_else(|e| scx.sess().fatal(&e)); - assert!(can_have_local_instance(ccx, exchange_free_fn_def_id)); + assert!(can_have_local_instance(scx.tcx(), exchange_free_fn_def_id)); + let fn_substs = scx.empty_substs_for_def_id(exchange_free_fn_def_id); let exchange_free_fn_trans_item = - create_fn_trans_item(ccx, + create_fn_trans_item(scx.tcx(), exchange_free_fn_def_id, - &Substs::empty(), - &Substs::empty()); + fn_substs, + scx.tcx().mk_substs(Substs::empty())); output.push(exchange_free_fn_trans_item); } @@ -582,12 +701,12 @@ fn find_drop_glue_neighbors<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, if let Some(destructor_did) = destructor_did { use rustc::ty::ToPolyTraitRef; - let drop_trait_def_id = ccx.tcx() + let drop_trait_def_id = scx.tcx() .lang_items .drop_trait() .unwrap(); - let self_type_substs = ccx.tcx().mk_substs( + let self_type_substs = scx.tcx().mk_substs( Substs::empty().with_self_ty(ty)); let trait_ref = ty::TraitRef { @@ -595,18 +714,22 @@ fn find_drop_glue_neighbors<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, substs: self_type_substs, }.to_poly_trait_ref(); - let substs = match fulfill_obligation(ccx, DUMMY_SP, trait_ref) { + let substs = match fulfill_obligation(scx, DUMMY_SP, trait_ref) { traits::VtableImpl(data) => data.substs, _ => bug!() }; - if can_have_local_instance(ccx, destructor_did) { - let trans_item = create_fn_trans_item(ccx, + if can_have_local_instance(scx.tcx(), destructor_did) { + let trans_item = create_fn_trans_item(scx.tcx(), destructor_did, substs, - &Substs::empty()); + scx.tcx().mk_substs(Substs::empty())); output.push(trans_item); } + + // This type has a Drop implementation, we'll need the contents-only + // version of the glue too. + output.push(TransItem::DropGlue(DropGlueKind::TyContents(ty))); } // Finally add the types of nested values @@ -621,43 +744,43 @@ fn find_drop_glue_neighbors<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty::TyRef(..) | ty::TyFnDef(..) 
| ty::TyFnPtr(_) | - ty::TySlice(_) | ty::TyTrait(_) => { /* nothing to do */ } ty::TyStruct(ref adt_def, substs) | ty::TyEnum(ref adt_def, substs) => { for field in adt_def.all_fields() { - let field_type = monomorphize::apply_param_substs(ccx.tcx(), + let field_type = monomorphize::apply_param_substs(scx.tcx(), substs, &field.unsubst_ty()); - let field_type = glue::get_drop_glue_type(ccx, field_type); + let field_type = glue::get_drop_glue_type(scx.tcx(), field_type); - if glue::type_needs_drop(ccx.tcx(), field_type) { - output.push(TransItem::DropGlue(field_type)); + if glue::type_needs_drop(scx.tcx(), field_type) { + output.push(TransItem::DropGlue(DropGlueKind::Ty(field_type))); } } } - ty::TyClosure(_, ref substs) => { - for upvar_ty in &substs.upvar_tys { - let upvar_ty = glue::get_drop_glue_type(ccx, upvar_ty); - if glue::type_needs_drop(ccx.tcx(), upvar_ty) { - output.push(TransItem::DropGlue(upvar_ty)); + ty::TyClosure(_, substs) => { + for upvar_ty in substs.upvar_tys { + let upvar_ty = glue::get_drop_glue_type(scx.tcx(), upvar_ty); + if glue::type_needs_drop(scx.tcx(), upvar_ty) { + output.push(TransItem::DropGlue(DropGlueKind::Ty(upvar_ty))); } } } ty::TyBox(inner_type) | + ty::TySlice(inner_type) | ty::TyArray(inner_type, _) => { - let inner_type = glue::get_drop_glue_type(ccx, inner_type); - if glue::type_needs_drop(ccx.tcx(), inner_type) { - output.push(TransItem::DropGlue(inner_type)); + let inner_type = glue::get_drop_glue_type(scx.tcx(), inner_type); + if glue::type_needs_drop(scx.tcx(), inner_type) { + output.push(TransItem::DropGlue(DropGlueKind::Ty(inner_type))); } } - ty::TyTuple(ref args) => { + ty::TyTuple(args) => { for arg in args { - let arg = glue::get_drop_glue_type(ccx, arg); - if glue::type_needs_drop(ccx.tcx(), arg) { - output.push(TransItem::DropGlue(arg)); + let arg = glue::get_drop_glue_type(scx.tcx(), arg); + if glue::type_needs_drop(scx.tcx(), arg) { + output.push(TransItem::DropGlue(DropGlueKind::Ty(arg))); } } } @@ -668,27 +791,29 @@ fn find_drop_glue_neighbors<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, bug!("encountered unexpected type"); } } + + } -fn do_static_dispatch<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +fn do_static_dispatch<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, fn_def_id: DefId, fn_substs: &'tcx Substs<'tcx>, param_substs: &'tcx Substs<'tcx>) -> Option<(DefId, &'tcx Substs<'tcx>)> { debug!("do_static_dispatch(fn_def_id={}, fn_substs={:?}, param_substs={:?})", - def_id_to_string(ccx, fn_def_id), + def_id_to_string(scx.tcx(), fn_def_id), fn_substs, param_substs); - let is_trait_method = ccx.tcx().trait_of_item(fn_def_id).is_some(); + let is_trait_method = scx.tcx().trait_of_item(fn_def_id).is_some(); if is_trait_method { - match ccx.tcx().impl_or_trait_item(fn_def_id) { + match scx.tcx().impl_or_trait_item(fn_def_id) { ty::MethodTraitItem(ref method) => { match method.container { ty::TraitContainer(trait_def_id) => { debug!(" => trait method, attempting to find impl"); - do_static_trait_method_dispatch(ccx, + do_static_trait_method_dispatch(scx, method, trait_def_id, fn_substs, @@ -713,28 +838,28 @@ fn do_static_dispatch<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // Given a trait-method and substitution information, find out the actual // implementation of the trait method. 
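// What static dispatch means for the collector, in surface Rust terms: once
// the Self type is known, a call through the trait pins down one concrete
// implementation, and that is the item that gets monomorphized. The names
// below are purely illustrative:
trait Greet {
    fn greet(&self) -> String {
        "hello from the default method".to_string()
    }
}

struct Terse;

impl Greet for Terse {
    fn greet(&self) -> String {
        "hi".to_string()
    }
}

fn greet_twice<T: Greet>(value: &T) -> String {
    // Monomorphizing greet_twice::<Terse> resolves this call to Terse::greet,
    // not to the trait's default body.
    format!("{} {}", value.greet(), value.greet())
}

fn main() {
    assert_eq!(greet_twice(&Terse), "hi hi");
}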
-fn do_static_trait_method_dispatch<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +fn do_static_trait_method_dispatch<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, trait_method: &ty::Method, trait_id: DefId, callee_substs: &'tcx Substs<'tcx>, param_substs: &'tcx Substs<'tcx>) -> Option<(DefId, &'tcx Substs<'tcx>)> { - let tcx = ccx.tcx(); + let tcx = scx.tcx(); debug!("do_static_trait_method_dispatch(trait_method={}, \ trait_id={}, \ callee_substs={:?}, \ param_substs={:?}", - def_id_to_string(ccx, trait_method.def_id), - def_id_to_string(ccx, trait_id), + def_id_to_string(scx.tcx(), trait_method.def_id), + def_id_to_string(scx.tcx(), trait_id), callee_substs, param_substs); let rcvr_substs = monomorphize::apply_param_substs(tcx, param_substs, - callee_substs); + &callee_substs); let trait_ref = ty::Binder(rcvr_substs.to_trait_ref(tcx, trait_id)); - let vtbl = fulfill_obligation(ccx, DUMMY_SP, trait_ref); + let vtbl = fulfill_obligation(scx, DUMMY_SP, trait_ref); // Now that we know which impl is being used, we can dispatch to // the actual function: @@ -802,7 +927,7 @@ fn do_static_trait_method_dispatch<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, /// /// Finally, there is also the case of custom unsizing coercions, e.g. for /// smart pointers such as `Rc` and `Arc`. -fn find_vtable_types_for_unsizing<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +fn find_vtable_types_for_unsizing<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, source_ty: ty::Ty<'tcx>, target_ty: ty::Ty<'tcx>) -> (ty::Ty<'tcx>, ty::Ty<'tcx>) { @@ -816,10 +941,10 @@ fn find_vtable_types_for_unsizing<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, &ty::TyRawPtr(ty::TypeAndMut { ty: b, .. })) => { let (inner_source, inner_target) = (a, b); - if !type_is_sized(ccx.tcx(), inner_source) { + if !type_is_sized(scx.tcx(), inner_source) { (inner_source, inner_target) } else { - ccx.tcx().struct_lockstep_tails(inner_source, inner_target) + scx.tcx().struct_lockstep_tails(inner_source, inner_target) } } @@ -827,7 +952,7 @@ fn find_vtable_types_for_unsizing<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, &ty::TyStruct(target_adt_def, target_substs)) => { assert_eq!(source_adt_def, target_adt_def); - let kind = custom_coerce_unsize_info(ccx, source_ty, target_ty); + let kind = custom_coerce_unsize_info(scx, source_ty, target_ty); let coerce_index = match kind { CustomCoerceUnsized::Struct(i) => i @@ -839,10 +964,10 @@ fn find_vtable_types_for_unsizing<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, assert!(coerce_index < source_fields.len() && source_fields.len() == target_fields.len()); - find_vtable_types_for_unsizing(ccx, - source_fields[coerce_index].ty(ccx.tcx(), + find_vtable_types_for_unsizing(scx, + source_fields[coerce_index].ty(scx.tcx(), source_substs), - target_fields[coerce_index].ty(ccx.tcx(), + target_fields[coerce_index].ty(scx.tcx(), target_substs)) } _ => bug!("find_vtable_types_for_unsizing: invalid coercion {:?} -> {:?}", @@ -851,64 +976,61 @@ fn find_vtable_types_for_unsizing<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, } } -fn create_fn_trans_item<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +fn create_fn_trans_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId, - fn_substs: &Substs<'tcx>, - param_substs: &Substs<'tcx>) - -> TransItem<'tcx> -{ + fn_substs: &'tcx Substs<'tcx>, + param_substs: &'tcx Substs<'tcx>) + -> TransItem<'tcx> { debug!("create_fn_trans_item(def_id={}, fn_substs={:?}, param_substs={:?})", - def_id_to_string(ccx, def_id), + def_id_to_string(tcx, def_id), fn_substs, param_substs); // We only get here, if fn_def_id either designates a local 
item or // an inlineable external item. Non-inlineable external items are // ignored because we don't want to generate any code for them. - let concrete_substs = monomorphize::apply_param_substs(ccx.tcx(), + let concrete_substs = monomorphize::apply_param_substs(tcx, param_substs, - fn_substs); - let concrete_substs = ccx.tcx().erase_regions(&concrete_substs); + &fn_substs); + let concrete_substs = tcx.erase_regions(&concrete_substs); let trans_item = - TransItem::Fn(Instance::new(def_id, - &ccx.tcx().mk_substs(concrete_substs))); - + TransItem::Fn(Instance::new(def_id, concrete_substs)); return trans_item; } /// Creates a `TransItem` for each method that is referenced by the vtable for /// the given trait/impl pair. -fn create_trans_items_for_vtable_methods<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +fn create_trans_items_for_vtable_methods<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, trait_ty: ty::Ty<'tcx>, impl_ty: ty::Ty<'tcx>, output: &mut Vec>) { assert!(!trait_ty.needs_subst() && !impl_ty.needs_subst()); if let ty::TyTrait(ref trait_ty) = trait_ty.sty { - let poly_trait_ref = trait_ty.principal_trait_ref_with_self_ty(ccx.tcx(), + let poly_trait_ref = trait_ty.principal_trait_ref_with_self_ty(scx.tcx(), impl_ty); // Walk all methods of the trait, including those of its supertraits - for trait_ref in traits::supertraits(ccx.tcx(), poly_trait_ref) { - let vtable = fulfill_obligation(ccx, DUMMY_SP, trait_ref); + for trait_ref in traits::supertraits(scx.tcx(), poly_trait_ref) { + let vtable = fulfill_obligation(scx, DUMMY_SP, trait_ref); match vtable { traits::VtableImpl( traits::VtableImplData { impl_def_id, substs, nested: _ }) => { - let items = meth::get_vtable_methods(ccx, impl_def_id, substs) + let items = meth::get_vtable_methods(scx.tcx(), impl_def_id, substs) .into_iter() // filter out None values .filter_map(|opt_impl_method| opt_impl_method) // create translation items .filter_map(|impl_method| { - if can_have_local_instance(ccx, impl_method.method.def_id) { - Some(create_fn_trans_item(ccx, - impl_method.method.def_id, - &impl_method.substs, - &Substs::empty())) + if can_have_local_instance(scx.tcx(), impl_method.method.def_id) { + Some(create_fn_trans_item(scx.tcx(), + impl_method.method.def_id, + impl_method.substs, + scx.tcx().mk_substs(Substs::empty()))) } else { None } @@ -928,7 +1050,7 @@ fn create_trans_items_for_vtable_methods<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, //=----------------------------------------------------------------------------- struct RootCollector<'b, 'a: 'b, 'tcx: 'a + 'b> { - ccx: &'b CrateContext<'a, 'tcx>, + scx: &'b SharedCrateContext<'a, 'tcx>, mode: TransItemCollectionMode, output: &'b mut Vec>, enclosing_item: Option<&'tcx hir::Item>, @@ -953,7 +1075,7 @@ impl<'b, 'a, 'v> hir_visit::Visitor<'v> for RootCollector<'b, 'a, 'v> { hir::ItemImpl(..) 
=> { if self.mode == TransItemCollectionMode::Eager { - create_trans_items_for_default_impls(self.ccx, + create_trans_items_for_default_impls(self.scx.tcx(), item, self.output); } @@ -963,35 +1085,35 @@ impl<'b, 'a, 'v> hir_visit::Visitor<'v> for RootCollector<'b, 'a, 'v> { hir::ItemStruct(_, ref generics) => { if !generics.is_parameterized() { let ty = { - let tables = self.ccx.tcx().tables.borrow(); + let tables = self.scx.tcx().tables.borrow(); tables.node_types[&item.id] }; if self.mode == TransItemCollectionMode::Eager { debug!("RootCollector: ADT drop-glue for {}", - def_id_to_string(self.ccx, - self.ccx.tcx().map.local_def_id(item.id))); + def_id_to_string(self.scx.tcx(), + self.scx.tcx().map.local_def_id(item.id))); - let ty = glue::get_drop_glue_type(self.ccx, ty); - self.output.push(TransItem::DropGlue(ty)); + let ty = glue::get_drop_glue_type(self.scx.tcx(), ty); + self.output.push(TransItem::DropGlue(DropGlueKind::Ty(ty))); } } } hir::ItemStatic(..) => { debug!("RootCollector: ItemStatic({})", - def_id_to_string(self.ccx, - self.ccx.tcx().map.local_def_id(item.id))); + def_id_to_string(self.scx.tcx(), + self.scx.tcx().map.local_def_id(item.id))); self.output.push(TransItem::Static(item.id)); } hir::ItemFn(_, _, constness, _, ref generics, _) => { if !generics.is_type_parameterized() && constness == hir::Constness::NotConst { - let def_id = self.ccx.tcx().map.local_def_id(item.id); + let def_id = self.scx.tcx().map.local_def_id(item.id); debug!("RootCollector: ItemFn({})", - def_id_to_string(self.ccx, def_id)); + def_id_to_string(self.scx.tcx(), def_id)); - let instance = Instance::mono(self.ccx.tcx(), def_id); + let instance = Instance::mono(self.scx, def_id); self.output.push(TransItem::Fn(instance)); } } @@ -1008,7 +1130,7 @@ impl<'b, 'a, 'v> hir_visit::Visitor<'v> for RootCollector<'b, 'a, 'v> { constness, .. 
}, _) if constness == hir::Constness::NotConst => { - let hir_map = &self.ccx.tcx().map; + let hir_map = &self.scx.tcx().map; let parent_node_id = hir_map.get_parent_node(ii.id); let is_impl_generic = match hir_map.expect_item(parent_node_id) { &hir::Item { @@ -1023,12 +1145,12 @@ impl<'b, 'a, 'v> hir_visit::Visitor<'v> for RootCollector<'b, 'a, 'v> { }; if !generics.is_type_parameterized() && !is_impl_generic { - let def_id = self.ccx.tcx().map.local_def_id(ii.id); + let def_id = self.scx.tcx().map.local_def_id(ii.id); debug!("RootCollector: MethodImplItem({})", - def_id_to_string(self.ccx, def_id)); + def_id_to_string(self.scx.tcx(), def_id)); - let instance = Instance::mono(self.ccx.tcx(), def_id); + let instance = Instance::mono(self.scx, def_id); self.output.push(TransItem::Fn(instance)); } } @@ -1039,7 +1161,7 @@ impl<'b, 'a, 'v> hir_visit::Visitor<'v> for RootCollector<'b, 'a, 'v> { } } -fn create_trans_items_for_default_impls<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +fn create_trans_items_for_default_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item: &'tcx hir::Item, output: &mut Vec>) { match item.node { @@ -1053,15 +1175,14 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, return } - let tcx = ccx.tcx(); let impl_def_id = tcx.map.local_def_id(item.id); debug!("create_trans_items_for_default_impls(item={})", - def_id_to_string(ccx, impl_def_id)); + def_id_to_string(tcx, impl_def_id)); if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id) { let default_impls = tcx.provided_trait_methods(trait_ref.def_id); - let callee_substs = tcx.mk_substs(tcx.erase_regions(trait_ref.substs)); + let callee_substs = tcx.erase_regions(&trait_ref.substs); let overridden_methods: FnvHashSet<_> = items.iter() .map(|item| item.name) .collect(); @@ -1084,13 +1205,13 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, assert!(mth.is_provided); let predicates = mth.method.predicates.predicates.subst(tcx, &mth.substs); - if !normalize_and_test_predicates(ccx, predicates.into_vec()) { + if !normalize_and_test_predicates(tcx, predicates.into_vec()) { continue; } - if can_have_local_instance(ccx, default_impl.def_id) { - let empty_substs = ccx.tcx().mk_substs(ccx.tcx().erase_regions(mth.substs)); - let item = create_fn_trans_item(ccx, + if can_have_local_instance(tcx, default_impl.def_id) { + let empty_substs = tcx.erase_regions(&mth.substs); + let item = create_fn_trans_item(tcx, default_impl.def_id, callee_substs, empty_substs); @@ -1105,280 +1226,6 @@ fn create_trans_items_for_default_impls<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, } } -//=----------------------------------------------------------------------------- -// TransItem String Keys -//=----------------------------------------------------------------------------- - -// The code below allows for producing a unique string key for a trans item. -// These keys are used by the handwritten auto-tests, so they need to be -// predictable and human-readable. -// -// Note: A lot of this could looks very similar to what's already in the -// ppaux module. It would be good to refactor things so we only have one -// parameterizable implementation for printing types. - -/// Same as `unique_type_name()` but with the result pushed onto the given -/// `output` parameter. 
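// The removed name-printing helpers below share one output String so nested
// calls avoid repeated allocation, and they trim the trailing separator with
// two pops. A standalone sketch of that pattern, with made-up path parts:
fn push_path(parts: &[&str], output: &mut String) {
    for part in parts {
        output.push_str(part);
        output.push_str("::");
    }
    // Remove the final "::" (two characters, hence two pops).
    output.pop();
    output.pop();
}

fn path_to_string(parts: &[&str]) -> String {
    let mut output = String::new();
    push_path(parts, &mut output);
    output
}

fn main() {
    assert_eq!(path_to_string(&["some_crate", "module", "Item"]),
               "some_crate::module::Item");
}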
-pub fn push_unique_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - t: ty::Ty<'tcx>, - output: &mut String) { - match t.sty { - ty::TyBool => output.push_str("bool"), - ty::TyChar => output.push_str("char"), - ty::TyStr => output.push_str("str"), - ty::TyInt(ast::IntTy::Is) => output.push_str("isize"), - ty::TyInt(ast::IntTy::I8) => output.push_str("i8"), - ty::TyInt(ast::IntTy::I16) => output.push_str("i16"), - ty::TyInt(ast::IntTy::I32) => output.push_str("i32"), - ty::TyInt(ast::IntTy::I64) => output.push_str("i64"), - ty::TyUint(ast::UintTy::Us) => output.push_str("usize"), - ty::TyUint(ast::UintTy::U8) => output.push_str("u8"), - ty::TyUint(ast::UintTy::U16) => output.push_str("u16"), - ty::TyUint(ast::UintTy::U32) => output.push_str("u32"), - ty::TyUint(ast::UintTy::U64) => output.push_str("u64"), - ty::TyFloat(ast::FloatTy::F32) => output.push_str("f32"), - ty::TyFloat(ast::FloatTy::F64) => output.push_str("f64"), - ty::TyStruct(adt_def, substs) | - ty::TyEnum(adt_def, substs) => { - push_item_name(cx, adt_def.did, output); - push_type_params(cx, &substs.types, &[], output); - }, - ty::TyTuple(ref component_types) => { - output.push('('); - for &component_type in component_types { - push_unique_type_name(cx, component_type, output); - output.push_str(", "); - } - if !component_types.is_empty() { - output.pop(); - output.pop(); - } - output.push(')'); - }, - ty::TyBox(inner_type) => { - output.push_str("Box<"); - push_unique_type_name(cx, inner_type, output); - output.push('>'); - }, - ty::TyRawPtr(ty::TypeAndMut { ty: inner_type, mutbl } ) => { - output.push('*'); - match mutbl { - hir::MutImmutable => output.push_str("const "), - hir::MutMutable => output.push_str("mut "), - } - - push_unique_type_name(cx, inner_type, output); - }, - ty::TyRef(_, ty::TypeAndMut { ty: inner_type, mutbl }) => { - output.push('&'); - if mutbl == hir::MutMutable { - output.push_str("mut "); - } - - push_unique_type_name(cx, inner_type, output); - }, - ty::TyArray(inner_type, len) => { - output.push('['); - push_unique_type_name(cx, inner_type, output); - output.push_str(&format!("; {}", len)); - output.push(']'); - }, - ty::TySlice(inner_type) => { - output.push('['); - push_unique_type_name(cx, inner_type, output); - output.push(']'); - }, - ty::TyTrait(ref trait_data) => { - push_item_name(cx, trait_data.principal.skip_binder().def_id, output); - push_type_params(cx, - &trait_data.principal.skip_binder().substs.types, - &trait_data.bounds.projection_bounds, - output); - }, - ty::TyFnDef(_, _, &ty::BareFnTy{ unsafety, abi, ref sig } ) | - ty::TyFnPtr(&ty::BareFnTy{ unsafety, abi, ref sig } ) => { - if unsafety == hir::Unsafety::Unsafe { - output.push_str("unsafe "); - } - - if abi != ::abi::Abi::Rust { - output.push_str("extern \""); - output.push_str(abi.name()); - output.push_str("\" "); - } - - output.push_str("fn("); - - let sig = cx.tcx().erase_late_bound_regions(sig); - if !sig.inputs.is_empty() { - for ¶meter_type in &sig.inputs { - push_unique_type_name(cx, parameter_type, output); - output.push_str(", "); - } - output.pop(); - output.pop(); - } - - if sig.variadic { - if !sig.inputs.is_empty() { - output.push_str(", ..."); - } else { - output.push_str("..."); - } - } - - output.push(')'); - - match sig.output { - ty::FnConverging(result_type) if result_type.is_nil() => {} - ty::FnConverging(result_type) => { - output.push_str(" -> "); - push_unique_type_name(cx, result_type, output); - } - ty::FnDiverging => { - output.push_str(" -> !"); - } - } - }, - ty::TyClosure(def_id, ref 
closure_substs) => { - push_item_name(cx, def_id, output); - output.push_str("{"); - output.push_str(&format!("{}:{}", def_id.krate, def_id.index.as_usize())); - output.push_str("}"); - push_type_params(cx, &closure_substs.func_substs.types, &[], output); - } - ty::TyError | - ty::TyInfer(_) | - ty::TyProjection(..) | - ty::TyParam(_) => { - bug!("debuginfo: Trying to create type name for \ - unexpected type: {:?}", t); - } - } -} - -fn push_item_name(ccx: &CrateContext, - def_id: DefId, - output: &mut String) { - let def_path = ccx.tcx().def_path(def_id); - - // some_crate:: - output.push_str(&ccx.tcx().crate_name(def_path.krate)); - output.push_str("::"); - - // foo::bar::ItemName:: - for part in ccx.tcx().def_path(def_id).data { - output.push_str(&format!("{}[{}]::", - part.data.as_interned_str(), - part.disambiguator)); - } - - // remove final "::" - output.pop(); - output.pop(); -} - -fn push_type_params<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - types: &'tcx subst::VecPerParamSpace>, - projections: &[ty::PolyProjectionPredicate<'tcx>], - output: &mut String) { - if types.is_empty() && projections.is_empty() { - return; - } - - output.push('<'); - - for &type_parameter in types { - push_unique_type_name(cx, type_parameter, output); - output.push_str(", "); - } - - for projection in projections { - let projection = projection.skip_binder(); - let name = token::get_ident_interner().get(projection.projection_ty.item_name); - output.push_str(&name[..]); - output.push_str("="); - push_unique_type_name(cx, projection.ty, output); - output.push_str(", "); - } - - output.pop(); - output.pop(); - - output.push('>'); -} - -fn push_instance_as_string<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - instance: Instance<'tcx>, - output: &mut String) { - push_item_name(ccx, instance.def, output); - push_type_params(ccx, &instance.substs.types, &[], output); -} - -fn def_id_to_string(ccx: &CrateContext, def_id: DefId) -> String { - let mut output = String::new(); - push_item_name(ccx, def_id, &mut output); - output -} - -fn type_to_string<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - ty: ty::Ty<'tcx>) - -> String { - let mut output = String::new(); - push_unique_type_name(ccx, ty, &mut output); - output -} - -impl<'tcx> TransItem<'tcx> { - - pub fn to_string<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> String { - let hir_map = &ccx.tcx().map; - - return match *self { - TransItem::DropGlue(t) => { - let mut s = String::with_capacity(32); - s.push_str("drop-glue "); - push_unique_type_name(ccx, t, &mut s); - s - } - TransItem::Fn(instance) => { - to_string_internal(ccx, "fn ", instance) - }, - TransItem::Static(node_id) => { - let def_id = hir_map.local_def_id(node_id); - let instance = Instance::mono(ccx.tcx(), def_id); - to_string_internal(ccx, "static ", instance) - }, - }; - - fn to_string_internal<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, - prefix: &str, - instance: Instance<'tcx>) - -> String { - let mut result = String::with_capacity(32); - result.push_str(prefix); - push_instance_as_string(ccx, instance, &mut result); - result - } - } - - fn to_raw_string(&self) -> String { - match *self { - TransItem::DropGlue(t) => { - format!("DropGlue({})", t as *const _ as usize) - } - TransItem::Fn(instance) => { - format!("Fn({:?}, {})", - instance.def, - instance.substs as *const _ as usize) - } - TransItem::Static(id) => { - format!("Static({:?})", id) - } - } - } -} - #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub enum TransItemState { PredictedAndGenerated, @@ -1386,15 +1233,15 @@ pub enum TransItemState { 
NotPredictedButGenerated, } -pub fn collecting_debug_information(ccx: &CrateContext) -> bool { - return cfg!(debug_assertions) && - ccx.sess().opts.debugging_opts.print_trans_items.is_some(); +pub fn collecting_debug_information(scx: &SharedCrateContext) -> bool { + return scx.sess().opts.cg.debug_assertions == Some(true) && + scx.sess().opts.debugging_opts.print_trans_items.is_some(); } -pub fn print_collection_results<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>) { +pub fn print_collection_results<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>) { use std::hash::{Hash, SipHasher, Hasher}; - if !collecting_debug_information(ccx) { + if !collecting_debug_information(scx) { return; } @@ -1404,14 +1251,14 @@ pub fn print_collection_results<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>) { s.finish() } - let trans_items = ccx.translation_items().borrow(); + let trans_items = scx.translation_items().borrow(); { // Check for duplicate item keys let mut item_keys = FnvHashMap(); for (item, item_state) in trans_items.iter() { - let k = item.to_string(&ccx); + let k = item.to_string(scx.tcx()); if item_keys.contains_key(&k) { let prev: (TransItem, TransItemState) = item_keys[&k]; @@ -1439,7 +1286,7 @@ pub fn print_collection_results<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>) { let mut generated = FnvHashSet(); for (item, item_state) in trans_items.iter() { - let item_key = item.to_string(&ccx); + let item_key = item.to_string(scx.tcx()); match *item_state { TransItemState::PredictedAndGenerated => { diff --git a/src/librustc_trans/common.rs b/src/librustc_trans/common.rs index 5ce7caf5de..c1685e6a74 100644 --- a/src/librustc_trans/common.rs +++ b/src/librustc_trans/common.rs @@ -19,7 +19,8 @@ use llvm::{True, False, Bool, OperandBundleDef}; use rustc::cfg; use rustc::hir::def::Def; use rustc::hir::def_id::DefId; -use rustc::infer; +use rustc::infer::TransNormalize; +use rustc::util::common::MemoizationMap; use middle::lang_items::LangItem; use rustc::ty::subst::Substs; use abi::{Abi, FnType}; @@ -39,7 +40,7 @@ use type_::Type; use value::Value; use rustc::ty::{self, Ty, TyCtxt}; use rustc::traits::{self, SelectionContext, ProjectionMode}; -use rustc::ty::fold::{TypeFolder, TypeFoldable}; +use rustc::ty::fold::TypeFoldable; use rustc::hir; use util::nodemap::NodeMap; @@ -54,19 +55,19 @@ use syntax::codemap::{DUMMY_SP, Span}; use syntax::parse::token::InternedString; use syntax::parse::token; -pub use context::CrateContext; +pub use context::{CrateContext, SharedCrateContext}; /// Is the type's representation size known at compile time? -pub fn type_is_sized<'tcx>(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { - ty.is_sized(&tcx.empty_parameter_environment(), DUMMY_SP) +pub fn type_is_sized<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool { + ty.is_sized(tcx, &tcx.empty_parameter_environment(), DUMMY_SP) } -pub fn type_is_fat_ptr<'tcx>(cx: &TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { +pub fn type_is_fat_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool { match ty.sty { ty::TyRawPtr(ty::TypeAndMut{ty, ..}) | ty::TyRef(_, ty::TypeAndMut{ty, ..}) | ty::TyBox(ty) => { - !type_is_sized(cx, ty) + !type_is_sized(tcx, ty) } _ => { false @@ -163,8 +164,8 @@ pub struct VariantInfo<'tcx> { pub fields: Vec> } -impl<'tcx> VariantInfo<'tcx> { - pub fn from_ty(tcx: &TyCtxt<'tcx>, +impl<'a, 'tcx> VariantInfo<'tcx> { + pub fn from_ty(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>, opt_def: Option) -> Self @@ -200,7 +201,7 @@ impl<'tcx> VariantInfo<'tcx> { } /// Return the variant corresponding to a given node (e.g. 
expr) - pub fn of_node(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>, id: ast::NodeId) -> Self { + pub fn of_node(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>, id: ast::NodeId) -> Self { let node_def = tcx.def_map.borrow().get(&id).map(|v| v.full_def()); Self::from_ty(tcx, ty, node_def) } @@ -280,7 +281,7 @@ pub struct FunctionContext<'a, 'tcx: 'a> { pub llfn: ValueRef, // always an empty parameter-environment NOTE: @jroesch another use of ParamEnv - pub param_env: ty::ParameterEnvironment<'a, 'tcx>, + pub param_env: ty::ParameterEnvironment<'tcx>, // A pointer to where to store the return value. If the return type is // immediate, this points to an alloca in the function. Otherwise, it's a @@ -427,7 +428,7 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> { } pub fn monomorphize(&self, value: &T) -> T - where T : TypeFoldable<'tcx> + where T: TransNormalize<'tcx> { monomorphize::apply_param_substs(self.ccx.tcx(), self.param_substs, @@ -463,20 +464,18 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> { // landing pads as "landing pads for SEH". let ccx = self.ccx; let tcx = ccx.tcx(); - let target = &ccx.sess().target.target; match tcx.lang_items.eh_personality() { Some(def_id) if !base::wants_msvc_seh(ccx.sess()) => { Callee::def(ccx, def_id, tcx.mk_substs(Substs::empty())).reify(ccx).val } - _ => if let Some(llpersonality) = ccx.eh_personality().get() { - llpersonality - } else { - let name = if !base::wants_msvc_seh(ccx.sess()) { - "rust_eh_personality" - } else if target.arch == "x86" { - "_except_handler3" + _ => { + if let Some(llpersonality) = ccx.eh_personality().get() { + return llpersonality + } + let name = if base::wants_msvc_seh(ccx.sess()) { + "__CxxFrameHandler3" } else { - "__C_specific_handler" + "rust_eh_personality" }; let fty = Type::variadic_func(&[], &Type::i32(ccx)); let f = declare::declare_cfn(ccx, name, fty); @@ -497,7 +496,7 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> { return Callee::def(ccx, def_id, tcx.mk_substs(Substs::empty())); } - let ty = tcx.mk_fn_ptr(ty::BareFnTy { + let ty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Unsafe, abi: Abi::C, sig: ty::Binder(ty::FnSig { @@ -505,7 +504,7 @@ impl<'a, 'tcx> FunctionContext<'a, 'tcx> { output: ty::FnDiverging, variadic: false }), - }); + })); let unwresume = ccx.eh_unwind_resume(); if let Some(llfn) = unwresume.get() { @@ -569,7 +568,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> { pub fn fcx(&self) -> &'blk FunctionContext<'blk, 'tcx> { self.fcx } - pub fn tcx(&self) -> &'blk TyCtxt<'tcx> { + pub fn tcx(&self) -> TyCtxt<'blk, 'tcx, 'tcx> { self.fcx.ccx.tcx() } pub fn sess(&self) -> &'blk Session { self.fcx.ccx.sess() } @@ -604,7 +603,7 @@ impl<'blk, 'tcx> BlockS<'blk, 'tcx> { } pub fn monomorphize(&self, value: &T) -> T - where T : TypeFoldable<'tcx> + where T: TransNormalize<'tcx> { monomorphize::apply_param_substs(self.tcx(), self.fcx.param_substs, @@ -695,7 +694,7 @@ impl<'blk, 'tcx> BlockAndBuilder<'blk, 'tcx> { pub fn fcx(&self) -> &'blk FunctionContext<'blk, 'tcx> { self.bcx.fcx() } - pub fn tcx(&self) -> &'blk TyCtxt<'tcx> { + pub fn tcx(&self) -> TyCtxt<'blk, 'tcx, 'tcx> { self.bcx.tcx() } pub fn sess(&self) -> &'blk Session { @@ -711,7 +710,7 @@ impl<'blk, 'tcx> BlockAndBuilder<'blk, 'tcx> { } pub fn monomorphize(&self, value: &T) -> T - where T: TypeFoldable<'tcx> + where T: TransNormalize<'tcx> { self.bcx.monomorphize(value) } @@ -1051,107 +1050,94 @@ pub fn expr_ty_adjusted<'blk, 'tcx>(bcx: &BlockS<'blk, 'tcx>, ex: &hir::Expr) -> /// Attempts to resolve an obligation. 
The result is a shallow vtable resolution -- meaning that we /// do not (necessarily) resolve all nested obligations on the impl. Note that type check should /// guarantee to us that all nested obligations *could be* resolved if we wanted to. -pub fn fulfill_obligation<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +pub fn fulfill_obligation<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>, span: Span, trait_ref: ty::PolyTraitRef<'tcx>) -> traits::Vtable<'tcx, ()> { - let tcx = ccx.tcx(); + let tcx = scx.tcx(); // Remove any references to regions; this helps improve caching. let trait_ref = tcx.erase_regions(&trait_ref); - // First check the cache. - match ccx.trait_cache().borrow().get(&trait_ref) { - Some(vtable) => { - info!("Cache hit: {:?}", trait_ref); - return (*vtable).clone(); - } - None => { } - } - - debug!("trans fulfill_obligation: trait_ref={:?} def_id={:?}", - trait_ref, trait_ref.def_id()); - - - // Do the initial selection for the obligation. This yields the - // shallow result we are looking for -- that is, what specific impl. - let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables, ProjectionMode::Any); - let mut selcx = SelectionContext::new(&infcx); - - let obligation = - traits::Obligation::new(traits::ObligationCause::misc(span, ast::DUMMY_NODE_ID), - trait_ref.to_poly_trait_predicate()); - let selection = match selcx.select(&obligation) { - Ok(Some(selection)) => selection, - Ok(None) => { - // Ambiguity can happen when monomorphizing during trans - // expands to some humongo type that never occurred - // statically -- this humongo type can then overflow, - // leading to an ambiguous result. So report this as an - // overflow bug, since I believe this is the only case - // where ambiguity can result. - debug!("Encountered ambiguity selecting `{:?}` during trans, \ - presuming due to overflow", - trait_ref); - ccx.sess().span_fatal( - span, - "reached the recursion limit during monomorphization (selection ambiguity)"); - } - Err(e) => { - span_bug!( - span, - "Encountered error `{:?}` selecting `{:?}` during trans", - e, - trait_ref) - } - }; - - // Currently, we use a fulfillment context to completely resolve - // all nested obligations. This is because they can inform the - // inference of the impl's type parameters. - let mut fulfill_cx = traits::FulfillmentContext::new(); - let vtable = selection.map(|predicate| { - fulfill_cx.register_predicate_obligation(&infcx, predicate); - }); - let vtable = infer::drain_fulfillment_cx_or_panic( - span, &infcx, &mut fulfill_cx, &vtable - ); - - info!("Cache miss: {:?} => {:?}", trait_ref, vtable); - - ccx.trait_cache().borrow_mut().insert(trait_ref, vtable.clone()); - - vtable + scx.trait_cache().memoize(trait_ref, || { + debug!("trans fulfill_obligation: trait_ref={:?} def_id={:?}", + trait_ref, trait_ref.def_id()); + + // Do the initial selection for the obligation. This yields the + // shallow result we are looking for -- that is, what specific impl. 
+ tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| { + let mut selcx = SelectionContext::new(&infcx); + + let obligation_cause = traits::ObligationCause::misc(span, + ast::DUMMY_NODE_ID); + let obligation = traits::Obligation::new(obligation_cause, + trait_ref.to_poly_trait_predicate()); + + let selection = match selcx.select(&obligation) { + Ok(Some(selection)) => selection, + Ok(None) => { + // Ambiguity can happen when monomorphizing during trans + // expands to some humongo type that never occurred + // statically -- this humongo type can then overflow, + // leading to an ambiguous result. So report this as an + // overflow bug, since I believe this is the only case + // where ambiguity can result. + debug!("Encountered ambiguity selecting `{:?}` during trans, \ + presuming due to overflow", + trait_ref); + tcx.sess.span_fatal(span, + "reached the recursion limit during monomorphization \ + (selection ambiguity)"); + } + Err(e) => { + span_bug!(span, "Encountered error `{:?}` selecting `{:?}` during trans", + e, trait_ref) + } + }; + + // Currently, we use a fulfillment context to completely resolve + // all nested obligations. This is because they can inform the + // inference of the impl's type parameters. + let mut fulfill_cx = traits::FulfillmentContext::new(); + let vtable = selection.map(|predicate| { + fulfill_cx.register_predicate_obligation(&infcx, predicate); + }); + let vtable = infcx.drain_fulfillment_cx_or_panic(span, &mut fulfill_cx, &vtable); + + info!("Cache miss: {:?} => {:?}", trait_ref, vtable); + vtable + }) + }) } /// Normalizes the predicates and checks whether they hold. If this /// returns false, then either normalize encountered an error or one /// of the predicates did not hold. Used when creating vtables to /// check for unsatisfiable methods. 
-pub fn normalize_and_test_predicates<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +pub fn normalize_and_test_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, predicates: Vec>) -> bool { debug!("normalize_and_test_predicates(predicates={:?})", predicates); - let tcx = ccx.tcx(); - let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables, ProjectionMode::Any); - let mut selcx = SelectionContext::new(&infcx); - let mut fulfill_cx = traits::FulfillmentContext::new(); - let cause = traits::ObligationCause::dummy(); - let traits::Normalized { value: predicates, obligations } = - traits::normalize(&mut selcx, cause.clone(), &predicates); - for obligation in obligations { - fulfill_cx.register_predicate_obligation(&infcx, obligation); - } - for predicate in predicates { - let obligation = traits::Obligation::new(cause.clone(), predicate); - fulfill_cx.register_predicate_obligation(&infcx, obligation); - } + tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| { + let mut selcx = SelectionContext::new(&infcx); + let mut fulfill_cx = traits::FulfillmentContext::new(); + let cause = traits::ObligationCause::dummy(); + let traits::Normalized { value: predicates, obligations } = + traits::normalize(&mut selcx, cause.clone(), &predicates); + for obligation in obligations { + fulfill_cx.register_predicate_obligation(&infcx, obligation); + } + for predicate in predicates { + let obligation = traits::Obligation::new(cause.clone(), predicate); + fulfill_cx.register_predicate_obligation(&infcx, obligation); + } - infer::drain_fulfillment_cx(&infcx, &mut fulfill_cx, &()).is_ok() + infcx.drain_fulfillment_cx(&mut fulfill_cx, &()).is_ok() + }) } pub fn langcall(bcx: Block, diff --git a/src/librustc_trans/consts.rs b/src/librustc_trans/consts.rs index 89f3b295c8..3e876eb3d7 100644 --- a/src/librustc_trans/consts.rs +++ b/src/librustc_trans/consts.rs @@ -21,7 +21,8 @@ use rustc::hir::map as hir_map; use {abi, adt, closure, debuginfo, expr, machine}; use base::{self, exported_name, imported_name, push_ctxt}; use callee::Callee; -use collector::{self, TransItem}; +use collector; +use trans_item::TransItem; use common::{type_is_sized, C_nil, const_get_elt}; use common::{CrateContext, C_integral, C_floating, C_bool, C_str_slice, C_bytes, val_ty}; use common::{C_struct, C_undef, const_to_opt_int, const_to_opt_uint, VariantInfo, C_uint}; @@ -39,7 +40,7 @@ use rustc::ty::adjustment::{AdjustUnsafeFnPointer, AdjustMutToConstPointer}; use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::cast::{CastTy,IntTy}; use util::nodemap::NodeMap; -use rustc_const_math::{ConstInt, ConstMathErr, ConstUsize, ConstIsize}; +use rustc_const_math::{ConstInt, ConstUsize, ConstIsize}; use rustc::hir; @@ -48,6 +49,7 @@ use std::borrow::Cow; use libc::c_uint; use syntax::ast::{self, LitKind}; use syntax::attr::{self, AttrMetaMethods}; +use syntax::codemap::Span; use syntax::parse::token; use syntax::ptr::P; @@ -110,11 +112,11 @@ pub fn ptrcast(val: ValueRef, ty: Type) -> ValueRef { } } -fn addr_of_mut(ccx: &CrateContext, - cv: ValueRef, - align: machine::llalign, - kind: &str) - -> ValueRef { +pub fn addr_of_mut(ccx: &CrateContext, + cv: ValueRef, + align: machine::llalign, + kind: &str) + -> ValueRef { unsafe { // FIXME: this totally needs a better name generation scheme, perhaps a simple global // counter? Also most other uses of gensym in trans. 
@@ -158,13 +160,13 @@ pub fn addr_of(ccx: &CrateContext, } /// Deref a constant pointer -fn load_const(cx: &CrateContext, v: ValueRef, t: Ty) -> ValueRef { +pub fn load_const(cx: &CrateContext, v: ValueRef, t: Ty) -> ValueRef { let v = match cx.const_unsized().borrow().get(&v) { Some(&v) => v, None => v }; let d = unsafe { llvm::LLVMGetInitializer(v) }; - if t.is_bool() { + if !d.is_null() && t.is_bool() { unsafe { llvm::LLVMConstTrunc(d, Type::i1(cx).to_ref()) } } else { d @@ -193,7 +195,7 @@ fn const_deref<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, fn const_fn_call<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId, - substs: Substs<'tcx>, + substs: &'tcx Substs<'tcx>, arg_vals: &[ValueRef], param_substs: &'tcx Substs<'tcx>, trueconst: TrueConst) -> Result { @@ -211,10 +213,10 @@ fn const_fn_call<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let arg_ids = args.iter().map(|arg| arg.pat.id); let fn_args = arg_ids.zip(arg_vals.iter().cloned()).collect(); + let substs = ccx.tcx().mk_substs(substs.clone().erase_regions()); let substs = monomorphize::apply_param_substs(ccx.tcx(), param_substs, - &substs.erase_regions()); - let substs = ccx.tcx().mk_substs(substs); + &substs); const_expr(ccx, body, substs, Some(&fn_args), trueconst).map(|(res, _)| res) } @@ -225,9 +227,10 @@ pub fn get_const_expr<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, param_substs: &'tcx Substs<'tcx>) -> &'tcx hir::Expr { let substs = ccx.tcx().node_id_item_substs(ref_expr.id).substs; + let substs = ccx.tcx().mk_substs(substs.clone().erase_regions()); let substs = monomorphize::apply_param_substs(ccx.tcx(), param_substs, - &substs.erase_regions()); + &substs); match lookup_const_by_id(ccx.tcx(), def_id, Some(substs)) { Some((ref expr, _ty)) => expr, None => { @@ -466,16 +469,12 @@ fn check_unary_expr_validity(cx: &CrateContext, e: &hir::Expr, t: Ty, Some(v) => v, None => return Ok(()), }; - match -cval { - Ok(_) => return Ok(()), - Err(err) => const_err(cx, e, Err(err), trueconst), - } - } else { - Ok(()) + const_err(cx, e.span, (-cval).map_err(ErrKind::Math), trueconst)?; } + Ok(()) } -fn to_const_int(value: ValueRef, t: Ty, tcx: &TyCtxt) -> Option { +pub fn to_const_int(value: ValueRef, t: Ty, tcx: TyCtxt) -> Option { match t.sty { ty::TyInt(int_type) => const_to_opt_int(value).and_then(|input| match int_type { ast::IntTy::I8 => { @@ -523,24 +522,21 @@ fn to_const_int(value: ValueRef, t: Ty, tcx: &TyCtxt) -> Option { } } -fn const_err(cx: &CrateContext, - e: &hir::Expr, - result: Result, - trueconst: TrueConst) - -> Result<(), ConstEvalFailure> { +pub fn const_err(cx: &CrateContext, + span: Span, + result: Result, + trueconst: TrueConst) + -> Result { match (result, trueconst) { - (Ok(_), _) => { - // We do not actually care about a successful result. 
- Ok(()) - }, + (Ok(x), _) => Ok(x), (Err(err), TrueConst::Yes) => { - let err = ConstEvalErr{ span: e.span, kind: ErrKind::Math(err) }; - cx.tcx().sess.span_err(e.span, &err.description()); + let err = ConstEvalErr{ span: span, kind: err }; + cx.tcx().sess.span_err(span, &err.description()); Err(Compiletime(err)) }, (Err(err), TrueConst::No) => { - let err = ConstEvalErr{ span: e.span, kind: ErrKind::Math(err) }; - cx.tcx().sess.span_warn(e.span, &err.description()); + let err = ConstEvalErr{ span: span, kind: err }; + cx.tcx().sess.span_warn(span, &err.description()); Err(Runtime(err)) }, } @@ -564,7 +560,8 @@ fn check_binary_expr_validity(cx: &CrateContext, e: &hir::Expr, t: Ty, hir::BiShr => lhs >> rhs, _ => return Ok(()), }; - const_err(cx, e, result, trueconst) + const_err(cx, e.span, result.map_err(ErrKind::Math), trueconst)?; + Ok(()) } fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, @@ -719,8 +716,7 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, if iv >= len { // FIXME #3170: report this earlier on in the const-eval // pass. Reporting here is a bit late. - span_err!(cx.sess(), e.span, E0515, - "const index-expr is out of bounds"); + const_err(cx, e.span, Err(ErrKind::IndexOutOfBounds), trueconst)?; C_undef(val_ty(arr).element_type()) } else { const_get_elt(arr, &[iv as c_uint]) @@ -974,7 +970,7 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let arg_vals = map_list(args)?; let method_call = ty::MethodCall::expr(e.id); let method = cx.tcx().tables.borrow().method_map[&method_call]; - const_fn_call(cx, method.def_id, method.substs.clone(), + const_fn_call(cx, method.def_id, method.substs, &arg_vals, param_substs, trueconst)? }, hir::ExprType(ref e, _) => const_expr(cx, &e, param_substs, fn_args, trueconst)?.0, @@ -990,9 +986,9 @@ fn const_expr_unadjusted<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, None => C_nil(cx), } }, - hir::ExprClosure(_, ref decl, ref body) => { + hir::ExprClosure(_, ref decl, ref body, _) => { match ety.sty { - ty::TyClosure(def_id, ref substs) => { + ty::TyClosure(def_id, substs) => { closure::trans_closure_expr(closure::Dest::Ignore(cx), decl, body, @@ -1016,7 +1012,7 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId) -> Datum<'tcx, Lvalue> { let ty = ccx.tcx().lookup_item_type(def_id).ty; - let instance = Instance::mono(ccx.tcx(), def_id); + let instance = Instance::mono(ccx.shared(), def_id); if let Some(&g) = ccx.instances().borrow().get(&instance) { return Datum::new(g, ty, Lvalue::new("static")); } @@ -1128,6 +1124,7 @@ pub fn get_static<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, def_id: DefId) }; ccx.instances().borrow_mut().insert(instance, g); + ccx.statics().borrow_mut().insert(g, def_id); Datum::new(g, ty, Lvalue::new("static")) } @@ -1138,7 +1135,7 @@ pub fn trans_static(ccx: &CrateContext, attrs: &[ast::Attribute]) -> Result { - if collector::collecting_debug_information(ccx) { + if collector::collecting_debug_information(ccx.shared()) { ccx.record_translation_item_as_generated(TransItem::Static(id)); } @@ -1147,14 +1144,20 @@ pub fn trans_static(ccx: &CrateContext, let def_id = ccx.tcx().map.local_def_id(id); let datum = get_static(ccx, def_id); - let empty_substs = ccx.tcx().mk_substs(Substs::empty()); - let (v, _) = const_expr( - ccx, - expr, - empty_substs, - None, - TrueConst::Yes, - ).map_err(|e| e.into_inner())?; + let check_attrs = |attrs: &[ast::Attribute]| { + let default_to_mir = ccx.sess().opts.debugging_opts.orbit; + let invert = if default_to_mir { 
"rustc_no_mir" } else { "rustc_mir" }; + default_to_mir ^ attrs.iter().any(|item| item.check_name(invert)) + }; + let use_mir = check_attrs(ccx.tcx().map.attrs(id)); + + let v = if use_mir { + ::mir::trans_static_initializer(ccx, def_id) + } else { + let empty_substs = ccx.tcx().mk_substs(Substs::empty()); + const_expr(ccx, expr, empty_substs, None, TrueConst::Yes) + .map(|(v, _)| v) + }.map_err(|e| e.into_inner())?; // boolean SSA values are i1, but they have to be stored in i8 slots, // otherwise some LLVM optimization passes don't work as expected diff --git a/src/librustc_trans/context.rs b/src/librustc_trans/context.rs index 1217b2b5a1..4d6c4cdcc6 100644 --- a/src/librustc_trans/context.rs +++ b/src/librustc_trans/context.rs @@ -27,7 +27,9 @@ use glue::DropGlueKind; use mir::CachedMir; use monomorphize::Instance; -use collector::{TransItem, TransItemState}; +use partitioning::CodegenUnit; +use collector::TransItemState; +use trans_item::TransItem; use type_::{Type, TypeNames}; use rustc::ty::subst::{Substs, VecPerParamSpace}; use rustc::ty::{self, Ty, TyCtxt}; @@ -64,8 +66,6 @@ pub struct Stats { /// crate, so it must not contain references to any LLVM data structures /// (aside from metadata-related ones). pub struct SharedCrateContext<'a, 'tcx: 'a> { - local_ccxs: Vec>, - metadata_llmod: ModuleRef, metadata_llcx: ContextRef, @@ -74,7 +74,7 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> { item_symbols: RefCell>, link_meta: LinkMeta, symbol_hasher: RefCell, - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, stats: Stats, check_overflow: bool, check_drop_flag_for_sanity: bool, @@ -86,6 +86,7 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> { use_dll_storage_attrs: bool, translation_items: RefCell, TransItemState>>, + trait_cache: RefCell>>, } /// The local portion of a `CrateContext`. There is one `LocalCrateContext` @@ -95,7 +96,8 @@ pub struct SharedCrateContext<'a, 'tcx: 'a> { pub struct LocalCrateContext<'tcx> { llmod: ModuleRef, llcx: ContextRef, - tn: TypeNames, + tn: TypeNames, // FIXME: This seems to be largely unused. + codegen_unit: CodegenUnit<'tcx>, needs_unwind_cleanup_cache: RefCell, bool>>, fn_pointer_shims: RefCell, ValueRef>>, drop_glues: RefCell, ValueRef>>, @@ -131,6 +133,9 @@ pub struct LocalCrateContext<'tcx> { /// Cache of external const values extern_const_values: RefCell>, + /// Mapping from static definitions to their DefId's. + statics: RefCell>, + impl_method_cache: RefCell>, /// Cache of closure wrappers for bare fn's. @@ -168,8 +173,6 @@ pub struct LocalCrateContext<'tcx> { /// Depth of the current type-of computation - used to bail out type_of_depth: Cell, - - trait_cache: RefCell>>, } // Implement DepTrackingMapConfig for `trait_cache` @@ -181,23 +184,66 @@ impl<'tcx> DepTrackingMapConfig for TraitSelectionCache<'tcx> { type Key = ty::PolyTraitRef<'tcx>; type Value = traits::Vtable<'tcx, ()>; fn to_dep_node(key: &ty::PolyTraitRef<'tcx>) -> DepNode { - ty::tls::with(|tcx| { - let lifted_key = tcx.lift(key).unwrap(); - lifted_key.to_poly_trait_predicate().dep_node() - }) + key.to_poly_trait_predicate().dep_node() + } +} + +/// This list owns a number of LocalCrateContexts and binds them to their common +/// SharedCrateContext. This type just exists as a convenience, something to +/// pass around all LocalCrateContexts with and get an iterator over them. 
+pub struct CrateContextList<'a, 'tcx: 'a> { + shared: &'a SharedCrateContext<'a, 'tcx>, + local_ccxs: Vec>, +} + +impl<'a, 'tcx: 'a> CrateContextList<'a, 'tcx> { + + pub fn new(shared_ccx: &'a SharedCrateContext<'a, 'tcx>, + codegen_units: Vec>) + -> CrateContextList<'a, 'tcx> { + CrateContextList { + shared: shared_ccx, + local_ccxs: codegen_units.into_iter().map(|codegen_unit| { + LocalCrateContext::new(shared_ccx, codegen_unit) + }).collect() + } + } + + pub fn iter<'b>(&'b self) -> CrateContextIterator<'b, 'tcx> { + CrateContextIterator { + shared: self.shared, + index: 0, + local_ccxs: &self.local_ccxs[..] + } + } + + pub fn get_ccx<'b>(&'b self, index: usize) -> CrateContext<'b, 'tcx> { + CrateContext { + shared: self.shared, + index: index, + local_ccxs: &self.local_ccxs[..], + } + } + + pub fn shared(&self) -> &'a SharedCrateContext<'a, 'tcx> { + self.shared } } +/// A CrateContext value binds together one LocalCrateContext with the +/// SharedCrateContext. It exists as a convenience wrapper, so we don't have to +/// pass around (SharedCrateContext, LocalCrateContext) tuples all over trans. pub struct CrateContext<'a, 'tcx: 'a> { shared: &'a SharedCrateContext<'a, 'tcx>, - local: &'a LocalCrateContext<'tcx>, - /// The index of `local` in `shared.local_ccxs`. This is used in + local_ccxs: &'a [LocalCrateContext<'tcx>], + /// The index of `local` in `local_ccxs`. This is used in /// `maybe_iter(true)` to identify the original `LocalCrateContext`. index: usize, } pub struct CrateContextIterator<'a, 'tcx: 'a> { shared: &'a SharedCrateContext<'a, 'tcx>, + local_ccxs: &'a [LocalCrateContext<'tcx>], index: usize, } @@ -205,7 +251,7 @@ impl<'a, 'tcx> Iterator for CrateContextIterator<'a,'tcx> { type Item = CrateContext<'a, 'tcx>; fn next(&mut self) -> Option> { - if self.index >= self.shared.local_ccxs.len() { + if self.index >= self.local_ccxs.len() { return None; } @@ -214,8 +260,8 @@ impl<'a, 'tcx> Iterator for CrateContextIterator<'a,'tcx> { Some(CrateContext { shared: self.shared, - local: &self.shared.local_ccxs[index], index: index, + local_ccxs: self.local_ccxs, }) } } @@ -223,6 +269,7 @@ impl<'a, 'tcx> Iterator for CrateContextIterator<'a,'tcx> { /// The iterator produced by `CrateContext::maybe_iter`. pub struct CrateContextMaybeIterator<'a, 'tcx: 'a> { shared: &'a SharedCrateContext<'a, 'tcx>, + local_ccxs: &'a [LocalCrateContext<'tcx>], index: usize, single: bool, origin: usize, @@ -232,20 +279,20 @@ impl<'a, 'tcx> Iterator for CrateContextMaybeIterator<'a, 'tcx> { type Item = (CrateContext<'a, 'tcx>, bool); fn next(&mut self) -> Option<(CrateContext<'a, 'tcx>, bool)> { - if self.index >= self.shared.local_ccxs.len() { + if self.index >= self.local_ccxs.len() { return None; } let index = self.index; self.index += 1; if self.single { - self.index = self.shared.local_ccxs.len(); + self.index = self.local_ccxs.len(); } let ccx = CrateContext { shared: self.shared, - local: &self.shared.local_ccxs[index], index: index, + local_ccxs: self.local_ccxs }; Some((ccx, index == self.origin)) } @@ -285,9 +332,7 @@ unsafe fn create_context_and_module(sess: &Session, mod_name: &str) -> (ContextR } impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> { - pub fn new(crate_name: &str, - local_count: usize, - tcx: &'b TyCtxt<'tcx>, + pub fn new(tcx: TyCtxt<'b, 'tcx, 'tcx>, mir_map: &'b MirMap<'tcx>, export_map: ExportMap, symbol_hasher: Sha256, @@ -345,8 +390,7 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> { // start) and then strongly recommending static linkage on MSVC! 
let use_dll_storage_attrs = tcx.sess.target.target.options.is_like_msvc; - let mut shared_ccx = SharedCrateContext { - local_ccxs: Vec::with_capacity(local_count), + SharedCrateContext { metadata_llmod: metadata_llmod, metadata_llcx: metadata_llcx, export_map: export_map, @@ -375,55 +419,10 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> { available_drop_glues: RefCell::new(FnvHashMap()), use_dll_storage_attrs: use_dll_storage_attrs, translation_items: RefCell::new(FnvHashMap()), - }; - - for i in 0..local_count { - // Append ".rs" to crate name as LLVM module identifier. - // - // LLVM code generator emits a ".file filename" directive - // for ELF backends. Value of the "filename" is set as the - // LLVM module identifier. Due to a LLVM MC bug[1], LLVM - // crashes if the module identifier is same as other symbols - // such as a function name in the module. - // 1. http://llvm.org/bugs/show_bug.cgi?id=11479 - let llmod_id = format!("{}.{}.rs", crate_name, i); - let local_ccx = LocalCrateContext::new(&shared_ccx, &llmod_id[..]); - shared_ccx.local_ccxs.push(local_ccx); - } - - shared_ccx - } - - pub fn iter<'a>(&'a self) -> CrateContextIterator<'a, 'tcx> { - CrateContextIterator { - shared: self, - index: 0, - } - } - - pub fn get_ccx<'a>(&'a self, index: usize) -> CrateContext<'a, 'tcx> { - CrateContext { - shared: self, - local: &self.local_ccxs[index], - index: index, - } - } - - fn get_smallest_ccx<'a>(&'a self) -> CrateContext<'a, 'tcx> { - let (local_ccx, index) = - self.local_ccxs - .iter() - .zip(0..self.local_ccxs.len()) - .min_by_key(|&(local_ccx, _idx)| local_ccx.n_llvm_insns.get()) - .unwrap(); - CrateContext { - shared: self, - local: local_ccx, - index: index, + trait_cache: RefCell::new(DepTrackingMap::new(tcx.dep_graph.clone())), } } - pub fn metadata_llmod(&self) -> ModuleRef { self.metadata_llmod } @@ -444,11 +443,15 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> { &self.item_symbols } + pub fn trait_cache(&self) -> &RefCell>> { + &self.trait_cache + } + pub fn link_meta<'a>(&'a self) -> &'a LinkMeta { &self.link_meta } - pub fn tcx<'a>(&'a self) -> &'a TyCtxt<'tcx> { + pub fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> { self.tcx } @@ -463,14 +466,68 @@ impl<'b, 'tcx> SharedCrateContext<'b, 'tcx> { pub fn use_dll_storage_attrs(&self) -> bool { self.use_dll_storage_attrs } + + pub fn get_mir(&self, def_id: DefId) -> Option> { + if def_id.is_local() { + let node_id = self.tcx.map.as_local_node_id(def_id).unwrap(); + self.mir_map.map.get(&node_id).map(CachedMir::Ref) + } else { + if let Some(mir) = self.mir_cache.borrow().get(&def_id).cloned() { + return Some(CachedMir::Owned(mir)); + } + + let mir = self.sess().cstore.maybe_get_item_mir(self.tcx, def_id); + let cached = mir.map(Rc::new); + if let Some(ref mir) = cached { + self.mir_cache.borrow_mut().insert(def_id, mir.clone()); + } + cached.map(CachedMir::Owned) + } + } + + pub fn translation_items(&self) -> &RefCell, TransItemState>> { + &self.translation_items + } + + /// Given the def-id of some item that has no type parameters, make + /// a suitable "empty substs" for it. 
+ pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> { + let scheme = self.tcx().lookup_item_type(item_def_id); + self.empty_substs_for_scheme(&scheme) + } + + pub fn empty_substs_for_scheme(&self, scheme: &ty::TypeScheme<'tcx>) + -> &'tcx Substs<'tcx> { + assert!(scheme.generics.types.is_empty()); + self.tcx().mk_substs( + Substs::new(VecPerParamSpace::empty(), + scheme.generics.regions.map(|_| ty::ReStatic))) + } + + pub fn metadata_symbol_name(&self) -> String { + format!("rust_metadata_{}_{}", + self.link_meta().crate_name, + self.link_meta().crate_hash) + } } impl<'tcx> LocalCrateContext<'tcx> { fn new<'a>(shared: &SharedCrateContext<'a, 'tcx>, - name: &str) + codegen_unit: CodegenUnit<'tcx>) -> LocalCrateContext<'tcx> { unsafe { - let (llcx, llmod) = create_context_and_module(&shared.tcx.sess, name); + // Append ".rs" to LLVM module identifier. + // + // LLVM code generator emits a ".file filename" directive + // for ELF backends. Value of the "filename" is set as the + // LLVM module identifier. Due to a LLVM MC bug[1], LLVM + // crashes if the module identifier is same as other symbols + // such as a function name in the module. + // 1. http://llvm.org/bugs/show_bug.cgi?id=11479 + let llmod_id = format!("{}.rs", codegen_unit.name); + + let (llcx, llmod) = create_context_and_module(&shared.tcx.sess, + &llmod_id[..]); let dbg_cx = if shared.tcx.sess.opts.debuginfo != NoDebugInfo { Some(debuginfo::CrateDebugContext::new(llmod)) @@ -478,9 +535,10 @@ impl<'tcx> LocalCrateContext<'tcx> { None }; - let mut local_ccx = LocalCrateContext { + let local_ccx = LocalCrateContext { llmod: llmod, llcx: llcx, + codegen_unit: codegen_unit, tn: TypeNames::new(), needs_unwind_cleanup_cache: RefCell::new(FnvHashMap()), fn_pointer_shims: RefCell::new(FnvHashMap()), @@ -495,6 +553,7 @@ impl<'tcx> LocalCrateContext<'tcx> { const_globals: RefCell::new(FnvHashMap()), const_values: RefCell::new(FnvHashMap()), extern_const_values: RefCell::new(DefIdMap()), + statics: RefCell::new(FnvHashMap()), impl_method_cache: RefCell::new(FnvHashMap()), closure_bare_wrapper_cache: RefCell::new(FnvHashMap()), statics_to_rauw: RefCell::new(Vec::new()), @@ -513,26 +572,30 @@ impl<'tcx> LocalCrateContext<'tcx> { intrinsics: RefCell::new(FnvHashMap()), n_llvm_insns: Cell::new(0), type_of_depth: Cell::new(0), - trait_cache: RefCell::new(DepTrackingMap::new(shared.tcx - .dep_graph - .clone())), }; - local_ccx.int_type = Type::int(&local_ccx.dummy_ccx(shared)); - local_ccx.opaque_vec_type = Type::opaque_vec(&local_ccx.dummy_ccx(shared)); - - // Done mutating local_ccx directly. (The rest of the - // initialization goes through RefCell.) 
- { - let ccx = local_ccx.dummy_ccx(shared); + let (int_type, opaque_vec_type, str_slice_ty, mut local_ccx) = { + // Do a little dance to create a dummy CrateContext, so we can + // create some things in the LLVM module of this codegen unit + let mut local_ccxs = vec![local_ccx]; + let (int_type, opaque_vec_type, str_slice_ty) = { + let dummy_ccx = LocalCrateContext::dummy_ccx(shared, + local_ccxs.as_mut_slice()); + let mut str_slice_ty = Type::named_struct(&dummy_ccx, "str_slice"); + str_slice_ty.set_struct_body(&[Type::i8p(&dummy_ccx), + Type::int(&dummy_ccx)], + false); + (Type::int(&dummy_ccx), Type::opaque_vec(&dummy_ccx), str_slice_ty) + }; + (int_type, opaque_vec_type, str_slice_ty, local_ccxs.pop().unwrap()) + }; - let mut str_slice_ty = Type::named_struct(&ccx, "str_slice"); - str_slice_ty.set_struct_body(&[Type::i8p(&ccx), ccx.int_type()], false); - ccx.tn().associate_type("str_slice", &str_slice_ty); + local_ccx.int_type = int_type; + local_ccx.opaque_vec_type = opaque_vec_type; + local_ccx.tn.associate_type("str_slice", &str_slice_ty); - if ccx.sess().count_llvm_insns() { - base::init_insn_ctxt() - } + if shared.tcx.sess.count_llvm_insns() { + base::init_insn_ctxt() } local_ccx @@ -541,18 +604,19 @@ impl<'tcx> LocalCrateContext<'tcx> { /// Create a dummy `CrateContext` from `self` and the provided /// `SharedCrateContext`. This is somewhat dangerous because `self` may - /// not actually be an element of `shared.local_ccxs`, which can cause some - /// operations to panic unexpectedly. + /// not be fully initialized. /// /// This is used in the `LocalCrateContext` constructor to allow calling /// functions that expect a complete `CrateContext`, even before the local /// portion is fully initialized and attached to the `SharedCrateContext`. - fn dummy_ccx<'a>(&'a self, shared: &'a SharedCrateContext<'a, 'tcx>) + fn dummy_ccx<'a>(shared: &'a SharedCrateContext<'a, 'tcx>, + local_ccxs: &'a [LocalCrateContext<'tcx>]) -> CrateContext<'a, 'tcx> { + assert!(local_ccxs.len() == 1); CrateContext { shared: shared, - local: self, - index: !0 as usize, + index: 0, + local_ccxs: local_ccxs } } } @@ -563,13 +627,23 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { } pub fn local(&self) -> &'b LocalCrateContext<'tcx> { - self.local + &self.local_ccxs[self.index] } /// Get a (possibly) different `CrateContext` from the same /// `SharedCrateContext`. 
- pub fn rotate(&self) -> CrateContext<'b, 'tcx> { - self.shared.get_smallest_ccx() + pub fn rotate(&'b self) -> CrateContext<'b, 'tcx> { + let (_, index) = + self.local_ccxs + .iter() + .zip(0..self.local_ccxs.len()) + .min_by_key(|&(local_ccx, _idx)| local_ccx.n_llvm_insns.get()) + .unwrap(); + CrateContext { + shared: self.shared, + index: index, + local_ccxs: &self.local_ccxs[..], + } } /// Either iterate over only `self`, or iterate over all `CrateContext`s in @@ -584,11 +658,11 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { index: if iter_all { 0 } else { self.index }, single: !iter_all, origin: self.index, + local_ccxs: self.local_ccxs, } } - - pub fn tcx<'a>(&'a self) -> &'a TyCtxt<'tcx> { + pub fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> { self.shared.tcx } @@ -601,7 +675,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { } pub fn raw_builder<'a>(&'a self) -> BuilderRef { - self.local.builder.b + self.local().builder.b } pub fn get_intrinsic(&self, key: &str) -> ValueRef { @@ -615,11 +689,15 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { } pub fn llmod(&self) -> ModuleRef { - self.local.llmod + self.local().llmod } pub fn llcx(&self) -> ContextRef { - self.local.llcx + self.local().llcx + } + + pub fn codegen_unit(&self) -> &CodegenUnit<'tcx> { + &self.local().codegen_unit } pub fn td(&self) -> llvm::TargetDataRef { @@ -627,7 +705,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { } pub fn tn<'a>(&'a self) -> &'a TypeNames { - &self.local.tn + &self.local().tn } pub fn export_map<'a>(&'a self) -> &'a ExportMap { @@ -647,81 +725,85 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { } pub fn needs_unwind_cleanup_cache(&self) -> &RefCell, bool>> { - &self.local.needs_unwind_cleanup_cache + &self.local().needs_unwind_cleanup_cache } pub fn fn_pointer_shims(&self) -> &RefCell, ValueRef>> { - &self.local.fn_pointer_shims + &self.local().fn_pointer_shims } pub fn drop_glues<'a>(&'a self) -> &'a RefCell, ValueRef>> { - &self.local.drop_glues + &self.local().drop_glues } pub fn external<'a>(&'a self) -> &'a RefCell>> { - &self.local.external + &self.local().external } pub fn external_srcs<'a>(&'a self) -> &'a RefCell> { - &self.local.external_srcs + &self.local().external_srcs } pub fn instances<'a>(&'a self) -> &'a RefCell, ValueRef>> { - &self.local.instances + &self.local().instances } pub fn monomorphizing<'a>(&'a self) -> &'a RefCell> { - &self.local.monomorphizing + &self.local().monomorphizing } pub fn vtables<'a>(&'a self) -> &'a RefCell, ValueRef>> { - &self.local.vtables + &self.local().vtables } pub fn const_cstr_cache<'a>(&'a self) -> &'a RefCell> { - &self.local.const_cstr_cache + &self.local().const_cstr_cache } pub fn const_unsized<'a>(&'a self) -> &'a RefCell> { - &self.local.const_unsized + &self.local().const_unsized } pub fn const_globals<'a>(&'a self) -> &'a RefCell> { - &self.local.const_globals + &self.local().const_globals } pub fn const_values<'a>(&'a self) -> &'a RefCell), ValueRef>> { - &self.local.const_values + &self.local().const_values } pub fn extern_const_values<'a>(&'a self) -> &'a RefCell> { - &self.local.extern_const_values + &self.local().extern_const_values + } + + pub fn statics<'a>(&'a self) -> &'a RefCell> { + &self.local().statics } pub fn impl_method_cache<'a>(&'a self) -> &'a RefCell> { - &self.local.impl_method_cache + &self.local().impl_method_cache } pub fn closure_bare_wrapper_cache<'a>(&'a self) -> &'a RefCell> { - &self.local.closure_bare_wrapper_cache + &self.local().closure_bare_wrapper_cache } pub fn statics_to_rauw<'a>(&'a self) -> &'a RefCell> { - 
&self.local.statics_to_rauw + &self.local().statics_to_rauw } pub fn lltypes<'a>(&'a self) -> &'a RefCell, Type>> { - &self.local.lltypes + &self.local().lltypes } pub fn llsizingtypes<'a>(&'a self) -> &'a RefCell, Type>> { - &self.local.llsizingtypes + &self.local().llsizingtypes } pub fn adt_reprs<'a>(&'a self) -> &'a RefCell, Rc>>> { - &self.local.adt_reprs + &self.local().adt_reprs } pub fn symbol_hasher<'a>(&'a self) -> &'a RefCell { @@ -729,7 +811,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { } pub fn type_hashcodes<'a>(&'a self) -> &'a RefCell, String>> { - &self.local.type_hashcodes + &self.local().type_hashcodes } pub fn stats<'a>(&'a self) -> &'a Stats { @@ -745,43 +827,39 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { } pub fn int_type(&self) -> Type { - self.local.int_type + self.local().int_type } pub fn opaque_vec_type(&self) -> Type { - self.local.opaque_vec_type + self.local().opaque_vec_type } pub fn closure_vals<'a>(&'a self) -> &'a RefCell, ValueRef>> { - &self.local.closure_vals + &self.local().closure_vals } pub fn dbg_cx<'a>(&'a self) -> &'a Option> { - &self.local.dbg_cx + &self.local().dbg_cx } pub fn eh_personality<'a>(&'a self) -> &'a Cell> { - &self.local.eh_personality + &self.local().eh_personality } pub fn eh_unwind_resume<'a>(&'a self) -> &'a Cell> { - &self.local.eh_unwind_resume + &self.local().eh_unwind_resume } pub fn rust_try_fn<'a>(&'a self) -> &'a Cell> { - &self.local.rust_try_fn + &self.local().rust_try_fn } fn intrinsics<'a>(&'a self) -> &'a RefCell> { - &self.local.intrinsics + &self.local().intrinsics } pub fn count_llvm_insn(&self) { - self.local.n_llvm_insns.set(self.local.n_llvm_insns.get() + 1); - } - - pub fn trait_cache(&self) -> &RefCell>> { - &self.local.trait_cache + self.local().n_llvm_insns.set(self.local().n_llvm_insns.get() + 1); } pub fn obj_size_bound(&self) -> u64 { @@ -795,14 +873,14 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { } pub fn enter_type_of(&self, ty: Ty<'tcx>) -> TypeOfDepthLock<'b, 'tcx> { - let current_depth = self.local.type_of_depth.get(); + let current_depth = self.local().type_of_depth.get(); debug!("enter_type_of({:?}) at depth {:?}", ty, current_depth); if current_depth > self.sess().recursion_limit.get() { self.sess().fatal( &format!("overflow representing the type `{}`", ty)) } - self.local.type_of_depth.set(current_depth + 1); - TypeOfDepthLock(self.local) + self.local().type_of_depth.set(current_depth + 1); + TypeOfDepthLock(self.local()) } pub fn check_overflow(&self) -> bool { @@ -821,21 +899,7 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { } pub fn get_mir(&self, def_id: DefId) -> Option> { - if def_id.is_local() { - let node_id = self.tcx().map.as_local_node_id(def_id).unwrap(); - self.shared.mir_map.map.get(&node_id).map(CachedMir::Ref) - } else { - if let Some(mir) = self.shared.mir_cache.borrow().get(&def_id).cloned() { - return Some(CachedMir::Owned(mir)); - } - - let mir = self.sess().cstore.maybe_get_item_mir(self.tcx(), def_id); - let cached = mir.map(Rc::new); - if let Some(ref mir) = cached { - self.shared.mir_cache.borrow_mut().insert(def_id, mir.clone()); - } - cached.map(CachedMir::Owned) - } + self.shared.get_mir(def_id) } pub fn translation_items(&self) -> &RefCell, TransItemState>> { @@ -859,16 +923,12 @@ impl<'b, 'tcx> CrateContext<'b, 'tcx> { /// Given the def-id of some item that has no type parameters, make /// a suitable "empty substs" for it. 
pub fn empty_substs_for_def_id(&self, item_def_id: DefId) -> &'tcx Substs<'tcx> { - let scheme = self.tcx().lookup_item_type(item_def_id); - self.empty_substs_for_scheme(&scheme) + self.shared().empty_substs_for_def_id(item_def_id) } pub fn empty_substs_for_scheme(&self, scheme: &ty::TypeScheme<'tcx>) -> &'tcx Substs<'tcx> { - assert!(scheme.generics.types.is_empty()); - self.tcx().mk_substs( - Substs::new(VecPerParamSpace::empty(), - scheme.generics.regions.map(|_| ty::ReStatic))) + self.shared().empty_substs_for_scheme(scheme) } } diff --git a/src/librustc_trans/controlflow.rs b/src/librustc_trans/controlflow.rs index 58971dec8a..f793f0a6d5 100644 --- a/src/librustc_trans/controlflow.rs +++ b/src/librustc_trans/controlflow.rs @@ -167,11 +167,11 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, if cv == 1 { // if true { .. } [else { .. }] bcx = trans_block(bcx, &thn, dest); - debuginfo::clear_source_location(bcx.fcx); + DebugLoc::None.apply(bcx.fcx); } else { if let Some(elexpr) = els { bcx = expr::trans_into(bcx, &elexpr, dest); - debuginfo::clear_source_location(bcx.fcx); + DebugLoc::None.apply(bcx.fcx); } } @@ -181,7 +181,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let name = format!("then-block-{}-", thn.id); let then_bcx_in = bcx.fcx.new_id_block(&name[..], thn.id); let then_bcx_out = trans_block(then_bcx_in, &thn, dest); - debuginfo::clear_source_location(bcx.fcx); + DebugLoc::None.apply(bcx.fcx); let cond_source_loc = cond.debug_loc(); @@ -204,7 +204,7 @@ pub fn trans_if<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // Clear the source location because it is still set to whatever has been translated // right before. - debuginfo::clear_source_location(next_bcx.fcx); + DebugLoc::None.apply(next_bcx.fcx); next_bcx } diff --git a/src/librustc_trans/datum.rs b/src/librustc_trans/datum.rs index 0ed38f0681..eda3ce1d10 100644 --- a/src/librustc_trans/datum.rs +++ b/src/librustc_trans/datum.rs @@ -769,8 +769,8 @@ impl<'tcx, K: KindOps + fmt::Debug> Datum<'tcx, K> { * affine values (since they must never be duplicated). */ - assert!(!self.ty - .moves_by_default(&bcx.tcx().empty_parameter_environment(), DUMMY_SP)); + assert!(!self.ty.moves_by_default(bcx.tcx(), + &bcx.tcx().empty_parameter_environment(), DUMMY_SP)); self.shallow_copy_raw(bcx, dst) } diff --git a/src/librustc_trans/debuginfo/create_scope_map.rs b/src/librustc_trans/debuginfo/create_scope_map.rs index b1cfeb8125..ba592382d1 100644 --- a/src/librustc_trans/debuginfo/create_scope_map.rs +++ b/src/librustc_trans/debuginfo/create_scope_map.rs @@ -8,19 +8,24 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use super::FunctionDebugContext; use super::metadata::file_metadata; -use super::utils::DIB; +use super::utils::{DIB, span_start}; use llvm; use llvm::debuginfo::{DIScope, DISubprogram}; -use common::CrateContext; +use common::{CrateContext, FunctionContext}; use rustc::hir::pat_util; +use rustc::mir::repr::{Mir, ScopeId}; use rustc::util::nodemap::NodeMap; use libc::c_uint; +use std::ptr; + use syntax::codemap::{Span, Pos}; use syntax::{ast, codemap}; +use rustc_data_structures::bitvec::BitVector; use rustc::hir::{self, PatKind}; // This procedure builds the *scope map* for a given function, which maps any @@ -46,9 +51,9 @@ pub fn create_scope_map(cx: &CrateContext, // Push argument identifiers onto the stack so arguments integrate nicely // with variable shadowing. 
for arg in args { - pat_util::pat_bindings_ident(def_map, &arg.pat, |_, node_id, _, path1| { + pat_util::pat_bindings(def_map, &arg.pat, |_, node_id, _, path1| { scope_stack.push(ScopeStackEntry { scope_metadata: fn_metadata, - name: Some(path1.node.unhygienic_name) }); + name: Some(path1.node.unhygienize()) }); scope_map.insert(node_id, fn_metadata); }) } @@ -65,6 +70,81 @@ pub fn create_scope_map(cx: &CrateContext, return scope_map; } +/// Produce DIScope DIEs for each MIR Scope which has variables defined in it. +/// If debuginfo is disabled, the returned vector is empty. +pub fn create_mir_scopes(fcx: &FunctionContext) -> Vec { + let mir = fcx.mir.clone().expect("create_mir_scopes: missing MIR for fn"); + let mut scopes = vec![ptr::null_mut(); mir.scopes.len()]; + + let fn_metadata = match fcx.debug_context { + FunctionDebugContext::RegularContext(box ref data) => data.fn_metadata, + FunctionDebugContext::DebugInfoDisabled | + FunctionDebugContext::FunctionWithoutDebugInfo => { + return scopes; + } + }; + + // Find all the scopes with variables defined in them. + let mut has_variables = BitVector::new(mir.scopes.len()); + for var in &mir.var_decls { + has_variables.insert(var.scope.index()); + } + + // Instantiate all scopes. + for idx in 0..mir.scopes.len() { + let scope = ScopeId::new(idx); + make_mir_scope(fcx.ccx, &mir, &has_variables, fn_metadata, scope, &mut scopes); + } + + scopes +} + +fn make_mir_scope(ccx: &CrateContext, + mir: &Mir, + has_variables: &BitVector, + fn_metadata: DISubprogram, + scope: ScopeId, + scopes: &mut [DIScope]) { + let idx = scope.index(); + if !scopes[idx].is_null() { + return; + } + + let scope_data = &mir.scopes[scope]; + let parent_scope = if let Some(parent) = scope_data.parent_scope { + make_mir_scope(ccx, mir, has_variables, fn_metadata, parent, scopes); + scopes[parent.index()] + } else { + // The root is the function itself. + scopes[idx] = fn_metadata; + return; + }; + + if !has_variables.contains(idx) { + // Do not create a DIScope if there are no variables + // defined in this MIR Scope, to avoid debuginfo bloat. + + // However, we don't skip creating a nested scope if + // our parent is the root, because we might want to + // put arguments in the root and not have shadowing. + if parent_scope != fn_metadata { + scopes[idx] = parent_scope; + return; + } + } + + let loc = span_start(ccx, scope_data.span); + let file_metadata = file_metadata(ccx, &loc.file.name); + scopes[idx] = unsafe { + llvm::LLVMDIBuilderCreateLexicalBlock( + DIB(ccx), + parent_scope, + file_metadata, + loc.line as c_uint, + loc.col.to_usize() as c_uint) + }; +} + // local helper functions for walking the AST. fn with_new_scope(cx: &CrateContext, scope_span: Span, @@ -74,7 +154,7 @@ fn with_new_scope(cx: &CrateContext, F: FnOnce(&CrateContext, &mut Vec, &mut NodeMap), { // Create a new lexical scope and push it onto the stack - let loc = cx.sess().codemap().lookup_char_pos(scope_span.lo); + let loc = span_start(cx, scope_span); let file_metadata = file_metadata(cx, &loc.file.name); let parent_scope = scope_stack.last().unwrap().scope_metadata; @@ -168,7 +248,7 @@ fn walk_pattern(cx: &CrateContext, // scope stack and maybe introduce an artificial scope if pat_util::pat_is_binding(&def_map.borrow(), &pat) { - let name = path1.node.unhygienic_name; + let name = path1.node.unhygienize(); // LLVM does not properly generate 'DW_AT_start_scope' fields // for variable DIEs. 
For this reason we have to introduce @@ -199,7 +279,7 @@ fn walk_pattern(cx: &CrateContext, if need_new_scope { // Create a new lexical scope and push it onto the stack - let loc = cx.sess().codemap().lookup_char_pos(pat.span.lo); + let loc = span_start(cx, pat.span); let file_metadata = file_metadata(cx, &loc.file.name); let parent_scope = scope_stack.last().unwrap().scope_metadata; @@ -399,7 +479,7 @@ fn walk_expr(cx: &CrateContext, }) } - hir::ExprClosure(_, ref decl, ref block) => { + hir::ExprClosure(_, ref decl, ref block, _) => { with_new_scope(cx, block.span, scope_stack, diff --git a/src/librustc_trans/debuginfo/metadata.rs b/src/librustc_trans/debuginfo/metadata.rs index 8471b6a274..ccb01789aa 100644 --- a/src/librustc_trans/debuginfo/metadata.rs +++ b/src/librustc_trans/debuginfo/metadata.rs @@ -16,7 +16,7 @@ use self::EnumDiscriminantInfo::*; use super::utils::{debug_context, DIB, span_start, bytes_to_bits, size_and_align_of, get_namespace_and_span_for_item, create_DIArray, fn_should_be_ignored, is_node_local_to_unit}; -use super::namespace::namespace_for_item; +use super::namespace::mangled_name_of_item; use super::type_names::{compute_debuginfo_type_name, push_debuginfo_type_name}; use super::{declare_local, VariableKind, VariableAccess}; @@ -24,7 +24,6 @@ use llvm::{self, ValueRef}; use llvm::debuginfo::{DIType, DIFile, DIScope, DIDescriptor, DICompositeType}; use rustc::hir::def_id::DefId; -use rustc::infer; use rustc::hir::pat_util; use rustc::ty::subst; use rustc::hir::map as hir_map; @@ -68,8 +67,8 @@ pub const UNKNOWN_LINE_NUMBER: c_uint = 0; pub const UNKNOWN_COLUMN_NUMBER: c_uint = 0; // ptr::null() doesn't work :( -const NO_FILE_METADATA: DIFile = (0 as DIFile); -const NO_SCOPE_METADATA: DIScope = (0 as DIScope); +pub const NO_FILE_METADATA: DIFile = (0 as DIFile); +pub const NO_SCOPE_METADATA: DIScope = (0 as DIScope); const FLAGS_NONE: c_uint = 0; @@ -188,10 +187,10 @@ impl<'tcx> TypeMap<'tcx> { unique_type_id.push_str("struct "); from_def_id_and_substs(self, cx, def.did, substs, &mut unique_type_id); }, - ty::TyTuple(ref component_types) if component_types.is_empty() => { + ty::TyTuple(component_types) if component_types.is_empty() => { push_debuginfo_type_name(cx, type_, false, &mut unique_type_id); }, - ty::TyTuple(ref component_types) => { + ty::TyTuple(component_types) => { unique_type_id.push_str("tuple "); for &component_type in component_types { let component_type_id = @@ -263,7 +262,7 @@ impl<'tcx> TypeMap<'tcx> { unique_type_id.push_str(" fn("); let sig = cx.tcx().erase_late_bound_regions(sig); - let sig = infer::normalize_associated_type(cx.tcx(), &sig); + let sig = cx.tcx().normalize_associated_type(&sig); for ¶meter_type in &sig.inputs { let parameter_type_id = @@ -290,12 +289,12 @@ impl<'tcx> TypeMap<'tcx> { } } }, - ty::TyClosure(_, ref substs) if substs.upvar_tys.is_empty() => { + ty::TyClosure(_, substs) if substs.upvar_tys.is_empty() => { push_debuginfo_type_name(cx, type_, false, &mut unique_type_id); }, - ty::TyClosure(_, ref substs) => { + ty::TyClosure(_, substs) => { unique_type_id.push_str("closure "); - for upvar_type in &substs.upvar_tys { + for upvar_type in substs.upvar_tys { let upvar_type_id = self.get_unique_type_id_of_type(cx, upvar_type); let upvar_type_id = @@ -1159,12 +1158,12 @@ fn prepare_struct_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let struct_name = compute_debuginfo_type_name(cx, struct_type, false); let struct_llvm_type = type_of::in_memory_type_of(cx, struct_type); - let (variant, substs) = match struct_type.sty { 
- ty::TyStruct(def, substs) => (def.struct_variant(), substs), + let (struct_def_id, variant, substs) = match struct_type.sty { + ty::TyStruct(def, substs) => (def.did, def.struct_variant(), substs), _ => bug!("prepare_struct_metadata on a non-struct") }; - let (containing_scope, _) = get_namespace_and_span_for_item(cx, variant.did); + let (containing_scope, _) = get_namespace_and_span_for_item(cx, struct_def_id); let struct_metadata_stub = create_struct_stub(cx, struct_llvm_type, @@ -1846,28 +1845,8 @@ pub fn create_global_var_metadata(cx: &CrateContext, return; } - let var_item = cx.tcx().map.get(node_id); - - let (name, span) = match var_item { - hir_map::NodeItem(item) => { - match item.node { - hir::ItemStatic(..) => (item.name, item.span), - hir::ItemConst(..) => (item.name, item.span), - _ => { - span_bug!(item.span, - "debuginfo::\ - create_global_var_metadata() - - Captured var-id refers to \ - unexpected ast_item variant: {:?}", - var_item) - } - } - }, - _ => bug!("debuginfo::create_global_var_metadata() \ - - Captured var-id refers to unexpected \ - hir_map variant: {:?}", - var_item) - }; + let node_def_id = cx.tcx().map.local_def_id(node_id); + let (var_scope, span) = get_namespace_and_span_for_item(cx, node_def_id); let (file_metadata, line_number) = if span != codemap::DUMMY_SP { let loc = span_start(cx, span); @@ -1879,12 +1858,8 @@ pub fn create_global_var_metadata(cx: &CrateContext, let is_local_to_unit = is_node_local_to_unit(cx, node_id); let variable_type = cx.tcx().node_id_to_type(node_id); let type_metadata = type_metadata(cx, variable_type, span); - let node_def_id = cx.tcx().map.local_def_id(node_id); - let namespace_node = namespace_for_item(cx, node_def_id); - let var_name = name.to_string(); - let linkage_name = - namespace_node.mangled_name_of_contained_item(&var_name[..]); - let var_scope = namespace_node.scope; + let var_name = cx.tcx().item_name(node_def_id).to_string(); + let linkage_name = mangled_name_of_item(cx, node_def_id, ""); let var_name = CString::new(var_name).unwrap(); let linkage_name = CString::new(linkage_name).unwrap(); @@ -1971,7 +1946,7 @@ pub fn create_captured_var_metadata<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, Some(hir_map::NodeLocal(pat)) => { match pat.node { PatKind::Ident(_, ref path1, _) => { - path1.node.name + path1.node } _ => { span_bug!(span, diff --git a/src/librustc_trans/debuginfo/mod.rs b/src/librustc_trans/debuginfo/mod.rs index bb999c31ff..6c1bd715f1 100644 --- a/src/librustc_trans/debuginfo/mod.rs +++ b/src/librustc_trans/debuginfo/mod.rs @@ -14,42 +14,37 @@ mod doc; use self::VariableAccess::*; use self::VariableKind::*; -use self::utils::{DIB, span_start, assert_type_for_node_id, contains_nodebug_attribute, - create_DIArray, is_node_local_to_unit}; -use self::namespace::{namespace_for_item, NamespaceTreeNode}; +use self::utils::{DIB, span_start, create_DIArray, is_node_local_to_unit}; +use self::namespace::mangled_name_of_item; use self::type_names::compute_debuginfo_type_name; use self::metadata::{type_metadata, diverging_type_metadata}; use self::metadata::{file_metadata, scope_metadata, TypeMap, compile_unit_metadata}; -use self::source_loc::InternalDebugLocation; +use self::source_loc::InternalDebugLocation::{self, UnknownLocation}; use llvm; use llvm::{ModuleRef, ContextRef, ValueRef}; use llvm::debuginfo::{DIFile, DIType, DIScope, DIBuilderRef, DISubprogram, DIArray, - DIDescriptor, FlagPrototyped}; + FlagPrototyped}; use rustc::hir::def_id::DefId; -use rustc::infer::normalize_associated_type; -use 
rustc::ty::subst::{self, Substs}; +use rustc::hir::map::DefPathData; +use rustc::ty::subst::Substs; use rustc::hir; use abi::Abi; -use common::{NodeIdAndSpan, CrateContext, FunctionContext, Block}; -use monomorphize; -use rustc::infer; +use common::{NodeIdAndSpan, CrateContext, FunctionContext, Block, BlockAndBuilder}; +use monomorphize::{self, Instance}; use rustc::ty::{self, Ty}; use session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo}; -use util::nodemap::{NodeMap, FnvHashMap, FnvHashSet}; -use rustc::hir::map as hir_map; +use util::nodemap::{DefIdMap, NodeMap, FnvHashMap, FnvHashSet}; use libc::c_uint; use std::cell::{Cell, RefCell}; use std::ffi::CString; use std::ptr; -use std::rc::Rc; use syntax::codemap::{Span, Pos}; use syntax::{ast, codemap}; use syntax::attr::IntType; -use syntax::parse::token::{self, special_idents}; pub mod gdb; mod utils; @@ -59,8 +54,7 @@ mod metadata; mod create_scope_map; mod source_loc; -pub use self::source_loc::set_source_location; -pub use self::source_loc::clear_source_location; +pub use self::create_scope_map::create_mir_scopes; pub use self::source_loc::start_emitting_source_locations; pub use self::source_loc::get_cleanup_debug_loc_for_ast_node; pub use self::source_loc::with_source_location_override; @@ -84,7 +78,7 @@ pub struct CrateDebugContext<'tcx> { created_enum_disr_types: RefCell>, type_map: RefCell>, - namespace_map: RefCell, Rc>>, + namespace_map: RefCell>, // This collection is used to assert that composite types (structs, enums, // ...) have their members only set once: @@ -104,7 +98,7 @@ impl<'tcx> CrateDebugContext<'tcx> { created_files: RefCell::new(FnvHashMap()), created_enum_disr_types: RefCell::new(FnvHashMap()), type_map: RefCell::new(TypeMap::new()), - namespace_map: RefCell::new(FnvHashMap()), + namespace_map: RefCell::new(DefIdMap()), composite_types_completed: RefCell::new(FnvHashSet()), }; } @@ -214,6 +208,18 @@ pub fn finalize(cx: &CrateContext) { }; } +/// Creates a function-specific debug context for a function w/o debuginfo. +pub fn empty_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>) + -> FunctionDebugContext { + if cx.sess().opts.debuginfo == NoDebugInfo { + return FunctionDebugContext::DebugInfoDisabled; + } + + // Clear the debug location so we don't assign them in the function prelude. + source_loc::set_debug_location(cx, None, UnknownLocation); + FunctionDebugContext::FunctionWithoutDebugInfo +} + /// Creates the function-specific debug context. /// /// Returns the FunctionDebugContext for the function which holds state needed @@ -221,8 +227,9 @@ pub fn finalize(cx: &CrateContext) { /// FunctionDebugContext enum which indicates why no debuginfo should be created /// for the function. pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - fn_ast_id: ast::NodeId, - param_substs: &Substs<'tcx>, + instance: Instance<'tcx>, + sig: &ty::FnSig<'tcx>, + abi: Abi, llfn: ValueRef) -> FunctionDebugContext { if cx.sess().opts.debuginfo == NoDebugInfo { return FunctionDebugContext::DebugInfoDisabled; @@ -230,103 +237,9 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, // Clear the debug location so we don't assign them in the function prelude. // Do this here already, in case we do an early exit from this function. - source_loc::set_debug_location(cx, InternalDebugLocation::UnknownLocation); - - if fn_ast_id == ast::DUMMY_NODE_ID { - // This is a function not linked to any source location, so don't - // generate debuginfo for it. 
- return FunctionDebugContext::FunctionWithoutDebugInfo; - } - - let empty_generics = hir::Generics::empty(); - - let fnitem = cx.tcx().map.get(fn_ast_id); + source_loc::set_debug_location(cx, None, UnknownLocation); - let (name, fn_decl, generics, top_level_block, span, has_path) = match fnitem { - hir_map::NodeItem(ref item) => { - if contains_nodebug_attribute(&item.attrs) { - return FunctionDebugContext::FunctionWithoutDebugInfo; - } - - match item.node { - hir::ItemFn(ref fn_decl, _, _, _, ref generics, ref top_level_block) => { - (item.name, fn_decl, generics, top_level_block, item.span, true) - } - _ => { - span_bug!(item.span, - "create_function_debug_context: item bound to non-function"); - } - } - } - hir_map::NodeImplItem(impl_item) => { - match impl_item.node { - hir::ImplItemKind::Method(ref sig, ref body) => { - if contains_nodebug_attribute(&impl_item.attrs) { - return FunctionDebugContext::FunctionWithoutDebugInfo; - } - - (impl_item.name, - &sig.decl, - &sig.generics, - body, - impl_item.span, - true) - } - _ => { - span_bug!(impl_item.span, - "create_function_debug_context() \ - called on non-method impl item?!") - } - } - } - hir_map::NodeExpr(ref expr) => { - match expr.node { - hir::ExprClosure(_, ref fn_decl, ref top_level_block) => { - let name = format!("fn{}", token::gensym("fn")); - let name = token::intern(&name[..]); - (name, fn_decl, - // This is not quite right. It should actually inherit - // the generics of the enclosing function. - &empty_generics, - top_level_block, - expr.span, - // Don't try to lookup the item path: - false) - } - _ => span_bug!(expr.span, - "create_function_debug_context: expected an expr_fn_block here") - } - } - hir_map::NodeTraitItem(trait_item) => { - match trait_item.node { - hir::MethodTraitItem(ref sig, Some(ref body)) => { - if contains_nodebug_attribute(&trait_item.attrs) { - return FunctionDebugContext::FunctionWithoutDebugInfo; - } - - (trait_item.name, - &sig.decl, - &sig.generics, - body, - trait_item.span, - true) - } - _ => { - bug!("create_function_debug_context: \ - unexpected sort of node: {:?}", - fnitem) - } - } - } - hir_map::NodeForeignItem(..) | - hir_map::NodeVariant(..) | - hir_map::NodeStructCtor(..) => { - return FunctionDebugContext::FunctionWithoutDebugInfo; - } - _ => bug!("create_function_debug_context: \ - unexpected sort of node: {:?}", - fnitem) - }; + let (containing_scope, span) = get_containing_scope_and_span(cx, instance); // This can be the case for functions inlined from another crate if span == codemap::DUMMY_SP { @@ -337,45 +250,40 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, let file_metadata = file_metadata(cx, &loc.file.name); let function_type_metadata = unsafe { - let fn_signature = get_function_signature(cx, - fn_ast_id, - param_substs, - span); + let fn_signature = get_function_signature(cx, sig, abi); llvm::LLVMDIBuilderCreateSubroutineType(DIB(cx), file_metadata, fn_signature) }; + // Find the enclosing function, in case this is a closure. + let mut fn_def_id = instance.def; + let mut def_key = cx.tcx().def_key(fn_def_id); + let mut name = def_key.disambiguated_data.data.to_string(); + let name_len = name.len(); + while def_key.disambiguated_data.data == DefPathData::ClosureExpr { + fn_def_id.index = def_key.parent.expect("closure without a parent?"); + def_key = cx.tcx().def_key(fn_def_id); + } + // Get_template_parameters() will append a `<...>` clause to the function // name if necessary. 
- let mut function_name = name.to_string(); + let generics = cx.tcx().lookup_item_type(fn_def_id).generics; let template_parameters = get_template_parameters(cx, - generics, - param_substs, + &generics, + instance.substs, file_metadata, - &mut function_name); - - // There is no hir_map::Path for hir::ExprClosure-type functions. For now, - // just don't put them into a namespace. In the future this could be improved - // somehow (storing a path in the hir_map, or construct a path using the - // enclosing function). - let (linkage_name, containing_scope) = if has_path { - let fn_ast_def_id = cx.tcx().map.local_def_id(fn_ast_id); - let namespace_node = namespace_for_item(cx, fn_ast_def_id); - let linkage_name = namespace_node.mangled_name_of_contained_item( - &function_name[..]); - let containing_scope = namespace_node.scope; - (linkage_name, containing_scope) - } else { - (function_name.clone(), file_metadata) - }; + &mut name); + + // Build the linkage_name out of the item path and "template" parameters. + let linkage_name = mangled_name_of_item(cx, instance.def, &name[name_len..]); - // Clang sets this parameter to the opening brace of the function's block, - // so let's do this too. - let scope_line = span_start(cx, top_level_block.span).line; + let scope_line = span_start(cx, span).line; - let is_local_to_unit = is_node_local_to_unit(cx, fn_ast_id); + let local_id = cx.tcx().map.as_local_node_id(instance.def); + let is_local_to_unit = local_id.map_or(false, |id| is_node_local_to_unit(cx, id)); - let function_name = CString::new(function_name).unwrap(); + let function_name = CString::new(name).unwrap(); let linkage_name = CString::new(linkage_name).unwrap(); + let fn_metadata = unsafe { llvm::LLVMDIBuilderCreateFunction( DIB(cx), @@ -395,54 +303,24 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, ptr::null_mut()) }; - let scope_map = create_scope_map::create_scope_map(cx, - &fn_decl.inputs, - &top_level_block, - fn_metadata, - fn_ast_id); - // Initialize fn debug context (including scope map and namespace map) let fn_debug_context = box FunctionDebugContextData { - scope_map: RefCell::new(scope_map), + scope_map: RefCell::new(NodeMap()), fn_metadata: fn_metadata, argument_counter: Cell::new(1), source_locations_enabled: Cell::new(false), source_location_override: Cell::new(false), }; - - return FunctionDebugContext::RegularContext(fn_debug_context); fn get_function_signature<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - fn_ast_id: ast::NodeId, - param_substs: &Substs<'tcx>, - error_reporting_span: Span) -> DIArray { + sig: &ty::FnSig<'tcx>, + abi: Abi) -> DIArray { if cx.sess().opts.debuginfo == LimitedDebugInfo { return create_DIArray(DIB(cx), &[]); } - // Return type -- llvm::DIBuilder wants this at index 0 - assert_type_for_node_id(cx, fn_ast_id, error_reporting_span); - let fn_type = cx.tcx().node_id_to_type(fn_ast_id); - let fn_type = monomorphize::apply_param_substs(cx.tcx(), param_substs, &fn_type); - - let (sig, abi) = match fn_type.sty { - ty::TyFnDef(_, _, ref barefnty) | ty::TyFnPtr(ref barefnty) => { - let sig = cx.tcx().erase_late_bound_regions(&barefnty.sig); - let sig = infer::normalize_associated_type(cx.tcx(), &sig); - (sig, barefnty.abi) - } - ty::TyClosure(def_id, ref substs) => { - let closure_type = cx.tcx().closure_type(def_id, substs); - let sig = cx.tcx().erase_late_bound_regions(&closure_type.sig); - let sig = infer::normalize_associated_type(cx.tcx(), &sig); - (sig, closure_type.abi) - } - - _ => bug!("get_function_metdata: Expected a 
function type!") - }; - let mut signature = Vec::with_capacity(sig.inputs.len() + 1); // Return type -- llvm::DIBuilder wants this at index 0 @@ -466,7 +344,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } if abi == Abi::RustCall && !sig.inputs.is_empty() { - if let ty::TyTuple(ref args) = sig.inputs[sig.inputs.len() - 1].sty { + if let ty::TyTuple(args) = sig.inputs[sig.inputs.len() - 1].sty { for &argument_type in args { signature.push(type_metadata(cx, argument_type, codemap::DUMMY_SP)); } @@ -477,86 +355,40 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, } fn get_template_parameters<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, - generics: &hir::Generics, + generics: &ty::Generics<'tcx>, param_substs: &Substs<'tcx>, file_metadata: DIFile, name_to_append_suffix_to: &mut String) -> DIArray { - let self_type = param_substs.self_ty(); - let self_type = normalize_associated_type(cx.tcx(), &self_type); - - // Only true for static default methods: - let has_self_type = self_type.is_some(); + let actual_types = param_substs.types.as_slice(); - if !generics.is_type_parameterized() && !has_self_type { + if actual_types.is_empty() { return create_DIArray(DIB(cx), &[]); } name_to_append_suffix_to.push('<'); - - // The list to be filled with template parameters: - let mut template_params: Vec = - Vec::with_capacity(generics.ty_params.len() + 1); - - // Handle self type - if has_self_type { - let actual_self_type = self_type.unwrap(); - // Add self type name to <...> clause of function name - let actual_self_type_name = compute_debuginfo_type_name( - cx, - actual_self_type, - true); - - name_to_append_suffix_to.push_str(&actual_self_type_name[..]); - - if generics.is_type_parameterized() { - name_to_append_suffix_to.push_str(","); - } - - // Only create type information if full debuginfo is enabled - if cx.sess().opts.debuginfo == FullDebugInfo { - let actual_self_type_metadata = type_metadata(cx, - actual_self_type, - codemap::DUMMY_SP); - - let name = special_idents::type_self.name.as_str(); - - let name = CString::new(name.as_bytes()).unwrap(); - let param_metadata = unsafe { - llvm::LLVMDIBuilderCreateTemplateTypeParameter( - DIB(cx), - ptr::null_mut(), - name.as_ptr(), - actual_self_type_metadata, - file_metadata, - 0, - 0) - }; - - template_params.push(param_metadata); - } - } - - // Handle other generic parameters - let actual_types = param_substs.types.get_slice(subst::FnSpace); - for (index, &hir::TyParam{ name, .. 
}) in generics.ty_params.iter().enumerate() { - let actual_type = actual_types[index]; + for (i, &actual_type) in actual_types.iter().enumerate() { + let actual_type = cx.tcx().normalize_associated_type(&actual_type); // Add actual type name to <...> clause of function name let actual_type_name = compute_debuginfo_type_name(cx, actual_type, true); name_to_append_suffix_to.push_str(&actual_type_name[..]); - if index != generics.ty_params.len() - 1 { + if i != actual_types.len() - 1 { name_to_append_suffix_to.push_str(","); } + } + name_to_append_suffix_to.push('>'); - // Again, only create type information if full debuginfo is enabled - if cx.sess().opts.debuginfo == FullDebugInfo { + // Again, only create type information if full debuginfo is enabled + let template_params: Vec<_> = if cx.sess().opts.debuginfo == FullDebugInfo { + generics.types.as_slice().iter().enumerate().map(|(i, param)| { + let actual_type = cx.tcx().normalize_associated_type(&actual_types[i]); let actual_type_metadata = type_metadata(cx, actual_type, codemap::DUMMY_SP); - let name = CString::new(name.as_str().as_bytes()).unwrap(); - let param_metadata = unsafe { + let name = CString::new(param.name.as_str().as_bytes()).unwrap(); + unsafe { llvm::LLVMDIBuilderCreateTemplateTypeParameter( DIB(cx), ptr::null_mut(), @@ -565,24 +397,83 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, file_metadata, 0, 0) - }; - template_params.push(param_metadata); + } + }).collect() + } else { + vec![] + }; + + return create_DIArray(DIB(cx), &template_params[..]); + } + + fn get_containing_scope_and_span<'ccx, 'tcx>(cx: &CrateContext<'ccx, 'tcx>, + instance: Instance<'tcx>) + -> (DIScope, Span) { + // First, let's see if this is a method within an inherent impl. Because + // if yes, we want to make the result subroutine DIE a child of the + // subroutine's self-type. + let self_type = cx.tcx().impl_of_method(instance.def).and_then(|impl_def_id| { + // If the method does *not* belong to a trait, proceed + if cx.tcx().trait_id_of_impl(impl_def_id).is_none() { + let impl_self_ty = cx.tcx().lookup_item_type(impl_def_id).ty; + let impl_self_ty = cx.tcx().erase_regions(&impl_self_ty); + let impl_self_ty = monomorphize::apply_param_substs(cx.tcx(), + instance.substs, + &impl_self_ty); + Some(type_metadata(cx, impl_self_ty, codemap::DUMMY_SP)) + } else { + // For trait method impls we still use the "parallel namespace" + // strategy + None } - } + }); - name_to_append_suffix_to.push('>'); + let containing_scope = self_type.unwrap_or_else(|| { + namespace::item_namespace(cx, DefId { + krate: instance.def.krate, + index: cx.tcx() + .def_key(instance.def) + .parent + .expect("get_containing_scope_and_span: missing parent?") + }) + }); - return create_DIArray(DIB(cx), &template_params[..]); + // Try to get some span information, if we have an inlined item. + let definition_span = match cx.external().borrow().get(&instance.def) { + Some(&Some(node_id)) => cx.tcx().map.span(node_id), + _ => cx.tcx().map.def_id_span(instance.def, codemap::DUMMY_SP) + }; + + (containing_scope, definition_span) + } +} + +/// Computes the scope map for a function given its declaration and body. 
+pub fn fill_scope_map_for_function<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>, + fn_decl: &hir::FnDecl, + top_level_block: &hir::Block, + fn_ast_id: ast::NodeId) { + match fcx.debug_context { + FunctionDebugContext::RegularContext(box ref data) => { + let scope_map = create_scope_map::create_scope_map(fcx.ccx, + &fn_decl.inputs, + top_level_block, + data.fn_metadata, + fn_ast_id); + *data.scope_map.borrow_mut() = scope_map; + } + FunctionDebugContext::DebugInfoDisabled | + FunctionDebugContext::FunctionWithoutDebugInfo => {} } } -fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, - variable_name: ast::Name, - variable_type: Ty<'tcx>, - scope_metadata: DIScope, - variable_access: VariableAccess, - variable_kind: VariableKind, - span: Span) { +pub fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, + variable_name: ast::Name, + variable_type: Ty<'tcx>, + scope_metadata: DIScope, + variable_access: VariableAccess, + variable_kind: VariableKind, + span: Span) { let cx: &CrateContext = bcx.ccx(); let filename = span_start(cx, span).file.name.clone(); @@ -616,9 +507,8 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, address_operations.len() as c_uint, argument_index) }; - source_loc::set_debug_location(cx, InternalDebugLocation::new(scope_metadata, - loc.line, - loc.col.to_usize())); + source_loc::set_debug_location(cx, None, + InternalDebugLocation::new(scope_metadata, loc.line, loc.col.to_usize())); unsafe { let debug_loc = llvm::LLVMGetCurrentDebugLocation(cx.raw_builder()); let instr = llvm::LLVMDIBuilderInsertDeclareAtEnd( @@ -642,7 +532,7 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, .get_ref(span) .source_locations_enabled .get()); - source_loc::set_debug_location(cx, InternalDebugLocation::UnknownLocation); + source_loc::set_debug_location(cx, None, UnknownLocation); } _ => { /* nothing to do */ } } @@ -651,19 +541,17 @@ fn declare_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum DebugLoc { At(ast::NodeId, Span), + ScopeAt(DIScope, Span), None } impl DebugLoc { - pub fn apply(&self, fcx: &FunctionContext) { - match *self { - DebugLoc::At(node_id, span) => { - source_loc::set_source_location(fcx, node_id, span); - } - DebugLoc::None => { - source_loc::clear_source_location(fcx); - } - } + pub fn apply(self, fcx: &FunctionContext) { + source_loc::set_source_location(fcx, None, self); + } + + pub fn apply_to_bcx(self, bcx: &BlockAndBuilder) { + source_loc::set_source_location(bcx.fcx(), Some(bcx), self); } } diff --git a/src/librustc_trans/debuginfo/namespace.rs b/src/librustc_trans/debuginfo/namespace.rs index 5272a4fbbb..fc31eaa4e7 100644 --- a/src/librustc_trans/debuginfo/namespace.rs +++ b/src/librustc_trans/debuginfo/namespace.rs @@ -10,118 +10,82 @@ // Namespace Handling. 
-use super::utils::{DIB, debug_context}; +use super::metadata::{file_metadata, NO_FILE_METADATA, UNKNOWN_LINE_NUMBER}; +use super::utils::{DIB, debug_context, span_start}; use llvm; use llvm::debuginfo::DIScope; use rustc::hir::def_id::DefId; -use rustc::hir::map as hir_map; +use rustc::hir::map::DefPathData; use common::CrateContext; +use libc::c_uint; use std::ffi::CString; -use std::iter::once; use std::ptr; -use std::rc::{Rc, Weak}; -use syntax::ast; -use syntax::parse::token; - -pub struct NamespaceTreeNode { - pub name: ast::Name, - pub scope: DIScope, - pub parent: Option>, -} - -impl NamespaceTreeNode { - pub fn mangled_name_of_contained_item(&self, item_name: &str) -> String { - fn fill_nested(node: &NamespaceTreeNode, output: &mut String) { - match node.parent { - Some(ref parent) => fill_nested(&parent.upgrade().unwrap(), output), - None => {} - } - let string = node.name.as_str(); - output.push_str(&string.len().to_string()); - output.push_str(&string); +use syntax::codemap::DUMMY_SP; + +pub fn mangled_name_of_item(ccx: &CrateContext, def_id: DefId, extra: &str) -> String { + fn fill_nested(ccx: &CrateContext, def_id: DefId, extra: &str, output: &mut String) { + let def_key = ccx.tcx().def_key(def_id); + if let Some(parent) = def_key.parent { + fill_nested(ccx, DefId { + krate: def_id.krate, + index: parent + }, "", output); } - let mut name = String::from("_ZN"); - fill_nested(self, &mut name); - name.push_str(&item_name.len().to_string()); - name.push_str(item_name); - name.push('E'); - name - } -} - -pub fn namespace_for_item(cx: &CrateContext, def_id: DefId) -> Rc { - // prepend crate name. - // This shouldn't need a roundtrip through InternedString. - let krate = token::intern(&cx.tcx().crate_name(def_id.krate)); - let krate = hir_map::DefPathData::TypeNs(krate); - let path = cx.tcx().def_path(def_id).data; - let mut path = once(krate).chain(path.into_iter().map(|e| e.data)).peekable(); - - let mut current_key = Vec::new(); - let mut parent_node: Option> = None; - - // Create/Lookup namespace for each element of the path. - loop { - // Emulate a for loop so we can use peek below. - let path_element = match path.next() { - Some(e) => e, - None => break + let name = match def_key.disambiguated_data.data { + DefPathData::CrateRoot => ccx.tcx().crate_name(def_id.krate), + data => data.as_interned_str() }; - // Ignore the name of the item (the last path element). - if path.peek().is_none() { - break; - } - // This shouldn't need a roundtrip through InternedString. - let namespace_name = path_element.as_interned_str(); - let name = token::intern(&namespace_name); - current_key.push(name); - - let existing_node = debug_context(cx).namespace_map.borrow() - .get(¤t_key).cloned(); - let current_node = match existing_node { - Some(existing_node) => existing_node, - None => { - // create and insert - let parent_scope = match parent_node { - Some(ref node) => node.scope, - None => ptr::null_mut() - }; - let namespace_name = CString::new(namespace_name.as_bytes()).unwrap(); - let scope = unsafe { - llvm::LLVMDIBuilderCreateNameSpace( - DIB(cx), - parent_scope, - namespace_name.as_ptr(), - // cannot reconstruct file ... - ptr::null_mut(), - // ... or line information, but that's not so important. 
- 0) - }; - - let node = Rc::new(NamespaceTreeNode { - name: name, - scope: scope, - parent: parent_node.map(|parent| Rc::downgrade(&parent)), - }); - - debug_context(cx).namespace_map.borrow_mut() - .insert(current_key.clone(), node.clone()); + output.push_str(&(name.len() + extra.len()).to_string()); + output.push_str(&name); + output.push_str(extra); + } - node - } - }; + let mut name = String::from("_ZN"); + fill_nested(ccx, def_id, extra, &mut name); + name.push('E'); + name +} - parent_node = Some(current_node); +pub fn item_namespace(ccx: &CrateContext, def_id: DefId) -> DIScope { + if let Some(&scope) = debug_context(ccx).namespace_map.borrow().get(&def_id) { + return scope; } - match parent_node { - Some(node) => node, - None => { - bug!("debuginfo::namespace_for_item: path too short for {:?}", def_id); - } - } + let def_key = ccx.tcx().def_key(def_id); + let parent_scope = def_key.parent.map_or(ptr::null_mut(), |parent| { + item_namespace(ccx, DefId { + krate: def_id.krate, + index: parent + }) + }); + + let namespace_name = match def_key.disambiguated_data.data { + DefPathData::CrateRoot => ccx.tcx().crate_name(def_id.krate), + data => data.as_interned_str() + }; + + let namespace_name = CString::new(namespace_name.as_bytes()).unwrap(); + let span = ccx.tcx().map.def_id_span(def_id, DUMMY_SP); + let (file, line) = if span != DUMMY_SP { + let loc = span_start(ccx, span); + (file_metadata(ccx, &loc.file.name), loc.line as c_uint) + } else { + (NO_FILE_METADATA, UNKNOWN_LINE_NUMBER) + }; + + let scope = unsafe { + llvm::LLVMDIBuilderCreateNameSpace( + DIB(ccx), + parent_scope, + namespace_name.as_ptr(), + file, + line as c_uint) + }; + + debug_context(ccx).namespace_map.borrow_mut().insert(def_id, scope); + scope } diff --git a/src/librustc_trans/debuginfo/source_loc.rs b/src/librustc_trans/debuginfo/source_loc.rs index 2879da7d03..6b00c1bb1a 100644 --- a/src/librustc_trans/debuginfo/source_loc.rs +++ b/src/librustc_trans/debuginfo/source_loc.rs @@ -10,12 +10,13 @@ use self::InternalDebugLocation::*; -use super::utils::{debug_context, span_start, fn_should_be_ignored}; +use super::utils::{debug_context, span_start}; use super::metadata::{scope_metadata,UNKNOWN_COLUMN_NUMBER}; use super::{FunctionDebugContext, DebugLoc}; use llvm; use llvm::debuginfo::DIScope; +use builder::Builder; use common::{NodeIdAndSpan, CrateContext, FunctionContext}; use libc::c_uint; @@ -86,41 +87,46 @@ pub fn get_cleanup_debug_loc_for_ast_node<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, /// Sets the current debug location at the beginning of the span. /// -/// Maps to a call to llvm::LLVMSetCurrentDebugLocation(...). The node_id -/// parameter is used to reliably find the correct visibility scope for the code -/// position. +/// Maps to a call to llvm::LLVMSetCurrentDebugLocation(...). pub fn set_source_location(fcx: &FunctionContext, - node_id: ast::NodeId, - span: Span) { - match fcx.debug_context { + builder: Option<&Builder>, + debug_loc: DebugLoc) { + let builder = builder.map(|b| b.llbuilder); + let function_debug_context = match fcx.debug_context { FunctionDebugContext::DebugInfoDisabled => return, FunctionDebugContext::FunctionWithoutDebugInfo => { - set_debug_location(fcx.ccx, UnknownLocation); + set_debug_location(fcx.ccx, builder, UnknownLocation); return; } - FunctionDebugContext::RegularContext(box ref function_debug_context) => { - if function_debug_context.source_location_override.get() { - // Just ignore any attempts to set a new debug location while - // the override is active. 
- return; - } - - let cx = fcx.ccx; - - debug!("set_source_location: {}", cx.sess().codemap().span_to_string(span)); + FunctionDebugContext::RegularContext(box ref data) => data + }; - if function_debug_context.source_locations_enabled.get() { - let loc = span_start(cx, span); - let scope = scope_metadata(fcx, node_id, span); + if function_debug_context.source_location_override.get() { + // Just ignore any attempts to set a new debug location while + // the override is active. + return; + } - set_debug_location(cx, InternalDebugLocation::new(scope, - loc.line, - loc.col.to_usize())); - } else { - set_debug_location(cx, UnknownLocation); + let dbg_loc = if function_debug_context.source_locations_enabled.get() { + let (scope, span) = match debug_loc { + DebugLoc::At(node_id, span) => { + (scope_metadata(fcx, node_id, span), span) } - } - } + DebugLoc::ScopeAt(scope, span) => (scope, span), + DebugLoc::None => { + set_debug_location(fcx.ccx, builder, UnknownLocation); + return; + } + }; + + debug!("set_source_location: {}", + fcx.ccx.sess().codemap().span_to_string(span)); + let loc = span_start(fcx.ccx, span); + InternalDebugLocation::new(scope, loc.line, loc.col.to_usize()) + } else { + UnknownLocation + }; + set_debug_location(fcx.ccx, builder, dbg_loc); } /// This function makes sure that all debug locations emitted while executing @@ -135,7 +141,7 @@ pub fn with_source_location_override(fcx: &FunctionContext, wrapped_function() } FunctionDebugContext::FunctionWithoutDebugInfo => { - set_debug_location(fcx.ccx, UnknownLocation); + set_debug_location(fcx.ccx, None, UnknownLocation); wrapped_function() } FunctionDebugContext::RegularContext(box ref function_debug_context) => { @@ -152,17 +158,6 @@ pub fn with_source_location_override(fcx: &FunctionContext, } } -/// Clears the current debug location. -/// -/// Instructions generated hereafter won't be assigned a source location. -pub fn clear_source_location(fcx: &FunctionContext) { - if fn_should_be_ignored(fcx) { - return; - } - - set_debug_location(fcx.ccx, UnknownLocation); -} - /// Enables emitting source locations for the given functions. /// /// Since we don't want source locations to be emitted for the function prelude, @@ -195,37 +190,42 @@ impl InternalDebugLocation { } } -pub fn set_debug_location(cx: &CrateContext, debug_location: InternalDebugLocation) { - if debug_location == debug_context(cx).current_debug_location.get() { - return; +pub fn set_debug_location(cx: &CrateContext, + builder: Option, + debug_location: InternalDebugLocation) { + if builder.is_none() { + if debug_location == debug_context(cx).current_debug_location.get() { + return; + } } - let metadata_node; - - match debug_location { + let metadata_node = match debug_location { KnownLocation { scope, line, .. 
} => { // Always set the column to zero like Clang and GCC let col = UNKNOWN_COLUMN_NUMBER; debug!("setting debug location to {} {}", line, col); unsafe { - metadata_node = llvm::LLVMDIBuilderCreateDebugLocation( + llvm::LLVMDIBuilderCreateDebugLocation( debug_context(cx).llcontext, line as c_uint, col as c_uint, scope, - ptr::null_mut()); + ptr::null_mut()) } } UnknownLocation => { debug!("clearing debug location "); - metadata_node = ptr::null_mut(); + ptr::null_mut() } }; - unsafe { - llvm::LLVMSetCurrentDebugLocation(cx.raw_builder(), metadata_node); + if builder.is_none() { + debug_context(cx).current_debug_location.set(debug_location); } - debug_context(cx).current_debug_location.set(debug_location); + let builder = builder.unwrap_or_else(|| cx.raw_builder()); + unsafe { + llvm::LLVMSetCurrentDebugLocation(builder, metadata_node); + } } diff --git a/src/librustc_trans/debuginfo/type_names.rs b/src/librustc_trans/debuginfo/type_names.rs index 6fdd6a2c1d..63f460e469 100644 --- a/src/librustc_trans/debuginfo/type_names.rs +++ b/src/librustc_trans/debuginfo/type_names.rs @@ -12,7 +12,6 @@ use common::CrateContext; use rustc::hir::def_id::DefId; -use rustc::infer; use rustc::ty::subst; use rustc::ty::{self, Ty}; @@ -49,7 +48,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, push_item_name(cx, def.did, qualified, output); push_type_params(cx, substs, output); }, - ty::TyTuple(ref component_types) => { + ty::TyTuple(component_types) => { output.push('('); for &component_type in component_types { push_debuginfo_type_name(cx, component_type, true, output); @@ -114,7 +113,7 @@ pub fn push_debuginfo_type_name<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, output.push_str("fn("); let sig = cx.tcx().erase_late_bound_regions(sig); - let sig = infer::normalize_associated_type(cx.tcx(), &sig); + let sig = cx.tcx().normalize_associated_type(&sig); if !sig.inputs.is_empty() { for ¶meter_type in &sig.inputs { push_debuginfo_type_name(cx, parameter_type, true, output); diff --git a/src/librustc_trans/debuginfo/utils.rs b/src/librustc_trans/debuginfo/utils.rs index bef7af3aec..3fd9793718 100644 --- a/src/librustc_trans/debuginfo/utils.rs +++ b/src/librustc_trans/debuginfo/utils.rs @@ -11,7 +11,7 @@ // Utility Functions. use super::{FunctionDebugContext, CrateDebugContext}; -use super::namespace::namespace_for_item; +use super::namespace::item_namespace; use rustc::hir::def_id::DefId; @@ -44,16 +44,6 @@ pub fn create_DIArray(builder: DIBuilderRef, arr: &[DIDescriptor]) -> DIArray { }; } -pub fn contains_nodebug_attribute(attributes: &[ast::Attribute]) -> bool { - attributes.iter().any(|attr| { - let meta_item: &ast::MetaItem = &attr.node.value; - match meta_item.node { - ast::MetaItemKind::Word(ref value) => &value[..] 
== "no_debug", - _ => false - } - }) -} - /// Return codemap::Loc corresponding to the beginning of the span pub fn span_start(cx: &CrateContext, span: Span) -> codemap::Loc { cx.sess().codemap().lookup_char_pos(span.lo) @@ -87,21 +77,19 @@ pub fn fn_should_be_ignored(fcx: &FunctionContext) -> bool { } } -pub fn assert_type_for_node_id(cx: &CrateContext, - node_id: ast::NodeId, - error_reporting_span: Span) { - if !cx.tcx().node_types().contains_key(&node_id) { - span_bug!(error_reporting_span, - "debuginfo: Could not find type for node id!"); - } -} - pub fn get_namespace_and_span_for_item(cx: &CrateContext, def_id: DefId) -> (DIScope, Span) { - let containing_scope = namespace_for_item(cx, def_id).scope; - let definition_span = cx.tcx().map.def_id_span(def_id, codemap::DUMMY_SP /* (1) */ ); - - // (1) For external items there is no span information + let containing_scope = item_namespace(cx, DefId { + krate: def_id.krate, + index: cx.tcx().def_key(def_id).parent + .expect("get_namespace_and_span_for_item: missing parent?") + }); + + // Try to get some span information, if we have an inlined item. + let definition_span = match cx.external().borrow().get(&def_id) { + Some(&Some(node_id)) => cx.tcx().map.span(node_id), + _ => cx.tcx().map.def_id_span(def_id, codemap::DUMMY_SP) + }; (containing_scope, definition_span) } diff --git a/src/librustc_trans/declare.rs b/src/librustc_trans/declare.rs index eb520fe744..e6db695943 100644 --- a/src/librustc_trans/declare.rs +++ b/src/librustc_trans/declare.rs @@ -21,7 +21,6 @@ //! * When in doubt, define. use llvm::{self, ValueRef}; use rustc::ty; -use rustc::infer; use abi::{Abi, FnType}; use attributes; use context::CrateContext; @@ -69,6 +68,17 @@ fn declare_raw_fn(ccx: &CrateContext, name: &str, callconv: llvm::CallConv, ty: llvm::SetFunctionAttribute(llfn, llvm::Attribute::NoRedZone) } + match ccx.tcx().sess.opts.cg.opt_level.as_ref().map(String::as_ref) { + Some("s") => { + llvm::SetFunctionAttribute(llfn, llvm::Attribute::OptimizeForSize); + }, + Some("z") => { + llvm::SetFunctionAttribute(llfn, llvm::Attribute::MinSize); + llvm::SetFunctionAttribute(llfn, llvm::Attribute::OptimizeForSize); + }, + _ => {}, + } + llfn } @@ -94,7 +104,7 @@ pub fn declare_fn<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, name: &str, debug!("declare_rust_fn(name={:?}, fn_type={:?})", name, fn_type); let abi = fn_type.fn_abi(); let sig = ccx.tcx().erase_late_bound_regions(fn_type.fn_sig()); - let sig = infer::normalize_associated_type(ccx.tcx(), &sig); + let sig = ccx.tcx().normalize_associated_type(&sig); debug!("declare_rust_fn (after region erasure) sig={:?}", sig); let fty = FnType::new(ccx, abi, &sig, &[]); diff --git a/src/librustc_trans/diagnostics.rs b/src/librustc_trans/diagnostics.rs index 5e4902cf3c..d9de673db2 100644 --- a/src/librustc_trans/diagnostics.rs +++ b/src/librustc_trans/diagnostics.rs @@ -82,19 +82,4 @@ extern "platform-intrinsic" { unsafe { simd_add(i32x1(0), i32x1(1)); } // ok! ``` "##, - -E0515: r##" -A constant index expression was out of bounds. Erroneous code example: - -```compile_fail -let x = &[0, 1, 2][7]; // error: const index-expr is out of bounds -``` - -Please specify a valid index (not inferior to 0 or superior to array length). 
-Example: - -``` -let x = &[0, 1, 2][2]; // ok -``` -"##, } diff --git a/src/librustc_trans/expr.rs b/src/librustc_trans/expr.rs index beca81da05..36a593a546 100644 --- a/src/librustc_trans/expr.rs +++ b/src/librustc_trans/expr.rs @@ -63,7 +63,6 @@ use cleanup::{self, CleanupMethods, DropHintMethods}; use common::*; use datum::*; use debuginfo::{self, DebugLoc, ToDebugLoc}; -use declare; use glue; use machine; use tvec; @@ -115,7 +114,7 @@ pub fn trans_into<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, -> Block<'blk, 'tcx> { let mut bcx = bcx; - debuginfo::set_source_location(bcx.fcx, expr.id, expr.span); + expr.debug_loc().apply(bcx.fcx); if adjustment_required(bcx, expr) { // use trans, which may be less efficient but @@ -510,7 +509,9 @@ fn coerce_unsized<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let source = unpack_datum!(bcx, source.to_ref_datum(bcx)); assert!(target.kind.is_by_ref()); - let kind = custom_coerce_unsize_info(bcx.ccx(), source.ty, target.ty); + let kind = custom_coerce_unsize_info(bcx.ccx().shared(), + source.ty, + target.ty); let repr_source = adt::represent_type(bcx.ccx(), source.ty); let src_fields = match &*repr_source { @@ -587,7 +588,7 @@ fn trans_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, debug!("trans_unadjusted(expr={:?})", expr); let _indenter = indenter(); - debuginfo::set_source_location(bcx.fcx, expr.id, expr.span); + expr.debug_loc().apply(bcx.fcx); return match expr_kind(bcx.tcx(), expr) { ExprKind::Lvalue | ExprKind::RvalueDatum => { @@ -720,7 +721,7 @@ fn trans_field<'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>, base: &hir::Expr, get_idx: F) -> DatumBlock<'blk, 'tcx, Expr> where - F: FnOnce(&'blk TyCtxt<'tcx>, &VariantInfo<'tcx>) -> usize, + F: FnOnce(TyCtxt<'blk, 'tcx, 'tcx>, &VariantInfo<'tcx>) -> usize, { let mut bcx = bcx; let _icx = push_ctxt("trans_rec_field"); @@ -923,17 +924,17 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, return bcx; } - debuginfo::set_source_location(bcx.fcx, expr.id, expr.span); + expr.debug_loc().apply(bcx.fcx); match expr.node { hir::ExprBreak(label_opt) => { - controlflow::trans_break(bcx, expr, label_opt.map(|l| l.node.name)) + controlflow::trans_break(bcx, expr, label_opt.map(|l| l.node)) } hir::ExprType(ref e, _) => { trans_into(bcx, &e, Ignore) } hir::ExprAgain(label_opt) => { - controlflow::trans_cont(bcx, expr, label_opt.map(|l| l.node.name)) + controlflow::trans_cont(bcx, expr, label_opt.map(|l| l.node)) } hir::ExprRet(ref ex) => { // Check to see if the return expression itself is reachable. @@ -987,7 +988,7 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // // We could avoid this intermediary with some analysis // to determine whether `dst` may possibly own `src`. - debuginfo::set_source_location(bcx.fcx, expr.id, expr.span); + expr.debug_loc().apply(bcx.fcx); let src_datum = unpack_datum!( bcx, src_datum.to_rvalue_datum(bcx, "ExprAssign")); let opt_hint_datum = dst_datum.kind.drop_flag_info.hint_datum(bcx); @@ -1062,7 +1063,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let _icx = push_ctxt("trans_rvalue_dps_unadjusted"); let mut bcx = bcx; - debuginfo::set_source_location(bcx.fcx, expr.id, expr.span); + expr.debug_loc().apply(bcx.fcx); // Entry into the method table if this is an overloaded call/op. let method_call = MethodCall::expr(expr.id); @@ -1118,7 +1119,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, hir::ExprVec(..) | hir::ExprRepeat(..) 
=> { tvec::trans_fixed_vstore(bcx, expr, dest) } - hir::ExprClosure(_, ref decl, ref body) => { + hir::ExprClosure(_, ref decl, ref body, _) => { let dest = match dest { SaveIn(lldest) => closure::Dest::SaveIn(bcx, lldest), Ignore => closure::Dest::Ignore(bcx.ccx()) @@ -1132,7 +1133,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // the key we need to find the closure-kind and // closure-type etc. let (def_id, substs) = match expr_ty(bcx, expr).sty { - ty::TyClosure(def_id, ref substs) => (def_id, substs), + ty::TyClosure(def_id, substs) => (def_id, substs), ref t => span_bug!( expr.span, @@ -1591,7 +1592,6 @@ fn trans_scalar_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, { let _icx = push_ctxt("trans_scalar_binop"); - let tcx = bcx.tcx(); let lhs_t = lhs.ty; assert!(!lhs_t.is_simd()); let is_float = lhs_t.is_fp(); @@ -1654,42 +1654,7 @@ fn trans_scalar_binop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } hir::BiRem => { if is_float { - // LLVM currently always lowers the `frem` instructions appropriate - // library calls typically found in libm. Notably f64 gets wired up - // to `fmod` and f32 gets wired up to `fmodf`. Inconveniently for - // us, 32-bit MSVC does not actually have a `fmodf` symbol, it's - // instead just an inline function in a header that goes up to a - // f64, uses `fmod`, and then comes back down to a f32. - // - // Although LLVM knows that `fmodf` doesn't exist on MSVC, it will - // still unconditionally lower frem instructions over 32-bit floats - // to a call to `fmodf`. To work around this we special case MSVC - // 32-bit float rem instructions and instead do the call out to - // `fmod` ourselves. - // - // Note that this is currently duplicated with src/libcore/ops.rs - // which does the same thing, and it would be nice to perhaps unify - // these two implementations on day! Also note that we call `fmod` - // for both 32 and 64-bit floats because if we emit any FRem - // instruction at all then LLVM is capable of optimizing it into a - // 32-bit FRem (which we're trying to avoid). 
- let use_fmod = tcx.sess.target.target.options.is_like_msvc && - tcx.sess.target.target.arch == "x86"; - if use_fmod { - let f64t = Type::f64(bcx.ccx()); - let fty = Type::func(&[f64t, f64t], &f64t); - let llfn = declare::declare_cfn(bcx.ccx(), "fmod", fty); - if lhs_t == tcx.types.f32 { - let lhs = FPExt(bcx, lhs, f64t); - let rhs = FPExt(bcx, rhs, f64t); - let res = Call(bcx, llfn, &[lhs, rhs], binop_debug_loc); - FPTrunc(bcx, res, Type::f32(bcx.ccx())) - } else { - Call(bcx, llfn, &[lhs, rhs], binop_debug_loc) - } - } else { - FRem(bcx, lhs, rhs, binop_debug_loc) - } + FRem(bcx, lhs, rhs, binop_debug_loc) } else { // Only zero-check integers; fp %0 is NaN bcx = base::fail_if_zero_or_overflows(bcx, @@ -1824,11 +1789,11 @@ fn trans_binary<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } } -pub fn cast_is_noop<'tcx>(tcx: &TyCtxt<'tcx>, - expr: &hir::Expr, - t_in: Ty<'tcx>, - t_out: Ty<'tcx>) - -> bool { +pub fn cast_is_noop<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + expr: &hir::Expr, + t_in: Ty<'tcx>, + t_out: Ty<'tcx>) + -> bool { if let Some(&CastKind::CoercionCast) = tcx.cast_kinds.borrow().get(&expr.id) { return true; } @@ -2183,7 +2148,7 @@ impl OverflowOpViaIntrinsic { let name = self.to_intrinsic_name(bcx.tcx(), lhs_ty); bcx.ccx().get_intrinsic(&name) } - fn to_intrinsic_name(&self, tcx: &TyCtxt, ty: Ty) -> &'static str { + fn to_intrinsic_name(&self, tcx: TyCtxt, ty: Ty) -> &'static str { use syntax::ast::IntTy::*; use syntax::ast::UintTy::*; use rustc::ty::{TyInt, TyUint}; @@ -2375,7 +2340,7 @@ enum ExprKind { RvalueStmt } -fn expr_kind(tcx: &TyCtxt, expr: &hir::Expr) -> ExprKind { +fn expr_kind<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, expr: &hir::Expr) -> ExprKind { if tcx.is_method_call(expr.id) { // Overloaded operations are generally calls, and hence they are // generated via DPS, but there are a few exceptions: diff --git a/src/librustc_trans/glue.rs b/src/librustc_trans/glue.rs index 5676024ea9..10e3319530 100644 --- a/src/librustc_trans/glue.rs +++ b/src/librustc_trans/glue.rs @@ -29,13 +29,14 @@ use build::*; use callee::{Callee, ArgVals}; use cleanup; use cleanup::CleanupMethods; -use collector::{self, TransItem}; +use collector; use common::*; use debuginfo::DebugLoc; use declare; use expr; use machine::*; use monomorphize; +use trans_item::TransItem; use type_of::{type_of, sizing_type_of, align_of}; use type_::Type; use value::Value; @@ -88,17 +89,17 @@ pub fn trans_exchange_free_ty<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, } } -pub fn type_needs_drop<'tcx>(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>) -> bool { +pub fn type_needs_drop<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty: Ty<'tcx>) -> bool { tcx.type_needs_drop_given_env(ty, &tcx.empty_parameter_environment()) } -pub fn get_drop_glue_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +pub fn get_drop_glue_type<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, t: Ty<'tcx>) -> Ty<'tcx> { - let tcx = ccx.tcx(); // Even if there is no dtor for t, there might be one deeper down and we // might need to pass in the vtable ptr. if !type_is_sized(tcx, t) { - return t + return tcx.erase_regions(&t); } // FIXME (#22815): note that type_needs_drop conservatively @@ -110,21 +111,23 @@ pub fn get_drop_glue_type<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // returned `tcx.types.i8` does not appear unsound. The impact on // code quality is unknown at this time.) 
- if !type_needs_drop(&tcx, t) { + if !type_needs_drop(tcx, t) { return tcx.types.i8; } match t.sty { - ty::TyBox(typ) if !type_needs_drop(&tcx, typ) + ty::TyBox(typ) if !type_needs_drop(tcx, typ) && type_is_sized(tcx, typ) => { - let llty = sizing_type_of(ccx, typ); - // `Box` does not allocate. - if llsize_of_alloc(ccx, llty) == 0 { - tcx.types.i8 - } else { - t - } + tcx.normalizing_infer_ctxt(traits::ProjectionMode::Any).enter(|infcx| { + let layout = t.layout(&infcx).unwrap(); + if layout.size(&tcx.data_layout).bytes() == 0 { + // `Box` does not allocate. + tcx.types.i8 + } else { + tcx.erase_regions(&t) + } + }) } - _ => t + _ => tcx.erase_regions(&t) } } @@ -154,7 +157,7 @@ pub fn drop_ty_core<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, DropGlueKind::Ty(t) }; let glue = get_drop_glue_core(ccx, g); - let glue_type = get_drop_glue_type(ccx, t); + let glue_type = get_drop_glue_type(ccx.tcx(), t); let ptr = if glue_type != t { PointerCast(bcx, v, type_of(ccx, glue_type).ptr_to()) } else { @@ -215,11 +218,11 @@ pub enum DropGlueKind<'tcx> { } impl<'tcx> DropGlueKind<'tcx> { - fn ty(&self) -> Ty<'tcx> { + pub fn ty(&self) -> Ty<'tcx> { match *self { DropGlueKind::Ty(t) | DropGlueKind::TyContents(t) => t } } - fn map_ty(&self, mut f: F) -> DropGlueKind<'tcx> where F: FnMut(Ty<'tcx>) -> Ty<'tcx> + pub fn map_ty(&self, mut f: F) -> DropGlueKind<'tcx> where F: FnMut(Ty<'tcx>) -> Ty<'tcx> { match *self { DropGlueKind::Ty(t) => DropGlueKind::Ty(f(t)), @@ -231,7 +234,7 @@ impl<'tcx> DropGlueKind<'tcx> { fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, g: DropGlueKind<'tcx>) -> ValueRef { debug!("make drop glue for {:?}", g); - let g = g.map_ty(|t| get_drop_glue_type(ccx, t)); + let g = g.map_ty(|t| get_drop_glue_type(ccx.tcx(), t)); debug!("drop glue type {:?}", g); match ccx.drop_glues().borrow().get(&g) { Some(&glue) => return glue, @@ -272,10 +275,9 @@ fn get_drop_glue_core<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let _s = StatRecorder::new(ccx, format!("drop {:?}", t)); - let empty_substs = ccx.tcx().mk_substs(Substs::empty()); let (arena, fcx): (TypedArena<_>, FunctionContext); arena = TypedArena::new(); - fcx = FunctionContext::new(ccx, llfn, fn_ty, None, empty_substs, &arena); + fcx = FunctionContext::new(ccx, llfn, fn_ty, None, &arena); let bcx = fcx.init(false, None); @@ -365,7 +367,7 @@ fn trans_struct_drop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, def_id: tcx.lang_items.drop_trait().unwrap(), substs: tcx.mk_substs(Substs::empty().with_self_ty(t)) }); - let vtbl = match fulfill_obligation(bcx.ccx(), DUMMY_SP, trait_ref) { + let vtbl = match fulfill_obligation(bcx.ccx().shared(), DUMMY_SP, trait_ref) { traits::VtableImpl(data) => data, _ => bug!("dtor for {:?} is not an impl???", t) }; @@ -488,14 +490,13 @@ pub fn size_and_align_of_dst<'blk, 'tcx>(bcx: &BlockAndBuilder<'blk, 'tcx>, fn make_drop_glue<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, v0: ValueRef, g: DropGlueKind<'tcx>) -> Block<'blk, 'tcx> { - let t = g.ty(); - - if collector::collecting_debug_information(bcx.ccx()) { + if collector::collecting_debug_information(bcx.ccx().shared()) { bcx.ccx() - .record_translation_item_as_generated(TransItem::DropGlue(bcx.tcx() - .erase_regions(&t))); + .record_translation_item_as_generated(TransItem::DropGlue(g)); } + let t = g.ty(); + let skip_dtor = match g { DropGlueKind::Ty(_) => false, DropGlueKind::TyContents(_) => true }; // NB: v0 is an *alias* of type t here, not a direct value. 
let _icx = push_ctxt("make_drop_glue"); diff --git a/src/librustc_trans/inline.rs b/src/librustc_trans/inline.rs index 1eff09d67f..af175fbf88 100644 --- a/src/librustc_trans/inline.rs +++ b/src/librustc_trans/inline.rs @@ -9,7 +9,7 @@ // except according to those terms. use llvm::{AvailableExternallyLinkage, InternalLinkage, SetLinkage}; -use middle::cstore::{CrateStore, FoundAst, InlinedItem}; +use middle::cstore::{FoundAst, InlinedItem}; use rustc::hir::def_id::DefId; use rustc::ty::subst::Substs; use base::{push_ctxt, trans_item, trans_fn}; diff --git a/src/librustc_trans/intrinsic.rs b/src/librustc_trans/intrinsic.rs index 0f9b04c04f..640ac25a5e 100644 --- a/src/librustc_trans/intrinsic.rs +++ b/src/librustc_trans/intrinsic.rs @@ -15,7 +15,6 @@ use intrinsics::{self, Intrinsic}; use libc; use llvm; use llvm::{ValueRef, TypeKind}; -use rustc::infer; use rustc::ty::subst; use rustc::ty::subst::FnSpace; use abi::{Abi, FnType}; @@ -114,7 +113,7 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let (def_id, substs, sig) = match callee_ty.sty { ty::TyFnDef(def_id, substs, fty) => { let sig = tcx.erase_late_bound_regions(&fty.sig); - (def_id, substs, infer::normalize_associated_type(tcx, &sig)) + (def_id, substs, tcx.normalize_associated_type(&sig)) } _ => bug!("expected fn item type, found {}", callee_ty) }; @@ -123,8 +122,11 @@ pub fn trans_intrinsic_call<'a, 'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>, let name = tcx.item_name(def_id).as_str(); let span = match call_debug_location { - DebugLoc::At(_, span) => span, - DebugLoc::None => fcx.span.unwrap_or(DUMMY_SP) + DebugLoc::At(_, span) | DebugLoc::ScopeAt(_, span) => span, + DebugLoc::None => { + span_bug!(fcx.span.unwrap_or(DUMMY_SP), + "intrinsic `{}` called with missing span", name); + } }; let cleanup_scope = fcx.push_custom_cleanup_scope(); @@ -1093,9 +1095,7 @@ fn trans_msvc_try<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // We're generating an IR snippet that looks like: // // declare i32 @rust_try(%func, %data, %ptr) { - // %slot = alloca i8* - // call @llvm.localescape(%slot) - // store %ptr, %slot + // %slot = alloca i64* // invoke %func(%data) to label %normal unwind label %catchswitch // // normal: @@ -1105,26 +1105,34 @@ fn trans_msvc_try<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, // %cs = catchswitch within none [%catchpad] unwind to caller // // catchpad: - // %tok = catchpad within %cs [%rust_try_filter] + // %tok = catchpad within %cs [%type_descriptor, 0, %slot] + // %ptr[0] = %slot[0] + // %ptr[1] = %slot[1] // catchret from %tok to label %caught // // caught: // ret i32 1 // } // - // This structure follows the basic usage of the instructions in LLVM - // (see their documentation/test cases for examples), but a - // perhaps-surprising part here is the usage of the `localescape` - // intrinsic. This is used to allow the filter function (also generated - // here) to access variables on the stack of this intrinsic. This - // ability enables us to transfer information about the exception being - // thrown to this point, where we're catching the exception. + // This structure follows the basic usage of throw/try/catch in LLVM. + // For example, compile this C++ snippet to see what LLVM generates: + // + // #include + // + // int bar(void (*foo)(void), uint64_t *ret) { + // try { + // foo(); + // return 0; + // } catch(uint64_t a[2]) { + // ret[0] = a[0]; + // ret[1] = a[1]; + // return 1; + // } + // } // // More information can be found in libstd's seh.rs implementation. 
- let slot = Alloca(bcx, Type::i8p(ccx), "slot"); - let localescape = ccx.get_intrinsic(&"llvm.localescape"); - Call(bcx, localescape, &[slot], dloc); - Store(bcx, local_ptr, slot); + let i64p = Type::i64(ccx).ptr_to(); + let slot = Alloca(bcx, i64p, "slot"); Invoke(bcx, func, &[data], normal.llbb, catchswitch.llbb, dloc); Ret(normal, C_i32(ccx, 0), dloc); @@ -1132,9 +1140,19 @@ fn trans_msvc_try<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, let cs = CatchSwitch(catchswitch, None, None, 1); AddHandler(catchswitch, cs, catchpad.llbb); - let filter = generate_filter_fn(bcx.fcx, bcx.fcx.llfn); - let filter = BitCast(catchpad, filter, Type::i8p(ccx)); - let tok = CatchPad(catchpad, cs, &[filter]); + let tcx = ccx.tcx(); + let tydesc = match tcx.lang_items.msvc_try_filter() { + Some(did) => ::consts::get_static(ccx, did).to_llref(), + None => bug!("msvc_try_filter not defined"), + }; + let tok = CatchPad(catchpad, cs, &[tydesc, C_i32(ccx, 0), slot]); + let addr = Load(catchpad, slot); + let arg1 = Load(catchpad, addr); + let val1 = C_i32(ccx, 1); + let arg2 = Load(catchpad, InBoundsGEP(catchpad, addr, &[val1])); + let local_ptr = BitCast(catchpad, local_ptr, i64p); + Store(catchpad, arg1, local_ptr); + Store(catchpad, arg2, InBoundsGEP(catchpad, local_ptr, &[val1])); CatchRet(catchpad, tok, caught.llbb); Ret(caught, C_i32(ccx, 1), dloc); @@ -1241,16 +1259,15 @@ fn gen_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>, }; let fn_ty = FnType::new(ccx, Abi::Rust, &sig, &[]); - let rust_fn_ty = ccx.tcx().mk_fn_ptr(ty::BareFnTy { + let rust_fn_ty = ccx.tcx().mk_fn_ptr(ccx.tcx().mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Unsafe, abi: Abi::Rust, sig: ty::Binder(sig) - }); + })); let llfn = declare::define_internal_fn(ccx, name, rust_fn_ty); - let empty_substs = ccx.tcx().mk_substs(Substs::empty()); let (fcx, block_arena); block_arena = TypedArena::new(); - fcx = FunctionContext::new(ccx, llfn, fn_ty, None, empty_substs, &block_arena); + fcx = FunctionContext::new(ccx, llfn, fn_ty, None, &block_arena); let bcx = fcx.init(true, None); trans(bcx); fcx.cleanup(); @@ -1272,7 +1289,7 @@ fn get_rust_try_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>, // Define the type up front for the signature of the rust_try function. let tcx = ccx.tcx(); let i8p = tcx.mk_mut_ptr(tcx.types.i8); - let fn_ty = tcx.mk_fn_ptr(ty::BareFnTy { + let fn_ty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Unsafe, abi: Abi::Rust, sig: ty::Binder(ty::FnSig { @@ -1280,96 +1297,13 @@ fn get_rust_try_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>, output: ty::FnOutput::FnConverging(tcx.mk_nil()), variadic: false, }), - }); + })); let output = ty::FnOutput::FnConverging(tcx.types.i32); let rust_try = gen_fn(fcx, "__rust_try", vec![fn_ty, i8p, i8p], output, trans); ccx.rust_try_fn().set(Some(rust_try)); return rust_try } -// For MSVC-style exceptions (SEH), the compiler generates a filter function -// which is used to determine whether an exception is being caught (e.g. if it's -// a Rust exception or some other). -// -// This function is used to generate said filter function. The shim generated -// here is actually just a thin wrapper to call the real implementation in the -// standard library itself. For reasons as to why, see seh.rs in the standard -// library. 
-fn generate_filter_fn<'a, 'tcx>(fcx: &FunctionContext<'a, 'tcx>, - rust_try_fn: ValueRef) - -> ValueRef { - let ccx = fcx.ccx; - let tcx = ccx.tcx(); - let dloc = DebugLoc::None; - - let rust_try_filter = match tcx.lang_items.msvc_try_filter() { - Some(did) => { - Callee::def(ccx, did, tcx.mk_substs(Substs::empty())).reify(ccx).val - } - None => bug!("msvc_try_filter not defined"), - }; - - let output = ty::FnOutput::FnConverging(tcx.types.i32); - let i8p = tcx.mk_mut_ptr(tcx.types.i8); - - let frameaddress = ccx.get_intrinsic(&"llvm.frameaddress"); - let recoverfp = ccx.get_intrinsic(&"llvm.x86.seh.recoverfp"); - let localrecover = ccx.get_intrinsic(&"llvm.localrecover"); - - // On all platforms, once we have the EXCEPTION_POINTERS handle as well as - // the base pointer, we follow the standard layout of: - // - // block: - // %parentfp = call i8* llvm.x86.seh.recoverfp(@rust_try_fn, %bp) - // %arg = call i8* llvm.localrecover(@rust_try_fn, %parentfp, 0) - // %ret = call i32 @the_real_filter_function(%ehptrs, %arg) - // ret i32 %ret - // - // The recoverfp intrinsic is used to recover the frame frame pointer of the - // `rust_try_fn` function, which is then in turn passed to the - // `localrecover` intrinsic (pairing with the `localescape` intrinsic - // mentioned above). Putting all this together means that we now have a - // handle to the arguments passed into the `try` function, allowing writing - // to the stack over there. - // - // For more info, see seh.rs in the standard library. - let do_trans = |bcx: Block, ehptrs, base_pointer| { - let rust_try_fn = BitCast(bcx, rust_try_fn, Type::i8p(ccx)); - let parentfp = Call(bcx, recoverfp, &[rust_try_fn, base_pointer], dloc); - let arg = Call(bcx, localrecover, - &[rust_try_fn, parentfp, C_i32(ccx, 0)], dloc); - let ret = Call(bcx, rust_try_filter, &[ehptrs, arg], dloc); - Ret(bcx, ret, dloc); - }; - - if ccx.tcx().sess.target.target.arch == "x86" { - // On x86 the filter function doesn't actually receive any arguments. - // Instead the %ebp register contains some contextual information. - // - // Unfortunately I don't know of any great documentation as to what's - // going on here, all I can say is that there's a few tests cases in - // LLVM's test suite which follow this pattern of instructions, so we - // just do the same. - gen_fn(fcx, "__rustc_try_filter", vec![], output, &mut |bcx| { - let ebp = Call(bcx, frameaddress, &[C_i32(ccx, 1)], dloc); - let exn = InBoundsGEP(bcx, ebp, &[C_i32(ccx, -20)]); - let exn = Load(bcx, BitCast(bcx, exn, Type::i8p(ccx).ptr_to())); - do_trans(bcx, exn, ebp); - }) - } else if ccx.tcx().sess.target.target.arch == "x86_64" { - // Conveniently on x86_64 the EXCEPTION_POINTERS handle and base pointer - // are passed in as arguments to the filter function, so we just pass - // those along. 
- gen_fn(fcx, "__rustc_try_filter", vec![i8p, i8p], output, &mut |bcx| { - let exn = llvm::get_param(bcx.fcx.llfn, 0); - let rbp = llvm::get_param(bcx.fcx.llfn, 1); - do_trans(bcx, exn, rbp); - }) - } else { - bug!("unknown target to generate a filter function") - } -} - fn span_invalid_monomorphization_error(a: &Session, b: Span, c: &str) { span_err!(a, b, E0511, "{}", c); } @@ -1417,7 +1351,7 @@ fn generic_simd_intrinsic<'blk, 'tcx, 'a> let tcx = bcx.tcx(); let sig = tcx.erase_late_bound_regions(callee_ty.fn_sig()); - let sig = infer::normalize_associated_type(tcx, &sig); + let sig = tcx.normalize_associated_type(&sig); let arg_tys = sig.inputs; // every intrinsic takes a SIMD vector as its first argument @@ -1480,28 +1414,23 @@ fn generic_simd_intrinsic<'blk, 'tcx, 'a> let total_len = in_len as u64 * 2; - let (vector, indirect) = match args { + let vector = match args { Some(args) => { match consts::const_expr(bcx.ccx(), &args[2], substs, None, // this should probably help simd error reporting consts::TrueConst::Yes) { - Ok((vector, _)) => (vector, false), + Ok((vector, _)) => vector, Err(err) => bcx.sess().span_fatal(span, &err.description()), } } - None => (llargs[2], !type_is_immediate(bcx.ccx(), arg_tys[2])) + None => llargs[2] }; let indices: Option> = (0..n) .map(|i| { let arg_idx = i; - let val = if indirect { - Load(bcx, StructGEP(bcx, vector, i)) - } else { - const_get_elt(vector, &[i as libc::c_uint]) - }; - let c = const_to_opt_uint(val); - match c { + let val = const_get_elt(vector, &[i as libc::c_uint]); + match const_to_opt_uint(val) { None => { emit_error!("shuffle index #{} is not a constant", arg_idx); None diff --git a/src/librustc_trans/lib.rs b/src/librustc_trans/lib.rs index cb421b6be4..bccb5aa050 100644 --- a/src/librustc_trans/lib.rs +++ b/src/librustc_trans/lib.rs @@ -103,6 +103,7 @@ mod cabi_x86_win64; mod callee; mod cleanup; mod closure; +mod collector; mod common; mod consts; mod context; @@ -120,8 +121,9 @@ mod _match; mod meth; mod mir; mod monomorphize; -mod collector; +mod partitioning; mod symbol_names_test; +mod trans_item; mod tvec; mod type_; mod type_of; diff --git a/src/librustc_trans/meth.rs b/src/librustc_trans/meth.rs index 478094c2b8..64ee18fcce 100644 --- a/src/librustc_trans/meth.rs +++ b/src/librustc_trans/meth.rs @@ -14,7 +14,6 @@ use arena::TypedArena; use back::symbol_names; use llvm::{ValueRef, get_params}; use rustc::hir::def_id::DefId; -use rustc::infer; use rustc::ty::subst::{FnSpace, Subst, Substs}; use rustc::ty::subst; use rustc::traits::{self, ProjectionMode}; @@ -86,17 +85,16 @@ pub fn trans_object_shim<'a, 'tcx>(ccx: &'a CrateContext<'a, 'tcx>, method_ty); let sig = tcx.erase_late_bound_regions(&method_ty.fn_sig()); - let sig = infer::normalize_associated_type(tcx, &sig); + let sig = tcx.normalize_associated_type(&sig); let fn_ty = FnType::new(ccx, method_ty.fn_abi(), &sig, &[]); let function_name = symbol_names::internal_name_from_type_and_suffix(ccx, method_ty, "object_shim"); let llfn = declare::define_internal_fn(ccx, &function_name, method_ty); - let empty_substs = tcx.mk_substs(Substs::empty()); let (block_arena, fcx): (TypedArena<_>, FunctionContext); block_arena = TypedArena::new(); - fcx = FunctionContext::new(ccx, llfn, fn_ty, None, empty_substs, &block_arena); + fcx = FunctionContext::new(ccx, llfn, fn_ty, None, &block_arena); let mut bcx = fcx.init(false, None); assert!(!fcx.needs_ret_allocas); @@ -145,7 +143,7 @@ pub fn get_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // Not in the cache. Build it. 
let methods = traits::supertraits(tcx, trait_ref.clone()).flat_map(|trait_ref| { - let vtable = fulfill_obligation(ccx, DUMMY_SP, trait_ref.clone()); + let vtable = fulfill_obligation(ccx.shared(), DUMMY_SP, trait_ref.clone()); match vtable { // Should default trait error here? traits::VtableDefaultImpl(_) | @@ -158,7 +156,7 @@ pub fn get_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, substs, nested: _ }) => { let nullptr = C_null(Type::nil(ccx).ptr_to()); - get_vtable_methods(ccx, id, substs) + get_vtable_methods(tcx, id, substs) .into_iter() .map(|opt_mth| opt_mth.map_or(nullptr, |mth| { Callee::def(ccx, mth.method.def_id, &mth.substs).reify(ccx).val @@ -178,7 +176,10 @@ pub fn get_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, trait_closure_kind); vec![llfn].into_iter() } - traits::VtableFnPointer(bare_fn_ty) => { + traits::VtableFnPointer( + traits::VtableFnPointerData { + fn_ty: bare_fn_ty, + nested: _ }) => { let trait_closure_kind = tcx.lang_items.fn_trait_kind(trait_ref.def_id()).unwrap(); vec![trans_fn_pointer_shim(ccx, trait_closure_kind, bare_fn_ty)].into_iter() } @@ -216,13 +217,11 @@ pub fn get_vtable<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, vtable } -pub fn get_vtable_methods<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, +pub fn get_vtable_methods<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, impl_id: DefId, substs: &'tcx subst::Substs<'tcx>) -> Vec>> { - let tcx = ccx.tcx(); - debug!("get_vtable_methods(impl_id={:?}, substs={:?}", impl_id, substs); let trt_id = match tcx.impl_trait_ref(impl_id) { @@ -259,7 +258,7 @@ pub fn get_vtable_methods<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, let name = trait_method_type.name; // Some methods cannot be called on an object; skip those. - if !traits::is_vtable_safe_method(tcx, trt_id, &trait_method_type) { + if !tcx.is_vtable_safe_method(trt_id, &trait_method_type) { debug!("get_vtable_methods: not vtable safe"); return None; } @@ -288,7 +287,7 @@ pub fn get_vtable_methods<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, // try and trans it, in that case. Issue #23435. if mth.is_provided { let predicates = mth.method.predicates.predicates.subst(tcx, &mth.substs); - if !normalize_and_test_predicates(ccx, predicates.into_vec()) { + if !normalize_and_test_predicates(tcx, predicates.into_vec()) { debug!("get_vtable_methods: predicates do not hold"); return None; } @@ -307,23 +306,31 @@ pub struct ImplMethod<'tcx> { } /// Locates the applicable definition of a method, given its name. 
-pub fn get_impl_method<'tcx>(tcx: &TyCtxt<'tcx>, - impl_def_id: DefId, - substs: &'tcx Substs<'tcx>, - name: Name) - -> ImplMethod<'tcx> +pub fn get_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + impl_def_id: DefId, + substs: &'tcx Substs<'tcx>, + name: Name) + -> ImplMethod<'tcx> { assert!(!substs.types.needs_infer()); let trait_def_id = tcx.trait_id_of_impl(impl_def_id).unwrap(); let trait_def = tcx.lookup_trait_def(trait_def_id); - let infcx = infer::normalizing_infer_ctxt(tcx, &tcx.tables, ProjectionMode::Any); match trait_def.ancestors(impl_def_id).fn_defs(tcx, name).next() { Some(node_item) => { + let substs = tcx.normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| { + let substs = traits::translate_substs(&infcx, impl_def_id, + substs, node_item.node); + tcx.lift(&substs).unwrap_or_else(|| { + bug!("trans::meth::get_impl_method: translate_substs \ + returned {:?} which contains inference types/regions", + substs); + }) + }); ImplMethod { method: node_item.item, - substs: traits::translate_substs(&infcx, impl_def_id, substs, node_item.node), + substs: substs, is_provided: node_item.node.is_from_trait(), } } diff --git a/src/librustc_trans/mir/analyze.rs b/src/librustc_trans/mir/analyze.rs index f721e88a95..0b88ba554d 100644 --- a/src/librustc_trans/mir/analyze.rs +++ b/src/librustc_trans/mir/analyze.rs @@ -14,13 +14,13 @@ use rustc_data_structures::bitvec::BitVector; use rustc::mir::repr as mir; use rustc::mir::visit::{Visitor, LvalueContext}; -use common::{self, Block}; +use common::{self, Block, BlockAndBuilder}; use super::rvalue; pub fn lvalue_temps<'bcx,'tcx>(bcx: Block<'bcx,'tcx>, - mir: &mir::Mir<'tcx>) - -> BitVector { - let mut analyzer = TempAnalyzer::new(mir.temp_decls.len()); + mir: &mir::Mir<'tcx>) -> BitVector { + let bcx = bcx.build(); + let mut analyzer = TempAnalyzer::new(mir, &bcx, mir.temp_decls.len()); analyzer.visit_mir(mir); @@ -30,7 +30,8 @@ pub fn lvalue_temps<'bcx,'tcx>(bcx: Block<'bcx,'tcx>, if ty.is_scalar() || ty.is_unique() || ty.is_region_ptr() || - ty.is_simd() + ty.is_simd() || + common::type_is_zero_size(bcx.ccx(), ty) { // These sorts of types are immediates that we can store // in an ValueRef without an alloca. 
@@ -50,14 +51,20 @@ pub fn lvalue_temps<'bcx,'tcx>(bcx: Block<'bcx,'tcx>, analyzer.lvalue_temps } -struct TempAnalyzer { +struct TempAnalyzer<'mir, 'bcx: 'mir, 'tcx: 'bcx> { + mir: &'mir mir::Mir<'tcx>, + bcx: &'mir BlockAndBuilder<'bcx, 'tcx>, lvalue_temps: BitVector, seen_assigned: BitVector } -impl TempAnalyzer { - fn new(temp_count: usize) -> TempAnalyzer { +impl<'mir, 'bcx, 'tcx> TempAnalyzer<'mir, 'bcx, 'tcx> { + fn new(mir: &'mir mir::Mir<'tcx>, + bcx: &'mir BlockAndBuilder<'bcx, 'tcx>, + temp_count: usize) -> TempAnalyzer<'mir, 'bcx, 'tcx> { TempAnalyzer { + mir: mir, + bcx: bcx, lvalue_temps: BitVector::new(temp_count), seen_assigned: BitVector::new(temp_count) } @@ -75,7 +82,7 @@ impl TempAnalyzer { } } -impl<'tcx> Visitor<'tcx> for TempAnalyzer { +impl<'mir, 'bcx, 'tcx> Visitor<'tcx> for TempAnalyzer<'mir, 'bcx, 'tcx> { fn visit_assign(&mut self, block: mir::BasicBlock, lvalue: &mir::Lvalue<'tcx>, @@ -85,7 +92,7 @@ impl<'tcx> Visitor<'tcx> for TempAnalyzer { match *lvalue { mir::Lvalue::Temp(index) => { self.mark_assigned(index as usize); - if !rvalue::rvalue_creates_operand(rvalue) { + if !rvalue::rvalue_creates_operand(self.mir, self.bcx, rvalue) { self.mark_as_lvalue(index as usize); } } diff --git a/src/librustc_trans/mir/block.rs b/src/librustc_trans/mir/block.rs index 303cf61ad3..4e3386bc73 100644 --- a/src/librustc_trans/mir/block.rs +++ b/src/librustc_trans/mir/block.rs @@ -24,8 +24,10 @@ use meth; use type_of; use glue; use type_::Type; +use rustc_data_structures::fnv::FnvHashMap; use super::{MirContext, TempRef, drop}; +use super::constant::Const; use super::lvalue::{LvalueRef, load_fat_ptr}; use super::operand::OperandRef; use super::operand::OperandValue::{self, FatPtr, Immediate, Ref}; @@ -54,9 +56,14 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { bcx = self.trans_statement(bcx, statement); } - debug!("trans_block: terminator: {:?}", data.terminator()); + let terminator = data.terminator(); + debug!("trans_block: terminator: {:?}", terminator); - match data.terminator().kind { + let debug_loc = DebugLoc::ScopeAt(self.scopes[terminator.scope.index()], + terminator.span); + debug_loc.apply_to_bcx(&bcx); + debug_loc.apply(bcx.fcx()); + match terminator.kind { mir::TerminatorKind::Resume => { if let Some(cleanup_pad) = cleanup_pad { bcx.cleanup_ret(cleanup_pad, None); @@ -89,17 +96,32 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { adt::trans_get_discr(bcx, &repr, discr_lvalue.llval, None, true) ); - // The else branch of the Switch can't be hit, so branch to an unreachable - // instruction so LLVM knows that - let unreachable_blk = self.unreachable_block(); - let switch = bcx.switch(discr, unreachable_blk.llbb, targets.len()); + let mut bb_hist = FnvHashMap(); + for target in targets { + *bb_hist.entry(target).or_insert(0) += 1; + } + let (default_bb, default_blk) = match bb_hist.iter().max_by_key(|&(_, c)| c) { + // If a single target basic blocks is predominant, promote that to be the + // default case for the switch instruction to reduce the size of the generated + // code. This is especially helpful in cases like an if-let on a huge enum. + // Note: This optimization is only valid for exhaustive matches. + Some((&&bb, &c)) if c > targets.len() / 2 => { + (Some(bb), self.blocks[bb.index()]) + } + // We're generating an exhaustive switch, so the else branch + // can't be hit. 
Branching to an unreachable instruction + // lets LLVM know this + _ => (None, self.unreachable_block()) + }; + let switch = bcx.switch(discr, default_blk.llbb, targets.len()); assert_eq!(adt_def.variants.len(), targets.len()); - for (adt_variant, target) in adt_def.variants.iter().zip(targets) { - let llval = bcx.with_block(|bcx| - adt::trans_case(bcx, &repr, Disr::from(adt_variant.disr_val)) - ); - let llbb = self.llblock(*target); - build::AddCase(switch, llval, llbb) + for (adt_variant, &target) in adt_def.variants.iter().zip(targets) { + if default_bb != Some(target) { + let llbb = self.llblock(target); + let llval = bcx.with_block(|bcx| adt::trans_case( + bcx, &repr, Disr::from(adt_variant.disr_val))); + build::AddCase(switch, llval, llbb) + } } } @@ -109,15 +131,15 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { let discr = bcx.with_block(|bcx| base::to_immediate(bcx, discr, switch_ty)); let switch = bcx.switch(discr, self.llblock(*otherwise), values.len()); for (value, target) in values.iter().zip(targets) { - let llval = self.trans_constval(&bcx, value, switch_ty).immediate(); + let val = Const::from_constval(bcx.ccx(), value.clone(), switch_ty); let llbb = self.llblock(*target); - build::AddCase(switch, llval, llbb) + build::AddCase(switch, val.llval, llbb) } } mir::TerminatorKind::Return => { bcx.with_block(|bcx| { - self.fcx.build_return_block(bcx, DebugLoc::None); + self.fcx.build_return_block(bcx, debug_loc); }) } @@ -130,7 +152,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { return; } let drop_fn = glue::get_drop_glue(bcx.ccx(), ty); - let drop_ty = glue::get_drop_glue_type(bcx.ccx(), ty); + let drop_ty = glue::get_drop_glue_type(bcx.tcx(), ty); let llvalue = if drop_ty != ty { bcx.pointercast(lvalue.llval, type_of::type_of(bcx.ccx(), drop_ty).ptr_to()) } else { @@ -144,7 +166,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { self.llblock(target), unwind.llbb(), cleanup_bundle.as_ref()); - self.bcx(target).at_start(|bcx| drop::drop_fill(bcx, lvalue.llval, ty)); + self.bcx(target).at_start(|bcx| { + debug_loc.apply_to_bcx(bcx); + drop::drop_fill(bcx, lvalue.llval, ty) + }); } else { bcx.call(drop_fn, &[llvalue], cleanup_bundle.as_ref()); drop::drop_fill(&bcx, lvalue.llval, ty); @@ -204,7 +229,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { let extra_args = &args[sig.inputs.len()..]; let extra_args = extra_args.iter().map(|op_arg| { - self.mir.operand_ty(bcx.tcx(), op_arg) + let op_ty = self.mir.operand_ty(bcx.tcx(), op_arg); + bcx.monomorphize(&op_ty) }).collect::>(); let fn_ty = callee.direct_fn_type(bcx.ccx(), &extra_args); @@ -232,8 +258,30 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { (&args[..], None) }; + let is_shuffle = intrinsic.map_or(false, |name| { + name.starts_with("simd_shuffle") + }); let mut idx = 0; for arg in first_args { + // The indices passed to simd_shuffle* in the + // third argument must be constant. This is + // checked by const-qualification, which also + // promotes any complex rvalues to constants. 
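
Note: the default-block promotion added above amounts to a small frequency count over the switch targets. A minimal standalone sketch of that selection rule, using a plain HashMap and bare usize block indices as stand-ins for rustc's FnvHashMap and basic-block type (so purely illustrative, not the internal API), could look like this:

    use std::collections::HashMap;

    // Pick a default target for a switch: if one block is the destination of
    // more than half of the arms, promoting it to the `else` edge lets those
    // arms be omitted from the switch entirely, shrinking the generated code.
    fn pick_default(targets: &[usize]) -> Option<usize> {
        let mut hist: HashMap<usize, usize> = HashMap::new();
        for &t in targets {
            *hist.entry(t).or_insert(0) += 1;
        }
        match hist.iter().max_by_key(|&(_, count)| *count) {
            Some((&bb, &count)) if count > targets.len() / 2 => Some(bb),
            _ => None,
        }
    }

    fn main() {
        // An if-let over a large enum: almost every variant jumps to the same block.
        assert_eq!(pick_default(&[3, 3, 3, 3, 3, 7, 3, 3]), Some(3));
        // No clear majority: keep the unreachable default as before.
        assert_eq!(pick_default(&[1, 2, 3, 4]), None);
    }
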
+ if is_shuffle && idx == 2 { + match *arg { + mir::Operand::Consume(_) => { + span_bug!(terminator.span, + "shuffle indices must be constant"); + } + mir::Operand::Constant(ref constant) => { + let val = self.trans_constant(&bcx, constant); + llargs.push(val.llval); + idx += 1; + continue; + } + } + } + let val = self.trans_operand(&bcx, arg).val; self.trans_argument(&bcx, val, &mut llargs, &fn_ty, &mut idx, &mut callee.data); @@ -267,7 +315,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { bcx.with_block(|bcx| { trans_intrinsic_call(bcx, callee.ty, &fn_ty, ArgVals(llargs), dest, - DebugLoc::None); + debug_loc); }); if let ReturnDest::IndirectOperand(dst, _) = ret_dest { @@ -311,13 +359,17 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { cleanup_bundle.as_ref()); fn_ty.apply_attrs_callsite(invokeret); - landingpad.at_start(|bcx| for op in args { - self.set_operand_dropped(bcx, op); + landingpad.at_start(|bcx| { + debug_loc.apply_to_bcx(bcx); + for op in args { + self.set_operand_dropped(bcx, op); + } }); if destination.is_some() { let ret_bcx = ret_bcx.build(); ret_bcx.at_start(|ret_bcx| { + debug_loc.apply_to_bcx(ret_bcx); let op = OperandRef { val: OperandValue::Immediate(invokeret), ty: sig.output.unwrap() @@ -424,47 +476,47 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { fn_ty: &FnType, next_idx: &mut usize, callee: &mut CalleeData) { - // FIXME: consider having some optimization to avoid tupling/untupling - // (and storing/loading in the case of immediates) - - // avoid trans_operand for pointless copying - let lv = match *operand { - mir::Operand::Consume(ref lvalue) => self.trans_lvalue(bcx, lvalue), - mir::Operand::Constant(ref constant) => { - // FIXME: consider being less pessimized - if constant.ty.is_nil() { - return; - } - - let ty = bcx.monomorphize(&constant.ty); - let lv = LvalueRef::alloca(bcx, ty, "__untuple_alloca"); - let constant = self.trans_constant(bcx, constant); - self.store_operand(bcx, lv.llval, constant); - lv - } - }; + let tuple = self.trans_operand(bcx, operand); - let lv_ty = lv.ty.to_ty(bcx.tcx()); - let result_types = match lv_ty.sty { + let arg_types = match tuple.ty.sty { ty::TyTuple(ref tys) => tys, - _ => span_bug!( - self.mir.span, - "bad final argument to \"rust-call\" fn {:?}", lv_ty) + _ => span_bug!(self.mir.span, + "bad final argument to \"rust-call\" fn {:?}", tuple.ty) }; - let base_repr = adt::represent_type(bcx.ccx(), lv_ty); - let base = adt::MaybeSizedValue::sized(lv.llval); - for (n, &ty) in result_types.iter().enumerate() { - let ptr = adt::trans_field_ptr_builder(bcx, &base_repr, base, Disr(0), n); - let val = if common::type_is_fat_ptr(bcx.tcx(), ty) { - let (lldata, llextra) = load_fat_ptr(bcx, ptr); - FatPtr(lldata, llextra) - } else { - // Don't bother loading the value, trans_argument will. - Ref(ptr) - }; - self.trans_argument(bcx, val, llargs, fn_ty, next_idx, callee); + // Handle both by-ref and immediate tuples. 
+ match tuple.val { + Ref(llval) => { + let base_repr = adt::represent_type(bcx.ccx(), tuple.ty); + let base = adt::MaybeSizedValue::sized(llval); + for (n, &ty) in arg_types.iter().enumerate() { + let ptr = adt::trans_field_ptr_builder(bcx, &base_repr, base, Disr(0), n); + let val = if common::type_is_fat_ptr(bcx.tcx(), ty) { + let (lldata, llextra) = load_fat_ptr(bcx, ptr); + FatPtr(lldata, llextra) + } else { + // trans_argument will load this if it needs to + Ref(ptr) + }; + self.trans_argument(bcx, val, llargs, fn_ty, next_idx, callee); + } + + } + Immediate(llval) => { + for (n, &ty) in arg_types.iter().enumerate() { + let mut elem = bcx.extract_value(llval, n); + // Truncate bools to i1, if needed + if ty.is_bool() && common::val_ty(elem) != Type::i1(bcx.ccx()) { + elem = bcx.trunc(elem, Type::i1(bcx.ccx())); + } + // If the tuple is immediate, the elements are as well + let val = Immediate(elem); + self.trans_argument(bcx, val, llargs, fn_ty, next_idx, callee); + } + } + FatPtr(_, _) => bug!("tuple is a fat pointer?!") } + } fn get_personality_slot(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>) -> ValueRef { @@ -514,7 +566,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { let use_funclets = base::wants_msvc_seh(bcx.sess()) && data.is_cleanup; let cleanup_pad = if use_funclets { bcx.set_personality_fn(self.fcx.eh_personality()); - bcx.at_start(|bcx| Some(bcx.cleanup_pad(None, &[]))) + bcx.at_start(|bcx| { + DebugLoc::None.apply_to_bcx(bcx); + Some(bcx.cleanup_pad(None, &[])) + }) } else { None }; diff --git a/src/librustc_trans/mir/constant.rs b/src/librustc_trans/mir/constant.rs index cf85595c08..0403c7b1f7 100644 --- a/src/librustc_trans/mir/constant.rs +++ b/src/librustc_trans/mir/constant.rs @@ -8,62 +8,60 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-use llvm::ValueRef; -use rustc::ty::{Ty, TypeFoldable}; +use llvm::{self, ValueRef}; use rustc::middle::const_val::ConstVal; +use rustc_const_eval::ErrKind; use rustc_const_math::ConstInt::*; -use rustc_const_eval::lookup_const_by_id; +use rustc::hir::def_id::DefId; +use rustc::infer::TransNormalize; use rustc::mir::repr as mir; -use abi; -use common::{self, BlockAndBuilder, C_bool, C_bytes, C_floating_f64, C_integral, - C_str_slice, C_undef}; -use consts; -use datum; -use expr; +use rustc::mir::tcx::LvalueTy; +use rustc::traits; +use rustc::ty::{self, Ty, TypeFoldable}; +use rustc::ty::cast::{CastTy, IntTy}; +use rustc::ty::subst::Substs; +use {abi, adt, base, Disr}; +use callee::Callee; +use common::{self, BlockAndBuilder, CrateContext, const_get_elt, val_ty}; +use common::{C_array, C_bool, C_bytes, C_floating_f64, C_integral}; +use common::{C_null, C_struct, C_str_slice, C_undef, C_uint}; +use consts::{self, ConstEvalFailure, TrueConst, to_const_int}; +use monomorphize::{self, Instance}; use type_of; use type_::Type; +use value::Value; -use super::operand::{OperandRef, OperandValue}; -use super::MirContext; +use syntax::codemap::{Span, DUMMY_SP}; +use std::ptr; -impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { - pub fn trans_constval(&mut self, - bcx: &BlockAndBuilder<'bcx, 'tcx>, - cv: &ConstVal, - ty: Ty<'tcx>) - -> OperandRef<'tcx> - { - let ccx = bcx.ccx(); - let val = self.trans_constval_inner(bcx, cv, ty); - let val = if common::type_is_immediate(ccx, ty) { - OperandValue::Immediate(val) - } else if common::type_is_fat_ptr(bcx.tcx(), ty) { - let data = common::const_get_elt(val, &[abi::FAT_PTR_ADDR as u32]); - let extra = common::const_get_elt(val, &[abi::FAT_PTR_EXTRA as u32]); - OperandValue::FatPtr(data, extra) - } else { - OperandValue::Ref(val) - }; +use super::operand::{OperandRef, OperandValue}; +use super::MirContext; - assert!(!ty.has_erasable_regions()); +/// A sized constant rvalue. +/// The LLVM type might not be the same for a single Rust type, +/// e.g. each enum variant would have its own LLVM struct type. +#[derive(Copy, Clone)] +pub struct Const<'tcx> { + pub llval: ValueRef, + pub ty: Ty<'tcx> +} - OperandRef { - ty: ty, - val: val +impl<'tcx> Const<'tcx> { + pub fn new(llval: ValueRef, ty: Ty<'tcx>) -> Const<'tcx> { + Const { + llval: llval, + ty: ty } } - /// Translate ConstVal into a bare LLVM ValueRef. - fn trans_constval_inner(&mut self, - bcx: &BlockAndBuilder<'bcx, 'tcx>, - cv: &ConstVal, - ty: Ty<'tcx>) - -> ValueRef - { - let ccx = bcx.ccx(); + /// Translate ConstVal into a LLVM constant value. 
+ pub fn from_constval<'a>(ccx: &CrateContext<'a, 'tcx>, + cv: ConstVal, + ty: Ty<'tcx>) + -> Const<'tcx> { let llty = type_of::type_of(ccx, ty); - match *cv { + let val = match cv { ConstVal::Float(v) => C_floating_f64(v, llty), ConstVal::Bool(v) => C_bool(ccx, v), ConstVal::Integral(I8(v)) => C_integral(Type::i8(ccx), v as u64, true), @@ -93,51 +91,769 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } ConstVal::Char(c) => C_integral(Type::char(ccx), c as u64, false), ConstVal::Dummy => bug!(), + }; + + assert!(!ty.has_erasable_regions()); + + Const::new(val, ty) + } + + fn get_fat_ptr(&self) -> (ValueRef, ValueRef) { + (const_get_elt(self.llval, &[abi::FAT_PTR_ADDR as u32]), + const_get_elt(self.llval, &[abi::FAT_PTR_EXTRA as u32])) + } + + fn as_lvalue(&self) -> ConstLvalue<'tcx> { + ConstLvalue { + base: Base::Value(self.llval), + llextra: ptr::null_mut(), + ty: self.ty + } + } + + pub fn to_operand<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> OperandRef<'tcx> { + let llty = type_of::immediate_type_of(ccx, self.ty); + let llvalty = val_ty(self.llval); + + let val = if common::type_is_fat_ptr(ccx.tcx(), self.ty) { + let (data, extra) = self.get_fat_ptr(); + OperandValue::FatPtr(data, extra) + } else if common::type_is_immediate(ccx, self.ty) && llty == llvalty { + // If the types match, we can use the value directly. + OperandValue::Immediate(self.llval) + } else { + // Otherwise, or if the value is not immediate, we create + // a constant LLVM global and cast its address if necessary. + let align = type_of::align_of(ccx, self.ty); + let ptr = consts::addr_of(ccx, self.llval, align, "const"); + OperandValue::Ref(consts::ptrcast(ptr, llty.ptr_to())) + }; + + OperandRef { + val: val, + ty: self.ty + } + } +} + +#[derive(Copy, Clone)] +enum Base { + /// A constant value without an unique address. + Value(ValueRef), + + /// String literal base pointer (cast from array). + Str(ValueRef), + + /// The address of a static. + Static(ValueRef) +} + +/// An lvalue as seen from a constant. +#[derive(Copy, Clone)] +struct ConstLvalue<'tcx> { + base: Base, + llextra: ValueRef, + ty: Ty<'tcx> +} + +impl<'tcx> ConstLvalue<'tcx> { + fn to_const(&self, span: Span) -> Const<'tcx> { + match self.base { + Base::Value(val) => Const::new(val, self.ty), + Base::Str(ptr) => { + span_bug!(span, "loading from `str` ({:?}) in constant", + Value(ptr)) + } + Base::Static(val) => { + span_bug!(span, "loading from `static` ({:?}) in constant", + Value(val)) + } + } + } + + pub fn len<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> ValueRef { + match self.ty.sty { + ty::TyArray(_, n) => C_uint(ccx, n), + ty::TySlice(_) | ty::TyStr => { + assert!(self.llextra != ptr::null_mut()); + self.llextra + } + _ => bug!("unexpected type `{}` in ConstLvalue::len", self.ty) + } + } +} + +/// Machinery for translating a constant's MIR to LLVM values. +/// FIXME(eddyb) use miri and lower its allocations to LLVM. +struct MirConstContext<'a, 'tcx: 'a> { + ccx: &'a CrateContext<'a, 'tcx>, + mir: &'a mir::Mir<'tcx>, + + /// Type parameters for const fn and associated constants. + substs: &'tcx Substs<'tcx>, + + /// Arguments passed to a const fn. + args: Vec>, + + /// Variable values - specifically, argument bindings of a const fn. + vars: Vec>>, + + /// Temp values. + temps: Vec>>, + + /// Value assigned to Return, which is the resulting constant. 
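
Note: get_fat_ptr above splits the two machine words that make up a fat pointer: the data pointer (FAT_PTR_ADDR) and the extra word (FAT_PTR_EXTRA), which is a length for slices and str and a vtable pointer for trait objects. A throwaway check in ordinary, non-compiler Rust shows the layout this relies on:

    use std::fmt::Debug;
    use std::mem::size_of;

    fn main() {
        let word = size_of::<usize>();
        // Thin references are a single pointer-sized word.
        assert_eq!(size_of::<&u8>(), word);
        // Slice and str references carry a length as the extra word.
        assert_eq!(size_of::<&[u8]>(), 2 * word);
        assert_eq!(size_of::<&str>(), 2 * word);
        // Trait object references carry a vtable pointer as the extra word
        // (spelled `&dyn Debug` on newer compilers).
        assert_eq!(size_of::<&Debug>(), 2 * word);
    }
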
+ return_value: Option> +} + + +impl<'a, 'tcx> MirConstContext<'a, 'tcx> { + fn new(ccx: &'a CrateContext<'a, 'tcx>, + mir: &'a mir::Mir<'tcx>, + substs: &'tcx Substs<'tcx>, + args: Vec>) + -> MirConstContext<'a, 'tcx> { + MirConstContext { + ccx: ccx, + mir: mir, + substs: substs, + args: args, + vars: vec![None; mir.var_decls.len()], + temps: vec![None; mir.temp_decls.len()], + return_value: None } } + fn trans_def(ccx: &'a CrateContext<'a, 'tcx>, + mut instance: Instance<'tcx>, + args: Vec>) + -> Result, ConstEvalFailure> { + // Try to resolve associated constants. + if instance.substs.self_ty().is_some() { + // Only trait items can have a Self parameter. + let trait_item = ccx.tcx().impl_or_trait_item(instance.def); + let trait_id = trait_item.container().id(); + let substs = instance.substs; + let trait_ref = ty::Binder(substs.to_trait_ref(ccx.tcx(), trait_id)); + let vtable = common::fulfill_obligation(ccx.shared(), DUMMY_SP, trait_ref); + if let traits::VtableImpl(vtable_impl) = vtable { + let name = ccx.tcx().item_name(instance.def); + for ac in ccx.tcx().associated_consts(vtable_impl.impl_def_id) { + if ac.name == name { + instance = Instance::new(ac.def_id, vtable_impl.substs); + break; + } + } + } + } + + let mir = ccx.get_mir(instance.def).unwrap_or_else(|| { + bug!("missing constant MIR for {}", instance) + }); + MirConstContext::new(ccx, &mir, instance.substs, args).trans() + } + + fn monomorphize(&self, value: &T) -> T + where T: TransNormalize<'tcx> + { + monomorphize::apply_param_substs(self.ccx.tcx(), + self.substs, + value) + } + + fn trans(&mut self) -> Result, ConstEvalFailure> { + let tcx = self.ccx.tcx(); + let mut bb = mir::START_BLOCK; + loop { + let data = self.mir.basic_block_data(bb); + for statement in &data.statements { + match statement.kind { + mir::StatementKind::Assign(ref dest, ref rvalue) => { + let ty = self.mir.lvalue_ty(tcx, dest); + let ty = self.monomorphize(&ty).to_ty(tcx); + let value = self.const_rvalue(rvalue, ty, statement.span)?; + self.store(dest, value, statement.span); + } + } + } + + let terminator = data.terminator(); + let span = terminator.span; + bb = match terminator.kind { + mir::TerminatorKind::Drop { target, .. } | // No dropping. + mir::TerminatorKind::Goto { target } => target, + mir::TerminatorKind::Return => { + return Ok(self.return_value.unwrap_or_else(|| { + span_bug!(span, "no returned value in constant"); + })) + } + + // This is only supported to make bounds checking work. + mir::TerminatorKind::If { ref cond, targets: (true_bb, false_bb) } => { + let cond = self.const_operand(cond, span)?; + if common::const_to_uint(cond.llval) != 0 { + true_bb + } else { + false_bb + } + } + + mir::TerminatorKind::Call { ref func, ref args, ref destination, .. } => { + let fn_ty = self.mir.operand_ty(tcx, func); + let fn_ty = self.monomorphize(&fn_ty); + let instance = match fn_ty.sty { + ty::TyFnDef(def_id, substs, _) => { + Instance::new(def_id, substs) + } + _ => span_bug!(span, "calling {:?} (of type {}) in constant", + func, fn_ty) + }; + + // Indexing OOB doesn't call a const fn, handle it. 
+ if Some(instance.def) == tcx.lang_items.panic_bounds_check_fn() { + consts::const_err(self.ccx, span, + Err(ErrKind::IndexOutOfBounds), + TrueConst::Yes)?; + } + + let args = args.iter().map(|arg| { + self.const_operand(arg, span) + }).collect::, _>>()?; + let value = MirConstContext::trans_def(self.ccx, instance, args)?; + if let Some((ref dest, target)) = *destination { + self.store(dest, value, span); + target + } else { + span_bug!(span, "diverging {:?} in constant", terminator.kind) + } + } + _ => span_bug!(span, "{:?} in constant", terminator.kind) + }; + } + } + + fn store(&mut self, dest: &mir::Lvalue<'tcx>, value: Const<'tcx>, span: Span) { + let dest = match *dest { + mir::Lvalue::Var(index) => &mut self.vars[index as usize], + mir::Lvalue::Temp(index) => &mut self.temps[index as usize], + mir::Lvalue::ReturnPointer => &mut self.return_value, + _ => span_bug!(span, "assignment to {:?} in constant", dest) + }; + *dest = Some(value); + } + + fn const_lvalue(&self, lvalue: &mir::Lvalue<'tcx>, span: Span) + -> Result, ConstEvalFailure> { + let tcx = self.ccx.tcx(); + let lvalue = match *lvalue { + mir::Lvalue::Var(index) => { + self.vars[index as usize].unwrap_or_else(|| { + span_bug!(span, "var{} not initialized", index) + }).as_lvalue() + } + mir::Lvalue::Temp(index) => { + self.temps[index as usize].unwrap_or_else(|| { + span_bug!(span, "tmp{} not initialized", index) + }).as_lvalue() + } + mir::Lvalue::Arg(index) => self.args[index as usize].as_lvalue(), + mir::Lvalue::Static(def_id) => { + ConstLvalue { + base: Base::Static(consts::get_static(self.ccx, def_id).val), + llextra: ptr::null_mut(), + ty: self.mir.lvalue_ty(tcx, lvalue).to_ty(tcx) + } + } + mir::Lvalue::ReturnPointer => { + span_bug!(span, "accessing Lvalue::ReturnPointer in constant") + } + mir::Lvalue::Projection(ref projection) => { + let tr_base = self.const_lvalue(&projection.base, span)?; + let projected_ty = LvalueTy::Ty { ty: tr_base.ty } + .projection_ty(tcx, &projection.elem); + let base = tr_base.to_const(span); + let projected_ty = self.monomorphize(&projected_ty).to_ty(tcx); + let is_sized = common::type_is_sized(tcx, projected_ty); + + let (projected, llextra) = match projection.elem { + mir::ProjectionElem::Deref => { + let (base, extra) = if is_sized { + (base.llval, ptr::null_mut()) + } else { + base.get_fat_ptr() + }; + if self.ccx.statics().borrow().contains_key(&base) { + (Base::Static(base), extra) + } else if let ty::TyStr = projected_ty.sty { + (Base::Str(base), extra) + } else { + let val = consts::load_const(self.ccx, base, projected_ty); + if val.is_null() { + span_bug!(span, "dereference of non-constant pointer `{:?}`", + Value(base)); + } + (Base::Value(val), extra) + } + } + mir::ProjectionElem::Field(ref field, _) => { + let base_repr = adt::represent_type(self.ccx, tr_base.ty); + let llprojected = adt::const_get_field(&base_repr, base.llval, + Disr(0), field.index()); + let llextra = if is_sized { + ptr::null_mut() + } else { + tr_base.llextra + }; + (Base::Value(llprojected), llextra) + } + mir::ProjectionElem::Index(ref index) => { + let llindex = self.const_operand(index, span)?.llval; + + let iv = if let Some(iv) = common::const_to_opt_uint(llindex) { + iv + } else { + span_bug!(span, "index is not an integer-constant expression") + }; + (Base::Value(const_get_elt(base.llval, &[iv as u32])), + ptr::null_mut()) + } + _ => span_bug!(span, "{:?} in constant", projection.elem) + }; + ConstLvalue { + base: projected, + llextra: llextra, + ty: projected_ty + } + } + }; + Ok(lvalue) + } + 
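
Note: the trans loop above is easier to see in miniature: it walks basic blocks, executes assignments, and follows Goto/If/Return terminators until a return value is produced (with If serving only as a conditional jump, just as it does for bounds checks). The sketch below is a toy with made-up Stmt/Term types and i64 values, not the real MIR or Const machinery, but it has the same shape:

    // Toy "constant evaluator" over basic blocks, mirroring the control flow
    // of the trans loop: run the statements, then follow the terminator.
    struct Block {
        stmts: Vec<Stmt>,
        term: Term,
    }

    enum Stmt {
        // vars[dst] = vars[a] + vars[b]
        Add { dst: usize, a: usize, b: usize },
        // vars[dst] = constant
        Const { dst: usize, val: i64 },
    }

    enum Term {
        Goto(usize),
        // Jump to the first target if vars[cond] != 0, else to the second.
        If { cond: usize, targets: (usize, usize) },
        Return(usize),
    }

    fn eval(blocks: &[Block], nvars: usize) -> i64 {
        let mut vars = vec![0i64; nvars];
        let mut bb = 0; // START_BLOCK
        loop {
            let block = &blocks[bb];
            for stmt in &block.stmts {
                match *stmt {
                    Stmt::Const { dst, val } => vars[dst] = val,
                    Stmt::Add { dst, a, b } => vars[dst] = vars[a] + vars[b],
                }
            }
            bb = match block.term {
                Term::Goto(target) => target,
                Term::If { cond, targets } => {
                    if vars[cond] != 0 { targets.0 } else { targets.1 }
                }
                Term::Return(v) => return vars[v],
            };
        }
    }

    fn main() {
        // bb0: v0 = 40; v1 = 2; if v1 goto bb1 else bb2
        // bb1: v2 = v0 + v1; return v2
        // bb2: return v0
        let blocks = vec![
            Block {
                stmts: vec![Stmt::Const { dst: 0, val: 40 },
                            Stmt::Const { dst: 1, val: 2 }],
                term: Term::If { cond: 1, targets: (1, 2) },
            },
            Block {
                stmts: vec![Stmt::Add { dst: 2, a: 0, b: 1 }],
                term: Term::Return(2),
            },
            Block { stmts: vec![], term: Term::Return(0) },
        ];
        assert_eq!(eval(&blocks, 3), 42);
    }
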
+ fn const_operand(&self, operand: &mir::Operand<'tcx>, span: Span) + -> Result, ConstEvalFailure> { + match *operand { + mir::Operand::Consume(ref lvalue) => { + Ok(self.const_lvalue(lvalue, span)?.to_const(span)) + } + + mir::Operand::Constant(ref constant) => { + let ty = self.monomorphize(&constant.ty); + match constant.literal.clone() { + mir::Literal::Item { def_id, substs } => { + // Shortcut for zero-sized types, including function item + // types, which would not work with MirConstContext. + if common::type_is_zero_size(self.ccx, ty) { + let llty = type_of::type_of(self.ccx, ty); + return Ok(Const::new(C_null(llty), ty)); + } + + let substs = self.monomorphize(&substs); + let instance = Instance::new(def_id, substs); + MirConstContext::trans_def(self.ccx, instance, vec![]) + } + mir::Literal::Promoted { index } => { + let mir = &self.mir.promoted[index]; + MirConstContext::new(self.ccx, mir, self.substs, vec![]).trans() + } + mir::Literal::Value { value } => { + Ok(Const::from_constval(self.ccx, value, ty)) + } + } + } + } + } + + fn const_rvalue(&self, rvalue: &mir::Rvalue<'tcx>, + dest_ty: Ty<'tcx>, span: Span) + -> Result, ConstEvalFailure> { + let tcx = self.ccx.tcx(); + let val = match *rvalue { + mir::Rvalue::Use(ref operand) => self.const_operand(operand, span)?, + + mir::Rvalue::Repeat(ref elem, ref count) => { + let elem = self.const_operand(elem, span)?; + let size = count.value.as_u64(tcx.sess.target.uint_type); + let fields = vec![elem.llval; size as usize]; + + let llunitty = type_of::type_of(self.ccx, elem.ty); + // If the array contains enums, an LLVM array won't work. + let val = if val_ty(elem.llval) == llunitty { + C_array(llunitty, &fields) + } else { + C_struct(self.ccx, &fields, false) + }; + Const::new(val, dest_ty) + } + + mir::Rvalue::Aggregate(ref kind, ref operands) => { + let fields = operands.iter().map(|operand| { + Ok(self.const_operand(operand, span)?.llval) + }).collect::, _>>()?; + + // FIXME Shouldn't need to manually trigger closure instantiations. + if let mir::AggregateKind::Closure(def_id, substs) = *kind { + use rustc::hir; + use syntax::ast::DUMMY_NODE_ID; + use syntax::ptr::P; + use closure; + + closure::trans_closure_expr(closure::Dest::Ignore(self.ccx), + &hir::FnDecl { + inputs: P::new(), + output: hir::NoReturn(DUMMY_SP), + variadic: false + }, + &hir::Block { + stmts: P::new(), + expr: None, + id: DUMMY_NODE_ID, + rules: hir::DefaultBlock, + span: DUMMY_SP + }, + DUMMY_NODE_ID, def_id, + self.monomorphize(&substs)); + } + + let val = if let mir::AggregateKind::Adt(adt_def, index, _) = *kind { + let repr = adt::represent_type(self.ccx, dest_ty); + let disr = Disr::from(adt_def.variants[index].disr_val); + adt::trans_const(self.ccx, &repr, disr, &fields) + } else if let ty::TyArray(elem_ty, _) = dest_ty.sty { + let llunitty = type_of::type_of(self.ccx, elem_ty); + // If the array contains enums, an LLVM array won't work. 
+ if fields.iter().all(|&f| val_ty(f) == llunitty) { + C_array(llunitty, &fields) + } else { + C_struct(self.ccx, &fields, false) + } + } else { + C_struct(self.ccx, &fields, false) + }; + Const::new(val, dest_ty) + } + + mir::Rvalue::Cast(ref kind, ref source, cast_ty) => { + let operand = self.const_operand(source, span)?; + let cast_ty = self.monomorphize(&cast_ty); + + let val = match *kind { + mir::CastKind::ReifyFnPointer => { + match operand.ty.sty { + ty::TyFnDef(def_id, substs, _) => { + Callee::def(self.ccx, def_id, substs) + .reify(self.ccx).val + } + _ => { + span_bug!(span, "{} cannot be reified to a fn ptr", + operand.ty) + } + } + } + mir::CastKind::UnsafeFnPointer => { + // this is a no-op at the LLVM level + operand.llval + } + mir::CastKind::Unsize => { + // unsize targets other than to a fat pointer currently + // can't be in constants. + assert!(common::type_is_fat_ptr(tcx, cast_ty)); + + let pointee_ty = operand.ty.builtin_deref(true, ty::NoPreference) + .expect("consts: unsizing got non-pointer type").ty; + let (base, old_info) = if !common::type_is_sized(tcx, pointee_ty) { + // Normally, the source is a thin pointer and we are + // adding extra info to make a fat pointer. The exception + // is when we are upcasting an existing object fat pointer + // to use a different vtable. In that case, we want to + // load out the original data pointer so we can repackage + // it. + let (base, extra) = operand.get_fat_ptr(); + (base, Some(extra)) + } else { + (operand.llval, None) + }; + + let unsized_ty = cast_ty.builtin_deref(true, ty::NoPreference) + .expect("consts: unsizing got non-pointer target type").ty; + let ptr_ty = type_of::in_memory_type_of(self.ccx, unsized_ty).ptr_to(); + let base = consts::ptrcast(base, ptr_ty); + let info = base::unsized_info(self.ccx, pointee_ty, + unsized_ty, old_info); + + if old_info.is_none() { + let prev_const = self.ccx.const_unsized().borrow_mut() + .insert(base, operand.llval); + assert!(prev_const.is_none() || prev_const == Some(operand.llval)); + } + assert_eq!(abi::FAT_PTR_ADDR, 0); + assert_eq!(abi::FAT_PTR_EXTRA, 1); + C_struct(self.ccx, &[base, info], false) + } + mir::CastKind::Misc if common::type_is_immediate(self.ccx, operand.ty) => { + debug_assert!(common::type_is_immediate(self.ccx, cast_ty)); + let r_t_in = CastTy::from_ty(operand.ty).expect("bad input type for cast"); + let r_t_out = CastTy::from_ty(cast_ty).expect("bad output type for cast"); + let ll_t_out = type_of::immediate_type_of(self.ccx, cast_ty); + let llval = operand.llval; + let signed = if let CastTy::Int(IntTy::CEnum) = r_t_in { + let repr = adt::represent_type(self.ccx, operand.ty); + adt::is_discr_signed(&repr) + } else { + operand.ty.is_signed() + }; + + unsafe { + match (r_t_in, r_t_out) { + (CastTy::Int(_), CastTy::Int(_)) => { + let s = signed as llvm::Bool; + llvm::LLVMConstIntCast(llval, ll_t_out.to_ref(), s) + } + (CastTy::Int(_), CastTy::Float) => { + if signed { + llvm::LLVMConstSIToFP(llval, ll_t_out.to_ref()) + } else { + llvm::LLVMConstUIToFP(llval, ll_t_out.to_ref()) + } + } + (CastTy::Float, CastTy::Float) => { + llvm::LLVMConstFPCast(llval, ll_t_out.to_ref()) + } + (CastTy::Float, CastTy::Int(IntTy::I)) => { + llvm::LLVMConstFPToSI(llval, ll_t_out.to_ref()) + } + (CastTy::Float, CastTy::Int(_)) => { + llvm::LLVMConstFPToUI(llval, ll_t_out.to_ref()) + } + (CastTy::Ptr(_), CastTy::Ptr(_)) | + (CastTy::FnPtr, CastTy::Ptr(_)) | + (CastTy::RPtr(_), CastTy::Ptr(_)) => { + consts::ptrcast(llval, ll_t_out) + } + (CastTy::Int(_), CastTy::Ptr(_)) => { 
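
Note: the Unsize arm above handles, at compile time, the same coercions that surface Rust performs implicitly: a thin pointer gains a length or vtable word, while an existing trait-object pointer only needs its data pointer pulled out and its metadata repackaged. In source form (nothing compiler-internal here):

    use std::fmt::Debug;

    fn main() {
        // Thin -> fat: an array reference gains a length word.
        let array = &[1u8, 2, 3, 4];
        let slice: &[u8] = array;
        assert_eq!(slice.len(), 4);

        // Thin -> fat: a concrete reference gains a vtable word
        // (spelled `&dyn Debug` on newer compilers).
        let value = 7i32;
        let obj: &Debug = &value;
        println!("{:?}", obj);

        // Fat -> fat: dropping the `Send` bound keeps the data pointer and
        // only repackages the metadata, the case called out in the comment above.
        let sendable: &(Debug + Send) = &value;
        let _plain: &Debug = sendable;
    }
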
+ llvm::LLVMConstIntToPtr(llval, ll_t_out.to_ref()) + } + (CastTy::Ptr(_), CastTy::Int(_)) | + (CastTy::FnPtr, CastTy::Int(_)) => { + llvm::LLVMConstPtrToInt(llval, ll_t_out.to_ref()) + } + _ => bug!("unsupported cast: {:?} to {:?}", operand.ty, cast_ty) + } + } + } + mir::CastKind::Misc => { // Casts from a fat-ptr. + let ll_cast_ty = type_of::immediate_type_of(self.ccx, cast_ty); + let ll_from_ty = type_of::immediate_type_of(self.ccx, operand.ty); + if common::type_is_fat_ptr(tcx, operand.ty) { + let (data_ptr, meta_ptr) = operand.get_fat_ptr(); + if common::type_is_fat_ptr(tcx, cast_ty) { + let ll_cft = ll_cast_ty.field_types(); + let ll_fft = ll_from_ty.field_types(); + let data_cast = consts::ptrcast(data_ptr, ll_cft[0]); + assert_eq!(ll_cft[1].kind(), ll_fft[1].kind()); + C_struct(self.ccx, &[data_cast, meta_ptr], false) + } else { // cast to thin-ptr + // Cast of fat-ptr to thin-ptr is an extraction of data-ptr and + // pointer-cast of that pointer to desired pointer type. + consts::ptrcast(data_ptr, ll_cast_ty) + } + } else { + bug!("Unexpected non-FatPtr operand") + } + } + }; + Const::new(val, cast_ty) + } + + mir::Rvalue::Ref(_, bk, ref lvalue) => { + let tr_lvalue = self.const_lvalue(lvalue, span)?; + + let ty = tr_lvalue.ty; + let ref_ty = tcx.mk_ref(tcx.mk_region(ty::ReStatic), + ty::TypeAndMut { ty: ty, mutbl: bk.to_mutbl_lossy() }); + + let base = match tr_lvalue.base { + Base::Value(llval) => { + let align = type_of::align_of(self.ccx, ty); + if bk == mir::BorrowKind::Mut { + consts::addr_of_mut(self.ccx, llval, align, "ref_mut") + } else { + consts::addr_of(self.ccx, llval, align, "ref") + } + } + Base::Str(llval) | + Base::Static(llval) => llval + }; + + let ptr = if common::type_is_sized(tcx, ty) { + base + } else { + C_struct(self.ccx, &[base, tr_lvalue.llextra], false) + }; + Const::new(ptr, ref_ty) + } + + mir::Rvalue::Len(ref lvalue) => { + let tr_lvalue = self.const_lvalue(lvalue, span)?; + Const::new(tr_lvalue.len(self.ccx), tcx.types.usize) + } + + mir::Rvalue::BinaryOp(op, ref lhs, ref rhs) => { + let lhs = self.const_operand(lhs, span)?; + let rhs = self.const_operand(rhs, span)?; + let ty = lhs.ty; + let binop_ty = self.mir.binop_ty(tcx, op, lhs.ty, rhs.ty); + let (lhs, rhs) = (lhs.llval, rhs.llval); + assert!(!ty.is_simd()); + let is_float = ty.is_fp(); + let signed = ty.is_signed(); + + if let (Some(lhs), Some(rhs)) = (to_const_int(lhs, ty, tcx), + to_const_int(rhs, ty, tcx)) { + let result = match op { + mir::BinOp::Add => lhs + rhs, + mir::BinOp::Sub => lhs - rhs, + mir::BinOp::Mul => lhs * rhs, + mir::BinOp::Div => lhs / rhs, + mir::BinOp::Rem => lhs % rhs, + mir::BinOp::Shl => lhs << rhs, + mir::BinOp::Shr => lhs >> rhs, + _ => Ok(lhs) + }; + consts::const_err(self.ccx, span, + result.map_err(ErrKind::Math), + TrueConst::Yes)?; + } + + let llval = unsafe { + match op { + mir::BinOp::Add if is_float => llvm::LLVMConstFAdd(lhs, rhs), + mir::BinOp::Add => llvm::LLVMConstAdd(lhs, rhs), + + mir::BinOp::Sub if is_float => llvm::LLVMConstFSub(lhs, rhs), + mir::BinOp::Sub => llvm::LLVMConstSub(lhs, rhs), + + mir::BinOp::Mul if is_float => llvm::LLVMConstFMul(lhs, rhs), + mir::BinOp::Mul => llvm::LLVMConstMul(lhs, rhs), + + mir::BinOp::Div if is_float => llvm::LLVMConstFDiv(lhs, rhs), + mir::BinOp::Div if signed => llvm::LLVMConstSDiv(lhs, rhs), + mir::BinOp::Div => llvm::LLVMConstUDiv(lhs, rhs), + + mir::BinOp::Rem if is_float => llvm::LLVMConstFRem(lhs, rhs), + mir::BinOp::Rem if signed => llvm::LLVMConstSRem(lhs, rhs), + mir::BinOp::Rem => 
llvm::LLVMConstURem(lhs, rhs), + + mir::BinOp::BitXor => llvm::LLVMConstXor(lhs, rhs), + mir::BinOp::BitAnd => llvm::LLVMConstAnd(lhs, rhs), + mir::BinOp::BitOr => llvm::LLVMConstOr(lhs, rhs), + mir::BinOp::Shl => { + let rhs = base::cast_shift_const_rhs(op.to_hir_binop(), lhs, rhs); + llvm::LLVMConstShl(lhs, rhs) + } + mir::BinOp::Shr => { + let rhs = base::cast_shift_const_rhs(op.to_hir_binop(), lhs, rhs); + if signed { llvm::LLVMConstAShr(lhs, rhs) } + else { llvm::LLVMConstLShr(lhs, rhs) } + } + mir::BinOp::Eq | mir::BinOp::Ne | + mir::BinOp::Lt | mir::BinOp::Le | + mir::BinOp::Gt | mir::BinOp::Ge => { + if is_float { + let cmp = base::bin_op_to_fcmp_predicate(op.to_hir_binop()); + llvm::ConstFCmp(cmp, lhs, rhs) + } else { + let cmp = base::bin_op_to_icmp_predicate(op.to_hir_binop(), + signed); + llvm::ConstICmp(cmp, lhs, rhs) + } + } + } + }; + Const::new(llval, binop_ty) + } + + mir::Rvalue::UnaryOp(op, ref operand) => { + let operand = self.const_operand(operand, span)?; + let lloperand = operand.llval; + let llval = match op { + mir::UnOp::Not => { + unsafe { + llvm::LLVMConstNot(lloperand) + } + } + mir::UnOp::Neg => { + if let Some(cval) = to_const_int(lloperand, operand.ty, tcx) { + consts::const_err(self.ccx, span, + (-cval).map_err(ErrKind::Math), + TrueConst::Yes)?; + } + let is_float = operand.ty.is_fp(); + unsafe { + if is_float { + llvm::LLVMConstFNeg(lloperand) + } else { + llvm::LLVMConstNeg(lloperand) + } + } + } + }; + Const::new(llval, operand.ty) + } + + _ => span_bug!(span, "{:?} in constant", rvalue) + }; + + Ok(val) + } +} + +impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { pub fn trans_constant(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>, constant: &mir::Constant<'tcx>) - -> OperandRef<'tcx> + -> Const<'tcx> { let ty = bcx.monomorphize(&constant.ty); - match constant.literal { + let result = match constant.literal.clone() { mir::Literal::Item { def_id, substs } => { // Shortcut for zero-sized types, including function item - // types, which would not work with lookup_const_by_id. + // types, which would not work with MirConstContext. if common::type_is_zero_size(bcx.ccx(), ty) { let llty = type_of::type_of(bcx.ccx(), ty); - return OperandRef { - val: OperandValue::Immediate(C_undef(llty)), - ty: ty - }; + return Const::new(C_null(llty), ty); } - let substs = Some(bcx.monomorphize(substs)); - let expr = lookup_const_by_id(bcx.tcx(), def_id, substs) - .expect("def was const, but lookup_const_by_id failed").0; - // FIXME: this is falling back to translating from HIR. This is not easy to fix, - // because we would have somehow adapt const_eval to work on MIR rather than HIR. 
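
Note: the folding above proceeds in two steps: first the arithmetic is checked on the exact integer path (to_const_int plus ErrKind::Math) so overflow and division by zero are reported, then the folded LLVM constant is built. The same shape can be mimicked in plain Rust with the checked_* methods standing in for that error path; this is only an illustration of the idea, not the actual const-eval plumbing:

    #[derive(Debug, PartialEq)]
    enum BinOp { Add, Sub, Mul, Div }

    // Fold `lhs op rhs` at "compile time", reporting overflow and division
    // by zero instead of silently wrapping.
    fn const_fold(op: BinOp, lhs: i32, rhs: i32) -> Result<i32, String> {
        let folded = match op {
            BinOp::Add => lhs.checked_add(rhs),
            BinOp::Sub => lhs.checked_sub(rhs),
            BinOp::Mul => lhs.checked_mul(rhs),
            BinOp::Div => lhs.checked_div(rhs),
        };
        folded.ok_or_else(|| format!("constant evaluation error: {:?} overflowed or divided by zero", op))
    }

    fn main() {
        assert_eq!(const_fold(BinOp::Add, 40, 2), Ok(42));
        assert!(const_fold(BinOp::Mul, i32::max_value(), 2).is_err());
        assert!(const_fold(BinOp::Div, 1, 0).is_err());
    }
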
- let d = bcx.with_block(|bcx| { - expr::trans(bcx, expr) - }); - - let datum = d.datum.to_rvalue_datum(d.bcx, "").datum; - - match datum.kind.mode { - datum::RvalueMode::ByValue => { - OperandRef { - ty: datum.ty, - val: OperandValue::Immediate(datum.val) - } - } - datum::RvalueMode::ByRef => self.trans_load(bcx, datum.val, datum.ty) - } + let substs = bcx.monomorphize(&substs); + let instance = Instance::new(def_id, substs); + MirConstContext::trans_def(bcx.ccx(), instance, vec![]) + } + mir::Literal::Promoted { index } => { + let mir = &self.mir.promoted[index]; + MirConstContext::new(bcx.ccx(), mir, bcx.fcx().param_substs, vec![]).trans() + } + mir::Literal::Value { value } => { + Ok(Const::from_constval(bcx.ccx(), value, ty)) } - mir::Literal::Value { ref value } => { - self.trans_constval(bcx, value, ty) + }; + + match result { + Ok(v) => v, + Err(ConstEvalFailure::Compiletime(_)) => { + // We've errored, so we don't have to produce working code. + let llty = type_of::type_of(bcx.ccx(), ty); + Const::new(C_undef(llty), ty) + } + Err(ConstEvalFailure::Runtime(err)) => { + span_bug!(constant.span, + "MIR constant {:?} results in runtime panic: {}", + constant, err.description()) } } } } + + +pub fn trans_static_initializer(ccx: &CrateContext, def_id: DefId) + -> Result { + let instance = Instance::mono(ccx.shared(), def_id); + MirConstContext::trans_def(ccx, instance, vec![]).map(|c| c.llval) +} diff --git a/src/librustc_trans/mir/lvalue.rs b/src/librustc_trans/mir/lvalue.rs index 695806aa82..b39a6ac1ce 100644 --- a/src/librustc_trans/mir/lvalue.rs +++ b/src/librustc_trans/mir/lvalue.rs @@ -16,11 +16,11 @@ use abi; use adt; use base; use builder::Builder; -use common::{self, BlockAndBuilder, C_uint}; +use common::{self, BlockAndBuilder, CrateContext, C_uint, C_undef}; use consts; use machine; +use type_of::type_of; use mir::drop; -use llvm; use Disr; use std::ptr; @@ -56,6 +56,18 @@ impl<'tcx> LvalueRef<'tcx> { } LvalueRef::new_sized(lltemp, LvalueTy::from_ty(ty)) } + + pub fn len<'a>(&self, ccx: &CrateContext<'a, 'tcx>) -> ValueRef { + let ty = self.ty.to_ty(ccx.tcx()); + match ty.sty { + ty::TyArray(_, n) => common::C_uint(ccx, n), + ty::TySlice(_) | ty::TyStr => { + assert!(self.llextra != ptr::null_mut()); + self.llextra + } + _ => bug!("unexpected type `{}` in LvalueRef::len", ty) + } + } } pub fn get_meta(b: &Builder, fat_ptr: ValueRef) -> ValueRef { @@ -71,20 +83,6 @@ pub fn load_fat_ptr(b: &Builder, fat_ptr: ValueRef) -> (ValueRef, ValueRef) { } impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { - pub fn lvalue_len(&mut self, - bcx: &BlockAndBuilder<'bcx, 'tcx>, - lvalue: LvalueRef<'tcx>) - -> ValueRef { - match lvalue.ty.to_ty(bcx.tcx()).sty { - ty::TyArray(_, n) => common::C_uint(bcx.ccx(), n), - ty::TySlice(_) | ty::TyStr => { - assert!(lvalue.llextra != ptr::null_mut()); - lvalue.llextra - } - _ => bug!("unexpected type in lvalue_len"), - } - } - pub fn trans_lvalue(&mut self, bcx: &BlockAndBuilder<'bcx, 'tcx>, lvalue: &mir::Lvalue<'tcx>) @@ -118,10 +116,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { // Ergo, we return an undef ValueRef, so we do not have to special-case every // place using lvalues, and could use it the same way you use a regular // ReturnPointer LValue (i.e. store into it, load from it etc). 
- let llty = fcx.fn_ty.ret.original_ty.ptr_to(); - unsafe { - llvm::LLVMGetUndef(llty.to_ref()) - } + C_undef(fcx.fn_ty.ret.original_ty.ptr_to()) }; let fn_return_ty = bcx.monomorphize(&self.mir.return_ty); let return_ty = fn_return_ty.unwrap(); @@ -190,7 +185,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { from_end: true, min_length: _ } => { let lloffset = C_uint(bcx.ccx(), offset); - let lllen = self.lvalue_len(bcx, tr_base); + let lllen = tr_base.len(bcx.ccx()); let llindex = bcx.sub(lllen, lloffset); project_index(self.prepare_index(bcx, llindex)) } @@ -230,7 +225,19 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { ret } TempRef::Operand(Some(_)) => { - bug!("Lvalue temp already set"); + let lvalue_ty = self.mir.lvalue_ty(bcx.tcx(), lvalue); + let lvalue_ty = bcx.monomorphize(&lvalue_ty); + + // See comments in TempRef::new_operand as to why + // we always have Some in a ZST TempRef::Operand. + let ty = lvalue_ty.to_ty(bcx.tcx()); + if common::type_is_zero_size(bcx.ccx(), ty) { + // Pass an undef pointer as no stores can actually occur. + let llptr = C_undef(type_of(bcx.ccx(), ty).ptr_to()); + f(self, LvalueRef::new_sized(llptr, lvalue_ty)) + } else { + bug!("Lvalue temp already set"); + } } } } diff --git a/src/librustc_trans/mir/mod.rs b/src/librustc_trans/mir/mod.rs index 7e44b72db7..b98e04e51c 100644 --- a/src/librustc_trans/mir/mod.rs +++ b/src/librustc_trans/mir/mod.rs @@ -10,11 +10,19 @@ use libc::c_uint; use llvm::{self, ValueRef}; +use llvm::debuginfo::DIScope; use rustc::ty; use rustc::mir::repr as mir; use rustc::mir::tcx::LvalueTy; +use session::config::FullDebugInfo; use base; -use common::{self, Block, BlockAndBuilder, FunctionContext}; +use common::{self, Block, BlockAndBuilder, CrateContext, FunctionContext}; +use debuginfo::{self, declare_local, DebugLoc, VariableAccess, VariableKind}; +use machine; +use type_of; + +use syntax::codemap::DUMMY_SP; +use syntax::parse::token::keywords; use std::ops::Deref; use std::rc::Rc; @@ -23,10 +31,12 @@ use basic_block::BasicBlock; use rustc_data_structures::bitvec::BitVector; +pub use self::constant::trans_static_initializer; + use self::lvalue::{LvalueRef, get_dataptr, get_meta}; use rustc_mir::traversal; -use self::operand::OperandRef; +use self::operand::{OperandRef, OperandValue}; #[derive(Clone)] pub enum CachedMir<'mir, 'tcx: 'mir> { @@ -44,8 +54,6 @@ impl<'mir, 'tcx: 'mir> Deref for CachedMir<'mir, 'tcx> { } } -// FIXME DebugLoc is always None right now - /// Master context for translating MIR. pub struct MirContext<'bcx, 'tcx:'bcx> { mir: CachedMir<'bcx, 'tcx>, @@ -92,6 +100,9 @@ pub struct MirContext<'bcx, 'tcx:'bcx> { /// always indirect, though we try to avoid creating an alloca /// when we can (and just reuse the pointer the caller provided). args: Vec>, + + /// Debug information for MIR scopes. + scopes: Vec } enum TempRef<'tcx> { @@ -99,6 +110,25 @@ enum TempRef<'tcx> { Operand(Option>), } +impl<'tcx> TempRef<'tcx> { + fn new_operand<'bcx>(ccx: &CrateContext<'bcx, 'tcx>, + ty: ty::Ty<'tcx>) -> TempRef<'tcx> { + if common::type_is_zero_size(ccx, ty) { + // Zero-size temporaries aren't always initialized, which + // doesn't matter because they don't contain data, but + // we need something in the operand. 
+ let val = OperandValue::Immediate(common::C_nil(ccx)); + let op = OperandRef { + val: val, + ty: ty + }; + TempRef::Operand(Some(op)) + } else { + TempRef::Operand(None) + } + } +} + /////////////////////////////////////////////////////////////////////////// pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) { @@ -113,11 +143,27 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) { analyze::lvalue_temps(bcx, &mir) }); + // Compute debuginfo scopes from MIR scopes. + let scopes = debuginfo::create_mir_scopes(fcx); + // Allocate variable and temp allocas + let args = arg_value_refs(&bcx, &mir, &scopes); let vars = mir.var_decls.iter() - .map(|decl| (bcx.monomorphize(&decl.ty), decl.name)) - .map(|(mty, name)| LvalueRef::alloca(&bcx, mty, &name.as_str())) - .collect(); + .map(|decl| (bcx.monomorphize(&decl.ty), decl)) + .map(|(mty, decl)| { + let lvalue = LvalueRef::alloca(&bcx, mty, &decl.name.as_str()); + + let scope = scopes[decl.scope.index()]; + if !scope.is_null() && bcx.sess().opts.debuginfo == FullDebugInfo { + bcx.with_block(|bcx| { + declare_local(bcx, decl.name, mty, scope, + VariableAccess::DirectVariable { alloca: lvalue.llval }, + VariableKind::LocalVariable, decl.span); + }); + } + + lvalue + }).collect(); let temps = mir.temp_decls.iter() .map(|decl| bcx.monomorphize(&decl.ty)) .enumerate() @@ -129,10 +175,9 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) { // If this is an immediate temp, we do not create an // alloca in advance. Instead we wait until we see the // definition and update the operand there. - TempRef::Operand(None) + TempRef::new_operand(bcx.ccx(), mty) }) .collect(); - let args = arg_value_refs(&bcx, &mir); // Allocate a `Block` for every basic block let block_bcxs: Vec> = @@ -140,8 +185,6 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) { .map(|&bb|{ if bb == mir::START_BLOCK { fcx.new_block("start", None) - } else if bb == mir::END_BLOCK { - fcx.new_block("end", None) } else { fcx.new_block(&format!("{:?}", bb), None) } @@ -152,6 +195,11 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) { let start_bcx = block_bcxs[mir::START_BLOCK.index()]; bcx.br(start_bcx.llbb); + // Up until here, IR instructions for this function have explicitly not been annotated with + // source code location, so we don't step into call setup code. From here on, source location + // emitting should be enabled. + debuginfo::start_emitting_source_locations(fcx); + let mut mircx = MirContext { mir: mir.clone(), fcx: fcx, @@ -161,6 +209,7 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) { vars: vars, temps: temps, args: args, + scopes: scopes }; let mut visited = BitVector::new(mir_blocks.len()); @@ -185,6 +234,7 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) { } } + DebugLoc::None.apply(fcx); fcx.cleanup(); } @@ -192,12 +242,25 @@ pub fn trans_mir<'blk, 'tcx: 'blk>(fcx: &'blk FunctionContext<'blk, 'tcx>) { /// argument's value. As arguments are lvalues, these are always /// indirect. fn arg_value_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>, - mir: &mir::Mir<'tcx>) + mir: &mir::Mir<'tcx>, + scopes: &[DIScope]) -> Vec> { let fcx = bcx.fcx(); let tcx = bcx.tcx(); let mut idx = 0; let mut llarg_idx = fcx.fn_ty.ret.is_indirect() as usize; + + // Get the argument scope assuming ScopeId(0) has no parent. 
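
Note: the TempRef::new_operand special case above works because zero-sized types genuinely carry no data: any well-typed value of such a type is as good as any other, so a dummy immediate (C_nil) suffices and no alloca or store is ever needed. A trivial reminder in surface Rust:

    use std::mem::size_of;

    fn main() {
        // Zero-sized types occupy no storage at all...
        assert_eq!(size_of::<()>(), 0);
        assert_eq!(size_of::<[u64; 0]>(), 0);
        struct Marker;
        assert_eq!(size_of::<Marker>(), 0);
        // ...so a "load" or "store" of one moves no bytes, which is why a
        // placeholder operand can stand in for an uninitialized ZST temporary.
        let unit: () = ();
        let _copy = unit;
    }
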
+ let arg_scope = mir.scopes.get(0).and_then(|data| { + let scope = scopes[0]; + if data.parent_scope.is_none() && !scope.is_null() && + bcx.sess().opts.debuginfo == FullDebugInfo { + Some(scope) + } else { + None + } + }); + mir.arg_decls.iter().enumerate().map(|(arg_index, arg_decl)| { let arg_ty = bcx.monomorphize(&arg_decl.ty); if arg_decl.spread { @@ -211,13 +274,14 @@ fn arg_value_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>, _ => bug!("spread argument isn't a tuple?!") }; + let lltuplety = type_of::type_of(bcx.ccx(), arg_ty); let lltemp = bcx.with_block(|bcx| { base::alloc_ty(bcx, arg_ty, &format!("arg{}", arg_index)) }); for (i, &tupled_arg_ty) in tupled_arg_tys.iter().enumerate() { let dst = bcx.struct_gep(lltemp, i); let arg = &fcx.fn_ty.args[idx]; - idx += 1; + idx += 1; if common::type_is_fat_ptr(tcx, tupled_arg_ty) { // We pass fat pointers as two words, but inside the tuple // they are the two sub-fields of a single aggregate field. @@ -228,17 +292,37 @@ fn arg_value_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>, } else { arg.store_fn_arg(bcx, &mut llarg_idx, dst); } + + bcx.with_block(|bcx| arg_scope.map(|scope| { + let byte_offset_of_var_in_tuple = + machine::llelement_offset(bcx.ccx(), lltuplety, i); + + let ops = unsafe { + [llvm::LLVMDIBuilderCreateOpDeref(), + llvm::LLVMDIBuilderCreateOpPlus(), + byte_offset_of_var_in_tuple as i64] + }; + + let variable_access = VariableAccess::IndirectVariable { + alloca: lltemp, + address_operations: &ops + }; + declare_local(bcx, keywords::Invalid.name(), + tupled_arg_ty, scope, variable_access, + VariableKind::ArgumentVariable(arg_index + i + 1), + bcx.fcx().span.unwrap_or(DUMMY_SP)); + })); } return LvalueRef::new_sized(lltemp, LvalueTy::from_ty(arg_ty)); } let arg = &fcx.fn_ty.args[idx]; idx += 1; - let llval = if arg.is_indirect() { + let llval = if arg.is_indirect() && bcx.sess().opts.debuginfo != FullDebugInfo { // Don't copy an indirect argument to an alloca, the caller // already put it in a temporary alloca and gave it up, unless // we emit extra-debug-info, which requires local allocas :(. - // FIXME: lifetimes, debug info + // FIXME: lifetimes let llarg = llvm::get_param(fcx.llfn, llarg_idx as c_uint); llarg_idx += 1; llarg @@ -261,6 +345,80 @@ fn arg_value_refs<'bcx, 'tcx>(bcx: &BlockAndBuilder<'bcx, 'tcx>, } lltemp }; + bcx.with_block(|bcx| arg_scope.map(|scope| { + // Is this a regular argument? + if arg_index > 0 || mir.upvar_decls.is_empty() { + declare_local(bcx, arg_decl.debug_name, arg_ty, scope, + VariableAccess::DirectVariable { alloca: llval }, + VariableKind::ArgumentVariable(arg_index + 1), + bcx.fcx().span.unwrap_or(DUMMY_SP)); + return; + } + + // Or is it the closure environment? + let (closure_ty, env_ref) = if let ty::TyRef(_, mt) = arg_ty.sty { + (mt.ty, true) + } else { + (arg_ty, false) + }; + let upvar_tys = if let ty::TyClosure(_, ref substs) = closure_ty.sty { + &substs.upvar_tys[..] + } else { + bug!("upvar_decls with non-closure arg0 type `{}`", closure_ty); + }; + + // Store the pointer to closure data in an alloca for debuginfo + // because that's what the llvm.dbg.declare intrinsic expects. + + // FIXME(eddyb) this shouldn't be necessary but SROA seems to + // mishandle DW_OP_plus not preceded by DW_OP_deref, i.e. it + // doesn't actually strip the offset when splitting the closure + // environment into its components so it ends up out of bounds. 
+ let env_ptr = if !env_ref { + use base::*; + use build::*; + use common::*; + let alloc = alloca(bcx, val_ty(llval), "__debuginfo_env_ptr"); + Store(bcx, llval, alloc); + alloc + } else { + llval + }; + + let llclosurety = type_of::type_of(bcx.ccx(), closure_ty); + for (i, (decl, ty)) in mir.upvar_decls.iter().zip(upvar_tys).enumerate() { + let byte_offset_of_var_in_env = + machine::llelement_offset(bcx.ccx(), llclosurety, i); + + let ops = unsafe { + [llvm::LLVMDIBuilderCreateOpDeref(), + llvm::LLVMDIBuilderCreateOpPlus(), + byte_offset_of_var_in_env as i64, + llvm::LLVMDIBuilderCreateOpDeref()] + }; + + // The environment and the capture can each be indirect. + + // FIXME(eddyb) see above why we have to keep + // a pointer in an alloca for debuginfo atm. + let mut ops = if env_ref || true { &ops[..] } else { &ops[1..] }; + + let ty = if let (true, &ty::TyRef(_, mt)) = (decl.by_ref, &ty.sty) { + mt.ty + } else { + ops = &ops[..ops.len() - 1]; + ty + }; + + let variable_access = VariableAccess::IndirectVariable { + alloca: env_ptr, + address_operations: &ops + }; + declare_local(bcx, decl.debug_name, ty, scope, variable_access, + VariableKind::CapturedVariable, + bcx.fcx().span.unwrap_or(DUMMY_SP)); + } + })); LvalueRef::new_sized(llval, LvalueTy::from_ty(arg_ty)) }).collect() } diff --git a/src/librustc_trans/mir/operand.rs b/src/librustc_trans/mir/operand.rs index c15d6cd5b2..fc726a3474 100644 --- a/src/librustc_trans/mir/operand.rs +++ b/src/librustc_trans/mir/operand.rs @@ -140,7 +140,14 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } mir::Operand::Constant(ref constant) => { - self.trans_constant(bcx, constant) + let val = self.trans_constant(bcx, constant); + let operand = val.to_operand(bcx.ccx()); + if let OperandValue::Ref(ptr) = operand.val { + // If this is a OperandValue::Ref to an immediate constant, load it. + self.trans_load(bcx, ptr, operand.ty) + } else { + operand + } } } } diff --git a/src/librustc_trans/mir/rvalue.rs b/src/librustc_trans/mir/rvalue.rs index 8e5d220b4f..5945e8813a 100644 --- a/src/librustc_trans/mir/rvalue.rs +++ b/src/librustc_trans/mir/rvalue.rs @@ -11,8 +11,6 @@ use llvm::ValueRef; use rustc::ty::{self, Ty}; use rustc::ty::cast::{CastTy, IntTy}; -use middle::const_val::ConstVal; -use rustc_const_math::ConstInt; use rustc::mir::repr as mir; use asm; @@ -21,14 +19,13 @@ use callee::Callee; use common::{self, C_uint, BlockAndBuilder, Result}; use datum::{Datum, Lvalue}; use debuginfo::DebugLoc; -use declare; use adt; use machine; -use type_::Type; use type_of; use tvec; use value::Value; use Disr; +use glue; use super::MirContext; use super::operand::{OperandRef, OperandValue}; @@ -38,7 +35,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { pub fn trans_rvalue(&mut self, bcx: BlockAndBuilder<'bcx, 'tcx>, dest: LvalueRef<'tcx>, - rvalue: &mir::Rvalue<'tcx>) + rvalue: &mir::Rvalue<'tcx>, + debug_loc: DebugLoc) -> BlockAndBuilder<'bcx, 'tcx> { debug!("trans_rvalue(dest.llval={:?}, rvalue={:?})", @@ -55,10 +53,12 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } mir::Rvalue::Cast(mir::CastKind::Unsize, ref source, cast_ty) => { + let cast_ty = bcx.monomorphize(&cast_ty); + if common::type_is_fat_ptr(bcx.tcx(), cast_ty) { // into-coerce of a thin pointer to a fat pointer - just // use the operand path. 
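
Note: the upvar debuginfo above leans on the fact that a by-value closure stores its captures like fields of an anonymous struct (hence the per-capture byte offsets), while a by-reference capture stores a pointer into the enclosing frame instead. Closure layout is unspecified, so the sizes printed by this small illustration are only what one typically sees on a 64-bit target:

    use std::mem::size_of_val;

    fn main() {
        let x = 1u64;
        let y = 2u64;

        // Captures moved into the environment: roughly a struct { x: u64, y: u64 }.
        let by_value = move || x + y;
        // Captures borrowed: roughly a struct { x: &u64, y: &u64 }.
        let by_ref = || x + y;

        // Typically 16 bytes each on a 64-bit target, for different reasons.
        println!("by_value env: {} bytes", size_of_val(&by_value));
        println!("by_ref env:   {} bytes", size_of_val(&by_ref));
    }
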
- let (bcx, temp) = self.trans_rvalue_operand(bcx, rvalue); + let (bcx, temp) = self.trans_rvalue_operand(bcx, rvalue, debug_loc); self.store_operand(&bcx, dest.llval, temp); return bcx; } @@ -98,8 +98,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { mir::Rvalue::Repeat(ref elem, ref count) => { let tr_elem = self.trans_operand(&bcx, elem); - let count = ConstVal::Integral(ConstInt::Usize(count.value)); - let size = self.trans_constval(&bcx, &count, bcx.tcx().types.usize).immediate(); + let size = count.value.as_u64(bcx.tcx().sess.target.uint_type); + let size = C_uint(bcx.ccx(), size); let base = get_dataptr(&bcx, dest.llval); let bcx = bcx.map_block(|block| { tvec::iter_vec_raw(block, base, tr_elem.ty, size, |block, llslot, _| { @@ -154,7 +154,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { span: DUMMY_SP }, DUMMY_NODE_ID, def_id, - &bcx.monomorphize(substs)); + bcx.monomorphize(&substs)); } for (i, operand) in operands.iter().enumerate() { @@ -216,8 +216,8 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } _ => { - assert!(rvalue_creates_operand(rvalue)); - let (bcx, temp) = self.trans_rvalue_operand(bcx, rvalue); + assert!(rvalue_creates_operand(&self.mir, &bcx, rvalue)); + let (bcx, temp) = self.trans_rvalue_operand(bcx, rvalue, debug_loc); self.store_operand(&bcx, dest.llval, temp); bcx } @@ -226,10 +226,12 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { pub fn trans_rvalue_operand(&mut self, bcx: BlockAndBuilder<'bcx, 'tcx>, - rvalue: &mir::Rvalue<'tcx>) + rvalue: &mir::Rvalue<'tcx>, + debug_loc: DebugLoc) -> (BlockAndBuilder<'bcx, 'tcx>, OperandRef<'tcx>) { - assert!(rvalue_creates_operand(rvalue), "cannot trans {:?} to operand", rvalue); + assert!(rvalue_creates_operand(&self.mir, &bcx, rvalue), + "cannot trans {:?} to operand", rvalue); match *rvalue { mir::Rvalue::Cast(ref kind, ref source, cast_ty) => { @@ -260,14 +262,17 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { assert!(common::type_is_fat_ptr(bcx.tcx(), cast_ty)); match operand.val { - OperandValue::FatPtr(..) => { + OperandValue::FatPtr(lldata, llextra) => { // unsize from a fat pointer - this is a // "trait-object-to-supertrait" coercion, for // example, // &'a fmt::Debug+Send => &'a fmt::Debug, - // and is a no-op at the LLVM level + // So we need to pointercast the base to ensure + // the types match up. self.set_operand_dropped(&bcx, source); - operand.val + let llcast_ty = type_of::fat_ptr_base_ty(bcx.ccx(), cast_ty); + let lldata = bcx.pointercast(lldata, llcast_ty); + OperandValue::FatPtr(lldata, llextra) } OperandValue::Immediate(lldata) => { // "standard" unsize @@ -401,7 +406,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { mir::Rvalue::Len(ref lvalue) => { let tr_lvalue = self.trans_lvalue(&bcx, lvalue); let operand = OperandRef { - val: OperandValue::Immediate(self.lvalue_len(&bcx, tr_lvalue)), + val: OperandValue::Immediate(tr_lvalue.len(bcx.ccx())), ty: bcx.tcx().types.usize, }; (bcx, operand) @@ -419,7 +424,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { lhs_addr, lhs_extra, rhs_addr, rhs_extra, lhs.ty, op.to_hir_binop(), - DebugLoc::None) + debug_loc) }) } _ => bug!() @@ -470,7 +475,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { box_ty, llsize, llalign, - DebugLoc::None); + debug_loc); llval = Some(val); bcx }); @@ -481,7 +486,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { (bcx, operand) } - mir::Rvalue::Use(..) | + mir::Rvalue::Use(ref operand) => { + let operand = self.trans_operand(&bcx, operand); + (bcx, operand) + } mir::Rvalue::Repeat(..) | mir::Rvalue::Aggregate(..) | mir::Rvalue::Slice { .. 
} | @@ -524,43 +532,7 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { bcx.udiv(lhs, rhs) }, mir::BinOp::Rem => if is_float { - // LLVM currently always lowers the `frem` instructions appropriate - // library calls typically found in libm. Notably f64 gets wired up - // to `fmod` and f32 gets wired up to `fmodf`. Inconveniently for - // us, 32-bit MSVC does not actually have a `fmodf` symbol, it's - // instead just an inline function in a header that goes up to a - // f64, uses `fmod`, and then comes back down to a f32. - // - // Although LLVM knows that `fmodf` doesn't exist on MSVC, it will - // still unconditionally lower frem instructions over 32-bit floats - // to a call to `fmodf`. To work around this we special case MSVC - // 32-bit float rem instructions and instead do the call out to - // `fmod` ourselves. - // - // Note that this is currently duplicated with src/libcore/ops.rs - // which does the same thing, and it would be nice to perhaps unify - // these two implementations one day! Also note that we call `fmod` - // for both 32 and 64-bit floats because if we emit any FRem - // instruction at all then LLVM is capable of optimizing it into a - // 32-bit FRem (which we're trying to avoid). - let tcx = bcx.tcx(); - let use_fmod = tcx.sess.target.target.options.is_like_msvc && - tcx.sess.target.target.arch == "x86"; - if use_fmod { - let f64t = Type::f64(bcx.ccx()); - let fty = Type::func(&[f64t, f64t], &f64t); - let llfn = declare::declare_cfn(bcx.ccx(), "fmod", fty); - if input_ty == tcx.types.f32 { - let lllhs = bcx.fpext(lhs, f64t); - let llrhs = bcx.fpext(rhs, f64t); - let llres = bcx.call(llfn, &[lllhs, llrhs], None); - bcx.fptrunc(llres, Type::f32(bcx.ccx())) - } else { - bcx.call(llfn, &[lhs, rhs], None) - } - } else { - bcx.frem(lhs, rhs) - } + bcx.frem(lhs, rhs) } else if is_signed { bcx.srem(lhs, rhs) } else { @@ -597,7 +569,9 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { } } -pub fn rvalue_creates_operand<'tcx>(rvalue: &mir::Rvalue<'tcx>) -> bool { +pub fn rvalue_creates_operand<'bcx, 'tcx>(mir: &mir::Mir<'tcx>, + bcx: &BlockAndBuilder<'bcx, 'tcx>, + rvalue: &mir::Rvalue<'tcx>) -> bool { match *rvalue { mir::Rvalue::Ref(..) | mir::Rvalue::Len(..) | @@ -606,16 +580,20 @@ pub fn rvalue_creates_operand<'tcx>(rvalue: &mir::Rvalue<'tcx>) -> bool { mir::Rvalue::UnaryOp(..) | mir::Rvalue::Box(..) => true, - mir::Rvalue::Use(..) | // (**) mir::Rvalue::Repeat(..) | mir::Rvalue::Aggregate(..) | mir::Rvalue::Slice { .. } | mir::Rvalue::InlineAsm { .. } => false, + mir::Rvalue::Use(ref operand) => { + let ty = mir.operand_ty(bcx.tcx(), operand); + let ty = bcx.monomorphize(&ty); + // Types that don't need dropping can just be an operand, + // this allows temporary lvalues, used as rvalues, to + // avoid a stack slot when it's unnecessary + !glue::type_needs_drop(bcx.tcx(), ty) + } } // (*) this is only true if the type is suitable - // (**) we need to zero-out the source operand after moving, so we are restricted to either - // ensuring all users of `Use` zero it out themselves or not allowing to “create” operand for - // it. } diff --git a/src/librustc_trans/mir/statement.rs b/src/librustc_trans/mir/statement.rs index 1d85ac6fb7..c9a4e540fa 100644 --- a/src/librustc_trans/mir/statement.rs +++ b/src/librustc_trans/mir/statement.rs @@ -9,7 +9,8 @@ // except according to those terms. 
use rustc::mir::repr as mir; -use common::BlockAndBuilder; +use common::{self, BlockAndBuilder}; +use debuginfo::DebugLoc; use super::MirContext; use super::TempRef; @@ -21,6 +22,10 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { -> BlockAndBuilder<'bcx, 'tcx> { debug!("trans_statement(statement={:?})", statement); + let debug_loc = DebugLoc::ScopeAt(self.scopes[statement.scope.index()], + statement.span); + debug_loc.apply_to_bcx(&bcx); + debug_loc.apply(bcx.fcx()); match statement.kind { mir::StatementKind::Assign(ref lvalue, ref rvalue) => { match *lvalue { @@ -28,23 +33,33 @@ impl<'bcx, 'tcx> MirContext<'bcx, 'tcx> { let index = index as usize; match self.temps[index as usize] { TempRef::Lvalue(tr_dest) => { - self.trans_rvalue(bcx, tr_dest, rvalue) + self.trans_rvalue(bcx, tr_dest, rvalue, debug_loc) } TempRef::Operand(None) => { - let (bcx, operand) = self.trans_rvalue_operand(bcx, rvalue); + let (bcx, operand) = self.trans_rvalue_operand(bcx, rvalue, + debug_loc); self.temps[index] = TempRef::Operand(Some(operand)); bcx } TempRef::Operand(Some(_)) => { - span_bug!(statement.span, - "operand {:?} already assigned", - rvalue); + let ty = self.mir.lvalue_ty(bcx.tcx(), lvalue); + let ty = bcx.monomorphize(&ty.to_ty(bcx.tcx())); + + if !common::type_is_zero_size(bcx.ccx(), ty) { + span_bug!(statement.span, + "operand {:?} already assigned", + rvalue); + } else { + // If the type is zero-sized, it's already been set here, + // but we still need to make sure we translate the operand + self.trans_rvalue_operand(bcx, rvalue, debug_loc).0 + } } } } _ => { let tr_dest = self.trans_lvalue(&bcx, lvalue); - self.trans_rvalue(bcx, tr_dest, rvalue) + self.trans_rvalue(bcx, tr_dest, rvalue, debug_loc) } } } diff --git a/src/librustc_trans/monomorphize.rs b/src/librustc_trans/monomorphize.rs index ef0da37f0b..dfaf84ecef 100644 --- a/src/librustc_trans/monomorphize.rs +++ b/src/librustc_trans/monomorphize.rs @@ -12,7 +12,7 @@ use back::symbol_names; use llvm::ValueRef; use llvm; use rustc::hir::def_id::DefId; -use rustc::infer::normalize_associated_type; +use rustc::infer::TransNormalize; use rustc::ty::subst; use rustc::ty::subst::{Subst, Substs}; use rustc::ty::{self, Ty, TypeFoldable, TyCtxt}; @@ -183,29 +183,29 @@ impl<'tcx> Instance<'tcx> { assert!(substs.regions.iter().all(|&r| r == ty::ReStatic)); Instance { def: def_id, substs: substs } } - pub fn mono(tcx: &TyCtxt<'tcx>, def_id: DefId) -> Instance<'tcx> { - Instance::new(def_id, &tcx.mk_substs(Substs::empty())) + pub fn mono<'a>(scx: &SharedCrateContext<'a, 'tcx>, def_id: DefId) -> Instance<'tcx> { + Instance::new(def_id, scx.empty_substs_for_def_id(def_id)) } } /// Monomorphizes a type from the AST by first applying the in-scope /// substitutions and then normalizing any associated types. 
-pub fn apply_param_substs<'tcx,T>(tcx: &TyCtxt<'tcx>, - param_substs: &Substs<'tcx>, - value: &T) - -> T - where T : TypeFoldable<'tcx> +pub fn apply_param_substs<'a, 'tcx, T>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_substs: &Substs<'tcx>, + value: &T) + -> T + where T: TransNormalize<'tcx> { let substituted = value.subst(tcx, param_substs); - normalize_associated_type(tcx, &substituted) + tcx.normalize_associated_type(&substituted) } /// Returns the normalized type of a struct field -pub fn field_ty<'tcx>(tcx: &TyCtxt<'tcx>, - param_substs: &Substs<'tcx>, - f: ty::FieldDef<'tcx>) - -> Ty<'tcx> +pub fn field_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + param_substs: &Substs<'tcx>, + f: ty::FieldDef<'tcx>) + -> Ty<'tcx> { - normalize_associated_type(tcx, &f.ty(tcx, param_substs)) + tcx.normalize_associated_type(&f.ty(tcx, param_substs)) } diff --git a/src/librustc_trans/partitioning.rs b/src/librustc_trans/partitioning.rs new file mode 100644 index 0000000000..098ba75924 --- /dev/null +++ b/src/librustc_trans/partitioning.rs @@ -0,0 +1,401 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Partitioning Codegen Units for Incremental Compilation +//! ====================================================== +//! +//! The task of this module is to take the complete set of translation items of +//! a crate and produce a set of codegen units from it, where a codegen unit +//! is a named set of (translation-item, linkage) pairs. That is, this module +//! decides which translation item appears in which codegen units with which +//! linkage. The following paragraphs describe some of the background on the +//! partitioning scheme. +//! +//! The most important opportunity for saving on compilation time with +//! incremental compilation is to avoid re-translating and re-optimizing code. +//! Since the unit of translation and optimization for LLVM is "modules" or, how +//! we call them "codegen units", the particulars of how much time can be saved +//! by incremental compilation are tightly linked to how the output program is +//! partitioned into these codegen units prior to passing it to LLVM -- +//! especially because we have to treat codegen units as opaque entities once +//! they are created: There is no way for us to incrementally update an existing +//! LLVM module and so we have to build any such module from scratch if it was +//! affected by some change in the source code. +//! +//! From that point of view it would make sense to maximize the number of +//! codegen units by, for example, putting each function into its own module. +//! That way only those modules would have to be re-compiled that were actually +//! affected by some change, minimizing the number of functions that could have +//! been re-used but just happened to be located in a module that is +//! re-compiled. +//! +//! However, since LLVM optimization does not work across module boundaries, +//! using such a highly granular partitioning would lead to very slow runtime +//! code since it would effectively prohibit inlining and other inter-procedure +//! optimizations. We want to avoid that as much as possible. +//! +//! Thus we end up with a trade-off: The bigger the codegen units, the better +//! 
LLVM's optimizer can do its work, but also the smaller the compilation time +//! reduction we get from incremental compilation. +//! +//! Ideally, we would create a partitioning such that there are few big codegen +//! units with few interdependencies between them. For now though, we use the +//! following heuristic to determine the partitioning: +//! +//! - There are two codegen units for every source-level module: +//! - One for "stable", that is non-generic, code +//! - One for more "volatile" code, i.e. monomorphized instances of functions +//! defined in that module +//! - Code for monomorphized instances of functions from external crates gets +//! placed into every codegen unit that uses that instance. +//! +//! In order to see why this heuristic makes sense, let's take a look at when a +//! codegen unit can get invalidated: +//! +//! 1. The most straightforward case is when the BODY of a function or global +//! changes. Then any codegen unit containing the code for that item has to be +//! re-compiled. Note that this includes all codegen units where the function +//! has been inlined. +//! +//! 2. The next case is when the SIGNATURE of a function or global changes. In +//! this case, all codegen units containing a REFERENCE to that item have to be +//! re-compiled. This is a superset of case 1. +//! +//! 3. The final and most subtle case is when a REFERENCE to a generic function +//! is added or removed somewhere. Even though the definition of the function +//! might be unchanged, a new REFERENCE might introduce a new monomorphized +//! instance of this function which has to be placed and compiled somewhere. +//! Conversely, when removing a REFERENCE, it might have been the last one with +//! that particular set of generic arguments and thus we have to remove it. +//! +//! From the above we see that just using one codegen unit per source-level +//! module is not such a good idea, since just adding a REFERENCE to some +//! generic item somewhere else would invalidate everything within the module +//! containing the generic item. The heuristic above reduces this detrimental +//! side-effect of references a little by at least not touching the non-generic +//! code of the module. +//! +//! As another optimization, monomorphized functions from external crates get +//! some special handling. Since we assume that the definition of such a +//! function changes rather infrequently compared to local items, we can just +//! instantiate external functions in every codegen unit where it is referenced +//! -- without having to fear that doing this will cause a lot of unnecessary +//! re-compilations. If such a reference is added or removed, the codegen unit +//! has to be re-translated anyway. +//! (Note that this only makes sense if external crates actually don't change +//! frequently. For certain multi-crate projects this might not be a valid +//! assumption). +//! +//! A Note on Inlining +//! ------------------ +//! As briefly mentioned above, in order for LLVM to be able to inline a +//! function call, the body of the function has to be available in the LLVM +//! module where the call is made. This has a few consequences for partitioning: +//! +//! - The partitioning algorithm has to take care of placing functions into all +//! codegen units where they should be available for inlining. It also has to +//! decide on the correct linkage for these functions. +//! +//! - The partitioning algorithm has to know which functions are likely to get +//! 
inlined, so it can distribute function instantiations accordingly. Since +//! there is no way of knowing for sure which functions LLVM will decide to +//! inline in the end, we apply a heuristic here: Only functions marked with +//! #[inline] and (as stated above) functions from external crates are +//! considered for inlining by the partitioner. The current implementation +//! will not try to determine if a function is likely to be inlined by looking +//! at the functions definition. +//! +//! Note though that as a side-effect of creating a codegen units per +//! source-level module, functions from the same module will be available for +//! inlining, even when they are not marked #[inline]. + +use collector::InliningMap; +use llvm; +use monomorphize; +use rustc::hir::def_id::DefId; +use rustc::hir::map::DefPathData; +use rustc::ty::TyCtxt; +use rustc::ty::item_path::characteristic_def_id_of_type; +use syntax::parse::token::{self, InternedString}; +use trans_item::TransItem; +use util::nodemap::{FnvHashMap, FnvHashSet}; + +pub struct CodegenUnit<'tcx> { + pub name: InternedString, + pub items: FnvHashMap, llvm::Linkage>, +} + +pub enum PartitioningStrategy { + /// Generate one codegen unit per source-level module. + PerModule, + + /// Partition the whole crate into a fixed number of codegen units. + FixedUnitCount(usize) +} + +// Anything we can't find a proper codegen unit for goes into this. +const FALLBACK_CODEGEN_UNIT: &'static str = "__rustc_fallback_codegen_unit"; + +pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + trans_items: I, + strategy: PartitioningStrategy, + inlining_map: &InliningMap<'tcx>) + -> Vec> + where I: Iterator> +{ + // In the first step, we place all regular translation items into their + // respective 'home' codegen unit. Regular translation items are all + // functions and statics defined in the local crate. + let mut initial_partitioning = place_root_translation_items(tcx, trans_items); + + // If the partitioning should produce a fixed count of codegen units, merge + // until that count is reached. + if let PartitioningStrategy::FixedUnitCount(count) = strategy { + merge_codegen_units(&mut initial_partitioning, count, &tcx.crate_name[..]); + } + + // In the next step, we use the inlining map to determine which addtional + // translation items have to go into each codegen unit. These additional + // translation items can be drop-glue, functions from external crates, and + // local functions the definition of which is marked with #[inline]. + let post_inlining = place_inlined_translation_items(initial_partitioning, + inlining_map); + post_inlining.0 +} + +struct PreInliningPartitioning<'tcx> { + codegen_units: Vec>, + roots: FnvHashSet>, +} + +struct PostInliningPartitioning<'tcx>(Vec>); + +fn place_root_translation_items<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + trans_items: I) + -> PreInliningPartitioning<'tcx> + where I: Iterator> +{ + let mut roots = FnvHashSet(); + let mut codegen_units = FnvHashMap(); + + for trans_item in trans_items { + let is_root = match trans_item { + TransItem::Static(..) => true, + TransItem::DropGlue(..) 
=> false, + TransItem::Fn(_) => !trans_item.is_from_extern_crate(), + }; + + if is_root { + let characteristic_def_id = characteristic_def_id_of_trans_item(tcx, trans_item); + let is_volatile = trans_item.is_lazily_instantiated(); + + let codegen_unit_name = match characteristic_def_id { + Some(def_id) => compute_codegen_unit_name(tcx, def_id, is_volatile), + None => InternedString::new(FALLBACK_CODEGEN_UNIT), + }; + + let make_codegen_unit = || { + CodegenUnit { + name: codegen_unit_name.clone(), + items: FnvHashMap(), + } + }; + + let mut codegen_unit = codegen_units.entry(codegen_unit_name.clone()) + .or_insert_with(make_codegen_unit); + + let linkage = match trans_item.explicit_linkage(tcx) { + Some(explicit_linkage) => explicit_linkage, + None => { + match trans_item { + TransItem::Static(..) => llvm::ExternalLinkage, + TransItem::DropGlue(..) => unreachable!(), + // Is there any benefit to using ExternalLinkage?: + TransItem::Fn(..) => llvm::WeakODRLinkage, + } + } + }; + + codegen_unit.items.insert(trans_item, linkage); + roots.insert(trans_item); + } + } + + PreInliningPartitioning { + codegen_units: codegen_units.into_iter() + .map(|(_, codegen_unit)| codegen_unit) + .collect(), + roots: roots, + } +} + +fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning<'tcx>, + target_cgu_count: usize, + crate_name: &str) { + assert!(target_cgu_count >= 1); + let codegen_units = &mut initial_partitioning.codegen_units; + + // Merge the two smallest codegen units until the target size is reached. + // Note that "size" is estimated here rather inaccurately as the number of + // translation items in a given unit. This could be improved on. + while codegen_units.len() > target_cgu_count { + // Sort small cgus to the back + codegen_units.as_mut_slice().sort_by_key(|cgu| -(cgu.items.len() as i64)); + let smallest = codegen_units.pop().unwrap(); + let second_smallest = codegen_units.last_mut().unwrap(); + + for (k, v) in smallest.items.into_iter() { + second_smallest.items.insert(k, v); + } + } + + for (index, cgu) in codegen_units.iter_mut().enumerate() { + cgu.name = numbered_codegen_unit_name(crate_name, index); + } + + // If the initial partitioning contained less than target_cgu_count to begin + // with, we won't have enough codegen units here, so add a empty units until + // we reach the target count + while codegen_units.len() < target_cgu_count { + let index = codegen_units.len(); + codegen_units.push(CodegenUnit { + name: numbered_codegen_unit_name(crate_name, index), + items: FnvHashMap() + }); + } + + fn numbered_codegen_unit_name(crate_name: &str, index: usize) -> InternedString { + token::intern_and_get_ident(&format!("{}.{}", crate_name, index)[..]) + } +} + +fn place_inlined_translation_items<'tcx>(initial_partitioning: PreInliningPartitioning<'tcx>, + inlining_map: &InliningMap<'tcx>) + -> PostInliningPartitioning<'tcx> { + let mut new_partitioning = Vec::new(); + + for codegen_unit in &initial_partitioning.codegen_units[..] 
{ + // Collect all items that need to be available in this codegen unit + let mut reachable = FnvHashSet(); + for root in codegen_unit.items.keys() { + follow_inlining(*root, inlining_map, &mut reachable); + } + + let mut new_codegen_unit = CodegenUnit { + name: codegen_unit.name.clone(), + items: FnvHashMap(), + }; + + // Add all translation items that are not already there + for trans_item in reachable { + if let Some(linkage) = codegen_unit.items.get(&trans_item) { + // This is a root, just copy it over + new_codegen_unit.items.insert(trans_item, *linkage); + } else { + if initial_partitioning.roots.contains(&trans_item) { + // This item will be instantiated in some other codegen unit, + // so we just add it here with AvailableExternallyLinkage + new_codegen_unit.items.insert(trans_item, + llvm::AvailableExternallyLinkage); + } else { + // We can't be sure if this will also be instantiated + // somewhere else, so we add an instance here with + // LinkOnceODRLinkage. That way the item can be discarded if + // it's not needed (inlined) after all. + new_codegen_unit.items.insert(trans_item, + llvm::LinkOnceODRLinkage); + } + } + } + + new_partitioning.push(new_codegen_unit); + } + + return PostInliningPartitioning(new_partitioning); + + fn follow_inlining<'tcx>(trans_item: TransItem<'tcx>, + inlining_map: &InliningMap<'tcx>, + visited: &mut FnvHashSet>) { + if !visited.insert(trans_item) { + return; + } + + inlining_map.with_inlining_candidates(trans_item, |target| { + follow_inlining(target, inlining_map, visited); + }); + } +} + +fn characteristic_def_id_of_trans_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + trans_item: TransItem<'tcx>) + -> Option { + match trans_item { + TransItem::Fn(instance) => { + // If this is a method, we want to put it into the same module as + // its self-type. If the self-type does not provide a characteristic + // DefId, we use the location of the impl after all. + + if let Some(self_ty) = instance.substs.self_ty() { + // This is an implementation of a trait method. + return characteristic_def_id_of_type(self_ty).or(Some(instance.def)); + } + + if let Some(impl_def_id) = tcx.impl_of_method(instance.def) { + // This is a method within an inherent impl, find out what the + // self-type is: + let impl_self_ty = tcx.lookup_item_type(impl_def_id).ty; + let impl_self_ty = tcx.erase_regions(&impl_self_ty); + let impl_self_ty = monomorphize::apply_param_substs(tcx, + instance.substs, + &impl_self_ty); + + if let Some(def_id) = characteristic_def_id_of_type(impl_self_ty) { + return Some(def_id); + } + } + + Some(instance.def) + } + TransItem::DropGlue(dg) => characteristic_def_id_of_type(dg.ty()), + TransItem::Static(node_id) => Some(tcx.map.local_def_id(node_id)), + } +} + +fn compute_codegen_unit_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + def_id: DefId, + volatile: bool) + -> InternedString { + // Unfortunately we cannot just use the `ty::item_path` infrastructure here + // because we need paths to modules and the DefIds of those are not + // available anymore for external items. + let mut mod_path = String::with_capacity(64); + + let def_path = tcx.def_path(def_id); + mod_path.push_str(&tcx.crate_name(def_path.krate)); + + for part in tcx.def_path(def_id) + .data + .iter() + .take_while(|part| { + match part.data { + DefPathData::Module(..) 
=> true, + _ => false, + } + }) { + mod_path.push_str("-"); + mod_path.push_str(&part.data.as_interned_str()); + } + + if volatile { + mod_path.push_str(".volatile"); + } + + return token::intern_and_get_ident(&mod_path[..]); +} diff --git a/src/librustc_trans/symbol_names_test.rs b/src/librustc_trans/symbol_names_test.rs index 2e3355968d..284a227276 100644 --- a/src/librustc_trans/symbol_names_test.rs +++ b/src/librustc_trans/symbol_names_test.rs @@ -52,7 +52,7 @@ impl<'a, 'tcx> SymbolNamesTest<'a, 'tcx> { for attr in tcx.get_attrs(def_id).iter() { if attr.check_name(SYMBOL_NAME) { // for now, can only use on monomorphic names - let instance = Instance::mono(tcx, def_id); + let instance = Instance::mono(self.ccx.shared(), def_id); let name = symbol_names::exported_name(self.ccx, &instance); tcx.sess.span_err(attr.span, &format!("symbol-name({})", name)); } else if attr.check_name(ITEM_PATH) { diff --git a/src/librustc_trans/trans_item.rs b/src/librustc_trans/trans_item.rs new file mode 100644 index 0000000000..d7c5c41a15 --- /dev/null +++ b/src/librustc_trans/trans_item.rs @@ -0,0 +1,384 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Walks the crate looking for items/impl-items/trait-items that have +//! either a `rustc_symbol_name` or `rustc_item_path` attribute and +//! generates an error giving, respectively, the symbol name or +//! item-path. This is used for unit testing the code that generates +//! paths etc in all kinds of annoying scenarios. + +use base::llvm_linkage_by_name; +use glue::DropGlueKind; +use llvm; +use monomorphize::Instance; +use rustc::hir; +use rustc::hir::def_id::DefId; +use rustc::ty::{self, Ty, TyCtxt}; +use rustc::ty::subst; +use std::hash::{Hash, Hasher}; +use syntax::ast::{self, NodeId}; +use syntax::attr; +use syntax::parse::token; + +#[derive(PartialEq, Eq, Clone, Copy, Debug)] +pub enum TransItem<'tcx> { + DropGlue(DropGlueKind<'tcx>), + Fn(Instance<'tcx>), + Static(NodeId) +} + +impl<'tcx> Hash for TransItem<'tcx> { + fn hash(&self, s: &mut H) { + match *self { + TransItem::DropGlue(t) => { + 0u8.hash(s); + t.hash(s); + }, + TransItem::Fn(instance) => { + 1u8.hash(s); + instance.def.hash(s); + (instance.substs as *const _ as usize).hash(s); + } + TransItem::Static(node_id) => { + 2u8.hash(s); + node_id.hash(s); + } + }; + } +} + +//=----------------------------------------------------------------------------- +// TransItem String Keys +//=----------------------------------------------------------------------------- + +// The code below allows for producing a unique string key for a trans item. +// These keys are used by the handwritten auto-tests, so they need to be +// predictable and human-readable. +// +// Note: A lot of this could looks very similar to what's already in the +// ppaux module. It would be good to refactor things so we only have one +// parameterizable implementation for printing types. + +/// Same as `unique_type_name()` but with the result pushed onto the given +/// `output` parameter. 
+pub fn push_unique_type_name<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + t: ty::Ty<'tcx>, + output: &mut String) { + match t.sty { + ty::TyBool => output.push_str("bool"), + ty::TyChar => output.push_str("char"), + ty::TyStr => output.push_str("str"), + ty::TyInt(ast::IntTy::Is) => output.push_str("isize"), + ty::TyInt(ast::IntTy::I8) => output.push_str("i8"), + ty::TyInt(ast::IntTy::I16) => output.push_str("i16"), + ty::TyInt(ast::IntTy::I32) => output.push_str("i32"), + ty::TyInt(ast::IntTy::I64) => output.push_str("i64"), + ty::TyUint(ast::UintTy::Us) => output.push_str("usize"), + ty::TyUint(ast::UintTy::U8) => output.push_str("u8"), + ty::TyUint(ast::UintTy::U16) => output.push_str("u16"), + ty::TyUint(ast::UintTy::U32) => output.push_str("u32"), + ty::TyUint(ast::UintTy::U64) => output.push_str("u64"), + ty::TyFloat(ast::FloatTy::F32) => output.push_str("f32"), + ty::TyFloat(ast::FloatTy::F64) => output.push_str("f64"), + ty::TyStruct(adt_def, substs) | + ty::TyEnum(adt_def, substs) => { + push_item_name(tcx, adt_def.did, output); + push_type_params(tcx, &substs.types, &[], output); + }, + ty::TyTuple(component_types) => { + output.push('('); + for &component_type in component_types { + push_unique_type_name(tcx, component_type, output); + output.push_str(", "); + } + if !component_types.is_empty() { + output.pop(); + output.pop(); + } + output.push(')'); + }, + ty::TyBox(inner_type) => { + output.push_str("Box<"); + push_unique_type_name(tcx, inner_type, output); + output.push('>'); + }, + ty::TyRawPtr(ty::TypeAndMut { ty: inner_type, mutbl } ) => { + output.push('*'); + match mutbl { + hir::MutImmutable => output.push_str("const "), + hir::MutMutable => output.push_str("mut "), + } + + push_unique_type_name(tcx, inner_type, output); + }, + ty::TyRef(_, ty::TypeAndMut { ty: inner_type, mutbl }) => { + output.push('&'); + if mutbl == hir::MutMutable { + output.push_str("mut "); + } + + push_unique_type_name(tcx, inner_type, output); + }, + ty::TyArray(inner_type, len) => { + output.push('['); + push_unique_type_name(tcx, inner_type, output); + output.push_str(&format!("; {}", len)); + output.push(']'); + }, + ty::TySlice(inner_type) => { + output.push('['); + push_unique_type_name(tcx, inner_type, output); + output.push(']'); + }, + ty::TyTrait(ref trait_data) => { + push_item_name(tcx, trait_data.principal.skip_binder().def_id, output); + push_type_params(tcx, + &trait_data.principal.skip_binder().substs.types, + &trait_data.bounds.projection_bounds, + output); + }, + ty::TyFnDef(_, _, &ty::BareFnTy{ unsafety, abi, ref sig } ) | + ty::TyFnPtr(&ty::BareFnTy{ unsafety, abi, ref sig } ) => { + if unsafety == hir::Unsafety::Unsafe { + output.push_str("unsafe "); + } + + if abi != ::abi::Abi::Rust { + output.push_str("extern \""); + output.push_str(abi.name()); + output.push_str("\" "); + } + + output.push_str("fn("); + + let sig = tcx.erase_late_bound_regions(sig); + if !sig.inputs.is_empty() { + for ¶meter_type in &sig.inputs { + push_unique_type_name(tcx, parameter_type, output); + output.push_str(", "); + } + output.pop(); + output.pop(); + } + + if sig.variadic { + if !sig.inputs.is_empty() { + output.push_str(", ..."); + } else { + output.push_str("..."); + } + } + + output.push(')'); + + match sig.output { + ty::FnConverging(result_type) if result_type.is_nil() => {} + ty::FnConverging(result_type) => { + output.push_str(" -> "); + push_unique_type_name(tcx, result_type, output); + } + ty::FnDiverging => { + output.push_str(" -> !"); + } + } + }, + ty::TyClosure(def_id, ref 
closure_substs) => { + push_item_name(tcx, def_id, output); + output.push_str("{"); + output.push_str(&format!("{}:{}", def_id.krate, def_id.index.as_usize())); + output.push_str("}"); + push_type_params(tcx, &closure_substs.func_substs.types, &[], output); + } + ty::TyError | + ty::TyInfer(_) | + ty::TyProjection(..) | + ty::TyParam(_) => { + bug!("debuginfo: Trying to create type name for \ + unexpected type: {:?}", t); + } + } +} + +fn push_item_name(tcx: TyCtxt, + def_id: DefId, + output: &mut String) { + let def_path = tcx.def_path(def_id); + + // some_crate:: + output.push_str(&tcx.crate_name(def_path.krate)); + output.push_str("::"); + + // foo::bar::ItemName:: + for part in tcx.def_path(def_id).data { + output.push_str(&format!("{}[{}]::", + part.data.as_interned_str(), + part.disambiguator)); + } + + // remove final "::" + output.pop(); + output.pop(); +} + +fn push_type_params<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + types: &'tcx subst::VecPerParamSpace>, + projections: &[ty::PolyProjectionPredicate<'tcx>], + output: &mut String) { + if types.is_empty() && projections.is_empty() { + return; + } + + output.push('<'); + + for &type_parameter in types { + push_unique_type_name(tcx, type_parameter, output); + output.push_str(", "); + } + + for projection in projections { + let projection = projection.skip_binder(); + let name = token::get_ident_interner().get(projection.projection_ty.item_name); + output.push_str(&name[..]); + output.push_str("="); + push_unique_type_name(tcx, projection.ty, output); + output.push_str(", "); + } + + output.pop(); + output.pop(); + + output.push('>'); +} + +fn push_instance_as_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + instance: Instance<'tcx>, + output: &mut String) { + push_item_name(tcx, instance.def, output); + push_type_params(tcx, &instance.substs.types, &[], output); +} + +pub fn def_id_to_string(tcx: TyCtxt, def_id: DefId) -> String { + let mut output = String::new(); + push_item_name(tcx, def_id, &mut output); + output +} + +pub fn type_to_string<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + ty: ty::Ty<'tcx>) + -> String { + let mut output = String::new(); + push_unique_type_name(tcx, ty, &mut output); + output +} + +impl<'tcx> TransItem<'tcx> { + + pub fn requests_inline<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> bool { + match *self { + TransItem::Fn(ref instance) => { + let attributes = tcx.get_attrs(instance.def); + attr::requests_inline(&attributes[..]) + } + TransItem::DropGlue(..) => true, + TransItem::Static(..) => false, + } + } + + pub fn is_from_extern_crate(&self) -> bool { + match *self { + TransItem::Fn(ref instance) => !instance.def.is_local(), + TransItem::DropGlue(..) | + TransItem::Static(..) => false, + } + } + + pub fn is_lazily_instantiated(&self) -> bool { + match *self { + TransItem::Fn(ref instance) => !instance.substs.types.is_empty(), + TransItem::DropGlue(..) => true, + TransItem::Static(..) => false, + } + } + + pub fn explicit_linkage<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Option { + let def_id = match *self { + TransItem::Fn(ref instance) => instance.def, + TransItem::Static(node_id) => tcx.map.local_def_id(node_id), + TransItem::DropGlue(..) 
=> return None, + }; + + let attributes = tcx.get_attrs(def_id); + if let Some(name) = attr::first_attr_value_str_by_name(&attributes, "linkage") { + if let Some(linkage) = llvm_linkage_by_name(&name) { + Some(linkage) + } else { + let span = tcx.map.span_if_local(def_id); + if let Some(span) = span { + tcx.sess.span_fatal(span, "invalid linkage specified") + } else { + tcx.sess.fatal(&format!("invalid linkage specified: {}", name)) + } + } + } else { + None + } + } + + pub fn to_string<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) -> String { + let hir_map = &tcx.map; + + return match *self { + TransItem::DropGlue(dg) => { + let mut s = String::with_capacity(32); + match dg { + DropGlueKind::Ty(_) => s.push_str("drop-glue "), + DropGlueKind::TyContents(_) => s.push_str("drop-glue-contents "), + }; + push_unique_type_name(tcx, dg.ty(), &mut s); + s + } + TransItem::Fn(instance) => { + to_string_internal(tcx, "fn ", instance) + }, + TransItem::Static(node_id) => { + let def_id = hir_map.local_def_id(node_id); + let empty_substs = tcx.mk_substs(subst::Substs::empty()); + let instance = Instance::new(def_id, empty_substs); + to_string_internal(tcx, "static ", instance) + }, + }; + + fn to_string_internal<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + prefix: &str, + instance: Instance<'tcx>) + -> String { + let mut result = String::with_capacity(32); + result.push_str(prefix); + push_instance_as_string(tcx, instance, &mut result); + result + } + } + + pub fn to_raw_string(&self) -> String { + match *self { + TransItem::DropGlue(dg) => { + format!("DropGlue({})", dg.ty() as *const _ as usize) + } + TransItem::Fn(instance) => { + format!("Fn({:?}, {})", + instance.def, + instance.substs as *const _ as usize) + } + TransItem::Static(id) => { + format!("Static({:?})", id) + } + } + } +} diff --git a/src/librustc_trans/type_of.rs b/src/librustc_trans/type_of.rs index 863ae3f942..e5acb9b669 100644 --- a/src/librustc_trans/type_of.rs +++ b/src/librustc_trans/type_of.rs @@ -11,7 +11,6 @@ #![allow(non_camel_case_types)] use rustc::hir::def_id::DefId; -use rustc::infer; use rustc::ty::subst; use abi::FnType; use adt; @@ -124,8 +123,10 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ cx.llsizingtypes().borrow_mut().insert(t, llsizingty); // FIXME(eddyb) Temporary sanity check for ty::layout. - let infcx = infer::normalizing_infer_ctxt(cx.tcx(), &cx.tcx().tables, ProjectionMode::Any); - match t.layout(&infcx) { + let layout = cx.tcx().normalizing_infer_ctxt(ProjectionMode::Any).enter(|infcx| { + t.layout(&infcx) + }); + match layout { Ok(layout) => { if !type_is_sized(cx.tcx(), t) { if !layout.is_unsized() { @@ -156,6 +157,17 @@ pub fn sizing_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> Typ llsizingty } +pub fn fat_ptr_base_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> Type { + match ty.sty { + ty::TyBox(t) | + ty::TyRef(_, ty::TypeAndMut { ty: t, .. }) | + ty::TyRawPtr(ty::TypeAndMut { ty: t, .. }) if !type_is_sized(ccx.tcx(), t) => { + in_memory_type_of(ccx, t).ptr_to() + } + _ => bug!("expected fat ptr ty but got {:?}", ty) + } +} + fn unsized_info_ty<'a, 'tcx>(ccx: &CrateContext<'a, 'tcx>, ty: Ty<'tcx>) -> Type { let unsized_part = ccx.tcx().struct_tail(ty); match unsized_part.sty { @@ -296,7 +308,7 @@ pub fn in_memory_type_of<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>, t: Ty<'tcx>) -> ty::TyFnDef(..) 
=> Type::nil(cx), ty::TyFnPtr(f) => { let sig = cx.tcx().erase_late_bound_regions(&f.sig); - let sig = infer::normalize_associated_type(cx.tcx(), &sig); + let sig = cx.tcx().normalize_associated_type(&sig); FnType::new(cx, f.abi, &sig, &[]).llvm_type(cx).ptr_to() } ty::TyTuple(ref tys) if tys.is_empty() => Type::nil(cx), diff --git a/src/librustc_typeck/Cargo.toml b/src/librustc_typeck/Cargo.toml index e9dabf16ea..a0c4c7534f 100644 --- a/src/librustc_typeck/Cargo.toml +++ b/src/librustc_typeck/Cargo.toml @@ -7,6 +7,7 @@ version = "0.0.0" name = "rustc_typeck" path = "lib.rs" crate-type = ["dylib"] +test = false [dependencies] log = { path = "../liblog" } diff --git a/src/librustc_typeck/astconv.rs b/src/librustc_typeck/astconv.rs index 21122e7095..1df12b63e0 100644 --- a/src/librustc_typeck/astconv.rs +++ b/src/librustc_typeck/astconv.rs @@ -48,38 +48,41 @@ //! case but `&a` in the second. Basically, defaults that appear inside //! an rptr (`&r.T`) use the region `r` that appears in the rptr. -use middle::astconv_util::{prim_ty_to_ty, prohibit_type_params, prohibit_projection}; use middle::const_val::ConstVal; -use rustc_const_eval::eval_const_expr_partial; +use rustc_const_eval::{eval_const_expr_partial, ConstEvalErr}; use rustc_const_eval::EvalHint::UncheckedExprHint; +use rustc_const_eval::ErrKind::ErroneousReferencedConstant; +use hir::{self, SelfKind}; use hir::def::{self, Def}; use hir::def_id::DefId; +use hir::print as pprust; use middle::resolve_lifetime as rl; +use rustc::lint; use rustc::ty::subst::{FnSpace, TypeSpace, SelfSpace, Subst, Substs, ParamSpace}; use rustc::traits; use rustc::ty::{self, Ty, TyCtxt, ToPredicate, TypeFoldable}; use rustc::ty::wf::object_region_bounds; +use rustc_back::slice; use require_c_abi_if_variadic; use rscope::{self, UnelidableRscope, RegionScope, ElidableRscope, ObjectLifetimeDefaultRscope, ShiftedRscope, BindingRscope, ElisionFailureInfo, ElidedLifetime}; use util::common::{ErrorReported, FN_OUTPUT_NAME}; -use util::nodemap::FnvHashSet; +use util::nodemap::{NodeMap, FnvHashSet}; use rustc_const_math::ConstInt; - +use std::cell::RefCell; use syntax::{abi, ast}; use syntax::codemap::{Span, Pos}; use syntax::errors::DiagnosticBuilder; use syntax::feature_gate::{GateIssue, emit_feature_err}; -use syntax::parse::token; +use syntax::parse::token::{self, keywords}; -use rustc::hir::print as pprust; -use rustc::hir; -use rustc_back::slice; +pub trait AstConv<'gcx, 'tcx> { + fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx>; -pub trait AstConv<'tcx> { - fn tcx<'a>(&'a self) -> &'a TyCtxt<'tcx>; + /// A cache used for the result of `ast_ty_to_ty_cache` + fn ast_ty_to_ty_cache(&self) -> &RefCell>>; /// Identify the type scheme for an item with a type, like a type /// alias, fn, or struct. This allows you to figure out the set of @@ -112,16 +115,14 @@ pub trait AstConv<'tcx> { /// are in scope into free ones. This function should only return Some /// within a fn body. /// See ParameterEnvironment::free_substs for more information. - fn get_free_substs(&self) -> Option<&Substs<'tcx>> { - None - } + fn get_free_substs(&self) -> Option<&Substs<'tcx>>; /// What type should we use when a type is omitted? 
- fn ty_infer(&self, - param_and_substs: Option>, - substs: Option<&mut Substs<'tcx>>, - space: Option, - span: Span) -> Ty<'tcx>; + fn ty_infer(&self, + param_and_substs: Option>, + substs: Option<&mut Substs<'tcx>>, + space: Option, + span: Span) -> Ty<'tcx>; /// Projecting an associated type from a (potentially) /// higher-ranked trait reference is more complicated, because of @@ -134,18 +135,7 @@ pub trait AstConv<'tcx> { span: Span, poly_trait_ref: ty::PolyTraitRef<'tcx>, item_name: ast::Name) - -> Ty<'tcx> - { - if let Some(trait_ref) = self.tcx().no_late_bound_regions(&poly_trait_ref) { - self.projected_ty(span, trait_ref, item_name) - } else { - // no late-bound regions, we can just ignore the binder - span_err!(self.tcx().sess, span, E0212, - "cannot extract an associated type from a higher-ranked trait bound \ - in this context"); - self.tcx().types.err - } - } + -> Ty<'tcx>; /// Project an associated type from a non-higher-ranked trait reference. /// This is fairly straightforward and can be accommodated in any context. @@ -154,9 +144,31 @@ pub trait AstConv<'tcx> { _trait_ref: ty::TraitRef<'tcx>, _item_name: ast::Name) -> Ty<'tcx>; + + /// Invoked when we encounter an error from some prior pass + /// (e.g. resolve) that is translated into a ty-error. This is + /// used to help suppress derived errors typeck might otherwise + /// report. + fn set_tainted_by_errors(&self); +} + +#[derive(PartialEq, Eq)] +pub enum PathParamMode { + // Any path in a type context. + Explicit, + // The `module::Type` in `module::Type::method` in an expression. + Optional +} + +struct ConvertedBinding<'tcx> { + item_name: ast::Name, + ty: Ty<'tcx>, + span: Span, } -pub fn ast_region_to_region(tcx: &TyCtxt, lifetime: &hir::Lifetime) +type TraitAndProjections<'tcx> = (ty::PolyTraitRef<'tcx>, Vec>); + +pub fn ast_region_to_region(tcx: TyCtxt, lifetime: &hir::Lifetime) -> ty::Region { let r = match tcx.named_region_map.get(&lifetime.id) { None => { @@ -199,20 +211,22 @@ pub fn ast_region_to_region(tcx: &TyCtxt, lifetime: &hir::Lifetime) fn report_elision_failure( db: &mut DiagnosticBuilder, - default_span: Span, params: Vec) { let mut m = String::new(); let len = params.len(); - let mut any_lifetimes = false; - for (i, info) in params.into_iter().enumerate() { + let elided_params: Vec<_> = params.into_iter() + .filter(|info| info.lifetime_count > 0) + .collect(); + + let elided_len = elided_params.len(); + + for (i, info) in elided_params.into_iter().enumerate() { let ElisionFailureInfo { name, lifetime_count: n, have_bound_regions } = info; - any_lifetimes = any_lifetimes || (n > 0); - let help_name = if name.is_empty() { format!("argument {}", i + 1) } else { @@ -226,1919 +240,1955 @@ fn report_elision_failure( if have_bound_regions { "free " } else { "" } ) })[..]); - if len == 2 && i == 0 { + if elided_len == 2 && i == 0 { m.push_str(" or "); - } else if i + 2 == len { + } else if i + 2 == elided_len { m.push_str(", or "); - } else if i + 1 != len { + } else if i != elided_len - 1 { m.push_str(", "); } + } if len == 0 { - fileline_help!(db, default_span, - "this function's return type contains a borrowed value, but \ - there is no value for it to be borrowed from"); - fileline_help!(db, default_span, - "consider giving it a 'static lifetime"); - } else if !any_lifetimes { - fileline_help!(db, default_span, - "this function's return type contains a borrowed value with \ - an elided lifetime, but the lifetime cannot be derived from \ - the arguments"); - fileline_help!(db, default_span, - "consider 
giving it an explicit bounded or 'static \ - lifetime"); - } else if len == 1 { - fileline_help!(db, default_span, - "this function's return type contains a borrowed value, but \ - the signature does not say which {} it is borrowed from", - m); + help!(db, + "this function's return type contains a borrowed value, but \ + there is no value for it to be borrowed from"); + help!(db, + "consider giving it a 'static lifetime"); + } else if elided_len == 0 { + help!(db, + "this function's return type contains a borrowed value with \ + an elided lifetime, but the lifetime cannot be derived from \ + the arguments"); + help!(db, + "consider giving it an explicit bounded or 'static \ + lifetime"); + } else if elided_len == 1 { + help!(db, + "this function's return type contains a borrowed value, but \ + the signature does not say which {} it is borrowed from", + m); } else { - fileline_help!(db, default_span, - "this function's return type contains a borrowed value, but \ - the signature does not say whether it is borrowed from {}", - m); + help!(db, + "this function's return type contains a borrowed value, but \ + the signature does not say whether it is borrowed from {}", + m); } } -pub fn opt_ast_region_to_region<'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - default_span: Span, - opt_lifetime: &Option) -> ty::Region -{ - let r = match *opt_lifetime { - Some(ref lifetime) => { - ast_region_to_region(this.tcx(), lifetime) - } +impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o { + pub fn opt_ast_region_to_region(&self, + rscope: &RegionScope, + default_span: Span, + opt_lifetime: &Option) -> ty::Region + { + let r = match *opt_lifetime { + Some(ref lifetime) => { + ast_region_to_region(self.tcx(), lifetime) + } - None => match rscope.anon_regions(default_span, 1) { - Ok(rs) => rs[0], - Err(params) => { - let mut err = struct_span_err!(this.tcx().sess, default_span, E0106, - "missing lifetime specifier"); - if let Some(params) = params { - report_elision_failure(&mut err, default_span, params); + None => match rscope.anon_regions(default_span, 1) { + Ok(rs) => rs[0], + Err(params) => { + let mut err = struct_span_err!(self.tcx().sess, default_span, E0106, + "missing lifetime specifier"); + if let Some(params) = params { + report_elision_failure(&mut err, params); + } + err.emit(); + ty::ReStatic } - err.emit(); - ty::ReStatic } - } - }; - - debug!("opt_ast_region_to_region(opt_lifetime={:?}) yields {:?}", - opt_lifetime, - r); + }; - r -} + debug!("opt_ast_region_to_region(opt_lifetime={:?}) yields {:?}", + opt_lifetime, + r); -/// Given a path `path` that refers to an item `I` with the declared generics `decl_generics`, -/// returns an appropriate set of substitutions for this particular reference to `I`. -pub fn ast_path_substs_for_ty<'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - param_mode: PathParamMode, - decl_generics: &ty::Generics<'tcx>, - item_segment: &hir::PathSegment) - -> Substs<'tcx> -{ - let tcx = this.tcx(); - - // ast_path_substs() is only called to convert paths that are - // known to refer to traits, types, or structs. In these cases, - // all type parameters defined for the item being referenced will - // be in the TypeSpace or SelfSpace. - // - // Note: in the case of traits, the self parameter is also - // defined, but we don't currently create a `type_param_def` for - // `Self` because it is implicit. 
- assert!(decl_generics.regions.all(|d| d.space == TypeSpace)); - assert!(decl_generics.types.all(|d| d.space != FnSpace)); - - let (regions, types, assoc_bindings) = match item_segment.parameters { - hir::AngleBracketedParameters(ref data) => { - convert_angle_bracketed_parameters(this, rscope, span, decl_generics, data) - } - hir::ParenthesizedParameters(..) => { - span_err!(tcx.sess, span, E0214, - "parenthesized parameters may only be used with a trait"); - let ty_param_defs = decl_generics.types.get_slice(TypeSpace); - (Substs::empty(), - ty_param_defs.iter().map(|_| tcx.types.err).collect(), - vec![]) - } - }; + r + } - prohibit_projections(this.tcx(), &assoc_bindings); + /// Given a path `path` that refers to an item `I` with the declared generics `decl_generics`, + /// returns an appropriate set of substitutions for this particular reference to `I`. + pub fn ast_path_substs_for_ty(&self, + rscope: &RegionScope, + span: Span, + param_mode: PathParamMode, + decl_generics: &ty::Generics<'tcx>, + item_segment: &hir::PathSegment) + -> Substs<'tcx> + { + let tcx = self.tcx(); + + // ast_path_substs() is only called to convert paths that are + // known to refer to traits, types, or structs. In these cases, + // all type parameters defined for the item being referenced will + // be in the TypeSpace or SelfSpace. + // + // Note: in the case of traits, the self parameter is also + // defined, but we don't currently create a `type_param_def` for + // `Self` because it is implicit. + assert!(decl_generics.regions.all(|d| d.space == TypeSpace)); + assert!(decl_generics.types.all(|d| d.space != FnSpace)); + + let (regions, types, assoc_bindings) = match item_segment.parameters { + hir::AngleBracketedParameters(ref data) => { + self.convert_angle_bracketed_parameters(rscope, span, decl_generics, data) + } + hir::ParenthesizedParameters(..) => { + span_err!(tcx.sess, span, E0214, + "parenthesized parameters may only be used with a trait"); + let ty_param_defs = decl_generics.types.get_slice(TypeSpace); + (Substs::empty(), + ty_param_defs.iter().map(|_| tcx.types.err).collect(), + vec![]) + } + }; - create_substs_for_ast_path(this, - span, - param_mode, - decl_generics, - None, - types, - regions) -} + assoc_bindings.first().map(|b| self.tcx().prohibit_projection(b.span)); -#[derive(PartialEq, Eq)] -pub enum PathParamMode { - // Any path in a type context. - Explicit, - // The `module::Type` in `module::Type::method` in an expression. - Optional -} + self.create_substs_for_ast_path(span, + param_mode, + decl_generics, + None, + types, + regions) + } -fn create_region_substs<'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - decl_generics: &ty::Generics<'tcx>, - regions_provided: Vec) - -> Substs<'tcx> -{ - let tcx = this.tcx(); - - // If the type is parameterized by this region, then replace this - // region with the current anon region binding (in other words, - // whatever & would get replaced with). 
- let expected_num_region_params = decl_generics.regions.len(TypeSpace); - let supplied_num_region_params = regions_provided.len(); - let regions = if expected_num_region_params == supplied_num_region_params { - regions_provided - } else { - let anon_regions = - rscope.anon_regions(span, expected_num_region_params); + fn create_region_substs(&self, + rscope: &RegionScope, + span: Span, + decl_generics: &ty::Generics<'tcx>, + regions_provided: Vec) + -> Substs<'tcx> + { + let tcx = self.tcx(); + + // If the type is parameterized by this region, then replace this + // region with the current anon region binding (in other words, + // whatever & would get replaced with). + let expected_num_region_params = decl_generics.regions.len(TypeSpace); + let supplied_num_region_params = regions_provided.len(); + let regions = if expected_num_region_params == supplied_num_region_params { + regions_provided + } else { + let anon_regions = + rscope.anon_regions(span, expected_num_region_params); - if supplied_num_region_params != 0 || anon_regions.is_err() { - report_lifetime_number_error(tcx, span, - supplied_num_region_params, - expected_num_region_params); - } + if supplied_num_region_params != 0 || anon_regions.is_err() { + report_lifetime_number_error(tcx, span, + supplied_num_region_params, + expected_num_region_params); + } - match anon_regions { - Ok(anon_regions) => anon_regions, - Err(_) => (0..expected_num_region_params).map(|_| ty::ReStatic).collect() - } - }; - Substs::new_type(vec![], regions) -} + match anon_regions { + Ok(anon_regions) => anon_regions, + Err(_) => (0..expected_num_region_params).map(|_| ty::ReStatic).collect() + } + }; + Substs::new_type(vec![], regions) + } -/// Given the type/region arguments provided to some path (along with -/// an implicit Self, if this is a trait reference) returns the complete -/// set of substitutions. This may involve applying defaulted type parameters. -/// -/// Note that the type listing given here is *exactly* what the user provided. -/// -/// The `region_substs` should be the result of `create_region_substs` -/// -- that is, a substitution with no types but the correct number of -/// regions. -fn create_substs_for_ast_path<'tcx>( - this: &AstConv<'tcx>, - span: Span, - param_mode: PathParamMode, - decl_generics: &ty::Generics<'tcx>, - self_ty: Option>, - types_provided: Vec>, - region_substs: Substs<'tcx>) - -> Substs<'tcx> -{ - let tcx = this.tcx(); - - debug!("create_substs_for_ast_path(decl_generics={:?}, self_ty={:?}, \ - types_provided={:?}, region_substs={:?})", - decl_generics, self_ty, types_provided, - region_substs); - - assert_eq!(region_substs.regions.len(TypeSpace), decl_generics.regions.len(TypeSpace)); - assert!(region_substs.types.is_empty()); - - // Convert the type parameters supplied by the user. 
- let ty_param_defs = decl_generics.types.get_slice(TypeSpace); - let formal_ty_param_count = ty_param_defs.len(); - let required_ty_param_count = ty_param_defs.iter() - .take_while(|x| x.default.is_none()) - .count(); - - let mut type_substs = get_type_substs_for_defs(this, - span, - types_provided, - param_mode, - ty_param_defs, - region_substs.clone(), - self_ty); - - let supplied_ty_param_count = type_substs.len(); - check_type_argument_count(this.tcx(), span, supplied_ty_param_count, - required_ty_param_count, formal_ty_param_count); - - if supplied_ty_param_count < required_ty_param_count { - while type_substs.len() < required_ty_param_count { - type_substs.push(tcx.types.err); + /// Given the type/region arguments provided to some path (along with + /// an implicit Self, if this is a trait reference) returns the complete + /// set of substitutions. This may involve applying defaulted type parameters. + /// + /// Note that the type listing given here is *exactly* what the user provided. + /// + /// The `region_substs` should be the result of `create_region_substs` + /// -- that is, a substitution with no types but the correct number of + /// regions. + fn create_substs_for_ast_path(&self, + span: Span, + param_mode: PathParamMode, + decl_generics: &ty::Generics<'tcx>, + self_ty: Option>, + types_provided: Vec>, + region_substs: Substs<'tcx>) + -> Substs<'tcx> + { + let tcx = self.tcx(); + + debug!("create_substs_for_ast_path(decl_generics={:?}, self_ty={:?}, \ + types_provided={:?}, region_substs={:?})", + decl_generics, self_ty, types_provided, + region_substs); + + assert_eq!(region_substs.regions.len(TypeSpace), decl_generics.regions.len(TypeSpace)); + assert!(region_substs.types.is_empty()); + + // Convert the type parameters supplied by the user. + let ty_param_defs = decl_generics.types.get_slice(TypeSpace); + let formal_ty_param_count = ty_param_defs.len(); + let required_ty_param_count = ty_param_defs.iter() + .take_while(|x| x.default.is_none()) + .count(); + + let mut type_substs = self.get_type_substs_for_defs(span, + types_provided, + param_mode, + ty_param_defs, + region_substs.clone(), + self_ty); + + let supplied_ty_param_count = type_substs.len(); + check_type_argument_count(self.tcx(), span, supplied_ty_param_count, + required_ty_param_count, formal_ty_param_count); + + if supplied_ty_param_count < required_ty_param_count { + while type_substs.len() < required_ty_param_count { + type_substs.push(tcx.types.err); + } + } else if supplied_ty_param_count > formal_ty_param_count { + type_substs.truncate(formal_ty_param_count); } - } else if supplied_ty_param_count > formal_ty_param_count { - type_substs.truncate(formal_ty_param_count); - } - assert!(type_substs.len() >= required_ty_param_count && - type_substs.len() <= formal_ty_param_count); + assert!(type_substs.len() >= required_ty_param_count && + type_substs.len() <= formal_ty_param_count); - let mut substs = region_substs; - substs.types.extend(TypeSpace, type_substs.into_iter()); + let mut substs = region_substs; + substs.types.extend(TypeSpace, type_substs.into_iter()); - match self_ty { - None => { - // If no self-type is provided, it's still possible that - // one was declared, because this could be an object type. - } - Some(ty) => { - // If a self-type is provided, one should have been - // "declared" (in other words, this should be a - // trait-ref). 
- assert!(decl_generics.types.get_self().is_some()); - substs.types.push(SelfSpace, ty); + match self_ty { + None => { + // If no self-type is provided, it's still possible that + // one was declared, because this could be an object type. + } + Some(ty) => { + // If a self-type is provided, one should have been + // "declared" (in other words, this should be a + // trait-ref). + assert!(decl_generics.types.get_self().is_some()); + substs.types.push(SelfSpace, ty); + } } - } - let actual_supplied_ty_param_count = substs.types.len(TypeSpace); - for param in &ty_param_defs[actual_supplied_ty_param_count..] { - if let Some(default) = param.default { - // If we are converting an object type, then the - // `Self` parameter is unknown. However, some of the - // other type parameters may reference `Self` in their - // defaults. This will lead to an ICE if we are not - // careful! - if self_ty.is_none() && default.has_self_ty() { - span_err!(tcx.sess, span, E0393, - "the type parameter `{}` must be explicitly specified \ - in an object type because its default value `{}` references \ - the type `Self`", - param.name, - default); - substs.types.push(TypeSpace, tcx.types.err); + let actual_supplied_ty_param_count = substs.types.len(TypeSpace); + for param in &ty_param_defs[actual_supplied_ty_param_count..] { + if let Some(default) = param.default { + // If we are converting an object type, then the + // `Self` parameter is unknown. However, some of the + // other type parameters may reference `Self` in their + // defaults. This will lead to an ICE if we are not + // careful! + if self_ty.is_none() && default.has_self_ty() { + span_err!(tcx.sess, span, E0393, + "the type parameter `{}` must be explicitly specified \ + in an object type because its default value `{}` references \ + the type `Self`", + param.name, + default); + substs.types.push(TypeSpace, tcx.types.err); + } else { + // This is a default type parameter. + let default = default.subst_spanned(tcx, + &substs, + Some(span)); + substs.types.push(TypeSpace, default); + } } else { - // This is a default type parameter. - let default = default.subst_spanned(tcx, - &substs, - Some(span)); - substs.types.push(TypeSpace, default); + span_bug!(span, "extra parameter without default"); } - } else { - span_bug!(span, "extra parameter without default"); } - } - debug!("create_substs_for_ast_path(decl_generics={:?}, self_ty={:?}) -> {:?}", - decl_generics, self_ty, substs); + debug!("create_substs_for_ast_path(decl_generics={:?}, self_ty={:?}) -> {:?}", + decl_generics, self_ty, substs); - substs -} + substs + } -/// Returns types_provided if it is not empty, otherwise populating the -/// type parameters with inference variables as appropriate. -fn get_type_substs_for_defs<'tcx>(this: &AstConv<'tcx>, - span: Span, - types_provided: Vec>, - param_mode: PathParamMode, - ty_param_defs: &[ty::TypeParameterDef<'tcx>], - mut substs: Substs<'tcx>, - self_ty: Option>) - -> Vec> -{ - fn default_type_parameter<'tcx>(p: &ty::TypeParameterDef<'tcx>, self_ty: Option>) - -> Option> + /// Returns types_provided if it is not empty, otherwise populating the + /// type parameters with inference variables as appropriate. 
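// A minimal case for the E0393 check above, with invented names: the default of
// `A` mentions `Self`, so it cannot be filled in when the trait is used as an
// object type (where `Self` has been erased) and must be written explicitly.
trait Source<A = Self> {
    fn get(&self) -> A;
}

// fn consume(s: &Source) {}                       // error[E0393]: `A` must be explicitly specified
fn consume(s: &Source<u32>) -> u32 { s.get() }     // fine: nothing refers to `Self` anymore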
+ fn get_type_substs_for_defs(&self, + span: Span, + types_provided: Vec>, + param_mode: PathParamMode, + ty_param_defs: &[ty::TypeParameterDef<'tcx>], + mut substs: Substs<'tcx>, + self_ty: Option>) + -> Vec> { - if let Some(ref default) = p.default { - if self_ty.is_none() && default.has_self_ty() { - // There is no suitable inference default for a type parameter - // that references self with no self-type provided. - return None; + fn default_type_parameter<'tcx>(p: &ty::TypeParameterDef<'tcx>, self_ty: Option>) + -> Option> + { + if let Some(ref default) = p.default { + if self_ty.is_none() && default.has_self_ty() { + // There is no suitable inference default for a type parameter + // that references self with no self-type provided. + return None; + } } + + Some(p.clone()) } - Some(p.clone()) + if param_mode == PathParamMode::Optional && types_provided.is_empty() { + ty_param_defs + .iter() + .map(|p| self.ty_infer(default_type_parameter(p, self_ty), Some(&mut substs), + Some(TypeSpace), span)) + .collect() + } else { + types_provided + } } - if param_mode == PathParamMode::Optional && types_provided.is_empty() { - ty_param_defs - .iter() - .map(|p| this.ty_infer(default_type_parameter(p, self_ty), Some(&mut substs), - Some(TypeSpace), span)) - .collect() - } else { - types_provided + fn convert_angle_bracketed_parameters(&self, + rscope: &RegionScope, + span: Span, + decl_generics: &ty::Generics<'tcx>, + data: &hir::AngleBracketedParameterData) + -> (Substs<'tcx>, + Vec>, + Vec>) + { + let regions: Vec<_> = + data.lifetimes.iter() + .map(|l| ast_region_to_region(self.tcx(), l)) + .collect(); + + let region_substs = + self.create_region_substs(rscope, span, decl_generics, regions); + + let types: Vec<_> = + data.types.iter() + .enumerate() + .map(|(i,t)| self.ast_ty_arg_to_ty(rscope, decl_generics, + i, ®ion_substs, t)) + .collect(); + + let assoc_bindings: Vec<_> = + data.bindings.iter() + .map(|b| ConvertedBinding { item_name: b.name, + ty: self.ast_ty_to_ty(rscope, &b.ty), + span: b.span }) + .collect(); + + (region_substs, types, assoc_bindings) } -} -struct ConvertedBinding<'tcx> { - item_name: ast::Name, - ty: Ty<'tcx>, - span: Span, -} + /// Returns the appropriate lifetime to use for any output lifetimes + /// (if one exists) and a vector of the (pattern, number of lifetimes) + /// corresponding to each input type/pattern. + fn find_implied_output_region(&self, + input_tys: &[Ty<'tcx>], + input_pats: Vec) -> ElidedLifetime + { + let tcx = self.tcx(); + let mut lifetimes_for_params = Vec::new(); + let mut possible_implied_output_region = None; + + for (input_type, input_pat) in input_tys.iter().zip(input_pats) { + let mut regions = FnvHashSet(); + let have_bound_regions = tcx.collect_regions(input_type, &mut regions); + + debug!("find_implied_output_regions: collected {:?} from {:?} \ + have_bound_regions={:?}", ®ions, input_type, have_bound_regions); + + if regions.len() == 1 { + // there's a chance that the unique lifetime of this + // iteration will be the appropriate lifetime for output + // parameters, so lets store it. 
+ possible_implied_output_region = regions.iter().cloned().next(); + } -fn convert_angle_bracketed_parameters<'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - decl_generics: &ty::Generics<'tcx>, - data: &hir::AngleBracketedParameterData) - -> (Substs<'tcx>, - Vec>, - Vec>) -{ - let regions: Vec<_> = - data.lifetimes.iter() - .map(|l| ast_region_to_region(this.tcx(), l)) - .collect(); + lifetimes_for_params.push(ElisionFailureInfo { + name: input_pat, + lifetime_count: regions.len(), + have_bound_regions: have_bound_regions + }); + } - let region_substs = - create_region_substs(this, rscope, span, decl_generics, regions); + if lifetimes_for_params.iter().map(|e| e.lifetime_count).sum::() == 1 { + Ok(possible_implied_output_region.unwrap()) + } else { + Err(Some(lifetimes_for_params)) + } + } - let types: Vec<_> = - data.types.iter() - .enumerate() - .map(|(i,t)| ast_ty_arg_to_ty(this, rscope, decl_generics, - i, ®ion_substs, t)) - .collect(); + fn convert_ty_with_lifetime_elision(&self, + elided_lifetime: ElidedLifetime, + ty: &hir::Ty) + -> Ty<'tcx> + { + match elided_lifetime { + Ok(implied_output_region) => { + let rb = ElidableRscope::new(implied_output_region); + self.ast_ty_to_ty(&rb, ty) + } + Err(param_lifetimes) => { + // All regions must be explicitly specified in the output + // if the lifetime elision rules do not apply. This saves + // the user from potentially-confusing errors. + let rb = UnelidableRscope::new(param_lifetimes); + self.ast_ty_to_ty(&rb, ty) + } + } + } - let assoc_bindings: Vec<_> = - data.bindings.iter() - .map(|b| ConvertedBinding { item_name: b.name, - ty: ast_ty_to_ty(this, rscope, &b.ty), - span: b.span }) - .collect(); + fn convert_parenthesized_parameters(&self, + rscope: &RegionScope, + span: Span, + decl_generics: &ty::Generics<'tcx>, + data: &hir::ParenthesizedParameterData) + -> (Substs<'tcx>, + Vec>, + Vec>) + { + let region_substs = + self.create_region_substs(rscope, span, decl_generics, Vec::new()); - (region_substs, types, assoc_bindings) -} + let binding_rscope = BindingRscope::new(); + let inputs = + data.inputs.iter() + .map(|a_t| self.ast_ty_arg_to_ty(&binding_rscope, decl_generics, + 0, ®ion_substs, a_t)) + .collect::>>(); -/// Returns the appropriate lifetime to use for any output lifetimes -/// (if one exists) and a vector of the (pattern, number of lifetimes) -/// corresponding to each input type/pattern. -fn find_implied_output_region<'tcx>(tcx: &TyCtxt<'tcx>, - input_tys: &[Ty<'tcx>], - input_pats: Vec) -> ElidedLifetime -{ - let mut lifetimes_for_params = Vec::new(); - let mut possible_implied_output_region = None; + let input_params = vec![String::new(); inputs.len()]; + let implied_output_region = self.find_implied_output_region(&inputs, input_params); - for (input_type, input_pat) in input_tys.iter().zip(input_pats) { - let mut regions = FnvHashSet(); - let have_bound_regions = tcx.collect_regions(input_type, &mut regions); + let input_ty = self.tcx().mk_tup(inputs); - debug!("find_implied_output_regions: collected {:?} from {:?} \ - have_bound_regions={:?}", ®ions, input_type, have_bound_regions); + let (output, output_span) = match data.output { + Some(ref output_ty) => { + (self.convert_ty_with_lifetime_elision(implied_output_region, &output_ty), + output_ty.span) + } + None => { + (self.tcx().mk_nil(), data.span) + } + }; - if regions.len() == 1 { - // there's a chance that the unique lifetime of this - // iteration will be the appropriate lifetime for output - // parameters, so lets store it. 
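// The elision rule implemented above, seen from the surface language: when the
// inputs contain exactly one lifetime, an elided output lifetime borrows from
// it; with more than one candidate, the output lifetime has to be spelled out.
fn first_word(s: &str) -> &str {                    // one input region: elision succeeds
    s.split_whitespace().next().unwrap_or("")
}

fn longer<'a>(a: &'a str, b: &'a str) -> &'a str {  // two input regions: annotate explicitly
    if a.len() >= b.len() { a } else { b }
}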
- possible_implied_output_region = regions.iter().cloned().next(); - } + let output_binding = ConvertedBinding { + item_name: token::intern(FN_OUTPUT_NAME), + ty: output, + span: output_span + }; - lifetimes_for_params.push(ElisionFailureInfo { - name: input_pat, - lifetime_count: regions.len(), - have_bound_regions: have_bound_regions - }); + (region_substs, vec![input_ty], vec![output_binding]) } - if lifetimes_for_params.iter().map(|e| e.lifetime_count).sum::() == 1 { - Ok(possible_implied_output_region.unwrap()) - } else { - Err(Some(lifetimes_for_params)) + pub fn instantiate_poly_trait_ref(&self, + rscope: &RegionScope, + ast_trait_ref: &hir::PolyTraitRef, + self_ty: Option>, + poly_projections: &mut Vec>) + -> ty::PolyTraitRef<'tcx> + { + let trait_ref = &ast_trait_ref.trait_ref; + let trait_def_id = self.trait_def_id(trait_ref); + self.ast_path_to_poly_trait_ref(rscope, + trait_ref.path.span, + PathParamMode::Explicit, + trait_def_id, + self_ty, + trait_ref.ref_id, + trait_ref.path.segments.last().unwrap(), + poly_projections) } -} -fn convert_ty_with_lifetime_elision<'tcx>(this: &AstConv<'tcx>, - elided_lifetime: ElidedLifetime, - ty: &hir::Ty) - -> Ty<'tcx> -{ - match elided_lifetime { - Ok(implied_output_region) => { - let rb = ElidableRscope::new(implied_output_region); - ast_ty_to_ty(this, &rb, ty) - } - Err(param_lifetimes) => { - // All regions must be explicitly specified in the output - // if the lifetime elision rules do not apply. This saves - // the user from potentially-confusing errors. - let rb = UnelidableRscope::new(param_lifetimes); - ast_ty_to_ty(this, &rb, ty) - } + /// Instantiates the path for the given trait reference, assuming that it's + /// bound to a valid trait type. Returns the def_id for the defining trait. + /// Fails if the type is a type other than a trait type. + /// + /// If the `projections` argument is `None`, then assoc type bindings like `Foo` + /// are disallowed. Otherwise, they are pushed onto the vector given. 
+ pub fn instantiate_mono_trait_ref(&self, + rscope: &RegionScope, + trait_ref: &hir::TraitRef, + self_ty: Option>) + -> ty::TraitRef<'tcx> + { + let trait_def_id = self.trait_def_id(trait_ref); + self.ast_path_to_mono_trait_ref(rscope, + trait_ref.path.span, + PathParamMode::Explicit, + trait_def_id, + self_ty, + trait_ref.path.segments.last().unwrap()) } -} - -fn convert_parenthesized_parameters<'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - decl_generics: &ty::Generics<'tcx>, - data: &hir::ParenthesizedParameterData) - -> (Substs<'tcx>, - Vec>, - Vec>) -{ - let region_substs = - create_region_substs(this, rscope, span, decl_generics, Vec::new()); - - let binding_rscope = BindingRscope::new(); - let inputs = - data.inputs.iter() - .map(|a_t| ast_ty_arg_to_ty(this, &binding_rscope, decl_generics, - 0, ®ion_substs, a_t)) - .collect::>>(); - - let input_params = vec![String::new(); inputs.len()]; - let implied_output_region = find_implied_output_region(this.tcx(), &inputs, input_params); - - let input_ty = this.tcx().mk_tup(inputs); - - let (output, output_span) = match data.output { - Some(ref output_ty) => { - (convert_ty_with_lifetime_elision(this, - implied_output_region, - &output_ty), - output_ty.span) - } - None => { - (this.tcx().mk_nil(), data.span) - } - }; - - let output_binding = ConvertedBinding { - item_name: token::intern(FN_OUTPUT_NAME), - ty: output, - span: output_span - }; - - (region_substs, vec![input_ty], vec![output_binding]) -} - -pub fn instantiate_poly_trait_ref<'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - ast_trait_ref: &hir::PolyTraitRef, - self_ty: Option>, - poly_projections: &mut Vec>) - -> ty::PolyTraitRef<'tcx> -{ - let trait_ref = &ast_trait_ref.trait_ref; - let trait_def_id = trait_def_id(this, trait_ref); - ast_path_to_poly_trait_ref(this, - rscope, - trait_ref.path.span, - PathParamMode::Explicit, - trait_def_id, - self_ty, - trait_ref.path.segments.last().unwrap(), - poly_projections) -} - -/// Instantiates the path for the given trait reference, assuming that it's -/// bound to a valid trait type. Returns the def_id for the defining trait. -/// Fails if the type is a type other than a trait type. -/// -/// If the `projections` argument is `None`, then assoc type bindings like `Foo` -/// are disallowed. Otherwise, they are pushed onto the vector given. 
-pub fn instantiate_mono_trait_ref<'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - trait_ref: &hir::TraitRef, - self_ty: Option>) - -> ty::TraitRef<'tcx> -{ - let trait_def_id = trait_def_id(this, trait_ref); - ast_path_to_mono_trait_ref(this, - rscope, - trait_ref.path.span, - PathParamMode::Explicit, - trait_def_id, - self_ty, - trait_ref.path.segments.last().unwrap()) -} -fn trait_def_id<'tcx>(this: &AstConv<'tcx>, trait_ref: &hir::TraitRef) -> DefId { - let path = &trait_ref.path; - match ::lookup_full_def(this.tcx(), path.span, trait_ref.ref_id) { - Def::Trait(trait_def_id) => trait_def_id, - Def::Err => { - this.tcx().sess.fatal("cannot continue compilation due to previous error"); - } - _ => { - span_fatal!(this.tcx().sess, path.span, E0245, "`{}` is not a trait", - path); + fn trait_def_id(&self, trait_ref: &hir::TraitRef) -> DefId { + let path = &trait_ref.path; + match ::lookup_full_def(self.tcx(), path.span, trait_ref.ref_id) { + Def::Trait(trait_def_id) => trait_def_id, + Def::Err => { + self.tcx().sess.fatal("cannot continue compilation due to previous error"); + } + _ => { + span_fatal!(self.tcx().sess, path.span, E0245, "`{}` is not a trait", + path); + } } } -} - -fn object_path_to_poly_trait_ref<'a,'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - param_mode: PathParamMode, - trait_def_id: DefId, - trait_segment: &hir::PathSegment, - mut projections: &mut Vec>) - -> ty::PolyTraitRef<'tcx> -{ - ast_path_to_poly_trait_ref(this, - rscope, - span, - param_mode, - trait_def_id, - None, - trait_segment, - projections) -} -fn ast_path_to_poly_trait_ref<'a,'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - param_mode: PathParamMode, - trait_def_id: DefId, - self_ty: Option>, - trait_segment: &hir::PathSegment, - poly_projections: &mut Vec>) - -> ty::PolyTraitRef<'tcx> -{ - debug!("ast_path_to_poly_trait_ref(trait_segment={:?})", trait_segment); - // The trait reference introduces a binding level here, so - // we need to shift the `rscope`. It'd be nice if we could - // do away with this rscope stuff and work this knowledge - // into resolve_lifetimes, as we do with non-omitted - // lifetimes. Oh well, not there yet. 
- let shifted_rscope = &ShiftedRscope::new(rscope); - - let (substs, assoc_bindings) = - create_substs_for_ast_trait_ref(this, - shifted_rscope, + fn object_path_to_poly_trait_ref(&self, + rscope: &RegionScope, + span: Span, + param_mode: PathParamMode, + trait_def_id: DefId, + trait_path_ref_id: ast::NodeId, + trait_segment: &hir::PathSegment, + mut projections: &mut Vec>) + -> ty::PolyTraitRef<'tcx> + { + self.ast_path_to_poly_trait_ref(rscope, span, param_mode, trait_def_id, - self_ty, - trait_segment); - let poly_trait_ref = ty::Binder(ty::TraitRef::new(trait_def_id, substs)); + None, + trait_path_ref_id, + trait_segment, + projections) + } + fn ast_path_to_poly_trait_ref(&self, + rscope: &RegionScope, + span: Span, + param_mode: PathParamMode, + trait_def_id: DefId, + self_ty: Option>, + path_id: ast::NodeId, + trait_segment: &hir::PathSegment, + poly_projections: &mut Vec>) + -> ty::PolyTraitRef<'tcx> { - let converted_bindings = - assoc_bindings - .iter() - .filter_map(|binding| { - // specify type to assert that error was already reported in Err case: - let predicate: Result<_, ErrorReported> = - ast_type_binding_to_poly_projection_predicate(this, - poly_trait_ref.clone(), - self_ty, - binding); - predicate.ok() // ok to ignore Err() because ErrorReported (see above) - }); - poly_projections.extend(converted_bindings); + debug!("ast_path_to_poly_trait_ref(trait_segment={:?})", trait_segment); + // The trait reference introduces a binding level here, so + // we need to shift the `rscope`. It'd be nice if we could + // do away with this rscope stuff and work this knowledge + // into resolve_lifetimes, as we do with non-omitted + // lifetimes. Oh well, not there yet. + let shifted_rscope = &ShiftedRscope::new(rscope); + + let (substs, assoc_bindings) = + self.create_substs_for_ast_trait_ref(shifted_rscope, + span, + param_mode, + trait_def_id, + self_ty, + trait_segment); + let poly_trait_ref = ty::Binder(ty::TraitRef::new(trait_def_id, substs)); + + { + let converted_bindings = + assoc_bindings + .iter() + .filter_map(|binding| { + // specify type to assert that error was already reported in Err case: + let predicate: Result<_, ErrorReported> = + self.ast_type_binding_to_poly_projection_predicate(path_id, + poly_trait_ref.clone(), + self_ty, + binding); + predicate.ok() // ok to ignore Err() because ErrorReported (see above) + }); + poly_projections.extend(converted_bindings); + } + + debug!("ast_path_to_poly_trait_ref(trait_segment={:?}, projections={:?}) -> {:?}", + trait_segment, poly_projections, poly_trait_ref); + poly_trait_ref } - debug!("ast_path_to_poly_trait_ref(trait_segment={:?}, projections={:?}) -> {:?}", - trait_segment, poly_projections, poly_trait_ref); - poly_trait_ref -} + fn ast_path_to_mono_trait_ref(&self, + rscope: &RegionScope, + span: Span, + param_mode: PathParamMode, + trait_def_id: DefId, + self_ty: Option>, + trait_segment: &hir::PathSegment) + -> ty::TraitRef<'tcx> + { + let (substs, assoc_bindings) = + self.create_substs_for_ast_trait_ref(rscope, + span, + param_mode, + trait_def_id, + self_ty, + trait_segment); + assoc_bindings.first().map(|b| self.tcx().prohibit_projection(b.span)); + ty::TraitRef::new(trait_def_id, substs) + } -fn ast_path_to_mono_trait_ref<'a,'tcx>(this: &AstConv<'tcx>, + fn create_substs_for_ast_trait_ref(&self, rscope: &RegionScope, span: Span, param_mode: PathParamMode, trait_def_id: DefId, self_ty: Option>, trait_segment: &hir::PathSegment) - -> ty::TraitRef<'tcx> -{ - let (substs, assoc_bindings) = - 
create_substs_for_ast_trait_ref(this, - rscope, - span, - param_mode, - trait_def_id, - self_ty, - trait_segment); - prohibit_projections(this.tcx(), &assoc_bindings); - ty::TraitRef::new(trait_def_id, substs) -} + -> (&'tcx Substs<'tcx>, Vec>) + { + debug!("create_substs_for_ast_trait_ref(trait_segment={:?})", + trait_segment); + + let trait_def = match self.get_trait_def(span, trait_def_id) { + Ok(trait_def) => trait_def, + Err(ErrorReported) => { + // No convenient way to recover from a cycle here. Just bail. Sorry! + self.tcx().sess.abort_if_errors(); + bug!("ErrorReported returned, but no errors reports?") + } + }; -fn create_substs_for_ast_trait_ref<'a,'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - param_mode: PathParamMode, - trait_def_id: DefId, - self_ty: Option>, - trait_segment: &hir::PathSegment) - -> (&'tcx Substs<'tcx>, Vec>) -{ - debug!("create_substs_for_ast_trait_ref(trait_segment={:?})", - trait_segment); - - let trait_def = match this.get_trait_def(span, trait_def_id) { - Ok(trait_def) => trait_def, - Err(ErrorReported) => { - // No convenient way to recover from a cycle here. Just bail. Sorry! - this.tcx().sess.abort_if_errors(); - bug!("ErrorReported returned, but no errors reports?") - } - }; + let (regions, types, assoc_bindings) = match trait_segment.parameters { + hir::AngleBracketedParameters(ref data) => { + // For now, require that parenthetical notation be used + // only with `Fn()` etc. + if !self.tcx().sess.features.borrow().unboxed_closures && trait_def.paren_sugar { + emit_feature_err(&self.tcx().sess.parse_sess.span_diagnostic, + "unboxed_closures", span, GateIssue::Language, + "\ + the precise format of `Fn`-family traits' \ + type parameters is subject to change. \ + Use parenthetical notation (Fn(Foo, Bar) -> Baz) instead"); + } - let (regions, types, assoc_bindings) = match trait_segment.parameters { - hir::AngleBracketedParameters(ref data) => { - // For now, require that parenthetical notation be used - // only with `Fn()` etc. - if !this.tcx().sess.features.borrow().unboxed_closures && trait_def.paren_sugar { - emit_feature_err(&this.tcx().sess.parse_sess.span_diagnostic, - "unboxed_closures", span, GateIssue::Language, - "\ - the precise format of `Fn`-family traits' type parameters is \ - subject to change. Use parenthetical notation (Fn(Foo, Bar) -> Baz) instead"); + self.convert_angle_bracketed_parameters(rscope, span, &trait_def.generics, data) } + hir::ParenthesizedParameters(ref data) => { + // For now, require that parenthetical notation be used + // only with `Fn()` etc. + if !self.tcx().sess.features.borrow().unboxed_closures && !trait_def.paren_sugar { + emit_feature_err(&self.tcx().sess.parse_sess.span_diagnostic, + "unboxed_closures", span, GateIssue::Language, + "\ + parenthetical notation is only stable when used with `Fn`-family traits"); + } - convert_angle_bracketed_parameters(this, rscope, span, &trait_def.generics, data) - } - hir::ParenthesizedParameters(ref data) => { - // For now, require that parenthetical notation be used - // only with `Fn()` etc. 
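// What the parenthesized-parameter conversion amounts to at the surface, with
// invented names: `Fn(u8, bool) -> String` is treated as one tuple of inputs
// plus an `Output` associated-type binding, while the desugared angle-bracket
// form stays behind the `unboxed_closures` feature gate checked above.
fn call_it<F>(f: F) -> String
    where F: Fn(u8, bool) -> String     // sugar for `Fn<(u8, bool), Output = String>`
{
    f(7, true)
}

fn main() {
    assert_eq!(call_it(|n, b| format!("{} {}", n, b)), "7 true");
}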
- if !this.tcx().sess.features.borrow().unboxed_closures && !trait_def.paren_sugar { - emit_feature_err(&this.tcx().sess.parse_sess.span_diagnostic, - "unboxed_closures", span, GateIssue::Language, - "\ - parenthetical notation is only stable when used with `Fn`-family traits"); + self.convert_parenthesized_parameters(rscope, span, &trait_def.generics, data) } + }; - convert_parenthesized_parameters(this, rscope, span, &trait_def.generics, data) - } - }; + let substs = self.create_substs_for_ast_path(span, + param_mode, + &trait_def.generics, + self_ty, + types, + regions); - let substs = create_substs_for_ast_path(this, - span, - param_mode, - &trait_def.generics, - self_ty, - types, - regions); + (self.tcx().mk_substs(substs), assoc_bindings) + } - (this.tcx().mk_substs(substs), assoc_bindings) -} + fn ast_type_binding_to_poly_projection_predicate( + &self, + path_id: ast::NodeId, + mut trait_ref: ty::PolyTraitRef<'tcx>, + self_ty: Option>, + binding: &ConvertedBinding<'tcx>) + -> Result, ErrorReported> + { + let tcx = self.tcx(); + + // Given something like `U : SomeTrait`, we want to produce a + // predicate like `::T = X`. This is somewhat + // subtle in the event that `T` is defined in a supertrait of + // `SomeTrait`, because in that case we need to upcast. + // + // That is, consider this case: + // + // ``` + // trait SubTrait : SuperTrait { } + // trait SuperTrait { type T; } + // + // ... B : SubTrait ... + // ``` + // + // We want to produce `>::T == foo`. + + // Find any late-bound regions declared in `ty` that are not + // declared in the trait-ref. These are not wellformed. + // + // Example: + // + // for<'a> ::Item = &'a str // <-- 'a is bad + // for<'a> >::Output = &'a str // <-- 'a is ok + let late_bound_in_trait_ref = tcx.collect_constrained_late_bound_regions(&trait_ref); + let late_bound_in_ty = tcx.collect_referenced_late_bound_regions(&ty::Binder(binding.ty)); + debug!("late_bound_in_trait_ref = {:?}", late_bound_in_trait_ref); + debug!("late_bound_in_ty = {:?}", late_bound_in_ty); + for br in late_bound_in_ty.difference(&late_bound_in_trait_ref) { + let br_name = match *br { + ty::BrNamed(_, name) => name, + _ => { + span_bug!( + binding.span, + "anonymous bound region {:?} in binding but not trait ref", + br); + } + }; + tcx.sess.add_lint( + lint::builtin::HR_LIFETIME_IN_ASSOC_TYPE, + path_id, + binding.span, + format!("binding for associated type `{}` references lifetime `{}`, \ + which does not appear in the trait input types", + binding.item_name, br_name)); + } + + // Simple case: X is defined in the current trait. + if self.trait_defines_associated_type_named(trait_ref.def_id(), binding.item_name) { + return Ok(ty::Binder(ty::ProjectionPredicate { // <-------------------+ + projection_ty: ty::ProjectionTy { // | + trait_ref: trait_ref.skip_binder().clone(), // Binder moved here --+ + item_name: binding.item_name, + }, + ty: binding.ty, + })); + } + + // Otherwise, we have to walk through the supertraits to find + // those that do. This is complicated by the fact that, for an + // object type, the `Self` type is not present in the + // substitutions (after all, it's being constructed right now), + // but the `supertraits` iterator really wants one. To handle + // this, we currently insert a dummy type and then remove it + // later. Yuck. 
+ + let dummy_self_ty = tcx.mk_infer(ty::FreshTy(0)); + if self_ty.is_none() { // if converting for an object type + let mut dummy_substs = trait_ref.skip_binder().substs.clone(); // binder moved here -+ + assert!(dummy_substs.self_ty().is_none()); // | + dummy_substs.types.push(SelfSpace, dummy_self_ty); // | + trait_ref = ty::Binder(ty::TraitRef::new(trait_ref.def_id(), // <------------+ + tcx.mk_substs(dummy_substs))); + } + + self.ensure_super_predicates(binding.span, trait_ref.def_id())?; + + let mut candidates: Vec = + traits::supertraits(tcx, trait_ref.clone()) + .filter(|r| self.trait_defines_associated_type_named(r.def_id(), binding.item_name)) + .collect(); + + // If converting for an object type, then remove the dummy-ty from `Self` now. + // Yuckety yuck. + if self_ty.is_none() { + for candidate in &mut candidates { + let mut dummy_substs = candidate.0.substs.clone(); + assert!(dummy_substs.self_ty() == Some(dummy_self_ty)); + dummy_substs.types.pop(SelfSpace); + *candidate = ty::Binder(ty::TraitRef::new(candidate.def_id(), + tcx.mk_substs(dummy_substs))); + } + } -fn ast_type_binding_to_poly_projection_predicate<'tcx>( - this: &AstConv<'tcx>, - mut trait_ref: ty::PolyTraitRef<'tcx>, - self_ty: Option>, - binding: &ConvertedBinding<'tcx>) - -> Result, ErrorReported> -{ - let tcx = this.tcx(); - - // Given something like `U : SomeTrait`, we want to produce a - // predicate like `::T = X`. This is somewhat - // subtle in the event that `T` is defined in a supertrait of - // `SomeTrait`, because in that case we need to upcast. - // - // That is, consider this case: - // - // ``` - // trait SubTrait : SuperTrait { } - // trait SuperTrait { type T; } - // - // ... B : SubTrait ... - // ``` - // - // We want to produce `>::T == foo`. - - // Simple case: X is defined in the current trait. - if this.trait_defines_associated_type_named(trait_ref.def_id(), binding.item_name) { - return Ok(ty::Binder(ty::ProjectionPredicate { // <-------------------+ - projection_ty: ty::ProjectionTy { // | - trait_ref: trait_ref.skip_binder().clone(), // Binder moved here --+ + let candidate = self.one_bound_for_assoc_type(candidates, + &trait_ref.to_string(), + &binding.item_name.as_str(), + binding.span)?; + + Ok(ty::Binder(ty::ProjectionPredicate { // <-------------------------+ + projection_ty: ty::ProjectionTy { // | + trait_ref: candidate.skip_binder().clone(), // binder is moved up here --+ item_name: binding.item_name, }, ty: binding.ty, - })); + })) } - // Otherwise, we have to walk through the supertraits to find - // those that do. This is complicated by the fact that, for an - // object type, the `Self` type is not present in the - // substitutions (after all, it's being constructed right now), - // but the `supertraits` iterator really wants one. To handle - // this, we currently insert a dummy type and then remove it - // later. Yuck. 
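// Surface-level picture of the conversion above, with invented traits: the
// binding `T = String` is written on `SubTrait`, but `T` is declared on
// `SuperTrait`, so the predicate produced is `<B as SuperTrait>::T == String`.
trait SuperTrait { type T; }
trait SubTrait: SuperTrait {}

fn into_string<B: SubTrait<T = String>>(value: <B as SuperTrait>::T) -> String {
    value   // the projection normalizes to `String` under the bound above
}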
- - let dummy_self_ty = tcx.mk_infer(ty::FreshTy(0)); - if self_ty.is_none() { // if converting for an object type - let mut dummy_substs = trait_ref.skip_binder().substs.clone(); // binder moved here -+ - assert!(dummy_substs.self_ty().is_none()); // | - dummy_substs.types.push(SelfSpace, dummy_self_ty); // | - trait_ref = ty::Binder(ty::TraitRef::new(trait_ref.def_id(), // <------------+ - tcx.mk_substs(dummy_substs))); - } - - this.ensure_super_predicates(binding.span, trait_ref.def_id())?; - - let mut candidates: Vec = - traits::supertraits(tcx, trait_ref.clone()) - .filter(|r| this.trait_defines_associated_type_named(r.def_id(), binding.item_name)) - .collect(); - - // If converting for an object type, then remove the dummy-ty from `Self` now. - // Yuckety yuck. - if self_ty.is_none() { - for candidate in &mut candidates { - let mut dummy_substs = candidate.0.substs.clone(); - assert!(dummy_substs.self_ty() == Some(dummy_self_ty)); - dummy_substs.types.pop(SelfSpace); - *candidate = ty::Binder(ty::TraitRef::new(candidate.def_id(), - tcx.mk_substs(dummy_substs))); - } - } + fn ast_path_to_ty(&self, + rscope: &RegionScope, + span: Span, + param_mode: PathParamMode, + did: DefId, + item_segment: &hir::PathSegment) + -> Ty<'tcx> + { + let tcx = self.tcx(); + let (generics, decl_ty) = match self.get_item_type_scheme(span, did) { + Ok(ty::TypeScheme { generics, ty: decl_ty }) => { + (generics, decl_ty) + } + Err(ErrorReported) => { + return tcx.types.err; + } + }; - let candidate = one_bound_for_assoc_type(tcx, - candidates, - &trait_ref.to_string(), - &binding.item_name.as_str(), - binding.span)?; - - Ok(ty::Binder(ty::ProjectionPredicate { // <-------------------------+ - projection_ty: ty::ProjectionTy { // | - trait_ref: candidate.skip_binder().clone(), // binder is moved up here --+ - item_name: binding.item_name, - }, - ty: binding.ty, - })) -} + let substs = self.ast_path_substs_for_ty(rscope, + span, + param_mode, + &generics, + item_segment); -fn ast_path_to_ty<'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - param_mode: PathParamMode, - did: DefId, - item_segment: &hir::PathSegment) - -> Ty<'tcx> -{ - let tcx = this.tcx(); - let (generics, decl_ty) = match this.get_item_type_scheme(span, did) { - Ok(ty::TypeScheme { generics, ty: decl_ty }) => { - (generics, decl_ty) - } - Err(ErrorReported) => { - return tcx.types.err; + // FIXME(#12938): This is a hack until we have full support for DST. + if Some(did) == self.tcx().lang_items.owned_box() { + assert_eq!(substs.types.len(TypeSpace), 1); + return self.tcx().mk_box(*substs.types.get(TypeSpace, 0)); } - }; - - let substs = ast_path_substs_for_ty(this, - rscope, - span, - param_mode, - &generics, - item_segment); - // FIXME(#12938): This is a hack until we have full support for DST. - if Some(did) == this.tcx().lang_items.owned_box() { - assert_eq!(substs.types.len(TypeSpace), 1); - return this.tcx().mk_box(*substs.types.get(TypeSpace, 0)); + decl_ty.subst(self.tcx(), &substs) } - decl_ty.subst(this.tcx(), &substs) -} - -type TraitAndProjections<'tcx> = (ty::PolyTraitRef<'tcx>, Vec>); - -fn ast_ty_to_trait_ref<'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - ty: &hir::Ty, - bounds: &[hir::TyParamBound]) - -> Result, ErrorReported> -{ - /*! - * In a type like `Foo + Send`, we want to wait to collect the - * full set of bounds before we make the object type, because we - * need them to infer a region bound. 
(For example, if we tried - * made a type from just `Foo`, then it wouldn't be enough to - * infer a 'static bound, and hence the user would get an error.) - * So this function is used when we're dealing with a sum type to - * convert the LHS. It only accepts a type that refers to a trait - * name, and reports an error otherwise. - */ - - match ty.node { - hir::TyPath(None, ref path) => { - let def = match this.tcx().def_map.borrow().get(&ty.id) { - Some(&def::PathResolution { base_def, depth: 0, .. }) => Some(base_def), - _ => None - }; - match def { - Some(Def::Trait(trait_def_id)) => { - let mut projection_bounds = Vec::new(); - let trait_ref = object_path_to_poly_trait_ref(this, - rscope, - path.span, - PathParamMode::Explicit, - trait_def_id, - path.segments.last().unwrap(), - &mut projection_bounds); - Ok((trait_ref, projection_bounds)) - } - _ => { - span_err!(this.tcx().sess, ty.span, E0172, "expected a reference to a trait"); - Err(ErrorReported) + fn ast_ty_to_trait_ref(&self, + rscope: &RegionScope, + ty: &hir::Ty, + bounds: &[hir::TyParamBound]) + -> Result, ErrorReported> + { + /*! + * In a type like `Foo + Send`, we want to wait to collect the + * full set of bounds before we make the object type, because we + * need them to infer a region bound. (For example, if we tried + * made a type from just `Foo`, then it wouldn't be enough to + * infer a 'static bound, and hence the user would get an error.) + * So this function is used when we're dealing with a sum type to + * convert the LHS. It only accepts a type that refers to a trait + * name, and reports an error otherwise. + */ + + match ty.node { + hir::TyPath(None, ref path) => { + let def = match self.tcx().def_map.borrow().get(&ty.id) { + Some(&def::PathResolution { base_def, depth: 0, .. 
}) => Some(base_def), + _ => None + }; + match def { + Some(Def::Trait(trait_def_id)) => { + let mut projection_bounds = Vec::new(); + let trait_ref = + self.object_path_to_poly_trait_ref(rscope, + path.span, + PathParamMode::Explicit, + trait_def_id, + ty.id, + path.segments.last().unwrap(), + &mut projection_bounds); + Ok((trait_ref, projection_bounds)) + } + _ => { + span_err!(self.tcx().sess, ty.span, E0172, + "expected a reference to a trait"); + Err(ErrorReported) + } } } - } - _ => { - let mut err = struct_span_err!(this.tcx().sess, ty.span, E0178, - "expected a path on the left-hand side of `+`, not `{}`", - pprust::ty_to_string(ty)); - let hi = bounds.iter().map(|x| match *x { - hir::TraitTyParamBound(ref tr, _) => tr.span.hi, - hir::RegionTyParamBound(ref r) => r.span.hi, - }).max_by_key(|x| x.to_usize()); - let full_span = hi.map(|hi| Span { - lo: ty.span.lo, - hi: hi, - expn_id: ty.span.expn_id, - }); - match (&ty.node, full_span) { - (&hir::TyRptr(None, ref mut_ty), Some(full_span)) => { - let mutbl_str = if mut_ty.mutbl == hir::MutMutable { "mut " } else { "" }; - err.span_suggestion(full_span, "try adding parentheses (per RFC 438):", - format!("&{}({} +{})", - mutbl_str, - pprust::ty_to_string(&mut_ty.ty), - pprust::bounds_to_string(bounds))); - } - (&hir::TyRptr(Some(ref lt), ref mut_ty), Some(full_span)) => { - let mutbl_str = if mut_ty.mutbl == hir::MutMutable { "mut " } else { "" }; - err.span_suggestion(full_span, "try adding parentheses (per RFC 438):", - format!("&{} {}({} +{})", - pprust::lifetime_to_string(lt), - mutbl_str, - pprust::ty_to_string(&mut_ty.ty), - pprust::bounds_to_string(bounds))); - } + _ => { + let mut err = struct_span_err!(self.tcx().sess, ty.span, E0178, + "expected a path on the left-hand side \ + of `+`, not `{}`", + pprust::ty_to_string(ty)); + let hi = bounds.iter().map(|x| match *x { + hir::TraitTyParamBound(ref tr, _) => tr.span.hi, + hir::RegionTyParamBound(ref r) => r.span.hi, + }).max_by_key(|x| x.to_usize()); + let full_span = hi.map(|hi| Span { + lo: ty.span.lo, + hi: hi, + expn_id: ty.span.expn_id, + }); + match (&ty.node, full_span) { + (&hir::TyRptr(None, ref mut_ty), Some(full_span)) => { + let mutbl_str = if mut_ty.mutbl == hir::MutMutable { "mut " } else { "" }; + err.span_suggestion(full_span, "try adding parentheses (per RFC 438):", + format!("&{}({} +{})", + mutbl_str, + pprust::ty_to_string(&mut_ty.ty), + pprust::bounds_to_string(bounds))); + } + (&hir::TyRptr(Some(ref lt), ref mut_ty), Some(full_span)) => { + let mutbl_str = if mut_ty.mutbl == hir::MutMutable { "mut " } else { "" }; + err.span_suggestion(full_span, "try adding parentheses (per RFC 438):", + format!("&{} {}({} +{})", + pprust::lifetime_to_string(lt), + mutbl_str, + pprust::ty_to_string(&mut_ty.ty), + pprust::bounds_to_string(bounds))); + } - _ => { - fileline_help!(&mut err, ty.span, - "perhaps you forgot parentheses? (per RFC 438)"); + _ => { + help!(&mut err, + "perhaps you forgot parentheses? 
(per RFC 438)"); + } } + err.emit(); + Err(ErrorReported) } - err.emit(); - Err(ErrorReported) } } -} -fn trait_ref_to_object_type<'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - trait_ref: ty::PolyTraitRef<'tcx>, - projection_bounds: Vec>, - bounds: &[hir::TyParamBound]) - -> Ty<'tcx> -{ - let existential_bounds = conv_existential_bounds(this, - rscope, - span, - trait_ref.clone(), - projection_bounds, - bounds); - - let result = make_object_type(this, span, trait_ref, existential_bounds); - debug!("trait_ref_to_object_type: result={:?}", - result); - - result -} + fn trait_ref_to_object_type(&self, + rscope: &RegionScope, + span: Span, + trait_ref: ty::PolyTraitRef<'tcx>, + projection_bounds: Vec>, + bounds: &[hir::TyParamBound]) + -> Ty<'tcx> + { + let existential_bounds = self.conv_existential_bounds(rscope, + span, + trait_ref.clone(), + projection_bounds, + bounds); -fn make_object_type<'tcx>(this: &AstConv<'tcx>, - span: Span, - principal: ty::PolyTraitRef<'tcx>, - bounds: ty::ExistentialBounds<'tcx>) - -> Ty<'tcx> { - let tcx = this.tcx(); - let object = ty::TraitTy { - principal: principal, - bounds: bounds - }; - let object_trait_ref = - object.principal_trait_ref_with_self_ty(tcx, tcx.types.err); + let result = self.make_object_type(span, trait_ref, existential_bounds); + debug!("trait_ref_to_object_type: result={:?}", + result); - // ensure the super predicates and stop if we encountered an error - if this.ensure_super_predicates(span, principal.def_id()).is_err() { - return tcx.types.err; + result } - // check that there are no gross object safety violations, - // most importantly, that the supertraits don't contain Self, - // to avoid ICE-s. - let object_safety_violations = - traits::astconv_object_safety_violations(tcx, principal.def_id()); - if !object_safety_violations.is_empty() { - traits::report_object_safety_error( - tcx, span, principal.def_id(), object_safety_violations) - .emit(); - return tcx.types.err; - } + fn make_object_type(&self, + span: Span, + principal: ty::PolyTraitRef<'tcx>, + bounds: ty::ExistentialBounds<'tcx>) + -> Ty<'tcx> { + let tcx = self.tcx(); + let object = ty::TraitTy { + principal: principal, + bounds: bounds + }; + let object_trait_ref = + object.principal_trait_ref_with_self_ty(tcx, tcx.types.err); - let mut associated_types: FnvHashSet<(DefId, ast::Name)> = - traits::supertraits(tcx, object_trait_ref) - .flat_map(|tr| { - let trait_def = tcx.lookup_trait_def(tr.def_id()); - trait_def.associated_type_names - .clone() - .into_iter() - .map(move |associated_type_name| (tr.def_id(), associated_type_name)) - }) - .collect(); - - for projection_bound in &object.bounds.projection_bounds { - let pair = (projection_bound.0.projection_ty.trait_ref.def_id, - projection_bound.0.projection_ty.item_name); - associated_types.remove(&pair); - } + // ensure the super predicates and stop if we encountered an error + if self.ensure_super_predicates(span, principal.def_id()).is_err() { + return tcx.types.err; + } + + // check that there are no gross object safety violations, + // most importantly, that the supertraits don't contain Self, + // to avoid ICE-s. 
+ let object_safety_violations = + tcx.astconv_object_safety_violations(principal.def_id()); + if !object_safety_violations.is_empty() { + tcx.report_object_safety_error( + span, principal.def_id(), None, object_safety_violations) + .unwrap().emit(); + return tcx.types.err; + } + + let mut associated_types: FnvHashSet<(DefId, ast::Name)> = + traits::supertraits(tcx, object_trait_ref) + .flat_map(|tr| { + let trait_def = tcx.lookup_trait_def(tr.def_id()); + trait_def.associated_type_names + .clone() + .into_iter() + .map(move |associated_type_name| (tr.def_id(), associated_type_name)) + }) + .collect(); + + for projection_bound in &object.bounds.projection_bounds { + let pair = (projection_bound.0.projection_ty.trait_ref.def_id, + projection_bound.0.projection_ty.item_name); + associated_types.remove(&pair); + } + + for (trait_def_id, name) in associated_types { + span_err!(tcx.sess, span, E0191, + "the value of the associated type `{}` (from the trait `{}`) must be specified", + name, + tcx.item_path_str(trait_def_id)); + } - for (trait_def_id, name) in associated_types { - span_err!(tcx.sess, span, E0191, - "the value of the associated type `{}` (from the trait `{}`) must be specified", - name, - tcx.item_path_str(trait_def_id)); + tcx.mk_trait(object.principal, object.bounds) } - tcx.mk_trait(object.principal, object.bounds) -} + fn report_ambiguous_associated_type(&self, + span: Span, + type_str: &str, + trait_str: &str, + name: &str) { + span_err!(self.tcx().sess, span, E0223, + "ambiguous associated type; specify the type using the syntax \ + `<{} as {}>::{}`", + type_str, trait_str, name); + } -fn report_ambiguous_associated_type(tcx: &TyCtxt, - span: Span, - type_str: &str, - trait_str: &str, - name: &str) { - span_err!(tcx.sess, span, E0223, - "ambiguous associated type; specify the type using the syntax \ - `<{} as {}>::{}`", - type_str, trait_str, name); -} + // Search for a bound on a type parameter which includes the associated item + // given by assoc_name. ty_param_node_id is the node id for the type parameter + // (which might be `Self`, but only if it is the `Self` of a trait, not an + // impl). This function will fail if there are no suitable bounds or there is + // any ambiguity. + fn find_bound_for_assoc_item(&self, + ty_param_node_id: ast::NodeId, + ty_param_name: ast::Name, + assoc_name: ast::Name, + span: Span) + -> Result, ErrorReported> + { + let tcx = self.tcx(); -// Search for a bound on a type parameter which includes the associated item -// given by assoc_name. ty_param_node_id is the node id for the type parameter -// (which might be `Self`, but only if it is the `Self` of a trait, not an -// impl). This function will fail if there are no suitable bounds or there is -// any ambiguity. -fn find_bound_for_assoc_item<'tcx>(this: &AstConv<'tcx>, - ty_param_node_id: ast::NodeId, - ty_param_name: ast::Name, - assoc_name: ast::Name, - span: Span) - -> Result, ErrorReported> -{ - let tcx = this.tcx(); + let bounds = match self.get_type_parameter_bounds(span, ty_param_node_id) { + Ok(v) => v, + Err(ErrorReported) => { + return Err(ErrorReported); + } + }; - let bounds = match this.get_type_parameter_bounds(span, ty_param_node_id) { - Ok(v) => v, - Err(ErrorReported) => { + // Ensure the super predicates and stop if we encountered an error. + if bounds.iter().any(|b| self.ensure_super_predicates(span, b.def_id()).is_err()) { return Err(ErrorReported); } - }; - // Ensure the super predicates and stop if we encountered an error. 
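// The E0191 check above: every associated type of the traits behind an object
// type has to be pinned down by a binding (invented trait for illustration).
trait Producer {
    type Item;
    fn produce(&mut self) -> Self::Item;
}

// fn run(p: &mut Producer) {}                          // error[E0191]: the value of `Item` must be specified
fn run(p: &mut Producer<Item = String>) -> String {     // complete object type
    p.produce()
}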
- if bounds.iter().any(|b| this.ensure_super_predicates(span, b.def_id()).is_err()) { - return Err(ErrorReported); + // Check that there is exactly one way to find an associated type with the + // correct name. + let suitable_bounds: Vec<_> = + traits::transitive_bounds(tcx, &bounds) + .filter(|b| self.trait_defines_associated_type_named(b.def_id(), assoc_name)) + .collect(); + + self.one_bound_for_assoc_type(suitable_bounds, + &ty_param_name.as_str(), + &assoc_name.as_str(), + span) } - // Check that there is exactly one way to find an associated type with the - // correct name. - let suitable_bounds: Vec<_> = - traits::transitive_bounds(tcx, &bounds) - .filter(|b| this.trait_defines_associated_type_named(b.def_id(), assoc_name)) - .collect(); - - one_bound_for_assoc_type(tcx, - suitable_bounds, - &ty_param_name.as_str(), - &assoc_name.as_str(), - span) -} + // Checks that bounds contains exactly one element and reports appropriate + // errors otherwise. + fn one_bound_for_assoc_type(&self, + bounds: Vec>, + ty_param_name: &str, + assoc_name: &str, + span: Span) + -> Result, ErrorReported> + { + if bounds.is_empty() { + span_err!(self.tcx().sess, span, E0220, + "associated type `{}` not found for `{}`", + assoc_name, + ty_param_name); + return Err(ErrorReported); + } -// Checks that bounds contains exactly one element and reports appropriate -// errors otherwise. -fn one_bound_for_assoc_type<'tcx>(tcx: &TyCtxt<'tcx>, - bounds: Vec>, - ty_param_name: &str, - assoc_name: &str, - span: Span) - -> Result, ErrorReported> -{ - if bounds.is_empty() { - span_err!(tcx.sess, span, E0220, - "associated type `{}` not found for `{}`", - assoc_name, - ty_param_name); - return Err(ErrorReported); - } + if bounds.len() > 1 { + let mut err = struct_span_err!(self.tcx().sess, span, E0221, + "ambiguous associated type `{}` in bounds of `{}`", + assoc_name, + ty_param_name); - if bounds.len() > 1 { - let mut err = struct_span_err!(tcx.sess, span, E0221, - "ambiguous associated type `{}` in bounds of `{}`", - assoc_name, - ty_param_name); - - for bound in &bounds { - span_note!(&mut err, span, - "associated type `{}` could derive from `{}`", - ty_param_name, - bound); + for bound in &bounds { + span_note!(&mut err, span, + "associated type `{}` could derive from `{}`", + ty_param_name, + bound); + } + err.emit(); } - err.emit(); + + Ok(bounds[0].clone()) } - Ok(bounds[0].clone()) -} + // Create a type from a path to an associated type. + // For a path A::B::C::D, ty and ty_path_def are the type and def for A::B::C + // and item_segment is the path segment for D. We return a type and a def for + // the whole path. + // Will fail except for T::A and Self::A; i.e., if ty/ty_path_def are not a type + // parameter or Self. + fn associated_path_def_to_ty(&self, + span: Span, + ty: Ty<'tcx>, + ty_path_def: Def, + item_segment: &hir::PathSegment) + -> (Ty<'tcx>, Def) + { + let tcx = self.tcx(); + let assoc_name = item_segment.name; + + debug!("associated_path_def_to_ty: {:?}::{}", ty, assoc_name); + + tcx.prohibit_type_params(slice::ref_slice(item_segment)); + + // Find the type of the associated item, and the trait where the associated + // item is declared. + let bound = match (&ty.sty, ty_path_def) { + (_, Def::SelfTy(Some(trait_did), Some(impl_id))) => { + // `Self` in an impl of a trait - we have a concrete self type and a + // trait reference. 
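// one_bound_for_assoc_type above requires a unique source for `T::Assoc`: with
// two bounds that both define an `Out`, the short form is ambiguous and a fully
// qualified path names the intended trait (invented traits shown).
trait Parse { type Out; fn parse(&self) -> Self::Out; }
trait Render { type Out; fn render(&self) -> Self::Out; }

// fn go<T: Parse + Render>(x: &T) -> T::Out { x.parse() }   // error[E0221]: ambiguous associated type `Out`
fn go<T: Parse + Render>(x: &T) -> <T as Parse>::Out {       // disambiguated with a qualified path
    x.parse()
}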
+ let trait_ref = tcx.impl_trait_ref(tcx.map.local_def_id(impl_id)).unwrap(); + let trait_ref = if let Some(free_substs) = self.get_free_substs() { + trait_ref.subst(tcx, free_substs) + } else { + trait_ref + }; -// Create a type from a path to an associated type. -// For a path A::B::C::D, ty and ty_path_def are the type and def for A::B::C -// and item_segment is the path segment for D. We return a type and a def for -// the whole path. -// Will fail except for T::A and Self::A; i.e., if ty/ty_path_def are not a type -// parameter or Self. -fn associated_path_def_to_ty<'tcx>(this: &AstConv<'tcx>, - span: Span, - ty: Ty<'tcx>, - ty_path_def: Def, - item_segment: &hir::PathSegment) - -> (Ty<'tcx>, Def) -{ - let tcx = this.tcx(); - let assoc_name = item_segment.identifier.name; - - debug!("associated_path_def_to_ty: {:?}::{}", ty, assoc_name); - - prohibit_type_params(tcx, slice::ref_slice(item_segment)); - - // Find the type of the associated item, and the trait where the associated - // item is declared. - let bound = match (&ty.sty, ty_path_def) { - (_, Def::SelfTy(Some(trait_did), Some((impl_id, _)))) => { - // `Self` in an impl of a trait - we have a concrete self type and a - // trait reference. - let trait_ref = tcx.impl_trait_ref(tcx.map.local_def_id(impl_id)).unwrap(); - let trait_ref = if let Some(free_substs) = this.get_free_substs() { - trait_ref.subst(tcx, free_substs) - } else { - trait_ref - }; + if self.ensure_super_predicates(span, trait_did).is_err() { + return (tcx.types.err, ty_path_def); + } - if this.ensure_super_predicates(span, trait_did).is_err() { - return (tcx.types.err, ty_path_def); + let candidates: Vec = + traits::supertraits(tcx, ty::Binder(trait_ref)) + .filter(|r| self.trait_defines_associated_type_named(r.def_id(), + assoc_name)) + .collect(); + + match self.one_bound_for_assoc_type(candidates, + "Self", + &assoc_name.as_str(), + span) { + Ok(bound) => bound, + Err(ErrorReported) => return (tcx.types.err, ty_path_def), + } } - - let candidates: Vec = - traits::supertraits(tcx, ty::Binder(trait_ref)) - .filter(|r| this.trait_defines_associated_type_named(r.def_id(), - assoc_name)) - .collect(); - - match one_bound_for_assoc_type(tcx, - candidates, - "Self", - &assoc_name.as_str(), - span) { - Ok(bound) => bound, - Err(ErrorReported) => return (tcx.types.err, ty_path_def), + (&ty::TyParam(_), Def::SelfTy(Some(trait_did), None)) => { + let trait_node_id = tcx.map.as_local_node_id(trait_did).unwrap(); + match self.find_bound_for_assoc_item(trait_node_id, + keywords::SelfType.name(), + assoc_name, + span) { + Ok(bound) => bound, + Err(ErrorReported) => return (tcx.types.err, ty_path_def), + } } - } - (&ty::TyParam(_), Def::SelfTy(Some(trait_did), None)) => { - let trait_node_id = tcx.map.as_local_node_id(trait_did).unwrap(); - match find_bound_for_assoc_item(this, - trait_node_id, - token::special_idents::type_self.name, - assoc_name, - span) { - Ok(bound) => bound, - Err(ErrorReported) => return (tcx.types.err, ty_path_def), + (&ty::TyParam(_), Def::TyParam(_, _, param_did, param_name)) => { + let param_node_id = tcx.map.as_local_node_id(param_did).unwrap(); + match self.find_bound_for_assoc_item(param_node_id, + param_name, + assoc_name, + span) { + Ok(bound) => bound, + Err(ErrorReported) => return (tcx.types.err, ty_path_def), + } } - } - (&ty::TyParam(_), Def::TyParam(_, _, param_did, param_name)) => { - let param_node_id = tcx.map.as_local_node_id(param_did).unwrap(); - match find_bound_for_assoc_item(this, - param_node_id, - param_name, - assoc_name, - 
span) { - Ok(bound) => bound, - Err(ErrorReported) => return (tcx.types.err, ty_path_def), + _ => { + self.report_ambiguous_associated_type(span, + &ty.to_string(), + "Trait", + &assoc_name.as_str()); + return (tcx.types.err, ty_path_def); } - } - _ => { - report_ambiguous_associated_type(tcx, - span, - &ty.to_string(), - "Trait", - &assoc_name.as_str()); - return (tcx.types.err, ty_path_def); - } - }; - - let trait_did = bound.0.def_id; - let ty = this.projected_ty_from_poly_trait_ref(span, bound, assoc_name); + }; - let item_did = if let Some(trait_id) = tcx.map.as_local_node_id(trait_did) { - // `ty::trait_items` used below requires information generated - // by type collection, which may be in progress at this point. - match tcx.map.expect_item(trait_id).node { - hir::ItemTrait(_, _, _, ref trait_items) => { - let item = trait_items.iter() - .find(|i| i.name == assoc_name) - .expect("missing associated type"); - tcx.map.local_def_id(item.id) + let trait_did = bound.0.def_id; + let ty = self.projected_ty_from_poly_trait_ref(span, bound, assoc_name); + + let item_did = if let Some(trait_id) = tcx.map.as_local_node_id(trait_did) { + // `ty::trait_items` used below requires information generated + // by type collection, which may be in progress at this point. + match tcx.map.expect_item(trait_id).node { + hir::ItemTrait(_, _, _, ref trait_items) => { + let item = trait_items.iter() + .find(|i| i.name == assoc_name) + .expect("missing associated type"); + tcx.map.local_def_id(item.id) + } + _ => bug!() } - _ => bug!() - } - } else { - let trait_items = tcx.trait_items(trait_did); - let item = trait_items.iter().find(|i| i.name() == assoc_name); - item.expect("missing associated type").def_id() - }; + } else { + let trait_items = tcx.trait_items(trait_did); + let item = trait_items.iter().find(|i| i.name() == assoc_name); + item.expect("missing associated type").def_id() + }; - (ty, Def::AssociatedTy(trait_did, item_did)) -} + (ty, Def::AssociatedTy(trait_did, item_did)) + } -fn qpath_to_ty<'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - param_mode: PathParamMode, - opt_self_ty: Option>, - trait_def_id: DefId, - trait_segment: &hir::PathSegment, - item_segment: &hir::PathSegment) - -> Ty<'tcx> -{ - let tcx = this.tcx(); + fn qpath_to_ty(&self, + rscope: &RegionScope, + span: Span, + param_mode: PathParamMode, + opt_self_ty: Option>, + trait_def_id: DefId, + trait_segment: &hir::PathSegment, + item_segment: &hir::PathSegment) + -> Ty<'tcx> + { + let tcx = self.tcx(); - prohibit_type_params(tcx, slice::ref_slice(item_segment)); + tcx.prohibit_type_params(slice::ref_slice(item_segment)); - let self_ty = if let Some(ty) = opt_self_ty { - ty - } else { - let path_str = tcx.item_path_str(trait_def_id); - report_ambiguous_associated_type(tcx, - span, - "Type", - &path_str, - &item_segment.identifier.name.as_str()); - return tcx.types.err; - }; + let self_ty = if let Some(ty) = opt_self_ty { + ty + } else { + let path_str = tcx.item_path_str(trait_def_id); + self.report_ambiguous_associated_type(span, + "Type", + &path_str, + &item_segment.name.as_str()); + return tcx.types.err; + }; - debug!("qpath_to_ty: self_type={:?}", self_ty); + debug!("qpath_to_ty: self_type={:?}", self_ty); - let trait_ref = ast_path_to_mono_trait_ref(this, - rscope, - span, - param_mode, - trait_def_id, - Some(self_ty), - trait_segment); + let trait_ref = self.ast_path_to_mono_trait_ref(rscope, + span, + param_mode, + trait_def_id, + Some(self_ty), + trait_segment); - debug!("qpath_to_ty: 
trait_ref={:?}", trait_ref); + debug!("qpath_to_ty: trait_ref={:?}", trait_ref); - this.projected_ty(span, trait_ref, item_segment.identifier.name) -} + self.projected_ty(span, trait_ref, item_segment.name) + } -/// Convert a type supplied as value for a type argument from AST into our -/// our internal representation. This is the same as `ast_ty_to_ty` but that -/// it applies the object lifetime default. -/// -/// # Parameters -/// -/// * `this`, `rscope`: the surrounding context -/// * `decl_generics`: the generics of the struct/enum/trait declaration being -/// referenced -/// * `index`: the index of the type parameter being instantiated from the list -/// (we assume it is in the `TypeSpace`) -/// * `region_substs`: a partial substitution consisting of -/// only the region type parameters being supplied to this type. -/// * `ast_ty`: the ast representation of the type being supplied -pub fn ast_ty_arg_to_ty<'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - decl_generics: &ty::Generics<'tcx>, - index: usize, - region_substs: &Substs<'tcx>, - ast_ty: &hir::Ty) - -> Ty<'tcx> -{ - let tcx = this.tcx(); + /// Convert a type supplied as value for a type argument from AST into our + /// our internal representation. This is the same as `ast_ty_to_ty` but that + /// it applies the object lifetime default. + /// + /// # Parameters + /// + /// * `this`, `rscope`: the surrounding context + /// * `decl_generics`: the generics of the struct/enum/trait declaration being + /// referenced + /// * `index`: the index of the type parameter being instantiated from the list + /// (we assume it is in the `TypeSpace`) + /// * `region_substs`: a partial substitution consisting of + /// only the region type parameters being supplied to this type. + /// * `ast_ty`: the ast representation of the type being supplied + pub fn ast_ty_arg_to_ty(&self, + rscope: &RegionScope, + decl_generics: &ty::Generics<'tcx>, + index: usize, + region_substs: &Substs<'tcx>, + ast_ty: &hir::Ty) + -> Ty<'tcx> + { + let tcx = self.tcx(); - if let Some(def) = decl_generics.types.opt_get(TypeSpace, index) { - let object_lifetime_default = def.object_lifetime_default.subst(tcx, region_substs); - let rscope1 = &ObjectLifetimeDefaultRscope::new(rscope, object_lifetime_default); - ast_ty_to_ty(this, rscope1, ast_ty) - } else { - ast_ty_to_ty(this, rscope, ast_ty) + if let Some(def) = decl_generics.types.opt_get(TypeSpace, index) { + let object_lifetime_default = def.object_lifetime_default.subst(tcx, region_substs); + let rscope1 = &ObjectLifetimeDefaultRscope::new(rscope, object_lifetime_default); + self.ast_ty_to_ty(rscope1, ast_ty) + } else { + self.ast_ty_to_ty(rscope, ast_ty) + } } -} -// Check the base def in a PathResolution and convert it to a Ty. If there are -// associated types in the PathResolution, these will need to be separately -// resolved. -fn base_def_to_ty<'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - param_mode: PathParamMode, - def: &Def, - opt_self_ty: Option>, - base_segments: &[hir::PathSegment]) - -> Ty<'tcx> { - let tcx = this.tcx(); - - match *def { - Def::Trait(trait_def_id) => { - // N.B. 
this case overlaps somewhat with - // TyObjectSum, see that fn for details - let mut projection_bounds = Vec::new(); - - let trait_ref = object_path_to_poly_trait_ref(this, - rscope, - span, - param_mode, - trait_def_id, - base_segments.last().unwrap(), - &mut projection_bounds); - - prohibit_type_params(tcx, base_segments.split_last().unwrap().1); - trait_ref_to_object_type(this, - rscope, - span, - trait_ref, - projection_bounds, - &[]) - } - Def::Enum(did) | Def::TyAlias(did) | Def::Struct(did) => { - prohibit_type_params(tcx, base_segments.split_last().unwrap().1); - ast_path_to_ty(this, - rscope, - span, - param_mode, - did, - base_segments.last().unwrap()) - } - Def::TyParam(space, index, _, name) => { - prohibit_type_params(tcx, base_segments); - tcx.mk_param(space, index, name) - } - Def::SelfTy(_, Some((_, self_ty_id))) => { - // Self in impl (we know the concrete type). - prohibit_type_params(tcx, base_segments); - if let Some(&ty) = tcx.ast_ty_to_ty_cache.borrow().get(&self_ty_id) { - if let Some(free_substs) = this.get_free_substs() { + // Check the base def in a PathResolution and convert it to a Ty. If there are + // associated types in the PathResolution, these will need to be separately + // resolved. + fn base_def_to_ty(&self, + rscope: &RegionScope, + span: Span, + param_mode: PathParamMode, + def: Def, + opt_self_ty: Option>, + base_path_ref_id: ast::NodeId, + base_segments: &[hir::PathSegment]) + -> Ty<'tcx> { + let tcx = self.tcx(); + + debug!("base_def_to_ty(def={:?}, opt_self_ty={:?}, base_segments={:?})", + def, opt_self_ty, base_segments); + + match def { + Def::Trait(trait_def_id) => { + // N.B. this case overlaps somewhat with + // TyObjectSum, see that fn for details + let mut projection_bounds = Vec::new(); + + let trait_ref = + self.object_path_to_poly_trait_ref(rscope, + span, + param_mode, + trait_def_id, + base_path_ref_id, + base_segments.last().unwrap(), + &mut projection_bounds); + + tcx.prohibit_type_params(base_segments.split_last().unwrap().1); + self.trait_ref_to_object_type(rscope, + span, + trait_ref, + projection_bounds, + &[]) + } + Def::Enum(did) | Def::TyAlias(did) | Def::Struct(did) => { + tcx.prohibit_type_params(base_segments.split_last().unwrap().1); + self.ast_path_to_ty(rscope, + span, + param_mode, + did, + base_segments.last().unwrap()) + } + Def::TyParam(space, index, _, name) => { + tcx.prohibit_type_params(base_segments); + tcx.mk_param(space, index, name) + } + Def::SelfTy(_, Some(impl_id)) => { + // Self in impl (we know the concrete type). + tcx.prohibit_type_params(base_segments); + let ty = tcx.node_id_to_type(impl_id); + if let Some(free_substs) = self.get_free_substs() { ty.subst(tcx, free_substs) } else { ty } - } else { - span_bug!(span, "self type has not been fully resolved") } - } - Def::SelfTy(Some(_), None) => { - // Self in trait. - prohibit_type_params(tcx, base_segments); - tcx.mk_self_type() - } - Def::AssociatedTy(trait_did, _) => { - prohibit_type_params(tcx, &base_segments[..base_segments.len()-2]); - qpath_to_ty(this, - rscope, - span, - param_mode, - opt_self_ty, - trait_did, - &base_segments[base_segments.len()-2], - base_segments.last().unwrap()) - } - Def::Mod(..) => { - // Used as sentinel by callers to indicate the `::A::B::C` form. - // FIXME(#22519) This part of the resolution logic should be - // avoided entirely for that form, once we stop needed a Def - // for `associated_path_def_to_ty`. 
- // Fixing this will also let use resolve ::Foo the same way we - // resolve Self::Foo, at the moment we can't resolve the former because - // we don't have the trait information around, which is just sad. - - assert!(base_segments.is_empty()); - - opt_self_ty.expect("missing T in ::a::b::c") - } - Def::PrimTy(prim_ty) => { - prim_ty_to_ty(tcx, base_segments, prim_ty) - } - Def::Err => { - return this.tcx().types.err; - } - _ => { - span_err!(tcx.sess, span, E0248, - "found value `{}` used as a type", - tcx.item_path_str(def.def_id())); - return this.tcx().types.err; + Def::SelfTy(Some(_), None) => { + // Self in trait. + tcx.prohibit_type_params(base_segments); + tcx.mk_self_type() + } + Def::AssociatedTy(trait_did, _) => { + tcx.prohibit_type_params(&base_segments[..base_segments.len()-2]); + self.qpath_to_ty(rscope, + span, + param_mode, + opt_self_ty, + trait_did, + &base_segments[base_segments.len()-2], + base_segments.last().unwrap()) + } + Def::Mod(..) => { + // Used as sentinel by callers to indicate the `::A::B::C` form. + // FIXME(#22519) This part of the resolution logic should be + // avoided entirely for that form, once we stop needed a Def + // for `associated_path_def_to_ty`. + // Fixing this will also let use resolve ::Foo the same way we + // resolve Self::Foo, at the moment we can't resolve the former because + // we don't have the trait information around, which is just sad. + + assert!(base_segments.is_empty()); + + opt_self_ty.expect("missing T in ::a::b::c") + } + Def::PrimTy(prim_ty) => { + tcx.prim_ty_to_ty(base_segments, prim_ty) + } + Def::Err => { + self.set_tainted_by_errors(); + return self.tcx().types.err; + } + _ => { + span_err!(tcx.sess, span, E0248, + "found value `{}` used as a type", + tcx.item_path_str(def.def_id())); + return self.tcx().types.err; + } } } -} -// Note that both base_segments and assoc_segments may be empty, although not at -// the same time. -pub fn finish_resolving_def_to_ty<'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - param_mode: PathParamMode, - def: &Def, - opt_self_ty: Option>, - base_segments: &[hir::PathSegment], - assoc_segments: &[hir::PathSegment]) - -> Ty<'tcx> { - let mut ty = base_def_to_ty(this, - rscope, - span, - param_mode, - def, - opt_self_ty, - base_segments); - let mut def = *def; - // If any associated type segments remain, attempt to resolve them. - for segment in assoc_segments { - if ty.sty == ty::TyError { - break; - } - // This is pretty bad (it will fail except for T::A and Self::A). - let (a_ty, a_def) = associated_path_def_to_ty(this, - span, - ty, - def, - segment); - ty = a_ty; - def = a_def; + // Note that both base_segments and assoc_segments may be empty, although not at + // the same time. + pub fn finish_resolving_def_to_ty(&self, + rscope: &RegionScope, + span: Span, + param_mode: PathParamMode, + mut def: Def, + opt_self_ty: Option>, + base_path_ref_id: ast::NodeId, + base_segments: &[hir::PathSegment], + assoc_segments: &[hir::PathSegment]) + -> (Ty<'tcx>, Def) { + debug!("finish_resolving_def_to_ty(def={:?}, \ + base_segments={:?}, \ + assoc_segments={:?})", + def, + base_segments, + assoc_segments); + let mut ty = self.base_def_to_ty(rscope, + span, + param_mode, + def, + opt_self_ty, + base_path_ref_id, + base_segments); + debug!("finish_resolving_def_to_ty: base_def_to_ty returned {:?}", ty); + // If any associated type segments remain, attempt to resolve them. 
+ for segment in assoc_segments { + debug!("finish_resolving_def_to_ty: segment={:?}", segment); + if ty.sty == ty::TyError { + break; + } + // This is pretty bad (it will fail except for T::A and Self::A). + let (a_ty, a_def) = self.associated_path_def_to_ty(span, + ty, + def, + segment); + ty = a_ty; + def = a_def; + } + (ty, def) } - ty -} -/// Parses the programmer's textual representation of a type into our -/// internal notion of a type. -pub fn ast_ty_to_ty<'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - ast_ty: &hir::Ty) - -> Ty<'tcx> -{ - debug!("ast_ty_to_ty(id={:?}, ast_ty={:?})", - ast_ty.id, ast_ty); - - let tcx = this.tcx(); + /// Parses the programmer's textual representation of a type into our + /// internal notion of a type. + pub fn ast_ty_to_ty(&self, rscope: &RegionScope, ast_ty: &hir::Ty) -> Ty<'tcx> { + debug!("ast_ty_to_ty(id={:?}, ast_ty={:?})", + ast_ty.id, ast_ty); - if let Some(&ty) = tcx.ast_ty_to_ty_cache.borrow().get(&ast_ty.id) { - debug!("ast_ty_to_ty: id={:?} ty={:?} (cached)", ast_ty.id, ty); - return ty; - } + let tcx = self.tcx(); - let typ = match ast_ty.node { - hir::TyVec(ref ty) => { - tcx.mk_slice(ast_ty_to_ty(this, rscope, &ty)) + let cache = self.ast_ty_to_ty_cache(); + match cache.borrow().get(&ast_ty.id) { + Some(ty) => { return ty; } + None => { } } - hir::TyObjectSum(ref ty, ref bounds) => { - match ast_ty_to_trait_ref(this, rscope, &ty, bounds) { - Ok((trait_ref, projection_bounds)) => { - trait_ref_to_object_type(this, - rscope, - ast_ty.span, - trait_ref, - projection_bounds, - bounds) - } - Err(ErrorReported) => { - this.tcx().types.err + + let result_ty = match ast_ty.node { + hir::TyVec(ref ty) => { + tcx.mk_slice(self.ast_ty_to_ty(rscope, &ty)) + } + hir::TyObjectSum(ref ty, ref bounds) => { + match self.ast_ty_to_trait_ref(rscope, &ty, bounds) { + Ok((trait_ref, projection_bounds)) => { + self.trait_ref_to_object_type(rscope, + ast_ty.span, + trait_ref, + projection_bounds, + bounds) + } + Err(ErrorReported) => { + self.tcx().types.err + } } } - } - hir::TyPtr(ref mt) => { - tcx.mk_ptr(ty::TypeAndMut { - ty: ast_ty_to_ty(this, rscope, &mt.ty), - mutbl: mt.mutbl - }) - } - hir::TyRptr(ref region, ref mt) => { - let r = opt_ast_region_to_region(this, rscope, ast_ty.span, region); - debug!("TyRef r={:?}", r); - let rscope1 = - &ObjectLifetimeDefaultRscope::new( - rscope, - ty::ObjectLifetimeDefault::Specific(r)); - let t = ast_ty_to_ty(this, rscope1, &mt.ty); - tcx.mk_ref(tcx.mk_region(r), ty::TypeAndMut {ty: t, mutbl: mt.mutbl}) - } - hir::TyTup(ref fields) => { - let flds = fields.iter() - .map(|t| ast_ty_to_ty(this, rscope, &t)) - .collect(); - tcx.mk_tup(flds) - } - hir::TyBareFn(ref bf) => { - require_c_abi_if_variadic(tcx, &bf.decl, bf.abi, ast_ty.span); - tcx.mk_fn_ptr(ty_of_bare_fn(this, bf.unsafety, bf.abi, &bf.decl)) - } - hir::TyPolyTraitRef(ref bounds) => { - conv_ty_poly_trait_ref(this, rscope, ast_ty.span, bounds) - } - hir::TyPath(ref maybe_qself, ref path) => { - let path_res = if let Some(&d) = tcx.def_map.borrow().get(&ast_ty.id) { - d - } else if let Some(hir::QSelf { position: 0, .. }) = *maybe_qself { - // Create some fake resolution that can't possibly be a type. 
- def::PathResolution { - base_def: Def::Mod(tcx.map.local_def_id(ast::CRATE_NODE_ID)), - depth: path.segments.len() + hir::TyPtr(ref mt) => { + tcx.mk_ptr(ty::TypeAndMut { + ty: self.ast_ty_to_ty(rscope, &mt.ty), + mutbl: mt.mutbl + }) + } + hir::TyRptr(ref region, ref mt) => { + let r = self.opt_ast_region_to_region(rscope, ast_ty.span, region); + debug!("TyRef r={:?}", r); + let rscope1 = + &ObjectLifetimeDefaultRscope::new( + rscope, + ty::ObjectLifetimeDefault::Specific(r)); + let t = self.ast_ty_to_ty(rscope1, &mt.ty); + tcx.mk_ref(tcx.mk_region(r), ty::TypeAndMut {ty: t, mutbl: mt.mutbl}) + } + hir::TyTup(ref fields) => { + let flds = fields.iter() + .map(|t| self.ast_ty_to_ty(rscope, &t)) + .collect(); + tcx.mk_tup(flds) + } + hir::TyBareFn(ref bf) => { + require_c_abi_if_variadic(tcx, &bf.decl, bf.abi, ast_ty.span); + let bare_fn_ty = self.ty_of_bare_fn(bf.unsafety, bf.abi, &bf.decl); + + // Find any late-bound regions declared in return type that do + // not appear in the arguments. These are not wellformed. + // + // Example: + // + // for<'a> fn() -> &'a str <-- 'a is bad + // for<'a> fn(&'a String) -> &'a str <-- 'a is ok + // + // Note that we do this check **here** and not in + // `ty_of_bare_fn` because the latter is also used to make + // the types for fn items, and we do not want to issue a + // warning then. (Once we fix #32330, the regions we are + // checking for here would be considered early bound + // anyway.) + let inputs = bare_fn_ty.sig.inputs(); + let late_bound_in_args = tcx.collect_constrained_late_bound_regions(&inputs); + let output = bare_fn_ty.sig.output(); + let late_bound_in_ret = tcx.collect_referenced_late_bound_regions(&output); + for br in late_bound_in_ret.difference(&late_bound_in_args) { + let br_name = match *br { + ty::BrNamed(_, name) => name, + _ => { + span_bug!( + bf.decl.output.span(), + "anonymous bound region {:?} in return but not args", + br); + } + }; + tcx.sess.add_lint( + lint::builtin::HR_LIFETIME_IN_ASSOC_TYPE, + ast_ty.id, + ast_ty.span, + format!("return type references lifetime `{}`, \ + which does not appear in the trait input types", + br_name)); } - } else { - span_bug!(ast_ty.span, "unbound path {:?}", ast_ty) - }; - let def = path_res.base_def; - let base_ty_end = path.segments.len() - path_res.depth; - let opt_self_ty = maybe_qself.as_ref().map(|qself| { - ast_ty_to_ty(this, rscope, &qself.ty) - }); - let ty = finish_resolving_def_to_ty(this, - rscope, - ast_ty.span, - PathParamMode::Explicit, - &def, - opt_self_ty, - &path.segments[..base_ty_end], - &path.segments[base_ty_end..]); - - if path_res.depth != 0 && ty.sty != ty::TyError { - // Write back the new resolution. - tcx.def_map.borrow_mut().insert(ast_ty.id, def::PathResolution { - base_def: def, - depth: 0 - }); + tcx.mk_fn_ptr(bare_fn_ty) } + hir::TyPolyTraitRef(ref bounds) => { + self.conv_ty_poly_trait_ref(rscope, ast_ty.span, bounds) + } + hir::TyPath(ref maybe_qself, ref path) => { + debug!("ast_ty_to_ty: maybe_qself={:?} path={:?}", maybe_qself, path); + let path_res = if let Some(&d) = tcx.def_map.borrow().get(&ast_ty.id) { + d + } else if let Some(hir::QSelf { position: 0, .. }) = *maybe_qself { + // Create some fake resolution that can't possibly be a type. 
+ def::PathResolution { + base_def: Def::Mod(tcx.map.local_def_id(ast::CRATE_NODE_ID)), + depth: path.segments.len() + } + } else { + span_bug!(ast_ty.span, "unbound path {:?}", ast_ty) + }; + let def = path_res.base_def; + let base_ty_end = path.segments.len() - path_res.depth; + let opt_self_ty = maybe_qself.as_ref().map(|qself| { + self.ast_ty_to_ty(rscope, &qself.ty) + }); + let (ty, _def) = self.finish_resolving_def_to_ty(rscope, + ast_ty.span, + PathParamMode::Explicit, + def, + opt_self_ty, + ast_ty.id, + &path.segments[..base_ty_end], + &path.segments[base_ty_end..]); + + if path_res.depth != 0 && ty.sty != ty::TyError { + // Write back the new resolution. + tcx.def_map.borrow_mut().insert(ast_ty.id, def::PathResolution { + base_def: def, + depth: 0 + }); + } - ty - } - hir::TyFixedLengthVec(ref ty, ref e) => { - let hint = UncheckedExprHint(tcx.types.usize); - match eval_const_expr_partial(tcx, &e, hint, None) { - Ok(ConstVal::Integral(ConstInt::Usize(i))) => { - let i = i.as_u64(tcx.sess.target.uint_type); - assert_eq!(i as usize as u64, i); - tcx.mk_array(ast_ty_to_ty(this, rscope, &ty), i as usize) - }, - Ok(val) => { - span_err!(tcx.sess, ast_ty.span, E0249, - "expected usize value for array length, got {}", val.description()); - this.tcx().types.err - }, - Err(ref r) => { - let mut err = struct_span_err!(tcx.sess, r.span, E0250, - "array length constant evaluation error: {}", - r.description()); - if !ast_ty.span.contains(r.span) { - span_note!(&mut err, ast_ty.span, "for array length here") + ty + } + hir::TyFixedLengthVec(ref ty, ref e) => { + let hint = UncheckedExprHint(tcx.types.usize); + match eval_const_expr_partial(tcx.global_tcx(), &e, hint, None) { + Ok(ConstVal::Integral(ConstInt::Usize(i))) => { + let i = i.as_u64(tcx.sess.target.uint_type); + assert_eq!(i as usize as u64, i); + tcx.mk_array(self.ast_ty_to_ty(rscope, &ty), i as usize) + }, + Ok(val) => { + span_err!(tcx.sess, ast_ty.span, E0249, + "expected usize value for array length, got {}", + val.description()); + self.tcx().types.err + }, + // array length errors happen before the global constant check + // so we need to report the real error + Err(ConstEvalErr { kind: ErroneousReferencedConstant(box r), ..}) | + Err(r) => { + let mut err = struct_span_err!(tcx.sess, r.span, E0250, + "array length constant \ + evaluation error: {}", + r.description()); + if !ast_ty.span.contains(r.span) { + span_note!(&mut err, ast_ty.span, "for array length here") + } + err.emit(); + self.tcx().types.err } - err.emit(); - this.tcx().types.err } } - } - hir::TyTypeof(ref _e) => { - span_err!(tcx.sess, ast_ty.span, E0516, - "`typeof` is a reserved keyword but unimplemented"); - tcx.types.err - } - hir::TyInfer => { - // TyInfer also appears as the type of arguments or return - // values in a ExprClosure, or as - // the type of local variables. Both of these cases are - // handled specially and will not descend into this routine. - this.ty_infer(None, None, None, ast_ty.span) - } - }; + hir::TyTypeof(ref _e) => { + span_err!(tcx.sess, ast_ty.span, E0516, + "`typeof` is a reserved keyword but unimplemented"); + tcx.types.err + } + hir::TyInfer => { + // TyInfer also appears as the type of arguments or return + // values in a ExprClosure, or as + // the type of local variables. Both of these cases are + // handled specially and will not descend into this routine. 
+ self.ty_infer(None, None, None, ast_ty.span) + } + }; - debug!("ast_ty_to_ty: id={:?} ty={:?}", ast_ty.id, typ); - tcx.ast_ty_to_ty_cache.borrow_mut().insert(ast_ty.id, typ); - return typ; -} + cache.borrow_mut().insert(ast_ty.id, result_ty); -pub fn ty_of_arg<'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - a: &hir::Arg, - expected_ty: Option>) - -> Ty<'tcx> -{ - match a.ty.node { - hir::TyInfer if expected_ty.is_some() => expected_ty.unwrap(), - hir::TyInfer => this.ty_infer(None, None, None, a.ty.span), - _ => ast_ty_to_ty(this, rscope, &a.ty), + result_ty } -} -struct SelfInfo<'a, 'tcx> { - untransformed_self_ty: Ty<'tcx>, - explicit_self: &'a hir::ExplicitSelf, -} + pub fn ty_of_arg(&self, + rscope: &RegionScope, + a: &hir::Arg, + expected_ty: Option>) + -> Ty<'tcx> + { + match a.ty.node { + hir::TyInfer if expected_ty.is_some() => expected_ty.unwrap(), + hir::TyInfer => self.ty_infer(None, None, None, a.ty.span), + _ => self.ast_ty_to_ty(rscope, &a.ty), + } + } -pub fn ty_of_method<'tcx>(this: &AstConv<'tcx>, - sig: &hir::MethodSig, - untransformed_self_ty: Ty<'tcx>) - -> (ty::BareFnTy<'tcx>, ty::ExplicitSelfCategory) { - let self_info = Some(SelfInfo { - untransformed_self_ty: untransformed_self_ty, - explicit_self: &sig.explicit_self, - }); - let (bare_fn_ty, optional_explicit_self_category) = - ty_of_method_or_bare_fn(this, - sig.unsafety, - sig.abi, - self_info, - &sig.decl); - (bare_fn_ty, optional_explicit_self_category.unwrap()) -} + pub fn ty_of_method(&self, + sig: &hir::MethodSig, + untransformed_self_ty: Ty<'tcx>) + -> (&'tcx ty::BareFnTy<'tcx>, ty::ExplicitSelfCategory) { + let (bare_fn_ty, optional_explicit_self_category) = + self.ty_of_method_or_bare_fn(sig.unsafety, + sig.abi, + Some(untransformed_self_ty), + &sig.decl); + (bare_fn_ty, optional_explicit_self_category) + } -pub fn ty_of_bare_fn<'tcx>(this: &AstConv<'tcx>, unsafety: hir::Unsafety, abi: abi::Abi, - decl: &hir::FnDecl) -> ty::BareFnTy<'tcx> { - let (bare_fn_ty, _) = ty_of_method_or_bare_fn(this, unsafety, abi, None, decl); - bare_fn_ty -} + pub fn ty_of_bare_fn(&self, + unsafety: hir::Unsafety, + abi: abi::Abi, + decl: &hir::FnDecl) + -> &'tcx ty::BareFnTy<'tcx> { + self.ty_of_method_or_bare_fn(unsafety, abi, None, decl).0 + } -fn ty_of_method_or_bare_fn<'a, 'tcx>(this: &AstConv<'tcx>, - unsafety: hir::Unsafety, - abi: abi::Abi, - opt_self_info: Option>, - decl: &hir::FnDecl) - -> (ty::BareFnTy<'tcx>, Option) -{ - debug!("ty_of_method_or_bare_fn"); - - // New region names that appear inside of the arguments of the function - // declaration are bound to that function type. - let rb = rscope::BindingRscope::new(); - - // `implied_output_region` is the region that will be assumed for any - // region parameters in the return type. In accordance with the rules for - // lifetime elision, we can determine it in two ways. First (determined - // here), if self is by-reference, then the implied output region is the - // region of the self parameter. - let (self_ty, explicit_self_category) = match opt_self_info { - None => (None, None), - Some(self_info) => determine_self_type(this, &rb, self_info) - }; + fn ty_of_method_or_bare_fn<'a>(&self, + unsafety: hir::Unsafety, + abi: abi::Abi, + opt_untransformed_self_ty: Option>, + decl: &hir::FnDecl) + -> (&'tcx ty::BareFnTy<'tcx>, ty::ExplicitSelfCategory) + { + debug!("ty_of_method_or_bare_fn"); + + // New region names that appear inside of the arguments of the function + // declaration are bound to that function type. 
+ let rb = rscope::BindingRscope::new(); + + // `implied_output_region` is the region that will be assumed for any + // region parameters in the return type. In accordance with the rules for + // lifetime elision, we can determine it in two ways. First (determined + // here), if self is by-reference, then the implied output region is the + // region of the self parameter. + let explicit_self = decl.inputs.get(0).and_then(hir::Arg::to_self); + let (self_ty, explicit_self_category) = match (opt_untransformed_self_ty, explicit_self) { + (Some(untransformed_self_ty), Some(explicit_self)) => { + let self_type = self.determine_self_type(&rb, untransformed_self_ty, + &explicit_self); + (Some(self_type.0), self_type.1) + } + _ => (None, ty::ExplicitSelfCategory::Static), + }; - // HACK(eddyb) replace the fake self type in the AST with the actual type. - let arg_params = if self_ty.is_some() { - &decl.inputs[1..] - } else { - &decl.inputs[..] - }; - let arg_tys: Vec = - arg_params.iter().map(|a| ty_of_arg(this, &rb, a, None)).collect(); - let arg_pats: Vec = - arg_params.iter().map(|a| pprust::pat_to_string(&a.pat)).collect(); - - // Second, if there was exactly one lifetime (either a substitution or a - // reference) in the arguments, then any anonymous regions in the output - // have that lifetime. - let implied_output_region = match explicit_self_category { - Some(ty::ExplicitSelfCategory::ByReference(region, _)) => Ok(region), - _ => find_implied_output_region(this.tcx(), &arg_tys, arg_pats) - }; + // HACK(eddyb) replace the fake self type in the AST with the actual type. + let arg_params = if self_ty.is_some() { + &decl.inputs[1..] + } else { + &decl.inputs[..] + }; + let arg_tys: Vec = + arg_params.iter().map(|a| self.ty_of_arg(&rb, a, None)).collect(); + let arg_pats: Vec = + arg_params.iter().map(|a| pprust::pat_to_string(&a.pat)).collect(); + + // Second, if there was exactly one lifetime (either a substitution or a + // reference) in the arguments, then any anonymous regions in the output + // have that lifetime. + let implied_output_region = match explicit_self_category { + ty::ExplicitSelfCategory::ByReference(region, _) => Ok(region), + _ => self.find_implied_output_region(&arg_tys, arg_pats) + }; - let output_ty = match decl.output { - hir::Return(ref output) => - ty::FnConverging(convert_ty_with_lifetime_elision(this, - implied_output_region, - &output)), - hir::DefaultReturn(..) => ty::FnConverging(this.tcx().mk_nil()), - hir::NoReturn(..) => ty::FnDiverging - }; + let output_ty = match decl.output { + hir::Return(ref output) => + ty::FnConverging(self.convert_ty_with_lifetime_elision(implied_output_region, + &output)), + hir::DefaultReturn(..) => ty::FnConverging(self.tcx().mk_nil()), + hir::NoReturn(..) 
=> ty::FnDiverging + }; - (ty::BareFnTy { - unsafety: unsafety, - abi: abi, - sig: ty::Binder(ty::FnSig { - inputs: self_ty.into_iter().chain(arg_tys).collect(), - output: output_ty, - variadic: decl.variadic - }), - }, explicit_self_category) -} + (self.tcx().mk_bare_fn(ty::BareFnTy { + unsafety: unsafety, + abi: abi, + sig: ty::Binder(ty::FnSig { + inputs: self_ty.into_iter().chain(arg_tys).collect(), + output: output_ty, + variadic: decl.variadic + }), + }), explicit_self_category) + } -fn determine_self_type<'a, 'tcx>(this: &AstConv<'tcx>, - rscope: &RegionScope, - self_info: SelfInfo<'a, 'tcx>) - -> (Option>, Option) -{ - let self_ty = self_info.untransformed_self_ty; - return match self_info.explicit_self.node { - hir::SelfStatic => (None, Some(ty::ExplicitSelfCategory::Static)), - hir::SelfValue(_) => { - (Some(self_ty), Some(ty::ExplicitSelfCategory::ByValue)) - } - hir::SelfRegion(ref lifetime, mutability, _) => { - let region = - opt_ast_region_to_region(this, - rscope, - self_info.explicit_self.span, - lifetime); - (Some(this.tcx().mk_ref( - this.tcx().mk_region(region), - ty::TypeAndMut { - ty: self_ty, - mutbl: mutability - })), - Some(ty::ExplicitSelfCategory::ByReference(region, mutability))) - } - hir::SelfExplicit(ref ast_type, _) => { - let explicit_type = ast_ty_to_ty(this, rscope, &ast_type); - - // We wish to (for now) categorize an explicit self - // declaration like `self: SomeType` into either `self`, - // `&self`, `&mut self`, or `Box`. We do this here - // by some simple pattern matching. A more precise check - // is done later in `check_method_self_type()`. - // - // Examples: - // - // ``` - // impl Foo for &T { - // // Legal declarations: - // fn method1(self: &&T); // ExplicitSelfCategory::ByReference - // fn method2(self: &T); // ExplicitSelfCategory::ByValue - // fn method3(self: Box<&T>); // ExplicitSelfCategory::ByBox - // - // // Invalid cases will be caught later by `check_method_self_type`: - // fn method_err1(self: &mut T); // ExplicitSelfCategory::ByReference - // } - // ``` - // - // To do the check we just count the number of "modifiers" - // on each type and compare them. If they are the same or - // the impl has more, we call it "by value". Otherwise, we - // look at the outermost modifier on the method decl and - // call it by-ref, by-box as appropriate. For method1, for - // example, the impl type has one modifier, but the method - // type has two, so we end up with - // ExplicitSelfCategory::ByReference. - - let impl_modifiers = count_modifiers(self_info.untransformed_self_ty); - let method_modifiers = count_modifiers(explicit_type); - - debug!("determine_explicit_self_category(self_info.untransformed_self_ty={:?} \ - explicit_type={:?} \ - modifiers=({},{})", - self_info.untransformed_self_ty, - explicit_type, - impl_modifiers, - method_modifiers); - - let category = if impl_modifiers >= method_modifiers { - ty::ExplicitSelfCategory::ByValue - } else { - match explicit_type.sty { - ty::TyRef(r, mt) => ty::ExplicitSelfCategory::ByReference(*r, mt.mutbl), - ty::TyBox(_) => ty::ExplicitSelfCategory::ByBox, - _ => ty::ExplicitSelfCategory::ByValue, - } - }; + fn determine_self_type<'a>(&self, + rscope: &RegionScope, + untransformed_self_ty: Ty<'tcx>, + explicit_self: &hir::ExplicitSelf) + -> (Ty<'tcx>, ty::ExplicitSelfCategory) + { + return match explicit_self.node { + SelfKind::Value(..) 
=> { + (untransformed_self_ty, ty::ExplicitSelfCategory::ByValue) + } + SelfKind::Region(ref lifetime, mutability) => { + let region = + self.opt_ast_region_to_region( + rscope, + explicit_self.span, + lifetime); + (self.tcx().mk_ref( + self.tcx().mk_region(region), + ty::TypeAndMut { + ty: untransformed_self_ty, + mutbl: mutability + }), + ty::ExplicitSelfCategory::ByReference(region, mutability)) + } + SelfKind::Explicit(ref ast_type, _) => { + let explicit_type = self.ast_ty_to_ty(rscope, &ast_type); + + // We wish to (for now) categorize an explicit self + // declaration like `self: SomeType` into either `self`, + // `&self`, `&mut self`, or `Box`. We do this here + // by some simple pattern matching. A more precise check + // is done later in `check_method_self_type()`. + // + // Examples: + // + // ``` + // impl Foo for &T { + // // Legal declarations: + // fn method1(self: &&T); // ExplicitSelfCategory::ByReference + // fn method2(self: &T); // ExplicitSelfCategory::ByValue + // fn method3(self: Box<&T>); // ExplicitSelfCategory::ByBox + // + // // Invalid cases will be caught later by `check_method_self_type`: + // fn method_err1(self: &mut T); // ExplicitSelfCategory::ByReference + // } + // ``` + // + // To do the check we just count the number of "modifiers" + // on each type and compare them. If they are the same or + // the impl has more, we call it "by value". Otherwise, we + // look at the outermost modifier on the method decl and + // call it by-ref, by-box as appropriate. For method1, for + // example, the impl type has one modifier, but the method + // type has two, so we end up with + // ExplicitSelfCategory::ByReference. + + let impl_modifiers = count_modifiers(untransformed_self_ty); + let method_modifiers = count_modifiers(explicit_type); + + debug!("determine_explicit_self_category(self_info.untransformed_self_ty={:?} \ + explicit_type={:?} \ + modifiers=({},{})", + untransformed_self_ty, + explicit_type, + impl_modifiers, + method_modifiers); + + let category = if impl_modifiers >= method_modifiers { + ty::ExplicitSelfCategory::ByValue + } else { + match explicit_type.sty { + ty::TyRef(r, mt) => ty::ExplicitSelfCategory::ByReference(*r, mt.mutbl), + ty::TyBox(_) => ty::ExplicitSelfCategory::ByBox, + _ => ty::ExplicitSelfCategory::ByValue, + } + }; - (Some(explicit_type), Some(category)) - } - }; + (explicit_type, category) + } + }; - fn count_modifiers(ty: Ty) -> usize { - match ty.sty { - ty::TyRef(_, mt) => count_modifiers(mt.ty) + 1, - ty::TyBox(t) => count_modifiers(t) + 1, - _ => 0, + fn count_modifiers(ty: Ty) -> usize { + match ty.sty { + ty::TyRef(_, mt) => count_modifiers(mt.ty) + 1, + ty::TyBox(t) => count_modifiers(t) + 1, + _ => 0, + } } } -} -pub fn ty_of_closure<'tcx>( - this: &AstConv<'tcx>, - unsafety: hir::Unsafety, - decl: &hir::FnDecl, - abi: abi::Abi, - expected_sig: Option>) - -> ty::ClosureTy<'tcx> -{ - debug!("ty_of_closure(expected_sig={:?})", - expected_sig); - - // new region names that appear inside of the fn decl are bound to - // that function type - let rb = rscope::BindingRscope::new(); - - let input_tys: Vec<_> = decl.inputs.iter().enumerate().map(|(i, a)| { - let expected_arg_ty = expected_sig.as_ref().and_then(|e| { - // no guarantee that the correct number of expected args - // were supplied - if i < e.inputs.len() { - Some(e.inputs[i]) - } else { - None - } - }); - ty_of_arg(this, &rb, a, expected_arg_ty) - }).collect(); + pub fn ty_of_closure(&self, + unsafety: hir::Unsafety, + decl: &hir::FnDecl, + abi: abi::Abi, + expected_sig: 
Option>) + -> ty::ClosureTy<'tcx> + { + debug!("ty_of_closure(expected_sig={:?})", + expected_sig); + + // new region names that appear inside of the fn decl are bound to + // that function type + let rb = rscope::BindingRscope::new(); + + let input_tys: Vec<_> = decl.inputs.iter().enumerate().map(|(i, a)| { + let expected_arg_ty = expected_sig.as_ref().and_then(|e| { + // no guarantee that the correct number of expected args + // were supplied + if i < e.inputs.len() { + Some(e.inputs[i]) + } else { + None + } + }); + self.ty_of_arg(&rb, a, expected_arg_ty) + }).collect(); - let expected_ret_ty = expected_sig.map(|e| e.output); + let expected_ret_ty = expected_sig.map(|e| e.output); - let is_infer = match decl.output { - hir::Return(ref output) if output.node == hir::TyInfer => true, - hir::DefaultReturn(..) => true, - _ => false - }; + let is_infer = match decl.output { + hir::Return(ref output) if output.node == hir::TyInfer => true, + hir::DefaultReturn(..) => true, + _ => false + }; - let output_ty = match decl.output { - _ if is_infer && expected_ret_ty.is_some() => - expected_ret_ty.unwrap(), - _ if is_infer => - ty::FnConverging(this.ty_infer(None, None, None, decl.output.span())), - hir::Return(ref output) => - ty::FnConverging(ast_ty_to_ty(this, &rb, &output)), - hir::DefaultReturn(..) => bug!(), - hir::NoReturn(..) => ty::FnDiverging - }; + let output_ty = match decl.output { + _ if is_infer && expected_ret_ty.is_some() => + expected_ret_ty.unwrap(), + _ if is_infer => + ty::FnConverging(self.ty_infer(None, None, None, decl.output.span())), + hir::Return(ref output) => + ty::FnConverging(self.ast_ty_to_ty(&rb, &output)), + hir::DefaultReturn(..) => bug!(), + hir::NoReturn(..) => ty::FnDiverging + }; - debug!("ty_of_closure: input_tys={:?}", input_tys); - debug!("ty_of_closure: output_ty={:?}", output_ty); + debug!("ty_of_closure: input_tys={:?}", input_tys); + debug!("ty_of_closure: output_ty={:?}", output_ty); - ty::ClosureTy { - unsafety: unsafety, - abi: abi, - sig: ty::Binder(ty::FnSig {inputs: input_tys, - output: output_ty, - variadic: decl.variadic}), + ty::ClosureTy { + unsafety: unsafety, + abi: abi, + sig: ty::Binder(ty::FnSig {inputs: input_tys, + output: output_ty, + variadic: decl.variadic}), + } } -} -/// Given an existential type like `Foo+'a+Bar`, this routine converts the `'a` and `Bar` intos an -/// `ExistentialBounds` struct. The `main_trait_refs` argument specifies the `Foo` -- it is absent -/// for closures. Eventually this should all be normalized, I think, so that there is no "main -/// trait ref" and instead we just have a flat list of bounds as the existential type. -fn conv_existential_bounds<'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - principal_trait_ref: ty::PolyTraitRef<'tcx>, - projection_bounds: Vec>, - ast_bounds: &[hir::TyParamBound]) - -> ty::ExistentialBounds<'tcx> -{ - let partitioned_bounds = - partition_bounds(this.tcx(), span, ast_bounds); - - conv_existential_bounds_from_partitioned_bounds( - this, rscope, span, principal_trait_ref, projection_bounds, partitioned_bounds) -} + /// Given an existential type like `Foo+'a+Bar`, this routine converts + /// the `'a` and `Bar` intos an `ExistentialBounds` struct. + /// The `main_trait_refs` argument specifies the `Foo` -- it is absent + /// for closures. Eventually this should all be normalized, I think, + /// so that there is no "main trait ref" and instead we just have a flat + /// list of bounds as the existential type. 
+ fn conv_existential_bounds(&self, + rscope: &RegionScope, + span: Span, + principal_trait_ref: ty::PolyTraitRef<'tcx>, + projection_bounds: Vec>, + ast_bounds: &[hir::TyParamBound]) + -> ty::ExistentialBounds<'tcx> + { + let partitioned_bounds = + partition_bounds(self.tcx(), span, ast_bounds); -fn conv_ty_poly_trait_ref<'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - ast_bounds: &[hir::TyParamBound]) - -> Ty<'tcx> -{ - let mut partitioned_bounds = partition_bounds(this.tcx(), span, &ast_bounds[..]); - - let mut projection_bounds = Vec::new(); - let main_trait_bound = if !partitioned_bounds.trait_bounds.is_empty() { - let trait_bound = partitioned_bounds.trait_bounds.remove(0); - instantiate_poly_trait_ref(this, - rscope, - trait_bound, - None, - &mut projection_bounds) - } else { - span_err!(this.tcx().sess, span, E0224, - "at least one non-builtin trait is required for an object type"); - return this.tcx().types.err; - }; + self.conv_existential_bounds_from_partitioned_bounds( + rscope, span, principal_trait_ref, projection_bounds, partitioned_bounds) + } - let bounds = - conv_existential_bounds_from_partitioned_bounds(this, - rscope, - span, - main_trait_bound.clone(), - projection_bounds, - partitioned_bounds); + fn conv_ty_poly_trait_ref(&self, + rscope: &RegionScope, + span: Span, + ast_bounds: &[hir::TyParamBound]) + -> Ty<'tcx> + { + let mut partitioned_bounds = partition_bounds(self.tcx(), span, &ast_bounds[..]); + + let mut projection_bounds = Vec::new(); + let main_trait_bound = if !partitioned_bounds.trait_bounds.is_empty() { + let trait_bound = partitioned_bounds.trait_bounds.remove(0); + self.instantiate_poly_trait_ref(rscope, + trait_bound, + None, + &mut projection_bounds) + } else { + span_err!(self.tcx().sess, span, E0224, + "at least one non-builtin trait is required for an object type"); + return self.tcx().types.err; + }; - make_object_type(this, span, main_trait_bound, bounds) -} + let bounds = + self.conv_existential_bounds_from_partitioned_bounds(rscope, + span, + main_trait_bound.clone(), + projection_bounds, + partitioned_bounds); -pub fn conv_existential_bounds_from_partitioned_bounds<'tcx>( - this: &AstConv<'tcx>, - rscope: &RegionScope, - span: Span, - principal_trait_ref: ty::PolyTraitRef<'tcx>, - projection_bounds: Vec>, // Empty for boxed closures - partitioned_bounds: PartitionedBounds) - -> ty::ExistentialBounds<'tcx> -{ - let PartitionedBounds { builtin_bounds, - trait_bounds, - region_bounds } = - partitioned_bounds; - - if !trait_bounds.is_empty() { - let b = &trait_bounds[0]; - span_err!(this.tcx().sess, b.trait_ref.path.span, E0225, - "only the builtin traits can be used as closure or object bounds"); + self.make_object_type(span, main_trait_bound, bounds) } - let region_bound = - compute_object_lifetime_bound(this, - span, - ®ion_bounds, - principal_trait_ref, - builtin_bounds); - - let region_bound = match region_bound { - Some(r) => r, - None => { - match rscope.object_lifetime_default(span) { - Some(r) => r, - None => { - span_err!(this.tcx().sess, span, E0228, - "the lifetime bound for this object type cannot be deduced \ - from context; please supply an explicit bound"); - ty::ReStatic + pub fn conv_existential_bounds_from_partitioned_bounds(&self, + rscope: &RegionScope, + span: Span, + principal_trait_ref: ty::PolyTraitRef<'tcx>, + projection_bounds: Vec>, // Empty for boxed closures + partitioned_bounds: PartitionedBounds) + -> ty::ExistentialBounds<'tcx> + { + let PartitionedBounds { builtin_bounds, + trait_bounds, + 
region_bounds } = + partitioned_bounds; + + if !trait_bounds.is_empty() { + let b = &trait_bounds[0]; + span_err!(self.tcx().sess, b.trait_ref.path.span, E0225, + "only the builtin traits can be used as closure or object bounds"); + } + + let region_bound = + self.compute_object_lifetime_bound(span, + ®ion_bounds, + principal_trait_ref, + builtin_bounds); + + let region_bound = match region_bound { + Some(r) => r, + None => { + match rscope.object_lifetime_default(span) { + Some(r) => r, + None => { + span_err!(self.tcx().sess, span, E0228, + "the lifetime bound for this object type cannot be deduced \ + from context; please supply an explicit bound"); + ty::ReStatic + } } } - } - }; + }; - debug!("region_bound: {:?}", region_bound); + debug!("region_bound: {:?}", region_bound); - ty::ExistentialBounds::new(region_bound, builtin_bounds, projection_bounds) -} + ty::ExistentialBounds::new(region_bound, builtin_bounds, projection_bounds) + } -/// Given the bounds on an object, determines what single region bound -/// (if any) we can use to summarize this type. The basic idea is that we will use the bound the -/// user provided, if they provided one, and otherwise search the supertypes of trait bounds for -/// region bounds. It may be that we can derive no bound at all, in which case we return `None`. -fn compute_object_lifetime_bound<'tcx>( - this: &AstConv<'tcx>, - span: Span, - explicit_region_bounds: &[&hir::Lifetime], - principal_trait_ref: ty::PolyTraitRef<'tcx>, - builtin_bounds: ty::BuiltinBounds) - -> Option // if None, use the default -{ - let tcx = this.tcx(); + /// Given the bounds on an object, determines what single region bound (if any) we can + /// use to summarize this type. The basic idea is that we will use the bound the user + /// provided, if they provided one, and otherwise search the supertypes of trait bounds + /// for region bounds. It may be that we can derive no bound at all, in which case + /// we return `None`. + fn compute_object_lifetime_bound(&self, + span: Span, + explicit_region_bounds: &[&hir::Lifetime], + principal_trait_ref: ty::PolyTraitRef<'tcx>, + builtin_bounds: ty::BuiltinBounds) + -> Option // if None, use the default + { + let tcx = self.tcx(); - debug!("compute_opt_region_bound(explicit_region_bounds={:?}, \ - principal_trait_ref={:?}, builtin_bounds={:?})", - explicit_region_bounds, - principal_trait_ref, - builtin_bounds); + debug!("compute_opt_region_bound(explicit_region_bounds={:?}, \ + principal_trait_ref={:?}, builtin_bounds={:?})", + explicit_region_bounds, + principal_trait_ref, + builtin_bounds); - if explicit_region_bounds.len() > 1 { - span_err!(tcx.sess, explicit_region_bounds[1].span, E0226, - "only a single explicit lifetime bound is permitted"); - } + if explicit_region_bounds.len() > 1 { + span_err!(tcx.sess, explicit_region_bounds[1].span, E0226, + "only a single explicit lifetime bound is permitted"); + } - if !explicit_region_bounds.is_empty() { - // Explicitly specified region bound. Use that. - let r = explicit_region_bounds[0]; - return Some(ast_region_to_region(tcx, r)); - } + if !explicit_region_bounds.is_empty() { + // Explicitly specified region bound. Use that. 
+ let r = explicit_region_bounds[0]; + return Some(ast_region_to_region(tcx, r)); + } - if let Err(ErrorReported) = this.ensure_super_predicates(span,principal_trait_ref.def_id()) { - return Some(ty::ReStatic); - } + if let Err(ErrorReported) = + self.ensure_super_predicates(span, principal_trait_ref.def_id()) { + return Some(ty::ReStatic); + } - // No explicit region bound specified. Therefore, examine trait - // bounds and see if we can derive region bounds from those. - let derived_region_bounds = - object_region_bounds(tcx, &principal_trait_ref, builtin_bounds); + // No explicit region bound specified. Therefore, examine trait + // bounds and see if we can derive region bounds from those. + let derived_region_bounds = + object_region_bounds(tcx, &principal_trait_ref, builtin_bounds); - // If there are no derived region bounds, then report back that we - // can find no region bound. The caller will use the default. - if derived_region_bounds.is_empty() { - return None; - } + // If there are no derived region bounds, then report back that we + // can find no region bound. The caller will use the default. + if derived_region_bounds.is_empty() { + return None; + } - // If any of the derived region bounds are 'static, that is always - // the best choice. - if derived_region_bounds.iter().any(|r| ty::ReStatic == *r) { - return Some(ty::ReStatic); - } + // If any of the derived region bounds are 'static, that is always + // the best choice. + if derived_region_bounds.iter().any(|r| ty::ReStatic == *r) { + return Some(ty::ReStatic); + } - // Determine whether there is exactly one unique region in the set - // of derived region bounds. If so, use that. Otherwise, report an - // error. - let r = derived_region_bounds[0]; - if derived_region_bounds[1..].iter().any(|r1| r != *r1) { - span_err!(tcx.sess, span, E0227, - "ambiguous lifetime bound, explicit lifetime bound required"); + // Determine whether there is exactly one unique region in the set + // of derived region bounds. If so, use that. Otherwise, report an + // error. + let r = derived_region_bounds[0]; + if derived_region_bounds[1..].iter().any(|r1| r != *r1) { + span_err!(tcx.sess, span, E0227, + "ambiguous lifetime bound, explicit lifetime bound required"); + } + return Some(r); } - return Some(r); } pub struct PartitionedBounds<'a> { @@ -2149,10 +2199,10 @@ pub struct PartitionedBounds<'a> { /// Divides a list of bounds from the AST into three groups: builtin bounds (Copy, Sized etc), /// general trait bounds, and region bounds. 
-pub fn partition_bounds<'a>(tcx: &TyCtxt, - _span: Span, - ast_bounds: &'a [hir::TyParamBound]) - -> PartitionedBounds<'a> +pub fn partition_bounds<'a, 'b, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + _span: Span, + ast_bounds: &'b [hir::TyParamBound]) + -> PartitionedBounds<'b> { let mut builtin_bounds = ty::BuiltinBounds::empty(); let mut region_bounds = Vec::new(); @@ -2198,15 +2248,7 @@ pub fn partition_bounds<'a>(tcx: &TyCtxt, } } -fn prohibit_projections<'tcx>(tcx: &TyCtxt<'tcx>, - bindings: &[ConvertedBinding<'tcx>]) -{ - for binding in bindings.iter().take(1) { - prohibit_projection(tcx, binding.span); - } -} - -fn check_type_argument_count(tcx: &TyCtxt, span: Span, supplied: usize, +fn check_type_argument_count(tcx: TyCtxt, span: Span, supplied: usize, required: usize, accepted: usize) { if supplied < required { let expected = if required < accepted { @@ -2231,7 +2273,7 @@ fn check_type_argument_count(tcx: &TyCtxt, span: Span, supplied: usize, } } -fn report_lifetime_number_error(tcx: &TyCtxt, span: Span, number: usize, expected: usize) { +fn report_lifetime_number_error(tcx: TyCtxt, span: Span, number: usize, expected: usize) { span_err!(tcx.sess, span, E0107, "wrong number of lifetime parameters: expected {}, found {}", expected, number); @@ -2247,16 +2289,14 @@ pub struct Bounds<'tcx> { pub projection_bounds: Vec>, } -impl<'tcx> Bounds<'tcx> { - pub fn predicates(&self, - tcx: &TyCtxt<'tcx>, - param_ty: Ty<'tcx>) - -> Vec> +impl<'a, 'gcx, 'tcx> Bounds<'tcx> { + pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, param_ty: Ty<'tcx>) + -> Vec> { let mut vec = Vec::new(); for builtin_bound in &self.builtin_bounds { - match traits::trait_ref_for_builtin_bound(tcx, builtin_bound, param_ty) { + match tcx.trait_ref_for_builtin_bound(builtin_bound, param_ty) { Ok(trait_ref) => { vec.push(trait_ref.to_predicate()); } Err(ErrorReported) => { } } diff --git a/src/librustc_typeck/check/_match.rs b/src/librustc_typeck/check/_match.rs index 8dbd6496b6..10c8ea84bf 100644 --- a/src/librustc_typeck/check/_match.rs +++ b/src/librustc_typeck/check/_match.rs @@ -14,18 +14,14 @@ use hir::pat_util::{PatIdMap, pat_id_map, pat_is_binding}; use hir::pat_util::pat_is_resolved_const; use rustc::ty::subst::Substs; use rustc::ty::{self, Ty, TypeFoldable, LvaluePreference}; -use check::{check_expr, check_expr_has_type, check_expr_with_expectation}; -use check::{demand, FnCtxt, Expectation}; -use check::{check_expr_with_lvalue_pref}; -use check::{instantiate_path, resolve_ty_and_def_ufcs, structurally_resolved_type}; -use check::coercion; +use check::{FnCtxt, Expectation}; use lint; -use require_same_types; use util::nodemap::FnvHashMap; use session::Session; use std::cmp; use std::collections::hash_map::Entry::{Occupied, Vacant}; +use std::ops::Deref; use syntax::ast; use syntax::codemap::{Span, Spanned}; use syntax::ptr::P; @@ -33,791 +29,794 @@ use syntax::ptr::P; use rustc::hir::{self, PatKind}; use rustc::hir::print as pprust; -pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, - pat: &'tcx hir::Pat, - expected: Ty<'tcx>) -{ - let fcx = pcx.fcx; - let tcx = pcx.fcx.ccx.tcx; +pub struct PatCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + pub fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, + pub map: PatIdMap, +} - debug!("check_pat(pat={:?},expected={:?})", - pat, - expected); +impl<'a, 'gcx, 'tcx> Deref for PatCtxt<'a, 'gcx, 'tcx> { + type Target = FnCtxt<'a, 'gcx, 'tcx>; + fn deref(&self) -> &Self::Target { + self.fcx + } +} - match pat.node { - PatKind::Wild => { - fcx.write_ty(pat.id, expected); - } - PatKind::Lit(ref lt) 
=> { - check_expr(fcx, <); - let expr_ty = fcx.expr_ty(<); - - // Byte string patterns behave the same way as array patterns - // They can denote both statically and dynamically sized byte arrays - let mut pat_ty = expr_ty; - if let hir::ExprLit(ref lt) = lt.node { - if let ast::LitKind::ByteStr(_) = lt.node { - let expected_ty = structurally_resolved_type(fcx, pat.span, expected); - if let ty::TyRef(_, mt) = expected_ty.sty { - if let ty::TySlice(_) = mt.ty.sty { - pat_ty = tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic), - tcx.mk_slice(tcx.types.u8)) +// This function exists due to the warning "diagnostic code E0164 already used" +fn bad_struct_kind_err(sess: &Session, pat: &hir::Pat, path: &hir::Path, lint: bool) { + let name = pprust::path_to_string(path); + let msg = format!("`{}` does not name a tuple variant or a tuple struct", name); + if lint { + sess.add_lint(lint::builtin::MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT, + pat.id, + pat.span, + msg); + } else { + span_err!(sess, pat.span, E0164, "{}", msg); + } +} + +impl<'a, 'gcx, 'tcx> PatCtxt<'a, 'gcx, 'tcx> { + pub fn check_pat(&self, pat: &'gcx hir::Pat, expected: Ty<'tcx>) { + let tcx = self.tcx; + + debug!("check_pat(pat={:?},expected={:?})", pat, expected); + + match pat.node { + PatKind::Wild => { + self.write_ty(pat.id, expected); + } + PatKind::Lit(ref lt) => { + self.check_expr(<); + let expr_ty = self.expr_ty(<); + + // Byte string patterns behave the same way as array patterns + // They can denote both statically and dynamically sized byte arrays + let mut pat_ty = expr_ty; + if let hir::ExprLit(ref lt) = lt.node { + if let ast::LitKind::ByteStr(_) = lt.node { + let expected_ty = self.structurally_resolved_type(pat.span, expected); + if let ty::TyRef(_, mt) = expected_ty.sty { + if let ty::TySlice(_) = mt.ty.sty { + pat_ty = tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic), + tcx.mk_slice(tcx.types.u8)) + } } } } - } - fcx.write_ty(pat.id, pat_ty); - - // somewhat surprising: in this case, the subtyping - // relation goes the opposite way as the other - // cases. Actually what we really want is not a subtyping - // relation at all but rather that there exists a LUB (so - // that they can be compared). However, in practice, - // constants are always scalars or strings. For scalars - // subtyping is irrelevant, and for strings `expr_ty` is - // type is `&'static str`, so if we say that - // - // &'static str <: expected - // - // that's equivalent to there existing a LUB. - demand::suptype(fcx, pat.span, expected, pat_ty); - } - PatKind::Range(ref begin, ref end) => { - check_expr(fcx, begin); - check_expr(fcx, end); - - let lhs_ty = fcx.expr_ty(begin); - let rhs_ty = fcx.expr_ty(end); - - // Check that both end-points are of numeric or char type. - let numeric_or_char = |ty: Ty| ty.is_numeric() || ty.is_char(); - let lhs_compat = numeric_or_char(lhs_ty); - let rhs_compat = numeric_or_char(rhs_ty); - - if !lhs_compat || !rhs_compat { - let span = if !lhs_compat && !rhs_compat { - pat.span - } else if !lhs_compat { - begin.span - } else { - end.span - }; - - // Note: spacing here is intentional, we want a space before "start" and "end". - span_err!(tcx.sess, span, E0029, - "only char and numeric types are allowed in range patterns\n \ - start type: {}\n end type: {}", - fcx.infcx().ty_to_string(lhs_ty), - fcx.infcx().ty_to_string(rhs_ty) - ); - return; + self.write_ty(pat.id, pat_ty); + + // somewhat surprising: in this case, the subtyping + // relation goes the opposite way as the other + // cases. 
Actually what we really want is not a subtyping + // relation at all but rather that there exists a LUB (so + // that they can be compared). However, in practice, + // constants are always scalars or strings. For scalars + // subtyping is irrelevant, and for strings `expr_ty` is + // type is `&'static str`, so if we say that + // + // &'static str <: expected + // + // that's equivalent to there existing a LUB. + self.demand_suptype(pat.span, expected, pat_ty); } + PatKind::Range(ref begin, ref end) => { + self.check_expr(begin); + self.check_expr(end); + + let lhs_ty = self.expr_ty(begin); + let rhs_ty = self.expr_ty(end); + + // Check that both end-points are of numeric or char type. + let numeric_or_char = |ty: Ty| ty.is_numeric() || ty.is_char(); + let lhs_compat = numeric_or_char(lhs_ty); + let rhs_compat = numeric_or_char(rhs_ty); + + if !lhs_compat || !rhs_compat { + let span = if !lhs_compat && !rhs_compat { + pat.span + } else if !lhs_compat { + begin.span + } else { + end.span + }; + + // Note: spacing here is intentional, we want a space before "start" and "end". + span_err!(tcx.sess, span, E0029, + "only char and numeric types are allowed in range patterns\n \ + start type: {}\n end type: {}", + self.ty_to_string(lhs_ty), + self.ty_to_string(rhs_ty) + ); + return; + } - // Check that the types of the end-points can be unified. - let types_unify = require_same_types( - tcx, Some(fcx.infcx()), false, pat.span, rhs_ty, lhs_ty, - || "mismatched types in range".to_string() - ); + // Check that the types of the end-points can be unified. + let types_unify = self.require_same_types(pat.span, rhs_ty, lhs_ty, + "mismatched types in range"); - // It's ok to return without a message as `require_same_types` prints an error. - if !types_unify { - return; - } + // It's ok to return without a message as `require_same_types` prints an error. + if !types_unify { + return; + } - // Now that we know the types can be unified we find the unified type and use - // it to type the entire expression. - let common_type = fcx.infcx().resolve_type_vars_if_possible(&lhs_ty); + // Now that we know the types can be unified we find the unified type and use + // it to type the entire expression. + let common_type = self.resolve_type_vars_if_possible(&lhs_ty); - fcx.write_ty(pat.id, common_type); + self.write_ty(pat.id, common_type); - // subtyping doesn't matter here, as the value is some kind of scalar - demand::eqtype(fcx, pat.span, expected, lhs_ty); - } - PatKind::Path(..) | PatKind::Ident(..) - if pat_is_resolved_const(&tcx.def_map.borrow(), pat) => { - if let Some(pat_def) = tcx.def_map.borrow().get(&pat.id) { - let const_did = pat_def.def_id(); - let const_scheme = tcx.lookup_item_type(const_did); - assert!(const_scheme.generics.is_empty()); - let const_ty = pcx.fcx.instantiate_type_scheme(pat.span, - &Substs::empty(), - &const_scheme.ty); - fcx.write_ty(pat.id, const_ty); - - // FIXME(#20489) -- we should limit the types here to scalars or something! - - // As with PatKind::Lit, what we really want here is that there - // exist a LUB, but for the cases that can occur, subtype - // is good enough. 
- demand::suptype(fcx, pat.span, expected, const_ty); - } else { - fcx.write_error(pat.id); + // subtyping doesn't matter here, as the value is some kind of scalar + self.demand_eqtype(pat.span, expected, lhs_ty); } - } - PatKind::Ident(bm, ref path, ref sub) if pat_is_binding(&tcx.def_map.borrow(), pat) => { - let typ = fcx.local_ty(pat.span, pat.id); - match bm { - hir::BindByRef(mutbl) => { - // if the binding is like - // ref x | ref const x | ref mut x - // then `x` is assigned a value of type `&M T` where M is the mutability - // and T is the expected type. - let region_var = fcx.infcx().next_region_var(infer::PatternRegion(pat.span)); - let mt = ty::TypeAndMut { ty: expected, mutbl: mutbl }; - let region_ty = tcx.mk_ref(tcx.mk_region(region_var), mt); - - // `x` is assigned a value of type `&M T`, hence `&M T <: typeof(x)` is - // required. However, we use equality, which is stronger. See (*) for - // an explanation. - demand::eqtype(fcx, pat.span, region_ty, typ); - } - // otherwise the type of x is the expected type T - hir::BindByValue(_) => { - // As above, `T <: typeof(x)` is required but we - // use equality, see (*) below. - demand::eqtype(fcx, pat.span, expected, typ); + PatKind::Path(..) | PatKind::Ident(..) + if pat_is_resolved_const(&tcx.def_map.borrow(), pat) => { + if let Some(pat_def) = tcx.def_map.borrow().get(&pat.id) { + let const_did = pat_def.def_id(); + let const_scheme = tcx.lookup_item_type(const_did); + assert!(const_scheme.generics.is_empty()); + let const_ty = self.instantiate_type_scheme(pat.span, + &Substs::empty(), + &const_scheme.ty); + self.write_ty(pat.id, const_ty); + + // FIXME(#20489) -- we should limit the types here to scalars or something! + + // As with PatKind::Lit, what we really want here is that there + // exist a LUB, but for the cases that can occur, subtype + // is good enough. + self.demand_suptype(pat.span, expected, const_ty); + } else { + self.write_error(pat.id); } } + PatKind::Ident(bm, ref path, ref sub) + if pat_is_binding(&tcx.def_map.borrow(), pat) => { + let typ = self.local_ty(pat.span, pat.id); + match bm { + hir::BindByRef(mutbl) => { + // if the binding is like + // ref x | ref const x | ref mut x + // then `x` is assigned a value of type `&M T` where M is the mutability + // and T is the expected type. + let region_var = self.next_region_var(infer::PatternRegion(pat.span)); + let mt = ty::TypeAndMut { ty: expected, mutbl: mutbl }; + let region_ty = tcx.mk_ref(tcx.mk_region(region_var), mt); + + // `x` is assigned a value of type `&M T`, hence `&M T <: typeof(x)` is + // required. However, we use equality, which is stronger. See (*) for + // an explanation. + self.demand_eqtype(pat.span, region_ty, typ); + } + // otherwise the type of x is the expected type T + hir::BindByValue(_) => { + // As above, `T <: typeof(x)` is required but we + // use equality, see (*) below. 
+ self.demand_eqtype(pat.span, expected, typ); + } + } - fcx.write_ty(pat.id, typ); + self.write_ty(pat.id, typ); - // if there are multiple arms, make sure they all agree on - // what the type of the binding `x` ought to be - if let Some(&canon_id) = pcx.map.get(&path.node.name) { - if canon_id != pat.id { - let ct = fcx.local_ty(pat.span, canon_id); - demand::eqtype(fcx, pat.span, ct, typ); - } + // if there are multiple arms, make sure they all agree on + // what the type of the binding `x` ought to be + if let Some(&canon_id) = self.map.get(&path.node) { + if canon_id != pat.id { + let ct = self.local_ty(pat.span, canon_id); + self.demand_eqtype(pat.span, ct, typ); + } - if let Some(ref p) = *sub { - check_pat(pcx, &p, expected); + if let Some(ref p) = *sub { + self.check_pat(&p, expected); + } } } - } - PatKind::Ident(_, ref path, _) => { - let path = hir::Path::from_ident(path.span, path.node); - check_pat_enum(pcx, pat, &path, Some(&[]), expected, false); - } - PatKind::TupleStruct(ref path, ref subpats) => { - check_pat_enum(pcx, pat, path, subpats.as_ref().map(|v| &v[..]), expected, true); - } - PatKind::Path(ref path) => { - check_pat_enum(pcx, pat, path, Some(&[]), expected, false); - } - PatKind::QPath(ref qself, ref path) => { - let self_ty = fcx.to_ty(&qself.ty); - let path_res = if let Some(&d) = tcx.def_map.borrow().get(&pat.id) { - if d.base_def == Def::Err { - fcx.write_error(pat.id); + PatKind::Ident(_, ref path, _) => { + let path = hir::Path::from_name(path.span, path.node); + self.check_pat_enum(pat, &path, Some(&[]), expected, false); + } + PatKind::TupleStruct(ref path, ref subpats) => { + self.check_pat_enum(pat, path, subpats.as_ref().map(|v| &v[..]), expected, true); + } + PatKind::Path(ref path) => { + self.check_pat_enum(pat, path, Some(&[]), expected, false); + } + PatKind::QPath(ref qself, ref path) => { + let self_ty = self.to_ty(&qself.ty); + let path_res = if let Some(&d) = tcx.def_map.borrow().get(&pat.id) { + if d.base_def == Def::Err { + self.set_tainted_by_errors(); + self.write_error(pat.id); + return; + } + d + } else if qself.position == 0 { + // This is just a sentinel for finish_resolving_def_to_ty. + let sentinel = self.tcx.map.local_def_id(ast::CRATE_NODE_ID); + def::PathResolution { + base_def: Def::Mod(sentinel), + depth: path.segments.len() + } + } else { + debug!("unbound path {:?}", pat); + self.write_error(pat.id); return; + }; + if let Some((opt_ty, segments, def)) = + self.resolve_ty_and_def_ufcs(path_res, Some(self_ty), + path, pat.span, pat.id) { + if self.check_assoc_item_is_const(def, pat.span) { + let scheme = tcx.lookup_item_type(def.def_id()); + let predicates = tcx.lookup_predicates(def.def_id()); + self.instantiate_path(segments, scheme, &predicates, + opt_ty, def, pat.span, pat.id); + let const_ty = self.node_ty(pat.id); + self.demand_suptype(pat.span, expected, const_ty); + } else { + self.write_error(pat.id) + } } - d - } else if qself.position == 0 { - // This is just a sentinel for finish_resolving_def_to_ty. 
- let sentinel = fcx.tcx().map.local_def_id(ast::CRATE_NODE_ID); - def::PathResolution { - base_def: Def::Mod(sentinel), - depth: path.segments.len() + } + PatKind::Struct(ref path, ref fields, etc) => { + self.check_pat_struct(pat, path, fields, etc, expected); + } + PatKind::Tup(ref elements) => { + let element_tys: Vec<_> = + (0..elements.len()).map(|_| self.next_ty_var()).collect(); + let pat_ty = tcx.mk_tup(element_tys.clone()); + self.write_ty(pat.id, pat_ty); + self.demand_eqtype(pat.span, expected, pat_ty); + for (element_pat, element_ty) in elements.iter().zip(element_tys) { + self.check_pat(&element_pat, element_ty); } - } else { - debug!("unbound path {:?}", pat); - fcx.write_error(pat.id); - return; - }; - if let Some((opt_ty, segments, def)) = - resolve_ty_and_def_ufcs(fcx, path_res, Some(self_ty), - path, pat.span, pat.id) { - if check_assoc_item_is_const(pcx, def, pat.span) { - let scheme = tcx.lookup_item_type(def.def_id()); - let predicates = tcx.lookup_predicates(def.def_id()); - instantiate_path(fcx, segments, - scheme, &predicates, - opt_ty, def, pat.span, pat.id); - let const_ty = fcx.node_ty(pat.id); - demand::suptype(fcx, pat.span, expected, const_ty); + } + PatKind::Box(ref inner) => { + let inner_ty = self.next_ty_var(); + let uniq_ty = tcx.mk_box(inner_ty); + + if self.check_dereferencable(pat.span, expected, &inner) { + // Here, `demand::subtype` is good enough, but I don't + // think any errors can be introduced by using + // `demand::eqtype`. + self.demand_eqtype(pat.span, expected, uniq_ty); + self.write_ty(pat.id, uniq_ty); + self.check_pat(&inner, inner_ty); } else { - fcx.write_error(pat.id) + self.write_error(pat.id); + self.check_pat(&inner, tcx.types.err); } } - } - PatKind::Struct(ref path, ref fields, etc) => { - check_pat_struct(pcx, pat, path, fields, etc, expected); - } - PatKind::Tup(ref elements) => { - let element_tys: Vec<_> = - (0..elements.len()).map(|_| fcx.infcx().next_ty_var()) - .collect(); - let pat_ty = tcx.mk_tup(element_tys.clone()); - fcx.write_ty(pat.id, pat_ty); - demand::eqtype(fcx, pat.span, expected, pat_ty); - for (element_pat, element_ty) in elements.iter().zip(element_tys) { - check_pat(pcx, &element_pat, element_ty); - } - } - PatKind::Box(ref inner) => { - let inner_ty = fcx.infcx().next_ty_var(); - let uniq_ty = tcx.mk_box(inner_ty); - - if check_dereferencable(pcx, pat.span, expected, &inner) { - // Here, `demand::subtype` is good enough, but I don't - // think any errors can be introduced by using - // `demand::eqtype`. - demand::eqtype(fcx, pat.span, expected, uniq_ty); - fcx.write_ty(pat.id, uniq_ty); - check_pat(pcx, &inner, inner_ty); - } else { - fcx.write_error(pat.id); - check_pat(pcx, &inner, tcx.types.err); - } - } - PatKind::Ref(ref inner, mutbl) => { - let expected = fcx.infcx().shallow_resolve(expected); - if check_dereferencable(pcx, pat.span, expected, &inner) { - // `demand::subtype` would be good enough, but using - // `eqtype` turns out to be equally general. See (*) - // below for details. + PatKind::Ref(ref inner, mutbl) => { + let expected = self.shallow_resolve(expected); + if self.check_dereferencable(pat.span, expected, &inner) { + // `demand::subtype` would be good enough, but using + // `eqtype` turns out to be equally general. See (*) + // below for details. + + // Take region, inner-type from expected type if we + // can, to avoid creating needless variables. This + // also helps with the bad interactions of the given + // hack detailed in (*) below. 
+ let (rptr_ty, inner_ty) = match expected.sty { + ty::TyRef(_, mt) if mt.mutbl == mutbl => { + (expected, mt.ty) + } + _ => { + let inner_ty = self.next_ty_var(); + let mt = ty::TypeAndMut { ty: inner_ty, mutbl: mutbl }; + let region = self.next_region_var(infer::PatternRegion(pat.span)); + let rptr_ty = tcx.mk_ref(tcx.mk_region(region), mt); + self.demand_eqtype(pat.span, expected, rptr_ty); + (rptr_ty, inner_ty) + } + }; - // Take region, inner-type from expected type if we - // can, to avoid creating needless variables. This - // also helps with the bad interactions of the given - // hack detailed in (*) below. - let (rptr_ty, inner_ty) = match expected.sty { - ty::TyRef(_, mt) if mt.mutbl == mutbl => { - (expected, mt.ty) - } + self.write_ty(pat.id, rptr_ty); + self.check_pat(&inner, inner_ty); + } else { + self.write_error(pat.id); + self.check_pat(&inner, tcx.types.err); + } + } + PatKind::Vec(ref before, ref slice, ref after) => { + let expected_ty = self.structurally_resolved_type(pat.span, expected); + let inner_ty = self.next_ty_var(); + let pat_ty = match expected_ty.sty { + ty::TyArray(_, size) => tcx.mk_array(inner_ty, { + let min_len = before.len() + after.len(); + match *slice { + Some(_) => cmp::max(min_len, size), + None => min_len + } + }), _ => { - let inner_ty = fcx.infcx().next_ty_var(); - let mt = ty::TypeAndMut { ty: inner_ty, mutbl: mutbl }; - let region = fcx.infcx().next_region_var(infer::PatternRegion(pat.span)); - let rptr_ty = tcx.mk_ref(tcx.mk_region(region), mt); - demand::eqtype(fcx, pat.span, expected, rptr_ty); - (rptr_ty, inner_ty) + let region = self.next_region_var(infer::PatternRegion(pat.span)); + tcx.mk_ref(tcx.mk_region(region), ty::TypeAndMut { + ty: tcx.mk_slice(inner_ty), + mutbl: expected_ty.builtin_deref(true, ty::NoPreference) + .map_or(hir::MutImmutable, |mt| mt.mutbl) + }) } }; - fcx.write_ty(pat.id, rptr_ty); - check_pat(pcx, &inner, inner_ty); - } else { - fcx.write_error(pat.id); - check_pat(pcx, &inner, tcx.types.err); - } - } - PatKind::Vec(ref before, ref slice, ref after) => { - let expected_ty = structurally_resolved_type(fcx, pat.span, expected); - let inner_ty = fcx.infcx().next_ty_var(); - let pat_ty = match expected_ty.sty { - ty::TyArray(_, size) => tcx.mk_array(inner_ty, { - let min_len = before.len() + after.len(); - match *slice { - Some(_) => cmp::max(min_len, size), - None => min_len - } - }), - _ => { - let region = fcx.infcx().next_region_var(infer::PatternRegion(pat.span)); - tcx.mk_ref(tcx.mk_region(region), ty::TypeAndMut { - ty: tcx.mk_slice(inner_ty), - mutbl: expected_ty.builtin_deref(true, ty::NoPreference).map(|mt| mt.mutbl) - .unwrap_or(hir::MutImmutable) - }) - } - }; + self.write_ty(pat.id, pat_ty); - fcx.write_ty(pat.id, pat_ty); - - // `demand::subtype` would be good enough, but using - // `eqtype` turns out to be equally general. See (*) - // below for details. - demand::eqtype(fcx, pat.span, expected, pat_ty); + // `demand::subtype` would be good enough, but using + // `eqtype` turns out to be equally general. See (*) + // below for details. 
+ self.demand_eqtype(pat.span, expected, pat_ty); - for elt in before { - check_pat(pcx, &elt, inner_ty); - } - if let Some(ref slice) = *slice { - let region = fcx.infcx().next_region_var(infer::PatternRegion(pat.span)); - let mutbl = expected_ty.builtin_deref(true, ty::NoPreference) - .map_or(hir::MutImmutable, |mt| mt.mutbl); + for elt in before { + self.check_pat(&elt, inner_ty); + } + if let Some(ref slice) = *slice { + let region = self.next_region_var(infer::PatternRegion(pat.span)); + let mutbl = expected_ty.builtin_deref(true, ty::NoPreference) + .map_or(hir::MutImmutable, |mt| mt.mutbl); - let slice_ty = tcx.mk_ref(tcx.mk_region(region), ty::TypeAndMut { - ty: tcx.mk_slice(inner_ty), - mutbl: mutbl - }); - check_pat(pcx, &slice, slice_ty); - } - for elt in after { - check_pat(pcx, &elt, inner_ty); + let slice_ty = tcx.mk_ref(tcx.mk_region(region), ty::TypeAndMut { + ty: tcx.mk_slice(inner_ty), + mutbl: mutbl + }); + self.check_pat(&slice, slice_ty); + } + for elt in after { + self.check_pat(&elt, inner_ty); + } } } - } - - // (*) In most of the cases above (literals and constants being - // the exception), we relate types using strict equality, evewn - // though subtyping would be sufficient. There are a few reasons - // for this, some of which are fairly subtle and which cost me - // (nmatsakis) an hour or two debugging to remember, so I thought - // I'd write them down this time. - // - // 1. There is no loss of expressiveness here, though it does - // cause some inconvenience. What we are saying is that the type - // of `x` becomes *exactly* what is expected. This can cause unnecessary - // errors in some cases, such as this one: - // it will cause errors in a case like this: - // - // ``` - // fn foo<'x>(x: &'x int) { - // let a = 1; - // let mut z = x; - // z = &a; - // } - // ``` - // - // The reason we might get an error is that `z` might be - // assigned a type like `&'x int`, and then we would have - // a problem when we try to assign `&a` to `z`, because - // the lifetime of `&a` (i.e., the enclosing block) is - // shorter than `'x`. - // - // HOWEVER, this code works fine. The reason is that the - // expected type here is whatever type the user wrote, not - // the initializer's type. In this case the user wrote - // nothing, so we are going to create a type variable `Z`. - // Then we will assign the type of the initializer (`&'x - // int`) as a subtype of `Z`: `&'x int <: Z`. And hence we - // will instantiate `Z` as a type `&'0 int` where `'0` is - // a fresh region variable, with the constraint that `'x : - // '0`. So basically we're all set. - // - // Note that there are two tests to check that this remains true - // (`regions-reassign-{match,let}-bound-pointer.rs`). - // - // 2. Things go horribly wrong if we use subtype. The reason for - // THIS is a fairly subtle case involving bound regions. See the - // `givens` field in `region_inference`, as well as the test - // `regions-relate-bound-regions-on-closures-to-inference-variables.rs`, - // for details. Short version is that we must sometimes detect - // relationships between specific region variables and regions - // bound in a closure signature, and that detection gets thrown - // off when we substitute fresh region variables here to enable - // subtyping. -} -fn check_assoc_item_is_const(pcx: &pat_ctxt, def: Def, span: Span) -> bool { - match def { - Def::AssociatedConst(..) => true, - Def::Method(..) 
=> { - span_err!(pcx.fcx.ccx.tcx.sess, span, E0327, - "associated items in match patterns must be constants"); - false - } - _ => { - span_bug!(span, "non-associated item in check_assoc_item_is_const"); - } + // (*) In most of the cases above (literals and constants being + // the exception), we relate types using strict equality, evewn + // though subtyping would be sufficient. There are a few reasons + // for this, some of which are fairly subtle and which cost me + // (nmatsakis) an hour or two debugging to remember, so I thought + // I'd write them down this time. + // + // 1. There is no loss of expressiveness here, though it does + // cause some inconvenience. What we are saying is that the type + // of `x` becomes *exactly* what is expected. This can cause unnecessary + // errors in some cases, such as this one: + // it will cause errors in a case like this: + // + // ``` + // fn foo<'x>(x: &'x int) { + // let a = 1; + // let mut z = x; + // z = &a; + // } + // ``` + // + // The reason we might get an error is that `z` might be + // assigned a type like `&'x int`, and then we would have + // a problem when we try to assign `&a` to `z`, because + // the lifetime of `&a` (i.e., the enclosing block) is + // shorter than `'x`. + // + // HOWEVER, this code works fine. The reason is that the + // expected type here is whatever type the user wrote, not + // the initializer's type. In this case the user wrote + // nothing, so we are going to create a type variable `Z`. + // Then we will assign the type of the initializer (`&'x + // int`) as a subtype of `Z`: `&'x int <: Z`. And hence we + // will instantiate `Z` as a type `&'0 int` where `'0` is + // a fresh region variable, with the constraint that `'x : + // '0`. So basically we're all set. + // + // Note that there are two tests to check that this remains true + // (`regions-reassign-{match,let}-bound-pointer.rs`). + // + // 2. Things go horribly wrong if we use subtype. The reason for + // THIS is a fairly subtle case involving bound regions. See the + // `givens` field in `region_inference`, as well as the test + // `regions-relate-bound-regions-on-closures-to-inference-variables.rs`, + // for details. Short version is that we must sometimes detect + // relationships between specific region variables and regions + // bound in a closure signature, and that detection gets thrown + // off when we substitute fresh region variables here to enable + // subtyping. } -} -pub fn check_dereferencable<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, - span: Span, expected: Ty<'tcx>, - inner: &hir::Pat) -> bool { - let fcx = pcx.fcx; - let tcx = pcx.fcx.ccx.tcx; - if pat_is_binding(&tcx.def_map.borrow(), inner) { - let expected = fcx.infcx().shallow_resolve(expected); - expected.builtin_deref(true, ty::NoPreference).map_or(true, |mt| match mt.ty.sty { - ty::TyTrait(_) => { - // This is "x = SomeTrait" being reduced from - // "let &x = &SomeTrait" or "let box x = Box", an error. - span_err!(tcx.sess, span, E0033, - "type `{}` cannot be dereferenced", - fcx.infcx().ty_to_string(expected)); + fn check_assoc_item_is_const(&self, def: Def, span: Span) -> bool { + match def { + Def::AssociatedConst(..) => true, + Def::Method(..) 
=> { + span_err!(self.tcx.sess, span, E0327, + "associated items in match patterns must be constants"); false } - _ => true - }) - } else { - true - } -} - -pub fn check_match<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &'tcx hir::Expr, - discrim: &'tcx hir::Expr, - arms: &'tcx [hir::Arm], - expected: Expectation<'tcx>, - match_src: hir::MatchSource) { - let tcx = fcx.ccx.tcx; - - // Not entirely obvious: if matches may create ref bindings, we - // want to use the *precise* type of the discriminant, *not* some - // supertype, as the "discriminant type" (issue #23116). - let contains_ref_bindings = arms.iter() - .filter_map(|a| tcx.arm_contains_ref_binding(a)) - .max_by_key(|m| match *m { - hir::MutMutable => 1, - hir::MutImmutable => 0, - }); - let discrim_ty; - if let Some(m) = contains_ref_bindings { - check_expr_with_lvalue_pref(fcx, discrim, LvaluePreference::from_mutbl(m)); - discrim_ty = fcx.expr_ty(discrim); - } else { - // ...but otherwise we want to use any supertype of the - // discriminant. This is sort of a workaround, see note (*) in - // `check_pat` for some details. - discrim_ty = fcx.infcx().next_ty_var(); - check_expr_has_type(fcx, discrim, discrim_ty); - }; - - // Typecheck the patterns first, so that we get types for all the - // bindings. - for arm in arms { - let mut pcx = pat_ctxt { - fcx: fcx, - map: pat_id_map(&tcx.def_map, &arm.pats[0]), - }; - for p in &arm.pats { - check_pat(&mut pcx, &p, discrim_ty); + _ => { + span_bug!(span, "non-associated item in check_assoc_item_is_const"); + } } } - // Now typecheck the blocks. - // - // The result of the match is the common supertype of all the - // arms. Start out the value as bottom, since it's the, well, - // bottom the type lattice, and we'll be moving up the lattice as - // we process each arm. (Note that any match with 0 arms is matching - // on any empty type and is therefore unreachable; should the flow - // of execution reach it, we will panic, so bottom is an appropriate - // type in that case) - let expected = expected.adjust_for_branches(fcx); - let mut result_ty = fcx.infcx().next_diverging_ty_var(); - let coerce_first = match expected { - // We don't coerce to `()` so that if the match expression is a - // statement it's branches can have any consistent type. That allows - // us to give better error messages (pointing to a usually better - // arm for inconsistent arms or to the whole match when a `()` type - // is required). - Expectation::ExpectHasType(ety) if ety != fcx.tcx().mk_nil() => { - ety - } - _ => result_ty - }; - for (i, arm) in arms.iter().enumerate() { - if let Some(ref e) = arm.guard { - check_expr_has_type(fcx, e, tcx.types.bool); - } - check_expr_with_expectation(fcx, &arm.body, expected); - let arm_ty = fcx.expr_ty(&arm.body); - - if result_ty.references_error() || arm_ty.references_error() { - result_ty = tcx.types.err; - continue; + pub fn check_dereferencable(&self, span: Span, expected: Ty<'tcx>, inner: &hir::Pat) -> bool { + let tcx = self.tcx; + if pat_is_binding(&tcx.def_map.borrow(), inner) { + let expected = self.shallow_resolve(expected); + expected.builtin_deref(true, ty::NoPreference).map_or(true, |mt| match mt.ty.sty { + ty::TyTrait(_) => { + // This is "x = SomeTrait" being reduced from + // "let &x = &SomeTrait" or "let box x = Box", an error. + span_err!(tcx.sess, span, E0033, + "type `{}` cannot be dereferenced", + self.ty_to_string(expected)); + false + } + _ => true + }) + } else { + true } + } +} - // Handle the fallback arm of a desugared if-let like a missing else. 
- let is_if_let_fallback = match match_src { - hir::MatchSource::IfLetDesugar { contains_else_clause: false } => { - i == arms.len() - 1 && arm_ty.is_nil() - } - _ => false - }; - - let origin = if is_if_let_fallback { - TypeOrigin::IfExpressionWithNoElse(expr.span) +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + pub fn check_match(&self, + expr: &'gcx hir::Expr, + discrim: &'gcx hir::Expr, + arms: &'gcx [hir::Arm], + expected: Expectation<'tcx>, + match_src: hir::MatchSource) { + let tcx = self.tcx; + + // Not entirely obvious: if matches may create ref bindings, we + // want to use the *precise* type of the discriminant, *not* some + // supertype, as the "discriminant type" (issue #23116). + let contains_ref_bindings = arms.iter() + .filter_map(|a| tcx.arm_contains_ref_binding(a)) + .max_by_key(|m| match *m { + hir::MutMutable => 1, + hir::MutImmutable => 0, + }); + let discrim_ty; + if let Some(m) = contains_ref_bindings { + self.check_expr_with_lvalue_pref(discrim, LvaluePreference::from_mutbl(m)); + discrim_ty = self.expr_ty(discrim); } else { - TypeOrigin::MatchExpressionArm(expr.span, arm.body.span, match_src) + // ...but otherwise we want to use any supertype of the + // discriminant. This is sort of a workaround, see note (*) in + // `check_pat` for some details. + discrim_ty = self.next_ty_var(); + self.check_expr_has_type(discrim, discrim_ty); }; - let result = if is_if_let_fallback { - fcx.infcx().eq_types(true, origin, arm_ty, result_ty) - .map(|InferOk { obligations, .. }| { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); - arm_ty - }) - } else if i == 0 { - // Special-case the first arm, as it has no "previous expressions". - coercion::try(fcx, &arm.body, coerce_first) - } else { - let prev_arms = || arms[..i].iter().map(|arm| &*arm.body); - coercion::try_find_lub(fcx, origin, prev_arms, result_ty, &arm.body) - }; + // Typecheck the patterns first, so that we get types for all the + // bindings. + for arm in arms { + let pcx = PatCtxt { + fcx: self, + map: pat_id_map(&tcx.def_map, &arm.pats[0]), + }; + for p in &arm.pats { + pcx.check_pat(&p, discrim_ty); + } + } - result_ty = match result { - Ok(ty) => ty, - Err(e) => { - let (expected, found) = if is_if_let_fallback { - (arm_ty, result_ty) - } else { - (result_ty, arm_ty) - }; - fcx.infcx().report_mismatched_types(origin, expected, found, e); - fcx.tcx().types.err + // Now typecheck the blocks. + // + // The result of the match is the common supertype of all the + // arms. Start out the value as bottom, since it's the, well, + // bottom the type lattice, and we'll be moving up the lattice as + // we process each arm. (Note that any match with 0 arms is matching + // on any empty type and is therefore unreachable; should the flow + // of execution reach it, we will panic, so bottom is an appropriate + // type in that case) + let expected = expected.adjust_for_branches(self); + let mut result_ty = self.next_diverging_ty_var(); + let coerce_first = match expected { + // We don't coerce to `()` so that if the match expression is a + // statement it's branches can have any consistent type. That allows + // us to give better error messages (pointing to a usually better + // arm for inconsistent arms or to the whole match when a `()` type + // is required). 
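As the comments above explain, the arms of a `match` used as an expression must all agree on one result type, found by coercing each arm toward a common (least upper bound) type. A small sketch of what that looks like from the user's side, under the assumption that the two arm types only differ by an unsizing coercion:

```rust
fn main() {
    let arr = [1, 2, 3];
    let slice: &[i32] = &[4, 5, 6];
    let flag = true;

    // The arm types differ (`&[i32; 3]` vs `&[i32]`); the match result is
    // the common type both arms coerce to, `&[i32]`.
    let picked: &[i32] = match flag {
        true => &arr,
        false => slice,
    };
    println!("picked = {:?}", picked);
}
```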
+ Expectation::ExpectHasType(ety) if ety != self.tcx.mk_nil() => { + ety } + _ => result_ty }; - } + for (i, arm) in arms.iter().enumerate() { + if let Some(ref e) = arm.guard { + self.check_expr_has_type(e, tcx.types.bool); + } + self.check_expr_with_expectation(&arm.body, expected); + let arm_ty = self.expr_ty(&arm.body); - fcx.write_ty(expr.id, result_ty); -} + if result_ty.references_error() || arm_ty.references_error() { + result_ty = tcx.types.err; + continue; + } -pub struct pat_ctxt<'a, 'tcx: 'a> { - pub fcx: &'a FnCtxt<'a, 'tcx>, - pub map: PatIdMap, -} + // Handle the fallback arm of a desugared if-let like a missing else. + let is_if_let_fallback = match match_src { + hir::MatchSource::IfLetDesugar { contains_else_clause: false } => { + i == arms.len() - 1 && arm_ty.is_nil() + } + _ => false + }; -pub fn check_pat_struct<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, pat: &'tcx hir::Pat, - path: &hir::Path, fields: &'tcx [Spanned], - etc: bool, expected: Ty<'tcx>) { - let fcx = pcx.fcx; - let tcx = pcx.fcx.ccx.tcx; + let origin = if is_if_let_fallback { + TypeOrigin::IfExpressionWithNoElse(expr.span) + } else { + TypeOrigin::MatchExpressionArm(expr.span, arm.body.span, match_src) + }; - let def = tcx.def_map.borrow().get(&pat.id).unwrap().full_def(); - let variant = match fcx.def_struct_variant(def, path.span) { - Some((_, variant)) => variant, - None => { - let name = pprust::path_to_string(path); - span_err!(tcx.sess, pat.span, E0163, - "`{}` does not name a struct or a struct variant", name); - fcx.write_error(pat.id); + let result = if is_if_let_fallback { + self.eq_types(true, origin, arm_ty, result_ty) + .map(|InferOk { obligations, .. }| { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()); + arm_ty + }) + } else if i == 0 { + // Special-case the first arm, as it has no "previous expressions". 
+ self.try_coerce(&arm.body, coerce_first) + } else { + let prev_arms = || arms[..i].iter().map(|arm| &*arm.body); + self.try_find_coercion_lub(origin, prev_arms, result_ty, &arm.body) + }; - for field in fields { - check_pat(pcx, &field.node.pat, tcx.types.err); - } - return; + result_ty = match result { + Ok(ty) => ty, + Err(e) => { + let (expected, found) = if is_if_let_fallback { + (arm_ty, result_ty) + } else { + (result_ty, arm_ty) + }; + self.report_mismatched_types(origin, expected, found, e); + self.tcx.types.err + } + }; } - }; - - let pat_ty = pcx.fcx.instantiate_type(def.def_id(), path); - let item_substs = match pat_ty.sty { - ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs, - _ => span_bug!(pat.span, "struct variant is not an ADT") - }; - demand::eqtype(fcx, pat.span, expected, pat_ty); - check_struct_pat_fields(pcx, pat.span, fields, variant, &item_substs, etc); - - fcx.write_ty(pat.id, pat_ty); - fcx.write_substs(pat.id, ty::ItemSubsts { substs: item_substs.clone() }); -} -// This function exists due to the warning "diagnostic code E0164 already used" -fn bad_struct_kind_err(sess: &Session, pat: &hir::Pat, path: &hir::Path, lint: bool) { - let name = pprust::path_to_string(path); - let msg = format!("`{}` does not name a tuple variant or a tuple struct", name); - if lint { - sess.add_lint(lint::builtin::MATCH_OF_UNIT_VARIANT_VIA_PAREN_DOTDOT, - pat.id, - pat.span, - msg); - } else { - span_err!(sess, pat.span, E0164, "{}", msg); + self.write_ty(expr.id, result_ty); } } -fn check_pat_enum<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, - pat: &hir::Pat, - path: &hir::Path, - subpats: Option<&'tcx [P]>, - expected: Ty<'tcx>, - is_tuple_struct_pat: bool) -{ - // Typecheck the path. - let fcx = pcx.fcx; - let tcx = pcx.fcx.ccx.tcx; - - let path_res = match tcx.def_map.borrow().get(&pat.id) { - Some(&path_res) if path_res.base_def != Def::Err => path_res, - _ => { - fcx.write_error(pat.id); - - if let Some(subpats) = subpats { - for pat in subpats { - check_pat(pcx, &pat, tcx.types.err); +impl<'a, 'gcx, 'tcx> PatCtxt<'a, 'gcx, 'tcx> { + pub fn check_pat_struct(&self, pat: &'gcx hir::Pat, + path: &hir::Path, fields: &'gcx [Spanned], + etc: bool, expected: Ty<'tcx>) { + let tcx = self.tcx; + + let def = tcx.def_map.borrow().get(&pat.id).unwrap().full_def(); + let variant = match self.def_struct_variant(def, path.span) { + Some((_, variant)) => variant, + None => { + let name = pprust::path_to_string(path); + span_err!(tcx.sess, pat.span, E0163, + "`{}` does not name a struct or a struct variant", name); + self.write_error(pat.id); + + for field in fields { + self.check_pat(&field.node.pat, tcx.types.err); } + return; } + }; - return; - } - }; - - let (opt_ty, segments, def) = match resolve_ty_and_def_ufcs(fcx, path_res, - None, path, - pat.span, pat.id) { - Some(resolution) => resolution, - // Error handling done inside resolve_ty_and_def_ufcs, so if - // resolution fails just return. - None => {return;} - }; - - // Items that were partially resolved before should have been resolved to - // associated constants (i.e. not methods). 
- if path_res.depth != 0 && !check_assoc_item_is_const(pcx, def, pat.span) { - fcx.write_error(pat.id); - return; + let pat_ty = self.instantiate_type(def.def_id(), path); + let item_substs = match pat_ty.sty { + ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs, + _ => span_bug!(pat.span, "struct variant is not an ADT") + }; + self.demand_eqtype(pat.span, expected, pat_ty); + self.check_struct_pat_fields(pat.span, fields, variant, &item_substs, etc); + + self.write_ty(pat.id, pat_ty); + self.write_substs(pat.id, ty::ItemSubsts { + substs: item_substs + }); } - let enum_def = def.variant_def_ids() - .map_or_else(|| def.def_id(), |(enum_def, _)| enum_def); + fn check_pat_enum(&self, + pat: &hir::Pat, + path: &hir::Path, + subpats: Option<&'gcx [P]>, + expected: Ty<'tcx>, + is_tuple_struct_pat: bool) + { + // Typecheck the path. + let tcx = self.tcx; + + let path_res = match tcx.def_map.borrow().get(&pat.id) { + Some(&path_res) if path_res.base_def != Def::Err => path_res, + _ => { + self.set_tainted_by_errors(); + self.write_error(pat.id); + + if let Some(subpats) = subpats { + for pat in subpats { + self.check_pat(&pat, tcx.types.err); + } + } - let ctor_scheme = tcx.lookup_item_type(enum_def); - let ctor_predicates = tcx.lookup_predicates(enum_def); - let path_scheme = if ctor_scheme.ty.is_fn() { - let fn_ret = tcx.no_late_bound_regions(&ctor_scheme.ty.fn_ret()).unwrap(); - ty::TypeScheme { - ty: fn_ret.unwrap(), - generics: ctor_scheme.generics, - } - } else { - ctor_scheme - }; - instantiate_path(pcx.fcx, segments, - path_scheme, &ctor_predicates, - opt_ty, def, pat.span, pat.id); - - let report_bad_struct_kind = |is_warning| { - bad_struct_kind_err(tcx.sess, pat, path, is_warning); - if is_warning { return; } - fcx.write_error(pat.id); - if let Some(subpats) = subpats { - for pat in subpats { - check_pat(pcx, &pat, tcx.types.err); + return; } - } - }; - - // If we didn't have a fully resolved path to start with, we had an - // associated const, and we should quit now, since the rest of this - // function uses checks specific to structs and enums. - if path_res.depth != 0 { - if is_tuple_struct_pat { - report_bad_struct_kind(false); - } else { - let pat_ty = fcx.node_ty(pat.id); - demand::suptype(fcx, pat.span, expected, pat_ty); - } - return; - } + }; - let pat_ty = fcx.node_ty(pat.id); - demand::eqtype(fcx, pat.span, expected, pat_ty); + let (opt_ty, segments, def) = match self.resolve_ty_and_def_ufcs(path_res, + None, path, + pat.span, pat.id) { + Some(resolution) => resolution, + // Error handling done inside resolve_ty_and_def_ufcs, so if + // resolution fails just return. + None => {return;} + }; - let real_path_ty = fcx.node_ty(pat.id); - let (kind_name, variant, expected_substs) = match real_path_ty.sty { - ty::TyEnum(enum_def, expected_substs) => { - let variant = enum_def.variant_of_def(def); - ("variant", variant, expected_substs) - } - ty::TyStruct(struct_def, expected_substs) => { - let variant = struct_def.struct_variant(); - ("struct", variant, expected_substs) - } - _ => { - report_bad_struct_kind(false); + // Items that were partially resolved before should have been resolved to + // associated constants (i.e. not methods). + if path_res.depth != 0 && !self.check_assoc_item_is_const(def, pat.span) { + self.write_error(pat.id); return; } - }; - match (is_tuple_struct_pat, variant.kind()) { - (true, ty::VariantKind::Unit) => { - // Matching unit structs with tuple variant patterns (`UnitVariant(..)`) - // is allowed for backward compatibility. 
- report_bad_struct_kind(true); - } - (_, ty::VariantKind::Struct) => { - report_bad_struct_kind(false); - return - } - _ => {} - } + let enum_def = def.variant_def_ids() + .map_or_else(|| def.def_id(), |(enum_def, _)| enum_def); - if let Some(subpats) = subpats { - if subpats.len() == variant.fields.len() { - for (subpat, field) in subpats.iter().zip(&variant.fields) { - let field_ty = fcx.field_ty(subpat.span, field, expected_substs); - check_pat(pcx, &subpat, field_ty); + let ctor_scheme = tcx.lookup_item_type(enum_def); + let ctor_predicates = tcx.lookup_predicates(enum_def); + let path_scheme = if ctor_scheme.ty.is_fn() { + let fn_ret = tcx.no_late_bound_regions(&ctor_scheme.ty.fn_ret()).unwrap(); + ty::TypeScheme { + ty: fn_ret.unwrap(), + generics: ctor_scheme.generics, } - } else if variant.fields.is_empty() { - span_err!(tcx.sess, pat.span, E0024, - "this pattern has {} field{}, but the corresponding {} has no fields", - subpats.len(), if subpats.len() == 1 {""} else {"s"}, kind_name); + } else { + ctor_scheme + }; + self.instantiate_path(segments, path_scheme, &ctor_predicates, + opt_ty, def, pat.span, pat.id); - for pat in subpats { - check_pat(pcx, &pat, tcx.types.err); + let report_bad_struct_kind = |is_warning| { + bad_struct_kind_err(tcx.sess, pat, path, is_warning); + if is_warning { return; } + self.write_error(pat.id); + if let Some(subpats) = subpats { + for pat in subpats { + self.check_pat(&pat, tcx.types.err); + } } - } else { - span_err!(tcx.sess, pat.span, E0023, - "this pattern has {} field{}, but the corresponding {} has {} field{}", - subpats.len(), if subpats.len() == 1 {""} else {"s"}, - kind_name, - variant.fields.len(), if variant.fields.len() == 1 {""} else {"s"}); + }; - for pat in subpats { - check_pat(pcx, &pat, tcx.types.err); + // If we didn't have a fully resolved path to start with, we had an + // associated const, and we should quit now, since the rest of this + // function uses checks specific to structs and enums. + if path_res.depth != 0 { + if is_tuple_struct_pat { + report_bad_struct_kind(false); + } else { + let pat_ty = self.node_ty(pat.id); + self.demand_suptype(pat.span, expected, pat_ty); } + return; } - } -} -/// `path` is the AST path item naming the type of this struct. -/// `fields` is the field patterns of the struct pattern. -/// `struct_fields` describes the type of each field of the struct. -/// `struct_id` is the ID of the struct. -/// `etc` is true if the pattern said '...' and false otherwise. -pub fn check_struct_pat_fields<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>, - span: Span, - fields: &'tcx [Spanned], - variant: ty::VariantDef<'tcx>, - substs: &Substs<'tcx>, - etc: bool) { - let tcx = pcx.fcx.ccx.tcx; - - // Index the struct fields' types. - let field_map = variant.fields - .iter() - .map(|field| (field.name, field)) - .collect::>(); - - // Keep track of which fields have already appeared in the pattern. - let mut used_fields = FnvHashMap(); - - // Typecheck each field. 
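The arity checks above (errors E0023/E0024) require a tuple-struct pattern to supply exactly as many subpatterns as the struct has fields. A tiny illustrative example, with a made-up struct name:

```rust
// `Wrapper` is a made-up tuple struct with two fields.
struct Wrapper(i32, i32);

fn main() {
    let w = Wrapper(1, 2);
    // Correct arity: two fields in the struct, two subpatterns here.
    let Wrapper(a, b) = w;
    println!("a = {}, b = {}", a, b);
}
```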
- for &Spanned { node: ref field, span } in fields { - let field_ty = match used_fields.entry(field.name) { - Occupied(occupied) => { - let mut err = struct_span_err!(tcx.sess, span, E0025, - "field `{}` bound multiple times in the pattern", - field.name); - span_note!(&mut err, *occupied.get(), - "field `{}` previously bound here", - field.name); - err.emit(); - tcx.types.err - } - Vacant(vacant) => { - vacant.insert(span); - field_map.get(&field.name) - .map(|f| pcx.fcx.field_ty(span, f, substs)) - .unwrap_or_else(|| { - span_err!(tcx.sess, span, E0026, - "struct `{}` does not have a field named `{}`", - tcx.item_path_str(variant.did), - field.name); - tcx.types.err - }) + let pat_ty = self.node_ty(pat.id); + self.demand_eqtype(pat.span, expected, pat_ty); + + let real_path_ty = self.node_ty(pat.id); + let (kind_name, variant, expected_substs) = match real_path_ty.sty { + ty::TyEnum(enum_def, expected_substs) => { + let variant = enum_def.variant_of_def(def); + ("variant", variant, expected_substs) + } + ty::TyStruct(struct_def, expected_substs) => { + let variant = struct_def.struct_variant(); + ("struct", variant, expected_substs) + } + _ => { + report_bad_struct_kind(false); + return; } }; - check_pat(pcx, &field.pat, field_ty); + match (is_tuple_struct_pat, variant.kind()) { + (true, ty::VariantKind::Unit) => { + // Matching unit structs with tuple variant patterns (`UnitVariant(..)`) + // is allowed for backward compatibility. + report_bad_struct_kind(true); + } + (_, ty::VariantKind::Struct) => { + report_bad_struct_kind(false); + return + } + _ => {} + } + + if let Some(subpats) = subpats { + if subpats.len() == variant.fields.len() { + for (subpat, field) in subpats.iter().zip(&variant.fields) { + let field_ty = self.field_ty(subpat.span, field, expected_substs); + self.check_pat(&subpat, field_ty); + } + } else if variant.fields.is_empty() { + span_err!(tcx.sess, pat.span, E0024, + "this pattern has {} field{}, but the corresponding {} has no fields", + subpats.len(), if subpats.len() == 1 {""} else {"s"}, kind_name); + + for pat in subpats { + self.check_pat(&pat, tcx.types.err); + } + } else { + span_err!(tcx.sess, pat.span, E0023, + "this pattern has {} field{}, but the corresponding {} has {} field{}", + subpats.len(), if subpats.len() == 1 {""} else {"s"}, + kind_name, + variant.fields.len(), if variant.fields.len() == 1 {""} else {"s"}); + + for pat in subpats { + self.check_pat(&pat, tcx.types.err); + } + } + } } - // Report an error if not all the fields were specified. - if !etc { - for field in variant.fields + /// `path` is the AST path item naming the type of this struct. + /// `fields` is the field patterns of the struct pattern. + /// `struct_fields` describes the type of each field of the struct. + /// `struct_id` is the ID of the struct. + /// `etc` is true if the pattern said '...' and false otherwise. + pub fn check_struct_pat_fields(&self, + span: Span, + fields: &'gcx [Spanned], + variant: ty::VariantDef<'tcx>, + substs: &Substs<'tcx>, + etc: bool) { + let tcx = self.tcx; + + // Index the struct fields' types. + let field_map = variant.fields .iter() - .filter(|field| !used_fields.contains_key(&field.name)) { - span_err!(tcx.sess, span, E0027, - "pattern does not mention field `{}`", - field.name); + .map(|field| (field.name, field)) + .collect::>(); + + // Keep track of which fields have already appeared in the pattern. + let mut used_fields = FnvHashMap(); + + // Typecheck each field. 
+ for &Spanned { node: ref field, span } in fields { + let field_ty = match used_fields.entry(field.name) { + Occupied(occupied) => { + let mut err = struct_span_err!(tcx.sess, span, E0025, + "field `{}` bound multiple times \ + in the pattern", + field.name); + span_note!(&mut err, *occupied.get(), + "field `{}` previously bound here", + field.name); + err.emit(); + tcx.types.err + } + Vacant(vacant) => { + vacant.insert(span); + field_map.get(&field.name) + .map(|f| self.field_ty(span, f, substs)) + .unwrap_or_else(|| { + span_err!(tcx.sess, span, E0026, + "struct `{}` does not have a field named `{}`", + tcx.item_path_str(variant.did), + field.name); + tcx.types.err + }) + } + }; + + self.check_pat(&field.pat, field_ty); + } + + // Report an error if not all the fields were specified. + if !etc { + for field in variant.fields + .iter() + .filter(|field| !used_fields.contains_key(&field.name)) { + span_err!(tcx.sess, span, E0027, + "pattern does not mention field `{}`", + field.name); + } } } } diff --git a/src/librustc_typeck/check/assoc.rs b/src/librustc_typeck/check/assoc.rs index f7726bc9cf..04b0248ccd 100644 --- a/src/librustc_typeck/check/assoc.rs +++ b/src/librustc_typeck/check/assoc.rs @@ -16,12 +16,13 @@ use syntax::ast; use syntax::codemap::Span; //FIXME(@jroesch): Ideally we should be able to drop the fulfillment_cx argument. -pub fn normalize_associated_types_in<'a,'tcx,T>(infcx: &InferCtxt<'a,'tcx>, - fulfillment_cx: &mut FulfillmentContext<'tcx>, - span: Span, - body_id: ast::NodeId, - value: &T) - -> T +pub fn normalize_associated_types_in<'a, 'gcx, 'tcx, T>( + infcx: &InferCtxt<'a, 'gcx, 'tcx>, + fulfillment_cx: &mut FulfillmentContext<'tcx>, + span: Span, + body_id: ast::NodeId, + value: &T) -> T + where T : TypeFoldable<'tcx> { debug!("normalize_associated_types_in(value={:?})", value); diff --git a/src/librustc_typeck/check/callee.rs b/src/librustc_typeck/check/callee.rs index 21800d91d9..7493ca70f5 100644 --- a/src/librustc_typeck/check/callee.rs +++ b/src/librustc_typeck/check/callee.rs @@ -8,21 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
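The field checks above enforce that a struct pattern only names fields that exist, names each at most once, and covers every field unless it ends with `..` (errors E0025/E0026/E0027). A minimal sketch of the accepted forms, using an invented struct for illustration:

```rust
// `Point` is a made-up struct used only to show the field rules.
struct Point { x: i32, y: i32, z: i32 }

fn main() {
    let p = Point { x: 1, y: 2, z: 3 };

    // Exhaustive pattern: every field mentioned exactly once.
    let Point { x, y, z } = p;
    println!("{} {} {}", x, y, z);

    // `..` opts out of listing the remaining fields.
    let q = Point { x: 4, y: 5, z: 6 };
    match q {
        Point { x, .. } => println!("x = {}", x),
    }
}
```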
-use super::autoderef; -use super::check_argument_types; -use super::check_expr; -use super::check_method_argument_types; -use super::demand; -use super::DeferredCallResolution; -use super::err_args; -use super::Expectation; -use super::expected_types_for_fn_args; -use super::FnCtxt; -use super::method; -use super::structurally_resolved_type; -use super::TupleArgumentsFlag; -use super::UnresolvedTypeAction; -use super::write_call; +use super::{DeferredCallResolution, Expectation, FnCtxt, + TupleArgumentsFlag, UnresolvedTypeAction}; use CrateCtxt; use middle::cstore::LOCAL_CRATE; @@ -64,309 +51,304 @@ pub fn check_legal_trait_for_method_call(ccx: &CrateCtxt, span: Span, trait_id: struct_span_err!(tcx.sess, span, E0174, "explicit use of unboxed closure method `{}` is experimental", method) - .fileline_help(span, "add `#![feature(unboxed_closures)]` to the crate \ - attributes to enable") + .help("add `#![feature(unboxed_closures)]` to the crate \ + attributes to enable") .emit(); } } -pub fn check_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - call_expr: &'tcx hir::Expr, - callee_expr: &'tcx hir::Expr, - arg_exprs: &'tcx [P], - expected: Expectation<'tcx>) -{ - check_expr(fcx, callee_expr); - let original_callee_ty = fcx.expr_ty(callee_expr); - let (callee_ty, _, result) = - autoderef(fcx, - callee_expr.span, - original_callee_ty, - || Some(callee_expr), - UnresolvedTypeAction::Error, - LvaluePreference::NoPreference, - |adj_ty, idx| { - try_overloaded_call_step(fcx, call_expr, callee_expr, adj_ty, idx) - }); - - match result { - None => { - // this will report an error since original_callee_ty is not a fn - confirm_builtin_call(fcx, call_expr, original_callee_ty, arg_exprs, expected); - } - - Some(CallStep::Builtin) => { - confirm_builtin_call(fcx, call_expr, callee_ty, arg_exprs, expected); - } - - Some(CallStep::DeferredClosure(fn_sig)) => { - confirm_deferred_closure_call(fcx, call_expr, arg_exprs, expected, fn_sig); - } - - Some(CallStep::Overloaded(method_callee)) => { - confirm_overloaded_call(fcx, call_expr, callee_expr, - arg_exprs, expected, method_callee); - } - } -} - enum CallStep<'tcx> { Builtin, DeferredClosure(ty::FnSig<'tcx>), Overloaded(ty::MethodCallee<'tcx>) } -fn try_overloaded_call_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - call_expr: &'tcx hir::Expr, - callee_expr: &'tcx hir::Expr, - adjusted_ty: Ty<'tcx>, - autoderefs: usize) - -> Option> -{ - debug!("try_overloaded_call_step(call_expr={:?}, adjusted_ty={:?}, autoderefs={})", - call_expr, - adjusted_ty, - autoderefs); - - // If the callee is a bare function or a closure, then we're all set. - match structurally_resolved_type(fcx, callee_expr.span, adjusted_ty).sty { - ty::TyFnDef(..) 
| ty::TyFnPtr(_) => { - fcx.write_autoderef_adjustment(callee_expr.id, autoderefs); - return Some(CallStep::Builtin); - } +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + pub fn check_call(&self, + call_expr: &'gcx hir::Expr, + callee_expr: &'gcx hir::Expr, + arg_exprs: &'gcx [P], + expected: Expectation<'tcx>) + { + self.check_expr(callee_expr); + let original_callee_ty = self.expr_ty(callee_expr); + let (callee_ty, _, result) = + self.autoderef(callee_expr.span, + original_callee_ty, + || Some(callee_expr), + UnresolvedTypeAction::Error, + LvaluePreference::NoPreference, + |adj_ty, idx| { + self.try_overloaded_call_step(call_expr, callee_expr, adj_ty, idx) + }); + + match result { + None => { + // this will report an error since original_callee_ty is not a fn + self.confirm_builtin_call(call_expr, original_callee_ty, arg_exprs, expected); + } - ty::TyClosure(def_id, ref substs) => { - assert_eq!(def_id.krate, LOCAL_CRATE); - - // Check whether this is a call to a closure where we - // haven't yet decided on whether the closure is fn vs - // fnmut vs fnonce. If so, we have to defer further processing. - if fcx.infcx().closure_kind(def_id).is_none() { - let closure_ty = - fcx.infcx().closure_type(def_id, substs); - let fn_sig = - fcx.infcx().replace_late_bound_regions_with_fresh_var(call_expr.span, - infer::FnCall, - &closure_ty.sig).0; - fcx.record_deferred_call_resolution(def_id, Box::new(CallResolution { - call_expr: call_expr, - callee_expr: callee_expr, - adjusted_ty: adjusted_ty, - autoderefs: autoderefs, - fn_sig: fn_sig.clone(), - closure_def_id: def_id - })); - return Some(CallStep::DeferredClosure(fn_sig)); + Some(CallStep::Builtin) => { + self.confirm_builtin_call(call_expr, callee_ty, arg_exprs, expected); } - } - // Hack: we know that there are traits implementing Fn for &F - // where F:Fn and so forth. In the particular case of types - // like `x: &mut FnMut()`, if there is a call `x()`, we would - // normally translate to `FnMut::call_mut(&mut x, ())`, but - // that winds up requiring `mut x: &mut FnMut()`. A little - // over the top. The simplest fix by far is to just ignore - // this case and deref again, so we wind up with - // `FnMut::call_mut(&mut *x, ())`. - ty::TyRef(..) if autoderefs == 0 => { - return None; - } + Some(CallStep::DeferredClosure(fn_sig)) => { + self.confirm_deferred_closure_call(call_expr, arg_exprs, expected, fn_sig); + } - _ => {} + Some(CallStep::Overloaded(method_callee)) => { + self.confirm_overloaded_call(call_expr, callee_expr, + arg_exprs, expected, method_callee); + } + } } - try_overloaded_call_traits(fcx, call_expr, callee_expr, adjusted_ty, autoderefs) - .map(|method_callee| CallStep::Overloaded(method_callee)) -} + fn try_overloaded_call_step(&self, + call_expr: &'gcx hir::Expr, + callee_expr: &'gcx hir::Expr, + adjusted_ty: Ty<'tcx>, + autoderefs: usize) + -> Option> + { + debug!("try_overloaded_call_step(call_expr={:?}, adjusted_ty={:?}, autoderefs={})", + call_expr, + adjusted_ty, + autoderefs); + + // If the callee is a bare function or a closure, then we're all set. + match self.structurally_resolved_type(callee_expr.span, adjusted_ty).sty { + ty::TyFnDef(..) | ty::TyFnPtr(_) => { + self.write_autoderef_adjustment(callee_expr.id, autoderefs); + return Some(CallStep::Builtin); + } -fn try_overloaded_call_traits<'a,'tcx>(fcx: &FnCtxt<'a, 'tcx>, - call_expr: &hir::Expr, - callee_expr: &hir::Expr, - adjusted_ty: Ty<'tcx>, - autoderefs: usize) - -> Option> -{ - // Try the options that are least restrictive on the caller first. 
- for &(opt_trait_def_id, method_name) in &[ - (fcx.tcx().lang_items.fn_trait(), token::intern("call")), - (fcx.tcx().lang_items.fn_mut_trait(), token::intern("call_mut")), - (fcx.tcx().lang_items.fn_once_trait(), token::intern("call_once")), - ] { - let trait_def_id = match opt_trait_def_id { - Some(def_id) => def_id, - None => continue, - }; + ty::TyClosure(def_id, substs) => { + assert_eq!(def_id.krate, LOCAL_CRATE); + + // Check whether this is a call to a closure where we + // haven't yet decided on whether the closure is fn vs + // fnmut vs fnonce. If so, we have to defer further processing. + if self.closure_kind(def_id).is_none() { + let closure_ty = + self.closure_type(def_id, substs); + let fn_sig = + self.replace_late_bound_regions_with_fresh_var(call_expr.span, + infer::FnCall, + &closure_ty.sig).0; + self.record_deferred_call_resolution(def_id, Box::new(CallResolution { + call_expr: call_expr, + callee_expr: callee_expr, + adjusted_ty: adjusted_ty, + autoderefs: autoderefs, + fn_sig: fn_sig.clone(), + closure_def_id: def_id + })); + return Some(CallStep::DeferredClosure(fn_sig)); + } + } - match method::lookup_in_trait_adjusted(fcx, - call_expr.span, - Some(&callee_expr), - method_name, - trait_def_id, - autoderefs, - false, - adjusted_ty, - None) { - None => continue, - Some(method_callee) => { - return Some(method_callee); + // Hack: we know that there are traits implementing Fn for &F + // where F:Fn and so forth. In the particular case of types + // like `x: &mut FnMut()`, if there is a call `x()`, we would + // normally translate to `FnMut::call_mut(&mut x, ())`, but + // that winds up requiring `mut x: &mut FnMut()`. A little + // over the top. The simplest fix by far is to just ignore + // this case and deref again, so we wind up with + // `FnMut::call_mut(&mut *x, ())`. + ty::TyRef(..) if autoderefs == 0 => { + return None; } + + _ => {} } - } - None -} + self.try_overloaded_call_traits(call_expr, callee_expr, adjusted_ty, autoderefs) + .map(|method_callee| CallStep::Overloaded(method_callee)) + } -fn confirm_builtin_call<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, - call_expr: &hir::Expr, - callee_ty: Ty<'tcx>, - arg_exprs: &'tcx [P], - expected: Expectation<'tcx>) -{ - let error_fn_sig; - - let fn_sig = match callee_ty.sty { - ty::TyFnDef(_, _, &ty::BareFnTy {ref sig, ..}) | - ty::TyFnPtr(&ty::BareFnTy {ref sig, ..}) => { - sig + fn try_overloaded_call_traits(&self, + call_expr: &hir::Expr, + callee_expr: &hir::Expr, + adjusted_ty: Ty<'tcx>, + autoderefs: usize) + -> Option> + { + // Try the options that are least restrictive on the caller first. 
+ for &(opt_trait_def_id, method_name) in &[ + (self.tcx.lang_items.fn_trait(), token::intern("call")), + (self.tcx.lang_items.fn_mut_trait(), token::intern("call_mut")), + (self.tcx.lang_items.fn_once_trait(), token::intern("call_once")), + ] { + let trait_def_id = match opt_trait_def_id { + Some(def_id) => def_id, + None => continue, + }; + + match self.lookup_method_in_trait_adjusted(call_expr.span, + Some(&callee_expr), + method_name, + trait_def_id, + autoderefs, + false, + adjusted_ty, + None) { + None => continue, + Some(method_callee) => { + return Some(method_callee); + } + } } - _ => { - let mut err = fcx.type_error_struct(call_expr.span, |actual| { - format!("expected function, found `{}`", actual) - }, callee_ty, None); - - if let hir::ExprCall(ref expr, _) = call_expr.node { - let tcx = fcx.tcx(); - if let Some(pr) = tcx.def_map.borrow().get(&expr.id) { - if pr.depth == 0 && pr.base_def != Def::Err { - if let Some(span) = tcx.map.span_if_local(pr.def_id()) { - err.span_note(span, "defined here"); + + None + } + + fn confirm_builtin_call(&self, + call_expr: &hir::Expr, + callee_ty: Ty<'tcx>, + arg_exprs: &'gcx [P], + expected: Expectation<'tcx>) + { + let error_fn_sig; + + let fn_sig = match callee_ty.sty { + ty::TyFnDef(_, _, &ty::BareFnTy {ref sig, ..}) | + ty::TyFnPtr(&ty::BareFnTy {ref sig, ..}) => { + sig + } + _ => { + let mut err = self.type_error_struct(call_expr.span, |actual| { + format!("expected function, found `{}`", actual) + }, callee_ty, None); + + if let hir::ExprCall(ref expr, _) = call_expr.node { + let tcx = self.tcx; + if let Some(pr) = tcx.def_map.borrow().get(&expr.id) { + if pr.depth == 0 && pr.base_def != Def::Err { + if let Some(span) = tcx.map.span_if_local(pr.def_id()) { + err.span_note(span, "defined here"); + } } } } - } - err.emit(); + err.emit(); - // This is the "default" function signature, used in case of error. - // In that case, we check each argument against "error" in order to - // set up all the node type bindings. - error_fn_sig = ty::Binder(ty::FnSig { - inputs: err_args(fcx.tcx(), arg_exprs.len()), - output: ty::FnConverging(fcx.tcx().types.err), - variadic: false - }); + // This is the "default" function signature, used in case of error. + // In that case, we check each argument against "error" in order to + // set up all the node type bindings. + error_fn_sig = ty::Binder(ty::FnSig { + inputs: self.err_args(arg_exprs.len()), + output: ty::FnConverging(self.tcx.types.err), + variadic: false + }); - &error_fn_sig - } - }; - - // Replace any late-bound regions that appear in the function - // signature with region variables. We also have to - // renormalize the associated types at this point, since they - // previously appeared within a `Binder<>` and hence would not - // have been normalized before. - let fn_sig = - fcx.infcx().replace_late_bound_regions_with_fresh_var(call_expr.span, - infer::FnCall, - fn_sig).0; - let fn_sig = - fcx.normalize_associated_types_in(call_expr.span, &fn_sig); - - // Call the generic checker. 
- let expected_arg_tys = expected_types_for_fn_args(fcx, - call_expr.span, - expected, - fn_sig.output, - &fn_sig.inputs); - check_argument_types(fcx, - call_expr.span, - &fn_sig.inputs, - &expected_arg_tys[..], - arg_exprs, - fn_sig.variadic, - TupleArgumentsFlag::DontTupleArguments); - - write_call(fcx, call_expr, fn_sig.output); -} + &error_fn_sig + } + }; -fn confirm_deferred_closure_call<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, - call_expr: &hir::Expr, - arg_exprs: &'tcx [P], - expected: Expectation<'tcx>, - fn_sig: ty::FnSig<'tcx>) -{ - // `fn_sig` is the *signature* of the cosure being called. We - // don't know the full details yet (`Fn` vs `FnMut` etc), but we - // do know the types expected for each argument and the return - // type. - - let expected_arg_tys = - expected_types_for_fn_args(fcx, - call_expr.span, - expected, - fn_sig.output.clone(), - &fn_sig.inputs); - - check_argument_types(fcx, - call_expr.span, - &fn_sig.inputs, - &expected_arg_tys, - arg_exprs, - fn_sig.variadic, - TupleArgumentsFlag::TupleArguments); - - write_call(fcx, call_expr, fn_sig.output); -} + // Replace any late-bound regions that appear in the function + // signature with region variables. We also have to + // renormalize the associated types at this point, since they + // previously appeared within a `Binder<>` and hence would not + // have been normalized before. + let fn_sig = + self.replace_late_bound_regions_with_fresh_var(call_expr.span, + infer::FnCall, + fn_sig).0; + let fn_sig = + self.normalize_associated_types_in(call_expr.span, &fn_sig); + + // Call the generic checker. + let expected_arg_tys = self.expected_types_for_fn_args(call_expr.span, + expected, + fn_sig.output, + &fn_sig.inputs); + self.check_argument_types(call_expr.span, + &fn_sig.inputs, + &expected_arg_tys[..], + arg_exprs, + fn_sig.variadic, + TupleArgumentsFlag::DontTupleArguments); + + self.write_call(call_expr, fn_sig.output); + } -fn confirm_overloaded_call<'a,'tcx>(fcx: &FnCtxt<'a, 'tcx>, - call_expr: &hir::Expr, - callee_expr: &'tcx hir::Expr, - arg_exprs: &'tcx [P], - expected: Expectation<'tcx>, - method_callee: ty::MethodCallee<'tcx>) -{ - let output_type = - check_method_argument_types(fcx, - call_expr.span, - method_callee.ty, - callee_expr, - arg_exprs, - TupleArgumentsFlag::TupleArguments, - expected); - write_call(fcx, call_expr, output_type); - - write_overloaded_call_method_map(fcx, call_expr, method_callee); -} + fn confirm_deferred_closure_call(&self, + call_expr: &hir::Expr, + arg_exprs: &'gcx [P], + expected: Expectation<'tcx>, + fn_sig: ty::FnSig<'tcx>) + { + // `fn_sig` is the *signature* of the cosure being called. We + // don't know the full details yet (`Fn` vs `FnMut` etc), but we + // do know the types expected for each argument and the return + // type. 
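The deferred resolution above exists because, at the call site, the checker may know a closure's argument and return types without yet knowing whether it is `Fn`, `FnMut`, or `FnOnce`. A short sketch of that three-way distinction from the caller's perspective; the helper functions are invented for illustration:

```rust
// One made-up helper per closure trait.
fn call_fn<F: Fn(i32) -> i32>(f: F) -> i32 { f(1) }
fn call_fn_mut<F: FnMut(i32) -> i32>(mut f: F) -> i32 { f(1) }
fn call_fn_once<F: FnOnce(i32) -> i32>(f: F) -> i32 { f(1) }

fn main() {
    let base = 10;
    let mut total = 0;
    let s = String::from("owned");

    // Captures `base` by shared reference: implements Fn (and FnMut, FnOnce).
    println!("{}", call_fn(|x| x + base));

    // Mutates `total`: implements FnMut (and FnOnce), but not Fn.
    println!("{}", call_fn_mut(|x| { total += x; total }));

    // Consumes `s` when called: implements only FnOnce.
    println!("{}", call_fn_once(move |x| x + s.into_bytes().len() as i32));
}
```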
+ + let expected_arg_tys = + self.expected_types_for_fn_args(call_expr.span, + expected, + fn_sig.output.clone(), + &fn_sig.inputs); + + self.check_argument_types(call_expr.span, + &fn_sig.inputs, + &expected_arg_tys, + arg_exprs, + fn_sig.variadic, + TupleArgumentsFlag::TupleArguments); + + self.write_call(call_expr, fn_sig.output); + } -fn write_overloaded_call_method_map<'a,'tcx>(fcx: &FnCtxt<'a, 'tcx>, - call_expr: &hir::Expr, - method_callee: ty::MethodCallee<'tcx>) { - let method_call = ty::MethodCall::expr(call_expr.id); - fcx.inh.tables.borrow_mut().method_map.insert(method_call, method_callee); + fn confirm_overloaded_call(&self, + call_expr: &hir::Expr, + callee_expr: &'gcx hir::Expr, + arg_exprs: &'gcx [P], + expected: Expectation<'tcx>, + method_callee: ty::MethodCallee<'tcx>) + { + let output_type = + self.check_method_argument_types(call_expr.span, + method_callee.ty, + callee_expr, + arg_exprs, + TupleArgumentsFlag::TupleArguments, + expected); + self.write_call(call_expr, output_type); + + self.write_overloaded_call_method_map(call_expr, method_callee); + } + + fn write_overloaded_call_method_map(&self, + call_expr: &hir::Expr, + method_callee: ty::MethodCallee<'tcx>) { + let method_call = ty::MethodCall::expr(call_expr.id); + self.tables.borrow_mut().method_map.insert(method_call, method_callee); + } } #[derive(Debug)] -struct CallResolution<'tcx> { - call_expr: &'tcx hir::Expr, - callee_expr: &'tcx hir::Expr, +struct CallResolution<'gcx: 'tcx, 'tcx> { + call_expr: &'gcx hir::Expr, + callee_expr: &'gcx hir::Expr, adjusted_ty: Ty<'tcx>, autoderefs: usize, fn_sig: ty::FnSig<'tcx>, closure_def_id: DefId, } -impl<'tcx> DeferredCallResolution<'tcx> for CallResolution<'tcx> { - fn resolve<'a>(&mut self, fcx: &FnCtxt<'a,'tcx>) { +impl<'gcx, 'tcx> DeferredCallResolution<'gcx, 'tcx> for CallResolution<'gcx, 'tcx> { + fn resolve<'a>(&mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) { debug!("DeferredCallResolution::resolve() {:?}", self); // we should not be invoked until the closure kind has been // determined by upvar inference - assert!(fcx.infcx().closure_kind(self.closure_def_id).is_some()); + assert!(fcx.closure_kind(self.closure_def_id).is_some()); // We may now know enough to figure out fn vs fnmut etc. - match try_overloaded_call_traits(fcx, self.call_expr, self.callee_expr, - self.adjusted_ty, self.autoderefs) { + match fcx.try_overloaded_call_traits(self.call_expr, self.callee_expr, + self.adjusted_ty, self.autoderefs) { Some(method_callee) => { // One problem is that when we get here, we are going // to have a newly instantiated function signature @@ -376,8 +358,8 @@ impl<'tcx> DeferredCallResolution<'tcx> for CallResolution<'tcx> { // can't because of the annoying need for a TypeTrace. // (This always bites me, should find a way to // refactor it.) 
- let method_sig = fcx.tcx().no_late_bound_regions(method_callee.ty.fn_sig()) - .unwrap(); + let method_sig = fcx.tcx.no_late_bound_regions(method_callee.ty.fn_sig()) + .unwrap(); debug!("attempt_resolution: method_callee={:?}", method_callee); @@ -385,16 +367,15 @@ impl<'tcx> DeferredCallResolution<'tcx> for CallResolution<'tcx> { for (&method_arg_ty, &self_arg_ty) in method_sig.inputs[1..].iter().zip(&self.fn_sig.inputs) { - demand::eqtype(fcx, self.call_expr.span, self_arg_ty, method_arg_ty); + fcx.demand_eqtype(self.call_expr.span, self_arg_ty, method_arg_ty); } - let nilty = fcx.tcx().mk_nil(); - demand::eqtype(fcx, - self.call_expr.span, - method_sig.output.unwrap_or(nilty), - self.fn_sig.output.unwrap_or(nilty)); + let nilty = fcx.tcx.mk_nil(); + fcx.demand_eqtype(self.call_expr.span, + method_sig.output.unwrap_or(nilty), + self.fn_sig.output.unwrap_or(nilty)); - write_overloaded_call_method_map(fcx, self.call_expr, method_callee); + fcx.write_overloaded_call_method_map(self.call_expr, method_callee); } None => { span_bug!( diff --git a/src/librustc_typeck/check/cast.rs b/src/librustc_typeck/check/cast.rs index 91cdb8d966..690250edb8 100644 --- a/src/librustc_typeck/check/cast.rs +++ b/src/librustc_typeck/check/cast.rs @@ -38,19 +38,17 @@ //! expression, `e as U2` is not necessarily so (in fact it will only be valid if //! `U1` coerces to `U2`). -use super::coercion; -use super::demand; use super::FnCtxt; -use super::structurally_resolved_type; use lint; use hir::def_id::DefId; +use rustc::hir; +use rustc::traits; use rustc::ty::{self, Ty, TypeFoldable}; use rustc::ty::cast::{CastKind, CastTy}; -use syntax::codemap::Span; -use rustc::hir; use syntax::ast; - +use syntax::codemap::Span; +use util::common::ErrorReported; /// Reifies a cast check to be checked once we have full type information for /// a function context. @@ -58,6 +56,7 @@ pub struct CastCheck<'tcx> { expr: &'tcx hir::Expr, expr_ty: Ty<'tcx>, cast_ty: Ty<'tcx>, + cast_span: Span, span: Span, } @@ -73,25 +72,25 @@ enum UnsizeKind<'tcx> { OfParam(&'tcx ty::ParamTy) } -/// Returns the kind of unsize information of t, or None -/// if t is sized or it is unknown. -fn unsize_kind<'a,'tcx>(fcx: &FnCtxt<'a, 'tcx>, - t: Ty<'tcx>) - -> Option> { - match t.sty { - ty::TySlice(_) | ty::TyStr => Some(UnsizeKind::Length), - ty::TyTrait(ref tty) => Some(UnsizeKind::Vtable(tty.principal_def_id())), - ty::TyStruct(def, substs) => { - // FIXME(arielb1): do some kind of normalization - match def.struct_variant().fields.last() { - None => None, - Some(f) => unsize_kind(fcx, f.ty(fcx.tcx(), substs)) +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + /// Returns the kind of unsize information of t, or None + /// if t is sized or it is unknown. + fn unsize_kind(&self, t: Ty<'tcx>) -> Option> { + match t.sty { + ty::TySlice(_) | ty::TyStr => Some(UnsizeKind::Length), + ty::TyTrait(ref tty) => Some(UnsizeKind::Vtable(tty.principal_def_id())), + ty::TyStruct(def, substs) => { + // FIXME(arielb1): do some kind of normalization + match def.struct_variant().fields.last() { + None => None, + Some(f) => self.unsize_kind(f.ty(self.tcx, substs)) + } } + // We should really try to normalize here. + ty::TyProjection(ref pi) => Some(UnsizeKind::OfProjection(pi)), + ty::TyParam(ref p) => Some(UnsizeKind::OfParam(p)), + _ => None } - // We should really try to normalize here. 
- ty::TyProjection(ref pi) => Some(UnsizeKind::OfProjection(pi)), - ty::TyParam(ref p) => Some(UnsizeKind::OfParam(p)), - _ => None } } @@ -110,19 +109,37 @@ enum CastError { NonScalar, } -impl<'tcx> CastCheck<'tcx> { - pub fn new(expr: &'tcx hir::Expr, expr_ty: Ty<'tcx>, cast_ty: Ty<'tcx>, span: Span) - -> CastCheck<'tcx> { - CastCheck { +impl<'a, 'gcx, 'tcx> CastCheck<'tcx> { + pub fn new(fcx: &FnCtxt<'a, 'gcx, 'tcx>, + expr: &'tcx hir::Expr, + expr_ty: Ty<'tcx>, + cast_ty: Ty<'tcx>, + cast_span: Span, + span: Span) + -> Result, ErrorReported> { + let check = CastCheck { expr: expr, expr_ty: expr_ty, cast_ty: cast_ty, + cast_span: cast_span, span: span, + }; + + // For better error messages, check for some obviously unsized + // cases now. We do a more thorough check at the end, once + // inference is more completely known. + match cast_ty.sty { + ty::TyTrait(..) | ty::TySlice(..) => { + check.report_cast_to_unsized_type(fcx); + Err(ErrorReported) + } + _ => { + Ok(check) + } } } - fn report_cast_error<'a>(&self, fcx: &FnCtxt<'a, 'tcx>, - e: CastError) { + fn report_cast_error(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>, e: CastError) { match e { CastError::NeedViaPtr | CastError::NeedViaThinPtr | @@ -131,10 +148,9 @@ impl<'tcx> CastCheck<'tcx> { fcx.type_error_struct(self.span, |actual| { format!("casting `{}` as `{}` is invalid", actual, - fcx.infcx().ty_to_string(self.cast_ty)) + fcx.ty_to_string(self.cast_ty)) }, self.expr_ty, None) - .fileline_help(self.span, - &format!("cast through {} first", match e { + .help(&format!("cast through {} first", match e { CastError::NeedViaPtr => "a raw pointer", CastError::NeedViaThinPtr => "a thin pointer", CastError::NeedViaInt => "an integer", @@ -144,8 +160,8 @@ impl<'tcx> CastCheck<'tcx> { .emit(); } CastError::CastToBool => { - struct_span_err!(fcx.tcx().sess, self.span, E0054, "cannot cast as `bool`") - .fileline_help(self.span, "compare with zero instead") + struct_span_err!(fcx.tcx.sess, self.span, E0054, "cannot cast as `bool`") + .help("compare with zero instead") .emit(); } CastError::CastToChar => { @@ -157,78 +173,134 @@ impl<'tcx> CastCheck<'tcx> { fcx.type_error_message(self.span, |actual| { format!("non-scalar cast: `{}` as `{}`", actual, - fcx.infcx().ty_to_string(self.cast_ty)) + fcx.ty_to_string(self.cast_ty)) }, self.expr_ty, None); } CastError::IllegalCast => { fcx.type_error_message(self.span, |actual| { format!("casting `{}` as `{}` is invalid", actual, - fcx.infcx().ty_to_string(self.cast_ty)) + fcx.ty_to_string(self.cast_ty)) }, self.expr_ty, None); } CastError::SizedUnsizedCast => { fcx.type_error_message(self.span, |actual| { format!("cannot cast thin pointer `{}` to fat pointer `{}`", actual, - fcx.infcx().ty_to_string(self.cast_ty)) + fcx.ty_to_string(self.cast_ty)) }, self.expr_ty, None) } CastError::DifferingKinds => { fcx.type_error_struct(self.span, |actual| { format!("casting `{}` as `{}` is invalid", actual, - fcx.infcx().ty_to_string(self.cast_ty)) + fcx.ty_to_string(self.cast_ty)) }, self.expr_ty, None) - .fileline_note(self.span, "vtable kinds may not match") + .note("vtable kinds may not match") .emit(); } } } - fn trivial_cast_lint<'a>(&self, fcx: &FnCtxt<'a, 'tcx>) { + fn report_cast_to_unsized_type(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) { + if + self.cast_ty.references_error() || + self.expr_ty.references_error() + { + return; + } + + let tstr = fcx.ty_to_string(self.cast_ty); + let mut err = fcx.type_error_struct(self.span, |actual| { + format!("cast to unsized type: `{}` as `{}`", actual, tstr) + }, self.expr_ty, 
None); + match self.expr_ty.sty { + ty::TyRef(_, ty::TypeAndMut { mutbl: mt, .. }) => { + let mtstr = match mt { + hir::MutMutable => "mut ", + hir::MutImmutable => "" + }; + if self.cast_ty.is_trait() { + match fcx.tcx.sess.codemap().span_to_snippet(self.cast_span) { + Ok(s) => { + err.span_suggestion(self.cast_span, + "try casting to a reference instead:", + format!("&{}{}", mtstr, s)); + }, + Err(_) => + span_help!(err, self.cast_span, + "did you mean `&{}{}`?", mtstr, tstr), + } + } else { + span_help!(err, self.span, + "consider using an implicit coercion to `&{}{}` instead", + mtstr, tstr); + } + } + ty::TyBox(..) => { + match fcx.tcx.sess.codemap().span_to_snippet(self.cast_span) { + Ok(s) => { + err.span_suggestion(self.cast_span, + "try casting to a `Box` instead:", + format!("Box<{}>", s)); + }, + Err(_) => + span_help!(err, self.cast_span, "did you mean `Box<{}>`?", tstr), + } + } + _ => { + span_help!(err, self.expr.span, + "consider using a box or reference as appropriate"); + } + } + err.emit(); + } + + fn trivial_cast_lint(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) { let t_cast = self.cast_ty; let t_expr = self.expr_ty; if t_cast.is_numeric() && t_expr.is_numeric() { - fcx.tcx().sess.add_lint(lint::builtin::TRIVIAL_NUMERIC_CASTS, - self.expr.id, - self.span, - format!("trivial numeric cast: `{}` as `{}`. Cast can be \ - replaced by coercion, this might require type \ - ascription or a temporary variable", - fcx.infcx().ty_to_string(t_expr), - fcx.infcx().ty_to_string(t_cast))); + fcx.tcx.sess.add_lint(lint::builtin::TRIVIAL_NUMERIC_CASTS, + self.expr.id, + self.span, + format!("trivial numeric cast: `{}` as `{}`. Cast can be \ + replaced by coercion, this might require type \ + ascription or a temporary variable", + fcx.ty_to_string(t_expr), + fcx.ty_to_string(t_cast))); } else { - fcx.tcx().sess.add_lint(lint::builtin::TRIVIAL_CASTS, - self.expr.id, - self.span, - format!("trivial cast: `{}` as `{}`. Cast can be \ - replaced by coercion, this might require type \ - ascription or a temporary variable", - fcx.infcx().ty_to_string(t_expr), - fcx.infcx().ty_to_string(t_cast))); + fcx.tcx.sess.add_lint(lint::builtin::TRIVIAL_CASTS, + self.expr.id, + self.span, + format!("trivial cast: `{}` as `{}`. 
Cast can be \ + replaced by coercion, this might require type \ + ascription or a temporary variable", + fcx.ty_to_string(t_expr), + fcx.ty_to_string(t_cast))); } } - pub fn check<'a>(mut self, fcx: &FnCtxt<'a, 'tcx>) { - self.expr_ty = structurally_resolved_type(fcx, self.span, self.expr_ty); - self.cast_ty = structurally_resolved_type(fcx, self.span, self.cast_ty); + pub fn check(mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) { + self.expr_ty = fcx.structurally_resolved_type(self.span, self.expr_ty); + self.cast_ty = fcx.structurally_resolved_type(self.span, self.cast_ty); debug!("check_cast({}, {:?} as {:?})", self.expr.id, self.expr_ty, self.cast_ty); - if self.expr_ty.references_error() || self.cast_ty.references_error() { + if !fcx.type_is_known_to_be_sized(self.cast_ty, self.span) { + self.report_cast_to_unsized_type(fcx); + } else if self.expr_ty.references_error() || self.cast_ty.references_error() { // No sense in giving duplicate error messages } else if self.try_coercion_cast(fcx) { self.trivial_cast_lint(fcx); debug!(" -> CoercionCast"); - fcx.tcx().cast_kinds.borrow_mut().insert(self.expr.id, - CastKind::CoercionCast); + fcx.tcx.cast_kinds.borrow_mut().insert(self.expr.id, + CastKind::CoercionCast); } else { match self.do_check(fcx) { Ok(k) => { debug!(" -> {:?}", k); - fcx.tcx().cast_kinds.borrow_mut().insert(self.expr.id, k); + fcx.tcx.cast_kinds.borrow_mut().insert(self.expr.id, k); } Err(e) => self.report_cast_error(fcx, e) };} @@ -237,7 +309,7 @@ impl<'tcx> CastCheck<'tcx> { /// Check a cast, and report an error if one exists. In some cases, this /// can return Ok and create type errors in the fcx rather than returning /// directly. coercion-cast is handled in check instead of here. - fn do_check<'a>(&self, fcx: &FnCtxt<'a, 'tcx>) -> Result { + fn do_check(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Result { use rustc::ty::cast::IntTy::*; use rustc::ty::cast::CastTy::*; @@ -248,8 +320,7 @@ impl<'tcx> CastCheck<'tcx> { (None, Some(t_cast)) => { if let ty::TyFnDef(_, _, f) = self.expr_ty.sty { // Attempt a coercion to a fn pointer type. - let res = coercion::try(fcx, self.expr, - fcx.tcx().mk_ty(ty::TyFnPtr(f))); + let res = fcx.try_coerce(self.expr, fcx.tcx.mk_fn_ptr(f)); if !res.is_ok() { return Err(CastError::NonScalar); } @@ -304,11 +375,11 @@ impl<'tcx> CastCheck<'tcx> { } } - fn check_ptr_ptr_cast<'a>(&self, - fcx: &FnCtxt<'a, 'tcx>, - m_expr: &'tcx ty::TypeAndMut<'tcx>, - m_cast: &'tcx ty::TypeAndMut<'tcx>) - -> Result + fn check_ptr_ptr_cast(&self, + fcx: &FnCtxt<'a, 'gcx, 'tcx>, + m_expr: &'tcx ty::TypeAndMut<'tcx>, + m_cast: &'tcx ty::TypeAndMut<'tcx>) + -> Result { debug!("check_ptr_ptr_cast m_expr={:?} m_cast={:?}", m_expr, m_cast); @@ -325,16 +396,16 @@ impl<'tcx> CastCheck<'tcx> { } // vtable kinds must match - match (unsize_kind(fcx, m_cast.ty), unsize_kind(fcx, m_expr.ty)) { + match (fcx.unsize_kind(m_cast.ty), fcx.unsize_kind(m_expr.ty)) { (Some(a), Some(b)) if a == b => Ok(CastKind::PtrPtrCast), _ => Err(CastError::DifferingKinds) } } - fn check_fptr_ptr_cast<'a>(&self, - fcx: &FnCtxt<'a, 'tcx>, - m_cast: &'tcx ty::TypeAndMut<'tcx>) - -> Result + fn check_fptr_ptr_cast(&self, + fcx: &FnCtxt<'a, 'gcx, 'tcx>, + m_cast: &'tcx ty::TypeAndMut<'tcx>) + -> Result { // fptr-ptr cast. 
must be to sized ptr @@ -345,10 +416,10 @@ impl<'tcx> CastCheck<'tcx> { } } - fn check_ptr_addr_cast<'a>(&self, - fcx: &FnCtxt<'a, 'tcx>, - m_expr: &'tcx ty::TypeAndMut<'tcx>) - -> Result + fn check_ptr_addr_cast(&self, + fcx: &FnCtxt<'a, 'gcx, 'tcx>, + m_expr: &'tcx ty::TypeAndMut<'tcx>) + -> Result { // ptr-addr cast. must be from sized ptr @@ -359,11 +430,11 @@ impl<'tcx> CastCheck<'tcx> { } } - fn check_ref_cast<'a>(&self, - fcx: &FnCtxt<'a, 'tcx>, - m_expr: &'tcx ty::TypeAndMut<'tcx>, - m_cast: &'tcx ty::TypeAndMut<'tcx>) - -> Result + fn check_ref_cast(&self, + fcx: &FnCtxt<'a, 'gcx, 'tcx>, + m_expr: &'tcx ty::TypeAndMut<'tcx>, + m_cast: &'tcx ty::TypeAndMut<'tcx>) + -> Result { // array-ptr-cast. @@ -377,7 +448,7 @@ impl<'tcx> CastCheck<'tcx> { // from a region pointer to a vector. // this will report a type mismatch if needed - demand::eqtype(fcx, self.span, ety, m_cast.ty); + fcx.demand_eqtype(self.span, ety, m_cast.ty); return Ok(CastKind::ArrayPtrCast); } } @@ -385,10 +456,10 @@ impl<'tcx> CastCheck<'tcx> { Err(CastError::IllegalCast) } - fn check_addr_ptr_cast<'a>(&self, - fcx: &FnCtxt<'a, 'tcx>, - m_cast: &'tcx ty::TypeAndMut<'tcx>) - -> Result + fn check_addr_ptr_cast(&self, + fcx: &FnCtxt<'a, 'gcx, 'tcx>, + m_cast: &'tcx ty::TypeAndMut<'tcx>) + -> Result { // ptr-addr cast. pointer must be thin. if fcx.type_is_known_to_be_sized(m_cast.ty, self.span) { @@ -398,8 +469,19 @@ impl<'tcx> CastCheck<'tcx> { } } - fn try_coercion_cast<'a>(&self, fcx: &FnCtxt<'a, 'tcx>) -> bool { - coercion::try(fcx, self.expr, self.cast_ty).is_ok() + fn try_coercion_cast(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> bool { + fcx.try_coerce(self.expr, self.cast_ty).is_ok() } } + +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + fn type_is_known_to_be_sized(&self, + ty: Ty<'tcx>, + span: Span) + -> bool + { + traits::type_known_to_meet_builtin_bound(self, ty, ty::BoundSized, span) + } +} + diff --git a/src/librustc_typeck/check/closure.rs b/src/librustc_typeck/check/closure.rs index 7bca570411..d3396eb4c1 100644 --- a/src/librustc_typeck/check/closure.rs +++ b/src/librustc_typeck/check/closure.rs @@ -12,247 +12,235 @@ use super::{check_fn, Expectation, FnCtxt}; -use astconv; +use astconv::AstConv; use rustc::ty::subst; use rustc::ty::{self, ToPolyTraitRef, Ty}; use std::cmp; use syntax::abi::Abi; use rustc::hir; -pub fn check_expr_closure<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, - expr: &hir::Expr, - _capture: hir::CaptureClause, - decl: &'tcx hir::FnDecl, - body: &'tcx hir::Block, - expected: Expectation<'tcx>) { - debug!("check_expr_closure(expr={:?},expected={:?})", - expr, - expected); - - // It's always helpful for inference if we know the kind of - // closure sooner rather than later, so first examine the expected - // type, and see if can glean a closure kind from there. - let (expected_sig,expected_kind) = match expected.to_option(fcx) { - Some(ty) => deduce_expectations_from_expected_type(fcx, ty), - None => (None, None) - }; - check_closure(fcx, expr, expected_kind, decl, body, expected_sig) -} +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + pub fn check_expr_closure(&self, + expr: &hir::Expr, + _capture: hir::CaptureClause, + decl: &'gcx hir::FnDecl, + body: &'gcx hir::Block, + expected: Expectation<'tcx>) { + debug!("check_expr_closure(expr={:?},expected={:?})", + expr, + expected); + + // It's always helpful for inference if we know the kind of + // closure sooner rather than later, so first examine the expected + // type, and see if can glean a closure kind from there. 
+ let (expected_sig,expected_kind) = match expected.to_option(self) { + Some(ty) => self.deduce_expectations_from_expected_type(ty), + None => (None, None) + }; + self.check_closure(expr, expected_kind, decl, body, expected_sig) + } -fn check_closure<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, - expr: &hir::Expr, - opt_kind: Option, - decl: &'tcx hir::FnDecl, - body: &'tcx hir::Block, - expected_sig: Option>) { - let expr_def_id = fcx.tcx().map.local_def_id(expr.id); - - debug!("check_closure opt_kind={:?} expected_sig={:?}", - opt_kind, - expected_sig); - - let mut fn_ty = astconv::ty_of_closure(fcx, - hir::Unsafety::Normal, - decl, - Abi::RustCall, - expected_sig); - - // Create type variables (for now) to represent the transformed - // types of upvars. These will be unified during the upvar - // inference phase (`upvar.rs`). - let num_upvars = fcx.tcx().with_freevars(expr.id, |fv| fv.len()); - let upvar_tys = fcx.infcx().next_ty_vars(num_upvars); - - debug!("check_closure: expr.id={:?} upvar_tys={:?}", - expr.id, upvar_tys); - - let closure_type = - fcx.ccx.tcx.mk_closure( - expr_def_id, - fcx.ccx.tcx.mk_substs(fcx.inh.infcx.parameter_environment.free_substs.clone()), + fn check_closure(&self, + expr: &hir::Expr, + opt_kind: Option, + decl: &'gcx hir::FnDecl, + body: &'gcx hir::Block, + expected_sig: Option>) { + let expr_def_id = self.tcx.map.local_def_id(expr.id); + + debug!("check_closure opt_kind={:?} expected_sig={:?}", + opt_kind, + expected_sig); + + let mut fn_ty = AstConv::ty_of_closure(self, + hir::Unsafety::Normal, + decl, + Abi::RustCall, + expected_sig); + + // Create type variables (for now) to represent the transformed + // types of upvars. These will be unified during the upvar + // inference phase (`upvar.rs`). + let num_upvars = self.tcx.with_freevars(expr.id, |fv| fv.len()); + let upvar_tys = self.next_ty_vars(num_upvars); + + debug!("check_closure: expr.id={:?} upvar_tys={:?}", + expr.id, upvar_tys); + + let closure_type = self.tcx.mk_closure(expr_def_id, + self.parameter_environment.free_substs, upvar_tys); - fcx.write_ty(expr.id, closure_type); - - let fn_sig = fcx.tcx().liberate_late_bound_regions( - fcx.tcx().region_maps.call_site_extent(expr.id, body.id), &fn_ty.sig); - - check_fn(fcx.ccx, - hir::Unsafety::Normal, - expr.id, - &fn_sig, - decl, - expr.id, - &body, - fcx.inh); - - // Tuple up the arguments and insert the resulting function type into - // the `closures` table. 
- fn_ty.sig.0.inputs = vec![fcx.tcx().mk_tup(fn_ty.sig.0.inputs)]; - - debug!("closure for {:?} --> sig={:?} opt_kind={:?}", - expr_def_id, - fn_ty.sig, - opt_kind); - - fcx.inh.tables.borrow_mut().closure_tys.insert(expr_def_id, fn_ty); - match opt_kind { - Some(kind) => { fcx.inh.tables.borrow_mut().closure_kinds.insert(expr_def_id, kind); } - None => { } - } -} + self.write_ty(expr.id, closure_type); -fn deduce_expectations_from_expected_type<'a,'tcx>( - fcx: &FnCtxt<'a,'tcx>, - expected_ty: Ty<'tcx>) - -> (Option>,Option) -{ - debug!("deduce_expectations_from_expected_type(expected_ty={:?})", - expected_ty); - - match expected_ty.sty { - ty::TyTrait(ref object_type) => { - let proj_bounds = object_type.projection_bounds_with_self_ty(fcx.tcx(), - fcx.tcx().types.err); - let sig = proj_bounds.iter() - .filter_map(|pb| deduce_sig_from_projection(fcx, pb)) - .next(); - let kind = fcx.tcx().lang_items.fn_trait_kind(object_type.principal_def_id()); - (sig, kind) - } - ty::TyInfer(ty::TyVar(vid)) => { - deduce_expectations_from_obligations(fcx, vid) - } - _ => { - (None, None) + let fn_sig = self.tcx.liberate_late_bound_regions( + self.tcx.region_maps.call_site_extent(expr.id, body.id), &fn_ty.sig); + + check_fn(self, hir::Unsafety::Normal, expr.id, &fn_sig, decl, expr.id, &body); + + // Tuple up the arguments and insert the resulting function type into + // the `closures` table. + fn_ty.sig.0.inputs = vec![self.tcx.mk_tup(fn_ty.sig.0.inputs)]; + + debug!("closure for {:?} --> sig={:?} opt_kind={:?}", + expr_def_id, + fn_ty.sig, + opt_kind); + + self.tables.borrow_mut().closure_tys.insert(expr_def_id, fn_ty); + match opt_kind { + Some(kind) => { self.tables.borrow_mut().closure_kinds.insert(expr_def_id, kind); } + None => { } } } -} -fn deduce_expectations_from_obligations<'a,'tcx>( - fcx: &FnCtxt<'a,'tcx>, - expected_vid: ty::TyVid) - -> (Option>, Option) -{ - let fulfillment_cx = fcx.inh.fulfillment_cx.borrow(); - // Here `expected_ty` is known to be a type inference variable. - - let expected_sig = - fulfillment_cx - .pending_obligations() - .iter() - .map(|obligation| &obligation.obligation) - .filter_map(|obligation| { - debug!("deduce_expectations_from_obligations: obligation.predicate={:?}", - obligation.predicate); - - match obligation.predicate { - // Given a Projection predicate, we can potentially infer - // the complete signature. - ty::Predicate::Projection(ref proj_predicate) => { - let trait_ref = proj_predicate.to_poly_trait_ref(); - self_type_matches_expected_vid(fcx, trait_ref, expected_vid) - .and_then(|_| deduce_sig_from_projection(fcx, proj_predicate)) - } - _ => { - None - } + fn deduce_expectations_from_expected_type(&self, expected_ty: Ty<'tcx>) + -> (Option>,Option) + { + debug!("deduce_expectations_from_expected_type(expected_ty={:?})", + expected_ty); + + match expected_ty.sty { + ty::TyTrait(ref object_type) => { + let proj_bounds = object_type.projection_bounds_with_self_ty(self.tcx, + self.tcx.types.err); + let sig = proj_bounds.iter() + .filter_map(|pb| self.deduce_sig_from_projection(pb)) + .next(); + let kind = self.tcx.lang_items.fn_trait_kind(object_type.principal_def_id()); + (sig, kind) } - }) - .next(); - - // Even if we can't infer the full signature, we may be able to - // infer the kind. This can occur if there is a trait-reference - // like `F : Fn`. Note that due to subtyping we could encounter - // many viable options, so pick the most restrictive. 
- let expected_kind = - fulfillment_cx - .pending_obligations() - .iter() - .map(|obligation| &obligation.obligation) - .filter_map(|obligation| { - let opt_trait_ref = match obligation.predicate { - ty::Predicate::Projection(ref data) => Some(data.to_poly_trait_ref()), - ty::Predicate::Trait(ref data) => Some(data.to_poly_trait_ref()), - ty::Predicate::Equate(..) => None, - ty::Predicate::RegionOutlives(..) => None, - ty::Predicate::TypeOutlives(..) => None, - ty::Predicate::WellFormed(..) => None, - ty::Predicate::ObjectSafe(..) => None, - }; - opt_trait_ref - .and_then(|trait_ref| self_type_matches_expected_vid(fcx, trait_ref, expected_vid)) - .and_then(|trait_ref| fcx.tcx().lang_items.fn_trait_kind(trait_ref.def_id())) - }) - .fold(None, pick_most_restrictive_closure_kind); - - (expected_sig, expected_kind) -} + ty::TyInfer(ty::TyVar(vid)) => { + self.deduce_expectations_from_obligations(vid) + } + _ => { + (None, None) + } + } + } -fn pick_most_restrictive_closure_kind(best: Option, - cur: ty::ClosureKind) - -> Option -{ - match best { - None => Some(cur), - Some(best) => Some(cmp::min(best, cur)) + fn deduce_expectations_from_obligations(&self, expected_vid: ty::TyVid) + -> (Option>, Option) + { + let fulfillment_cx = self.fulfillment_cx.borrow(); + // Here `expected_ty` is known to be a type inference variable. + + let expected_sig = + fulfillment_cx + .pending_obligations() + .iter() + .map(|obligation| &obligation.obligation) + .filter_map(|obligation| { + debug!("deduce_expectations_from_obligations: obligation.predicate={:?}", + obligation.predicate); + + match obligation.predicate { + // Given a Projection predicate, we can potentially infer + // the complete signature. + ty::Predicate::Projection(ref proj_predicate) => { + let trait_ref = proj_predicate.to_poly_trait_ref(); + self.self_type_matches_expected_vid(trait_ref, expected_vid) + .and_then(|_| self.deduce_sig_from_projection(proj_predicate)) + } + _ => { + None + } + } + }) + .next(); + + // Even if we can't infer the full signature, we may be able to + // infer the kind. This can occur if there is a trait-reference + // like `F : Fn`. Note that due to subtyping we could encounter + // many viable options, so pick the most restrictive. + let expected_kind = + fulfillment_cx + .pending_obligations() + .iter() + .map(|obligation| &obligation.obligation) + .filter_map(|obligation| { + let opt_trait_ref = match obligation.predicate { + ty::Predicate::Projection(ref data) => Some(data.to_poly_trait_ref()), + ty::Predicate::Trait(ref data) => Some(data.to_poly_trait_ref()), + ty::Predicate::Equate(..) => None, + ty::Predicate::RegionOutlives(..) => None, + ty::Predicate::TypeOutlives(..) => None, + ty::Predicate::WellFormed(..) => None, + ty::Predicate::ObjectSafe(..) => None, + ty::Predicate::Rfc1592(..) => None, + + // NB: This predicate is created by breaking down a + // `ClosureType: FnFoo()` predicate, where + // `ClosureType` represents some `TyClosure`. It can't + // possibly be referring to the current closure, + // because we haven't produced the `TyClosure` for + // this closure yet; this is exactly why the other + // code is looking for a self type of a unresolved + // inference variable. + ty::Predicate::ClosureKind(..) 
=> None, + }; + opt_trait_ref + .and_then(|tr| self.self_type_matches_expected_vid(tr, expected_vid)) + .and_then(|tr| self.tcx.lang_items.fn_trait_kind(tr.def_id())) + }) + .fold(None, |best, cur| Some(best.map_or(cur, |best| cmp::min(best, cur)))); + + (expected_sig, expected_kind) } -} -/// Given a projection like "::Result == Y", we can deduce -/// everything we need to know about a closure. -fn deduce_sig_from_projection<'a,'tcx>( - fcx: &FnCtxt<'a,'tcx>, - projection: &ty::PolyProjectionPredicate<'tcx>) - -> Option> -{ - let tcx = fcx.tcx(); + /// Given a projection like "::Result == Y", we can deduce + /// everything we need to know about a closure. + fn deduce_sig_from_projection(&self, + projection: &ty::PolyProjectionPredicate<'tcx>) + -> Option> + { + let tcx = self.tcx; - debug!("deduce_sig_from_projection({:?})", - projection); + debug!("deduce_sig_from_projection({:?})", + projection); - let trait_ref = projection.to_poly_trait_ref(); + let trait_ref = projection.to_poly_trait_ref(); - if tcx.lang_items.fn_trait_kind(trait_ref.def_id()).is_none() { - return None; - } + if tcx.lang_items.fn_trait_kind(trait_ref.def_id()).is_none() { + return None; + } - let arg_param_ty = *trait_ref.substs().types.get(subst::TypeSpace, 0); - let arg_param_ty = fcx.infcx().resolve_type_vars_if_possible(&arg_param_ty); - debug!("deduce_sig_from_projection: arg_param_ty {:?}", arg_param_ty); + let arg_param_ty = *trait_ref.substs().types.get(subst::TypeSpace, 0); + let arg_param_ty = self.resolve_type_vars_if_possible(&arg_param_ty); + debug!("deduce_sig_from_projection: arg_param_ty {:?}", arg_param_ty); - let input_tys = match arg_param_ty.sty { - ty::TyTuple(ref tys) => { (*tys).clone() } - _ => { return None; } - }; - debug!("deduce_sig_from_projection: input_tys {:?}", input_tys); + let input_tys = match arg_param_ty.sty { + ty::TyTuple(tys) => tys.to_vec(), + _ => { return None; } + }; + debug!("deduce_sig_from_projection: input_tys {:?}", input_tys); - let ret_param_ty = projection.0.ty; - let ret_param_ty = fcx.infcx().resolve_type_vars_if_possible(&ret_param_ty); - debug!("deduce_sig_from_projection: ret_param_ty {:?}", ret_param_ty); + let ret_param_ty = projection.0.ty; + let ret_param_ty = self.resolve_type_vars_if_possible(&ret_param_ty); + debug!("deduce_sig_from_projection: ret_param_ty {:?}", ret_param_ty); - let fn_sig = ty::FnSig { - inputs: input_tys, - output: ty::FnConverging(ret_param_ty), - variadic: false - }; - debug!("deduce_sig_from_projection: fn_sig {:?}", fn_sig); + let fn_sig = ty::FnSig { + inputs: input_tys, + output: ty::FnConverging(ret_param_ty), + variadic: false + }; + debug!("deduce_sig_from_projection: fn_sig {:?}", fn_sig); - Some(fn_sig) -} + Some(fn_sig) + } -fn self_type_matches_expected_vid<'a,'tcx>( - fcx: &FnCtxt<'a,'tcx>, - trait_ref: ty::PolyTraitRef<'tcx>, - expected_vid: ty::TyVid) - -> Option> -{ - let self_ty = fcx.infcx().shallow_resolve(trait_ref.self_ty()); - debug!("self_type_matches_expected_vid(trait_ref={:?}, self_ty={:?})", - trait_ref, - self_ty); - match self_ty.sty { - ty::TyInfer(ty::TyVar(v)) if expected_vid == v => Some(trait_ref), - _ => None, + fn self_type_matches_expected_vid(&self, + trait_ref: ty::PolyTraitRef<'tcx>, + expected_vid: ty::TyVid) + -> Option> + { + let self_ty = self.shallow_resolve(trait_ref.self_ty()); + debug!("self_type_matches_expected_vid(trait_ref={:?}, self_ty={:?})", + trait_ref, + self_ty); + match self_ty.sty { + ty::TyInfer(ty::TyVar(v)) if expected_vid == v => Some(trait_ref), + _ => None, + } } } 
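The closure-kind deduction in the hunk above folds every candidate `Fn`/`FnMut`/`FnOnce` trait reference down to the most restrictive kind seen so far, relying on `cmp::min` and the ordering `Fn < FnMut < FnOnce`. The following standalone sketch, which is not part of the patch and uses a hypothetical `Kind` enum as a stand-in for `ty::ClosureKind`, shows the same fold idiom in isolation:

use std::cmp;

// Stand-in for ty::ClosureKind; the derived Ord makes Fn the "smallest",
// i.e. most restrictive, kind.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum Kind { Fn, FnMut, FnOnce }

fn main() {
    // Candidate kinds gleaned from pending obligations, in arbitrary order.
    let candidates = [Kind::FnOnce, Kind::Fn, Kind::FnMut];

    // Same shape as the fold above: keep the minimum (most restrictive)
    // kind encountered, starting from "no candidate yet".
    let best = candidates.iter()
        .fold(None, |best, &cur| Some(best.map_or(cur, |b| cmp::min(b, cur))));

    assert_eq!(best, Some(Kind::Fn));
}

With an empty candidate list the fold stays `None`, matching the case where no trait obligation constrains the closure kind.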
diff --git a/src/librustc_typeck/check/coercion.rs b/src/librustc_typeck/check/coercion.rs index f1c6868efd..4861ab15e2 100644 --- a/src/librustc_typeck/check/coercion.rs +++ b/src/librustc_typeck/check/coercion.rs @@ -60,31 +60,38 @@ //! sort of a minor point so I've opted to leave it for later---after all //! we may want to adjust precisely when coercions occur. -use check::{autoderef, FnCtxt, UnresolvedTypeAction}; +use check::{FnCtxt, UnresolvedTypeAction}; +use rustc::hir; use rustc::infer::{Coercion, InferOk, TypeOrigin, TypeTrace}; use rustc::traits::{self, ObligationCause}; -use rustc::traits::{predicate_for_trait_def, report_selection_error}; use rustc::ty::adjustment::{AutoAdjustment, AutoDerefRef, AdjustDerefRef}; use rustc::ty::adjustment::{AutoPtr, AutoUnsafe, AdjustReifyFnPointer}; use rustc::ty::adjustment::{AdjustUnsafeFnPointer, AdjustMutToConstPointer}; -use rustc::ty::{self, LvaluePreference, TypeAndMut, Ty, TyCtxt}; +use rustc::ty::{self, LvaluePreference, TypeAndMut, Ty}; use rustc::ty::fold::TypeFoldable; use rustc::ty::error::TypeError; -use rustc::ty::relate::{RelateResult, TypeRelation}; +use rustc::ty::relate::RelateResult; use util::common::indent; use std::cell::RefCell; use std::collections::VecDeque; -use rustc::hir; +use std::ops::Deref; -struct Coerce<'a, 'tcx: 'a> { - fcx: &'a FnCtxt<'a, 'tcx>, +struct Coerce<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, origin: TypeOrigin, use_lub: bool, unsizing_obligations: RefCell>>, } +impl<'a, 'gcx, 'tcx> Deref for Coerce<'a, 'gcx, 'tcx> { + type Target = FnCtxt<'a, 'gcx, 'tcx>; + fn deref(&self) -> &Self::Target { + &self.fcx + } +} + type CoerceResult<'tcx> = RelateResult<'tcx, (Ty<'tcx>, AutoAdjustment<'tcx>)>; fn coerce_mutbls<'tcx>(from_mutbl: hir::Mutability, @@ -98,8 +105,8 @@ fn coerce_mutbls<'tcx>(from_mutbl: hir::Mutability, } } -impl<'f, 'tcx> Coerce<'f, 'tcx> { - fn new(fcx: &'f FnCtxt<'f, 'tcx>, origin: TypeOrigin) -> Self { +impl<'f, 'gcx, 'tcx> Coerce<'f, 'gcx, 'tcx> { + fn new(fcx: &'f FnCtxt<'f, 'gcx, 'tcx>, origin: TypeOrigin) -> Self { Coerce { fcx: fcx, origin: origin, @@ -108,23 +115,18 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } } - fn tcx(&self) -> &TyCtxt<'tcx> { - self.fcx.tcx() - } - fn unify(&self, a: Ty<'tcx>, b: Ty<'tcx>) -> RelateResult<'tcx, Ty<'tcx>> { - let infcx = self.fcx.infcx(); - infcx.commit_if_ok(|_| { + self.commit_if_ok(|_| { let trace = TypeTrace::types(self.origin, false, a, b); if self.use_lub { - infcx.lub(false, trace, &a, &b) + self.lub(false, trace, &a, &b) .map(|InferOk { value, obligations }| { // FIXME(#32730) propagate obligations assert!(obligations.is_empty()); value }) } else { - infcx.sub(false, trace, &a, &b) + self.sub(false, trace, &a, &b) .map(|InferOk { value, obligations }| { // FIXME(#32730) propagate obligations assert!(obligations.is_empty()); @@ -157,7 +159,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { where E: Fn() -> I, I: IntoIterator { - let a = self.fcx.infcx().shallow_resolve(a); + let a = self.shallow_resolve(a); debug!("Coerce.tys({:?} => {:?})", a, b); // Just ignore error types. 
@@ -241,10 +243,10 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { let lvalue_pref = LvaluePreference::from_mutbl(mt_b.mutbl); let mut first_error = None; let mut r_borrow_var = None; - let (_, autoderefs, success) = autoderef(self.fcx, span, a, exprs, - UnresolvedTypeAction::Ignore, - lvalue_pref, - |referent_ty, autoderef| + let (_, autoderefs, success) = self.autoderef(span, a, exprs, + UnresolvedTypeAction::Ignore, + lvalue_pref, + |referent_ty, autoderef| { if autoderef == 0 { // Don't let this pass, otherwise it would cause @@ -329,12 +331,12 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } else { if r_borrow_var.is_none() { // create var lazilly, at most once let coercion = Coercion(span); - let r = self.fcx.infcx().next_region_var(coercion); - r_borrow_var = Some(self.tcx().mk_region(r)); // [4] above + let r = self.next_region_var(coercion); + r_borrow_var = Some(self.tcx.mk_region(r)); // [4] above } r_borrow_var.unwrap() }; - let derefd_ty_a = self.tcx().mk_ref(r, TypeAndMut { + let derefd_ty_a = self.tcx.mk_ref(r, TypeAndMut { ty: referent_ty, mutbl: mt_b.mutbl // [1] above }); @@ -406,8 +408,8 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { source, target); - let traits = (self.tcx().lang_items.unsize_trait(), - self.tcx().lang_items.coerce_unsized_trait()); + let traits = (self.tcx.lang_items.unsize_trait(), + self.tcx.lang_items.coerce_unsized_trait()); let (unsize_did, coerce_unsized_did) = if let (Some(u), Some(cu)) = traits { (u, cu) } else { @@ -426,8 +428,8 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { coerce_mutbls(mt_a.mutbl, mt_b.mutbl)?; let coercion = Coercion(self.origin.span()); - let r_borrow = self.fcx.infcx().next_region_var(coercion); - let region = self.tcx().mk_region(r_borrow); + let r_borrow = self.next_region_var(coercion); + let region = self.tcx.mk_region(r_borrow); (mt_a.ty, Some(AutoPtr(region, mt_b.mutbl))) } (&ty::TyRef(_, mt_a), &ty::TyRawPtr(mt_b)) => { @@ -436,22 +438,21 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } _ => (source, None) }; - let source = source.adjust_for_autoref(self.tcx(), reborrow); + let source = source.adjust_for_autoref(self.tcx, reborrow); - let mut selcx = traits::SelectionContext::new(self.fcx.infcx()); + let mut selcx = traits::SelectionContext::new(self); // Use a FIFO queue for this custom fulfillment procedure. let mut queue = VecDeque::new(); let mut leftover_predicates = vec![]; // Create an obligation for `Source: CoerceUnsized`. - let cause = ObligationCause::misc(self.origin.span(), self.fcx.body_id); - queue.push_back(predicate_for_trait_def(self.tcx(), - cause, - coerce_unsized_did, - 0, - source, - vec![target])); + let cause = ObligationCause::misc(self.origin.span(), self.body_id); + queue.push_back(self.tcx.predicate_for_trait_def(cause, + coerce_unsized_did, + 0, + source, + vec![target])); // Keep resolving `CoerceUnsized` and `Unsize` predicates to avoid // emitting a coercion in cases like `Foo<$1>` -> `Foo<$2>`, where @@ -477,7 +478,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { // Object safety violations or miscellaneous. Err(err) => { - report_selection_error(self.fcx.infcx(), &obligation, &err); + self.report_selection_error(&obligation, &err, None); // Treat this like an obligation and follow through // with the unsizing - the lack of a coercion should // be silent, as it causes a type mismatch later. @@ -513,13 +514,13 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { * into a closure or a `proc`. 
*/ - let b = self.fcx.infcx().shallow_resolve(b); + let b = self.shallow_resolve(b); debug!("coerce_from_fn_pointer(a={:?}, b={:?})", a, b); if let ty::TyFnPtr(fn_ty_b) = b.sty { match (fn_ty_a.unsafety, fn_ty_b.unsafety) { (hir::Unsafety::Normal, hir::Unsafety::Unsafe) => { - let unsafe_a = self.tcx().safe_to_unsafe_fn_ty(fn_ty_a); + let unsafe_a = self.tcx.safe_to_unsafe_fn_ty(fn_ty_a); return self.unify_and_identity(unsafe_a, b).map(|(ty, _)| { (ty, AdjustUnsafeFnPointer) }); @@ -540,12 +541,12 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { * into a closure or a `proc`. */ - let b = self.fcx.infcx().shallow_resolve(b); + let b = self.shallow_resolve(b); debug!("coerce_from_fn_item(a={:?}, b={:?})", a, b); match b.sty { ty::TyFnPtr(_) => { - let a_fn_pointer = self.tcx().mk_ty(ty::TyFnPtr(fn_ty_a)); + let a_fn_pointer = self.tcx.mk_fn_ptr(fn_ty_a); self.unify_and_identity(a_fn_pointer, b).map(|(ty, _)| { (ty, AdjustReifyFnPointer) }) @@ -572,7 +573,7 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { }; // Check that the types which they point at are compatible. - let a_unsafe = self.tcx().mk_ptr(ty::TypeAndMut{ mutbl: mutbl_b, ty: mt_a.ty }); + let a_unsafe = self.tcx.mk_ptr(ty::TypeAndMut{ mutbl: mutbl_b, ty: mt_a.ty }); let (ty, noop) = self.unify_and_identity(a_unsafe, b)?; coerce_mutbls(mt_a.mutbl, mutbl_b)?; @@ -593,11 +594,11 @@ impl<'f, 'tcx> Coerce<'f, 'tcx> { } } -fn apply<'a, 'b, 'tcx, E, I>(coerce: &mut Coerce<'a, 'tcx>, - exprs: &E, - a: Ty<'tcx>, - b: Ty<'tcx>) - -> CoerceResult<'tcx> +fn apply<'a, 'b, 'gcx, 'tcx, E, I>(coerce: &mut Coerce<'a, 'gcx, 'tcx>, + exprs: &E, + a: Ty<'tcx>, + b: Ty<'tcx>) + -> CoerceResult<'tcx> where E: Fn() -> I, I: IntoIterator { @@ -616,166 +617,168 @@ fn apply<'a, 'b, 'tcx, E, I>(coerce: &mut Coerce<'a, 'tcx>, Ok((ty, adjustment)) } -/// Attempt to coerce an expression to a type, and return the -/// adjusted type of the expression, if successful. -/// Adjustments are only recorded if the coercion succeeded. -/// The expressions *must not* have any pre-existing adjustments. -pub fn try<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &hir::Expr, - target: Ty<'tcx>) - -> RelateResult<'tcx, Ty<'tcx>> { - let source = fcx.resolve_type_vars_if_possible(fcx.expr_ty(expr)); - debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target); - - let mut coerce = Coerce::new(fcx, TypeOrigin::ExprAssignable(expr.span)); - fcx.infcx().commit_if_ok(|_| { - let (ty, adjustment) = - apply(&mut coerce, &|| Some(expr), source, target)?; - if !adjustment.is_identity() { - debug!("Success, coerced with {:?}", adjustment); - assert!(!fcx.inh.tables.borrow().adjustments.contains_key(&expr.id)); - fcx.write_adjustment(expr.id, adjustment); - } - Ok(ty) - }) -} +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + /// Attempt to coerce an expression to a type, and return the + /// adjusted type of the expression, if successful. + /// Adjustments are only recorded if the coercion succeeded. + /// The expressions *must not* have any pre-existing adjustments. 
+ pub fn try_coerce(&self, + expr: &hir::Expr, + target: Ty<'tcx>) + -> RelateResult<'tcx, Ty<'tcx>> { + let source = self.resolve_type_vars_with_obligations(self.expr_ty(expr)); + debug!("coercion::try({:?}: {:?} -> {:?})", expr, source, target); + + let mut coerce = Coerce::new(self, TypeOrigin::ExprAssignable(expr.span)); + self.commit_if_ok(|_| { + let (ty, adjustment) = + apply(&mut coerce, &|| Some(expr), source, target)?; + if !adjustment.is_identity() { + debug!("Success, coerced with {:?}", adjustment); + assert!(!self.tables.borrow().adjustments.contains_key(&expr.id)); + self.write_adjustment(expr.id, adjustment); + } + Ok(ty) + }) + } -/// Given some expressions, their known unified type and another expression, -/// tries to unify the types, potentially inserting coercions on any of the -/// provided expressions and returns their LUB (aka "common supertype"). -pub fn try_find_lub<'a, 'b, 'tcx, E, I>(fcx: &FnCtxt<'a, 'tcx>, - origin: TypeOrigin, - exprs: E, - prev_ty: Ty<'tcx>, - new: &'b hir::Expr) - -> RelateResult<'tcx, Ty<'tcx>> - // FIXME(eddyb) use copyable iterators when that becomes ergonomic. - where E: Fn() -> I, - I: IntoIterator { + /// Given some expressions, their known unified type and another expression, + /// tries to unify the types, potentially inserting coercions on any of the + /// provided expressions and returns their LUB (aka "common supertype"). + pub fn try_find_coercion_lub<'b, E, I>(&self, + origin: TypeOrigin, + exprs: E, + prev_ty: Ty<'tcx>, + new: &'b hir::Expr) + -> RelateResult<'tcx, Ty<'tcx>> + // FIXME(eddyb) use copyable iterators when that becomes ergonomic. + where E: Fn() -> I, + I: IntoIterator { - let prev_ty = fcx.resolve_type_vars_if_possible(prev_ty); - let new_ty = fcx.resolve_type_vars_if_possible(fcx.expr_ty(new)); - debug!("coercion::try_find_lub({:?}, {:?})", prev_ty, new_ty); - - let trace = TypeTrace::types(origin, true, prev_ty, new_ty); - - // Special-case that coercion alone cannot handle: - // Two function item types of differing IDs or Substs. - match (&prev_ty.sty, &new_ty.sty) { - (&ty::TyFnDef(a_def_id, a_substs, a_fty), - &ty::TyFnDef(b_def_id, b_substs, b_fty)) => { - // The signature must always match. - let fty = fcx.infcx().lub(true, trace.clone(), a_fty, b_fty) - .map(|InferOk { value, obligations }| { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); - value - })?; - - if a_def_id == b_def_id { - // Same function, maybe the parameters match. - let substs = fcx.infcx().commit_if_ok(|_| { - fcx.infcx().lub(true, trace.clone(), a_substs, b_substs) - .map(|InferOk { value, obligations }| { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); - value - }) - }).map(|s| fcx.tcx().mk_substs(s)); + let prev_ty = self.resolve_type_vars_with_obligations(prev_ty); + let new_ty = self.resolve_type_vars_with_obligations(self.expr_ty(new)); + debug!("coercion::try_find_lub({:?}, {:?})", prev_ty, new_ty); - if let Ok(substs) = substs { - // We have a LUB of prev_ty and new_ty, just return it. - return Ok(fcx.tcx().mk_fn_def(a_def_id, substs, fty)); - } - } + let trace = TypeTrace::types(origin, true, prev_ty, new_ty); - // Reify both sides and return the reified fn pointer type. - for expr in exprs().into_iter().chain(Some(new)) { - // No adjustments can produce a fn item, so this should never trip. 
- assert!(!fcx.inh.tables.borrow().adjustments.contains_key(&expr.id)); - fcx.write_adjustment(expr.id, AdjustReifyFnPointer); - } - return Ok(fcx.tcx().mk_fn_ptr(fty)); - } - _ => {} - } + // Special-case that coercion alone cannot handle: + // Two function item types of differing IDs or Substs. + match (&prev_ty.sty, &new_ty.sty) { + (&ty::TyFnDef(a_def_id, a_substs, a_fty), + &ty::TyFnDef(b_def_id, b_substs, b_fty)) => { + // The signature must always match. + let fty = self.lub(true, trace.clone(), &a_fty, &b_fty) + .map(|InferOk { value, obligations }| { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()); + value + })?; + + if a_def_id == b_def_id { + // Same function, maybe the parameters match. + let substs = self.commit_if_ok(|_| { + self.lub(true, trace.clone(), &a_substs, &b_substs) + .map(|InferOk { value, obligations }| { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()); + value + }) + }); - let mut coerce = Coerce::new(fcx, origin); - coerce.use_lub = true; + if let Ok(substs) = substs { + // We have a LUB of prev_ty and new_ty, just return it. + return Ok(self.tcx.mk_fn_def(a_def_id, substs, fty)); + } + } - // First try to coerce the new expression to the type of the previous ones, - // but only if the new expression has no coercion already applied to it. - let mut first_error = None; - if !fcx.inh.tables.borrow().adjustments.contains_key(&new.id) { - let result = fcx.infcx().commit_if_ok(|_| { - apply(&mut coerce, &|| Some(new), new_ty, prev_ty) - }); - match result { - Ok((ty, adjustment)) => { - if !adjustment.is_identity() { - fcx.write_adjustment(new.id, adjustment); + // Reify both sides and return the reified fn pointer type. + for expr in exprs().into_iter().chain(Some(new)) { + // No adjustments can produce a fn item, so this should never trip. + assert!(!self.tables.borrow().adjustments.contains_key(&expr.id)); + self.write_adjustment(expr.id, AdjustReifyFnPointer); } - return Ok(ty); + return Ok(self.tcx.mk_fn_ptr(fty)); } - Err(e) => first_error = Some(e) + _ => {} } - } - // Then try to coerce the previous expressions to the type of the new one. - // This requires ensuring there are no coercions applied to *any* of the - // previous expressions, other than noop reborrows (ignoring lifetimes). - for expr in exprs() { - let noop = match fcx.inh.tables.borrow().adjustments.get(&expr.id) { - Some(&AdjustDerefRef(AutoDerefRef { - autoderefs: 1, - autoref: Some(AutoPtr(_, mutbl_adj)), - unsize: None - })) => match fcx.expr_ty(expr).sty { - ty::TyRef(_, mt_orig) => { - // Reborrow that we can safely ignore. - mutbl_adj == mt_orig.mutbl - } - _ => false - }, - Some(_) => false, - None => true - }; + let mut coerce = Coerce::new(self, origin); + coerce.use_lub = true; - if !noop { - return fcx.infcx().commit_if_ok(|_| { - fcx.infcx().lub(true, trace.clone(), &prev_ty, &new_ty) - .map(|InferOk { value, obligations }| { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); - value - }) + // First try to coerce the new expression to the type of the previous ones, + // but only if the new expression has no coercion already applied to it. 
+ let mut first_error = None; + if !self.tables.borrow().adjustments.contains_key(&new.id) { + let result = self.commit_if_ok(|_| { + apply(&mut coerce, &|| Some(new), new_ty, prev_ty) }); + match result { + Ok((ty, adjustment)) => { + if !adjustment.is_identity() { + self.write_adjustment(new.id, adjustment); + } + return Ok(ty); + } + Err(e) => first_error = Some(e) + } } - } - match fcx.infcx().commit_if_ok(|_| apply(&mut coerce, &exprs, prev_ty, new_ty)) { - Err(_) => { - // Avoid giving strange errors on failed attempts. - if let Some(e) = first_error { - Err(e) - } else { - fcx.infcx().commit_if_ok(|_| { - fcx.infcx().lub(true, trace, &prev_ty, &new_ty) + // Then try to coerce the previous expressions to the type of the new one. + // This requires ensuring there are no coercions applied to *any* of the + // previous expressions, other than noop reborrows (ignoring lifetimes). + for expr in exprs() { + let noop = match self.tables.borrow().adjustments.get(&expr.id) { + Some(&AdjustDerefRef(AutoDerefRef { + autoderefs: 1, + autoref: Some(AutoPtr(_, mutbl_adj)), + unsize: None + })) => match self.expr_ty(expr).sty { + ty::TyRef(_, mt_orig) => { + // Reborrow that we can safely ignore. + mutbl_adj == mt_orig.mutbl + } + _ => false + }, + Some(_) => false, + None => true + }; + + if !noop { + return self.commit_if_ok(|_| { + self.lub(true, trace.clone(), &prev_ty, &new_ty) .map(|InferOk { value, obligations }| { // FIXME(#32730) propagate obligations assert!(obligations.is_empty()); value }) - }) + }); } } - Ok((ty, adjustment)) => { - if !adjustment.is_identity() { - for expr in exprs() { - fcx.write_adjustment(expr.id, adjustment); + + match self.commit_if_ok(|_| apply(&mut coerce, &exprs, prev_ty, new_ty)) { + Err(_) => { + // Avoid giving strange errors on failed attempts. 
+ if let Some(e) = first_error { + Err(e) + } else { + self.commit_if_ok(|_| { + self.lub(true, trace, &prev_ty, &new_ty) + .map(|InferOk { value, obligations }| { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()); + value + }) + }) } } - Ok(ty) + Ok((ty, adjustment)) => { + if !adjustment.is_identity() { + for expr in exprs() { + self.write_adjustment(expr.id, adjustment); + } + } + Ok(ty) + } } } } diff --git a/src/librustc_typeck/check/compare_method.rs b/src/librustc_typeck/check/compare_method.rs index 3c12ab8d59..a1a6a83d34 100644 --- a/src/librustc_typeck/check/compare_method.rs +++ b/src/librustc_typeck/check/compare_method.rs @@ -10,13 +10,14 @@ use middle::free_region::FreeRegionMap; use rustc::infer::{self, InferOk, TypeOrigin}; -use rustc::ty::{self, TyCtxt}; +use rustc::ty; use rustc::traits::{self, ProjectionMode}; use rustc::ty::subst::{self, Subst, Substs, VecPerParamSpace}; use syntax::ast; use syntax::codemap::Span; +use CrateCtxt; use super::assoc; /// Checks that a method from an impl conforms to the signature of @@ -30,20 +31,19 @@ use super::assoc; /// - trait_m: the method in the trait /// - impl_trait_ref: the TraitRef corresponding to the trait implementation -pub fn compare_impl_method<'tcx>(tcx: &TyCtxt<'tcx>, - impl_m: &ty::Method<'tcx>, - impl_m_span: Span, - impl_m_body_id: ast::NodeId, - trait_m: &ty::Method<'tcx>, - impl_trait_ref: &ty::TraitRef<'tcx>) { +pub fn compare_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + impl_m: &ty::Method<'tcx>, + impl_m_span: Span, + impl_m_body_id: ast::NodeId, + trait_m: &ty::Method<'tcx>, + impl_trait_ref: &ty::TraitRef<'tcx>) { debug!("compare_impl_method(impl_trait_ref={:?})", impl_trait_ref); debug!("compare_impl_method: impl_trait_ref (liberated) = {:?}", impl_trait_ref); - let mut infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, ProjectionMode::AnyFinal); - let mut fulfillment_cx = traits::FulfillmentContext::new(); + let tcx = ccx.tcx; let trait_to_impl_substs = &impl_trait_ref.substs; @@ -178,7 +178,7 @@ pub fn compare_impl_method<'tcx>(tcx: &TyCtxt<'tcx>, // Create mapping from trait to skolemized. let trait_to_skol_substs = trait_to_impl_substs - .subst(tcx, impl_to_skol_substs) + .subst(tcx, impl_to_skol_substs).clone() .with_method(impl_to_skol_substs.types.get_slice(subst::FnSpace).to_vec(), impl_to_skol_substs.regions.get_slice(subst::FnSpace).to_vec()); debug!("compare_impl_method: trait_to_skol_substs={:?}", @@ -186,7 +186,7 @@ pub fn compare_impl_method<'tcx>(tcx: &TyCtxt<'tcx>, // Check region bounds. FIXME(@jroesch) refactor this away when removing // ParamBounds. - if !check_region_bounds_on_impl_method(tcx, + if !check_region_bounds_on_impl_method(ccx, impl_m_span, impl_m, &trait_m.generics, @@ -196,182 +196,187 @@ pub fn compare_impl_method<'tcx>(tcx: &TyCtxt<'tcx>, return; } - // Create obligations for each predicate declared by the impl - // definition in the context of the trait's parameter - // environment. We can't just use `impl_env.caller_bounds`, - // however, because we want to replace all late-bound regions with - // region variables. - let impl_bounds = - impl_m.predicates.instantiate(tcx, impl_to_skol_substs); - - let (impl_bounds, _) = - infcx.replace_late_bound_regions_with_fresh_var( - impl_m_span, - infer::HigherRankedType, - &ty::Binder(impl_bounds)); - debug!("compare_impl_method: impl_bounds={:?}", - impl_bounds); - - // Normalize the associated types in the trait_bounds. 
- let trait_bounds = trait_m.predicates.instantiate(tcx, &trait_to_skol_substs); - - // Obtain the predicate split predicate sets for each. - let trait_pred = trait_bounds.predicates.split(); - let impl_pred = impl_bounds.predicates.split(); - - // This is the only tricky bit of the new way we check implementation methods - // We need to build a set of predicates where only the FnSpace bounds - // are from the trait and we assume all other bounds from the implementation - // to be previously satisfied. - // - // We then register the obligations from the impl_m and check to see - // if all constraints hold. - let hybrid_preds = VecPerParamSpace::new( - impl_pred.types, - impl_pred.selfs, - trait_pred.fns - ); - - // Construct trait parameter environment and then shift it into the skolemized viewpoint. - // The key step here is to update the caller_bounds's predicates to be - // the new hybrid bounds we computed. - let normalize_cause = traits::ObligationCause::misc(impl_m_span, impl_m_body_id); - let trait_param_env = impl_param_env.with_caller_bounds(hybrid_preds.into_vec()); - let trait_param_env = traits::normalize_param_env_or_error(trait_param_env, - normalize_cause.clone()); - // FIXME(@jroesch) this seems ugly, but is a temporary change - infcx.parameter_environment = trait_param_env; - - debug!("compare_impl_method: trait_bounds={:?}", - infcx.parameter_environment.caller_bounds); - - let mut selcx = traits::SelectionContext::new(&infcx); - - for predicate in impl_pred.fns { - let traits::Normalized { value: predicate, .. } = - traits::normalize(&mut selcx, normalize_cause.clone(), &predicate); - - let cause = traits::ObligationCause { - span: impl_m_span, - body_id: impl_m_body_id, - code: traits::ObligationCauseCode::CompareImplMethodObligation - }; - - fulfillment_cx.register_predicate_obligation( - &infcx, - traits::Obligation::new(cause, predicate)); - } + tcx.infer_ctxt(None, None, ProjectionMode::AnyFinal).enter(|mut infcx| { + let mut fulfillment_cx = traits::FulfillmentContext::new(); + + // Normalize the associated types in the trait_bounds. + let trait_bounds = trait_m.predicates.instantiate(tcx, &trait_to_skol_substs); + + // Create obligations for each predicate declared by the impl + // definition in the context of the trait's parameter + // environment. We can't just use `impl_env.caller_bounds`, + // however, because we want to replace all late-bound regions with + // region variables. + let impl_bounds = + impl_m.predicates.instantiate(tcx, impl_to_skol_substs); + + debug!("compare_impl_method: impl_bounds={:?}", impl_bounds); + + // Obtain the predicate split predicate sets for each. + let trait_pred = trait_bounds.predicates.split(); + let impl_pred = impl_bounds.predicates.split(); + + // This is the only tricky bit of the new way we check implementation methods + // We need to build a set of predicates where only the FnSpace bounds + // are from the trait and we assume all other bounds from the implementation + // to be previously satisfied. + // + // We then register the obligations from the impl_m and check to see + // if all constraints hold. + let hybrid_preds = VecPerParamSpace::new( + impl_pred.types, + impl_pred.selfs, + trait_pred.fns + ); + + // Construct trait parameter environment and then shift it into the skolemized viewpoint. + // The key step here is to update the caller_bounds's predicates to be + // the new hybrid bounds we computed. 
+ let normalize_cause = traits::ObligationCause::misc(impl_m_span, impl_m_body_id); + let trait_param_env = impl_param_env.with_caller_bounds(hybrid_preds.into_vec()); + let trait_param_env = traits::normalize_param_env_or_error(tcx, + trait_param_env, + normalize_cause.clone()); + // FIXME(@jroesch) this seems ugly, but is a temporary change + infcx.parameter_environment = trait_param_env; + + debug!("compare_impl_method: trait_bounds={:?}", + infcx.parameter_environment.caller_bounds); + + let mut selcx = traits::SelectionContext::new(&infcx); + + let (impl_pred_fns, _) = + infcx.replace_late_bound_regions_with_fresh_var( + impl_m_span, + infer::HigherRankedType, + &ty::Binder(impl_pred.fns)); + for predicate in impl_pred_fns { + let traits::Normalized { value: predicate, .. } = + traits::normalize(&mut selcx, normalize_cause.clone(), &predicate); + + let cause = traits::ObligationCause { + span: impl_m_span, + body_id: impl_m_body_id, + code: traits::ObligationCauseCode::CompareImplMethodObligation + }; + + fulfillment_cx.register_predicate_obligation( + &infcx, + traits::Obligation::new(cause, predicate)); + } - // We now need to check that the signature of the impl method is - // compatible with that of the trait method. We do this by - // checking that `impl_fty <: trait_fty`. - // - // FIXME. Unfortunately, this doesn't quite work right now because - // associated type normalization is not integrated into subtype - // checks. For the comparison to be valid, we need to - // normalize the associated types in the impl/trait methods - // first. However, because function types bind regions, just - // calling `normalize_associated_types_in` would have no effect on - // any associated types appearing in the fn arguments or return - // type. - - // Compute skolemized form of impl and trait method tys. - let impl_fty = tcx.mk_fn_ptr(impl_m.fty.clone()); - let impl_fty = impl_fty.subst(tcx, impl_to_skol_substs); - let trait_fty = tcx.mk_fn_ptr(trait_m.fty.clone()); - let trait_fty = trait_fty.subst(tcx, &trait_to_skol_substs); - - let err = infcx.commit_if_ok(|snapshot| { - let origin = TypeOrigin::MethodCompatCheck(impl_m_span); - - let (impl_sig, _) = - infcx.replace_late_bound_regions_with_fresh_var(impl_m_span, - infer::HigherRankedType, - &impl_m.fty.sig); - let impl_sig = - impl_sig.subst(tcx, impl_to_skol_substs); - let impl_sig = - assoc::normalize_associated_types_in(&infcx, - &mut fulfillment_cx, - impl_m_span, - impl_m_body_id, - &impl_sig); - let impl_fty = tcx.mk_fn_ptr(ty::BareFnTy { - unsafety: impl_m.fty.unsafety, - abi: impl_m.fty.abi, - sig: ty::Binder(impl_sig) - }); - debug!("compare_impl_method: impl_fty={:?}", - impl_fty); - - let (trait_sig, skol_map) = - infcx.skolemize_late_bound_regions(&trait_m.fty.sig, snapshot); - let trait_sig = - trait_sig.subst(tcx, &trait_to_skol_substs); - let trait_sig = - assoc::normalize_associated_types_in(&infcx, - &mut fulfillment_cx, - impl_m_span, - impl_m_body_id, - &trait_sig); - let trait_fty = tcx.mk_fn_ptr(ty::BareFnTy { - unsafety: trait_m.fty.unsafety, - abi: trait_m.fty.abi, - sig: ty::Binder(trait_sig) - }); + // We now need to check that the signature of the impl method is + // compatible with that of the trait method. We do this by + // checking that `impl_fty <: trait_fty`. + // + // FIXME. Unfortunately, this doesn't quite work right now because + // associated type normalization is not integrated into subtype + // checks. 
For the comparison to be valid, we need to + // normalize the associated types in the impl/trait methods + // first. However, because function types bind regions, just + // calling `normalize_associated_types_in` would have no effect on + // any associated types appearing in the fn arguments or return + // type. + + // Compute skolemized form of impl and trait method tys. + let impl_fty = tcx.mk_fn_ptr(impl_m.fty); + let impl_fty = impl_fty.subst(tcx, impl_to_skol_substs); + let trait_fty = tcx.mk_fn_ptr(trait_m.fty); + let trait_fty = trait_fty.subst(tcx, &trait_to_skol_substs); + + let err = infcx.commit_if_ok(|snapshot| { + let tcx = infcx.tcx; + let origin = TypeOrigin::MethodCompatCheck(impl_m_span); + + let (impl_sig, _) = + infcx.replace_late_bound_regions_with_fresh_var(impl_m_span, + infer::HigherRankedType, + &impl_m.fty.sig); + let impl_sig = + impl_sig.subst(tcx, impl_to_skol_substs); + let impl_sig = + assoc::normalize_associated_types_in(&infcx, + &mut fulfillment_cx, + impl_m_span, + impl_m_body_id, + &impl_sig); + let impl_fty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { + unsafety: impl_m.fty.unsafety, + abi: impl_m.fty.abi, + sig: ty::Binder(impl_sig) + })); + debug!("compare_impl_method: impl_fty={:?}", + impl_fty); + + let (trait_sig, skol_map) = + infcx.skolemize_late_bound_regions(&trait_m.fty.sig, snapshot); + let trait_sig = + trait_sig.subst(tcx, &trait_to_skol_substs); + let trait_sig = + assoc::normalize_associated_types_in(&infcx, + &mut fulfillment_cx, + impl_m_span, + impl_m_body_id, + &trait_sig); + let trait_fty = tcx.mk_fn_ptr(tcx.mk_bare_fn(ty::BareFnTy { + unsafety: trait_m.fty.unsafety, + abi: trait_m.fty.abi, + sig: ty::Binder(trait_sig) + })); + + debug!("compare_impl_method: trait_fty={:?}", + trait_fty); - debug!("compare_impl_method: trait_fty={:?}", - trait_fty); + infcx.sub_types(false, origin, impl_fty, trait_fty)?; - infer::mk_subty(&infcx, false, origin, impl_fty, trait_fty)?; + infcx.leak_check(false, &skol_map, snapshot) + }); - infcx.leak_check(&skol_map, snapshot) - }); + match err { + Ok(()) => { } + Err(terr) => { + debug!("checking trait method for compatibility: impl ty {:?}, trait ty {:?}", + impl_fty, + trait_fty); + span_err!(tcx.sess, impl_m_span, E0053, + "method `{}` has an incompatible type for trait: {}", + trait_m.name, + terr); + return; + } + } - match err { - Ok(()) => { } - Err(terr) => { - debug!("checking trait method for compatibility: impl ty {:?}, trait ty {:?}", - impl_fty, - trait_fty); - span_err!(tcx.sess, impl_m_span, E0053, - "method `{}` has an incompatible type for trait: {}", - trait_m.name, - terr); - return; + // Check that all obligations are satisfied by the implementation's + // version. + match fulfillment_cx.select_all_or_error(&infcx) { + Err(ref errors) => { infcx.report_fulfillment_errors(errors) } + Ok(_) => {} } - } - // Check that all obligations are satisfied by the implementation's - // version. - match fulfillment_cx.select_all_or_error(&infcx) { - Err(ref errors) => { traits::report_fulfillment_errors(&infcx, errors) } - Ok(_) => {} - } + // Finally, resolve all regions. This catches wily misuses of + // lifetime parameters. We have to build up a plausible lifetime + // environment based on what we find in the trait. 
We could also + // include the obligations derived from the method argument types, + // but I don't think it's necessary -- after all, those are still + // in effect when type-checking the body, and all the + // where-clauses in the header etc should be implied by the trait + // anyway, so it shouldn't be needed there either. Anyway, we can + // always add more relations later (it's backwards compat). + let mut free_regions = FreeRegionMap::new(); + free_regions.relate_free_regions_from_predicates( + &infcx.parameter_environment.caller_bounds); + + infcx.resolve_regions_and_report_errors(&free_regions, impl_m_body_id); + }); - // Finally, resolve all regions. This catches wily misuses of - // lifetime parameters. We have to build up a plausible lifetime - // environment based on what we find in the trait. We could also - // include the obligations derived from the method argument types, - // but I don't think it's necessary -- after all, those are still - // in effect when type-checking the body, and all the - // where-clauses in the header etc should be implied by the trait - // anyway, so it shouldn't be needed there either. Anyway, we can - // always add more relations later (it's backwards compat). - let mut free_regions = FreeRegionMap::new(); - free_regions.relate_free_regions_from_predicates(tcx, - &infcx.parameter_environment.caller_bounds); - - infcx.resolve_regions_and_report_errors(&free_regions, impl_m_body_id); - - fn check_region_bounds_on_impl_method<'tcx>(tcx: &TyCtxt<'tcx>, - span: Span, - impl_m: &ty::Method<'tcx>, - trait_generics: &ty::Generics<'tcx>, - impl_generics: &ty::Generics<'tcx>, - trait_to_skol_substs: &Substs<'tcx>, - impl_to_skol_substs: &Substs<'tcx>) - -> bool + fn check_region_bounds_on_impl_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + span: Span, + impl_m: &ty::Method<'tcx>, + trait_generics: &ty::Generics<'tcx>, + impl_generics: &ty::Generics<'tcx>, + trait_to_skol_substs: &Substs<'tcx>, + impl_to_skol_substs: &Substs<'tcx>) + -> bool { let trait_params = trait_generics.regions.get_slice(subst::FnSpace); @@ -397,7 +402,7 @@ pub fn compare_impl_method<'tcx>(tcx: &TyCtxt<'tcx>, // are zero. Since I don't quite know how to phrase things at // the moment, give a kind of vague error message. if trait_params.len() != impl_params.len() { - span_err!(tcx.sess, span, E0195, + span_err!(ccx.tcx.sess, span, E0195, "lifetime parameters or bounds on method `{}` do \ not match the trait declaration", impl_m.name); @@ -408,78 +413,78 @@ pub fn compare_impl_method<'tcx>(tcx: &TyCtxt<'tcx>, } } -pub fn compare_const_impl<'tcx>(tcx: &TyCtxt<'tcx>, - impl_c: &ty::AssociatedConst<'tcx>, - impl_c_span: Span, - trait_c: &ty::AssociatedConst<'tcx>, - impl_trait_ref: &ty::TraitRef<'tcx>) { +pub fn compare_const_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + impl_c: &ty::AssociatedConst<'tcx>, + impl_c_span: Span, + trait_c: &ty::AssociatedConst<'tcx>, + impl_trait_ref: &ty::TraitRef<'tcx>) { debug!("compare_const_impl(impl_trait_ref={:?})", impl_trait_ref); - let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, ProjectionMode::AnyFinal); - let mut fulfillment_cx = traits::FulfillmentContext::new(); - - // The below is for the most part highly similar to the procedure - // for methods above. It is simpler in many respects, especially - // because we shouldn't really have to deal with lifetimes or - // predicates. In fact some of this should probably be put into - // shared functions because of DRY violations... 
- let trait_to_impl_substs = &impl_trait_ref.substs; - - // Create a parameter environment that represents the implementation's - // method. - let impl_c_node_id = tcx.map.as_local_node_id(impl_c.def_id).unwrap(); - let impl_param_env = ty::ParameterEnvironment::for_item(tcx, impl_c_node_id); - - // Create mapping from impl to skolemized. - let impl_to_skol_substs = &impl_param_env.free_substs; - - // Create mapping from trait to skolemized. - let trait_to_skol_substs = - trait_to_impl_substs - .subst(tcx, impl_to_skol_substs) - .with_method(impl_to_skol_substs.types.get_slice(subst::FnSpace).to_vec(), - impl_to_skol_substs.regions.get_slice(subst::FnSpace).to_vec()); - debug!("compare_const_impl: trait_to_skol_substs={:?}", - trait_to_skol_substs); - - // Compute skolemized form of impl and trait const tys. - let impl_ty = impl_c.ty.subst(tcx, impl_to_skol_substs); - let trait_ty = trait_c.ty.subst(tcx, &trait_to_skol_substs); - - let err = infcx.commit_if_ok(|_| { - let origin = TypeOrigin::Misc(impl_c_span); - - // There is no "body" here, so just pass dummy id. - let impl_ty = - assoc::normalize_associated_types_in(&infcx, - &mut fulfillment_cx, - impl_c_span, - 0, - &impl_ty); - - debug!("compare_const_impl: impl_ty={:?}", - impl_ty); - - let trait_ty = - assoc::normalize_associated_types_in(&infcx, - &mut fulfillment_cx, - impl_c_span, - 0, - &trait_ty); - - debug!("compare_const_impl: trait_ty={:?}", - trait_ty); - - infer::mk_subty(&infcx, false, origin, impl_ty, trait_ty) - }); + let tcx = ccx.tcx; + tcx.infer_ctxt(None, None, ProjectionMode::AnyFinal).enter(|infcx| { + let mut fulfillment_cx = traits::FulfillmentContext::new(); + + // The below is for the most part highly similar to the procedure + // for methods above. It is simpler in many respects, especially + // because we shouldn't really have to deal with lifetimes or + // predicates. In fact some of this should probably be put into + // shared functions because of DRY violations... + let trait_to_impl_substs = &impl_trait_ref.substs; + + // Create a parameter environment that represents the implementation's + // method. + let impl_c_node_id = tcx.map.as_local_node_id(impl_c.def_id).unwrap(); + let impl_param_env = ty::ParameterEnvironment::for_item(tcx, impl_c_node_id); + + // Create mapping from impl to skolemized. + let impl_to_skol_substs = &impl_param_env.free_substs; + + // Create mapping from trait to skolemized. + let trait_to_skol_substs = + trait_to_impl_substs + .subst(tcx, impl_to_skol_substs).clone() + .with_method(impl_to_skol_substs.types.get_slice(subst::FnSpace).to_vec(), + impl_to_skol_substs.regions.get_slice(subst::FnSpace).to_vec()); + debug!("compare_const_impl: trait_to_skol_substs={:?}", + trait_to_skol_substs); + + // Compute skolemized form of impl and trait const tys. + let impl_ty = impl_c.ty.subst(tcx, impl_to_skol_substs); + let trait_ty = trait_c.ty.subst(tcx, &trait_to_skol_substs); + + let err = infcx.commit_if_ok(|_| { + let origin = TypeOrigin::Misc(impl_c_span); + + // There is no "body" here, so just pass dummy id. + let impl_ty = + assoc::normalize_associated_types_in(&infcx, + &mut fulfillment_cx, + impl_c_span, + 0, + &impl_ty); + + debug!("compare_const_impl: impl_ty={:?}", + impl_ty); + + let trait_ty = + assoc::normalize_associated_types_in(&infcx, + &mut fulfillment_cx, + impl_c_span, + 0, + &trait_ty); + + debug!("compare_const_impl: trait_ty={:?}", + trait_ty); + + infcx.sub_types(false, origin, impl_ty, trait_ty) + .map(|InferOk { obligations, .. 
}| { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()) + }) + }); - match err { - Ok(InferOk { obligations, .. }) => { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()) - } - Err(terr) => { + if let Err(terr) = err { debug!("checking associated const for compatibility: impl ty {:?}, trait ty {:?}", impl_ty, trait_ty); @@ -488,7 +493,6 @@ pub fn compare_const_impl<'tcx>(tcx: &TyCtxt<'tcx>, trait: {}", trait_c.name, terr); - return; } - } + }); } diff --git a/src/librustc_typeck/check/demand.rs b/src/librustc_typeck/check/demand.rs index 5b163eb7de..7c8eb62b0e 100644 --- a/src/librustc_typeck/check/demand.rs +++ b/src/librustc_typeck/check/demand.rs @@ -9,52 +9,61 @@ // except according to those terms. -use check::{coercion, FnCtxt}; +use check::FnCtxt; use rustc::ty::Ty; use rustc::infer::{InferOk, TypeOrigin}; use syntax::codemap::Span; use rustc::hir; -// Requires that the two types unify, and prints an error message if -// they don't. -pub fn suptype<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, sp: Span, - expected: Ty<'tcx>, actual: Ty<'tcx>) { - let origin = TypeOrigin::Misc(sp); - match fcx.infcx().sub_types(false, origin, actual, expected) { - Ok(InferOk { obligations, .. }) => { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); - }, - Err(e) => { - fcx.infcx().report_mismatched_types(origin, expected, actual, e); +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + // Requires that the two types unify, and prints an error message if + // they don't. + pub fn demand_suptype(&self, sp: Span, expected: Ty<'tcx>, actual: Ty<'tcx>) { + let origin = TypeOrigin::Misc(sp); + match self.sub_types(false, origin, actual, expected) { + Ok(InferOk { obligations, .. }) => { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()); + }, + Err(e) => { + self.report_mismatched_types(origin, expected, actual, e); + } } } -} -pub fn eqtype<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, sp: Span, - expected: Ty<'tcx>, actual: Ty<'tcx>) { - let origin = TypeOrigin::Misc(sp); - match fcx.infcx().eq_types(false, origin, actual, expected) { - Ok(InferOk { obligations, .. }) => { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); - }, - Err(e) => { - fcx.infcx().report_mismatched_types(origin, expected, actual, e); + pub fn demand_eqtype(&self, sp: Span, expected: Ty<'tcx>, actual: Ty<'tcx>) { + let origin = TypeOrigin::Misc(sp); + match self.eq_types(false, origin, actual, expected) { + Ok(InferOk { obligations, .. }) => { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()); + }, + Err(e) => { + self.report_mismatched_types(origin, expected, actual, e); + } } } -} -// Checks that the type of `expr` can be coerced to `expected`. -pub fn coerce<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - sp: Span, - expected: Ty<'tcx>, - expr: &hir::Expr) { - let expected = fcx.resolve_type_vars_if_possible(expected); - if let Err(e) = coercion::try(fcx, expr, expected) { - let origin = TypeOrigin::Misc(sp); - let expr_ty = fcx.resolve_type_vars_if_possible(fcx.expr_ty(expr)); - fcx.infcx().report_mismatched_types(origin, expected, expr_ty, e); + // Checks that the type of `expr` can be coerced to `expected`. 
+ pub fn demand_coerce(&self, expr: &hir::Expr, expected: Ty<'tcx>) { + let expected = self.resolve_type_vars_with_obligations(expected); + if let Err(e) = self.try_coerce(expr, expected) { + let origin = TypeOrigin::Misc(expr.span); + let expr_ty = self.resolve_type_vars_with_obligations(self.expr_ty(expr)); + self.report_mismatched_types(origin, expected, expr_ty, e); + } + } + + pub fn require_same_types(&self, span: Span, t1: Ty<'tcx>, t2: Ty<'tcx>, msg: &str) + -> bool { + if let Err(err) = self.eq_types(false, TypeOrigin::Misc(span), t1, t2) { + let found_ty = self.resolve_type_vars_if_possible(&t1); + let expected_ty = self.resolve_type_vars_if_possible(&t2); + ::emit_type_err(self.tcx, span, found_ty, expected_ty, &err, msg); + false + } else { + true + } } } diff --git a/src/librustc_typeck/check/dropck.rs b/src/librustc_typeck/check/dropck.rs index 368b826b1b..ae614d7b02 100644 --- a/src/librustc_typeck/check/dropck.rs +++ b/src/librustc_typeck/check/dropck.rs @@ -8,7 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use check::regionck::{self, Rcx}; +use CrateCtxt; +use check::regionck::RegionCtxt; use hir::def_id::DefId; use middle::free_region::FreeRegionMap; @@ -39,20 +40,20 @@ use syntax::codemap::{self, Span}; /// struct/enum definition for the nominal type itself (i.e. /// cannot do `struct S; impl Drop for S { ... }`). /// -pub fn check_drop_impl(tcx: &TyCtxt, drop_impl_did: DefId) -> Result<(), ()> { +pub fn check_drop_impl(ccx: &CrateCtxt, drop_impl_did: DefId) -> Result<(), ()> { let ty::TypeScheme { generics: ref dtor_generics, - ty: dtor_self_type } = tcx.lookup_item_type(drop_impl_did); - let dtor_predicates = tcx.lookup_predicates(drop_impl_did); + ty: dtor_self_type } = ccx.tcx.lookup_item_type(drop_impl_did); + let dtor_predicates = ccx.tcx.lookup_predicates(drop_impl_did); match dtor_self_type.sty { ty::TyEnum(adt_def, self_to_impl_substs) | ty::TyStruct(adt_def, self_to_impl_substs) => { - ensure_drop_params_and_item_params_correspond(tcx, + ensure_drop_params_and_item_params_correspond(ccx, drop_impl_did, dtor_generics, &dtor_self_type, adt_def.did)?; - ensure_drop_predicates_are_implied_by_item_defn(tcx, + ensure_drop_predicates_are_implied_by_item_defn(ccx, drop_impl_did, &dtor_predicates, adt_def.did, @@ -61,7 +62,7 @@ pub fn check_drop_impl(tcx: &TyCtxt, drop_impl_did: DefId) -> Result<(), ()> { _ => { // Destructors only work on nominal types. This was // already checked by coherence, so we can panic here. - let span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP); + let span = ccx.tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP); span_bug!(span, "should have been rejected by coherence check: {}", dtor_self_type); @@ -69,60 +70,64 @@ pub fn check_drop_impl(tcx: &TyCtxt, drop_impl_did: DefId) -> Result<(), ()> { } } -fn ensure_drop_params_and_item_params_correspond<'tcx>( - tcx: &TyCtxt<'tcx>, +fn ensure_drop_params_and_item_params_correspond<'a, 'tcx>( + ccx: &CrateCtxt<'a, 'tcx>, drop_impl_did: DefId, drop_impl_generics: &ty::Generics<'tcx>, drop_impl_ty: &ty::Ty<'tcx>, self_type_did: DefId) -> Result<(), ()> { + let tcx = ccx.tcx; let drop_impl_node_id = tcx.map.as_local_node_id(drop_impl_did).unwrap(); let self_type_node_id = tcx.map.as_local_node_id(self_type_did).unwrap(); // check that the impl type can be made to match the trait type. 
let impl_param_env = ty::ParameterEnvironment::for_item(tcx, self_type_node_id); - let infcx = infer::new_infer_ctxt(tcx, - &tcx.tables, - Some(impl_param_env), - ProjectionMode::AnyFinal); - let mut fulfillment_cx = traits::FulfillmentContext::new(); + tcx.infer_ctxt(None, Some(impl_param_env), ProjectionMode::AnyFinal).enter(|infcx| { + let tcx = infcx.tcx; + let mut fulfillment_cx = traits::FulfillmentContext::new(); - let named_type = tcx.lookup_item_type(self_type_did).ty; - let named_type = named_type.subst(tcx, &infcx.parameter_environment.free_substs); + let named_type = tcx.lookup_item_type(self_type_did).ty; + let named_type = named_type.subst(tcx, &infcx.parameter_environment.free_substs); - let drop_impl_span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP); - let fresh_impl_substs = - infcx.fresh_substs_for_generics(drop_impl_span, drop_impl_generics); - let fresh_impl_self_ty = drop_impl_ty.subst(tcx, &fresh_impl_substs); - - if let Err(_) = infer::mk_eqty(&infcx, true, infer::TypeOrigin::Misc(drop_impl_span), - named_type, fresh_impl_self_ty) { - let item_span = tcx.map.span(self_type_node_id); - struct_span_err!(tcx.sess, drop_impl_span, E0366, - "Implementations of Drop cannot be specialized") - .span_note(item_span, - "Use same sequence of generic type and region \ - parameters that is on the struct/enum definition") - .emit(); - return Err(()); - } + let drop_impl_span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP); + let fresh_impl_substs = + infcx.fresh_substs_for_generics(drop_impl_span, drop_impl_generics); + let fresh_impl_self_ty = drop_impl_ty.subst(tcx, &fresh_impl_substs); - if let Err(ref errors) = fulfillment_cx.select_all_or_error(&infcx) { - // this could be reached when we get lazy normalization - traits::report_fulfillment_errors(&infcx, errors); - return Err(()); + if let Err(_) = infcx.eq_types(true, infer::TypeOrigin::Misc(drop_impl_span), + named_type, fresh_impl_self_ty) { + let item_span = tcx.map.span(self_type_node_id); + struct_span_err!(tcx.sess, drop_impl_span, E0366, + "Implementations of Drop cannot be specialized") + .span_note(item_span, + "Use same sequence of generic type and region \ + parameters that is on the struct/enum definition") + .emit(); + return Err(()); + } + + if let Err(ref errors) = fulfillment_cx.select_all_or_error(&infcx) { + // this could be reached when we get lazy normalization + infcx.report_fulfillment_errors(errors); + return Err(()); + } + + if let Err(ref errors) = fulfillment_cx.select_rfc1592_obligations(&infcx) { + infcx.report_fulfillment_errors_as_warnings(errors, drop_impl_node_id); } - let free_regions = FreeRegionMap::new(); - infcx.resolve_regions_and_report_errors(&free_regions, drop_impl_node_id); - Ok(()) + let free_regions = FreeRegionMap::new(); + infcx.resolve_regions_and_report_errors(&free_regions, drop_impl_node_id); + Ok(()) + }) } /// Confirms that every predicate imposed by dtor_predicates is /// implied by assuming the predicates attached to self_type_did. -fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>( - tcx: &TyCtxt<'tcx>, +fn ensure_drop_predicates_are_implied_by_item_defn<'a, 'tcx>( + ccx: &CrateCtxt<'a, 'tcx>, drop_impl_did: DefId, dtor_predicates: &ty::GenericPredicates<'tcx>, self_type_did: DefId, @@ -163,6 +168,8 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>( // absent. So we report an error that the Drop impl injected a // predicate that is not present on the struct definition. 
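A minimal user-level sketch of the specialization check guarded by E0366 above (the type name is invented for the example, not part of this patch): the `Drop` impl must repeat the generic parameters of the type definition, so pinning a parameter to a concrete type is rejected.

    struct Wrapper<T>(T);

    // E0366: Implementations of Drop cannot be specialized.
    // The impl would have to be written as `impl<T> Drop for Wrapper<T>`.
    impl Drop for Wrapper<u32> {
        fn drop(&mut self) {}
    }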
+ let tcx = ccx.tcx; + let self_type_node_id = tcx.map.as_local_node_id(self_type_did).unwrap(); let drop_impl_span = tcx.map.def_id_span(drop_impl_did, codemap::DUMMY_SP); @@ -268,14 +275,16 @@ fn ensure_drop_predicates_are_implied_by_item_defn<'tcx>( /// ensuring that they do not access data nor invoke methods of /// values that have been previously dropped). /// -pub fn check_safety_of_destructor_if_necessary<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>, - typ: ty::Ty<'tcx>, - span: Span, - scope: region::CodeExtent) { +pub fn check_safety_of_destructor_if_necessary<'a, 'gcx, 'tcx>( + rcx: &mut RegionCtxt<'a, 'gcx, 'tcx>, + typ: ty::Ty<'tcx>, + span: Span, + scope: region::CodeExtent) +{ debug!("check_safety_of_destructor_if_necessary typ: {:?} scope: {:?}", typ, scope); - let parent_scope = rcx.tcx().region_maps.opt_encl_scope(scope).unwrap_or_else(|| { + let parent_scope = rcx.tcx.region_maps.opt_encl_scope(scope).unwrap_or_else(|| { span_bug!(span, "no enclosing scope found for scope: {:?}", scope) }); @@ -292,7 +301,7 @@ pub fn check_safety_of_destructor_if_necessary<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx> match result { Ok(()) => {} Err(Error::Overflow(ref ctxt, ref detected_on_typ)) => { - let tcx = rcx.tcx(); + let tcx = rcx.tcx; let mut err = struct_span_err!(tcx.sess, span, E0320, "overflow while adding drop-check rules for {}", typ); match *ctxt { @@ -337,8 +346,8 @@ enum TypeContext { } } -struct DropckContext<'a, 'b: 'a, 'tcx: 'b> { - rcx: &'a mut Rcx<'b, 'tcx>, +struct DropckContext<'a, 'b: 'a, 'gcx: 'b+'tcx, 'tcx: 'b> { + rcx: &'a mut RegionCtxt<'b, 'gcx, 'tcx>, /// types that have already been traversed breadcrumbs: FnvHashSet>, /// span for error reporting @@ -348,13 +357,13 @@ struct DropckContext<'a, 'b: 'a, 'tcx: 'b> { } // `context` is used for reporting overflow errors -fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'tcx>( - cx: &mut DropckContext<'a, 'b, 'tcx>, +fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'gcx, 'tcx>( + cx: &mut DropckContext<'a, 'b, 'gcx, 'tcx>, context: TypeContext, ty: Ty<'tcx>, depth: usize) -> Result<(), Error<'tcx>> { - let tcx = cx.rcx.tcx(); + let tcx = cx.rcx.tcx; // Issue #22443: Watch out for overflow. While we are careful to // handle regular types properly, non-regular ones cause problems. let recursion_limit = tcx.sess.recursion_limit.get(); @@ -367,7 +376,7 @@ fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'tcx>( // canoncialize the regions in `ty` before inserting - infinitely many // region variables can refer to the same region. 
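The `SafeDestructor` outlives obligation added a little further down is the user-visible drop-check rule: data borrowed by a value with a destructor must strictly outlive that value. A minimal sketch of code this rule rejects, with names invented for the example:

    struct Logger<'a> { msg: &'a str }

    impl<'a> Drop for Logger<'a> {
        // The destructor reads the borrowed data...
        fn drop(&mut self) { println!("dropping: {}", self.msg); }
    }

    fn main() {
        let logger;
        {
            let s = String::from("scoped");
            // ...so `s` must outlive `logger`; it does not, and the borrow
            // is rejected ("`s` does not live long enough").
            logger = Logger { msg: &s };
        }
    }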
- let ty = cx.rcx.infcx().resolve_type_and_region_vars_if_possible(&ty); + let ty = cx.rcx.resolve_type_and_region_vars_if_possible(&ty); if !cx.breadcrumbs.insert(ty) { debug!("iterate_over_potentially_unsafe_regions_in_type \ @@ -410,10 +419,8 @@ fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'tcx>( (0..depth).map(|_| ' ').collect::(), ty); - regionck::type_must_outlive(cx.rcx, - infer::SubregionOrigin::SafeDestructor(cx.span), - ty, - ty::ReScope(cx.parent_scope)); + cx.rcx.type_must_outlive(infer::SubregionOrigin::SafeDestructor(cx.span), + ty, ty::ReScope(cx.parent_scope)); return Ok(()); } @@ -449,7 +456,7 @@ fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'tcx>( for variant in &def.variants { for field in variant.fields.iter() { let fty = field.ty(tcx, substs); - let fty = cx.rcx.fcx.resolve_type_vars_if_possible( + let fty = cx.rcx.fcx.resolve_type_vars_with_obligations( cx.rcx.fcx.normalize_associated_types_in(cx.span, &fty)); iterate_over_potentially_unsafe_regions_in_type( cx, @@ -465,8 +472,8 @@ fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'tcx>( Ok(()) } - ty::TyTuple(ref tys) | - ty::TyClosure(_, box ty::ClosureSubsts { upvar_tys: ref tys, .. }) => { + ty::TyTuple(tys) | + ty::TyClosure(_, ty::ClosureSubsts { upvar_tys: tys, .. }) => { for ty in tys { iterate_over_potentially_unsafe_regions_in_type(cx, context, ty, depth+1)? } @@ -496,8 +503,8 @@ fn iterate_over_potentially_unsafe_regions_in_type<'a, 'b, 'tcx>( } } -fn has_dtor_of_interest<'tcx>(tcx: &TyCtxt<'tcx>, - ty: ty::Ty<'tcx>) -> bool { +fn has_dtor_of_interest<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + ty: Ty<'tcx>) -> bool { match ty.sty { ty::TyEnum(def, _) | ty::TyStruct(def, _) => { def.is_dtorck(tcx) diff --git a/src/librustc_typeck/check/intrinsic.rs b/src/librustc_typeck/check/intrinsic.rs index eae0cfb0f2..c02139140a 100644 --- a/src/librustc_typeck/check/intrinsic.rs +++ b/src/librustc_typeck/check/intrinsic.rs @@ -11,35 +11,35 @@ //! Type-checking for the rust-intrinsic and platform-intrinsic //! intrinsics that the compiler exposes. 
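The hunks that follow rework `equate_intrinsic_type`, which among other things rejects intrinsic declarations whose number of type parameters does not match the compiler's definition (E0094). A minimal user-level sketch of such a mis-declaration, assuming a nightly toolchain with the `intrinsics` feature:

    #![feature(intrinsics)]

    extern "rust-intrinsic" {
        // `size_of` takes exactly one type parameter; declaring two here
        // is rejected with E0094.
        fn size_of<T, U>() -> usize;
    }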
-use astconv::AstConv; use intrinsics; use rustc::ty::subst::{self, Substs}; use rustc::ty::FnSig; -use rustc::ty::{self, Ty, TyCtxt}; -use rustc::ty::fold::TypeFolder; +use rustc::ty::{self, Ty}; use {CrateCtxt, require_same_types}; use std::collections::{HashMap}; use syntax::abi::Abi; use syntax::ast; -use syntax::attr::AttrMetaMethods; use syntax::codemap::Span; use syntax::parse::token; use rustc::hir; -fn equate_intrinsic_type<'a, 'tcx>(tcx: &TyCtxt<'tcx>, it: &hir::ForeignItem, +fn equate_intrinsic_type<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + it: &hir::ForeignItem, n_tps: usize, abi: Abi, inputs: Vec>, output: ty::FnOutput<'tcx>) { + let tcx = ccx.tcx; let def_id = tcx.map.local_def_id(it.id); let i_ty = tcx.lookup_item_type(def_id); let mut substs = Substs::empty(); substs.types = i_ty.generics.types.map(|def| tcx.mk_param_from_def(def)); - let fty = tcx.mk_fn_def(def_id, tcx.mk_substs(substs), ty::BareFnTy { + let fty = tcx.mk_fn_def(def_id, tcx.mk_substs(substs), + tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Unsafe, abi: abi, sig: ty::Binder(FnSig { @@ -47,7 +47,7 @@ fn equate_intrinsic_type<'a, 'tcx>(tcx: &TyCtxt<'tcx>, it: &hir::ForeignItem, output: output, variadic: false, }), - }); + })); let i_n_tps = i_ty.generics.types.len(subst::FnSpace); if i_n_tps != n_tps { span_err!(tcx.sess, it.span, E0094, @@ -55,16 +55,11 @@ fn equate_intrinsic_type<'a, 'tcx>(tcx: &TyCtxt<'tcx>, it: &hir::ForeignItem, parameters: found {}, expected {}", i_n_tps, n_tps); } else { - require_same_types(tcx, - None, - false, + require_same_types(ccx, it.span, i_ty.ty, fty, - || { - format!("intrinsic has wrong type: expected `{}`", - fty) - }); + "intrinsic has wrong type"); } } @@ -290,7 +285,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { "try" => { let mut_u8 = tcx.mk_mut_ptr(tcx.types.u8); - let fn_ty = ty::BareFnTy { + let fn_ty = tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Normal, abi: Abi::Rust, sig: ty::Binder(FnSig { @@ -298,7 +293,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { output: ty::FnOutput::FnConverging(tcx.mk_nil()), variadic: false, }), - }; + }); (0, vec![tcx.mk_fn_ptr(fn_ty), mut_u8, mut_u8], tcx.types.i32) } @@ -310,14 +305,7 @@ pub fn check_intrinsic_type(ccx: &CrateCtxt, it: &hir::ForeignItem) { }; (n_tps, inputs, ty::FnConverging(output)) }; - equate_intrinsic_type( - tcx, - it, - n_tps, - Abi::RustIntrinsic, - inputs, - output - ) + equate_intrinsic_type(ccx, it, n_tps, Abi::RustIntrinsic, inputs, output) } /// Type-check `extern "platform-intrinsic" { ... }` functions. 
@@ -383,10 +371,10 @@ pub fn check_platform_intrinsic_type(ccx: &CrateCtxt, } let input_pairs = intr.inputs.iter().zip(&sig.inputs); for (i, (expected_arg, arg)) in input_pairs.enumerate() { - match_intrinsic_type_to_type(tcx, &format!("argument {}", i + 1), it.span, + match_intrinsic_type_to_type(ccx, &format!("argument {}", i + 1), it.span, &mut structural_to_nomimal, expected_arg, arg); } - match_intrinsic_type_to_type(tcx, "return value", it.span, + match_intrinsic_type_to_type(ccx, "return value", it.span, &mut structural_to_nomimal, &intr.output, sig.output.unwrap()); return @@ -400,21 +388,15 @@ pub fn check_platform_intrinsic_type(ccx: &CrateCtxt, } }; - equate_intrinsic_type( - tcx, - it, - n_tps, - Abi::PlatformIntrinsic, - inputs, - ty::FnConverging(output) - ) + equate_intrinsic_type(ccx, it, n_tps, Abi::PlatformIntrinsic, + inputs, ty::FnConverging(output)) } // walk the expected type and the actual type in lock step, checking they're // the same, in a kinda-structural way, i.e. `Vector`s have to be simd structs with // exactly the right element type fn match_intrinsic_type_to_type<'tcx, 'a>( - tcx: &TyCtxt<'tcx>, + ccx: &CrateCtxt<'a, 'tcx>, position: &str, span: Span, structural_to_nominal: &mut HashMap<&'a intrinsics::Type, ty::Ty<'tcx>>, @@ -423,7 +405,7 @@ fn match_intrinsic_type_to_type<'tcx, 'a>( use intrinsics::Type::*; let simple_error = |real: &str, expected: &str| { - span_err!(tcx.sess, span, E0442, + span_err!(ccx.tcx.sess, span, E0442, "intrinsic {} has wrong type: found {}, expected {}", position, real, expected) }; @@ -461,7 +443,7 @@ fn match_intrinsic_type_to_type<'tcx, 'a>( simple_error(&format!("`{}`", t), if const_ {"const pointer"} else {"mut pointer"}) } - match_intrinsic_type_to_type(tcx, position, span, structural_to_nominal, + match_intrinsic_type_to_type(ccx, position, span, structural_to_nominal, inner_expected, ty) } _ => simple_error(&format!("`{}`", t), "raw pointer"), @@ -472,19 +454,19 @@ fn match_intrinsic_type_to_type<'tcx, 'a>( simple_error(&format!("non-simd type `{}`", t), "simd type"); return; } - let t_len = t.simd_size(tcx); + let t_len = t.simd_size(ccx.tcx); if len as usize != t_len { simple_error(&format!("vector with length {}", t_len), &format!("length {}", len)); return; } - let t_ty = t.simd_type(tcx); + let t_ty = t.simd_type(ccx.tcx); { // check that a given structural type always has the same an intrinsic definition let previous = structural_to_nominal.entry(expected).or_insert(t); if *previous != t { // this gets its own error code because it is non-trivial - span_err!(tcx.sess, span, E0443, + span_err!(ccx.tcx.sess, span, E0443, "intrinsic {} has wrong type: found `{}`, expected `{}` which \ was used for this vector type previously in this signature", position, @@ -493,7 +475,7 @@ fn match_intrinsic_type_to_type<'tcx, 'a>( return; } } - match_intrinsic_type_to_type(tcx, + match_intrinsic_type_to_type(ccx, position, span, structural_to_nominal, @@ -502,14 +484,14 @@ fn match_intrinsic_type_to_type<'tcx, 'a>( } Aggregate(_flatten, ref expected_contents) => { match t.sty { - ty::TyTuple(ref contents) => { + ty::TyTuple(contents) => { if contents.len() != expected_contents.len() { simple_error(&format!("tuple with length {}", contents.len()), &format!("tuple with length {}", expected_contents.len())); return } for (e, c) in expected_contents.iter().zip(contents) { - match_intrinsic_type_to_type(tcx, position, span, structural_to_nominal, + match_intrinsic_type_to_type(ccx, position, span, structural_to_nominal, e, c) } } diff 
--git a/src/librustc_typeck/check/method/confirm.rs b/src/librustc_typeck/check/method/confirm.rs index b98f1884f0..6faf6f415c 100644 --- a/src/librustc_typeck/check/method/confirm.rs +++ b/src/librustc_typeck/check/method/confirm.rs @@ -10,24 +10,32 @@ use super::probe; -use check::{self, FnCtxt, callee, demand}; +use check::{FnCtxt, callee}; use check::UnresolvedTypeAction; use hir::def_id::DefId; use rustc::ty::subst::{self}; use rustc::traits; -use rustc::ty::{self, NoPreference, PreferMutLvalue, Ty, TyCtxt}; +use rustc::ty::{self, NoPreference, PreferMutLvalue, Ty}; use rustc::ty::adjustment::{AdjustDerefRef, AutoDerefRef, AutoPtr}; use rustc::ty::fold::TypeFoldable; -use rustc::infer; -use rustc::infer::{InferCtxt, TypeOrigin}; +use rustc::infer::{self, InferOk, TypeOrigin}; use syntax::codemap::Span; use rustc::hir; -struct ConfirmContext<'a, 'tcx:'a> { - fcx: &'a FnCtxt<'a, 'tcx>, +use std::ops::Deref; + +struct ConfirmContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a>{ + fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, span: Span, - self_expr: &'tcx hir::Expr, - call_expr: &'tcx hir::Expr, + self_expr: &'gcx hir::Expr, + call_expr: &'gcx hir::Expr, +} + +impl<'a, 'gcx, 'tcx> Deref for ConfirmContext<'a, 'gcx, 'tcx> { + type Target = FnCtxt<'a, 'gcx, 'tcx>; + fn deref(&self) -> &Self::Target { + &self.fcx + } } struct InstantiatedMethodSig<'tcx> { @@ -44,30 +52,32 @@ struct InstantiatedMethodSig<'tcx> { method_predicates: ty::InstantiatedPredicates<'tcx>, } -pub fn confirm<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - self_expr: &'tcx hir::Expr, - call_expr: &'tcx hir::Expr, - unadjusted_self_ty: Ty<'tcx>, - pick: probe::Pick<'tcx>, - supplied_method_types: Vec>) - -> ty::MethodCallee<'tcx> -{ - debug!("confirm(unadjusted_self_ty={:?}, pick={:?}, supplied_method_types={:?})", - unadjusted_self_ty, - pick, - supplied_method_types); - - let mut confirm_cx = ConfirmContext::new(fcx, span, self_expr, call_expr); - confirm_cx.confirm(unadjusted_self_ty, pick, supplied_method_types) +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + pub fn confirm_method(&self, + span: Span, + self_expr: &'gcx hir::Expr, + call_expr: &'gcx hir::Expr, + unadjusted_self_ty: Ty<'tcx>, + pick: probe::Pick<'tcx>, + supplied_method_types: Vec>) + -> ty::MethodCallee<'tcx> + { + debug!("confirm(unadjusted_self_ty={:?}, pick={:?}, supplied_method_types={:?})", + unadjusted_self_ty, + pick, + supplied_method_types); + + let mut confirm_cx = ConfirmContext::new(self, span, self_expr, call_expr); + confirm_cx.confirm(unadjusted_self_ty, pick, supplied_method_types) + } } -impl<'a,'tcx> ConfirmContext<'a,'tcx> { - fn new(fcx: &'a FnCtxt<'a, 'tcx>, +impl<'a, 'gcx, 'tcx> ConfirmContext<'a, 'gcx, 'tcx> { + fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, span: Span, - self_expr: &'tcx hir::Expr, - call_expr: &'tcx hir::Expr) - -> ConfirmContext<'a, 'tcx> + self_expr: &'gcx hir::Expr, + call_expr: &'gcx hir::Expr) + -> ConfirmContext<'a, 'gcx, 'tcx> { ConfirmContext { fcx: fcx, span: span, self_expr: self_expr, call_expr: call_expr } } @@ -98,7 +108,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { let InstantiatedMethodSig { method_sig, all_substs, method_predicates } = self.instantiate_method_sig(&pick, all_substs); - let all_substs = self.tcx().mk_substs(all_substs); + let all_substs = self.tcx.mk_substs(all_substs); let method_self_ty = method_sig.inputs[0]; // Unify the (adjusted) self type with what the method expects. 
@@ -107,11 +117,12 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { // Create the method type let def_id = pick.item.def_id(); let method_ty = pick.item.as_opt_method().unwrap(); - let fty = self.tcx().mk_fn_def(def_id, all_substs, ty::BareFnTy { + let fty = self.tcx.mk_fn_def(def_id, all_substs, + self.tcx.mk_bare_fn(ty::BareFnTy { sig: ty::Binder(method_sig), unsafety: method_ty.fty.unsafety, abi: method_ty.fty.abi.clone(), - }); + })); // Add any trait/regions obligations specified on the method's type parameters. self.add_obligations(fty, all_substs, &method_predicates); @@ -139,10 +150,10 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { -> Ty<'tcx> { let (autoref, unsize) = if let Some(mutbl) = pick.autoref { - let region = self.infcx().next_region_var(infer::Autoref(self.span)); - let autoref = AutoPtr(self.tcx().mk_region(region), mutbl); + let region = self.next_region_var(infer::Autoref(self.span)); + let autoref = AutoPtr(self.tcx.mk_region(region), mutbl); (Some(autoref), pick.unsize.map(|target| { - target.adjust_for_autoref(self.tcx(), Some(autoref)) + target.adjust_for_autoref(self.tcx, Some(autoref)) })) } else { // No unsizing should be performed without autoref (at @@ -155,13 +166,12 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { // Commit the autoderefs by calling `autoderef again, but this // time writing the results into the various tables. - let (autoderefd_ty, n, result) = check::autoderef(self.fcx, - self.span, - unadjusted_self_ty, - || Some(self.self_expr), - UnresolvedTypeAction::Error, - NoPreference, - |_, n| { + let (autoderefd_ty, n, result) = self.autoderef(self.span, + unadjusted_self_ty, + || Some(self.self_expr), + UnresolvedTypeAction::Error, + NoPreference, + |_, n| { if n == pick.autoderefs { Some(()) } else { @@ -172,8 +182,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { assert_eq!(result, Some(())); // Write out the final adjustment. - self.fcx.write_adjustment(self.self_expr.id, - AdjustDerefRef(AutoDerefRef { + self.write_adjustment(self.self_expr.id, AdjustDerefRef(AutoDerefRef { autoderefs: pick.autoderefs, autoref: autoref, unsize: unsize @@ -182,7 +191,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { if let Some(target) = unsize { target } else { - autoderefd_ty.adjust_for_autoref(self.tcx(), autoref) + autoderefd_ty.adjust_for_autoref(self.tcx, autoref) } } @@ -203,9 +212,9 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { match pick.kind { probe::InherentImplPick => { let impl_def_id = pick.item.container().id(); - assert!(self.tcx().impl_trait_ref(impl_def_id).is_none(), + assert!(self.tcx.impl_trait_ref(impl_def_id).is_none(), "impl {:?} is not an inherent impl", impl_def_id); - check::impl_self_ty(self.fcx, self.span, impl_def_id).substs + self.impl_self_ty(self.span, impl_def_id).substs } probe::ObjectPick => { @@ -222,7 +231,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { // been ruled out when we deemed the trait to be // "object safe". let original_poly_trait_ref = - data.principal_trait_ref_with_self_ty(this.tcx(), object_ty); + data.principal_trait_ref_with_self_ty(this.tcx, object_ty); let upcast_poly_trait_ref = this.upcast(original_poly_trait_ref.clone(), trait_def_id); let upcast_trait_ref = @@ -245,27 +254,27 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { // respectively, then we want to return the type // parameters from the trait ([$A,$B]), not those from // the impl ([$A,$B,$C]) not the receiver type ([$C]). 
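The E0035/E0036 diagnostics in the hunk above concern explicitly supplied method type parameters. A user-level sketch of the two cases, with invented names:

    struct S;

    impl S {
        fn no_params(&self) {}
        fn one_param<T>(&self, _x: T) {}
    }

    fn main() {
        let s = S;
        // E0035: `no_params` does not take type parameters.
        s.no_params::<u8>();
        // E0036: one type parameter expected, two supplied.
        s.one_param::<u8, u8>(1u8);
    }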
- let impl_polytype = check::impl_self_ty(self.fcx, self.span, impl_def_id); + let impl_polytype = self.impl_self_ty(self.span, impl_def_id); let impl_trait_ref = - self.fcx.instantiate_type_scheme( + self.instantiate_type_scheme( self.span, &impl_polytype.substs, - &self.tcx().impl_trait_ref(impl_def_id).unwrap()); + &self.tcx.impl_trait_ref(impl_def_id).unwrap()); impl_trait_ref.substs.clone() } probe::TraitPick => { let trait_def_id = pick.item.container().id(); - let trait_def = self.tcx().lookup_trait_def(trait_def_id); + let trait_def = self.tcx.lookup_trait_def(trait_def_id); // Make a trait reference `$0 : Trait<$1...$n>` // consisting entirely of type variables. Later on in // the process we will unify the transformed-self-type // of the method with the actual type in order to // unify some of these variables. - self.infcx().fresh_substs_for_trait(self.span, - &trait_def.generics, - self.infcx().next_ty_var()) + self.fresh_substs_for_trait(self.span, + &trait_def.generics, + self.next_ty_var()) } probe::WhereClausePick(ref poly_trait_ref) => { @@ -277,20 +286,19 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { } fn extract_trait_ref(&mut self, self_ty: Ty<'tcx>, mut closure: F) -> R where - F: FnMut(&mut ConfirmContext<'a, 'tcx>, Ty<'tcx>, &ty::TraitTy<'tcx>) -> R, + F: FnMut(&mut ConfirmContext<'a, 'gcx, 'tcx>, Ty<'tcx>, &ty::TraitTy<'tcx>) -> R, { // If we specified that this is an object method, then the // self-type ought to be something that can be dereferenced to // yield an object-type (e.g., `&Object` or `Box` // etc). - let (_, _, result) = check::autoderef(self.fcx, - self.span, - self_ty, - || None, - UnresolvedTypeAction::Error, - NoPreference, - |ty, _| { + let (_, _, result) = self.fcx.autoderef(self.span, + self_ty, + || None, + UnresolvedTypeAction::Error, + NoPreference, + |ty, _| { match ty.sty { ty::TyTrait(ref data) => Some(closure(self, ty, &data)), _ => None, @@ -328,7 +336,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { // // FIXME -- permit users to manually specify lifetimes let method_regions = - self.fcx.infcx().region_vars_for_defs( + self.region_vars_for_defs( self.span, pick.item.as_opt_method().unwrap() .generics.regions.get_slice(subst::FnSpace)); @@ -338,26 +346,26 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { let mut final_substs = subst::Substs { types: types, regions: regions }; if num_supplied_types == 0 { - self.fcx.infcx().type_vars_for_defs( + self.type_vars_for_defs( self.span, subst::FnSpace, &mut final_substs, method_types); } else if num_method_types == 0 { - span_err!(self.tcx().sess, self.span, E0035, + span_err!(self.tcx.sess, self.span, E0035, "does not take type parameters"); - self.fcx.infcx().type_vars_for_defs( + self.type_vars_for_defs( self.span, subst::FnSpace, &mut final_substs, method_types); } else if num_supplied_types != num_method_types { - span_err!(self.tcx().sess, self.span, E0036, + span_err!(self.tcx.sess, self.span, E0036, "incorrect number of type parameters given for this method: expected {}, found {}", num_method_types, num_supplied_types); final_substs.types.replace( subst::FnSpace, - vec![self.tcx().types.err; num_method_types]); + vec![self.tcx.types.err; num_method_types]); } else { final_substs.types.replace(subst::FnSpace, supplied_method_types); } @@ -369,8 +377,12 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { self_ty: Ty<'tcx>, method_self_ty: Ty<'tcx>) { - match self.fcx.mk_subty(false, TypeOrigin::Misc(self.span), self_ty, method_self_ty) { - Ok(_) => {} + match self.sub_types(false, TypeOrigin::Misc(self.span), + 
self_ty, method_self_ty) { + Ok(InferOk { obligations, .. }) => { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()); + } Err(_) => { span_bug!( self.span, @@ -396,9 +408,9 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { // type/early-bound-regions substitutions performed. There can // be no late-bound regions appearing here. let method_predicates = pick.item.as_opt_method().unwrap() - .predicates.instantiate(self.tcx(), &all_substs); - let method_predicates = self.fcx.normalize_associated_types_in(self.span, - &method_predicates); + .predicates.instantiate(self.tcx, &all_substs); + let method_predicates = self.normalize_associated_types_in(self.span, + &method_predicates); debug!("method_predicates after subst = {:?}", method_predicates); @@ -414,7 +426,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { debug!("late-bound lifetimes from method instantiated, method_sig={:?}", method_sig); - let method_sig = self.fcx.instantiate_type_scheme(self.span, &all_substs, &method_sig); + let method_sig = self.instantiate_type_scheme(self.span, &all_substs, &method_sig); debug!("type scheme substituted, method_sig={:?}", method_sig); @@ -434,20 +446,18 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { all_substs, method_predicates); - self.fcx.add_obligations_for_parameters( - traits::ObligationCause::misc(self.span, self.fcx.body_id), + self.add_obligations_for_parameters( + traits::ObligationCause::misc(self.span, self.body_id), method_predicates); // this is a projection from a trait reference, so we have to // make sure that the trait reference inputs are well-formed. - self.fcx.add_wf_bounds( - all_substs, - self.call_expr); + self.add_wf_bounds(all_substs, self.call_expr); // the function type must also be well-formed (this is not // implied by the substs being well-formed because of inherent // impls and late-bound regions - see issue #28609). - self.fcx.register_wf_obligation(fty, self.span, traits::MiscObligation); + self.register_wf_obligation(fty, self.span, traits::MiscObligation); } /////////////////////////////////////////////////////////////////////////// @@ -491,9 +501,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { // Fix up autoderefs and derefs. for (i, &expr) in exprs.iter().rev().enumerate() { // Count autoderefs. - let autoderef_count = match self.fcx - .inh - .tables + let autoderef_count = match self.tables .borrow() .adjustments .get(&expr.id) { @@ -506,19 +514,18 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { i, expr, autoderef_count); if autoderef_count > 0 { - check::autoderef(self.fcx, - expr.span, - self.fcx.expr_ty(expr), - || Some(expr), - UnresolvedTypeAction::Error, - PreferMutLvalue, - |_, autoderefs| { - if autoderefs == autoderef_count + 1 { - Some(()) - } else { - None - } - }); + self.autoderef(expr.span, + self.expr_ty(expr), + || Some(expr), + UnresolvedTypeAction::Error, + PreferMutLvalue, + |_, autoderefs| { + if autoderefs == autoderef_count + 1 { + Some(()) + } else { + None + } + }); } // Don't retry the first one or we might infinite loop! @@ -535,8 +542,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { // expects. This is annoying and horrible. We // ought to recode this routine so it doesn't // (ab)use the normal type checking paths. 
- let adj = self.fcx.inh.tables.borrow().adjustments.get(&base_expr.id) - .cloned(); + let adj = self.tables.borrow().adjustments.get(&base_expr.id).cloned(); let (autoderefs, unsize) = match adj { Some(AdjustDerefRef(adr)) => match adr.autoref { None => { @@ -567,17 +573,16 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { let (adjusted_base_ty, unsize) = if let Some(target) = unsize { (target, true) } else { - (self.fcx.adjust_expr_ty(base_expr, + (self.adjust_expr_ty(base_expr, Some(&AdjustDerefRef(AutoDerefRef { autoderefs: autoderefs, autoref: None, unsize: None }))), false) }; - let index_expr_ty = self.fcx.expr_ty(&index_expr); + let index_expr_ty = self.expr_ty(&index_expr); - let result = check::try_index_step( - self.fcx, + let result = self.try_index_step( ty::MethodCall::expr(expr.id), expr, &base_expr, @@ -588,25 +593,23 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { index_expr_ty); if let Some((input_ty, return_ty)) = result { - demand::suptype(self.fcx, index_expr.span, input_ty, index_expr_ty); + self.demand_suptype(index_expr.span, input_ty, index_expr_ty); - let expr_ty = self.fcx.expr_ty(&expr); - demand::suptype(self.fcx, expr.span, expr_ty, return_ty); + let expr_ty = self.expr_ty(&expr); + self.demand_suptype(expr.span, expr_ty, return_ty); } } hir::ExprUnary(hir::UnDeref, ref base_expr) => { // if this is an overloaded deref, then re-evaluate with // a preference for mut let method_call = ty::MethodCall::expr(expr.id); - if self.fcx.inh.tables.borrow().method_map.contains_key(&method_call) { - let method = check::try_overloaded_deref( - self.fcx, - expr.span, + if self.tables.borrow().method_map.contains_key(&method_call) { + let method = self.try_overloaded_deref(expr.span, Some(&base_expr), - self.fcx.expr_ty(&base_expr), + self.expr_ty(&base_expr), PreferMutLvalue); let method = method.expect("re-trying deref failed"); - self.fcx.inh.tables.borrow_mut().method_map.insert(method_call, method); + self.tables.borrow_mut().method_map.insert(method_call, method); } } _ => {} @@ -617,19 +620,11 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { /////////////////////////////////////////////////////////////////////////// // MISCELLANY - fn tcx(&self) -> &'a TyCtxt<'tcx> { - self.fcx.tcx() - } - - fn infcx(&self) -> &'a InferCtxt<'a, 'tcx> { - self.fcx.infcx() - } - fn enforce_illegal_method_limitations(&self, pick: &probe::Pick) { // Disallow calls to the method `drop` defined in the `Drop` trait. match pick.item.container() { ty::TraitContainer(trait_def_id) => { - callee::check_legal_trait_for_method_call(self.fcx.ccx, self.span, trait_def_id) + callee::check_legal_trait_for_method_call(self.ccx, self.span, trait_def_id) } ty::ImplContainer(..) 
=> {} } @@ -640,9 +635,8 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { target_trait_def_id: DefId) -> ty::PolyTraitRef<'tcx> { - let upcast_trait_refs = traits::upcast(self.tcx(), - source_trait_ref.clone(), - target_trait_def_id); + let upcast_trait_refs = self.tcx.upcast_choices(source_trait_ref.clone(), + target_trait_def_id); // must be exactly one trait ref or we'd get an ambig error etc if upcast_trait_refs.len() != 1 { @@ -660,7 +654,7 @@ impl<'a,'tcx> ConfirmContext<'a,'tcx> { fn replace_late_bound_regions_with_fresh_var(&self, value: &ty::Binder) -> T where T : TypeFoldable<'tcx> { - self.infcx().replace_late_bound_regions_with_fresh_var( + self.fcx.replace_late_bound_regions_with_fresh_var( self.span, infer::FnCall, value).0 } } diff --git a/src/librustc_typeck/check/method/mod.rs b/src/librustc_typeck/check/method/mod.rs index 31d95af4fb..00eeefa044 100644 --- a/src/librustc_typeck/check/method/mod.rs +++ b/src/librustc_typeck/check/method/mod.rs @@ -10,13 +10,12 @@ //! Method lookup: the secret sauce of Rust. See `README.md`. -use astconv::AstConv; use check::FnCtxt; use hir::def::Def; use hir::def_id::DefId; use rustc::ty::subst; use rustc::traits; -use rustc::ty::{self, TyCtxt, ToPredicate, ToPolyTraitRef, TraitRef, TypeFoldable}; +use rustc::ty::{self, ToPredicate, ToPolyTraitRef, TraitRef, TypeFoldable}; use rustc::ty::adjustment::{AdjustDerefRef, AutoDerefRef, AutoPtr}; use rustc::infer; @@ -28,7 +27,7 @@ use rustc::hir; pub use self::MethodError::*; pub use self::CandidateSource::*; -pub use self::suggest::{report_error, AllTraitsVec}; +pub use self::suggest::AllTraitsVec; mod confirm; mod probe; @@ -79,300 +78,316 @@ pub enum CandidateSource { TraitSource(/* trait id */ DefId), } -/// Determines whether the type `self_ty` supports a method name `method_name` or not. -pub fn exists<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - method_name: ast::Name, - self_ty: ty::Ty<'tcx>, - call_expr_id: ast::NodeId) - -> bool -{ - let mode = probe::Mode::MethodCall; - match probe::probe(fcx, span, mode, method_name, self_ty, call_expr_id) { - Ok(..) => true, - Err(NoMatch(..)) => false, - Err(Ambiguity(..)) => true, - Err(ClosureAmbiguity(..)) => true, - Err(PrivateMatch(..)) => true, +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + /// Determines whether the type `self_ty` supports a method name `method_name` or not. + pub fn method_exists(&self, + span: Span, + method_name: ast::Name, + self_ty: ty::Ty<'tcx>, + call_expr_id: ast::NodeId, + allow_private: bool) + -> bool + { + let mode = probe::Mode::MethodCall; + match self.probe_method(span, mode, method_name, self_ty, call_expr_id) { + Ok(..) => true, + Err(NoMatch(..)) => false, + Err(Ambiguity(..)) => true, + Err(ClosureAmbiguity(..)) => true, + Err(PrivateMatch(..)) => allow_private, + } } -} - -/// Performs method lookup. If lookup is successful, it will return the callee and store an -/// appropriate adjustment for the self-expr. In some cases it may report an error (e.g., invoking -/// the `drop` method). -/// -/// # Arguments -/// -/// Given a method call like `foo.bar::(...)`: -/// -/// * `fcx`: the surrounding `FnCtxt` (!) 
-/// * `span`: the span for the method call -/// * `method_name`: the name of the method being called (`bar`) -/// * `self_ty`: the (unadjusted) type of the self expression (`foo`) -/// * `supplied_method_types`: the explicit method type parameters, if any (`T1..Tn`) -/// * `self_expr`: the self expression (`foo`) -pub fn lookup<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - method_name: ast::Name, - self_ty: ty::Ty<'tcx>, - supplied_method_types: Vec>, - call_expr: &'tcx hir::Expr, - self_expr: &'tcx hir::Expr) - -> Result, MethodError<'tcx>> -{ - debug!("lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})", - method_name, - self_ty, - call_expr, - self_expr); - - let mode = probe::Mode::MethodCall; - let self_ty = fcx.infcx().resolve_type_vars_if_possible(&self_ty); - let pick = probe::probe(fcx, span, mode, method_name, self_ty, call_expr.id)?; - Ok(confirm::confirm(fcx, span, self_expr, call_expr, self_ty, pick, supplied_method_types)) -} -pub fn lookup_in_trait<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - self_expr: Option<&hir::Expr>, - m_name: ast::Name, - trait_def_id: DefId, - self_ty: ty::Ty<'tcx>, - opt_input_types: Option>>) - -> Option> -{ - lookup_in_trait_adjusted(fcx, span, self_expr, m_name, trait_def_id, - 0, false, self_ty, opt_input_types) -} - -/// `lookup_in_trait_adjusted` is used for overloaded operators. It does a very narrow slice of -/// what the normal probe/confirm path does. In particular, it doesn't really do any probing: it -/// simply constructs an obligation for a particular trait with the given self-type and checks -/// whether that trait is implemented. -/// -/// FIXME(#18741) -- It seems likely that we can consolidate some of this code with the other -/// method-lookup code. In particular, autoderef on index is basically identical to autoderef with -/// normal probes, except that the test also looks for built-in indexing. Also, the second half of -/// this method is basically the same as confirmation. -pub fn lookup_in_trait_adjusted<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - self_expr: Option<&hir::Expr>, - m_name: ast::Name, - trait_def_id: DefId, - autoderefs: usize, - unsize: bool, - self_ty: ty::Ty<'tcx>, - opt_input_types: Option>>) - -> Option> -{ - debug!("lookup_in_trait_adjusted(self_ty={:?}, self_expr={:?}, m_name={}, trait_def_id={:?})", - self_ty, - self_expr, - m_name, - trait_def_id); - - let trait_def = fcx.tcx().lookup_trait_def(trait_def_id); - - let type_parameter_defs = trait_def.generics.types.get_slice(subst::TypeSpace); - let expected_number_of_input_types = type_parameter_defs.len(); - - assert_eq!(trait_def.generics.types.len(subst::FnSpace), 0); - assert!(trait_def.generics.regions.is_empty()); - - // Construct a trait-reference `self_ty : Trait` - let mut substs = subst::Substs::new_trait(Vec::new(), Vec::new(), self_ty); - - match opt_input_types { - Some(input_types) => { - assert_eq!(expected_number_of_input_types, input_types.len()); - substs.types.replace(subst::ParamSpace::TypeSpace, input_types); + /// Performs method lookup. If lookup is successful, it will return the callee + /// and store an appropriate adjustment for the self-expr. In some cases it may + /// report an error (e.g., invoking the `drop` method). + /// + /// # Arguments + /// + /// Given a method call like `foo.bar::(...)`: + /// + /// * `fcx`: the surrounding `FnCtxt` (!) 
+ /// * `span`: the span for the method call + /// * `method_name`: the name of the method being called (`bar`) + /// * `self_ty`: the (unadjusted) type of the self expression (`foo`) + /// * `supplied_method_types`: the explicit method type parameters, if any (`T1..Tn`) + /// * `self_expr`: the self expression (`foo`) + pub fn lookup_method(&self, + span: Span, + method_name: ast::Name, + self_ty: ty::Ty<'tcx>, + supplied_method_types: Vec>, + call_expr: &'gcx hir::Expr, + self_expr: &'gcx hir::Expr) + -> Result, MethodError<'tcx>> + { + debug!("lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})", + method_name, + self_ty, + call_expr, + self_expr); + + let mode = probe::Mode::MethodCall; + let self_ty = self.resolve_type_vars_if_possible(&self_ty); + let pick = self.probe_method(span, mode, method_name, self_ty, call_expr.id)?; + + if let Some(import_id) = pick.import_id { + self.tcx.used_trait_imports.borrow_mut().insert(import_id); } - None => { - fcx.inh.infcx.type_vars_for_defs( - span, - subst::ParamSpace::TypeSpace, - &mut substs, - type_parameter_defs); - } + Ok(self.confirm_method(span, self_expr, call_expr, self_ty, pick, supplied_method_types)) } - let trait_ref = ty::TraitRef::new(trait_def_id, fcx.tcx().mk_substs(substs)); + pub fn lookup_method_in_trait(&self, + span: Span, + self_expr: Option<&hir::Expr>, + m_name: ast::Name, + trait_def_id: DefId, + self_ty: ty::Ty<'tcx>, + opt_input_types: Option>>) + -> Option> + { + self.lookup_method_in_trait_adjusted(span, self_expr, m_name, trait_def_id, + 0, false, self_ty, opt_input_types) + } - // Construct an obligation - let poly_trait_ref = trait_ref.to_poly_trait_ref(); - let obligation = traits::Obligation::misc(span, - fcx.body_id, - poly_trait_ref.to_predicate()); + /// `lookup_in_trait_adjusted` is used for overloaded operators. + /// It does a very narrow slice of what the normal probe/confirm path does. + /// In particular, it doesn't really do any probing: it simply constructs + /// an obligation for aparticular trait with the given self-type and checks + /// whether that trait is implemented. + /// + /// FIXME(#18741) -- It seems likely that we can consolidate some of this + /// code with the other method-lookup code. In particular, autoderef on + /// index is basically identical to autoderef with normal probes, except + /// that the test also looks for built-in indexing. Also, the second half of + /// this method is basically the same as confirmation. 
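As the doc comment above notes, `lookup_method_in_trait_adjusted` is the path taken for overloaded operators rather than ordinary `foo.bar()` calls. A user-level sketch (types invented for the example) of the kind of expression it serves, here an overloaded index:

    use std::ops::Index;

    struct Matrix { data: Vec<f64>, cols: usize }

    impl Index<(usize, usize)> for Matrix {
        type Output = f64;
        fn index(&self, (r, c): (usize, usize)) -> &f64 {
            &self.data[r * self.cols + c]
        }
    }

    fn main() {
        let m = Matrix { data: vec![0.0; 6], cols: 3 };
        // `m[(1, 2)]` is checked by constructing an `Index<(usize, usize)>`
        // obligation for `Matrix` and, if it holds, instantiating the trait
        // method's signature -- the job of the lookup path above.
        let x = m[(1, 2)];
        assert_eq!(x, 0.0);
    }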
+ pub fn lookup_method_in_trait_adjusted(&self, + span: Span, + self_expr: Option<&hir::Expr>, + m_name: ast::Name, + trait_def_id: DefId, + autoderefs: usize, + unsize: bool, + self_ty: ty::Ty<'tcx>, + opt_input_types: Option>>) + -> Option> + { + debug!("lookup_in_trait_adjusted(self_ty={:?}, self_expr={:?}, \ + m_name={}, trait_def_id={:?})", + self_ty, + self_expr, + m_name, + trait_def_id); + + let trait_def = self.tcx.lookup_trait_def(trait_def_id); + + let type_parameter_defs = trait_def.generics.types.get_slice(subst::TypeSpace); + let expected_number_of_input_types = type_parameter_defs.len(); + + assert_eq!(trait_def.generics.types.len(subst::FnSpace), 0); + assert!(trait_def.generics.regions.is_empty()); + + // Construct a trait-reference `self_ty : Trait` + let mut substs = subst::Substs::new_trait(Vec::new(), Vec::new(), self_ty); + + match opt_input_types { + Some(input_types) => { + assert_eq!(expected_number_of_input_types, input_types.len()); + substs.types.replace(subst::ParamSpace::TypeSpace, input_types); + } - // Now we want to know if this can be matched - let mut selcx = traits::SelectionContext::new(fcx.infcx()); - if !selcx.evaluate_obligation(&obligation) { - debug!("--> Cannot match obligation"); - return None; // Cannot be matched, no such method resolution is possible. - } + None => { + self.type_vars_for_defs( + span, + subst::ParamSpace::TypeSpace, + &mut substs, + type_parameter_defs); + } + } - // Trait must have a method named `m_name` and it should not have - // type parameters or early-bound regions. - let tcx = fcx.tcx(); - let method_item = trait_item(tcx, trait_def_id, m_name).unwrap(); - let method_ty = method_item.as_opt_method().unwrap(); - assert_eq!(method_ty.generics.types.len(subst::FnSpace), 0); - assert_eq!(method_ty.generics.regions.len(subst::FnSpace), 0); - - debug!("lookup_in_trait_adjusted: method_item={:?} method_ty={:?}", - method_item, method_ty); - - // Instantiate late-bound regions and substitute the trait - // parameters into the method type to get the actual method type. - // - // NB: Instantiate late-bound regions first so that - // `instantiate_type_scheme` can normalize associated types that - // may reference those regions. - let fn_sig = fcx.infcx().replace_late_bound_regions_with_fresh_var(span, - infer::FnCall, - &method_ty.fty.sig).0; - let fn_sig = fcx.instantiate_type_scheme(span, trait_ref.substs, &fn_sig); - let transformed_self_ty = fn_sig.inputs[0]; - let def_id = method_item.def_id(); - let fty = tcx.mk_fn_def(def_id, trait_ref.substs, ty::BareFnTy { - sig: ty::Binder(fn_sig), - unsafety: method_ty.fty.unsafety, - abi: method_ty.fty.abi.clone(), - }); - - debug!("lookup_in_trait_adjusted: matched method fty={:?} obligation={:?}", - fty, - obligation); - - // Register obligations for the parameters. This will include the - // `Self` parameter, which in turn has a bound of the main trait, - // so this also effectively registers `obligation` as well. (We - // used to register `obligation` explicitly, but that resulted in - // double error messages being reported.) - // - // Note that as the method comes from a trait, it should not have - // any late-bound regions appearing in its bounds. - let method_bounds = fcx.instantiate_bounds(span, trait_ref.substs, &method_ty.predicates); - assert!(!method_bounds.has_escaping_regions()); - fcx.add_obligations_for_parameters( - traits::ObligationCause::misc(span, fcx.body_id), - &method_bounds); - - // Also register an obligation for the method type being well-formed. 
- fcx.register_wf_obligation(fty, span, traits::MiscObligation); - - // FIXME(#18653) -- Try to resolve obligations, giving us more - // typing information, which can sometimes be needed to avoid - // pathological region inference failures. - fcx.select_obligations_where_possible(); - - // Insert any adjustments needed (always an autoref of some mutability). - match self_expr { - None => { } - - Some(self_expr) => { - debug!("lookup_in_trait_adjusted: inserting adjustment if needed \ - (self-id={}, autoderefs={}, unsize={}, explicit_self={:?})", - self_expr.id, autoderefs, unsize, - method_ty.explicit_self); - - match method_ty.explicit_self { - ty::ExplicitSelfCategory::ByValue => { - // Trait method is fn(self), no transformation needed. - assert!(!unsize); - fcx.write_autoderef_adjustment(self_expr.id, autoderefs); - } + let trait_ref = ty::TraitRef::new(trait_def_id, self.tcx.mk_substs(substs)); - ty::ExplicitSelfCategory::ByReference(..) => { - // Trait method is fn(&self) or fn(&mut self), need an - // autoref. Pull the region etc out of the type of first argument. - match transformed_self_ty.sty { - ty::TyRef(region, ty::TypeAndMut { mutbl, ty: _ }) => { - fcx.write_adjustment(self_expr.id, - AdjustDerefRef(AutoDerefRef { - autoderefs: autoderefs, - autoref: Some(AutoPtr(region, mutbl)), - unsize: if unsize { - Some(transformed_self_ty) - } else { - None - } - })); - } + // Construct an obligation + let poly_trait_ref = trait_ref.to_poly_trait_ref(); + let obligation = traits::Obligation::misc(span, + self.body_id, + poly_trait_ref.to_predicate()); - _ => { - span_bug!( - span, - "trait method is &self but first arg is: {}", - transformed_self_ty); + // Now we want to know if this can be matched + let mut selcx = traits::SelectionContext::new(self); + if !selcx.evaluate_obligation(&obligation) { + debug!("--> Cannot match obligation"); + return None; // Cannot be matched, no such method resolution is possible. + } + + // Trait must have a method named `m_name` and it should not have + // type parameters or early-bound regions. + let tcx = self.tcx; + let method_item = self.trait_item(trait_def_id, m_name).unwrap(); + let method_ty = method_item.as_opt_method().unwrap(); + assert_eq!(method_ty.generics.types.len(subst::FnSpace), 0); + assert_eq!(method_ty.generics.regions.len(subst::FnSpace), 0); + + debug!("lookup_in_trait_adjusted: method_item={:?} method_ty={:?}", + method_item, method_ty); + + // Instantiate late-bound regions and substitute the trait + // parameters into the method type to get the actual method type. + // + // NB: Instantiate late-bound regions first so that + // `instantiate_type_scheme` can normalize associated types that + // may reference those regions. + let fn_sig = self.replace_late_bound_regions_with_fresh_var(span, + infer::FnCall, + &method_ty.fty.sig).0; + let fn_sig = self.instantiate_type_scheme(span, trait_ref.substs, &fn_sig); + let transformed_self_ty = fn_sig.inputs[0]; + let def_id = method_item.def_id(); + let fty = tcx.mk_fn_def(def_id, trait_ref.substs, + tcx.mk_bare_fn(ty::BareFnTy { + sig: ty::Binder(fn_sig), + unsafety: method_ty.fty.unsafety, + abi: method_ty.fty.abi.clone(), + })); + + debug!("lookup_in_trait_adjusted: matched method fty={:?} obligation={:?}", + fty, + obligation); + + // Register obligations for the parameters. This will include the + // `Self` parameter, which in turn has a bound of the main trait, + // so this also effectively registers `obligation` as well. 
(We + // used to register `obligation` explicitly, but that resulted in + // double error messages being reported.) + // + // Note that as the method comes from a trait, it should not have + // any late-bound regions appearing in its bounds. + let method_bounds = self.instantiate_bounds(span, trait_ref.substs, &method_ty.predicates); + assert!(!method_bounds.has_escaping_regions()); + self.add_obligations_for_parameters( + traits::ObligationCause::misc(span, self.body_id), + &method_bounds); + + // Also register an obligation for the method type being well-formed. + self.register_wf_obligation(fty, span, traits::MiscObligation); + + // FIXME(#18653) -- Try to resolve obligations, giving us more + // typing information, which can sometimes be needed to avoid + // pathological region inference failures. + self.select_obligations_where_possible(); + + // Insert any adjustments needed (always an autoref of some mutability). + match self_expr { + None => { } + + Some(self_expr) => { + debug!("lookup_in_trait_adjusted: inserting adjustment if needed \ + (self-id={}, autoderefs={}, unsize={}, explicit_self={:?})", + self_expr.id, autoderefs, unsize, + method_ty.explicit_self); + + match method_ty.explicit_self { + ty::ExplicitSelfCategory::ByValue => { + // Trait method is fn(self), no transformation needed. + assert!(!unsize); + self.write_autoderef_adjustment(self_expr.id, autoderefs); + } + + ty::ExplicitSelfCategory::ByReference(..) => { + // Trait method is fn(&self) or fn(&mut self), need an + // autoref. Pull the region etc out of the type of first argument. + match transformed_self_ty.sty { + ty::TyRef(region, ty::TypeAndMut { mutbl, ty: _ }) => { + self.write_adjustment(self_expr.id, + AdjustDerefRef(AutoDerefRef { + autoderefs: autoderefs, + autoref: Some(AutoPtr(region, mutbl)), + unsize: if unsize { + Some(transformed_self_ty) + } else { + None + } + })); + } + + _ => { + span_bug!( + span, + "trait method is &self but first arg is: {}", + transformed_self_ty); + } } } - } - _ => { - span_bug!( - span, - "unexpected explicit self type in operator method: {:?}", - method_ty.explicit_self); + _ => { + span_bug!( + span, + "unexpected explicit self type in operator method: {:?}", + method_ty.explicit_self); + } } } } - } - let callee = ty::MethodCallee { - def_id: def_id, - ty: fty, - substs: trait_ref.substs - }; + let callee = ty::MethodCallee { + def_id: def_id, + ty: fty, + substs: trait_ref.substs + }; - debug!("callee = {:?}", callee); + debug!("callee = {:?}", callee); - Some(callee) -} + Some(callee) + } -pub fn resolve_ufcs<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - method_name: ast::Name, - self_ty: ty::Ty<'tcx>, - expr_id: ast::NodeId) - -> Result> -{ - let mode = probe::Mode::Path; - let pick = probe::probe(fcx, span, mode, method_name, self_ty, expr_id)?; - let def = pick.item.def(); - - if let probe::InherentImplPick = pick.kind { - if !pick.item.vis().is_accessible_from(fcx.body_id, &fcx.tcx().map) { - let msg = format!("{} `{}` is private", def.kind_name(), &method_name.as_str()); - fcx.tcx().sess.span_err(span, &msg); + pub fn resolve_ufcs(&self, + span: Span, + method_name: ast::Name, + self_ty: ty::Ty<'tcx>, + expr_id: ast::NodeId) + -> Result> + { + let mode = probe::Mode::Path; + let pick = self.probe_method(span, mode, method_name, self_ty, expr_id)?; + + if let Some(import_id) = pick.import_id { + self.tcx.used_trait_imports.borrow_mut().insert(import_id); + } + + let def = pick.item.def(); + if let probe::InherentImplPick = pick.kind { + if 
!pick.item.vis().is_accessible_from(self.body_id, &self.tcx.map) { + let msg = format!("{} `{}` is private", def.kind_name(), &method_name.as_str()); + self.tcx.sess.span_err(span, &msg); + } } + Ok(def) } - Ok(def) -} -/// Find item with name `item_name` defined in `trait_def_id` -/// and return it, or `None`, if no such item. -fn trait_item<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId, - item_name: ast::Name) - -> Option> -{ - let trait_items = tcx.trait_items(trait_def_id); - trait_items.iter() - .find(|item| item.name() == item_name) - .cloned() -} + /// Find item with name `item_name` defined in `trait_def_id` + /// and return it, or `None`, if no such item. + pub fn trait_item(&self, + trait_def_id: DefId, + item_name: ast::Name) + -> Option> + { + let trait_items = self.tcx.trait_items(trait_def_id); + trait_items.iter() + .find(|item| item.name() == item_name) + .cloned() + } -fn impl_item<'tcx>(tcx: &TyCtxt<'tcx>, - impl_def_id: DefId, - item_name: ast::Name) - -> Option> -{ - let impl_items = tcx.impl_items.borrow(); - let impl_items = impl_items.get(&impl_def_id).unwrap(); - impl_items - .iter() - .map(|&did| tcx.impl_or_trait_item(did.def_id())) - .find(|m| m.name() == item_name) + pub fn impl_item(&self, + impl_def_id: DefId, + item_name: ast::Name) + -> Option> + { + let impl_items = self.tcx.impl_items.borrow(); + let impl_items = impl_items.get(&impl_def_id).unwrap(); + impl_items + .iter() + .map(|&did| self.tcx.impl_or_trait_item(did.def_id())) + .find(|m| m.name() == item_name) + } } diff --git a/src/librustc_typeck/check/method/probe.rs b/src/librustc_typeck/check/method/probe.rs index 0ffbbfea84..08c0412251 100644 --- a/src/librustc_typeck/check/method/probe.rs +++ b/src/librustc_typeck/check/method/probe.rs @@ -13,27 +13,27 @@ use super::NoMatchData; use super::{CandidateSource, ImplSource, TraitSource}; use super::suggest; -use check; use check::{FnCtxt, UnresolvedTypeAction}; use hir::def_id::DefId; use hir::def::Def; use rustc::ty::subst; use rustc::ty::subst::Subst; use rustc::traits; -use rustc::ty::{self, NoPreference, Ty, TyCtxt, ToPolyTraitRef, TraitRef, TypeFoldable}; -use rustc::infer::{self, InferCtxt, InferOk, TypeOrigin}; +use rustc::ty::{self, NoPreference, Ty, ToPolyTraitRef, TraitRef, TypeFoldable}; +use rustc::infer::{InferOk, TypeOrigin}; use syntax::ast; use syntax::codemap::{Span, DUMMY_SP}; use rustc::hir; use std::collections::HashSet; use std::mem; +use std::ops::Deref; use std::rc::Rc; use self::CandidateKind::*; pub use self::PickKind::*; -struct ProbeContext<'a, 'tcx:'a> { - fcx: &'a FnCtxt<'a, 'tcx>, +struct ProbeContext<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, span: Span, mode: Mode, item_name: ast::Name, @@ -42,6 +42,7 @@ struct ProbeContext<'a, 'tcx:'a> { inherent_candidates: Vec>, extension_candidates: Vec>, impl_dups: HashSet, + import_id: Option, /// Collects near misses when the candidate functions are missing a `self` keyword and is only /// used for error reporting @@ -55,6 +56,13 @@ struct ProbeContext<'a, 'tcx:'a> { unsatisfied_predicates: Vec> } +impl<'a, 'gcx, 'tcx> Deref for ProbeContext<'a, 'gcx, 'tcx> { + type Target = FnCtxt<'a, 'gcx, 'tcx>; + fn deref(&self) -> &Self::Target { + &self.fcx + } +} + #[derive(Debug)] struct CandidateStep<'tcx> { self_ty: Ty<'tcx>, @@ -67,6 +75,7 @@ struct Candidate<'tcx> { xform_self_ty: Ty<'tcx>, item: ty::ImplOrTraitItem<'tcx>, kind: CandidateKind<'tcx>, + import_id: Option, } #[derive(Debug)] @@ -84,6 +93,7 @@ enum CandidateKind<'tcx> { pub struct Pick<'tcx> { 
pub item: ty::ImplOrTraitItem<'tcx>, pub kind: PickKind<'tcx>, + pub import_id: Option, // Indicates that the source expression should be autoderef'd N times // @@ -126,118 +136,119 @@ pub enum Mode { Path } -pub fn probe<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - mode: Mode, - item_name: ast::Name, - self_ty: Ty<'tcx>, - scope_expr_id: ast::NodeId) - -> PickResult<'tcx> -{ - debug!("probe(self_ty={:?}, item_name={}, scope_expr_id={})", - self_ty, - item_name, - scope_expr_id); - - // FIXME(#18741) -- right now, creating the steps involves evaluating the - // `*` operator, which registers obligations that then escape into - // the global fulfillment context and thus has global - // side-effects. This is a bit of a pain to refactor. So just let - // it ride, although it's really not great, and in fact could I - // think cause spurious errors. Really though this part should - // take place in the `fcx.infcx().probe` below. - let steps = if mode == Mode::MethodCall { - match create_steps(fcx, span, self_ty) { - Some(steps) => steps, - None =>return Err(MethodError::NoMatch(NoMatchData::new(Vec::new(), Vec::new(), - Vec::new(), mode))), - } - } else { - vec![CandidateStep { - self_ty: self_ty, - autoderefs: 0, - unsize: false - }] - }; - - // Create a list of simplified self types, if we can. - let mut simplified_steps = Vec::new(); - for step in &steps { - match ty::fast_reject::simplify_type(fcx.tcx(), step.self_ty, true) { - None => { break; } - Some(simplified_type) => { simplified_steps.push(simplified_type); } - } - } - let opt_simplified_steps = - if simplified_steps.len() < steps.len() { - None // failed to convert at least one of the steps +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + pub fn probe_method(&self, + span: Span, + mode: Mode, + item_name: ast::Name, + self_ty: Ty<'tcx>, + scope_expr_id: ast::NodeId) + -> PickResult<'tcx> + { + debug!("probe(self_ty={:?}, item_name={}, scope_expr_id={})", + self_ty, + item_name, + scope_expr_id); + + // FIXME(#18741) -- right now, creating the steps involves evaluating the + // `*` operator, which registers obligations that then escape into + // the global fulfillment context and thus has global + // side-effects. This is a bit of a pain to refactor. So just let + // it ride, although it's really not great, and in fact could I + // think cause spurious errors. Really though this part should + // take place in the `self.probe` below. + let steps = if mode == Mode::MethodCall { + match self.create_steps(span, self_ty) { + Some(steps) => steps, + None =>return Err(MethodError::NoMatch(NoMatchData::new(Vec::new(), Vec::new(), + Vec::new(), mode))), + } } else { - Some(simplified_steps) + vec![CandidateStep { + self_ty: self_ty, + autoderefs: 0, + unsize: false + }] }; - debug!("ProbeContext: steps for self_ty={:?} are {:?}", - self_ty, - steps); - - // this creates one big transaction so that all type variables etc - // that we create during the probe process are removed later - fcx.infcx().probe(|_| { - let mut probe_cx = ProbeContext::new(fcx, - span, - mode, - item_name, - steps, - opt_simplified_steps); - probe_cx.assemble_inherent_candidates(); - probe_cx.assemble_extension_candidates_for_traits_in_scope(scope_expr_id)?; - probe_cx.pick() - }) -} + // Create a list of simplified self types, if we can. 
+ let mut simplified_steps = Vec::new(); + for step in &steps { + match ty::fast_reject::simplify_type(self.tcx, step.self_ty, true) { + None => { break; } + Some(simplified_type) => { simplified_steps.push(simplified_type); } + } + } + let opt_simplified_steps = + if simplified_steps.len() < steps.len() { + None // failed to convert at least one of the steps + } else { + Some(simplified_steps) + }; -fn create_steps<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - self_ty: Ty<'tcx>) - -> Option>> { - let mut steps = Vec::new(); - - let (final_ty, dereferences, _) = check::autoderef(fcx, - span, - self_ty, - || None, - UnresolvedTypeAction::Error, - NoPreference, - |t, d| { - steps.push(CandidateStep { - self_ty: t, - autoderefs: d, - unsize: false - }); - None::<()> // keep iterating until we can't anymore - }); + debug!("ProbeContext: steps for self_ty={:?} are {:?}", + self_ty, + steps); + + // this creates one big transaction so that all type variables etc + // that we create during the probe process are removed later + self.probe(|_| { + let mut probe_cx = ProbeContext::new(self, + span, + mode, + item_name, + steps, + opt_simplified_steps); + probe_cx.assemble_inherent_candidates(); + probe_cx.assemble_extension_candidates_for_traits_in_scope(scope_expr_id)?; + probe_cx.pick() + }) + } - match final_ty.sty { - ty::TyArray(elem_ty, _) => { + fn create_steps(&self, + span: Span, + self_ty: Ty<'tcx>) + -> Option>> { + let mut steps = Vec::new(); + + let (final_ty, dereferences, _) = self.autoderef(span, + self_ty, + || None, + UnresolvedTypeAction::Error, + NoPreference, + |t, d| { steps.push(CandidateStep { - self_ty: fcx.tcx().mk_slice(elem_ty), - autoderefs: dereferences, - unsize: true + self_ty: t, + autoderefs: d, + unsize: false }); + None::<()> // keep iterating until we can't anymore + }); + + match final_ty.sty { + ty::TyArray(elem_ty, _) => { + steps.push(CandidateStep { + self_ty: self.tcx.mk_slice(elem_ty), + autoderefs: dereferences, + unsize: true + }); + } + ty::TyError => return None, + _ => (), } - ty::TyError => return None, - _ => (), - } - Some(steps) + Some(steps) + } } -impl<'a,'tcx> ProbeContext<'a,'tcx> { - fn new(fcx: &'a FnCtxt<'a,'tcx>, +impl<'a, 'gcx, 'tcx> ProbeContext<'a, 'gcx, 'tcx> { + fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, span: Span, mode: Mode, item_name: ast::Name, steps: Vec>, opt_simplified_steps: Option>) - -> ProbeContext<'a,'tcx> + -> ProbeContext<'a, 'gcx, 'tcx> { ProbeContext { fcx: fcx, @@ -247,6 +258,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { inherent_candidates: Vec::new(), extension_candidates: Vec::new(), impl_dups: HashSet::new(), + import_id: None, steps: Rc::new(steps), opt_simplified_steps: opt_simplified_steps, static_candidates: Vec::new(), @@ -263,14 +275,6 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.private_candidate = None; } - fn tcx(&self) -> &'a TyCtxt<'tcx> { - self.fcx.tcx() - } - - fn infcx(&self) -> &'a InferCtxt<'a, 'tcx> { - self.fcx.infcx() - } - /////////////////////////////////////////////////////////////////////////// // CANDIDATE ASSEMBLY @@ -295,7 +299,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.assemble_inherent_impl_candidates_for_type(def.did); } ty::TyBox(_) => { - if let Some(box_did) = self.tcx().lang_items.owned_box() { + if let Some(box_did) = self.tcx.lang_items.owned_box() { self.assemble_inherent_impl_candidates_for_type(box_did); } } @@ -303,71 +307,71 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.assemble_inherent_candidates_from_param(self_ty, p); } ty::TyChar => { - let lang_def_id = 
self.tcx().lang_items.char_impl(); + let lang_def_id = self.tcx.lang_items.char_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyStr => { - let lang_def_id = self.tcx().lang_items.str_impl(); + let lang_def_id = self.tcx.lang_items.str_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TySlice(_) => { - let lang_def_id = self.tcx().lang_items.slice_impl(); + let lang_def_id = self.tcx.lang_items.slice_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutImmutable }) => { - let lang_def_id = self.tcx().lang_items.const_ptr_impl(); + let lang_def_id = self.tcx.lang_items.const_ptr_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyRawPtr(ty::TypeAndMut { ty: _, mutbl: hir::MutMutable }) => { - let lang_def_id = self.tcx().lang_items.mut_ptr_impl(); + let lang_def_id = self.tcx.lang_items.mut_ptr_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyInt(ast::IntTy::I8) => { - let lang_def_id = self.tcx().lang_items.i8_impl(); + let lang_def_id = self.tcx.lang_items.i8_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyInt(ast::IntTy::I16) => { - let lang_def_id = self.tcx().lang_items.i16_impl(); + let lang_def_id = self.tcx.lang_items.i16_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyInt(ast::IntTy::I32) => { - let lang_def_id = self.tcx().lang_items.i32_impl(); + let lang_def_id = self.tcx.lang_items.i32_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyInt(ast::IntTy::I64) => { - let lang_def_id = self.tcx().lang_items.i64_impl(); + let lang_def_id = self.tcx.lang_items.i64_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyInt(ast::IntTy::Is) => { - let lang_def_id = self.tcx().lang_items.isize_impl(); + let lang_def_id = self.tcx.lang_items.isize_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyUint(ast::UintTy::U8) => { - let lang_def_id = self.tcx().lang_items.u8_impl(); + let lang_def_id = self.tcx.lang_items.u8_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyUint(ast::UintTy::U16) => { - let lang_def_id = self.tcx().lang_items.u16_impl(); + let lang_def_id = self.tcx.lang_items.u16_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyUint(ast::UintTy::U32) => { - let lang_def_id = self.tcx().lang_items.u32_impl(); + let lang_def_id = self.tcx.lang_items.u32_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyUint(ast::UintTy::U64) => { - let lang_def_id = self.tcx().lang_items.u64_impl(); + let lang_def_id = self.tcx.lang_items.u64_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyUint(ast::UintTy::Us) => { - let lang_def_id = self.tcx().lang_items.usize_impl(); + let lang_def_id = self.tcx.lang_items.usize_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyFloat(ast::FloatTy::F32) => { - let lang_def_id = self.tcx().lang_items.f32_impl(); + let lang_def_id = self.tcx.lang_items.f32_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } ty::TyFloat(ast::FloatTy::F64) => { - let lang_def_id = self.tcx().lang_items.f64_impl(); + let lang_def_id = self.tcx.lang_items.f64_impl(); self.assemble_inherent_impl_for_primitive(lang_def_id); } _ => { @@ -377,7 +381,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { fn assemble_inherent_impl_for_primitive(&mut self, lang_def_id: Option) { if let Some(impl_def_id) = lang_def_id { - 
self.tcx().populate_implementations_for_primitive_if_necessary(impl_def_id); + self.tcx.populate_implementations_for_primitive_if_necessary(impl_def_id); self.assemble_inherent_impl_probe(impl_def_id); } @@ -386,9 +390,9 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { fn assemble_inherent_impl_candidates_for_type(&mut self, def_id: DefId) { // Read the inherent implementation candidates for this type from the // metadata if necessary. - self.tcx().populate_inherent_implementations_for_type_if_necessary(def_id); + self.tcx.populate_inherent_implementations_for_type_if_necessary(def_id); - if let Some(impl_infos) = self.tcx().inherent_impls.borrow().get(&def_id) { + if let Some(impl_infos) = self.tcx.inherent_impls.borrow().get(&def_id) { for &impl_def_id in impl_infos.iter() { self.assemble_inherent_impl_probe(impl_def_id); } @@ -402,7 +406,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { debug!("assemble_inherent_impl_probe {:?}", impl_def_id); - let item = match impl_item(self.tcx(), impl_def_id, self.item_name) { + let item = match self.impl_item(impl_def_id) { Some(m) => m, None => { return; } // No method with correct name on this impl }; @@ -412,21 +416,21 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { return self.record_static_candidate(ImplSource(impl_def_id)); } - if !item.vis().is_accessible_from(self.fcx.body_id, &self.tcx().map) { + if !item.vis().is_accessible_from(self.body_id, &self.tcx.map) { self.private_candidate = Some(item.def()); return } let (impl_ty, impl_substs) = self.impl_ty_and_substs(impl_def_id); - let impl_ty = impl_ty.subst(self.tcx(), &impl_substs); + let impl_ty = impl_ty.subst(self.tcx, &impl_substs); // Determine the receiver type that the method itself expects. let xform_self_ty = self.xform_self_ty(&item, impl_ty, &impl_substs); // We can't use normalize_associated_types_in as it will pollute the // fcx's fulfillment context after this probe is over. - let cause = traits::ObligationCause::misc(self.span, self.fcx.body_id); - let mut selcx = &mut traits::SelectionContext::new(self.fcx.infcx()); + let cause = traits::ObligationCause::misc(self.span, self.body_id); + let mut selcx = &mut traits::SelectionContext::new(self.fcx); let traits::Normalized { value: xform_self_ty, obligations } = traits::normalize(selcx, cause, &xform_self_ty); debug!("assemble_inherent_impl_probe: xform_self_ty = {:?}", @@ -435,7 +439,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.inherent_candidates.push(Candidate { xform_self_ty: xform_self_ty, item: item, - kind: InherentImplCandidate(impl_substs, obligations) + kind: InherentImplCandidate(impl_substs, obligations), + import_id: self.import_id, }); } @@ -452,7 +457,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { // a substitution that replaces `Self` with the object type // itself. Hence, a `&self` method will wind up with an // argument type like `&Trait`. - let trait_ref = data.principal_trait_ref_with_self_ty(self.tcx(), self_ty); + let trait_ref = data.principal_trait_ref_with_self_ty(self.tcx, self_ty); self.elaborate_bounds(&[trait_ref], |this, new_trait_ref, item| { let new_trait_ref = this.erase_late_bound_regions(&new_trait_ref); @@ -463,7 +468,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { this.inherent_candidates.push(Candidate { xform_self_ty: xform_self_ty, item: item, - kind: ObjectCandidate + kind: ObjectCandidate, + import_id: this.import_id, }); }); } @@ -474,7 +480,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { // FIXME -- Do we want to commit to this behavior for param bounds? 
let bounds: Vec<_> = - self.fcx.inh.infcx.parameter_environment.caller_bounds + self.parameter_environment.caller_bounds .iter() .filter_map(|predicate| { match *predicate { @@ -491,6 +497,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { ty::Predicate::RegionOutlives(..) | ty::Predicate::WellFormed(..) | ty::Predicate::ObjectSafe(..) | + ty::Predicate::ClosureKind(..) | + ty::Predicate::Rfc1592(..) | ty::Predicate::TypeOutlives(..) => { None } @@ -532,7 +540,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { this.inherent_candidates.push(Candidate { xform_self_ty: xform_self_ty, item: item, - kind: WhereClauseCandidate(poly_trait_ref) + kind: WhereClauseCandidate(poly_trait_ref), + import_id: this.import_id, }); }); } @@ -545,18 +554,16 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { mut mk_cand: F, ) where F: for<'b> FnMut( - &mut ProbeContext<'b, 'tcx>, + &mut ProbeContext<'b, 'gcx, 'tcx>, ty::PolyTraitRef<'tcx>, ty::ImplOrTraitItem<'tcx>, ), { debug!("elaborate_bounds(bounds={:?})", bounds); - let tcx = self.tcx(); + let tcx = self.tcx; for bound_trait_ref in traits::transitive_bounds(tcx, bounds) { - let item = match trait_item(tcx, - bound_trait_ref.def_id(), - self.item_name) { + let item = match self.trait_item(bound_trait_ref.def_id()) { Some(v) => v, None => { continue; } }; @@ -574,11 +581,15 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { -> Result<(), MethodError<'tcx>> { let mut duplicates = HashSet::new(); - let opt_applicable_traits = self.fcx.ccx.trait_map.get(&expr_id); + let opt_applicable_traits = self.ccx.trait_map.get(&expr_id); if let Some(applicable_traits) = opt_applicable_traits { - for &trait_did in applicable_traits { + for trait_candidate in applicable_traits { + let trait_did = trait_candidate.def_id; if duplicates.insert(trait_did) { - self.assemble_extension_candidates_for_trait(trait_did)?; + self.import_id = trait_candidate.import_id; + let result = self.assemble_extension_candidates_for_trait(trait_did); + self.import_id = None; + result?; } } } @@ -587,7 +598,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { fn assemble_extension_candidates_for_all_traits(&mut self) -> Result<(), MethodError<'tcx>> { let mut duplicates = HashSet::new(); - for trait_info in suggest::all_traits(self.fcx.ccx) { + for trait_info in suggest::all_traits(self.ccx) { if duplicates.insert(trait_info.def_id) { self.assemble_extension_candidates_for_trait(trait_info.def_id)?; } @@ -604,7 +615,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { // Check whether `trait_def_id` defines a method with suitable name: let trait_items = - self.tcx().trait_items(trait_def_id); + self.tcx.trait_items(trait_def_id); let maybe_item = trait_items.iter() .find(|item| item.name() == self.item_name); @@ -635,10 +646,10 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { trait_def_id: DefId, item: ty::ImplOrTraitItem<'tcx>) { - let trait_def = self.tcx().lookup_trait_def(trait_def_id); + let trait_def = self.tcx.lookup_trait_def(trait_def_id); // FIXME(arielb1): can we use for_each_relevant_impl here? 
- trait_def.for_each_impl(self.tcx(), |impl_def_id| { + trait_def.for_each_impl(self.tcx, |impl_def_id| { debug!("assemble_extension_candidates_for_trait_impl: trait_def_id={:?} \ impl_def_id={:?}", trait_def_id, @@ -653,9 +664,9 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { debug!("impl_substs={:?}", impl_substs); let impl_trait_ref = - self.tcx().impl_trait_ref(impl_def_id) + self.tcx.impl_trait_ref(impl_def_id) .unwrap() // we know this is a trait impl - .subst(self.tcx(), &impl_substs); + .subst(self.tcx, &impl_substs); debug!("impl_trait_ref={:?}", impl_trait_ref); @@ -668,8 +679,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { // Normalize the receiver. We can't use normalize_associated_types_in // as it will pollute the fcx's fulfillment context after this probe // is over. - let cause = traits::ObligationCause::misc(self.span, self.fcx.body_id); - let mut selcx = &mut traits::SelectionContext::new(self.fcx.infcx()); + let cause = traits::ObligationCause::misc(self.span, self.body_id); + let mut selcx = &mut traits::SelectionContext::new(self.fcx); let traits::Normalized { value: xform_self_ty, obligations } = traits::normalize(selcx, cause, &xform_self_ty); @@ -678,7 +689,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.extension_candidates.push(Candidate { xform_self_ty: xform_self_ty, item: item.clone(), - kind: ExtensionImplCandidate(impl_def_id, impl_substs, obligations) + kind: ExtensionImplCandidate(impl_def_id, impl_substs, obligations), + import_id: self.import_id, }); }); } @@ -689,9 +701,9 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { None => { return true; } }; - let impl_type = self.tcx().lookup_item_type(impl_def_id); + let impl_type = self.tcx.lookup_item_type(impl_def_id); let impl_simplified_type = - match ty::fast_reject::simplify_type(self.tcx(), impl_type.ty, false) { + match ty::fast_reject::simplify_type(self.tcx, impl_type.ty, false) { Some(simplified_type) => simplified_type, None => { return true; } }; @@ -705,7 +717,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { -> Result<(), MethodError<'tcx>> { // Check if this is one of the Fn,FnMut,FnOnce traits. - let tcx = self.tcx(); + let tcx = self.tcx; let kind = if Some(trait_def_id) == tcx.lang_items.fn_trait() { ty::ClosureKind::Fn } else if Some(trait_def_id) == tcx.lang_items.fn_mut_trait() { @@ -725,7 +737,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { _ => continue, }; - let closure_kinds = &self.fcx.inh.tables.borrow().closure_kinds; + let closure_kinds = &self.tables.borrow().closure_kinds; let closure_kind = match closure_kinds.get(&closure_def_id) { Some(&k) => k, None => { @@ -742,10 +754,10 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { // for the purposes of our method lookup, we only take // receiver type into account, so we can just substitute // fresh types here to use during substitution and subtyping. 
- let trait_def = self.tcx().lookup_trait_def(trait_def_id); - let substs = self.infcx().fresh_substs_for_trait(self.span, - &trait_def.generics, - step.self_ty); + let trait_def = self.tcx.lookup_trait_def(trait_def_id); + let substs = self.fresh_substs_for_trait(self.span, + &trait_def.generics, + step.self_ty); let xform_self_ty = self.xform_self_ty(&item, step.self_ty, @@ -753,7 +765,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.inherent_candidates.push(Candidate { xform_self_ty: xform_self_ty, item: item.clone(), - kind: TraitCandidate + kind: TraitCandidate, + import_id: self.import_id, }); } @@ -782,13 +795,13 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { debug!("assemble_projection_candidates: projection_trait_ref={:?}", projection_trait_ref); - let trait_predicates = self.tcx().lookup_predicates(projection_trait_ref.def_id); - let bounds = trait_predicates.instantiate(self.tcx(), projection_trait_ref.substs); + let trait_predicates = self.tcx.lookup_predicates(projection_trait_ref.def_id); + let bounds = trait_predicates.instantiate(self.tcx, projection_trait_ref.substs); let predicates = bounds.predicates.into_vec(); debug!("assemble_projection_candidates: predicates={:?}", predicates); for poly_bound in - traits::elaborate_predicates(self.tcx(), predicates) + traits::elaborate_predicates(self.tcx, predicates) .filter_map(|p| p.to_opt_poly_trait_ref()) .filter(|b| b.def_id() == trait_def_id) { @@ -798,7 +811,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { projection_trait_ref, bound); - if self.infcx().can_equate(&step.self_ty, &bound.self_ty()).is_ok() { + if self.can_equate(&step.self_ty, &bound.self_ty()).is_ok() { let xform_self_ty = self.xform_self_ty(&item, bound.self_ty(), bound.substs); @@ -810,7 +823,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.extension_candidates.push(Candidate { xform_self_ty: xform_self_ty, item: item.clone(), - kind: TraitCandidate + kind: TraitCandidate, + import_id: self.import_id, }); } } @@ -824,8 +838,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { debug!("assemble_where_clause_candidates(trait_def_id={:?})", trait_def_id); - let caller_predicates = self.fcx.inh.infcx.parameter_environment.caller_bounds.clone(); - for poly_bound in traits::elaborate_predicates(self.tcx(), caller_predicates) + let caller_predicates = self.parameter_environment.caller_bounds.clone(); + for poly_bound in traits::elaborate_predicates(self.tcx, caller_predicates) .filter_map(|p| p.to_opt_poly_trait_ref()) .filter(|b| b.def_id() == trait_def_id) { @@ -841,7 +855,8 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.extension_candidates.push(Candidate { xform_self_ty: xform_self_ty, item: item.clone(), - kind: WhereClauseCandidate(poly_bound) + kind: WhereClauseCandidate(poly_bound), + import_id: self.import_id, }); } } @@ -863,7 +878,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self.reset(); let span = self.span; - let tcx = self.tcx(); + let tcx = self.tcx; self.assemble_extension_candidates_for_all_traits()?; @@ -958,7 +973,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { step: &CandidateStep<'tcx>) -> Option> { - let tcx = self.tcx(); + let tcx = self.tcx; // In general, during probing we erase regions. See // `impl_self_ty()` for an explanation. 
@@ -984,7 +999,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { } fn pick_method(&mut self, self_ty: Ty<'tcx>) -> Option> { - debug!("pick_method(self_ty={})", self.infcx().ty_to_string(self_ty)); + debug!("pick_method(self_ty={})", self.ty_to_string(self_ty)); let mut possibly_unsatisfied_predicates = Vec::new(); @@ -1042,10 +1057,14 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { self_ty, probe); - self.infcx().probe(|_| { + self.probe(|_| { // First check that the self type can be related. - match self.make_sub_ty(self_ty, probe.xform_self_ty) { - Ok(()) => { } + match self.sub_types(false, TypeOrigin::Misc(DUMMY_SP), + self_ty, probe.xform_self_ty) { + Ok(InferOk { obligations, .. }) => { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()) + } Err(_) => { debug!("--> cannot relate self-types"); return false; @@ -1073,12 +1092,12 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { } }; - let selcx = &mut traits::SelectionContext::new(self.infcx()); - let cause = traits::ObligationCause::misc(self.span, self.fcx.body_id); + let selcx = &mut traits::SelectionContext::new(self); + let cause = traits::ObligationCause::misc(self.span, self.body_id); // Check whether the impl imposes obligations we have to worry about. - let impl_bounds = self.tcx().lookup_predicates(impl_def_id); - let impl_bounds = impl_bounds.instantiate(self.tcx(), substs); + let impl_bounds = self.tcx.lookup_predicates(impl_def_id); + let impl_bounds = impl_bounds.instantiate(self.tcx, substs); let traits::Normalized { value: impl_bounds, obligations: norm_obligations } = traits::normalize(selcx, cause.clone(), &impl_bounds); @@ -1139,6 +1158,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { Some(Pick { item: probes[0].item.clone(), kind: TraitPick, + import_id: probes[0].import_id, autoderefs: 0, autoref: None, unsize: None @@ -1147,13 +1167,6 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { /////////////////////////////////////////////////////////////////////////// // MISCELLANY - - fn make_sub_ty(&self, sub: Ty<'tcx>, sup: Ty<'tcx>) -> infer::UnitResult<'tcx> { - self.infcx().sub_types(false, TypeOrigin::Misc(DUMMY_SP), sub, sup) - // FIXME(#32730) propagate obligations - .map(|InferOk { obligations, .. }| assert!(obligations.is_empty())) - } - fn has_applicable_self(&self, item: &ty::ImplOrTraitItem) -> bool { // "fast track" -- check for usage of sugar match *item { @@ -1233,7 +1246,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { placeholder = (*substs).clone().with_method(Vec::new(), method_regions); - self.infcx().type_vars_for_defs( + self.type_vars_for_defs( self.span, subst::FnSpace, &mut placeholder, @@ -1246,7 +1259,7 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { // in the values from the substitution. 
let xform_self_ty = method.fty.sig.input(0); let xform_self_ty = self.erase_late_bound_regions(&xform_self_ty); - let xform_self_ty = xform_self_ty.subst(self.tcx(), substs); + let xform_self_ty = xform_self_ty.subst(self.tcx, substs); xform_self_ty } @@ -1256,11 +1269,11 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { impl_def_id: DefId) -> (Ty<'tcx>, subst::Substs<'tcx>) { - let impl_pty = self.tcx().lookup_item_type(impl_def_id); + let impl_pty = self.tcx.lookup_item_type(impl_def_id); let type_vars = impl_pty.generics.types.map( - |_| self.infcx().next_ty_var()); + |_| self.next_ty_var()); let region_placeholders = impl_pty.generics.regions.map( @@ -1291,35 +1304,22 @@ impl<'a,'tcx> ProbeContext<'a,'tcx> { fn erase_late_bound_regions(&self, value: &ty::Binder) -> T where T : TypeFoldable<'tcx> { - self.tcx().erase_late_bound_regions(value) + self.tcx.erase_late_bound_regions(value) } -} -fn impl_item<'tcx>(tcx: &TyCtxt<'tcx>, - impl_def_id: DefId, - item_name: ast::Name) - -> Option> -{ - let impl_items = tcx.impl_items.borrow(); - let impl_items = impl_items.get(&impl_def_id).unwrap(); - impl_items - .iter() - .map(|&did| tcx.impl_or_trait_item(did.def_id())) - .find(|item| item.name() == item_name) -} + fn impl_item(&self, impl_def_id: DefId) + -> Option> + { + self.fcx.impl_item(impl_def_id, self.item_name) + } -/// Find item with name `item_name` defined in `trait_def_id` -/// and return it, or `None`, if no such item. -fn trait_item<'tcx>(tcx: &TyCtxt<'tcx>, - trait_def_id: DefId, - item_name: ast::Name) - -> Option> -{ - let trait_items = tcx.trait_items(trait_def_id); - debug!("trait_method; items: {:?}", trait_items); - trait_items.iter() - .find(|item| item.name() == item_name) - .cloned() + /// Find item with name `item_name` defined in `trait_def_id` + /// and return it, or `None`, if no such item. + fn trait_item(&self, trait_def_id: DefId) + -> Option> + { + self.fcx.trait_item(trait_def_id, self.item_name) + } } impl<'tcx> Candidate<'tcx> { @@ -1344,6 +1344,7 @@ impl<'tcx> Candidate<'tcx> { WhereClausePick(trait_ref.clone()) } }, + import_id: self.import_id, autoderefs: 0, autoref: None, unsize: None diff --git a/src/librustc_typeck/check/method/suggest.rs b/src/librustc_typeck/check/method/suggest.rs index c5195cf878..2cd60d2025 100644 --- a/src/librustc_typeck/check/method/suggest.rs +++ b/src/librustc_typeck/check/method/suggest.rs @@ -13,11 +13,10 @@ use CrateCtxt; -use astconv::AstConv; -use check::{self, FnCtxt, UnresolvedTypeAction, autoderef}; +use check::{self, FnCtxt, UnresolvedTypeAction}; use rustc::hir::map as hir_map; use rustc::ty::{self, Ty, ToPolyTraitRef, ToPredicate, TypeFoldable}; -use middle::cstore::{self, CrateStore}; +use middle::cstore; use hir::def::Def; use hir::def_id::DefId; use middle::lang_items::FnOnceTraitLangItem; @@ -37,370 +36,368 @@ use rustc::hir::Expr_; use std::cell; use std::cmp::Ordering; -use super::{MethodError, NoMatchData, CandidateSource, impl_item, trait_item}; +use super::{MethodError, NoMatchData, CandidateSource}; use super::probe::Mode; -fn is_fn_ty<'a, 'tcx>(ty: &Ty<'tcx>, fcx: &FnCtxt<'a, 'tcx>, span: Span) -> bool { - let cx = fcx.tcx(); - match ty.sty { - // Not all of these (e.g. unsafe fns) implement FnOnce - // so we look for these beforehand - ty::TyClosure(..) | ty::TyFnDef(..) 
| ty::TyFnPtr(_) => true, - // If it's not a simple function, look for things which implement FnOnce - _ => { - if let Ok(fn_once_trait_did) = - cx.lang_items.require(FnOnceTraitLangItem) { - let infcx = fcx.infcx(); - let (_, _, opt_is_fn) = autoderef(fcx, - span, - ty, - || None, - UnresolvedTypeAction::Ignore, - LvaluePreference::NoPreference, - |ty, _| { - infcx.probe(|_| { - let fn_once_substs = - Substs::new_trait(vec![infcx.next_ty_var()], - Vec::new(), - ty); - let trait_ref = - ty::TraitRef::new(fn_once_trait_did, - cx.mk_substs(fn_once_substs)); - let poly_trait_ref = trait_ref.to_poly_trait_ref(); - let obligation = Obligation::misc(span, - fcx.body_id, - poly_trait_ref - .to_predicate()); - let mut selcx = SelectionContext::new(infcx); - - if selcx.evaluate_obligation(&obligation) { - Some(()) - } else { - None - } - }) - }); - - opt_is_fn.is_some() - } else { - false +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + fn is_fn_ty(&self, ty: &Ty<'tcx>, span: Span) -> bool { + let tcx = self.tcx; + match ty.sty { + // Not all of these (e.g. unsafe fns) implement FnOnce + // so we look for these beforehand + ty::TyClosure(..) | ty::TyFnDef(..) | ty::TyFnPtr(_) => true, + // If it's not a simple function, look for things which implement FnOnce + _ => { + if let Ok(fn_once_trait_did) = + tcx.lang_items.require(FnOnceTraitLangItem) { + let (_, _, opt_is_fn) = self.autoderef(span, + ty, + || None, + UnresolvedTypeAction::Ignore, + LvaluePreference::NoPreference, + |ty, _| { + self.probe(|_| { + let fn_once_substs = + Substs::new_trait(vec![self.next_ty_var()], vec![], ty); + let trait_ref = + ty::TraitRef::new(fn_once_trait_did, + tcx.mk_substs(fn_once_substs)); + let poly_trait_ref = trait_ref.to_poly_trait_ref(); + let obligation = Obligation::misc(span, + self.body_id, + poly_trait_ref + .to_predicate()); + let mut selcx = SelectionContext::new(self); + + if selcx.evaluate_obligation(&obligation) { + Some(()) + } else { + None + } + }) + }); + + opt_is_fn.is_some() + } else { + false + } } } } -} - -pub fn report_error<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - rcvr_ty: Ty<'tcx>, - item_name: ast::Name, - rcvr_expr: Option<&hir::Expr>, - error: MethodError<'tcx>) -{ - // avoid suggestions when we don't know what's going on. - if rcvr_ty.references_error() { - return - } + pub fn report_method_error(&self, + span: Span, + rcvr_ty: Ty<'tcx>, + item_name: ast::Name, + rcvr_expr: Option<&hir::Expr>, + error: MethodError<'tcx>) + { + // avoid suggestions when we don't know what's going on. + if rcvr_ty.references_error() { + return + } - match error { - MethodError::NoMatch(NoMatchData { static_candidates: static_sources, - unsatisfied_predicates, - out_of_scope_traits, - mode, .. 
}) => { - let cx = fcx.tcx(); - - let mut err = fcx.type_error_struct( - span, - |actual| { - format!("no {} named `{}` found for type `{}` \ - in the current scope", - if mode == Mode::MethodCall { "method" } - else { "associated item" }, - item_name, - actual) - }, - rcvr_ty, - None); - - // If the item has the name of a field, give a help note - if let (&ty::TyStruct(def, substs), Some(expr)) = (&rcvr_ty.sty, rcvr_expr) { - if let Some(field) = def.struct_variant().find_field_named(item_name) { - let expr_string = match cx.sess.codemap().span_to_snippet(expr.span) { - Ok(expr_string) => expr_string, - _ => "s".into() // Default to a generic placeholder for the - // expression when we can't generate a string - // snippet - }; - - let field_ty = field.ty(cx, substs); - - if is_fn_ty(&field_ty, &fcx, span) { - err.span_note(span, - &format!("use `({0}.{1})(...)` if you meant to call \ - the function stored in the `{1}` field", - expr_string, item_name)); - } else { - err.span_note(span, &format!("did you mean to write `{0}.{1}`?", - expr_string, item_name)); + let report_candidates = |err: &mut DiagnosticBuilder, + mut sources: Vec| { + + sources.sort(); + sources.dedup(); + // Dynamic limit to avoid hiding just one candidate, which is silly. + let limit = if sources.len() == 5 { 5 } else { 4 }; + + for (idx, source) in sources.iter().take(limit).enumerate() { + match *source { + CandidateSource::ImplSource(impl_did) => { + // Provide the best span we can. Use the item, if local to crate, else + // the impl, if local to crate (item may be defaulted), else nothing. + let item = self.impl_item(impl_did, item_name) + .or_else(|| { + self.trait_item( + self.tcx.impl_trait_ref(impl_did).unwrap().def_id, + + item_name + ) + }).unwrap(); + let note_span = self.tcx.map.span_if_local(item.def_id()).or_else(|| { + self.tcx.map.span_if_local(impl_did) + }); + + let impl_ty = self.impl_self_ty(span, impl_did).ty; + + let insertion = match self.tcx.impl_trait_ref(impl_did) { + None => format!(""), + Some(trait_ref) => { + format!(" of the trait `{}`", + self.tcx.item_path_str(trait_ref.def_id)) + } + }; + + let note_str = format!("candidate #{} is defined in an impl{} \ + for the type `{}`", + idx + 1, + insertion, + impl_ty); + if let Some(note_span) = note_span { + // We have a span pointing to the method. Show note with snippet. + err.span_note(note_span, ¬e_str); + } else { + err.note(¬e_str); + } + } + CandidateSource::TraitSource(trait_did) => { + let item = self.trait_item(trait_did, item_name).unwrap(); + let item_span = self.tcx.map.def_id_span(item.def_id(), span); + span_note!(err, item_span, + "candidate #{} is defined in the trait `{}`", + idx + 1, + self.tcx.item_path_str(trait_did)); } } } + if sources.len() > limit { + err.note(&format!("and {} others", sources.len() - limit)); + } + }; - if is_fn_ty(&rcvr_ty, &fcx, span) { - macro_rules! report_function { - ($span:expr, $name:expr) => { - err.fileline_note( - $span, - &format!("{} is a function, perhaps you wish to call it", - $name)); + match error { + MethodError::NoMatch(NoMatchData { static_candidates: static_sources, + unsatisfied_predicates, + out_of_scope_traits, + mode, .. 
}) => { + let tcx = self.tcx; + + let mut err = self.type_error_struct( + span, + |actual| { + format!("no {} named `{}` found for type `{}` \ + in the current scope", + if mode == Mode::MethodCall { "method" } + else { "associated item" }, + item_name, + actual) + }, + rcvr_ty, + None); + + // If the item has the name of a field, give a help note + if let (&ty::TyStruct(def, substs), Some(expr)) = (&rcvr_ty.sty, rcvr_expr) { + if let Some(field) = def.struct_variant().find_field_named(item_name) { + let expr_string = match tcx.sess.codemap().span_to_snippet(expr.span) { + Ok(expr_string) => expr_string, + _ => "s".into() // Default to a generic placeholder for the + // expression when we can't generate a string + // snippet + }; + + let field_ty = field.ty(tcx, substs); + + if self.is_fn_ty(&field_ty, span) { + err.span_note(span, + &format!("use `({0}.{1})(...)` if you meant to call \ + the function stored in the `{1}` field", + expr_string, item_name)); + } else { + err.span_note(span, &format!("did you mean to write `{0}.{1}`?", + expr_string, item_name)); + } } } - if let Some(expr) = rcvr_expr { - if let Ok (expr_string) = cx.sess.codemap().span_to_snippet(expr.span) { - report_function!(expr.span, expr_string); - err.span_suggestion(expr.span, - "try calling the base function:", - format!("{}()", - expr_string)); + if self.is_fn_ty(&rcvr_ty, span) { + macro_rules! report_function { + ($span:expr, $name:expr) => { + err.note(&format!("{} is a function, perhaps you wish to call it", + $name)); + } } - else if let Expr_::ExprPath(_, path) = expr.node.clone() { - if let Some(segment) = path.segments.last() { - report_function!(expr.span, segment.identifier.name); + + if let Some(expr) = rcvr_expr { + if let Ok (expr_string) = tcx.sess.codemap().span_to_snippet(expr.span) { + report_function!(expr.span, expr_string); + } + else if let Expr_::ExprPath(_, path) = expr.node.clone() { + if let Some(segment) = path.segments.last() { + report_function!(expr.span, segment.name); + } } } } - } - if !static_sources.is_empty() { - err.fileline_note( - span, - "found the following associated functions; to be used as \ - methods, functions must have a `self` parameter"); + if !static_sources.is_empty() { + err.note( + "found the following associated functions; to be used as \ + methods, functions must have a `self` parameter"); - report_candidates(fcx, &mut err, span, item_name, static_sources); - } + report_candidates(&mut err, static_sources); + } - if !unsatisfied_predicates.is_empty() { - let bound_list = unsatisfied_predicates.iter() - .map(|p| format!("`{} : {}`", - p.self_ty(), - p)) - .collect::>() - .join(", "); - err.fileline_note( - span, - &format!("the method `{}` exists but the \ - following trait bounds were not satisfied: {}", - item_name, - bound_list)); - } + if !unsatisfied_predicates.is_empty() { + let bound_list = unsatisfied_predicates.iter() + .map(|p| format!("`{} : {}`", + p.self_ty(), + p)) + .collect::>() + .join(", "); + err.note( + &format!("the method `{}` exists but the \ + following trait bounds were not satisfied: {}", + item_name, + bound_list)); + } - suggest_traits_to_import(fcx, &mut err, span, rcvr_ty, item_name, - rcvr_expr, out_of_scope_traits); - err.emit(); - } + self.suggest_traits_to_import(&mut err, span, rcvr_ty, item_name, + rcvr_expr, out_of_scope_traits); + err.emit(); + } - MethodError::Ambiguity(sources) => { - let mut err = struct_span_err!(fcx.sess(), span, E0034, - "multiple applicable items in scope"); + MethodError::Ambiguity(sources) => { + 
let mut err = struct_span_err!(self.sess(), span, E0034, + "multiple applicable items in scope"); - report_candidates(fcx, &mut err, span, item_name, sources); - err.emit(); - } + report_candidates(&mut err, sources); + err.emit(); + } - MethodError::ClosureAmbiguity(trait_def_id) => { - let msg = format!("the `{}` method from the `{}` trait cannot be explicitly \ - invoked on this closure as we have not yet inferred what \ - kind of closure it is", - item_name, - fcx.tcx().item_path_str(trait_def_id)); - let msg = if let Some(callee) = rcvr_expr { - format!("{}; use overloaded call notation instead (e.g., `{}()`)", - msg, pprust::expr_to_string(callee)) - } else { - msg - }; - fcx.sess().span_err(span, &msg); - } + MethodError::ClosureAmbiguity(trait_def_id) => { + let msg = format!("the `{}` method from the `{}` trait cannot be explicitly \ + invoked on this closure as we have not yet inferred what \ + kind of closure it is", + item_name, + self.tcx.item_path_str(trait_def_id)); + let msg = if let Some(callee) = rcvr_expr { + format!("{}; use overloaded call notation instead (e.g., `{}()`)", + msg, pprust::expr_to_string(callee)) + } else { + msg + }; + self.sess().span_err(span, &msg); + } - MethodError::PrivateMatch(def) => { - let msg = format!("{} `{}` is private", def.kind_name(), item_name); - fcx.tcx().sess.span_err(span, &msg); + MethodError::PrivateMatch(def) => { + let msg = format!("{} `{}` is private", def.kind_name(), item_name); + self.tcx.sess.span_err(span, &msg); + } } } - fn report_candidates(fcx: &FnCtxt, - err: &mut DiagnosticBuilder, - span: Span, - item_name: ast::Name, - mut sources: Vec) { - sources.sort(); - sources.dedup(); - - for (idx, source) in sources.iter().enumerate() { - match *source { - CandidateSource::ImplSource(impl_did) => { - // Provide the best span we can. Use the item, if local to crate, else - // the impl, if local to crate (item may be defaulted), else the call site. 
- let item = impl_item(fcx.tcx(), impl_did, item_name) - .or_else(|| { - trait_item( - fcx.tcx(), - fcx.tcx().impl_trait_ref(impl_did).unwrap().def_id, - item_name - ) - }).unwrap(); - let impl_span = fcx.tcx().map.def_id_span(impl_did, span); - let item_span = fcx.tcx().map.def_id_span(item.def_id(), impl_span); - - let impl_ty = check::impl_self_ty(fcx, span, impl_did).ty; - - let insertion = match fcx.tcx().impl_trait_ref(impl_did) { - None => format!(""), - Some(trait_ref) => { - format!(" of the trait `{}`", - fcx.tcx().item_path_str(trait_ref.def_id)) - } - }; - - span_note!(err, item_span, - "candidate #{} is defined in an impl{} for the type `{}`", - idx + 1, - insertion, - impl_ty); - } - CandidateSource::TraitSource(trait_did) => { - let item = trait_item(fcx.tcx(), trait_did, item_name).unwrap(); - let item_span = fcx.tcx().map.def_id_span(item.def_id(), span); - span_note!(err, item_span, - "candidate #{} is defined in the trait `{}`", - idx + 1, - fcx.tcx().item_path_str(trait_did)); - } + fn suggest_traits_to_import(&self, + err: &mut DiagnosticBuilder, + span: Span, + rcvr_ty: Ty<'tcx>, + item_name: ast::Name, + rcvr_expr: Option<&hir::Expr>, + valid_out_of_scope_traits: Vec) + { + if !valid_out_of_scope_traits.is_empty() { + let mut candidates = valid_out_of_scope_traits; + candidates.sort(); + candidates.dedup(); + let msg = format!( + "items from traits can only be used if the trait is in scope; \ + the following {traits_are} implemented but not in scope, \ + perhaps add a `use` for {one_of_them}:", + traits_are = if candidates.len() == 1 {"trait is"} else {"traits are"}, + one_of_them = if candidates.len() == 1 {"it"} else {"one of them"}); + + err.help(&msg[..]); + + let limit = if candidates.len() == 5 { 5 } else { 4 }; + for (i, trait_did) in candidates.iter().take(limit).enumerate() { + err.help(&format!("candidate #{}: `use {}`", + i + 1, + self.tcx.item_path_str(*trait_did))); + } + if candidates.len() > limit { + err.note(&format!("and {} others", candidates.len() - limit)); } + return } - } -} - -pub type AllTraitsVec = Vec; - -fn suggest_traits_to_import<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - err: &mut DiagnosticBuilder, - span: Span, - rcvr_ty: Ty<'tcx>, - item_name: ast::Name, - rcvr_expr: Option<&hir::Expr>, - valid_out_of_scope_traits: Vec) -{ - let tcx = fcx.tcx(); - - if !valid_out_of_scope_traits.is_empty() { - let mut candidates = valid_out_of_scope_traits; - candidates.sort(); - candidates.dedup(); - let msg = format!( - "items from traits can only be used if the trait is in scope; \ - the following {traits_are} implemented but not in scope, \ - perhaps add a `use` for {one_of_them}:", - traits_are = if candidates.len() == 1 {"trait is"} else {"traits are"}, - one_of_them = if candidates.len() == 1 {"it"} else {"one of them"}); - - err.fileline_help(span, &msg[..]); - - for (i, trait_did) in candidates.iter().enumerate() { - err.fileline_help(span, - &format!("candidate #{}: `use {}`", - i + 1, - fcx.tcx().item_path_str(*trait_did))); + let type_is_local = self.type_derefs_to_local(span, rcvr_ty, rcvr_expr); + + // there's no implemented traits, so lets suggest some traits to + // implement, by finding ones that have the item name, and are + // legal to implement. + let mut candidates = all_traits(self.ccx) + .filter(|info| { + // we approximate the coherence rules to only suggest + // traits that are legal to implement by requiring that + // either the type or trait is local. 
Multidispatch means + // this isn't perfect (that is, there are cases when + // implementing a trait would be legal but is rejected + // here). + (type_is_local || info.def_id.is_local()) + && self.trait_item(info.def_id, item_name).is_some() + }) + .collect::>(); + + if !candidates.is_empty() { + // sort from most relevant to least relevant + candidates.sort_by(|a, b| a.cmp(b).reverse()); + candidates.dedup(); + + // FIXME #21673 this help message could be tuned to the case + // of a type parameter: suggest adding a trait bound rather + // than implementing. + let msg = format!( + "items from traits can only be used if the trait is implemented and in scope; \ + the following {traits_define} an item `{name}`, \ + perhaps you need to implement {one_of_them}:", + traits_define = if candidates.len() == 1 {"trait defines"} else {"traits define"}, + one_of_them = if candidates.len() == 1 {"it"} else {"one of them"}, + name = item_name); + + err.help(&msg[..]); + + for (i, trait_info) in candidates.iter().enumerate() { + err.help(&format!("candidate #{}: `{}`", + i + 1, + self.tcx.item_path_str(trait_info.def_id))); + } } - return } - let type_is_local = type_derefs_to_local(fcx, span, rcvr_ty, rcvr_expr); - - // there's no implemented traits, so lets suggest some traits to - // implement, by finding ones that have the item name, and are - // legal to implement. - let mut candidates = all_traits(fcx.ccx) - .filter(|info| { - // we approximate the coherence rules to only suggest - // traits that are legal to implement by requiring that - // either the type or trait is local. Multidispatch means - // this isn't perfect (that is, there are cases when - // implementing a trait would be legal but is rejected - // here). - (type_is_local || info.def_id.is_local()) - && trait_item(tcx, info.def_id, item_name).is_some() - }) - .collect::>(); - - if !candidates.is_empty() { - // sort from most relevant to least relevant - candidates.sort_by(|a, b| a.cmp(b).reverse()); - candidates.dedup(); - - // FIXME #21673 this help message could be tuned to the case - // of a type parameter: suggest adding a trait bound rather - // than implementing. - let msg = format!( - "items from traits can only be used if the trait is implemented and in scope; \ - the following {traits_define} an item `{name}`, \ - perhaps you need to implement {one_of_them}:", - traits_define = if candidates.len() == 1 {"trait defines"} else {"traits define"}, - one_of_them = if candidates.len() == 1 {"it"} else {"one of them"}, - name = item_name); - - err.fileline_help(span, &msg[..]); - - for (i, trait_info) in candidates.iter().enumerate() { - err.fileline_help(span, - &format!("candidate #{}: `{}`", - i + 1, - fcx.tcx().item_path_str(trait_info.def_id))); + /// Checks whether there is a local type somewhere in the chain of + /// autoderefs of `rcvr_ty`. + fn type_derefs_to_local(&self, + span: Span, + rcvr_ty: Ty<'tcx>, + rcvr_expr: Option<&hir::Expr>) -> bool { + fn is_local(ty: Ty) -> bool { + match ty.sty { + ty::TyEnum(def, _) | ty::TyStruct(def, _) => def.did.is_local(), + + ty::TyTrait(ref tr) => tr.principal_def_id().is_local(), + + ty::TyParam(_) => true, + + // everything else (primitive types etc.) is effectively + // non-local (there are "edge" cases, e.g. (LocalType,), but + // the noise from these sort of types is usually just really + // annoying, rather than any sort of help). + _ => false + } } - } -} - -/// Checks whether there is a local type somewhere in the chain of -/// autoderefs of `rcvr_ty`. 
-fn type_derefs_to_local<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - rcvr_ty: Ty<'tcx>, - rcvr_expr: Option<&hir::Expr>) -> bool { - fn is_local(ty: Ty) -> bool { - match ty.sty { - ty::TyEnum(def, _) | ty::TyStruct(def, _) => def.did.is_local(), - - ty::TyTrait(ref tr) => tr.principal_def_id().is_local(), - - ty::TyParam(_) => true, - // everything else (primitive types etc.) is effectively - // non-local (there are "edge" cases, e.g. (LocalType,), but - // the noise from these sort of types is usually just really - // annoying, rather than any sort of help). - _ => false + // This occurs for UFCS desugaring of `T::method`, where there is no + // receiver expression for the method call, and thus no autoderef. + if rcvr_expr.is_none() { + return is_local(self.resolve_type_vars_with_obligations(rcvr_ty)); } - } - // This occurs for UFCS desugaring of `T::method`, where there is no - // receiver expression for the method call, and thus no autoderef. - if rcvr_expr.is_none() { - return is_local(fcx.resolve_type_vars_if_possible(rcvr_ty)); + self.autoderef(span, rcvr_ty, || None, + check::UnresolvedTypeAction::Ignore, ty::NoPreference, + |ty, _| { + if is_local(ty) { + Some(()) + } else { + None + } + }).2.is_some() } - - check::autoderef(fcx, span, rcvr_ty, || None, - check::UnresolvedTypeAction::Ignore, ty::NoPreference, - |ty, _| { - if is_local(ty) { - Some(()) - } else { - None - } - }).2.is_some() } +pub type AllTraitsVec = Vec; + #[derive(Copy, Clone)] pub struct TraitInfo { pub def_id: DefId, diff --git a/src/librustc_typeck/check/mod.rs b/src/librustc_typeck/check/mod.rs index 67b91f7838..264003bb62 100644 --- a/src/librustc_typeck/check/mod.rs +++ b/src/librustc_typeck/check/mod.rs @@ -80,27 +80,24 @@ pub use self::Expectation::*; pub use self::compare_method::{compare_impl_method, compare_const_impl}; use self::TupleArgumentsFlag::*; -use astconv::{self, ast_region_to_region, ast_ty_to_ty, AstConv, PathParamMode}; -use check::_match::pat_ctxt; +use astconv::{AstConv, ast_region_to_region, PathParamMode}; +use check::_match::PatCtxt; use dep_graph::DepNode; use fmt_macros::{Parser, Piece, Position}; -use middle::astconv_util::prohibit_type_params; use middle::cstore::LOCAL_CRATE; use hir::def::{self, Def}; use hir::def_id::DefId; -use rustc::infer::{self, InferOk, TypeOrigin, TypeTrace, type_variable}; +use rustc::infer::{self, InferCtxt, InferOk, TypeOrigin, TypeTrace, type_variable}; use hir::pat_util::{self, pat_id_map}; use rustc::ty::subst::{self, Subst, Substs, VecPerParamSpace, ParamSpace}; -use rustc::traits::{self, report_fulfillment_errors, ProjectionMode}; +use rustc::traits::{self, ProjectionMode}; use rustc::ty::{GenericPredicates, TypeScheme}; use rustc::ty::{ParamTy, ParameterEnvironment}; use rustc::ty::{LvaluePreference, NoPreference, PreferMutLvalue}; use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, Visibility}; use rustc::ty::{MethodCall, MethodCallee}; use rustc::ty::adjustment; -use rustc::ty::error::TypeError; -use rustc::ty::fold::{TypeFolder, TypeFoldable}; -use rustc::ty::relate::TypeRelation; +use rustc::ty::fold::TypeFoldable; use rustc::ty::util::{Representability, IntTypeExt}; use require_c_abi_if_variadic; use rscope::{ElisionFailureInfo, RegionScope}; @@ -114,13 +111,14 @@ use util::nodemap::{DefIdMap, FnvHashMap, NodeMap}; use std::cell::{Cell, Ref, RefCell}; use std::collections::{HashSet}; use std::mem::replace; +use std::ops::Deref; use syntax::abi::Abi; use syntax::ast; use syntax::attr; use syntax::attr::AttrMetaMethods; use 
syntax::codemap::{self, Span, Spanned}; use syntax::errors::DiagnosticBuilder; -use syntax::parse::token::{self, InternedString, special_idents}; +use syntax::parse::token::{self, InternedString, keywords}; use syntax::ptr::P; use syntax::util::lev_distance::find_best_match_for_name; @@ -156,14 +154,13 @@ mod op; /// Here, the function `foo()` and the closure passed to /// `bar()` will each have their own `FnCtxt`, but they will /// share the inherited fields. -pub struct Inherited<'a, 'tcx: 'a> { - infcx: infer::InferCtxt<'a, 'tcx>, +pub struct Inherited<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + ccx: &'a CrateCtxt<'a, 'gcx>, + infcx: InferCtxt<'a, 'gcx, 'tcx>, locals: RefCell>>, fulfillment_cx: RefCell>, - tables: &'a RefCell>, - // When we process a call like `c()` where `c` is a closure type, // we may not have decided yet whether `c` is a `Fn`, `FnMut`, or // `FnOnce` closure. In that case, we defer full resolution of the @@ -171,16 +168,23 @@ pub struct Inherited<'a, 'tcx: 'a> { // decision. We keep these deferred resolutions grouped by the // def-id of the closure, so that once we decide, we can easily go // back and process them. - deferred_call_resolutions: RefCell>>>, + deferred_call_resolutions: RefCell>>>, deferred_cast_checks: RefCell>>, } -trait DeferredCallResolution<'tcx> { - fn resolve<'a>(&mut self, fcx: &FnCtxt<'a,'tcx>); +impl<'a, 'gcx, 'tcx> Deref for Inherited<'a, 'gcx, 'tcx> { + type Target = InferCtxt<'a, 'gcx, 'tcx>; + fn deref(&self) -> &Self::Target { + &self.infcx + } +} + +trait DeferredCallResolution<'gcx, 'tcx> { + fn resolve<'a>(&mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>); } -type DeferredCallResolutionHandler<'tcx> = Box+'tcx>; +type DeferredCallResolutionHandler<'gcx, 'tcx> = Box+'tcx>; /// When type-checking an expression, we propagate downward /// whatever type hint we are able in the form of an `Expectation`. @@ -200,7 +204,7 @@ pub enum Expectation<'tcx> { ExpectRvalueLikeUnsized(Ty<'tcx>), } -impl<'tcx> Expectation<'tcx> { +impl<'a, 'gcx, 'tcx> Expectation<'tcx> { // Disregard "castable to" expectations because they // can lead us astray. Consider for example `if cond // {22} else {c} as u8` -- if we propagate the @@ -217,10 +221,10 @@ impl<'tcx> Expectation<'tcx> { // an expected type. Otherwise, we might write parts of the type // when checking the 'then' block which are incompatible with the // 'else' branch. - fn adjust_for_branches<'a>(&self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> { + fn adjust_for_branches(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> { match *self { ExpectHasType(ety) => { - let ety = fcx.infcx().shallow_resolve(ety); + let ety = fcx.shallow_resolve(ety); if !ety.is_ty_var() { ExpectHasType(ety) } else { @@ -233,6 +237,70 @@ impl<'tcx> Expectation<'tcx> { _ => NoExpectation } } + + /// Provide an expectation for an rvalue expression given an *optional* + /// hint, which is not required for type safety (the resulting type might + /// be checked higher up, as is the case with `&expr` and `box expr`), but + /// is useful in determining the concrete type. + /// + /// The primary use case is where the expected type is a fat pointer, + /// like `&[isize]`. For example, consider the following statement: + /// + /// let x: &[isize] = &[1, 2, 3]; + /// + /// In this case, the expected type for the `&[1, 2, 3]` expression is + /// `&[isize]`. 
If however we were to say that `[1, 2, 3]` has the + /// expectation `ExpectHasType([isize])`, that would be too strong -- + /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`. + /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced + /// to the type `&[isize]`. Therefore, we propagate this more limited hint, + /// which still is useful, because it informs integer literals and the like. + /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169 + /// for examples of where this comes up,. + fn rvalue_hint(fcx: &FnCtxt<'a, 'gcx, 'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> { + match fcx.tcx.struct_tail(ty).sty { + ty::TySlice(_) | ty::TyStr | ty::TyTrait(..) => { + ExpectRvalueLikeUnsized(ty) + } + _ => ExpectHasType(ty) + } + } + + // Resolves `expected` by a single level if it is a variable. If + // there is no expected type or resolution is not possible (e.g., + // no constraints yet present), just returns `None`. + fn resolve(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Expectation<'tcx> { + match self { + NoExpectation => { + NoExpectation + } + ExpectCastableToType(t) => { + ExpectCastableToType(fcx.resolve_type_vars_if_possible(&t)) + } + ExpectHasType(t) => { + ExpectHasType(fcx.resolve_type_vars_if_possible(&t)) + } + ExpectRvalueLikeUnsized(t) => { + ExpectRvalueLikeUnsized(fcx.resolve_type_vars_if_possible(&t)) + } + } + } + + fn to_option(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option> { + match self.resolve(fcx) { + NoExpectation => None, + ExpectCastableToType(ty) | + ExpectHasType(ty) | + ExpectRvalueLikeUnsized(ty) => Some(ty), + } + } + + fn only_has_type(self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Option> { + match self.resolve(fcx) { + ExpectHasType(ty) => Some(ty), + _ => None + } + } } #[derive(Copy, Clone)] @@ -277,7 +345,9 @@ impl UnsafetyState { } #[derive(Clone)] -pub struct FnCtxt<'a, 'tcx: 'a> { +pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + ast_ty_to_ty_cache: RefCell>>, + body_id: ast::NodeId, // This flag is set to true if, during the writeback phase, we encounter @@ -294,27 +364,55 @@ pub struct FnCtxt<'a, 'tcx: 'a> { ps: RefCell, - inh: &'a Inherited<'a, 'tcx>, + inh: &'a Inherited<'a, 'gcx, 'tcx>, +} + +impl<'a, 'gcx, 'tcx> Deref for FnCtxt<'a, 'gcx, 'tcx> { + type Target = Inherited<'a, 'gcx, 'tcx>; + fn deref(&self) -> &Self::Target { + &self.inh + } +} - ccx: &'a CrateCtxt<'a, 'tcx>, +/// Helper type of a temporary returned by ccx.inherited(...). +/// Necessary because we can't write the following bound: +/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>). 
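As a minimal illustration of the `rvalue_hint` behaviour documented above: the array literal itself has type `[isize; 3]`, and only the borrow expression as a whole is coerced to the fat pointer type.

    fn main() {
        // `[1, 2, 3]` has type `[isize; 3]`; the weaker rvalue hint lets the
        // literals become `isize`, while the unsized coercion to `&[isize]`
        // happens only at the `&` expression.
        let x: &[isize] = &[1, 2, 3];
        assert_eq!(x.len(), 3);
    }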
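The two `Deref` impls introduced above (`Inherited` dereferencing to `InferCtxt`, `FnCtxt` dereferencing to `Inherited`) are what allow calls like `fcx.infcx().shallow_resolve(..)` elsewhere in this patch to become plain `fcx.shallow_resolve(..)`. A minimal sketch of the same layering, using simplified stand-in structs rather than the real compiler types:

    use std::ops::Deref;

    // Stand-ins for InferCtxt / Inherited / FnCtxt, for illustration only.
    struct InferCtxt { err_count: usize }
    struct Inherited { infcx: InferCtxt }
    struct FnCtxt { inh: Inherited }

    impl Deref for Inherited {
        type Target = InferCtxt;
        fn deref(&self) -> &InferCtxt { &self.infcx }
    }

    impl Deref for FnCtxt {
        type Target = Inherited;
        fn deref(&self) -> &Inherited { &self.inh }
    }

    fn main() {
        let fcx = FnCtxt { inh: Inherited { infcx: InferCtxt { err_count: 0 } } };
        // Field access derefs twice: FnCtxt -> Inherited -> InferCtxt.
        assert_eq!(fcx.err_count, 0);
    }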
+pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + ccx: &'a CrateCtxt<'a, 'gcx>, + infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx> } -impl<'a, 'tcx> Inherited<'a, 'tcx> { - fn new(tcx: &'a TyCtxt<'tcx>, - tables: &'a RefCell>, - param_env: ty::ParameterEnvironment<'a, 'tcx>) - -> Inherited<'a, 'tcx> { - - Inherited { - infcx: infer::new_infer_ctxt(tcx, tables, Some(param_env), ProjectionMode::AnyFinal), - fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()), - locals: RefCell::new(NodeMap()), - tables: tables, - deferred_call_resolutions: RefCell::new(DefIdMap()), - deferred_cast_checks: RefCell::new(Vec::new()), +impl<'a, 'gcx, 'tcx> CrateCtxt<'a, 'gcx> { + pub fn inherited(&'a self, param_env: Option>) + -> InheritedBuilder<'a, 'gcx, 'tcx> { + InheritedBuilder { + ccx: self, + infcx: self.tcx.infer_ctxt(Some(ty::Tables::empty()), + param_env, + ProjectionMode::AnyFinal) } } +} + +impl<'a, 'gcx, 'tcx> InheritedBuilder<'a, 'gcx, 'tcx> { + fn enter(&'tcx mut self, f: F) -> R + where F: for<'b> FnOnce(Inherited<'b, 'gcx, 'tcx>) -> R + { + let ccx = self.ccx; + self.infcx.enter(|infcx| { + f(Inherited { + ccx: ccx, + infcx: infcx, + fulfillment_cx: RefCell::new(traits::FulfillmentContext::new()), + locals: RefCell::new(NodeMap()), + deferred_call_resolutions: RefCell::new(DefIdMap()), + deferred_cast_checks: RefCell::new(Vec::new()), + }) + }) + } +} +impl<'a, 'gcx, 'tcx> Inherited<'a, 'gcx, 'tcx> { fn normalize_associated_types_in(&self, span: Span, body_id: ast::NodeId, @@ -322,7 +420,7 @@ impl<'a, 'tcx> Inherited<'a, 'tcx> { -> T where T : TypeFoldable<'tcx> { - assoc::normalize_associated_types_in(&self.infcx, + assoc::normalize_associated_types_in(self, &mut self.fulfillment_cx.borrow_mut(), span, body_id, @@ -331,32 +429,6 @@ impl<'a, 'tcx> Inherited<'a, 'tcx> { } -// Used by check_const and check_enum_variants -pub fn blank_fn_ctxt<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>, - inh: &'a Inherited<'a, 'tcx>, - rty: ty::FnOutput<'tcx>, - body_id: ast::NodeId) - -> FnCtxt<'a, 'tcx> { - FnCtxt { - body_id: body_id, - writeback_errors: Cell::new(false), - err_count_on_creation: ccx.tcx.sess.err_count(), - ret_ty: rty, - ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)), - inh: inh, - ccx: ccx - } -} - -fn static_inherited_fields<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>, - tables: &'a RefCell>) - -> Inherited<'a, 'tcx> { - // It's kind of a kludge to manufacture a fake function context - // and statement context, but we might as well do write the code only once - let param_env = ccx.tcx.empty_parameter_environment(); - Inherited::new(ccx.tcx, &tables, param_env) -} - struct CheckItemTypesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> } struct CheckItemBodiesVisitor<'a, 'tcx: 'a> { ccx: &'a CrateCtxt<'a, 'tcx> } @@ -414,7 +486,7 @@ pub fn check_drop_impls(ccx: &CrateCtxt) -> CompileResult { drop_trait.for_each_impl(ccx.tcx, |drop_impl_did| { let _task = ccx.tcx.dep_graph.in_task(DepNode::DropckImpl(drop_impl_did)); if drop_impl_did.is_local() { - match dropck::check_drop_impl(ccx.tcx, drop_impl_did) { + match dropck::check_drop_impl(ccx, drop_impl_did) { Ok(()) => {} Err(()) => { assert!(ccx.tcx.sess.has_errors()); @@ -431,65 +503,61 @@ fn check_bare_fn<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, fn_id: ast::NodeId, fn_span: Span, raw_fty: Ty<'tcx>, - param_env: ty::ParameterEnvironment<'a, 'tcx>) + param_env: ty::ParameterEnvironment<'tcx>) { - match raw_fty.sty { - ty::TyFnDef(_, _, ref fn_ty) => { - let tables = RefCell::new(ty::Tables::empty()); - let inh = 
Inherited::new(ccx.tcx, &tables, param_env); - - // Compute the fty from point of view of inside fn. - let fn_scope = ccx.tcx.region_maps.call_site_extent(fn_id, body.id); - let fn_sig = - fn_ty.sig.subst(ccx.tcx, &inh.infcx.parameter_environment.free_substs); - let fn_sig = - ccx.tcx.liberate_late_bound_regions(fn_scope, &fn_sig); - let fn_sig = - inh.normalize_associated_types_in(body.span, - body.id, - &fn_sig); - - let fcx = check_fn(ccx, fn_ty.unsafety, fn_id, &fn_sig, - decl, fn_id, body, &inh); - - fcx.select_all_obligations_and_apply_defaults(); - upvar::closure_analyze_fn(&fcx, fn_id, decl, body); - fcx.select_obligations_where_possible(); - fcx.check_casts(); - fcx.select_all_obligations_or_error(); // Casts can introduce new obligations. - - regionck::regionck_fn(&fcx, fn_id, fn_span, decl, body); - writeback::resolve_type_vars_in_fn(&fcx, decl, body); - } + let fn_ty = match raw_fty.sty { + ty::TyFnDef(_, _, f) => f, _ => span_bug!(body.span, "check_bare_fn: function type expected") - } + }; + + ccx.inherited(Some(param_env)).enter(|inh| { + // Compute the fty from point of view of inside fn. + let fn_scope = inh.tcx.region_maps.call_site_extent(fn_id, body.id); + let fn_sig = + fn_ty.sig.subst(inh.tcx, &inh.parameter_environment.free_substs); + let fn_sig = + inh.tcx.liberate_late_bound_regions(fn_scope, &fn_sig); + let fn_sig = + inh.normalize_associated_types_in(body.span, body.id, &fn_sig); + + let fcx = check_fn(&inh, fn_ty.unsafety, fn_id, &fn_sig, decl, fn_id, body); + + fcx.select_all_obligations_and_apply_defaults(); + fcx.closure_analyze_fn(body); + fcx.select_obligations_where_possible(); + fcx.check_casts(); + fcx.select_all_obligations_or_error(); // Casts can introduce new obligations. + + fcx.regionck_fn(fn_id, fn_span, decl, body); + fcx.resolve_type_vars_in_fn(decl, body); + }); } -struct GatherLocalsVisitor<'a, 'tcx: 'a> { - fcx: &'a FnCtxt<'a, 'tcx> +struct GatherLocalsVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fcx: &'a FnCtxt<'a, 'gcx, 'tcx> } -impl<'a, 'tcx> GatherLocalsVisitor<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> GatherLocalsVisitor<'a, 'gcx, 'tcx> { fn assign(&mut self, _span: Span, nid: ast::NodeId, ty_opt: Option>) -> Ty<'tcx> { match ty_opt { None => { // infer the variable's type - let var_ty = self.fcx.infcx().next_ty_var(); - self.fcx.inh.locals.borrow_mut().insert(nid, var_ty); + let var_ty = self.fcx.next_ty_var(); + self.fcx.locals.borrow_mut().insert(nid, var_ty); var_ty } Some(typ) => { // take type that the user specified - self.fcx.inh.locals.borrow_mut().insert(nid, typ); + self.fcx.locals.borrow_mut().insert(nid, typ); typ } } } } -impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> Visitor<'gcx> for GatherLocalsVisitor<'a, 'gcx, 'tcx> { // Add explicitly-declared locals. - fn visit_local(&mut self, local: &'tcx hir::Local) { + fn visit_local(&mut self, local: &'gcx hir::Local) { let o_ty = match local.ty { Some(ref ty) => Some(self.fcx.to_ty(&ty)), None => None @@ -497,15 +565,15 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { self.assign(local.span, local.id, o_ty); debug!("Local variable {:?} is assigned type {}", local.pat, - self.fcx.infcx().ty_to_string( - self.fcx.inh.locals.borrow().get(&local.id).unwrap().clone())); + self.fcx.ty_to_string( + self.fcx.locals.borrow().get(&local.id).unwrap().clone())); intravisit::walk_local(self, local); } // Add pattern bindings. 
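The effect of `GatherLocalsVisitor::assign` above, seen from the user side: an unannotated `let` binding gets a fresh inference variable for its type, while an annotated one records the written type up front. A small sketch:

    fn main() {
        // No annotation: the local starts as a type variable and is later
        // resolved (here to `String`) by unification.
        let inferred = String::from("hi");
        // Annotation: the declared type is recorded for the binding directly.
        let annotated: usize = inferred.len();
        assert_eq!(annotated, 2);
    }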
- fn visit_pat(&mut self, p: &'tcx hir::Pat) { + fn visit_pat(&mut self, p: &'gcx hir::Pat) { if let PatKind::Ident(_, ref path1, _) = p.node { - if pat_util::pat_is_binding(&self.fcx.ccx.tcx.def_map.borrow(), p) { + if pat_util::pat_is_binding(&self.fcx.tcx.def_map.borrow(), p) { let var_ty = self.assign(p.span, p.id, None); self.fcx.require_type_is_sized(var_ty, p.span, @@ -513,15 +581,15 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { debug!("Pattern binding {} is assigned to {} with type {:?}", path1.node, - self.fcx.infcx().ty_to_string( - self.fcx.inh.locals.borrow().get(&p.id).unwrap().clone()), + self.fcx.ty_to_string( + self.fcx.locals.borrow().get(&p.id).unwrap().clone()), var_ty); } } intravisit::walk_pat(self, p); } - fn visit_block(&mut self, b: &'tcx hir::Block) { + fn visit_block(&mut self, b: &'gcx hir::Block) { // non-obvious: the `blk` variable maps to region lb, so // we have to keep this up-to-date. This // is... unfortunate. It'd be nice to not need this. @@ -530,11 +598,11 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { // Since an expr occurs as part of the type fixed size arrays we // need to record the type for that node - fn visit_ty(&mut self, t: &'tcx hir::Ty) { + fn visit_ty(&mut self, t: &'gcx hir::Ty) { match t.node { hir::TyFixedLengthVec(ref ty, ref count_expr) => { self.visit_ty(&ty); - check_expr_with_hint(self.fcx, &count_expr, self.fcx.tcx().types.usize); + self.fcx.check_expr_with_hint(&count_expr, self.fcx.tcx.types.usize); } hir::TyBareFn(ref function_declaration) => { intravisit::walk_fn_decl_nopat(self, &function_declaration.decl); @@ -545,8 +613,8 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { } // Don't descend into the bodies of nested closures - fn visit_fn(&mut self, _: intravisit::FnKind<'tcx>, _: &'tcx hir::FnDecl, - _: &'tcx hir::Block, _: Span, _: ast::NodeId) { } + fn visit_fn(&mut self, _: intravisit::FnKind<'gcx>, _: &'gcx hir::FnDecl, + _: &'gcx hir::Block, _: Span, _: ast::NodeId) { } } /// Helper used by check_bare_fn and check_expr_fn. Does the grungy work of checking a function @@ -555,18 +623,16 @@ impl<'a, 'tcx> Visitor<'tcx> for GatherLocalsVisitor<'a, 'tcx> { /// /// * ... /// * inherited: other fields inherited from the enclosing fn (if any) -fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>, - unsafety: hir::Unsafety, - unsafety_id: ast::NodeId, - fn_sig: &ty::FnSig<'tcx>, - decl: &'tcx hir::FnDecl, - fn_id: ast::NodeId, - body: &'tcx hir::Block, - inherited: &'a Inherited<'a, 'tcx>) - -> FnCtxt<'a, 'tcx> +fn check_fn<'a, 'gcx, 'tcx>(inherited: &'a Inherited<'a, 'gcx, 'tcx>, + unsafety: hir::Unsafety, + unsafety_id: ast::NodeId, + fn_sig: &ty::FnSig<'tcx>, + decl: &'gcx hir::FnDecl, + fn_id: ast::NodeId, + body: &'gcx hir::Block) + -> FnCtxt<'a, 'gcx, 'tcx> { - let tcx = ccx.tcx; - let err_count_on_creation = tcx.sess.err_count(); + let tcx = inherited.tcx; let arg_tys = &fn_sig.inputs; let ret_ty = fn_sig.output; @@ -578,15 +644,8 @@ fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>, // Create the function context. This is either derived from scratch or, // in the case of function expressions, based on the outer context. 
- let fcx = FnCtxt { - body_id: body.id, - writeback_errors: Cell::new(false), - err_count_on_creation: err_count_on_creation, - ret_ty: ret_ty, - ps: RefCell::new(UnsafetyState::function(unsafety, unsafety_id)), - inh: inherited, - ccx: ccx - }; + let fcx = FnCtxt::new(inherited, ret_ty, body.id); + *fcx.ps.borrow_mut() = UnsafetyState::function(unsafety, unsafety_id); if let ty::FnConverging(ret_ty) = ret_ty { fcx.require_type_is_sized(ret_ty, decl.output.span(), traits::ReturnType); @@ -620,17 +679,17 @@ fn check_fn<'a, 'tcx>(ccx: &'a CrateCtxt<'a, 'tcx>, }); // Check the pattern. - let pcx = pat_ctxt { + let pcx = PatCtxt { fcx: &fcx, map: pat_id_map(&tcx.def_map, &input.pat), }; - _match::check_pat(&pcx, &input.pat, *arg_ty); + pcx.check_pat(&input.pat, *arg_ty); } visit.visit_block(body); } - check_block_with_expected(&fcx, body, match ret_ty { + fcx.check_block_with_expected(body, match ret_ty { ty::FnConverging(result_type) => ExpectHasType(result_type), ty::FnDiverging => NoExpectation }); @@ -673,17 +732,26 @@ pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) { let impl_def_id = ccx.tcx.map.local_def_id(it.id); match ccx.tcx.impl_trait_ref(impl_def_id) { Some(impl_trait_ref) => { - check_impl_items_against_trait(ccx, - it.span, - impl_def_id, - &impl_trait_ref, - impl_items); + let trait_def_id = impl_trait_ref.def_id; + + check_impl_items_against_trait(ccx, + it.span, + impl_def_id, + &impl_trait_ref, + impl_items); + check_on_unimplemented( + ccx, + &ccx.tcx.lookup_trait_def(trait_def_id).generics, + it, + ccx.tcx.item_name(trait_def_id)); } None => { } } } - hir::ItemTrait(_, ref generics, _, _) => { - check_trait_on_unimplemented(ccx, generics, it); + hir::ItemTrait(..) => { + let def_id = ccx.tcx.map.local_def_id(it.id); + let generics = &ccx.tcx.lookup_trait_def(def_id).generics; + check_on_unimplemented(ccx, generics, it, it.name); } hir::ItemStruct(..) 
=> { check_struct(ccx, it.id, it.span); @@ -795,15 +863,16 @@ fn check_trait_fn_not_const<'a,'tcx>(ccx: &CrateCtxt<'a, 'tcx>, } } -fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, - generics: &hir::Generics, - item: &hir::Item) { +fn check_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + generics: &ty::Generics, + item: &hir::Item, + name: ast::Name) { if let Some(ref attr) = item.attrs.iter().find(|a| { a.check_name("rustc_on_unimplemented") }) { if let Some(ref istring) = attr.value_str() { let parser = Parser::new(&istring); - let types = &generics.ty_params; + let types = &generics.types; for token in parser { match token { Piece::String(_) => (), // Normal string, no need to check it @@ -819,7 +888,7 @@ fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, span_err!(ccx.tcx.sess, attr.span, E0230, "there is no type parameter \ {} on trait {}", - s, item.name); + s, name); } }, // `{:1}` and `{}` are not to be used @@ -862,9 +931,9 @@ fn check_method_body<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, check_bare_fn(ccx, &sig.decl, body, id, span, fty, param_env); } -fn report_forbidden_specialization(tcx: &TyCtxt, - impl_item: &hir::ImplItem, - parent_impl: DefId) +fn report_forbidden_specialization<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + impl_item: &hir::ImplItem, + parent_impl: DefId) { let mut err = struct_span_err!( tcx.sess, impl_item.span, E0520, @@ -885,8 +954,10 @@ fn report_forbidden_specialization(tcx: &TyCtxt, err.emit(); } -fn check_specialization_validity<'tcx>(tcx: &TyCtxt<'tcx>, trait_def: &ty::TraitDef<'tcx>, - impl_id: DefId, impl_item: &hir::ImplItem) +fn check_specialization_validity<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + trait_def: &ty::TraitDef<'tcx>, + impl_id: DefId, + impl_item: &hir::ImplItem) { let ancestors = trait_def.ancestors(impl_id); @@ -948,7 +1019,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, // Find associated const definition. if let &ty::ConstTraitItem(ref trait_const) = ty_trait_item { - compare_const_impl(ccx.tcx, + compare_const_impl(ccx, &impl_const, impl_item.span, trait_const, @@ -970,7 +1041,7 @@ fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, }; if let &ty::MethodTraitItem(ref trait_method) = ty_trait_item { - compare_impl_method(ccx.tcx, + compare_impl_method(ccx, &impl_method, impl_item.span, body.id, @@ -1076,66 +1147,137 @@ fn check_impl_items_against_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, } } -fn report_cast_to_unsized_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - t_span: Span, - e_span: Span, - t_cast: Ty<'tcx>, - t_expr: Ty<'tcx>, - id: ast::NodeId) { - if t_cast.references_error() || t_expr.references_error() { - return; - } - let tstr = fcx.infcx().ty_to_string(t_cast); - let mut err = fcx.type_error_struct(span, |actual| { - format!("cast to unsized type: `{}` as `{}`", actual, tstr) - }, t_expr, None); - match t_expr.sty { - ty::TyRef(_, ty::TypeAndMut { mutbl: mt, .. }) => { - let mtstr = match mt { - hir::MutMutable => "mut ", - hir::MutImmutable => "" - }; - if t_cast.is_trait() { - match fcx.tcx().sess.codemap().span_to_snippet(t_span) { - Ok(s) => { - err.span_suggestion(t_span, - "try casting to a reference instead:", - format!("&{}{}", mtstr, s)); - }, - Err(_) => - span_help!(err, t_span, - "did you mean `&{}{}`?", mtstr, tstr), +/// Checks a constant appearing in a type. At the moment this is just the +/// length expression in a fixed-length vector, but someday it might be +/// extended to type-level numeric literals. 
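A small example of the "constant appearing in a type" case handled above: the length expression of a fixed-size array is type-checked against `usize`.

    fn main() {
        // `2 + 2` below is a constant inside a type; it is checked as `usize`.
        let buf: [u8; 2 + 2] = [0; 4];
        assert_eq!(buf.len(), 4);
    }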
+fn check_const_in_type<'a,'tcx>(ccx: &'a CrateCtxt<'a,'tcx>, + expr: &'tcx hir::Expr, + expected_type: Ty<'tcx>) { + ccx.inherited(None).enter(|inh| { + let fcx = FnCtxt::new(&inh, ty::FnConverging(expected_type), expr.id); + fcx.check_const_with_ty(expr.span, expr, expected_type); + }); +} + +fn check_const<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, + sp: Span, + e: &'tcx hir::Expr, + id: ast::NodeId) { + let param_env = ParameterEnvironment::for_item(ccx.tcx, id); + ccx.inherited(Some(param_env)).enter(|inh| { + let rty = ccx.tcx.node_id_to_type(id); + let fcx = FnCtxt::new(&inh, ty::FnConverging(rty), e.id); + let declty = fcx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(id)).ty; + fcx.check_const_with_ty(sp, e, declty); + }); +} + +/// Checks whether a type can be represented in memory. In particular, it +/// identifies types that contain themselves without indirection through a +/// pointer, which would mean their size is unbounded. +pub fn check_representable<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + sp: Span, + item_id: ast::NodeId, + _designation: &str) -> bool { + let rty = tcx.node_id_to_type(item_id); + + // Check that it is possible to represent this type. This call identifies + // (1) types that contain themselves and (2) types that contain a different + // recursive type. It is only necessary to throw an error on those that + // contain themselves. For case 2, there must be an inner type that will be + // caught by case 1. + match rty.is_representable(tcx, sp) { + Representability::SelfRecursive => { + let item_def_id = tcx.map.local_def_id(item_id); + tcx.recursive_type_with_infinite_size_error(item_def_id).emit(); + return false + } + Representability::Representable | Representability::ContainsRecursive => (), + } + return true +} + +pub fn check_simd<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, sp: Span, id: ast::NodeId) { + let t = tcx.node_id_to_type(id); + match t.sty { + ty::TyStruct(def, substs) => { + let fields = &def.struct_variant().fields; + if fields.is_empty() { + span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty"); + return; + } + let e = fields[0].ty(tcx, substs); + if !fields.iter().all(|f| f.ty(tcx, substs) == e) { + span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous"); + return; + } + match e.sty { + ty::TyParam(_) => { /* struct(T, T, T, T) is ok */ } + _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ } + _ => { + span_err!(tcx.sess, sp, E0077, + "SIMD vector element type should be machine type"); + return; } - } else { - span_help!(err, span, - "consider using an implicit coercion to `&{}{}` instead", - mtstr, tstr); } } - ty::TyBox(..) 
=> { - match fcx.tcx().sess.codemap().span_to_snippet(t_span) { - Ok(s) => { - err.span_suggestion(t_span, - "try casting to a `Box` instead:", - format!("Box<{}>", s)); - }, - Err(_) => - span_help!(err, t_span, "did you mean `Box<{}>`?", tstr), + _ => () + } +} + +#[allow(trivial_numeric_casts)] +pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, + sp: Span, + vs: &'tcx [hir::Variant], + id: ast::NodeId) { + let def_id = ccx.tcx.map.local_def_id(id); + let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny); + + if hint != attr::ReprAny && vs.is_empty() { + span_err!(ccx.tcx.sess, sp, E0084, + "unsupported representation for zero-variant enum"); + } + + ccx.inherited(None).enter(|inh| { + let rty = ccx.tcx.node_id_to_type(id); + let fcx = FnCtxt::new(&inh, ty::FnConverging(rty), id); + + let repr_type_ty = ccx.tcx.enum_repr_type(Some(&hint)).to_ty(ccx.tcx); + for v in vs { + if let Some(ref e) = v.node.disr_expr { + fcx.check_const_with_ty(e.span, e, repr_type_ty); } } - _ => { - span_help!(err, e_span, - "consider using a box or reference as appropriate"); + + let def_id = ccx.tcx.map.local_def_id(id); + + let variants = &ccx.tcx.lookup_adt_def(def_id).variants; + let mut disr_vals: Vec = Vec::new(); + for (v, variant) in vs.iter().zip(variants.iter()) { + let current_disr_val = variant.disr_val; + + // Check for duplicate discriminant values + if let Some(i) = disr_vals.iter().position(|&x| x == current_disr_val) { + let mut err = struct_span_err!(ccx.tcx.sess, v.span, E0081, + "discriminant value `{}` already exists", disr_vals[i]); + let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap(); + span_note!(&mut err, ccx.tcx.map.span(variant_i_node_id), + "conflicting discriminant here"); + err.emit(); + } + disr_vals.push(current_disr_val); } - } - err.emit(); - fcx.write_error(id); + }); + + check_representable(ccx.tcx, sp, id, "enum"); } +impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx } -impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> { - fn tcx(&self) -> &TyCtxt<'tcx> { self.ccx.tcx } + fn ast_ty_to_ty_cache(&self) -> &RefCell>> { + &self.ast_ty_to_ty_cache + } fn get_item_type_scheme(&self, _: Span, id: DefId) -> Result, ErrorReported> @@ -1155,7 +1297,7 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> { } fn get_free_substs(&self) -> Option<&Substs<'tcx>> { - Some(&self.inh.infcx.parameter_environment.free_substs) + Some(&self.parameter_environment.free_substs) } fn get_type_parameter_bounds(&self, @@ -1163,8 +1305,8 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> { node_id: ast::NodeId) -> Result>, ErrorReported> { - let def = self.tcx().type_parameter_def(node_id); - let r = self.inh.infcx.parameter_environment + let def = self.tcx.type_parameter_def(node_id); + let r = self.parameter_environment .caller_bounds .iter() .filter_map(|predicate| { @@ -1190,7 +1332,7 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> { assoc_name: ast::Name) -> bool { - let trait_def = self.ccx.tcx.lookup_trait_def(trait_def_id); + let trait_def = self.tcx().lookup_trait_def(trait_def_id); trait_def.associated_type_names.contains(&assoc_name) } @@ -1208,7 +1350,7 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> { }) }); - let ty_var = self.infcx().next_ty_var_with_default(default); + let ty_var = self.next_ty_var_with_default(default); // Finally we add the type variable to the substs match substs { @@ -1224,7 +1366,7 @@ impl<'a, 'tcx> 
AstConv<'tcx> for FnCtxt<'a, 'tcx> { -> Ty<'tcx> { let (trait_ref, _) = - self.infcx().replace_late_bound_regions_with_fresh_var( + self.replace_late_bound_regions_with_fresh_var( span, infer::LateBoundRegionConversionTime::AssocTypeProjection(item_name), &poly_trait_ref); @@ -1240,43 +1382,114 @@ impl<'a, 'tcx> AstConv<'tcx> for FnCtxt<'a, 'tcx> { { self.normalize_associated_type(span, trait_ref, item_name) } + + fn set_tainted_by_errors(&self) { + self.infcx.set_tainted_by_errors() + } +} + +impl<'a, 'gcx, 'tcx> RegionScope for FnCtxt<'a, 'gcx, 'tcx> { + fn object_lifetime_default(&self, span: Span) -> Option { + Some(self.base_object_lifetime_default(span)) + } + + fn base_object_lifetime_default(&self, span: Span) -> ty::Region { + // RFC #599 specifies that object lifetime defaults take + // precedence over other defaults. But within a fn body we + // don't have a *default* region, rather we use inference to + // find the *correct* region, which is strictly more general + // (and anyway, within a fn body the right region may not even + // be something the user can write explicitly, since it might + // be some expression). + self.next_region_var(infer::MiscVariable(span)) + } + + fn anon_regions(&self, span: Span, count: usize) + -> Result, Option>> { + Ok((0..count).map(|_| { + self.next_region_var(infer::MiscVariable(span)) + }).collect()) + } +} + +/// Whether `autoderef` requires types to resolve. +#[derive(Copy, Clone, Debug, PartialEq, Eq)] +pub enum UnresolvedTypeAction { + /// Produce an error and return `TyError` whenever a type cannot + /// be resolved (i.e. it is `TyInfer`). + Error, + /// Go on without emitting any errors, and return the unresolved + /// type. Useful for probing, e.g. in coercions. + Ignore } -impl<'a, 'tcx> FnCtxt<'a, 'tcx> { - fn tcx(&self) -> &TyCtxt<'tcx> { self.ccx.tcx } +/// Controls whether the arguments are tupled. This is used for the call +/// operator. +/// +/// Tupling means that all call-side arguments are packed into a tuple and +/// passed as a single parameter. For example, if tupling is enabled, this +/// function: +/// +/// fn f(x: (isize, isize)) +/// +/// Can be called as: +/// +/// f(1, 2); +/// +/// Instead of: +/// +/// f((1, 2)); +#[derive(Clone, Eq, PartialEq)] +enum TupleArgumentsFlag { + DontTupleArguments, + TupleArguments, +} - pub fn infcx(&self) -> &infer::InferCtxt<'a,'tcx> { - &self.inh.infcx +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + pub fn new(inh: &'a Inherited<'a, 'gcx, 'tcx>, + rty: ty::FnOutput<'tcx>, + body_id: ast::NodeId) + -> FnCtxt<'a, 'gcx, 'tcx> { + FnCtxt { + ast_ty_to_ty_cache: RefCell::new(NodeMap()), + body_id: body_id, + writeback_errors: Cell::new(false), + err_count_on_creation: inh.tcx.sess.err_count(), + ret_ty: rty, + ps: RefCell::new(UnsafetyState::function(hir::Unsafety::Normal, 0)), + inh: inh, + } } - pub fn param_env(&self) -> &ty::ParameterEnvironment<'a,'tcx> { - &self.inh.infcx.parameter_environment + pub fn param_env(&self) -> &ty::ParameterEnvironment<'tcx> { + &self.parameter_environment } pub fn sess(&self) -> &Session { - &self.tcx().sess + &self.tcx.sess } pub fn err_count_since_creation(&self) -> usize { - self.ccx.tcx.sess.err_count() - self.err_count_on_creation + self.tcx.sess.err_count() - self.err_count_on_creation } /// Resolves type variables in `ty` if possible. Unlike the infcx - /// version, this version will also select obligations if it seems - /// useful, in an effort to get more type information. 
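The tupled-argument convention described in the `TupleArgumentsFlag` comment above is the one used by the `Fn` call operator: the call site writes `f(1, 2)`, but at the trait level the arguments travel as a single tuple. A minimal sketch with a closure:

    fn apply<F: Fn(isize, isize) -> isize>(f: F) -> isize {
        // Conceptually this is `Fn::call(&f, (1, 2))`: the two call-site
        // arguments are packed into one tuple parameter.
        f(1, 2)
    }

    fn main() {
        assert_eq!(apply(|x, y| x + y), 3);
    }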
- fn resolve_type_vars_if_possible(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> { - debug!("resolve_type_vars_if_possible(ty={:?})", ty); + /// version (resolve_type_vars_if_possible), this version will + /// also select obligations if it seems useful, in an effort + /// to get more type information. + fn resolve_type_vars_with_obligations(&self, mut ty: Ty<'tcx>) -> Ty<'tcx> { + debug!("resolve_type_vars_with_obligations(ty={:?})", ty); // No TyInfer()? Nothing needs doing. if !ty.has_infer_types() { - debug!("resolve_type_vars_if_possible: ty={:?}", ty); + debug!("resolve_type_vars_with_obligations: ty={:?}", ty); return ty; } // If `ty` is a type variable, see whether we already know what it is. - ty = self.infcx().resolve_type_vars_if_possible(&ty); + ty = self.resolve_type_vars_if_possible(&ty); if !ty.has_infer_types() { - debug!("resolve_type_vars_if_possible: ty={:?}", ty); + debug!("resolve_type_vars_with_obligations: ty={:?}", ty); return ty; } @@ -1285,24 +1498,24 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // indirect dependencies that don't seem worth tracking // precisely. self.select_obligations_where_possible(); - ty = self.infcx().resolve_type_vars_if_possible(&ty); + ty = self.resolve_type_vars_if_possible(&ty); - debug!("resolve_type_vars_if_possible: ty={:?}", ty); + debug!("resolve_type_vars_with_obligations: ty={:?}", ty); ty } fn record_deferred_call_resolution(&self, closure_def_id: DefId, - r: DeferredCallResolutionHandler<'tcx>) { - let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut(); + r: DeferredCallResolutionHandler<'gcx, 'tcx>) { + let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut(); deferred_call_resolutions.entry(closure_def_id).or_insert(vec![]).push(r); } fn remove_deferred_call_resolutions(&self, closure_def_id: DefId) - -> Vec> + -> Vec> { - let mut deferred_call_resolutions = self.inh.deferred_call_resolutions.borrow_mut(); + let mut deferred_call_resolutions = self.deferred_call_resolutions.borrow_mut(); deferred_call_resolutions.remove(&closure_def_id).unwrap_or(Vec::new()) } @@ -1312,13 +1525,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } pub fn local_ty(&self, span: Span, nid: ast::NodeId) -> Ty<'tcx> { - match self.inh.locals.borrow().get(&nid) { + match self.locals.borrow().get(&nid) { Some(&t) => t, None => { - span_err!(self.tcx().sess, span, E0513, + span_err!(self.tcx.sess, span, E0513, "no type for local variable {}", nid); - self.tcx().types.err + self.tcx.types.err } } } @@ -1327,7 +1540,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { pub fn write_ty(&self, node_id: ast::NodeId, ty: Ty<'tcx>) { debug!("write_ty({}, {:?}) in fcx {}", node_id, ty, self.tag()); - self.inh.tables.borrow_mut().node_types.insert(node_id, ty); + self.tables.borrow_mut().node_types.insert(node_id, ty); } pub fn write_substs(&self, node_id: ast::NodeId, substs: ty::ItemSubsts<'tcx>) { @@ -1337,7 +1550,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { substs, self.tag()); - self.inh.tables.borrow_mut().item_substs.insert(node_id, substs); + self.tables.borrow_mut().item_substs.insert(node_id, substs); } } @@ -1363,7 +1576,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { return; } - self.inh.tables.borrow_mut().adjustments.insert(node_id, adj); + self.tables.borrow_mut().adjustments.insert(node_id, adj); } /// Basically whenever we are converting from a type scheme into @@ -1376,7 +1589,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { -> T where T : TypeFoldable<'tcx> { - let value = value.subst(self.tcx(), substs); + let value = value.subst(self.tcx, substs); let 
result = self.normalize_associated_types_in(span, &value); debug!("instantiate_type_scheme(value={:?}, substs={:?}) = {:?}", value, @@ -1414,10 +1627,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let cause = traits::ObligationCause::new(span, self.body_id, traits::ObligationCauseCode::MiscObligation); - self.inh - .fulfillment_cx + self.fulfillment_cx .borrow_mut() - .normalize_projection_type(self.infcx(), + .normalize_projection_type(self, ty::ProjectionTy { trait_ref: trait_ref, item_name: item_name, @@ -1437,10 +1649,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { debug!("instantiate_type(did={:?}, path={:?})", did, path); let type_scheme = - self.tcx().lookup_item_type(did); + self.tcx.lookup_item_type(did); let type_predicates = - self.tcx().lookup_predicates(did); - let substs = astconv::ast_path_substs_for_ty(self, self, + self.tcx.lookup_predicates(did); + let substs = AstConv::ast_path_substs_for_ty(self, self, path.span, PathParamMode::Optional, &type_scheme.generics, @@ -1466,11 +1678,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { let (adt, variant) = match def { Def::Variant(enum_id, variant_id) => { - let adt = self.tcx().lookup_adt_def(enum_id); + let adt = self.tcx.lookup_adt_def(enum_id); (adt, adt.variant_with_id(variant_id)) } Def::Struct(did) | Def::TyAlias(did) => { - let typ = self.tcx().lookup_item_type(did); + let typ = self.tcx.lookup_item_type(did); if let ty::TyStruct(adt, _) = typ.ty.sty { (adt, adt.struct_variant()) } else { @@ -1491,10 +1703,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } pub fn write_nil(&self, node_id: ast::NodeId) { - self.write_ty(node_id, self.tcx().mk_nil()); + self.write_ty(node_id, self.tcx.mk_nil()); } pub fn write_error(&self, node_id: ast::NodeId) { - self.write_ty(node_id, self.tcx().types.err); + self.write_ty(node_id, self.tcx.types.err); } pub fn require_type_meets(&self, @@ -1524,24 +1736,13 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { self.require_type_is_sized(self.expr_ty(expr), expr.span, code); } - pub fn type_is_known_to_be_sized(&self, - ty: Ty<'tcx>, - span: Span) - -> bool - { - traits::type_known_to_meet_builtin_bound(self.infcx(), - ty, - ty::BoundSized, - span) - } - pub fn register_builtin_bound(&self, ty: Ty<'tcx>, builtin_bound: ty::BuiltinBound, cause: traits::ObligationCause<'tcx>) { - self.inh.fulfillment_cx.borrow_mut() - .register_builtin_bound(self.infcx(), ty, builtin_bound, cause); + self.fulfillment_cx.borrow_mut() + .register_builtin_bound(self, ty, builtin_bound, cause); } pub fn register_predicate(&self, @@ -1549,19 +1750,19 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { { debug!("register_predicate({:?})", obligation); - self.inh.fulfillment_cx + self.fulfillment_cx .borrow_mut() - .register_predicate_obligation(self.infcx(), obligation); + .register_predicate_obligation(self, obligation); } pub fn to_ty(&self, ast_t: &hir::Ty) -> Ty<'tcx> { - let t = ast_ty_to_ty(self, self, ast_t); + let t = AstConv::ast_ty_to_ty(self, self, ast_t); self.register_wf_obligation(t, ast_t.span, traits::MiscObligation); t } pub fn expr_ty(&self, ex: &hir::Expr) -> Ty<'tcx> { - match self.inh.tables.borrow().node_types.get(&ex.id) { + match self.tables.borrow().node_types.get(&ex.id) { Some(&t) => t, None => { bug!("no type for expr in fcx {}", self.tag()); @@ -1576,21 +1777,21 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { -> Ty<'tcx> { let raw_ty = self.expr_ty(expr); - let raw_ty = self.infcx().shallow_resolve(raw_ty); - let resolve_ty = |ty: Ty<'tcx>| self.infcx().resolve_type_vars_if_possible(&ty); - raw_ty.adjust(self.tcx(), expr.span, expr.id, adjustment, 
|method_call| { - self.inh.tables.borrow().method_map.get(&method_call) + let raw_ty = self.shallow_resolve(raw_ty); + let resolve_ty = |ty: Ty<'tcx>| self.resolve_type_vars_if_possible(&ty); + raw_ty.adjust(self.tcx, expr.span, expr.id, adjustment, |method_call| { + self.tables.borrow().method_map.get(&method_call) .map(|method| resolve_ty(method.ty)) }) } pub fn node_ty(&self, id: ast::NodeId) -> Ty<'tcx> { - match self.inh.tables.borrow().node_types.get(&id) { + match self.tables.borrow().node_types.get(&id) { Some(&t) => t, - None if self.err_count_since_creation() != 0 => self.tcx().types.err, + None if self.err_count_since_creation() != 0 => self.tcx.types.err, None => { bug!("no type for node {}: {} in fcx {}", - id, self.tcx().map.node_to_string(id), + id, self.tcx.map.node_to_string(id), self.tag()); } } @@ -1604,7 +1805,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { &tables.item_substs } - Ref::map(self.inh.tables.borrow(), project_item_susbts) + Ref::map(self.tables.borrow(), project_item_susbts) } pub fn opt_node_ty_substs(&self, @@ -1612,62 +1813,12 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { f: F) where F: FnOnce(&ty::ItemSubsts<'tcx>), { - match self.inh.tables.borrow().item_substs.get(&id) { + match self.tables.borrow().item_substs.get(&id) { Some(s) => { f(s) } None => { } } } - pub fn mk_subty(&self, - a_is_expected: bool, - origin: TypeOrigin, - sub: Ty<'tcx>, - sup: Ty<'tcx>) - -> Result<(), TypeError<'tcx>> { - infer::mk_subty(self.infcx(), a_is_expected, origin, sub, sup) - // FIXME(#32730) propagate obligations - .map(|InferOk { obligations, .. }| assert!(obligations.is_empty())) - } - - pub fn mk_eqty(&self, - a_is_expected: bool, - origin: TypeOrigin, - sub: Ty<'tcx>, - sup: Ty<'tcx>) - -> Result<(), TypeError<'tcx>> { - infer::mk_eqty(self.infcx(), a_is_expected, origin, sub, sup) - // FIXME(#32730) propagate obligations - .map(|InferOk { obligations, .. }| assert!(obligations.is_empty())) - } - - pub fn mk_subr(&self, - origin: infer::SubregionOrigin<'tcx>, - sub: ty::Region, - sup: ty::Region) { - infer::mk_subr(self.infcx(), origin, sub, sup) - } - - pub fn type_error_message(&self, - sp: Span, - mk_msg: M, - actual_ty: Ty<'tcx>, - err: Option<&TypeError<'tcx>>) - where M: FnOnce(String) -> String, - { - self.infcx().type_error_message(sp, mk_msg, actual_ty, err); - } - - pub fn type_error_struct(&self, - sp: Span, - mk_msg: M, - actual_ty: Ty<'tcx>, - err: Option<&TypeError<'tcx>>) - -> DiagnosticBuilder<'tcx> - where M: FnOnce(String) -> String, - { - self.infcx().type_error_struct(sp, mk_msg, actual_ty, err) - } - /// Registers an obligation for checking later, during regionck, that the type `ty` must /// outlive the region `r`. pub fn register_region_obligation(&self, @@ -1675,7 +1826,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { region: ty::Region, cause: traits::ObligationCause<'tcx>) { - let mut fulfillment_cx = self.inh.fulfillment_cx.borrow_mut(); + let mut fulfillment_cx = self.fulfillment_cx.borrow_mut(); fulfillment_cx.register_region_obligation(ty, region, cause); } @@ -1756,11 +1907,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { -> Ty<'tcx> { self.normalize_associated_types_in(span, - &field.ty(self.tcx(), substs)) + &field.ty(self.tcx, substs)) } fn check_casts(&self) { - let mut deferred_cast_checks = self.inh.deferred_cast_checks.borrow_mut(); + let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut(); for cast in deferred_cast_checks.drain(..) 
{ cast.check(self); } @@ -1771,17 +1922,38 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { fn default_type_parameters(&self) { use rustc::ty::error::UnconstrainedNumeric::Neither; use rustc::ty::error::UnconstrainedNumeric::{UnconstrainedInt, UnconstrainedFloat}; - for ty in &self.infcx().unsolved_variables() { - let resolved = self.infcx().resolve_type_vars_if_possible(ty); - if self.infcx().type_var_diverges(resolved) { - demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil()); + + // Defaulting inference variables becomes very dubious if we have + // encountered type-checking errors. Therefore, if we think we saw + // some errors in this function, just resolve all uninstanted type + // varibles to TyError. + if self.is_tainted_by_errors() { + for ty in &self.unsolved_variables() { + if let ty::TyInfer(_) = self.shallow_resolve(ty).sty { + debug!("default_type_parameters: defaulting `{:?}` to error", ty); + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx().types.err); + } + } + return; + } + + for ty in &self.unsolved_variables() { + let resolved = self.resolve_type_vars_if_possible(ty); + if self.type_var_diverges(resolved) { + debug!("default_type_parameters: defaulting `{:?}` to `()` because it diverges", + resolved); + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil()); } else { - match self.infcx().type_is_unconstrained_numeric(resolved) { + match self.type_is_unconstrained_numeric(resolved) { UnconstrainedInt => { - demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32) + debug!("default_type_parameters: defaulting `{:?}` to `i32`", + resolved); + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.i32) }, UnconstrainedFloat => { - demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64) + debug!("default_type_parameters: defaulting `{:?}` to `f32`", + resolved); + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.f64) } Neither => { } } @@ -1790,7 +1962,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { } fn select_all_obligations_and_apply_defaults(&self) { - if self.tcx().sess.features.borrow().default_type_parameter_fallback { + if self.tcx.sess.features.borrow().default_type_parameter_fallback { self.new_select_all_obligations_and_apply_defaults(); } else { self.old_select_all_obligations_and_apply_defaults(); @@ -1810,11 +1982,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // For the time being this errs on the side of being memory wasteful but provides better // error reporting. - // let type_variables = self.infcx().type_variables.clone(); + // let type_variables = self.type_variables.clone(); // There is a possibility that this algorithm will have to run an arbitrary number of times // to terminate so we bound it by the compiler's recursion limit. - for _ in 0..self.tcx().sess.recursion_limit.get() { + for _ in 0..self.tcx.sess.recursion_limit.get() { // First we try to solve all obligations, it is possible that the last iteration // has made it possible to make more progress. self.select_obligations_where_possible(); @@ -1822,7 +1994,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let mut conflicts = Vec::new(); // Collect all unsolved type, integral and floating point variables. - let unsolved_variables = self.inh.infcx.unsolved_variables(); + let unsolved_variables = self.unsolved_variables(); // We must collect the defaults *before* we do any unification. 
Because we have // directly attached defaults to the type variables any unification that occurs @@ -1830,7 +2002,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let default_map: FnvHashMap<_, _> = unsolved_variables .iter() - .filter_map(|t| self.infcx().default(t).map(|d| (t, d))) + .filter_map(|t| self.default(t).map(|d| (t, d))) .collect(); let mut unbound_tyvars = HashSet::new(); @@ -1842,11 +2014,11 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // variables. We do this so we only apply literal fallback to type // variables without defaults. for ty in &unsolved_variables { - let resolved = self.infcx().resolve_type_vars_if_possible(ty); - if self.infcx().type_var_diverges(resolved) { - demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil()); + let resolved = self.resolve_type_vars_if_possible(ty); + if self.type_var_diverges(resolved) { + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil()); } else { - match self.infcx().type_is_unconstrained_numeric(resolved) { + match self.type_is_unconstrained_numeric(resolved) { UnconstrainedInt | UnconstrainedFloat => { unbound_tyvars.insert(resolved); }, @@ -1859,9 +2031,10 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // the type variable with a defined fallback. for ty in &unsolved_variables { if let Some(_default) = default_map.get(ty) { - let resolved = self.infcx().resolve_type_vars_if_possible(ty); + let resolved = self.resolve_type_vars_if_possible(ty); - debug!("select_all_obligations_and_apply_defaults: ty: {:?} with default: {:?}", + debug!("select_all_obligations_and_apply_defaults: \ + ty: {:?} with default: {:?}", ty, _default); match resolved.sty { @@ -1898,24 +2071,24 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // for conflicts and correctly report them. - let _ = self.infcx().commit_if_ok(|_: &infer::CombinedSnapshot| { + let _ = self.commit_if_ok(|_: &infer::CombinedSnapshot| { for ty in &unbound_tyvars { - if self.infcx().type_var_diverges(ty) { - demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil()); + if self.type_var_diverges(ty) { + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil()); } else { - match self.infcx().type_is_unconstrained_numeric(ty) { + match self.type_is_unconstrained_numeric(ty) { UnconstrainedInt => { - demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32) + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.i32) }, UnconstrainedFloat => { - demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64) + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.f64) } Neither => { if let Some(default) = default_map.get(ty) { let default = default.clone(); - match infer::mk_eqty(self.infcx(), false, - TypeOrigin::Misc(default.origin_span), - ty, default.ty) { + match self.eq_types(false, + TypeOrigin::Misc(default.origin_span), + ty, default.ty) { Ok(InferOk { obligations, .. }) => { // FIXME(#32730) propagate obligations assert!(obligations.is_empty()) @@ -1945,9 +2118,9 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { let conflicting_default = self.find_conflicting_default(&unbound_tyvars, &default_map, conflict) .unwrap_or(type_variable::Default { - ty: self.infcx().next_ty_var(), + ty: self.next_ty_var(), origin_span: codemap::DUMMY_SP, - def_id: self.tcx().map.local_def_id(0) // what do I put here? + def_id: self.tcx.map.local_def_id(0) // what do I put here? 
}); // This is to ensure that we elimnate any non-determinism from the error @@ -1961,7 +2134,7 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { }; - self.infcx().report_conflicting_default_types( + self.report_conflicting_default_types( first_default.origin_span, first_default, second_default) @@ -1996,22 +2169,22 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // We also run this inside snapshot that never commits so we can do error // reporting for more then one conflict. for ty in &unbound_tyvars { - if self.infcx().type_var_diverges(ty) { - demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().mk_nil()); + if self.type_var_diverges(ty) { + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.mk_nil()); } else { - match self.infcx().type_is_unconstrained_numeric(ty) { + match self.type_is_unconstrained_numeric(ty) { UnconstrainedInt => { - demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.i32) + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.i32) }, UnconstrainedFloat => { - demand::eqtype(self, codemap::DUMMY_SP, *ty, self.tcx().types.f64) + self.demand_eqtype(codemap::DUMMY_SP, *ty, self.tcx.types.f64) }, Neither => { if let Some(default) = default_map.get(ty) { let default = default.clone(); - match infer::mk_eqty(self.infcx(), false, - TypeOrigin::Misc(default.origin_span), - ty, default.ty) { + match self.eq_types(false, + TypeOrigin::Misc(default.origin_span), + ty, default.ty) { // FIXME(#32730) propagate obligations Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()), Err(_) => { @@ -2032,886 +2205,789 @@ impl<'a, 'tcx> FnCtxt<'a, 'tcx> { // upvar inference should have ensured that all deferred call // resolutions are handled by now. - assert!(self.inh.deferred_call_resolutions.borrow().is_empty()); + assert!(self.deferred_call_resolutions.borrow().is_empty()); self.select_all_obligations_and_apply_defaults(); - let mut fulfillment_cx = self.inh.fulfillment_cx.borrow_mut(); - match fulfillment_cx.select_all_or_error(self.infcx()) { + let mut fulfillment_cx = self.fulfillment_cx.borrow_mut(); + match fulfillment_cx.select_all_or_error(self) { Ok(()) => { } - Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); } + Err(errors) => { self.report_fulfillment_errors(&errors); } + } + + if let Err(ref errors) = fulfillment_cx.select_rfc1592_obligations(self) { + self.report_fulfillment_errors_as_warnings(errors, self.body_id); } } /// Select as many obligations as we can at present. fn select_obligations_where_possible(&self) { - match - self.inh.fulfillment_cx - .borrow_mut() - .select_where_possible(self.infcx()) - { + match self.fulfillment_cx.borrow_mut().select_where_possible(self) { Ok(()) => { } - Err(errors) => { report_fulfillment_errors(self.infcx(), &errors); } + Err(errors) => { self.report_fulfillment_errors(&errors); } } } -} - -impl<'a, 'tcx> RegionScope for FnCtxt<'a, 'tcx> { - fn object_lifetime_default(&self, span: Span) -> Option { - Some(self.base_object_lifetime_default(span)) - } - fn base_object_lifetime_default(&self, span: Span) -> ty::Region { - // RFC #599 specifies that object lifetime defaults take - // precedence over other defaults. But within a fn body we - // don't have a *default* region, rather we use inference to - // find the *correct* region, which is strictly more general - // (and anyway, within a fn body the right region may not even - // be something the user can write explicitly, since it might - // be some expression). 
- self.infcx().next_region_var(infer::MiscVariable(span)) - } + /// Executes an autoderef loop for the type `t`. At each step, invokes `should_stop` + /// to decide whether to terminate the loop. Returns the final type and number of + /// derefs that it performed. + /// + /// Note: this method does not modify the adjustments table. The caller is responsible for + /// inserting an AutoAdjustment record into the `self` using one of the suitable methods. + pub fn autoderef<'b, E, I, T, F>(&self, + sp: Span, + base_ty: Ty<'tcx>, + maybe_exprs: E, + unresolved_type_action: UnresolvedTypeAction, + mut lvalue_pref: LvaluePreference, + mut should_stop: F) + -> (Ty<'tcx>, usize, Option) + // FIXME(eddyb) use copyable iterators when that becomes ergonomic. + where E: Fn() -> I, + I: IntoIterator, + F: FnMut(Ty<'tcx>, usize) -> Option, + { + debug!("autoderef(base_ty={:?}, lvalue_pref={:?})", + base_ty, lvalue_pref); + + let mut t = base_ty; + for autoderefs in 0..self.tcx.sess.recursion_limit.get() { + let resolved_t = match unresolved_type_action { + UnresolvedTypeAction::Error => { + self.structurally_resolved_type(sp, t) + } + UnresolvedTypeAction::Ignore => { + // We can continue even when the type cannot be resolved + // (i.e. it is an inference variable) because `Ty::builtin_deref` + // and `try_overloaded_deref` both simply return `None` + // in such a case without producing spurious errors. + self.resolve_type_vars_if_possible(&t) + } + }; + if resolved_t.references_error() { + return (resolved_t, autoderefs, None); + } - fn anon_regions(&self, span: Span, count: usize) - -> Result, Option>> { - Ok((0..count).map(|_| { - self.infcx().next_region_var(infer::MiscVariable(span)) - }).collect()) - } -} + match should_stop(resolved_t, autoderefs) { + Some(x) => return (resolved_t, autoderefs, Some(x)), + None => {} + } -/// Whether `autoderef` requires types to resolve. -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub enum UnresolvedTypeAction { - /// Produce an error and return `TyError` whenever a type cannot - /// be resolved (i.e. it is `TyInfer`). - Error, - /// Go on without emitting any errors, and return the unresolved - /// type. Useful for probing, e.g. in coercions. - Ignore -} + // Otherwise, deref if type is derefable: + + // Super subtle: it might seem as though we should + // pass `opt_expr` to `try_overloaded_deref`, so that + // the (implicit) autoref of using an overloaded deref + // would get added to the adjustment table. However we + // do not do that, because it's kind of a + // "meta-adjustment" -- instead, we just leave it + // unrecorded and know that there "will be" an + // autoref. regionck and other bits of the code base, + // when they encounter an overloaded autoderef, have + // to do some reconstructive surgery. This is a pretty + // complex mess that is begging for a proper MIR. + let mt = if let Some(mt) = resolved_t.builtin_deref(false, lvalue_pref) { + mt + } else if let Some(method) = self.try_overloaded_deref(sp, None, + resolved_t, lvalue_pref) { + for expr in maybe_exprs() { + let method_call = MethodCall::autoderef(expr.id, autoderefs as u32); + self.tables.borrow_mut().method_map.insert(method_call, method); + } + self.make_overloaded_lvalue_return_type(method) + } else { + return (resolved_t, autoderefs, None); + }; -/// Executes an autoderef loop for the type `t`. At each step, invokes `should_stop` to decide -/// whether to terminate the loop. Returns the final type and number of derefs that it performed. 
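The fallback applied by `default_type_parameters` above is visible in ordinary code: an otherwise unconstrained integer literal defaults to `i32`, and an unconstrained float literal to `f64`.

    fn main() {
        let x = 3;      // no other constraints: falls back to i32
        let y = 2.5;    // no other constraints: falls back to f64
        assert_eq!(std::mem::size_of_val(&x), 4);
        assert_eq!(std::mem::size_of_val(&y), 8);
    }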
-/// -/// Note: this method does not modify the adjustments table. The caller is responsible for -/// inserting an AutoAdjustment record into the `fcx` using one of the suitable methods. -pub fn autoderef<'a, 'b, 'tcx, E, I, T, F>(fcx: &FnCtxt<'a, 'tcx>, - sp: Span, - base_ty: Ty<'tcx>, - maybe_exprs: E, - unresolved_type_action: UnresolvedTypeAction, - mut lvalue_pref: LvaluePreference, - mut should_stop: F) - -> (Ty<'tcx>, usize, Option) - // FIXME(eddyb) use copyable iterators when that becomes ergonomic. - where E: Fn() -> I, - I: IntoIterator, - F: FnMut(Ty<'tcx>, usize) -> Option, -{ - debug!("autoderef(base_ty={:?}, lvalue_pref={:?})", - base_ty, lvalue_pref); - - let mut t = base_ty; - for autoderefs in 0..fcx.tcx().sess.recursion_limit.get() { - let resolved_t = match unresolved_type_action { - UnresolvedTypeAction::Error => { - structurally_resolved_type(fcx, sp, t) - } - UnresolvedTypeAction::Ignore => { - // We can continue even when the type cannot be resolved - // (i.e. it is an inference variable) because `Ty::builtin_deref` - // and `try_overloaded_deref` both simply return `None` - // in such a case without producing spurious errors. - fcx.infcx().resolve_type_vars_if_possible(&t) + t = mt.ty; + if mt.mutbl == hir::MutImmutable { + lvalue_pref = NoPreference; } - }; - if resolved_t.references_error() { - return (resolved_t, autoderefs, None); } - match should_stop(resolved_t, autoderefs) { - Some(x) => return (resolved_t, autoderefs, Some(x)), - None => {} - } + // We've reached the recursion limit, error gracefully. + span_err!(self.tcx.sess, sp, E0055, + "reached the recursion limit while auto-dereferencing {:?}", + base_ty); + (self.tcx.types.err, 0, None) + } - // Otherwise, deref if type is derefable: - - // Super subtle: it might seem as though we should - // pass `opt_expr` to `try_overloaded_deref`, so that - // the (implicit) autoref of using an overloaded deref - // would get added to the adjustment table. However we - // do not do that, because it's kind of a - // "meta-adjustment" -- instead, we just leave it - // unrecorded and know that there "will be" an - // autoref. regionck and other bits of the code base, - // when they encounter an overloaded autoderef, have - // to do some reconstructive surgery. This is a pretty - // complex mess that is begging for a proper MIR. - let mt = if let Some(mt) = resolved_t.builtin_deref(false, lvalue_pref) { - mt - } else if let Some(method) = try_overloaded_deref(fcx, sp, None, - resolved_t, lvalue_pref) { - for expr in maybe_exprs() { - let method_call = MethodCall::autoderef(expr.id, autoderefs as u32); - fcx.inh.tables.borrow_mut().method_map.insert(method_call, method); + fn try_overloaded_deref(&self, + span: Span, + base_expr: Option<&hir::Expr>, + base_ty: Ty<'tcx>, + lvalue_pref: LvaluePreference) + -> Option> + { + // Try DerefMut first, if preferred. + let method = match (lvalue_pref, self.tcx.lang_items.deref_mut_trait()) { + (PreferMutLvalue, Some(trait_did)) => { + self.lookup_method_in_trait(span, base_expr, + token::intern("deref_mut"), trait_did, + base_ty, None) } - make_overloaded_lvalue_return_type(fcx.tcx(), method) - } else { - return (resolved_t, autoderefs, None); + _ => None }; - t = mt.ty; - if mt.mutbl == hir::MutImmutable { - lvalue_pref = NoPreference; - } + // Otherwise, fall back to Deref. 
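A concrete instance of the autoderef loop documented above: method lookup on a `Box<Vec<i32>>` receiver derefs step by step until a matching method is found.

    fn main() {
        let v: Box<Vec<i32>> = Box::new(vec![1, 2, 3]);
        // `first` is defined on the slice type [i32]; method lookup autoderefs
        // Box<Vec<i32>> -> Vec<i32> -> [i32] to find it.
        assert_eq!(v.first(), Some(&1));
    }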
+ let method = match (method, self.tcx.lang_items.deref_trait()) { + (None, Some(trait_did)) => { + self.lookup_method_in_trait(span, base_expr, + token::intern("deref"), trait_did, + base_ty, None) + } + (method, _) => method + }; + + method } - // We've reached the recursion limit, error gracefully. - span_err!(fcx.tcx().sess, sp, E0055, - "reached the recursion limit while auto-dereferencing {:?}", - base_ty); - (fcx.tcx().types.err, 0, None) -} + /// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait + /// returns a type of `&T`, but the actual type we assign to the + /// *expression* is `T`. So this function just peels off the return + /// type by one layer to yield `T`. + fn make_overloaded_lvalue_return_type(&self, + method: MethodCallee<'tcx>) + -> ty::TypeAndMut<'tcx> + { + // extract method return type, which will be &T; + // all LB regions should have been instantiated during method lookup + let ret_ty = method.ty.fn_ret(); + let ret_ty = self.tcx.no_late_bound_regions(&ret_ty).unwrap().unwrap(); + + // method returns &T, but the type as visible to user is T, so deref + ret_ty.builtin_deref(true, NoPreference).unwrap() + } + + fn lookup_indexing(&self, + expr: &hir::Expr, + base_expr: &'gcx hir::Expr, + base_ty: Ty<'tcx>, + idx_ty: Ty<'tcx>, + lvalue_pref: LvaluePreference) + -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> + { + // FIXME(#18741) -- this is almost but not quite the same as the + // autoderef that normal method probing does. They could likely be + // consolidated. + + let (ty, autoderefs, final_mt) = self.autoderef(base_expr.span, + base_ty, + || Some(base_expr), + UnresolvedTypeAction::Error, + lvalue_pref, + |adj_ty, idx| { + self.try_index_step(MethodCall::expr(expr.id), expr, base_expr, + adj_ty, idx, false, lvalue_pref, idx_ty) + }); -fn try_overloaded_deref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, - base_expr: Option<&hir::Expr>, - base_ty: Ty<'tcx>, - lvalue_pref: LvaluePreference) - -> Option> -{ - // Try DerefMut first, if preferred. - let method = match (lvalue_pref, fcx.tcx().lang_items.deref_mut_trait()) { - (PreferMutLvalue, Some(trait_did)) => { - method::lookup_in_trait(fcx, span, base_expr, - token::intern("deref_mut"), trait_did, - base_ty, None) + if final_mt.is_some() { + return final_mt; } - _ => None - }; - // Otherwise, fall back to Deref. - let method = match (method, fcx.tcx().lang_items.deref_trait()) { - (None, Some(trait_did)) => { - method::lookup_in_trait(fcx, span, base_expr, - token::intern("deref"), trait_did, - base_ty, None) + // After we have fully autoderef'd, if the resulting type is [T; n], then + // do a final unsized coercion to yield [T]. + if let ty::TyArray(element_ty, _) = ty.sty { + let adjusted_ty = self.tcx.mk_slice(element_ty); + self.try_index_step(MethodCall::expr(expr.id), expr, base_expr, + adjusted_ty, autoderefs, true, lvalue_pref, idx_ty) + } else { + None } - (method, _) => method - }; - - method -} - -/// For the overloaded lvalue expressions (`*x`, `x[3]`), the trait returns a type of `&T`, but the -/// actual type we assign to the *expression* is `T`. So this function just peels off the return -/// type by one layer to yield `T`. 
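The preference order above (try `DerefMut` when a mutable lvalue is wanted, otherwise fall back to `Deref`) can be observed with any type implementing both traits; a small sketch with a hypothetical `Wrapper` type:

    use std::ops::{Deref, DerefMut};

    struct Wrapper(i32);

    impl Deref for Wrapper {
        type Target = i32;
        fn deref(&self) -> &i32 { &self.0 }
    }

    impl DerefMut for Wrapper {
        fn deref_mut(&mut self) -> &mut i32 { &mut self.0 }
    }

    fn main() {
        let mut w = Wrapper(1);
        // Assignment needs a mutable lvalue, so `deref_mut` is tried first...
        *w = 5;
        // ...while a plain read goes through `deref`.
        assert_eq!(*w, 5);
    }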
-fn make_overloaded_lvalue_return_type<'tcx>(tcx: &TyCtxt<'tcx>, - method: MethodCallee<'tcx>) - -> ty::TypeAndMut<'tcx> -{ - // extract method return type, which will be &T; - // all LB regions should have been instantiated during method lookup - let ret_ty = method.ty.fn_ret(); - let ret_ty = tcx.no_late_bound_regions(&ret_ty).unwrap().unwrap(); - - // method returns &T, but the type as visible to user is T, so deref - ret_ty.builtin_deref(true, NoPreference).unwrap() -} - -fn lookup_indexing<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &hir::Expr, - base_expr: &'tcx hir::Expr, - base_ty: Ty<'tcx>, - idx_ty: Ty<'tcx>, - lvalue_pref: LvaluePreference) - -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> -{ - // FIXME(#18741) -- this is almost but not quite the same as the - // autoderef that normal method probing does. They could likely be - // consolidated. - - let (ty, autoderefs, final_mt) = autoderef(fcx, - base_expr.span, - base_ty, - || Some(base_expr), - UnresolvedTypeAction::Error, - lvalue_pref, - |adj_ty, idx| { - try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr, - adj_ty, idx, false, lvalue_pref, idx_ty) - }); - - if final_mt.is_some() { - return final_mt; - } - - // After we have fully autoderef'd, if the resulting type is [T; n], then - // do a final unsized coercion to yield [T]. - if let ty::TyArray(element_ty, _) = ty.sty { - let adjusted_ty = fcx.tcx().mk_slice(element_ty); - try_index_step(fcx, MethodCall::expr(expr.id), expr, base_expr, - adjusted_ty, autoderefs, true, lvalue_pref, idx_ty) - } else { - None } -} -/// To type-check `base_expr[index_expr]`, we progressively autoderef (and otherwise adjust) -/// `base_expr`, looking for a type which either supports builtin indexing or overloaded indexing. -/// This loop implements one step in that search; the autoderef loop is implemented by -/// `lookup_indexing`. -fn try_index_step<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - method_call: MethodCall, - expr: &hir::Expr, - base_expr: &'tcx hir::Expr, - adjusted_ty: Ty<'tcx>, - autoderefs: usize, - unsize: bool, - lvalue_pref: LvaluePreference, - index_ty: Ty<'tcx>) - -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> -{ - let tcx = fcx.tcx(); - debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \ - autoderefs={}, unsize={}, index_ty={:?})", - expr, - base_expr, - adjusted_ty, - autoderefs, - unsize, - index_ty); - - let input_ty = fcx.infcx().next_ty_var(); - - // First, try built-in indexing. - match (adjusted_ty.builtin_index(), &index_ty.sty) { - (Some(ty), &ty::TyUint(ast::UintTy::Us)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => { - debug!("try_index_step: success, using built-in indexing"); - // If we had `[T; N]`, we should've caught it before unsizing to `[T]`. - assert!(!unsize); - fcx.write_autoderef_adjustment(base_expr.id, autoderefs); - return Some((tcx.types.usize, ty)); - } - _ => {} - } - - // Try `IndexMut` first, if preferred. - let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) { - (PreferMutLvalue, Some(trait_did)) => { - method::lookup_in_trait_adjusted(fcx, - expr.span, - Some(&base_expr), - token::intern("index_mut"), - trait_did, - autoderefs, - unsize, - adjusted_ty, - Some(vec![input_ty])) - } - _ => None, - }; - - // Otherwise, fall back to `Index`. 
- let method = match (method, tcx.lang_items.index_trait()) { - (None, Some(trait_did)) => { - method::lookup_in_trait_adjusted(fcx, - expr.span, - Some(&base_expr), - token::intern("index"), - trait_did, - autoderefs, - unsize, - adjusted_ty, - Some(vec![input_ty])) + /// To type-check `base_expr[index_expr]`, we progressively autoderef + /// (and otherwise adjust) `base_expr`, looking for a type which either + /// supports builtin indexing or overloaded indexing. + /// This loop implements one step in that search; the autoderef loop + /// is implemented by `lookup_indexing`. + fn try_index_step(&self, + method_call: MethodCall, + expr: &hir::Expr, + base_expr: &'gcx hir::Expr, + adjusted_ty: Ty<'tcx>, + autoderefs: usize, + unsize: bool, + lvalue_pref: LvaluePreference, + index_ty: Ty<'tcx>) + -> Option<(/*index type*/ Ty<'tcx>, /*element type*/ Ty<'tcx>)> + { + let tcx = self.tcx; + debug!("try_index_step(expr={:?}, base_expr.id={:?}, adjusted_ty={:?}, \ + autoderefs={}, unsize={}, index_ty={:?})", + expr, + base_expr, + adjusted_ty, + autoderefs, + unsize, + index_ty); + + let input_ty = self.next_ty_var(); + + // First, try built-in indexing. + match (adjusted_ty.builtin_index(), &index_ty.sty) { + (Some(ty), &ty::TyUint(ast::UintTy::Us)) | (Some(ty), &ty::TyInfer(ty::IntVar(_))) => { + debug!("try_index_step: success, using built-in indexing"); + // If we had `[T; N]`, we should've caught it before unsizing to `[T]`. + assert!(!unsize); + self.write_autoderef_adjustment(base_expr.id, autoderefs); + return Some((tcx.types.usize, ty)); + } + _ => {} } - (method, _) => method, - }; - - // If some lookup succeeds, write callee into table and extract index/element - // type from the method signature. - // If some lookup succeeded, install method in table - method.map(|method| { - debug!("try_index_step: success, using overloaded indexing"); - fcx.inh.tables.borrow_mut().method_map.insert(method_call, method); - (input_ty, make_overloaded_lvalue_return_type(fcx.tcx(), method).ty) - }) -} -fn check_method_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - sp: Span, - method_fn_ty: Ty<'tcx>, - callee_expr: &'tcx hir::Expr, - args_no_rcvr: &'tcx [P], - tuple_arguments: TupleArgumentsFlag, - expected: Expectation<'tcx>) - -> ty::FnOutput<'tcx> { - if method_fn_ty.references_error() { - let err_inputs = err_args(fcx.tcx(), args_no_rcvr.len()); - - let err_inputs = match tuple_arguments { - DontTupleArguments => err_inputs, - TupleArguments => vec![fcx.tcx().mk_tup(err_inputs)], + // Try `IndexMut` first, if preferred. + let method = match (lvalue_pref, tcx.lang_items.index_mut_trait()) { + (PreferMutLvalue, Some(trait_did)) => { + self.lookup_method_in_trait_adjusted(expr.span, + Some(&base_expr), + token::intern("index_mut"), + trait_did, + autoderefs, + unsize, + adjusted_ty, + Some(vec![input_ty])) + } + _ => None, }; - check_argument_types(fcx, - sp, - &err_inputs[..], - &[], - args_no_rcvr, - false, - tuple_arguments); - ty::FnConverging(fcx.tcx().types.err) - } else { - match method_fn_ty.sty { - ty::TyFnDef(_, _, ref fty) => { - // HACK(eddyb) ignore self in the definition (see above). - let expected_arg_tys = expected_types_for_fn_args(fcx, - sp, - expected, - fty.sig.0.output, - &fty.sig.0.inputs[1..]); - check_argument_types(fcx, - sp, - &fty.sig.0.inputs[1..], - &expected_arg_tys[..], - args_no_rcvr, - fty.sig.0.variadic, - tuple_arguments); - fty.sig.0.output + // Otherwise, fall back to `Index`. 
+ let method = match (method, tcx.lang_items.index_trait()) { + (None, Some(trait_did)) => { + self.lookup_method_in_trait_adjusted(expr.span, + Some(&base_expr), + token::intern("index"), + trait_did, + autoderefs, + unsize, + adjusted_ty, + Some(vec![input_ty])) } - _ => { - span_bug!(callee_expr.span, "method without bare fn type"); + (method, _) => method, + }; + + // If some lookup succeeds, write callee into table and extract index/element + // type from the method signature. + // If some lookup succeeded, install method in table + method.map(|method| { + debug!("try_index_step: success, using overloaded indexing"); + self.tables.borrow_mut().method_map.insert(method_call, method); + (input_ty, self.make_overloaded_lvalue_return_type(method).ty) + }) + } + + fn check_method_argument_types(&self, + sp: Span, + method_fn_ty: Ty<'tcx>, + callee_expr: &'gcx hir::Expr, + args_no_rcvr: &'gcx [P], + tuple_arguments: TupleArgumentsFlag, + expected: Expectation<'tcx>) + -> ty::FnOutput<'tcx> { + if method_fn_ty.references_error() { + let err_inputs = self.err_args(args_no_rcvr.len()); + + let err_inputs = match tuple_arguments { + DontTupleArguments => err_inputs, + TupleArguments => vec![self.tcx.mk_tup(err_inputs)], + }; + + self.check_argument_types(sp, &err_inputs[..], &[], args_no_rcvr, + false, tuple_arguments); + ty::FnConverging(self.tcx.types.err) + } else { + match method_fn_ty.sty { + ty::TyFnDef(_, _, ref fty) => { + // HACK(eddyb) ignore self in the definition (see above). + let expected_arg_tys = self.expected_types_for_fn_args(sp, expected, + fty.sig.0.output, + &fty.sig.0.inputs[1..]); + self.check_argument_types(sp, &fty.sig.0.inputs[1..], &expected_arg_tys[..], + args_no_rcvr, fty.sig.0.variadic, tuple_arguments); + fty.sig.0.output + } + _ => { + span_bug!(callee_expr.span, "method without bare fn type"); + } } } } -} -/// Generic function that factors out common logic from function calls, method calls and overloaded -/// operators. -fn check_argument_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - sp: Span, - fn_inputs: &[Ty<'tcx>], - expected_arg_tys: &[Ty<'tcx>], - args: &'tcx [P], - variadic: bool, - tuple_arguments: TupleArgumentsFlag) { - let tcx = fcx.ccx.tcx; - - // Grab the argument types, supplying fresh type variables - // if the wrong number of arguments were supplied - let supplied_arg_count = if tuple_arguments == DontTupleArguments { - args.len() - } else { - 1 - }; + /// Generic function that factors out common logic from function calls, + /// method calls and overloaded operators. + fn check_argument_types(&self, + sp: Span, + fn_inputs: &[Ty<'tcx>], + expected_arg_tys: &[Ty<'tcx>], + args: &'gcx [P], + variadic: bool, + tuple_arguments: TupleArgumentsFlag) { + let tcx = self.tcx; + + // Grab the argument types, supplying fresh type variables + // if the wrong number of arguments were supplied + let supplied_arg_count = if tuple_arguments == DontTupleArguments { + args.len() + } else { + 1 + }; - // All the input types from the fn signature must outlive the call - // so as to validate implied bounds. 
- for &fn_input_ty in fn_inputs { - fcx.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation); - } - - let mut expected_arg_tys = expected_arg_tys; - let expected_arg_count = fn_inputs.len(); - let formal_tys = if tuple_arguments == TupleArguments { - let tuple_type = structurally_resolved_type(fcx, sp, fn_inputs[0]); - match tuple_type.sty { - ty::TyTuple(ref arg_types) => { - if arg_types.len() != args.len() { - span_err!(tcx.sess, sp, E0057, - "this function takes {} parameter{} but {} parameter{} supplied", - arg_types.len(), - if arg_types.len() == 1 {""} else {"s"}, - args.len(), - if args.len() == 1 {" was"} else {"s were"}); + // All the input types from the fn signature must outlive the call + // so as to validate implied bounds. + for &fn_input_ty in fn_inputs { + self.register_wf_obligation(fn_input_ty, sp, traits::MiscObligation); + } + + let mut expected_arg_tys = expected_arg_tys; + let expected_arg_count = fn_inputs.len(); + let formal_tys = if tuple_arguments == TupleArguments { + let tuple_type = self.structurally_resolved_type(sp, fn_inputs[0]); + match tuple_type.sty { + ty::TyTuple(arg_types) => { + if arg_types.len() != args.len() { + span_err!(tcx.sess, sp, E0057, + "this function takes {} parameter{} but {} parameter{} supplied", + arg_types.len(), + if arg_types.len() == 1 {""} else {"s"}, + args.len(), + if args.len() == 1 {" was"} else {"s were"}); + expected_arg_tys = &[]; + self.err_args(args.len()) + } else { + expected_arg_tys = match expected_arg_tys.get(0) { + Some(&ty) => match ty.sty { + ty::TyTuple(ref tys) => &tys, + _ => &[] + }, + None => &[] + }; + arg_types.to_vec() + } + } + _ => { + span_err!(tcx.sess, sp, E0059, + "cannot use call notation; the first type parameter \ + for the function trait is neither a tuple nor unit"); expected_arg_tys = &[]; - err_args(fcx.tcx(), args.len()) - } else { - expected_arg_tys = match expected_arg_tys.get(0) { - Some(&ty) => match ty.sty { - ty::TyTuple(ref tys) => &tys, - _ => &[] - }, - None => &[] - }; - (*arg_types).clone() + self.err_args(args.len()) } } - _ => { - span_err!(tcx.sess, sp, E0059, - "cannot use call notation; the first type parameter \ - for the function trait is neither a tuple nor unit"); + } else if expected_arg_count == supplied_arg_count { + fn_inputs.to_vec() + } else if variadic { + if supplied_arg_count >= expected_arg_count { + fn_inputs.to_vec() + } else { + span_err!(tcx.sess, sp, E0060, + "this function takes at least {} parameter{} \ + but {} parameter{} supplied", + expected_arg_count, + if expected_arg_count == 1 {""} else {"s"}, + supplied_arg_count, + if supplied_arg_count == 1 {" was"} else {"s were"}); expected_arg_tys = &[]; - err_args(fcx.tcx(), args.len()) + self.err_args(supplied_arg_count) } - } - } else if expected_arg_count == supplied_arg_count { - fn_inputs.to_vec() - } else if variadic { - if supplied_arg_count >= expected_arg_count { - fn_inputs.to_vec() } else { - span_err!(tcx.sess, sp, E0060, - "this function takes at least {} parameter{} \ - but {} parameter{} supplied", + span_err!(tcx.sess, sp, E0061, + "this function takes {} parameter{} but {} parameter{} supplied", expected_arg_count, if expected_arg_count == 1 {""} else {"s"}, supplied_arg_count, if supplied_arg_count == 1 {" was"} else {"s were"}); expected_arg_tys = &[]; - err_args(fcx.tcx(), supplied_arg_count) - } - } else { - span_err!(tcx.sess, sp, E0061, - "this function takes {} parameter{} but {} parameter{} supplied", - expected_arg_count, - if expected_arg_count == 1 {""} else {"s"}, - 
supplied_arg_count, - if supplied_arg_count == 1 {" was"} else {"s were"}); - expected_arg_tys = &[]; - err_args(fcx.tcx(), supplied_arg_count) - }; - - debug!("check_argument_types: formal_tys={:?}", - formal_tys.iter().map(|t| fcx.infcx().ty_to_string(*t)).collect::>()); - - // Check the arguments. - // We do this in a pretty awful way: first we typecheck any arguments - // that are not anonymous functions, then we typecheck the anonymous - // functions. This is so that we have more information about the types - // of arguments when we typecheck the functions. This isn't really the - // right way to do this. - let xs = [false, true]; - let mut any_diverges = false; // has any of the arguments diverged? - let mut warned = false; // have we already warned about unreachable code? - for check_blocks in &xs { - let check_blocks = *check_blocks; - debug!("check_blocks={}", check_blocks); - - // More awful hacks: before we check argument types, try to do - // an "opportunistic" vtable resolution of any trait bounds on - // the call. This helps coercions. - if check_blocks { - fcx.select_obligations_where_possible(); - } - - // For variadic functions, we don't have a declared type for all of - // the arguments hence we only do our usual type checking with - // the arguments who's types we do know. - let t = if variadic { - expected_arg_count - } else if tuple_arguments == TupleArguments { - args.len() - } else { - supplied_arg_count + self.err_args(supplied_arg_count) }; - for (i, arg) in args.iter().take(t).enumerate() { - if any_diverges && !warned { - fcx.ccx - .tcx - .sess - .add_lint(lint::builtin::UNREACHABLE_CODE, - arg.id, - arg.span, - "unreachable expression".to_string()); - warned = true; + + debug!("check_argument_types: formal_tys={:?}", + formal_tys.iter().map(|t| self.ty_to_string(*t)).collect::>()); + + // Check the arguments. + // We do this in a pretty awful way: first we typecheck any arguments + // that are not anonymous functions, then we typecheck the anonymous + // functions. This is so that we have more information about the types + // of arguments when we typecheck the functions. This isn't really the + // right way to do this. + let xs = [false, true]; + let mut any_diverges = false; // has any of the arguments diverged? + let mut warned = false; // have we already warned about unreachable code? + for check_blocks in &xs { + let check_blocks = *check_blocks; + debug!("check_blocks={}", check_blocks); + + // More awful hacks: before we check argument types, try to do + // an "opportunistic" vtable resolution of any trait bounds on + // the call. This helps coercions. + if check_blocks { + self.select_obligations_where_possible(); } - let is_block = match arg.node { - hir::ExprClosure(..) => true, - _ => false + + // For variadic functions, we don't have a declared type for all of + // the arguments hence we only do our usual type checking with + // the arguments who's types we do know. + let t = if variadic { + expected_arg_count + } else if tuple_arguments == TupleArguments { + args.len() + } else { + supplied_arg_count }; + for (i, arg) in args.iter().take(t).enumerate() { + if any_diverges && !warned { + self.tcx + .sess + .add_lint(lint::builtin::UNREACHABLE_CODE, + arg.id, + arg.span, + "unreachable expression".to_string()); + warned = true; + } + let is_block = match arg.node { + hir::ExprClosure(..) 
=> true, + _ => false + }; - if is_block == check_blocks { - debug!("checking the argument"); - let formal_ty = formal_tys[i]; + if is_block == check_blocks { + debug!("checking the argument"); + let formal_ty = formal_tys[i]; - // The special-cased logic below has three functions: - // 1. Provide as good of an expected type as possible. - let expected = expected_arg_tys.get(i).map(|&ty| { - Expectation::rvalue_hint(fcx.tcx(), ty) - }); + // The special-cased logic below has three functions: + // 1. Provide as good of an expected type as possible. + let expected = expected_arg_tys.get(i).map(|&ty| { + Expectation::rvalue_hint(self, ty) + }); - check_expr_with_expectation(fcx, &arg, - expected.unwrap_or(ExpectHasType(formal_ty))); - // 2. Coerce to the most detailed type that could be coerced - // to, which is `expected_ty` if `rvalue_hint` returns an - // `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise. - let coerce_ty = expected.and_then(|e| e.only_has_type(fcx)); - demand::coerce(fcx, arg.span, coerce_ty.unwrap_or(formal_ty), &arg); - - // 3. Relate the expected type and the formal one, - // if the expected type was used for the coercion. - coerce_ty.map(|ty| demand::suptype(fcx, arg.span, formal_ty, ty)); - } + self.check_expr_with_expectation(&arg, + expected.unwrap_or(ExpectHasType(formal_ty))); + // 2. Coerce to the most detailed type that could be coerced + // to, which is `expected_ty` if `rvalue_hint` returns an + // `ExpectHasType(expected_ty)`, or the `formal_ty` otherwise. + let coerce_ty = expected.and_then(|e| e.only_has_type(self)); + self.demand_coerce(&arg, coerce_ty.unwrap_or(formal_ty)); + + // 3. Relate the expected type and the formal one, + // if the expected type was used for the coercion. + coerce_ty.map(|ty| self.demand_suptype(arg.span, formal_ty, ty)); + } - if let Some(&arg_ty) = fcx.inh.tables.borrow().node_types.get(&arg.id) { - any_diverges = any_diverges || fcx.infcx().type_var_diverges(arg_ty); + if let Some(&arg_ty) = self.tables.borrow().node_types.get(&arg.id) { + any_diverges = any_diverges || self.type_var_diverges(arg_ty); + } } - } - if any_diverges && !warned { - let parent = fcx.ccx.tcx.map.get_parent_node(args[0].id); - fcx.ccx - .tcx - .sess - .add_lint(lint::builtin::UNREACHABLE_CODE, - parent, - sp, - "unreachable call".to_string()); - warned = true; + if any_diverges && !warned { + let parent = self.tcx.map.get_parent_node(args[0].id); + self.tcx + .sess + .add_lint(lint::builtin::UNREACHABLE_CODE, + parent, + sp, + "unreachable call".to_string()); + warned = true; + } + } - } + // We also need to make sure we at least write the ty of the other + // arguments which we skipped above. + if variadic { + for arg in args.iter().skip(expected_arg_count) { + self.check_expr(&arg); - // We also need to make sure we at least write the ty of the other - // arguments which we skipped above. - if variadic { - for arg in args.iter().skip(expected_arg_count) { - check_expr(fcx, &arg); - - // There are a few types which get autopromoted when passed via varargs - // in C but we just error out instead and require explicit casts. 
- let arg_ty = structurally_resolved_type(fcx, arg.span, - fcx.expr_ty(&arg)); - match arg_ty.sty { - ty::TyFloat(ast::FloatTy::F32) => { - fcx.type_error_message(arg.span, - |t| { - format!("can't pass an `{}` to variadic \ - function, cast to `c_double`", t) - }, arg_ty, None); - } - ty::TyInt(ast::IntTy::I8) | ty::TyInt(ast::IntTy::I16) | ty::TyBool => { - fcx.type_error_message(arg.span, |t| { - format!("can't pass `{}` to variadic \ - function, cast to `c_int`", - t) - }, arg_ty, None); - } - ty::TyUint(ast::UintTy::U8) | ty::TyUint(ast::UintTy::U16) => { - fcx.type_error_message(arg.span, |t| { - format!("can't pass `{}` to variadic \ - function, cast to `c_uint`", - t) - }, arg_ty, None); - } - ty::TyFnDef(_, _, f) => { - let ptr_ty = fcx.tcx().mk_ty(ty::TyFnPtr(f)); - let ptr_ty = fcx.infcx().resolve_type_vars_if_possible(&ptr_ty); - fcx.type_error_message(arg.span, - |t| { - format!("can't pass `{}` to variadic \ - function, cast to `{}`", t, ptr_ty) - }, arg_ty, None); + // There are a few types which get autopromoted when passed via varargs + // in C but we just error out instead and require explicit casts. + let arg_ty = self.structurally_resolved_type(arg.span, + self.expr_ty(&arg)); + match arg_ty.sty { + ty::TyFloat(ast::FloatTy::F32) => { + self.type_error_message(arg.span, |t| { + format!("can't pass an `{}` to variadic \ + function, cast to `c_double`", t) + }, arg_ty, None); + } + ty::TyInt(ast::IntTy::I8) | ty::TyInt(ast::IntTy::I16) | ty::TyBool => { + self.type_error_message(arg.span, |t| { + format!("can't pass `{}` to variadic \ + function, cast to `c_int`", + t) + }, arg_ty, None); + } + ty::TyUint(ast::UintTy::U8) | ty::TyUint(ast::UintTy::U16) => { + self.type_error_message(arg.span, |t| { + format!("can't pass `{}` to variadic \ + function, cast to `c_uint`", + t) + }, arg_ty, None); + } + ty::TyFnDef(_, _, f) => { + let ptr_ty = self.tcx.mk_fn_ptr(f); + let ptr_ty = self.resolve_type_vars_if_possible(&ptr_ty); + self.type_error_message(arg.span, + |t| { + format!("can't pass `{}` to variadic \ + function, cast to `{}`", t, ptr_ty) + }, arg_ty, None); + } + _ => {} } - _ => {} } } } -} -// FIXME(#17596) Ty<'tcx> is incorrectly invariant w.r.t 'tcx. -fn err_args<'tcx>(tcx: &TyCtxt<'tcx>, len: usize) -> Vec> { - (0..len).map(|_| tcx.types.err).collect() -} - -fn write_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - call_expr: &hir::Expr, - output: ty::FnOutput<'tcx>) { - fcx.write_ty(call_expr.id, match output { - ty::FnConverging(output_ty) => output_ty, - ty::FnDiverging => fcx.infcx().next_diverging_ty_var() - }); -} - -// AST fragment checking -fn check_lit<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - lit: &ast::Lit, - expected: Expectation<'tcx>) - -> Ty<'tcx> -{ - let tcx = fcx.ccx.tcx; - - match lit.node { - ast::LitKind::Str(..) => tcx.mk_static_str(), - ast::LitKind::ByteStr(ref v) => { - tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic), - tcx.mk_array(tcx.types.u8, v.len())) - } - ast::LitKind::Byte(_) => tcx.types.u8, - ast::LitKind::Char(_) => tcx.types.char, - ast::LitKind::Int(_, ast::LitIntType::Signed(t)) => tcx.mk_mach_int(t), - ast::LitKind::Int(_, ast::LitIntType::Unsigned(t)) => tcx.mk_mach_uint(t), - ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => { - let opt_ty = expected.to_option(fcx).and_then(|ty| { - match ty.sty { - ty::TyInt(_) | ty::TyUint(_) => Some(ty), - ty::TyChar => Some(tcx.types.u8), - ty::TyRawPtr(..) => Some(tcx.types.usize), - ty::TyFnDef(..) 
| ty::TyFnPtr(_) => Some(tcx.types.usize), - _ => None - } - }); - opt_ty.unwrap_or_else( - || tcx.mk_int_var(fcx.infcx().next_int_var_id())) - } - ast::LitKind::Float(_, t) => tcx.mk_mach_float(t), - ast::LitKind::FloatUnsuffixed(_) => { - let opt_ty = expected.to_option(fcx).and_then(|ty| { - match ty.sty { - ty::TyFloat(_) => Some(ty), - _ => None - } - }); - opt_ty.unwrap_or_else( - || tcx.mk_float_var(fcx.infcx().next_float_var_id())) - } - ast::LitKind::Bool(_) => tcx.types.bool + fn err_args(&self, len: usize) -> Vec> { + (0..len).map(|_| self.tcx.types.err).collect() } -} - -fn check_expr_eq_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &'tcx hir::Expr, - expected: Ty<'tcx>) { - check_expr_with_hint(fcx, expr, expected); - demand::eqtype(fcx, expr.span, expected, fcx.expr_ty(expr)); -} - -pub fn check_expr_has_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &'tcx hir::Expr, - expected: Ty<'tcx>) { - check_expr_with_hint(fcx, expr, expected); - demand::suptype(fcx, expr.span, expected, fcx.expr_ty(expr)); -} - -fn check_expr_coercable_to_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &'tcx hir::Expr, - expected: Ty<'tcx>) { - check_expr_with_hint(fcx, expr, expected); - demand::coerce(fcx, expr.span, expected, expr); -} - -fn check_expr_with_hint<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, expr: &'tcx hir::Expr, - expected: Ty<'tcx>) { - check_expr_with_expectation(fcx, expr, ExpectHasType(expected)) -} - -fn check_expr_with_expectation<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &'tcx hir::Expr, - expected: Expectation<'tcx>) { - check_expr_with_expectation_and_lvalue_pref(fcx, expr, expected, NoPreference) -} - -fn check_expr<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr) { - check_expr_with_expectation(fcx, expr, NoExpectation) -} -fn check_expr_with_lvalue_pref<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, expr: &'tcx hir::Expr, - lvalue_pref: LvaluePreference) { - check_expr_with_expectation_and_lvalue_pref(fcx, expr, NoExpectation, lvalue_pref) -} - -// determine the `self` type, using fresh variables for all variables -// declared on the impl declaration e.g., `impl for Vec<(A,B)>` -// would return ($0, $1) where $0 and $1 are freshly instantiated type -// variables. -pub fn impl_self_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - span: Span, // (potential) receiver for this impl - did: DefId) - -> TypeAndSubsts<'tcx> { - let tcx = fcx.tcx(); - - let ity = tcx.lookup_item_type(did); - let (tps, rps, raw_ty) = - (ity.generics.types.get_slice(subst::TypeSpace), - ity.generics.regions.get_slice(subst::TypeSpace), - ity.ty); - - debug!("impl_self_ty: tps={:?} rps={:?} raw_ty={:?}", tps, rps, raw_ty); - - let rps = fcx.inh.infcx.region_vars_for_defs(span, rps); - let mut substs = subst::Substs::new( - VecPerParamSpace::empty(), - VecPerParamSpace::new(rps, Vec::new(), Vec::new())); - fcx.inh.infcx.type_vars_for_defs(span, ParamSpace::TypeSpace, &mut substs, tps); - let substd_ty = fcx.instantiate_type_scheme(span, &substs, &raw_ty); - - TypeAndSubsts { substs: substs, ty: substd_ty } -} - -/// Controls whether the arguments are tupled. This is used for the call -/// operator. -/// -/// Tupling means that all call-side arguments are packed into a tuple and -/// passed as a single parameter. 
For example, if tupling is enabled, this -/// function: -/// -/// fn f(x: (isize, isize)) -/// -/// Can be called as: -/// -/// f(1, 2); -/// -/// Instead of: -/// -/// f((1, 2)); -#[derive(Clone, Eq, PartialEq)] -enum TupleArgumentsFlag { - DontTupleArguments, - TupleArguments, -} + fn write_call(&self, + call_expr: &hir::Expr, + output: ty::FnOutput<'tcx>) { + self.write_ty(call_expr.id, match output { + ty::FnConverging(output_ty) => output_ty, + ty::FnDiverging => self.next_diverging_ty_var() + }); + } -/// Unifies the return type with the expected type early, for more coercions -/// and forward type information on the argument expressions. -fn expected_types_for_fn_args<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - call_span: Span, - expected_ret: Expectation<'tcx>, - formal_ret: ty::FnOutput<'tcx>, - formal_args: &[Ty<'tcx>]) - -> Vec> { - let expected_args = expected_ret.only_has_type(fcx).and_then(|ret_ty| { - if let ty::FnConverging(formal_ret_ty) = formal_ret { - fcx.infcx().commit_regions_if_ok(|| { - // Attempt to apply a subtyping relationship between the formal - // return type (likely containing type variables if the function - // is polymorphic) and the expected return type. - // No argument expectations are produced if unification fails. - let origin = TypeOrigin::Misc(call_span); - let ures = fcx.infcx().sub_types(false, origin, formal_ret_ty, ret_ty); - // FIXME(#15760) can't use try! here, FromError doesn't default - // to identity so the resulting type is not constrained. - match ures { - // FIXME(#32730) propagate obligations - Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()), - Err(e) => return Err(e), - } + // AST fragment checking + fn check_lit(&self, + lit: &ast::Lit, + expected: Expectation<'tcx>) + -> Ty<'tcx> + { + let tcx = self.tcx; - // Record all the argument types, with the substitutions - // produced from the above subtyping unification. - Ok(formal_args.iter().map(|ty| { - fcx.infcx().resolve_type_vars_if_possible(ty) - }).collect()) - }).ok() - } else { - None - } - }).unwrap_or(vec![]); - debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})", - formal_args, formal_ret, - expected_args, expected_ret); - expected_args -} + match lit.node { + ast::LitKind::Str(..) => tcx.mk_static_str(), + ast::LitKind::ByteStr(ref v) => { + tcx.mk_imm_ref(tcx.mk_region(ty::ReStatic), + tcx.mk_array(tcx.types.u8, v.len())) + } + ast::LitKind::Byte(_) => tcx.types.u8, + ast::LitKind::Char(_) => tcx.types.char, + ast::LitKind::Int(_, ast::LitIntType::Signed(t)) => tcx.mk_mach_int(t), + ast::LitKind::Int(_, ast::LitIntType::Unsigned(t)) => tcx.mk_mach_uint(t), + ast::LitKind::Int(_, ast::LitIntType::Unsuffixed) => { + let opt_ty = expected.to_option(self).and_then(|ty| { + match ty.sty { + ty::TyInt(_) | ty::TyUint(_) => Some(ty), + ty::TyChar => Some(tcx.types.u8), + ty::TyRawPtr(..) => Some(tcx.types.usize), + ty::TyFnDef(..) 
| ty::TyFnPtr(_) => Some(tcx.types.usize), + _ => None + } + }); + opt_ty.unwrap_or_else( + || tcx.mk_int_var(self.next_int_var_id())) + } + ast::LitKind::Float(_, t) => tcx.mk_mach_float(t), + ast::LitKind::FloatUnsuffixed(_) => { + let opt_ty = expected.to_option(self).and_then(|ty| { + match ty.sty { + ty::TyFloat(_) => Some(ty), + _ => None + } + }); + opt_ty.unwrap_or_else( + || tcx.mk_float_var(self.next_float_var_id())) + } + ast::LitKind::Bool(_) => tcx.types.bool + } + } + + fn check_expr_eq_type(&self, + expr: &'gcx hir::Expr, + expected: Ty<'tcx>) { + self.check_expr_with_hint(expr, expected); + self.demand_eqtype(expr.span, expected, self.expr_ty(expr)); + } + + pub fn check_expr_has_type(&self, + expr: &'gcx hir::Expr, + expected: Ty<'tcx>) { + self.check_expr_with_hint(expr, expected); + self.demand_suptype(expr.span, expected, self.expr_ty(expr)); + } + + fn check_expr_coercable_to_type(&self, + expr: &'gcx hir::Expr, + expected: Ty<'tcx>) { + self.check_expr_with_hint(expr, expected); + self.demand_coerce(expr, expected); + } + + fn check_expr_with_hint(&self, expr: &'gcx hir::Expr, + expected: Ty<'tcx>) { + self.check_expr_with_expectation(expr, ExpectHasType(expected)) + } + + fn check_expr_with_expectation(&self, + expr: &'gcx hir::Expr, + expected: Expectation<'tcx>) { + self.check_expr_with_expectation_and_lvalue_pref(expr, expected, NoPreference) + } + + fn check_expr(&self, expr: &'gcx hir::Expr) { + self.check_expr_with_expectation(expr, NoExpectation) + } + + fn check_expr_with_lvalue_pref(&self, expr: &'gcx hir::Expr, + lvalue_pref: LvaluePreference) { + self.check_expr_with_expectation_and_lvalue_pref(expr, NoExpectation, lvalue_pref) + } + + // determine the `self` type, using fresh variables for all variables + // declared on the impl declaration e.g., `impl for Vec<(A,B)>` + // would return ($0, $1) where $0 and $1 are freshly instantiated type + // variables. + pub fn impl_self_ty(&self, + span: Span, // (potential) receiver for this impl + did: DefId) + -> TypeAndSubsts<'tcx> { + let tcx = self.tcx; + + let ity = tcx.lookup_item_type(did); + let (tps, rps, raw_ty) = + (ity.generics.types.get_slice(subst::TypeSpace), + ity.generics.regions.get_slice(subst::TypeSpace), + ity.ty); + + debug!("impl_self_ty: tps={:?} rps={:?} raw_ty={:?}", tps, rps, raw_ty); + + let rps = self.region_vars_for_defs(span, rps); + let mut substs = subst::Substs::new( + VecPerParamSpace::empty(), + VecPerParamSpace::new(rps, Vec::new(), Vec::new())); + self.type_vars_for_defs(span, ParamSpace::TypeSpace, &mut substs, tps); + let substd_ty = self.instantiate_type_scheme(span, &substs, &raw_ty); + + TypeAndSubsts { substs: substs, ty: substd_ty } + } + + /// Unifies the return type with the expected type early, for more coercions + /// and forward type information on the argument expressions. + fn expected_types_for_fn_args(&self, + call_span: Span, + expected_ret: Expectation<'tcx>, + formal_ret: ty::FnOutput<'tcx>, + formal_args: &[Ty<'tcx>]) + -> Vec> { + let expected_args = expected_ret.only_has_type(self).and_then(|ret_ty| { + if let ty::FnConverging(formal_ret_ty) = formal_ret { + self.commit_regions_if_ok(|| { + // Attempt to apply a subtyping relationship between the formal + // return type (likely containing type variables if the function + // is polymorphic) and the expected return type. + // No argument expectations are produced if unification fails. 
+ let origin = TypeOrigin::Misc(call_span); + let ures = self.sub_types(false, origin, formal_ret_ty, ret_ty); + // FIXME(#15760) can't use try! here, FromError doesn't default + // to identity so the resulting type is not constrained. + match ures { + // FIXME(#32730) propagate obligations + Ok(InferOk { obligations, .. }) => assert!(obligations.is_empty()), + Err(e) => return Err(e), + } -/// Invariant: -/// If an expression has any sub-expressions that result in a type error, -/// inspecting that expression's type with `ty.references_error()` will return -/// true. Likewise, if an expression is known to diverge, inspecting its -/// type with `ty::type_is_bot` will return true (n.b.: since Rust is -/// strict, _|_ can appear in the type of an expression that does not, -/// itself, diverge: for example, fn() -> _|_.) -/// Note that inspecting a type's structure *directly* may expose the fact -/// that there are actually multiple representations for `TyError`, so avoid -/// that when err needs to be handled differently. -fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &'tcx hir::Expr, - expected: Expectation<'tcx>, - lvalue_pref: LvaluePreference) { - debug!(">> typechecking: expr={:?} expected={:?}", - expr, expected); + // Record all the argument types, with the substitutions + // produced from the above subtyping unification. + Ok(formal_args.iter().map(|ty| { + self.resolve_type_vars_if_possible(ty) + }).collect()) + }).ok() + } else { + None + } + }).unwrap_or(vec![]); + debug!("expected_types_for_fn_args(formal={:?} -> {:?}, expected={:?} -> {:?})", + formal_args, formal_ret, + expected_args, expected_ret); + expected_args + } // Checks a method call. - fn check_method_call<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &'tcx hir::Expr, - method_name: Spanned, - args: &'tcx [P], - tps: &[P], - expected: Expectation<'tcx>, - lvalue_pref: LvaluePreference) { + fn check_method_call(&self, + expr: &'gcx hir::Expr, + method_name: Spanned, + args: &'gcx [P], + tps: &[P], + expected: Expectation<'tcx>, + lvalue_pref: LvaluePreference) { let rcvr = &args[0]; - check_expr_with_lvalue_pref(fcx, &rcvr, lvalue_pref); + self.check_expr_with_lvalue_pref(&rcvr, lvalue_pref); // no need to check for bot/err -- callee does that - let expr_t = structurally_resolved_type(fcx, - expr.span, - fcx.expr_ty(&rcvr)); - - let tps = tps.iter().map(|ast_ty| fcx.to_ty(&ast_ty)).collect::>(); - let fn_ty = match method::lookup(fcx, - method_name.span, - method_name.node, - expr_t, - tps, - expr, - rcvr) { + let expr_t = self.structurally_resolved_type(expr.span, self.expr_ty(&rcvr)); + + let tps = tps.iter().map(|ast_ty| self.to_ty(&ast_ty)).collect::>(); + let fn_ty = match self.lookup_method(method_name.span, + method_name.node, + expr_t, + tps, + expr, + rcvr) { Ok(method) => { let method_ty = method.ty; let method_call = MethodCall::expr(expr.id); - fcx.inh.tables.borrow_mut().method_map.insert(method_call, method); + self.tables.borrow_mut().method_map.insert(method_call, method); method_ty } Err(error) => { - if method_name.node != special_idents::invalid.name { - method::report_error(fcx, method_name.span, expr_t, - method_name.node, Some(rcvr), error); + if method_name.node != keywords::Invalid.name() { + self.report_method_error(method_name.span, expr_t, + method_name.node, Some(rcvr), error); } - fcx.write_error(expr.id); - fcx.tcx().types.err + self.write_error(expr.id); + self.tcx.types.err } }; // Call the generic checker. 
- let ret_ty = check_method_argument_types(fcx, - method_name.span, - fn_ty, - expr, - &args[1..], - DontTupleArguments, - expected); + let ret_ty = self.check_method_argument_types(method_name.span, fn_ty, + expr, &args[1..], + DontTupleArguments, + expected); - write_call(fcx, expr, ret_ty); + self.write_call(expr, ret_ty); } // A generic function for checking the then and else in an if // or if-else. - fn check_then_else<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - cond_expr: &'tcx hir::Expr, - then_blk: &'tcx hir::Block, - opt_else_expr: Option<&'tcx hir::Expr>, - id: ast::NodeId, - sp: Span, - expected: Expectation<'tcx>) { - check_expr_has_type(fcx, cond_expr, fcx.tcx().types.bool); - - let expected = expected.adjust_for_branches(fcx); - check_block_with_expected(fcx, then_blk, expected); - let then_ty = fcx.node_ty(then_blk.id); - - let unit = fcx.tcx().mk_nil(); + fn check_then_else(&self, + cond_expr: &'gcx hir::Expr, + then_blk: &'gcx hir::Block, + opt_else_expr: Option<&'gcx hir::Expr>, + id: ast::NodeId, + sp: Span, + expected: Expectation<'tcx>) { + self.check_expr_has_type(cond_expr, self.tcx.types.bool); + + let expected = expected.adjust_for_branches(self); + self.check_block_with_expected(then_blk, expected); + let then_ty = self.node_ty(then_blk.id); + + let unit = self.tcx.mk_nil(); let (origin, expected, found, result) = if let Some(else_expr) = opt_else_expr { - check_expr_with_expectation(fcx, else_expr, expected); - let else_ty = fcx.expr_ty(else_expr); + self.check_expr_with_expectation(else_expr, expected); + let else_ty = self.expr_ty(else_expr); let origin = TypeOrigin::IfExpression(sp); // Only try to coerce-unify if we have a then expression // to assign coercions to, otherwise it's () or diverging. let result = if let Some(ref then) = then_blk.expr { - let res = coercion::try_find_lub(fcx, origin, || Some(&**then), - then_ty, else_expr); + let res = self.try_find_coercion_lub(origin, || Some(&**then), + then_ty, else_expr); // In case we did perform an adjustment, we have to update // the type of the block, because old trans still uses it. - let adj = fcx.inh.tables.borrow().adjustments.get(&then.id).cloned(); + let adj = self.tables.borrow().adjustments.get(&then.id).cloned(); if res.is_ok() && adj.is_some() { - fcx.write_ty(then_blk.id, fcx.adjust_expr_ty(then, adj.as_ref())); + self.write_ty(then_blk.id, self.adjust_expr_ty(then, adj.as_ref())); } res } else { - fcx.infcx().commit_if_ok(|_| { + self.commit_if_ok(|_| { let trace = TypeTrace::types(origin, true, then_ty, else_ty); - fcx.infcx().lub(true, trace, &then_ty, &else_ty) + self.lub(true, trace, &then_ty, &else_ty) .map(|InferOk { value, obligations }| { // FIXME(#32730) propagate obligations assert!(obligations.is_empty()); @@ -2923,7 +2999,7 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, } else { let origin = TypeOrigin::IfExpressionWithNoElse(sp); (origin, unit, then_ty, - fcx.infcx().eq_types(true, origin, unit, then_ty) + self.eq_types(true, origin, unit, then_ty) .map(|InferOk { obligations, .. 
}| { // FIXME(#32730) propagate obligations assert!(obligations.is_empty()); @@ -2933,42 +3009,42 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, let if_ty = match result { Ok(ty) => { - if fcx.expr_ty(cond_expr).references_error() { - fcx.tcx().types.err + if self.expr_ty(cond_expr).references_error() { + self.tcx.types.err } else { ty } } Err(e) => { - fcx.infcx().report_mismatched_types(origin, expected, found, e); - fcx.tcx().types.err + self.report_mismatched_types(origin, expected, found, e); + self.tcx.types.err } }; - fcx.write_ty(id, if_ty); + self.write_ty(id, if_ty); } // Check field access expressions - fn check_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, - expr: &'tcx hir::Expr, - lvalue_pref: LvaluePreference, - base: &'tcx hir::Expr, - field: &Spanned) { - check_expr_with_lvalue_pref(fcx, base, lvalue_pref); - let expr_t = structurally_resolved_type(fcx, expr.span, fcx.expr_ty(base)); + fn check_field(&self, + expr: &'gcx hir::Expr, + lvalue_pref: LvaluePreference, + base: &'gcx hir::Expr, + field: &Spanned) { + self.check_expr_with_lvalue_pref(base, lvalue_pref); + let expr_t = self.structurally_resolved_type(expr.span, + self.expr_ty(base)); let mut private_candidate = None; - let (_, autoderefs, field_ty) = autoderef(fcx, - expr.span, - expr_t, - || Some(base), - UnresolvedTypeAction::Error, - lvalue_pref, - |base_t, _| { + let (_, autoderefs, field_ty) = self.autoderef(expr.span, + expr_t, + || Some(base), + UnresolvedTypeAction::Error, + lvalue_pref, + |base_t, _| { if let ty::TyStruct(base_def, substs) = base_t.sty { debug!("struct named {:?}", base_t); if let Some(field) = base_def.struct_variant().find_field_named(field.node) { - let field_ty = fcx.field_ty(expr.span, field, substs); - if field.vis.is_accessible_from(fcx.body_id, &fcx.tcx().map) { + let field_ty = self.field_ty(expr.span, field, substs); + if field.vis.is_accessible_from(self.body_id, &self.tcx().map) { return Some(field_ty); } private_candidate = Some((base_def.did, field_ty)); @@ -2978,68 +3054,65 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, }); match field_ty { Some(field_ty) => { - fcx.write_ty(expr.id, field_ty); - fcx.write_autoderef_adjustment(base.id, autoderefs); + self.write_ty(expr.id, field_ty); + self.write_autoderef_adjustment(base.id, autoderefs); return; } None => {} } if let Some((did, field_ty)) = private_candidate { - let struct_path = fcx.tcx().item_path_str(did); + let struct_path = self.tcx().item_path_str(did); + self.write_ty(expr.id, field_ty); let msg = format!("field `{}` of struct `{}` is private", field.node, struct_path); - fcx.tcx().sess.span_err(expr.span, &msg); - fcx.write_ty(expr.id, field_ty); - } else if field.node == special_idents::invalid.name { - fcx.write_error(expr.id); - } else if method::exists(fcx, field.span, field.node, expr_t, expr.id) { - fcx.type_error_struct(field.span, - |actual| { - format!("attempted to take value of method `{}` on type \ - `{}`", field.node, actual) - }, - expr_t, None) - .fileline_help(field.span, - "maybe a `()` to call it is missing? \ - If not, try an anonymous function") + let mut err = self.tcx().sess.struct_span_err(expr.span, &msg); + // Also check if an accessible method exists, which is often what is meant. 
+ if self.method_exists(field.span, field.node, expr_t, expr.id, false) { + err.note(&format!("a method `{}` also exists, perhaps you wish to call it", + field.node)); + } + err.emit(); + } else if field.node == keywords::Invalid.name() { + self.write_error(expr.id); + } else if self.method_exists(field.span, field.node, expr_t, expr.id, true) { + self.type_error_struct(field.span, |actual| { + format!("attempted to take value of method `{}` on type \ + `{}`", field.node, actual) + }, expr_t, None) + .help( + "maybe a `()` to call it is missing? \ + If not, try an anonymous function") .emit(); - fcx.write_error(expr.id); + self.write_error(expr.id); } else { - let mut err = fcx.type_error_struct( - expr.span, - |actual| { - format!("attempted access of field `{}` on \ - type `{}`, but no field with that \ - name was found", - field.node, - actual) - }, - expr_t, None); + let mut err = self.type_error_struct(expr.span, |actual| { + format!("attempted access of field `{}` on type `{}`, \ + but no field with that name was found", + field.node, actual) + }, expr_t, None); if let ty::TyStruct(def, _) = expr_t.sty { - suggest_field_names(&mut err, def.struct_variant(), field, vec![]); + Self::suggest_field_names(&mut err, def.struct_variant(), field, vec![]); } err.emit(); - fcx.write_error(expr.id); + self.write_error(expr.id); } } // displays hints about the closest matches in field names - fn suggest_field_names<'tcx>(err: &mut DiagnosticBuilder, - variant: ty::VariantDef<'tcx>, - field: &Spanned, - skip : Vec) { + fn suggest_field_names(err: &mut DiagnosticBuilder, + variant: ty::VariantDef<'tcx>, + field: &Spanned, + skip : Vec) { let name = field.node.as_str(); - let names = variant.fields - .iter() - .filter_map(|ref field| { - // ignore already set fields and private fields from non-local crates - if skip.iter().any(|x| *x == field.name.as_str()) || - (variant.did.krate != LOCAL_CRATE && field.vis != Visibility::Public) { - None - } else { - Some(&field.name) - } - }); + let names = variant.fields.iter().filter_map(|field| { + // ignore already set fields and private fields from non-local crates + if skip.iter().any(|x| *x == field.name.as_str()) || + (variant.did.krate != LOCAL_CRATE && field.vis != Visibility::Public) { + None + } else { + Some(&field.name) + } + }); // only find fits with at least one matching letter if let Some(name) = find_best_match_for_name(names, &name, Some(name.len())) { @@ -3049,22 +3122,22 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, } // Check tuple index expressions - fn check_tup_field<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, - expr: &'tcx hir::Expr, - lvalue_pref: LvaluePreference, - base: &'tcx hir::Expr, - idx: codemap::Spanned) { - check_expr_with_lvalue_pref(fcx, base, lvalue_pref); - let expr_t = structurally_resolved_type(fcx, expr.span, fcx.expr_ty(base)); + fn check_tup_field(&self, + expr: &'gcx hir::Expr, + lvalue_pref: LvaluePreference, + base: &'gcx hir::Expr, + idx: codemap::Spanned) { + self.check_expr_with_lvalue_pref(base, lvalue_pref); + let expr_t = self.structurally_resolved_type(expr.span, + self.expr_ty(base)); let mut private_candidate = None; let mut tuple_like = false; - let (_, autoderefs, field_ty) = autoderef(fcx, - expr.span, - expr_t, - || Some(base), - UnresolvedTypeAction::Error, - lvalue_pref, - |base_t, _| { + let (_, autoderefs, field_ty) = self.autoderef(expr.span, + expr_t, + || Some(base), + UnresolvedTypeAction::Error, + lvalue_pref, + |base_t, _| { let (base_def, substs) = match 
base_t.sty { ty::TyStruct(base_def, substs) => (base_def, substs), ty::TyTuple(ref v) => { @@ -3079,8 +3152,8 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, debug!("tuple struct named {:?}", base_t); if let Some(field) = base_def.struct_variant().fields.get(idx.node) { - let field_ty = fcx.field_ty(expr.span, field, substs); - if field.vis.is_accessible_from(fcx.body_id, &fcx.tcx().map) { + let field_ty = self.field_ty(expr.span, field, substs); + if field.vis.is_accessible_from(self.body_id, &self.tcx().map) { return Some(field_ty); } private_candidate = Some((base_def.did, field_ty)); @@ -3089,22 +3162,22 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, }); match field_ty { Some(field_ty) => { - fcx.write_ty(expr.id, field_ty); - fcx.write_autoderef_adjustment(base.id, autoderefs); + self.write_ty(expr.id, field_ty); + self.write_autoderef_adjustment(base.id, autoderefs); return; } None => {} } if let Some((did, field_ty)) = private_candidate { - let struct_path = fcx.tcx().item_path_str(did); + let struct_path = self.tcx().item_path_str(did); let msg = format!("field `{}` of struct `{}` is private", idx.node, struct_path); - fcx.tcx().sess.span_err(expr.span, &msg); - fcx.write_ty(expr.id, field_ty); + self.tcx().sess.span_err(expr.span, &msg); + self.write_ty(expr.id, field_ty); return; } - fcx.type_error_message( + self.type_error_message( expr.span, |actual| { if tuple_like { @@ -3121,15 +3194,15 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, }, expr_t, None); - fcx.write_error(expr.id); + self.write_error(expr.id); } - fn report_unknown_field<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - ty: Ty<'tcx>, - variant: ty::VariantDef<'tcx>, - field: &hir::Field, - skip_fields: &[hir::Field]) { - let mut err = fcx.type_error_struct( + fn report_unknown_field(&self, + ty: Ty<'tcx>, + variant: ty::VariantDef<'tcx>, + field: &hir::Field, + skip_fields: &[hir::Field]) { + let mut err = self.type_error_struct( field.name.span, |actual| if let ty::TyEnum(..) 
= ty.sty { format!("struct variant `{}::{}` has no field named `{}`", @@ -3142,17 +3215,17 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, None); // prevent all specified fields from being suggested let skip_fields = skip_fields.iter().map(|ref x| x.name.node.as_str()); - suggest_field_names(&mut err, variant, &field.name, skip_fields.collect()); + Self::suggest_field_names(&mut err, variant, &field.name, skip_fields.collect()); err.emit(); } - fn check_expr_struct_fields<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - adt_ty: Ty<'tcx>, - span: Span, - variant: ty::VariantDef<'tcx>, - ast_fields: &'tcx [hir::Field], - check_completeness: bool) { - let tcx = fcx.ccx.tcx; + fn check_expr_struct_fields(&self, + adt_ty: Ty<'tcx>, + span: Span, + variant: ty::VariantDef<'tcx>, + ast_fields: &'gcx [hir::Field], + check_completeness: bool) { + let tcx = self.tcx; let substs = match adt_ty.sty { ty::TyStruct(_, substs) | ty::TyEnum(_, substs) => substs, _ => span_bug!(span, "non-ADT passed to check_expr_struct_fields") @@ -3170,22 +3243,22 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, let expected_field_type; if let Some(v_field) = remaining_fields.remove(&field.name.node) { - expected_field_type = fcx.field_ty(field.span, v_field, substs); + expected_field_type = self.field_ty(field.span, v_field, substs); } else { error_happened = true; expected_field_type = tcx.types.err; if let Some(_) = variant.find_field_named(field.name.node) { - span_err!(fcx.tcx().sess, field.name.span, E0062, + span_err!(self.tcx.sess, field.name.span, E0062, "field `{}` specified more than once", field.name.node); } else { - report_unknown_field(fcx, adt_ty, variant, field, ast_fields); + self.report_unknown_field(adt_ty, variant, field, ast_fields); } } // Make sure to give a type to the field even if there's // an error, so we can continue typechecking - check_expr_coercable_to_type(fcx, &field.expr, expected_field_type); + self.check_expr_coercable_to_type(&field.expr, expected_field_type); } // Make sure the programmer specified all the fields. 
@@ -3205,60 +3278,61 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, } - fn check_struct_fields_on_error<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, - id: ast::NodeId, - fields: &'tcx [hir::Field], - base_expr: &'tcx Option>) { + fn check_struct_fields_on_error(&self, + id: ast::NodeId, + fields: &'gcx [hir::Field], + base_expr: &'gcx Option>) { // Make sure to still write the types // otherwise we might ICE - fcx.write_error(id); + self.write_error(id); for field in fields { - check_expr(fcx, &field.expr); + self.check_expr(&field.expr); } match *base_expr { - Some(ref base) => check_expr(fcx, &base), + Some(ref base) => self.check_expr(&base), None => {} } } - fn check_expr_struct<'a, 'tcx>(fcx: &FnCtxt<'a,'tcx>, - expr: &hir::Expr, - path: &hir::Path, - fields: &'tcx [hir::Field], - base_expr: &'tcx Option>) + fn check_expr_struct(&self, + expr: &hir::Expr, + path: &hir::Path, + fields: &'gcx [hir::Field], + base_expr: &'gcx Option>) { - let tcx = fcx.tcx(); + let tcx = self.tcx; // Find the relevant variant let def = lookup_full_def(tcx, path.span, expr.id); if def == Def::Err { - check_struct_fields_on_error(fcx, expr.id, fields, base_expr); + self.set_tainted_by_errors(); + self.check_struct_fields_on_error(expr.id, fields, base_expr); return; } - let variant = match fcx.def_struct_variant(def, path.span) { + let variant = match self.def_struct_variant(def, path.span) { Some((_, variant)) => variant, None => { - span_err!(fcx.tcx().sess, path.span, E0071, + span_err!(self.tcx.sess, path.span, E0071, "`{}` does not name a structure", pprust::path_to_string(path)); - check_struct_fields_on_error(fcx, expr.id, fields, base_expr); + self.check_struct_fields_on_error(expr.id, fields, base_expr); return; } }; - let expr_ty = fcx.instantiate_type(def.def_id(), path); - fcx.write_ty(expr.id, expr_ty); + let expr_ty = self.instantiate_type(def.def_id(), path); + self.write_ty(expr.id, expr_ty); - check_expr_struct_fields(fcx, expr_ty, expr.span, variant, fields, - base_expr.is_none()); + self.check_expr_struct_fields(expr_ty, path.span, variant, fields, + base_expr.is_none()); if let &Some(ref base_expr) = base_expr { - check_expr_has_type(fcx, base_expr, expr_ty); + self.check_expr_has_type(base_expr, expr_ty); match expr_ty.sty { ty::TyStruct(adt, substs) => { - fcx.inh.tables.borrow_mut().fru_field_types.insert( + self.tables.borrow_mut().fru_field_types.insert( expr.id, adt.struct_variant().fields.iter().map(|f| { - fcx.normalize_associated_types_in( + self.normalize_associated_types_in( expr.span, &f.ty(tcx, substs) ) }).collect() @@ -3272,1248 +3346,1083 @@ fn check_expr_with_expectation_and_lvalue_pref<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, } } - type ExprCheckerWithTy = fn(&FnCtxt, &hir::Expr, Ty); - let tcx = fcx.ccx.tcx; - let id = expr.id; - match expr.node { - hir::ExprBox(ref subexpr) => { - let expected_inner = expected.to_option(fcx).map_or(NoExpectation, |ty| { - match ty.sty { - ty::TyBox(ty) => Expectation::rvalue_hint(tcx, ty), - _ => NoExpectation - } - }); - check_expr_with_expectation(fcx, subexpr, expected_inner); - let referent_ty = fcx.expr_ty(&subexpr); - fcx.write_ty(id, tcx.mk_box(referent_ty)); - } - - hir::ExprLit(ref lit) => { - let typ = check_lit(fcx, &lit, expected); - fcx.write_ty(id, typ); - } - hir::ExprBinary(op, ref lhs, ref rhs) => { - op::check_binop(fcx, expr, op, lhs, rhs); - } - hir::ExprAssignOp(op, ref lhs, ref rhs) => { - op::check_binop_assign(fcx, expr, op, lhs, rhs); - } - hir::ExprUnary(unop, ref oprnd) => { - let 
expected_inner = match unop { - hir::UnNot | hir::UnNeg => { - expected - } - hir::UnDeref => { - NoExpectation - } - }; - let lvalue_pref = match unop { - hir::UnDeref => lvalue_pref, - _ => NoPreference - }; - check_expr_with_expectation_and_lvalue_pref( - fcx, &oprnd, expected_inner, lvalue_pref); - let mut oprnd_t = fcx.expr_ty(&oprnd); + /// Invariant: + /// If an expression has any sub-expressions that result in a type error, + /// inspecting that expression's type with `ty.references_error()` will return + /// true. Likewise, if an expression is known to diverge, inspecting its + /// type with `ty::type_is_bot` will return true (n.b.: since Rust is + /// strict, _|_ can appear in the type of an expression that does not, + /// itself, diverge: for example, fn() -> _|_.) + /// Note that inspecting a type's structure *directly* may expose the fact + /// that there are actually multiple representations for `TyError`, so avoid + /// that when err needs to be handled differently. + fn check_expr_with_expectation_and_lvalue_pref(&self, + expr: &'gcx hir::Expr, + expected: Expectation<'tcx>, + lvalue_pref: LvaluePreference) { + debug!(">> typechecking: expr={:?} expected={:?}", + expr, expected); + + let tcx = self.tcx; + let id = expr.id; + match expr.node { + hir::ExprBox(ref subexpr) => { + let expected_inner = expected.to_option(self).map_or(NoExpectation, |ty| { + match ty.sty { + ty::TyBox(ty) => Expectation::rvalue_hint(self, ty), + _ => NoExpectation + } + }); + self.check_expr_with_expectation(subexpr, expected_inner); + let referent_ty = self.expr_ty(&subexpr); + self.write_ty(id, tcx.mk_box(referent_ty)); + } - if !oprnd_t.references_error() { - match unop { + hir::ExprLit(ref lit) => { + let typ = self.check_lit(&lit, expected); + self.write_ty(id, typ); + } + hir::ExprBinary(op, ref lhs, ref rhs) => { + self.check_binop(expr, op, lhs, rhs); + } + hir::ExprAssignOp(op, ref lhs, ref rhs) => { + self.check_binop_assign(expr, op, lhs, rhs); + } + hir::ExprUnary(unop, ref oprnd) => { + let expected_inner = match unop { + hir::UnNot | hir::UnNeg => { + expected + } hir::UnDeref => { - oprnd_t = structurally_resolved_type(fcx, expr.span, oprnd_t); - - if let Some(mt) = oprnd_t.builtin_deref(true, NoPreference) { - oprnd_t = mt.ty; - } else if let Some(method) = try_overloaded_deref( - fcx, expr.span, Some(&oprnd), oprnd_t, lvalue_pref) { - oprnd_t = make_overloaded_lvalue_return_type(tcx, method).ty; - fcx.inh.tables.borrow_mut().method_map.insert(MethodCall::expr(expr.id), - method); - } else { - fcx.type_error_message(expr.span, |actual| { - format!("type `{}` cannot be \ - dereferenced", actual) - }, oprnd_t, None); - oprnd_t = tcx.types.err; - } + NoExpectation } - hir::UnNot => { - oprnd_t = structurally_resolved_type(fcx, oprnd.span, - oprnd_t); - if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) { - oprnd_t = op::check_user_unop(fcx, "!", "not", - tcx.lang_items.not_trait(), - expr, &oprnd, oprnd_t, unop); + }; + let lvalue_pref = match unop { + hir::UnDeref => lvalue_pref, + _ => NoPreference + }; + self.check_expr_with_expectation_and_lvalue_pref(&oprnd, + expected_inner, + lvalue_pref); + let mut oprnd_t = self.expr_ty(&oprnd); + + if !oprnd_t.references_error() { + match unop { + hir::UnDeref => { + oprnd_t = self.structurally_resolved_type(expr.span, oprnd_t); + + if let Some(mt) = oprnd_t.builtin_deref(true, NoPreference) { + oprnd_t = mt.ty; + } else if let Some(method) = self.try_overloaded_deref( + expr.span, Some(&oprnd), oprnd_t, lvalue_pref) { + oprnd_t = 
self.make_overloaded_lvalue_return_type(method).ty; + self.tables.borrow_mut().method_map.insert(MethodCall::expr(expr.id), + method); + } else { + self.type_error_message(expr.span, |actual| { + format!("type `{}` cannot be \ + dereferenced", actual) + }, oprnd_t, None); + oprnd_t = tcx.types.err; + } } - } - hir::UnNeg => { - oprnd_t = structurally_resolved_type(fcx, oprnd.span, - oprnd_t); - if !(oprnd_t.is_integral() || oprnd_t.is_fp()) { - oprnd_t = op::check_user_unop(fcx, "-", "neg", - tcx.lang_items.neg_trait(), - expr, &oprnd, oprnd_t, unop); + hir::UnNot => { + oprnd_t = self.structurally_resolved_type(oprnd.span, + oprnd_t); + if !(oprnd_t.is_integral() || oprnd_t.sty == ty::TyBool) { + oprnd_t = self.check_user_unop("!", "not", + tcx.lang_items.not_trait(), + expr, &oprnd, oprnd_t, unop); + } } - } - } - } - fcx.write_ty(id, oprnd_t); - } - hir::ExprAddrOf(mutbl, ref oprnd) => { - let hint = expected.only_has_type(fcx).map_or(NoExpectation, |ty| { - match ty.sty { - ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => { - if fcx.tcx().expr_is_lval(&oprnd) { - // Lvalues may legitimately have unsized types. - // For example, dereferences of a fat pointer and - // the last field of a struct can be unsized. - ExpectHasType(mt.ty) - } else { - Expectation::rvalue_hint(tcx, mt.ty) + hir::UnNeg => { + oprnd_t = self.structurally_resolved_type(oprnd.span, + oprnd_t); + if !(oprnd_t.is_integral() || oprnd_t.is_fp()) { + oprnd_t = self.check_user_unop("-", "neg", + tcx.lang_items.neg_trait(), + expr, &oprnd, oprnd_t, unop); + } } } - _ => NoExpectation } - }); - let lvalue_pref = LvaluePreference::from_mutbl(mutbl); - check_expr_with_expectation_and_lvalue_pref(fcx, - &oprnd, - hint, - lvalue_pref); - - let tm = ty::TypeAndMut { ty: fcx.expr_ty(&oprnd), mutbl: mutbl }; - let oprnd_t = if tm.ty.references_error() { - tcx.types.err - } else { - // Note: at this point, we cannot say what the best lifetime - // is to use for resulting pointer. We want to use the - // shortest lifetime possible so as to avoid spurious borrowck - // errors. Moreover, the longest lifetime will depend on the - // precise details of the value whose address is being taken - // (and how long it is valid), which we don't know yet until type - // inference is complete. - // - // Therefore, here we simply generate a region variable. The - // region inferencer will then select the ultimate value. - // Finally, borrowck is charged with guaranteeing that the - // value whose address was taken can actually be made to live - // as long as it needs to live. - let region = fcx.infcx().next_region_var(infer::AddrOfRegion(expr.span)); - tcx.mk_ref(tcx.mk_region(region), tm) - }; - fcx.write_ty(id, oprnd_t); - } - hir::ExprPath(ref maybe_qself, ref path) => { - let opt_self_ty = maybe_qself.as_ref().map(|qself| { - fcx.to_ty(&qself.ty) - }); - - let path_res = if let Some(&d) = tcx.def_map.borrow().get(&id) { - d - } else if let Some(hir::QSelf { position: 0, .. }) = *maybe_qself { - // Create some fake resolution that can't possibly be a type. - def::PathResolution { - base_def: Def::Mod(tcx.map.local_def_id(ast::CRATE_NODE_ID)), - depth: path.segments.len() + self.write_ty(id, oprnd_t); + } + hir::ExprAddrOf(mutbl, ref oprnd) => { + let hint = expected.only_has_type(self).map_or(NoExpectation, |ty| { + match ty.sty { + ty::TyRef(_, ref mt) | ty::TyRawPtr(ref mt) => { + if self.tcx.expr_is_lval(&oprnd) { + // Lvalues may legitimately have unsized types. 
+ // For example, dereferences of a fat pointer and + // the last field of a struct can be unsized. + ExpectHasType(mt.ty) + } else { + Expectation::rvalue_hint(self, mt.ty) + } + } + _ => NoExpectation } + }); + let lvalue_pref = LvaluePreference::from_mutbl(mutbl); + self.check_expr_with_expectation_and_lvalue_pref(&oprnd, hint, lvalue_pref); + + let tm = ty::TypeAndMut { ty: self.expr_ty(&oprnd), mutbl: mutbl }; + let oprnd_t = if tm.ty.references_error() { + tcx.types.err } else { - span_bug!(expr.span, "unbound path {:?}", expr) - }; - - if let Some((opt_ty, segments, def)) = - resolve_ty_and_def_ufcs(fcx, path_res, opt_self_ty, path, - expr.span, expr.id) { - if def != Def::Err { - let (scheme, predicates) = type_scheme_and_predicates_for_def(fcx, - expr.span, - def); - instantiate_path(fcx, - segments, - scheme, - &predicates, - opt_ty, - def, - expr.span, - id); - } else { - fcx.write_ty(id, fcx.tcx().types.err); - } + // Note: at this point, we cannot say what the best lifetime + // is to use for resulting pointer. We want to use the + // shortest lifetime possible so as to avoid spurious borrowck + // errors. Moreover, the longest lifetime will depend on the + // precise details of the value whose address is being taken + // (and how long it is valid), which we don't know yet until type + // inference is complete. + // + // Therefore, here we simply generate a region variable. The + // region inferencer will then select the ultimate value. + // Finally, borrowck is charged with guaranteeing that the + // value whose address was taken can actually be made to live + // as long as it needs to live. + let region = self.next_region_var(infer::AddrOfRegion(expr.span)); + tcx.mk_ref(tcx.mk_region(region), tm) + }; + self.write_ty(id, oprnd_t); } + hir::ExprPath(ref maybe_qself, ref path) => { + let opt_self_ty = maybe_qself.as_ref().map(|qself| { + self.to_ty(&qself.ty) + }); + + let path_res = if let Some(&d) = tcx.def_map.borrow().get(&id) { + d + } else if let Some(hir::QSelf { position: 0, .. }) = *maybe_qself { + // Create some fake resolution that can't possibly be a type. + def::PathResolution { + base_def: Def::Mod(tcx.map.local_def_id(ast::CRATE_NODE_ID)), + depth: path.segments.len() + } + } else { + span_bug!(expr.span, "unbound path {:?}", expr) + }; + + if let Some((opt_ty, segments, def)) = + self.resolve_ty_and_def_ufcs(path_res, opt_self_ty, path, + expr.span, expr.id) { + if def != Def::Err { + let (scheme, predicates) = self.type_scheme_and_predicates_for_def(expr.span, + def); + self.instantiate_path(segments, scheme, &predicates, + opt_ty, def, expr.span, id); + } else { + self.set_tainted_by_errors(); + self.write_ty(id, self.tcx.types.err); + } + } - // We always require that the type provided as the value for - // a type parameter outlives the moment of instantiation. - fcx.opt_node_ty_substs(expr.id, |item_substs| { - fcx.add_wf_bounds(&item_substs.substs, expr); - }); - } - hir::ExprInlineAsm(_, ref outputs, ref inputs) => { - for output in outputs { - check_expr(fcx, output); + // We always require that the type provided as the value for + // a type parameter outlives the moment of instantiation. 
+ self.opt_node_ty_substs(expr.id, |item_substs| { + self.add_wf_bounds(&item_substs.substs, expr); + }); } - for input in inputs { - check_expr(fcx, input); + hir::ExprInlineAsm(_, ref outputs, ref inputs) => { + for output in outputs { + self.check_expr(output); + } + for input in inputs { + self.check_expr(input); + } + self.write_nil(id); } - fcx.write_nil(id); - } - hir::ExprBreak(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); } - hir::ExprAgain(_) => { fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); } - hir::ExprRet(ref expr_opt) => { - match fcx.ret_ty { - ty::FnConverging(result_type) => { - match *expr_opt { - None => - if let Err(_) = fcx.mk_eqty(false, TypeOrigin::Misc(expr.span), - result_type, fcx.tcx().mk_nil()) { + hir::ExprBreak(_) => { self.write_ty(id, self.next_diverging_ty_var()); } + hir::ExprAgain(_) => { self.write_ty(id, self.next_diverging_ty_var()); } + hir::ExprRet(ref expr_opt) => { + match self.ret_ty { + ty::FnConverging(result_type) => { + if let Some(ref e) = *expr_opt { + self.check_expr_coercable_to_type(&e, result_type); + } else { + let eq_result = self.eq_types(false, + TypeOrigin::Misc(expr.span), + result_type, + tcx.mk_nil()) + // FIXME(#32730) propagate obligations + .map(|InferOk { obligations, .. }| assert!(obligations.is_empty())); + if eq_result.is_err() { span_err!(tcx.sess, expr.span, E0069, - "`return;` in a function whose return type is \ - not `()`"); - }, - Some(ref e) => { - check_expr_coercable_to_type(fcx, &e, result_type); + "`return;` in a function whose return type is not `()`"); + } } } - } - ty::FnDiverging => { - if let Some(ref e) = *expr_opt { - check_expr(fcx, &e); + ty::FnDiverging => { + if let Some(ref e) = *expr_opt { + self.check_expr(&e); + } + span_err!(tcx.sess, expr.span, E0166, + "`return` in a function declared as diverging"); } - span_err!(tcx.sess, expr.span, E0166, - "`return` in a function declared as diverging"); } - } - fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); - } - hir::ExprAssign(ref lhs, ref rhs) => { - check_expr_with_lvalue_pref(fcx, &lhs, PreferMutLvalue); - - let tcx = fcx.tcx(); - if !tcx.expr_is_lval(&lhs) { - span_err!(tcx.sess, expr.span, E0070, - "invalid left-hand side expression"); - } + self.write_ty(id, self.next_diverging_ty_var()); + } + hir::ExprAssign(ref lhs, ref rhs) => { + self.check_expr_with_lvalue_pref(&lhs, PreferMutLvalue); - let lhs_ty = fcx.expr_ty(&lhs); - check_expr_coercable_to_type(fcx, &rhs, lhs_ty); - let rhs_ty = fcx.expr_ty(&rhs); + let tcx = self.tcx; + if !tcx.expr_is_lval(&lhs) { + span_err!(tcx.sess, expr.span, E0070, + "invalid left-hand side expression"); + } - fcx.require_expr_have_sized_type(&lhs, traits::AssignmentLhsSized); + let lhs_ty = self.expr_ty(&lhs); + self.check_expr_coercable_to_type(&rhs, lhs_ty); + let rhs_ty = self.expr_ty(&rhs); - if lhs_ty.references_error() || rhs_ty.references_error() { - fcx.write_error(id); - } else { - fcx.write_nil(id); - } - } - hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => { - check_then_else(fcx, &cond, &then_blk, opt_else_expr.as_ref().map(|e| &**e), - id, expr.span, expected); - } - hir::ExprWhile(ref cond, ref body, _) => { - check_expr_has_type(fcx, &cond, tcx.types.bool); - check_block_no_value(fcx, &body); - let cond_ty = fcx.expr_ty(&cond); - let body_ty = fcx.node_ty(body.id); - if cond_ty.references_error() || body_ty.references_error() { - fcx.write_error(id); - } - else { - fcx.write_nil(id); - } - } - hir::ExprLoop(ref body, _) => { - check_block_no_value(fcx, 
&body); - if !may_break(tcx, expr.id, &body) { - fcx.write_ty(id, fcx.infcx().next_diverging_ty_var()); - } else { - fcx.write_nil(id); - } - } - hir::ExprMatch(ref discrim, ref arms, match_src) => { - _match::check_match(fcx, expr, &discrim, arms, expected, match_src); - } - hir::ExprClosure(capture, ref decl, ref body) => { - closure::check_expr_closure(fcx, expr, capture, &decl, &body, expected); - } - hir::ExprBlock(ref b) => { - check_block_with_expected(fcx, &b, expected); - fcx.write_ty(id, fcx.node_ty(b.id)); - } - hir::ExprCall(ref callee, ref args) => { - callee::check_call(fcx, expr, &callee, &args[..], expected); + self.require_expr_have_sized_type(&lhs, traits::AssignmentLhsSized); - // we must check that return type of called functions is WF: - let ret_ty = fcx.expr_ty(expr); - fcx.register_wf_obligation(ret_ty, expr.span, traits::MiscObligation); - } - hir::ExprMethodCall(name, ref tps, ref args) => { - check_method_call(fcx, expr, name, &args[..], &tps[..], expected, lvalue_pref); - let arg_tys = args.iter().map(|a| fcx.expr_ty(&a)); - let args_err = arg_tys.fold(false, |rest_err, a| rest_err || a.references_error()); - if args_err { - fcx.write_error(id); + if lhs_ty.references_error() || rhs_ty.references_error() { + self.write_error(id); + } else { + self.write_nil(id); + } } - } - hir::ExprCast(ref e, ref t) => { - if let hir::TyFixedLengthVec(_, ref count_expr) = t.node { - check_expr_with_hint(fcx, &count_expr, tcx.types.usize); - } - - // Find the type of `e`. Supply hints based on the type we are casting to, - // if appropriate. - let t_cast = fcx.to_ty(t); - let t_cast = structurally_resolved_type(fcx, expr.span, t_cast); - check_expr_with_expectation(fcx, e, ExpectCastableToType(t_cast)); - let t_expr = fcx.expr_ty(e); - let t_cast = fcx.infcx().resolve_type_vars_if_possible(&t_cast); - - // Eagerly check for some obvious errors. - if t_expr.references_error() || t_cast.references_error() { - fcx.write_error(id); - } else if !fcx.type_is_known_to_be_sized(t_cast, expr.span) { - report_cast_to_unsized_type(fcx, expr.span, t.span, e.span, t_cast, t_expr, id); - } else { - // Write a type for the whole expression, assuming everything is going - // to work out Ok. - fcx.write_ty(id, t_cast); - - // Defer other checks until we're done type checking. 
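For illustration only (not part of the patch), a minimal user-level sketch of the casts this hunk defers to the cast checker, and the unsized cast target it rejects eagerly:

    fn main() {
        let x = 3.9_f32 as u8;      // ordinary numeric cast; its validity is verified by the deferred cast check
        let _ = x;
        // let s = "hello" as str;  // rejected up front: `str` is unsized, so it cannot be a cast target
    }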
- let mut deferred_cast_checks = fcx.inh.deferred_cast_checks.borrow_mut(); - let cast_check = cast::CastCheck::new(e, t_expr, t_cast, expr.span); - deferred_cast_checks.push(cast_check); - } - } - hir::ExprType(ref e, ref t) => { - let typ = fcx.to_ty(&t); - check_expr_eq_type(fcx, &e, typ); - fcx.write_ty(id, typ); - } - hir::ExprVec(ref args) => { - let uty = expected.to_option(fcx).and_then(|uty| { - match uty.sty { - ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty), - _ => None + hir::ExprIf(ref cond, ref then_blk, ref opt_else_expr) => { + self.check_then_else(&cond, &then_blk, opt_else_expr.as_ref().map(|e| &**e), + id, expr.span, expected); + } + hir::ExprWhile(ref cond, ref body, _) => { + self.check_expr_has_type(&cond, tcx.types.bool); + self.check_block_no_value(&body); + let cond_ty = self.expr_ty(&cond); + let body_ty = self.node_ty(body.id); + if cond_ty.references_error() || body_ty.references_error() { + self.write_error(id); } - }); - - let mut unified = fcx.infcx().next_ty_var(); - let coerce_to = uty.unwrap_or(unified); + else { + self.write_nil(id); + } + } + hir::ExprLoop(ref body, _) => { + self.check_block_no_value(&body); + if !may_break(tcx, expr.id, &body) { + self.write_ty(id, self.next_diverging_ty_var()); + } else { + self.write_nil(id); + } + } + hir::ExprMatch(ref discrim, ref arms, match_src) => { + self.check_match(expr, &discrim, arms, expected, match_src); + } + hir::ExprClosure(capture, ref decl, ref body, _) => { + self.check_expr_closure(expr, capture, &decl, &body, expected); + } + hir::ExprBlock(ref b) => { + self.check_block_with_expected(&b, expected); + self.write_ty(id, self.node_ty(b.id)); + } + hir::ExprCall(ref callee, ref args) => { + self.check_call(expr, &callee, &args[..], expected); - for (i, e) in args.iter().enumerate() { - check_expr_with_hint(fcx, e, coerce_to); - let e_ty = fcx.expr_ty(e); - let origin = TypeOrigin::Misc(e.span); + // we must check that return type of called functions is WF: + let ret_ty = self.expr_ty(expr); + self.register_wf_obligation(ret_ty, expr.span, traits::MiscObligation); + } + hir::ExprMethodCall(name, ref tps, ref args) => { + self.check_method_call(expr, name, &args[..], &tps[..], expected, lvalue_pref); + let arg_tys = args.iter().map(|a| self.expr_ty(&a)); + let args_err = arg_tys.fold(false, |rest_err, a| rest_err || a.references_error()); + if args_err { + self.write_error(id); + } + } + hir::ExprCast(ref e, ref t) => { + if let hir::TyFixedLengthVec(_, ref count_expr) = t.node { + self.check_expr_with_hint(&count_expr, tcx.types.usize); + } - // Special-case the first element, as it has no "previous expressions". - let result = if i == 0 { - coercion::try(fcx, e, coerce_to) + // Find the type of `e`. Supply hints based on the type we are casting to, + // if appropriate. + let t_cast = self.to_ty(t); + let t_cast = self.resolve_type_vars_if_possible(&t_cast); + self.check_expr_with_expectation(e, ExpectCastableToType(t_cast)); + let t_expr = self.expr_ty(e); + let t_cast = self.resolve_type_vars_if_possible(&t_cast); + + // Eagerly check for some obvious errors. + if t_expr.references_error() || t_cast.references_error() { + self.write_error(id); } else { - let prev_elems = || args[..i].iter().map(|e| &**e); - coercion::try_find_lub(fcx, origin, prev_elems, unified, e) - }; - - match result { - Ok(ty) => unified = ty, - Err(e) => { - fcx.infcx().report_mismatched_types(origin, unified, e_ty, e); + // Write a type for the whole expression, assuming everything is going + // to work out Ok. 
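As an aside (illustrative, not part of the patch), the element unification and coercion performed for array literals in the `hir::ExprVec` arm above looks like this from the user's side:

    fn main() {
        let s = String::from("hi");
        // With an expected element type, each element is checked against it and may coerce:
        let xs: [&str; 2] = [&s, "world"];   // `&String` coerces to `&str`
        // Without one, elements are unified with each other (the first is special-cased as noted above):
        let ys = [1, 2, 3u8];                // earlier literals adopt `u8`
        let _ = (xs, ys);
    }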
+ self.write_ty(id, t_cast); + + // Defer other checks until we're done type checking. + let mut deferred_cast_checks = self.deferred_cast_checks.borrow_mut(); + match cast::CastCheck::new(self, e, t_expr, t_cast, t.span, expr.span) { + Ok(cast_check) => { + deferred_cast_checks.push(cast_check); + } + Err(ErrorReported) => { + self.write_error(id); + } } } - } - fcx.write_ty(id, tcx.mk_array(unified, args.len())); - } - hir::ExprRepeat(ref element, ref count_expr) => { - check_expr_has_type(fcx, &count_expr, tcx.types.usize); - let count = eval_repeat_count(fcx.tcx(), &count_expr); - - let uty = match expected { - ExpectHasType(uty) => { + } + hir::ExprType(ref e, ref t) => { + let typ = self.to_ty(&t); + self.check_expr_eq_type(&e, typ); + self.write_ty(id, typ); + } + hir::ExprVec(ref args) => { + let uty = expected.to_option(self).and_then(|uty| { match uty.sty { ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty), _ => None } - } - _ => None - }; + }); - let (element_ty, t) = match uty { - Some(uty) => { - check_expr_coercable_to_type(fcx, &element, uty); - (uty, uty) - } - None => { - let t: Ty = fcx.infcx().next_ty_var(); - check_expr_has_type(fcx, &element, t); - (fcx.expr_ty(&element), t) - } - }; + let mut unified = self.next_ty_var(); + let coerce_to = uty.unwrap_or(unified); - if count > 1 { - // For [foo, ..n] where n > 1, `foo` must have - // Copy type: - fcx.require_type_meets( - t, - expr.span, - traits::RepeatVec, - ty::BoundCopy); - } + for (i, e) in args.iter().enumerate() { + self.check_expr_with_hint(e, coerce_to); + let e_ty = self.expr_ty(e); + let origin = TypeOrigin::Misc(e.span); - if element_ty.references_error() { - fcx.write_error(id); - } else { - let t = tcx.mk_array(t, count); - fcx.write_ty(id, t); - } - } - hir::ExprTup(ref elts) => { - let flds = expected.only_has_type(fcx).and_then(|ty| { - match ty.sty { - ty::TyTuple(ref flds) => Some(&flds[..]), - _ => None - } - }); - let mut err_field = false; - - let elt_ts = elts.iter().enumerate().map(|(i, e)| { - let t = match flds { - Some(ref fs) if i < fs.len() => { - let ety = fs[i]; - check_expr_coercable_to_type(fcx, &e, ety); - ety + // Special-case the first element, as it has no "previous expressions". 
+ let result = if i == 0 { + self.try_coerce(e, coerce_to) + } else { + let prev_elems = || args[..i].iter().map(|e| &**e); + self.try_find_coercion_lub(origin, prev_elems, unified, e) + }; + + match result { + Ok(ty) => unified = ty, + Err(e) => { + self.report_mismatched_types(origin, unified, e_ty, e); + } } - _ => { - check_expr_with_expectation(fcx, &e, NoExpectation); - fcx.expr_ty(&e) + } + self.write_ty(id, tcx.mk_array(unified, args.len())); + } + hir::ExprRepeat(ref element, ref count_expr) => { + self.check_expr_has_type(&count_expr, tcx.types.usize); + let count = eval_repeat_count(self.tcx.global_tcx(), &count_expr); + + let uty = match expected { + ExpectHasType(uty) => { + match uty.sty { + ty::TyArray(ty, _) | ty::TySlice(ty) => Some(ty), + _ => None + } } + _ => None }; - err_field = err_field || t.references_error(); - t - }).collect(); - if err_field { - fcx.write_error(id); - } else { - let typ = tcx.mk_tup(elt_ts); - fcx.write_ty(id, typ); - } - } - hir::ExprStruct(ref path, ref fields, ref base_expr) => { - check_expr_struct(fcx, expr, path, fields, base_expr); - - fcx.require_expr_have_sized_type(expr, traits::StructInitializerSized); - } - hir::ExprField(ref base, ref field) => { - check_field(fcx, expr, lvalue_pref, &base, field); - } - hir::ExprTupField(ref base, idx) => { - check_tup_field(fcx, expr, lvalue_pref, &base, idx); - } - hir::ExprIndex(ref base, ref idx) => { - check_expr_with_lvalue_pref(fcx, &base, lvalue_pref); - check_expr(fcx, &idx); - - let base_t = fcx.expr_ty(&base); - let idx_t = fcx.expr_ty(&idx); - - if base_t.references_error() { - fcx.write_ty(id, base_t); - } else if idx_t.references_error() { - fcx.write_ty(id, idx_t); - } else { - let base_t = structurally_resolved_type(fcx, expr.span, base_t); - match lookup_indexing(fcx, expr, base, base_t, idx_t, lvalue_pref) { - Some((index_ty, element_ty)) => { - let idx_expr_ty = fcx.expr_ty(idx); - demand::eqtype(fcx, expr.span, index_ty, idx_expr_ty); - fcx.write_ty(id, element_ty); - } - None => { - check_expr_has_type(fcx, &idx, fcx.tcx().types.err); - fcx.type_error_message( - expr.span, - |actual| { - format!("cannot index a value of type `{}`", - actual) - }, - base_t, - None); - fcx.write_ty(id, fcx.tcx().types.err); - } - } - } - } - } - debug!("type of expr({}) {} is...", expr.id, - pprust::expr_to_string(expr)); - debug!("... {:?}, expected is {:?}", - fcx.expr_ty(expr), - expected); -} + let (element_ty, t) = match uty { + Some(uty) => { + self.check_expr_coercable_to_type(&element, uty); + (uty, uty) + } + None => { + let t: Ty = self.next_ty_var(); + self.check_expr_has_type(&element, t); + (self.expr_ty(&element), t) + } + }; -pub fn resolve_ty_and_def_ufcs<'a, 'b, 'tcx>(fcx: &FnCtxt<'b, 'tcx>, - path_res: def::PathResolution, - opt_self_ty: Option>, - path: &'a hir::Path, - span: Span, - node_id: ast::NodeId) - -> Option<(Option>, - &'a [hir::PathSegment], - Def)> -{ + if count > 1 { + // For [foo, ..n] where n > 1, `foo` must have + // Copy type: + self.require_type_meets(t, expr.span, traits::RepeatVec, ty::BoundCopy); + } - // If fully resolved already, we don't have to do anything. 
- if path_res.depth == 0 { - Some((opt_self_ty, &path.segments, path_res.base_def)) - } else { - let mut def = path_res.base_def; - let ty_segments = path.segments.split_last().unwrap().1; - let base_ty_end = path.segments.len() - path_res.depth; - let ty = astconv::finish_resolving_def_to_ty(fcx, fcx, span, - PathParamMode::Optional, - &mut def, - opt_self_ty, - &ty_segments[..base_ty_end], - &ty_segments[base_ty_end..]); - let item_segment = path.segments.last().unwrap(); - let item_name = item_segment.identifier.name; - let def = match method::resolve_ufcs(fcx, span, item_name, ty, node_id) { - Ok(def) => Some(def), - Err(error) => { - let def = match error { - method::MethodError::PrivateMatch(def) => Some(def), - _ => None, - }; - if item_name != special_idents::invalid.name { - method::report_error(fcx, span, ty, item_name, None, error); + if element_ty.references_error() { + self.write_error(id); + } else { + let t = tcx.mk_array(t, count); + self.write_ty(id, t); + } + } + hir::ExprTup(ref elts) => { + let flds = expected.only_has_type(self).and_then(|ty| { + match ty.sty { + ty::TyTuple(ref flds) => Some(&flds[..]), + _ => None } - def + }); + let mut err_field = false; + + let elt_ts = elts.iter().enumerate().map(|(i, e)| { + let t = match flds { + Some(ref fs) if i < fs.len() => { + let ety = fs[i]; + self.check_expr_coercable_to_type(&e, ety); + ety + } + _ => { + self.check_expr_with_expectation(&e, NoExpectation); + self.expr_ty(&e) + } + }; + err_field = err_field || t.references_error(); + t + }).collect(); + if err_field { + self.write_error(id); + } else { + let typ = tcx.mk_tup(elt_ts); + self.write_ty(id, typ); } - }; + } + hir::ExprStruct(ref path, ref fields, ref base_expr) => { + self.check_expr_struct(expr, path, fields, base_expr); - if let Some(def) = def { - // Write back the new resolution. - fcx.ccx.tcx.def_map.borrow_mut().insert(node_id, def::PathResolution { - base_def: def, - depth: 0, - }); - Some((Some(ty), slice::ref_slice(item_segment), def)) - } else { - fcx.write_error(node_id); - None - } - } -} + self.require_expr_have_sized_type(expr, traits::StructInitializerSized); + } + hir::ExprField(ref base, ref field) => { + self.check_field(expr, lvalue_pref, &base, field); + } + hir::ExprTupField(ref base, idx) => { + self.check_tup_field(expr, lvalue_pref, &base, idx); + } + hir::ExprIndex(ref base, ref idx) => { + self.check_expr_with_lvalue_pref(&base, lvalue_pref); + self.check_expr(&idx); -impl<'tcx> Expectation<'tcx> { - /// Provide an expectation for an rvalue expression given an *optional* - /// hint, which is not required for type safety (the resulting type might - /// be checked higher up, as is the case with `&expr` and `box expr`), but - /// is useful in determining the concrete type. - /// - /// The primary use case is where the expected type is a fat pointer, - /// like `&[isize]`. For example, consider the following statement: - /// - /// let x: &[isize] = &[1, 2, 3]; - /// - /// In this case, the expected type for the `&[1, 2, 3]` expression is - /// `&[isize]`. If however we were to say that `[1, 2, 3]` has the - /// expectation `ExpectHasType([isize])`, that would be too strong -- - /// `[1, 2, 3]` does not have the type `[isize]` but rather `[isize; 3]`. - /// It is only the `&[1, 2, 3]` expression as a whole that can be coerced - /// to the type `&[isize]`. Therefore, we propagate this more limited hint, - /// which still is useful, because it informs integer literals and the like. 
- /// See the test case `test/run-pass/coerce-expect-unsized.rs` and #20169 - /// for examples of where this comes up,. - fn rvalue_hint(tcx: &TyCtxt<'tcx>, ty: Ty<'tcx>) -> Expectation<'tcx> { - match tcx.struct_tail(ty).sty { - ty::TySlice(_) | ty::TyStr | ty::TyTrait(..) => { - ExpectRvalueLikeUnsized(ty) - } - _ => ExpectHasType(ty) - } - } + let base_t = self.expr_ty(&base); + let idx_t = self.expr_ty(&idx); - // Resolves `expected` by a single level if it is a variable. If - // there is no expected type or resolution is not possible (e.g., - // no constraints yet present), just returns `None`. - fn resolve<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Expectation<'tcx> { - match self { - NoExpectation => { - NoExpectation - } - ExpectCastableToType(t) => { - ExpectCastableToType( - fcx.infcx().resolve_type_vars_if_possible(&t)) - } - ExpectHasType(t) => { - ExpectHasType( - fcx.infcx().resolve_type_vars_if_possible(&t)) - } - ExpectRvalueLikeUnsized(t) => { - ExpectRvalueLikeUnsized( - fcx.infcx().resolve_type_vars_if_possible(&t)) - } + if base_t.references_error() { + self.write_ty(id, base_t); + } else if idx_t.references_error() { + self.write_ty(id, idx_t); + } else { + let base_t = self.structurally_resolved_type(expr.span, base_t); + match self.lookup_indexing(expr, base, base_t, idx_t, lvalue_pref) { + Some((index_ty, element_ty)) => { + let idx_expr_ty = self.expr_ty(idx); + self.demand_eqtype(expr.span, index_ty, idx_expr_ty); + self.write_ty(id, element_ty); + } + None => { + self.check_expr_has_type(&idx, self.tcx.types.err); + let mut err = self.type_error_struct( + expr.span, + |actual| { + format!("cannot index a value of type `{}`", + actual) + }, + base_t, + None); + // Try to give some advice about indexing tuples. + if let ty::TyTuple(_) = base_t.sty { + let mut needs_note = true; + // If the index is an integer, we can show the actual + // fixed expression: + if let hir::ExprLit(ref lit) = idx.node { + if let ast::LitKind::Int(i, + ast::LitIntType::Unsuffixed) = lit.node { + let snip = tcx.sess.codemap().span_to_snippet(base.span); + if let Ok(snip) = snip { + err.span_suggestion(expr.span, + "to access tuple elements, \ + use tuple indexing syntax \ + as shown", + format!("{}.{}", snip, i)); + needs_note = false; + } + } + } + if needs_note { + err.help("to access tuple elements, use tuple indexing \ + syntax (e.g. `tuple.0`)"); + } + } + err.emit(); + self.write_ty(id, self.tcx().types.err); + } + } + } + } } - } - fn to_option<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option> { - match self.resolve(fcx) { - NoExpectation => None, - ExpectCastableToType(ty) | - ExpectHasType(ty) | - ExpectRvalueLikeUnsized(ty) => Some(ty), - } + debug!("type of expr({}) {} is...", expr.id, + pprust::expr_to_string(expr)); + debug!("... {:?}, expected is {:?}", + self.expr_ty(expr), + expected); } - fn only_has_type<'a>(self, fcx: &FnCtxt<'a, 'tcx>) -> Option> { - match self.resolve(fcx) { - ExpectHasType(ty) => Some(ty), - _ => None + pub fn resolve_ty_and_def_ufcs<'b>(&self, + path_res: def::PathResolution, + opt_self_ty: Option>, + path: &'b hir::Path, + span: Span, + node_id: ast::NodeId) + -> Option<(Option>, &'b [hir::PathSegment], Def)> + { + + // If fully resolved already, we don't have to do anything. 
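For reference (illustrative, not part of the patch), these are the kinds of partially resolved paths that `resolve_ty_and_def_ufcs` finishes resolving:

    use std::str::FromStr;

    fn main() {
        // Fully qualified (UFCS) form: the type and trait segments are resolved first,
        // then the final segment is looked up as an associated item.
        let a = <u32 as FromStr>::from_str("7").unwrap();
        // `T::item` sugar reaches the same code path with `depth > 0`.
        let b = u32::from_str("8").unwrap();
        assert_eq!(a + b, 15);
    }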
+ if path_res.depth == 0 { + Some((opt_self_ty, &path.segments, path_res.base_def)) + } else { + let def = path_res.base_def; + let ty_segments = path.segments.split_last().unwrap().1; + let base_ty_end = path.segments.len() - path_res.depth; + let (ty, _def) = AstConv::finish_resolving_def_to_ty(self, self, span, + PathParamMode::Optional, + def, + opt_self_ty, + node_id, + &ty_segments[..base_ty_end], + &ty_segments[base_ty_end..]); + let item_segment = path.segments.last().unwrap(); + let item_name = item_segment.name; + let def = match self.resolve_ufcs(span, item_name, ty, node_id) { + Ok(def) => Some(def), + Err(error) => { + let def = match error { + method::MethodError::PrivateMatch(def) => Some(def), + _ => None, + }; + if item_name != keywords::Invalid.name() { + self.report_method_error(span, ty, item_name, None, error); + } + def + } + }; + + if let Some(def) = def { + // Write back the new resolution. + self.tcx().def_map.borrow_mut().insert(node_id, def::PathResolution { + base_def: def, + depth: 0, + }); + Some((Some(ty), slice::ref_slice(item_segment), def)) + } else { + self.write_error(node_id); + None + } } } -} -pub fn check_decl_initializer<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, - local: &'tcx hir::Local, - init: &'tcx hir::Expr) -{ - let ref_bindings = fcx.tcx().pat_contains_ref_binding(&local.pat); - - let local_ty = fcx.local_ty(init.span, local.id); - if let Some(m) = ref_bindings { - // Somewhat subtle: if we have a `ref` binding in the pattern, - // we want to avoid introducing coercions for the RHS. This is - // both because it helps preserve sanity and, in the case of - // ref mut, for soundness (issue #23116). In particular, in - // the latter case, we need to be clear that the type of the - // referent for the reference that results is *equal to* the - // type of the lvalue it is referencing, and not some - // supertype thereof. - check_expr_with_lvalue_pref(fcx, init, LvaluePreference::from_mutbl(m)); - let init_ty = fcx.expr_ty(init); - demand::eqtype(fcx, init.span, init_ty, local_ty); - } else { - check_expr_coercable_to_type(fcx, init, local_ty) - }; -} + pub fn check_decl_initializer(&self, + local: &'gcx hir::Local, + init: &'gcx hir::Expr) + { + let ref_bindings = self.tcx.pat_contains_ref_binding(&local.pat); + + let local_ty = self.local_ty(init.span, local.id); + if let Some(m) = ref_bindings { + // Somewhat subtle: if we have a `ref` binding in the pattern, + // we want to avoid introducing coercions for the RHS. This is + // both because it helps preserve sanity and, in the case of + // ref mut, for soundness (issue #23116). In particular, in + // the latter case, we need to be clear that the type of the + // referent for the reference that results is *equal to* the + // type of the lvalue it is referencing, and not some + // supertype thereof. 
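A short sketch (illustrative, not part of the patch) of what this rule means for `ref` bindings: under the code above, a `ref` pattern demands type equality for the initializer rather than a coercion.

    fn main() {
        // Plain binding: the initializer may coerce to the declared type.
        let a: &[i32] = &[1, 2, 3];          // `&[i32; 3]` coerces to `&[i32]`
        // `ref` binding: eqtype is demanded, so the same initializer is
        // rejected under this rule, since `&[i32; 3]` is not equal to `&[i32]`.
        // let ref b: &[i32] = &[1, 2, 3];
        let _ = a;
    }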
+ self.check_expr_with_lvalue_pref(init, LvaluePreference::from_mutbl(m)); + let init_ty = self.expr_ty(init); + self.demand_eqtype(init.span, init_ty, local_ty); + } else { + self.check_expr_coercable_to_type(init, local_ty) + }; + } -pub fn check_decl_local<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, local: &'tcx hir::Local) { - let tcx = fcx.ccx.tcx; + pub fn check_decl_local(&self, local: &'gcx hir::Local) { + let tcx = self.tcx; - let t = fcx.local_ty(local.span, local.id); - fcx.write_ty(local.id, t); + let t = self.local_ty(local.span, local.id); + self.write_ty(local.id, t); - if let Some(ref init) = local.init { - check_decl_initializer(fcx, local, &init); - let init_ty = fcx.expr_ty(&init); - if init_ty.references_error() { - fcx.write_ty(local.id, init_ty); + if let Some(ref init) = local.init { + self.check_decl_initializer(local, &init); + let init_ty = self.expr_ty(&init); + if init_ty.references_error() { + self.write_ty(local.id, init_ty); + } } - } - - let pcx = pat_ctxt { - fcx: fcx, - map: pat_id_map(&tcx.def_map, &local.pat), - }; - _match::check_pat(&pcx, &local.pat, t); - let pat_ty = fcx.node_ty(local.pat.id); - if pat_ty.references_error() { - fcx.write_ty(local.id, pat_ty); - } -} -pub fn check_stmt<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, stmt: &'tcx hir::Stmt) { - let node_id; - let mut saw_bot = false; - let mut saw_err = false; - match stmt.node { - hir::StmtDecl(ref decl, id) => { - node_id = id; - match decl.node { - hir::DeclLocal(ref l) => { - check_decl_local(fcx, &l); - let l_t = fcx.node_ty(l.id); - saw_bot = saw_bot || fcx.infcx().type_var_diverges(l_t); - saw_err = saw_err || l_t.references_error(); + let pcx = PatCtxt { + fcx: self, + map: pat_id_map(&tcx.def_map, &local.pat), + }; + pcx.check_pat(&local.pat, t); + let pat_ty = self.node_ty(local.pat.id); + if pat_ty.references_error() { + self.write_ty(local.id, pat_ty); + } + } + + pub fn check_stmt(&self, stmt: &'gcx hir::Stmt) { + let node_id; + let mut saw_bot = false; + let mut saw_err = false; + match stmt.node { + hir::StmtDecl(ref decl, id) => { + node_id = id; + match decl.node { + hir::DeclLocal(ref l) => { + self.check_decl_local(&l); + let l_t = self.node_ty(l.id); + saw_bot = saw_bot || self.type_var_diverges(l_t); + saw_err = saw_err || l_t.references_error(); + } + hir::DeclItem(_) => {/* ignore for now */ } + } + } + hir::StmtExpr(ref expr, id) => { + node_id = id; + // Check with expected type of () + self.check_expr_has_type(&expr, self.tcx.mk_nil()); + let expr_ty = self.expr_ty(&expr); + saw_bot = saw_bot || self.type_var_diverges(expr_ty); + saw_err = saw_err || expr_ty.references_error(); + } + hir::StmtSemi(ref expr, id) => { + node_id = id; + self.check_expr(&expr); + let expr_ty = self.expr_ty(&expr); + saw_bot |= self.type_var_diverges(expr_ty); + saw_err |= expr_ty.references_error(); } - hir::DeclItem(_) => {/* ignore for now */ } } - } - hir::StmtExpr(ref expr, id) => { - node_id = id; - // Check with expected type of () - check_expr_has_type(fcx, &expr, fcx.tcx().mk_nil()); - let expr_ty = fcx.expr_ty(&expr); - saw_bot = saw_bot || fcx.infcx().type_var_diverges(expr_ty); - saw_err = saw_err || expr_ty.references_error(); - } - hir::StmtSemi(ref expr, id) => { - node_id = id; - check_expr(fcx, &expr); - let expr_ty = fcx.expr_ty(&expr); - saw_bot |= fcx.infcx().type_var_diverges(expr_ty); - saw_err |= expr_ty.references_error(); - } - } - if saw_bot { - fcx.write_ty(node_id, fcx.infcx().next_diverging_ty_var()); - } - else if saw_err { - fcx.write_error(node_id); - } - else { - 
fcx.write_nil(node_id) + if saw_bot { + self.write_ty(node_id, self.next_diverging_ty_var()); + } + else if saw_err { + self.write_error(node_id); + } + else { + self.write_nil(node_id) + } } -} -pub fn check_block_no_value<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, blk: &'tcx hir::Block) { - check_block_with_expected(fcx, blk, ExpectHasType(fcx.tcx().mk_nil())); - let blkty = fcx.node_ty(blk.id); - if blkty.references_error() { - fcx.write_error(blk.id); - } else { - let nilty = fcx.tcx().mk_nil(); - demand::suptype(fcx, blk.span, nilty, blkty); + pub fn check_block_no_value(&self, blk: &'gcx hir::Block) { + self.check_block_with_expected(blk, ExpectHasType(self.tcx.mk_nil())); + let blkty = self.node_ty(blk.id); + if blkty.references_error() { + self.write_error(blk.id); + } else { + let nilty = self.tcx.mk_nil(); + self.demand_suptype(blk.span, nilty, blkty); + } } -} -fn check_block_with_expected<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - blk: &'tcx hir::Block, - expected: Expectation<'tcx>) { - let prev = { - let mut fcx_ps = fcx.ps.borrow_mut(); - let unsafety_state = fcx_ps.recurse(blk); - replace(&mut *fcx_ps, unsafety_state) - }; + fn check_block_with_expected(&self, + blk: &'gcx hir::Block, + expected: Expectation<'tcx>) { + let prev = { + let mut fcx_ps = self.ps.borrow_mut(); + let unsafety_state = fcx_ps.recurse(blk); + replace(&mut *fcx_ps, unsafety_state) + }; - let mut warned = false; - let mut any_diverges = false; - let mut any_err = false; - for s in &blk.stmts { - check_stmt(fcx, s); - let s_id = s.node.id(); - let s_ty = fcx.node_ty(s_id); - if any_diverges && !warned && match s.node { - hir::StmtDecl(ref decl, _) => { - match decl.node { - hir::DeclLocal(_) => true, - _ => false, + let mut warned = false; + let mut any_diverges = false; + let mut any_err = false; + for s in &blk.stmts { + self.check_stmt(s); + let s_id = s.node.id(); + let s_ty = self.node_ty(s_id); + if any_diverges && !warned && match s.node { + hir::StmtDecl(ref decl, _) => { + match decl.node { + hir::DeclLocal(_) => true, + _ => false, + } } - } - hir::StmtExpr(_, _) | hir::StmtSemi(_, _) => true, - } { - fcx.ccx - .tcx - .sess - .add_lint(lint::builtin::UNREACHABLE_CODE, - s_id, - s.span, - "unreachable statement".to_string()); - warned = true; - } - any_diverges = any_diverges || fcx.infcx().type_var_diverges(s_ty); - any_err = any_err || s_ty.references_error(); - } - match blk.expr { - None => if any_err { - fcx.write_error(blk.id); - } else if any_diverges { - fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var()); - } else { - fcx.write_nil(blk.id); - }, - Some(ref e) => { - if any_diverges && !warned { - fcx.ccx - .tcx + hir::StmtExpr(_, _) | hir::StmtSemi(_, _) => true, + } { + self.tcx .sess .add_lint(lint::builtin::UNREACHABLE_CODE, - e.id, - e.span, - "unreachable expression".to_string()); + s_id, + s.span, + "unreachable statement".to_string()); + warned = true; } - let ety = match expected { - ExpectHasType(ety) => { - check_expr_coercable_to_type(fcx, &e, ety); - ety - } - _ => { - check_expr_with_expectation(fcx, &e, expected); - fcx.expr_ty(&e) - } - }; - - if any_err { - fcx.write_error(blk.id); + any_diverges = any_diverges || self.type_var_diverges(s_ty); + any_err = any_err || s_ty.references_error(); + } + match blk.expr { + None => if any_err { + self.write_error(blk.id); } else if any_diverges { - fcx.write_ty(blk.id, fcx.infcx().next_diverging_ty_var()); + self.write_ty(blk.id, self.next_diverging_ty_var()); } else { - fcx.write_ty(blk.id, ety); - } - } - }; - - *fcx.ps.borrow_mut() = 
prev; -} + self.write_nil(blk.id); + }, + Some(ref e) => { + if any_diverges && !warned { + self.tcx + .sess + .add_lint(lint::builtin::UNREACHABLE_CODE, + e.id, + e.span, + "unreachable expression".to_string()); + } + let ety = match expected { + ExpectHasType(ety) => { + self.check_expr_coercable_to_type(&e, ety); + ety + } + _ => { + self.check_expr_with_expectation(&e, expected); + self.expr_ty(&e) + } + }; -/// Checks a constant appearing in a type. At the moment this is just the -/// length expression in a fixed-length vector, but someday it might be -/// extended to type-level numeric literals. -fn check_const_in_type<'a,'tcx>(ccx: &'a CrateCtxt<'a,'tcx>, - expr: &'tcx hir::Expr, - expected_type: Ty<'tcx>) { - let tables = RefCell::new(ty::Tables::empty()); - let inh = static_inherited_fields(ccx, &tables); - let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(expected_type), expr.id); - check_const_with_ty(&fcx, expr.span, expr, expected_type); -} + if any_err { + self.write_error(blk.id); + } else if any_diverges { + self.write_ty(blk.id, self.next_diverging_ty_var()); + } else { + self.write_ty(blk.id, ety); + } + } + }; -fn check_const<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, - sp: Span, - e: &'tcx hir::Expr, - id: ast::NodeId) { - let tables = RefCell::new(ty::Tables::empty()); - let inh = static_inherited_fields(ccx, &tables); - let rty = ccx.tcx.node_id_to_type(id); - let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), e.id); - let declty = fcx.ccx.tcx.lookup_item_type(ccx.tcx.map.local_def_id(id)).ty; - check_const_with_ty(&fcx, sp, e, declty); -} + *self.ps.borrow_mut() = prev; + } -fn check_const_with_ty<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - _: Span, - e: &'tcx hir::Expr, - declty: Ty<'tcx>) { - // Gather locals in statics (because of block expressions). - // This is technically unnecessary because locals in static items are forbidden, - // but prevents type checking from blowing up before const checking can properly - // emit an error. - GatherLocalsVisitor { fcx: fcx }.visit_expr(e); - - check_expr_with_hint(fcx, e, declty); - demand::coerce(fcx, e.span, declty, e); - - fcx.select_all_obligations_and_apply_defaults(); - upvar::closure_analyze_const(&fcx, e); - fcx.select_obligations_where_possible(); - fcx.check_casts(); - fcx.select_all_obligations_or_error(); - - regionck::regionck_expr(fcx, e); - writeback::resolve_type_vars_in_expr(fcx, e); -} -/// Checks whether a type can be represented in memory. In particular, it -/// identifies types that contain themselves without indirection through a -/// pointer, which would mean their size is unbounded. -pub fn check_representable(tcx: &TyCtxt, - sp: Span, - item_id: ast::NodeId, - _designation: &str) -> bool { - let rty = tcx.node_id_to_type(item_id); + fn check_const_with_ty(&self, + _: Span, + e: &'gcx hir::Expr, + declty: Ty<'tcx>) { + // Gather locals in statics (because of block expressions). + // This is technically unnecessary because locals in static items are forbidden, + // but prevents type checking from blowing up before const checking can properly + // emit an error. + GatherLocalsVisitor { fcx: self }.visit_expr(e); - // Check that it is possible to represent this type. This call identifies - // (1) types that contain themselves and (2) types that contain a different - // recursive type. It is only necessary to throw an error on those that - // contain themselves. For case 2, there must be an inner type that will be - // caught by case 1. 
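For context (illustrative, not part of the patch), the representability check above corresponds to the familiar infinite-size recursive type error:

    // Self-recursive without indirection: the size would be unbounded, so case (1) above fires.
    // struct List { value: i32, next: List }              // error: recursive type has infinite size
    // With indirection through a pointer the type is representable:
    struct List { value: i32, next: Option<Box<List>> }

    fn main() {
        let l = List { value: 1, next: None };
        let _ = l.value;
    }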
- match rty.is_representable(tcx, sp) { - Representability::SelfRecursive => { - let item_def_id = tcx.map.local_def_id(item_id); - traits::recursive_type_with_infinite_size_error(tcx, item_def_id).emit(); - return false - } - Representability::Representable | Representability::ContainsRecursive => (), - } - return true -} + self.check_expr_coercable_to_type(e, declty); -pub fn check_simd(tcx: &TyCtxt, sp: Span, id: ast::NodeId) { - let t = tcx.node_id_to_type(id); - match t.sty { - ty::TyStruct(def, substs) => { - let fields = &def.struct_variant().fields; - if fields.is_empty() { - span_err!(tcx.sess, sp, E0075, "SIMD vector cannot be empty"); - return; + self.select_all_obligations_and_apply_defaults(); + self.closure_analyze_const(e); + self.select_obligations_where_possible(); + self.check_casts(); + self.select_all_obligations_or_error(); + + self.regionck_expr(e); + self.resolve_type_vars_in_expr(e); + } + + // Returns the type parameter count and the type for the given definition. + fn type_scheme_and_predicates_for_def(&self, + sp: Span, + defn: Def) + -> (TypeScheme<'tcx>, GenericPredicates<'tcx>) { + match defn { + Def::Local(_, nid) | Def::Upvar(_, nid, _, _) => { + let typ = self.local_ty(sp, nid); + (ty::TypeScheme { generics: ty::Generics::empty(), ty: typ }, + ty::GenericPredicates::empty()) } - let e = fields[0].ty(tcx, substs); - if !fields.iter().all(|f| f.ty(tcx, substs) == e) { - span_err!(tcx.sess, sp, E0076, "SIMD vector should be homogeneous"); - return; + Def::Fn(id) | Def::Method(id) | + Def::Static(id, _) | Def::Variant(_, id) | + Def::Struct(id) | Def::Const(id) | Def::AssociatedConst(id) => { + (self.tcx.lookup_item_type(id), self.tcx.lookup_predicates(id)) } - match e.sty { - ty::TyParam(_) => { /* struct(T, T, T, T) is ok */ } - _ if e.is_machine() => { /* struct(u8, u8, u8, u8) is ok */ } - _ => { - span_err!(tcx.sess, sp, E0077, - "SIMD vector element type should be machine type"); - return; - } + Def::Trait(_) | + Def::Enum(..) | + Def::TyAlias(..) | + Def::AssociatedTy(..) | + Def::PrimTy(_) | + Def::TyParam(..) | + Def::Mod(..) | + Def::ForeignMod(..) | + Def::Label(..) | + Def::SelfTy(..) | + Def::Err => { + span_bug!(sp, "expected value, found {:?}", defn); } } - _ => () } -} -pub fn check_enum_variants<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, - sp: Span, - vs: &'tcx [hir::Variant], - id: ast::NodeId) { - fn do_check<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, - vs: &'tcx [hir::Variant], - id: ast::NodeId, - hint: attr::ReprAttr) { - #![allow(trivial_numeric_casts)] - - let rty = ccx.tcx.node_id_to_type(id); - let mut disr_vals: Vec = Vec::new(); - - let tables = RefCell::new(ty::Tables::empty()); - let inh = static_inherited_fields(ccx, &tables); - let fcx = blank_fn_ctxt(ccx, &inh, ty::FnConverging(rty), id); - - let repr_type_ty = ccx.tcx.enum_repr_type(Some(&hint)).to_ty(&ccx.tcx); - for v in vs { - if let Some(ref e) = v.node.disr_expr { - check_const_with_ty(&fcx, e.span, e, repr_type_ty); + // Instantiates the given path, which must refer to an item with the given + // number of type parameters and type. + pub fn instantiate_path(&self, + segments: &[hir::PathSegment], + type_scheme: TypeScheme<'tcx>, + type_predicates: &ty::GenericPredicates<'tcx>, + opt_self_ty: Option>, + def: Def, + span: Span, + node_id: ast::NodeId) { + debug!("instantiate_path(path={:?}, def={:?}, node_id={}, type_scheme={:?})", + segments, + def, + node_id, + type_scheme); + + // We need to extract the type parameters supplied by the user in + // the path `path`. 
Due to the current setup, this is a bit of a + // tricky-process; the problem is that resolve only tells us the + // end-point of the path resolution, and not the intermediate steps. + // Luckily, we can (at least for now) deduce the intermediate steps + // just from the end-point. + // + // There are basically four cases to consider: + // + // 1. Reference to a *type*, such as a struct or enum: + // + // mod a { struct Foo { ... } } + // + // Because we don't allow types to be declared within one + // another, a path that leads to a type will always look like + // `a::b::Foo` where `a` and `b` are modules. This implies + // that only the final segment can have type parameters, and + // they are located in the TypeSpace. + // + // *Note:* Generally speaking, references to types don't + // actually pass through this function, but rather the + // `ast_ty_to_ty` function in `astconv`. However, in the case + // of struct patterns (and maybe literals) we do invoke + // `instantiate_path` to get the general type of an instance of + // a struct. (In these cases, there are actually no type + // parameters permitted at present, but perhaps we will allow + // them in the future.) + // + // 1b. Reference to an enum variant or tuple-like struct: + // + // struct foo(...) + // enum E { foo(...) } + // + // In these cases, the parameters are declared in the type + // space. + // + // 2. Reference to a *fn item*: + // + // fn foo() { } + // + // In this case, the path will again always have the form + // `a::b::foo::` where only the final segment should have + // type parameters. However, in this case, those parameters are + // declared on a value, and hence are in the `FnSpace`. + // + // 3. Reference to a *method*: + // + // impl SomeStruct { + // fn foo(...) + // } + // + // Here we can have a path like + // `a::b::SomeStruct::::foo::`, in which case parameters + // may appear in two places. The penultimate segment, + // `SomeStruct::`, contains parameters in TypeSpace, and the + // final segment, `foo::` contains parameters in fn space. + // + // 4. Reference to an *associated const*: + // + // impl AnotherStruct { + // const FOO: B = BAR; + // } + // + // The path in this case will look like + // `a::b::AnotherStruct::::FOO`, so the penultimate segment + // only will have parameters in TypeSpace. + // + // The first step then is to categorize the segments appropriately. + + assert!(!segments.is_empty()); + + let mut ufcs_associated = None; + let mut segment_spaces: Vec<_>; + match def { + // Case 1 and 1b. Reference to a *type* or *enum variant*. + Def::SelfTy(..) | + Def::Struct(..) | + Def::Variant(..) | + Def::Enum(..) | + Def::TyAlias(..) | + Def::AssociatedTy(..) | + Def::Trait(..) | + Def::PrimTy(..) | + Def::TyParam(..) => { + // Everything but the final segment should have no + // parameters at all. + segment_spaces = vec![None; segments.len() - 1]; + segment_spaces.push(Some(subst::TypeSpace)); } - } - let def_id = ccx.tcx.map.local_def_id(id); + // Case 2. Reference to a top-level value. + Def::Fn(..) | + Def::Const(..) | + Def::Static(..) => { + segment_spaces = vec![None; segments.len() - 1]; + segment_spaces.push(Some(subst::FnSpace)); + } - let variants = &ccx.tcx.lookup_adt_def(def_id).variants; - for (v, variant) in vs.iter().zip(variants.iter()) { - let current_disr_val = variant.disr_val; + // Case 3. Reference to a method. 
+ Def::Method(def_id) => { + let container = self.tcx.impl_or_trait_item(def_id).container(); + match container { + ty::TraitContainer(trait_did) => { + callee::check_legal_trait_for_method_call(self.ccx, span, trait_did) + } + ty::ImplContainer(_) => {} + } - // Check for duplicate discriminant values - match disr_vals.iter().position(|&x| x == current_disr_val) { - Some(i) => { - let mut err = struct_span_err!(ccx.tcx.sess, v.span, E0081, - "discriminant value `{}` already exists", disr_vals[i]); - let variant_i_node_id = ccx.tcx.map.as_local_node_id(variants[i].did).unwrap(); - span_note!(&mut err, ccx.tcx.map.span(variant_i_node_id), - "conflicting discriminant here"); - err.emit(); + if segments.len() >= 2 { + segment_spaces = vec![None; segments.len() - 2]; + segment_spaces.push(Some(subst::TypeSpace)); + segment_spaces.push(Some(subst::FnSpace)); + } else { + // `::method` will end up here, and so can `T::method`. + let self_ty = opt_self_ty.expect("UFCS sugared method missing Self"); + segment_spaces = vec![Some(subst::FnSpace)]; + ufcs_associated = Some((container, self_ty)); } - None => {} } - disr_vals.push(current_disr_val); - } - } - - let def_id = ccx.tcx.map.local_def_id(id); - let hint = *ccx.tcx.lookup_repr_hints(def_id).get(0).unwrap_or(&attr::ReprAny); - - if hint != attr::ReprAny && vs.is_empty() { - span_err!(ccx.tcx.sess, sp, E0084, - "unsupported representation for zero-variant enum"); - } - - do_check(ccx, vs, id, hint); - check_representable(ccx.tcx, sp, id, "enum"); -} - -// Returns the type parameter count and the type for the given definition. -fn type_scheme_and_predicates_for_def<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - sp: Span, - defn: Def) - -> (TypeScheme<'tcx>, GenericPredicates<'tcx>) { - match defn { - Def::Local(_, nid) | Def::Upvar(_, nid, _, _) => { - let typ = fcx.local_ty(sp, nid); - (ty::TypeScheme { generics: ty::Generics::empty(), ty: typ }, - ty::GenericPredicates::empty()) - } - Def::Fn(id) | Def::Method(id) | - Def::Static(id, _) | Def::Variant(_, id) | - Def::Struct(id) | Def::Const(id) | Def::AssociatedConst(id) => { - (fcx.tcx().lookup_item_type(id), fcx.tcx().lookup_predicates(id)) - } - Def::Trait(_) | - Def::Enum(..) | - Def::TyAlias(..) | - Def::AssociatedTy(..) | - Def::PrimTy(_) | - Def::TyParam(..) | - Def::Mod(..) | - Def::ForeignMod(..) | - Def::Label(..) | - Def::SelfTy(..) | - Def::Err => { - span_bug!(sp, "expected value, found {:?}", defn); - } - } -} - -// Instantiates the given path, which must refer to an item with the given -// number of type parameters and type. -pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - segments: &[hir::PathSegment], - type_scheme: TypeScheme<'tcx>, - type_predicates: &ty::GenericPredicates<'tcx>, - opt_self_ty: Option>, - def: Def, - span: Span, - node_id: ast::NodeId) { - debug!("instantiate_path(path={:?}, def={:?}, node_id={}, type_scheme={:?})", - segments, - def, - node_id, - type_scheme); - - // We need to extract the type parameters supplied by the user in - // the path `path`. Due to the current setup, this is a bit of a - // tricky-process; the problem is that resolve only tells us the - // end-point of the path resolution, and not the intermediate steps. - // Luckily, we can (at least for now) deduce the intermediate steps - // just from the end-point. - // - // There are basically four cases to consider: - // - // 1. Reference to a *type*, such as a struct or enum: - // - // mod a { struct Foo { ... 
} } - // - // Because we don't allow types to be declared within one - // another, a path that leads to a type will always look like - // `a::b::Foo` where `a` and `b` are modules. This implies - // that only the final segment can have type parameters, and - // they are located in the TypeSpace. - // - // *Note:* Generally speaking, references to types don't - // actually pass through this function, but rather the - // `ast_ty_to_ty` function in `astconv`. However, in the case - // of struct patterns (and maybe literals) we do invoke - // `instantiate_path` to get the general type of an instance of - // a struct. (In these cases, there are actually no type - // parameters permitted at present, but perhaps we will allow - // them in the future.) - // - // 1b. Reference to an enum variant or tuple-like struct: - // - // struct foo(...) - // enum E { foo(...) } - // - // In these cases, the parameters are declared in the type - // space. - // - // 2. Reference to a *fn item*: - // - // fn foo() { } - // - // In this case, the path will again always have the form - // `a::b::foo::` where only the final segment should have - // type parameters. However, in this case, those parameters are - // declared on a value, and hence are in the `FnSpace`. - // - // 3. Reference to a *method*: - // - // impl SomeStruct { - // fn foo(...) - // } - // - // Here we can have a path like - // `a::b::SomeStruct::::foo::`, in which case parameters - // may appear in two places. The penultimate segment, - // `SomeStruct::`, contains parameters in TypeSpace, and the - // final segment, `foo::` contains parameters in fn space. - // - // 4. Reference to an *associated const*: - // - // impl AnotherStruct { - // const FOO: B = BAR; - // } - // - // The path in this case will look like - // `a::b::AnotherStruct::::FOO`, so the penultimate segment - // only will have parameters in TypeSpace. - // - // The first step then is to categorize the segments appropriately. - - assert!(!segments.is_empty()); - - let mut ufcs_associated = None; - let mut segment_spaces: Vec<_>; - match def { - // Case 1 and 1b. Reference to a *type* or *enum variant*. - Def::SelfTy(..) | - Def::Struct(..) | - Def::Variant(..) | - Def::Enum(..) | - Def::TyAlias(..) | - Def::AssociatedTy(..) | - Def::Trait(..) | - Def::PrimTy(..) | - Def::TyParam(..) => { - // Everything but the final segment should have no - // parameters at all. - segment_spaces = vec![None; segments.len() - 1]; - segment_spaces.push(Some(subst::TypeSpace)); - } - - // Case 2. Reference to a top-level value. - Def::Fn(..) | - Def::Const(..) | - Def::Static(..) => { - segment_spaces = vec![None; segments.len() - 1]; - segment_spaces.push(Some(subst::FnSpace)); - } + Def::AssociatedConst(def_id) => { + let container = self.tcx.impl_or_trait_item(def_id).container(); + match container { + ty::TraitContainer(trait_did) => { + callee::check_legal_trait_for_method_call(self.ccx, span, trait_did) + } + ty::ImplContainer(_) => {} + } - // Case 3. Reference to a method. - Def::Method(def_id) => { - let container = fcx.tcx().impl_or_trait_item(def_id).container(); - match container { - ty::TraitContainer(trait_did) => { - callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did) + if segments.len() >= 2 { + segment_spaces = vec![None; segments.len() - 2]; + segment_spaces.push(Some(subst::TypeSpace)); + segment_spaces.push(None); + } else { + // `::CONST` will end up here, and so can `T::CONST`. 
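A compact user-level illustration (not part of the patch) of cases 1b, 2 and 3 from the commentary above, showing where explicit parameters land in the type and fn spaces:

    struct SomeStruct<T>(T);
    impl<T> SomeStruct<T> {
        fn foo<U>(u: U) -> U { u }
    }
    fn top_level<T>(t: T) -> T { t }

    fn main() {
        let _a = SomeStruct::<u8>(1);              // case 1b: parameters in TypeSpace
        let _b = top_level::<u16>(2);              // case 2: parameters in FnSpace
        let _c = SomeStruct::<u8>::foo::<u32>(3);  // case 3: TypeSpace on the type segment, FnSpace on the method segment
    }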
+ let self_ty = opt_self_ty.expect("UFCS sugared const missing Self"); + segment_spaces = vec![None]; + ufcs_associated = Some((container, self_ty)); } - ty::ImplContainer(_) => {} } - if segments.len() >= 2 { - segment_spaces = vec![None; segments.len() - 2]; - segment_spaces.push(Some(subst::TypeSpace)); - segment_spaces.push(Some(subst::FnSpace)); - } else { - // `::method` will end up here, and so can `T::method`. - let self_ty = opt_self_ty.expect("UFCS sugared method missing Self"); - segment_spaces = vec![Some(subst::FnSpace)]; - ufcs_associated = Some((container, self_ty)); + // Other cases. Various nonsense that really shouldn't show up + // here. If they do, an error will have been reported + // elsewhere. (I hope) + Def::Mod(..) | + Def::ForeignMod(..) | + Def::Local(..) | + Def::Label(..) | + Def::Upvar(..) => { + segment_spaces = vec![None; segments.len()]; } - } - Def::AssociatedConst(def_id) => { - let container = fcx.tcx().impl_or_trait_item(def_id).container(); - match container { - ty::TraitContainer(trait_did) => { - callee::check_legal_trait_for_method_call(fcx.ccx, span, trait_did) - } - ty::ImplContainer(_) => {} + Def::Err => { + self.set_tainted_by_errors(); + segment_spaces = vec![None; segments.len()]; } - - if segments.len() >= 2 { - segment_spaces = vec![None; segments.len() - 2]; - segment_spaces.push(Some(subst::TypeSpace)); - segment_spaces.push(None); + } + assert_eq!(segment_spaces.len(), segments.len()); + + // In `>::method`, `A` and `B` are mandatory, but + // `opt_self_ty` can also be Some for `Foo::method`, where Foo's + // type parameters are not mandatory. + let require_type_space = opt_self_ty.is_some() && ufcs_associated.is_none(); + + debug!("segment_spaces={:?}", segment_spaces); + + // Next, examine the definition, and determine how many type + // parameters we expect from each space. + let type_defs = &type_scheme.generics.types; + let region_defs = &type_scheme.generics.regions; + + // Now that we have categorized what space the parameters for each + // segment belong to, let's sort out the parameters that the user + // provided (if any) into their appropriate spaces. We'll also report + // errors if type parameters are provided in an inappropriate place. + let mut substs = Substs::empty(); + for (&opt_space, segment) in segment_spaces.iter().zip(segments) { + if let Some(space) = opt_space { + self.push_explicit_parameters_from_segment_to_substs(space, + span, + type_defs, + region_defs, + segment, + &mut substs); } else { - // `::CONST` will end up here, and so can `T::CONST`. - let self_ty = opt_self_ty.expect("UFCS sugared const missing Self"); - segment_spaces = vec![None]; - ufcs_associated = Some((container, self_ty)); + self.tcx.prohibit_type_params(slice::ref_slice(segment)); } } - - // Other cases. Various nonsense that really shouldn't show up - // here. If they do, an error will have been reported - // elsewhere. (I hope) - Def::Mod(..) | - Def::ForeignMod(..) | - Def::Local(..) | - Def::Label(..) | - Def::Upvar(..) | - Def::Err => { - segment_spaces = vec![None; segments.len()]; + if let Some(self_ty) = opt_self_ty { + if type_defs.len(subst::SelfSpace) == 1 { + substs.types.push(subst::SelfSpace, self_ty); + } } - } - assert_eq!(segment_spaces.len(), segments.len()); - // In `>::method`, `A` and `B` are mandatory, but - // `opt_self_ty` can also be Some for `Foo::method`, where Foo's - // type parameters are not mandatory. 
- let require_type_space = opt_self_ty.is_some() && ufcs_associated.is_none(); + // Now we have to compare the types that the user *actually* + // provided against the types that were *expected*. If the user + // did not provide any types, then we want to substitute inference + // variables. If the user provided some types, we may still need + // to add defaults. If the user provided *too many* types, that's + // a problem. + for &space in &[subst::SelfSpace, subst::TypeSpace, subst::FnSpace] { + self.adjust_type_parameters(span, space, type_defs, + require_type_space, &mut substs); + assert_eq!(substs.types.len(space), type_defs.len(space)); - debug!("segment_spaces={:?}", segment_spaces); + self.adjust_region_parameters(span, space, region_defs, &mut substs); + assert_eq!(substs.regions.len(space), region_defs.len(space)); + } - // Next, examine the definition, and determine how many type - // parameters we expect from each space. - let type_defs = &type_scheme.generics.types; - let region_defs = &type_scheme.generics.regions; + // The things we are substituting into the type should not contain + // escaping late-bound regions, and nor should the base type scheme. + let substs = self.tcx.mk_substs(substs); + assert!(!substs.has_regions_escaping_depth(0)); + assert!(!type_scheme.has_escaping_regions()); - // Now that we have categorized what space the parameters for each - // segment belong to, let's sort out the parameters that the user - // provided (if any) into their appropriate spaces. We'll also report - // errors if type parameters are provided in an inappropriate place. - let mut substs = Substs::empty(); - for (opt_space, segment) in segment_spaces.iter().zip(segments) { - match *opt_space { - None => { - prohibit_type_params(fcx.tcx(), slice::ref_slice(segment)); - } + // Add all the obligations that are required, substituting and + // normalized appropriately. + let bounds = self.instantiate_bounds(span, &substs, &type_predicates); + self.add_obligations_for_parameters( + traits::ObligationCause::new(span, self.body_id, traits::ItemObligation(def.def_id())), + &bounds); - Some(space) => { - push_explicit_parameters_from_segment_to_substs(fcx, - space, - span, - type_defs, - region_defs, - segment, - &mut substs); + // Substitute the values for the type parameters into the type of + // the referenced item. + let ty_substituted = self.instantiate_type_scheme(span, &substs, &type_scheme.ty); + + + if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated { + // In the case of `Foo::method` and `>::method`, if `method` + // is inherent, there is no `Self` parameter, instead, the impl needs + // type parameters, which we can infer by unifying the provided `Self` + // with the substituted impl type. + let impl_scheme = self.tcx.lookup_item_type(impl_def_id); + assert_eq!(substs.types.len(subst::TypeSpace), + impl_scheme.generics.types.len(subst::TypeSpace)); + assert_eq!(substs.regions.len(subst::TypeSpace), + impl_scheme.generics.regions.len(subst::TypeSpace)); + + let impl_ty = self.instantiate_type_scheme(span, &substs, &impl_scheme.ty); + match self.sub_types(false, TypeOrigin::Misc(span), self_ty, impl_ty) { + Ok(InferOk { obligations, .. 
}) => { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()); + } + Err(_) => { + span_bug!(span, + "instantiate_path: (UFCS) {:?} was a subtype of {:?} but now is not?", + self_ty, + impl_ty); + } } } - } - if let Some(self_ty) = opt_self_ty { - if type_defs.len(subst::SelfSpace) == 1 { - substs.types.push(subst::SelfSpace, self_ty); - } - } - // Now we have to compare the types that the user *actually* - // provided against the types that were *expected*. If the user - // did not provide any types, then we want to substitute inference - // variables. If the user provided some types, we may still need - // to add defaults. If the user provided *too many* types, that's - // a problem. - for &space in &[subst::SelfSpace, subst::TypeSpace, subst::FnSpace] { - adjust_type_parameters(fcx, span, space, type_defs, - require_type_space, &mut substs); - assert_eq!(substs.types.len(space), type_defs.len(space)); - - adjust_region_parameters(fcx, span, space, region_defs, &mut substs); - assert_eq!(substs.regions.len(space), region_defs.len(space)); - } - - // The things we are substituting into the type should not contain - // escaping late-bound regions, and nor should the base type scheme. - assert!(!substs.has_regions_escaping_depth(0)); - assert!(!type_scheme.has_escaping_regions()); - - // Add all the obligations that are required, substituting and - // normalized appropriately. - let bounds = fcx.instantiate_bounds(span, &substs, &type_predicates); - fcx.add_obligations_for_parameters( - traits::ObligationCause::new(span, fcx.body_id, traits::ItemObligation(def.def_id())), - &bounds); - - // Substitute the values for the type parameters into the type of - // the referenced item. - let ty_substituted = fcx.instantiate_type_scheme(span, &substs, &type_scheme.ty); - - - if let Some((ty::ImplContainer(impl_def_id), self_ty)) = ufcs_associated { - // In the case of `Foo::method` and `>::method`, if `method` - // is inherent, there is no `Self` parameter, instead, the impl needs - // type parameters, which we can infer by unifying the provided `Self` - // with the substituted impl type. - let impl_scheme = fcx.tcx().lookup_item_type(impl_def_id); - assert_eq!(substs.types.len(subst::TypeSpace), - impl_scheme.generics.types.len(subst::TypeSpace)); - assert_eq!(substs.regions.len(subst::TypeSpace), - impl_scheme.generics.regions.len(subst::TypeSpace)); - - let impl_ty = fcx.instantiate_type_scheme(span, &substs, &impl_scheme.ty); - if fcx.mk_subty(false, TypeOrigin::Misc(span), self_ty, impl_ty).is_err() { - span_bug!(span, - "instantiate_path: (UFCS) {:?} was a subtype of {:?} but now is not?", - self_ty, - impl_ty); - } + debug!("instantiate_path: type of {:?} is {:?}", + node_id, + ty_substituted); + self.write_ty(node_id, ty_substituted); + self.write_substs(node_id, ty::ItemSubsts { + substs: substs + }); } - debug!("instantiate_path: type of {:?} is {:?}", - node_id, - ty_substituted); - fcx.write_ty(node_id, ty_substituted); - fcx.write_substs(node_id, ty::ItemSubsts { substs: substs }); - return; - /// Finds the parameters that the user provided and adds them to `substs`. If too many /// parameters are provided, then reports an error and clears the output vector. /// @@ -4523,8 +4432,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, /// Note that we *do not* check for *too few* parameters here. Due to the presence of defaults /// etc that is more complicated. 
I wanted however to do the reporting of *too many* parameters /// here because we can easily use the precise span of the N+1'th parameter. - fn push_explicit_parameters_from_segment_to_substs<'a, 'tcx>( - fcx: &FnCtxt<'a, 'tcx>, + fn push_explicit_parameters_from_segment_to_substs(&self, space: subst::ParamSpace, span: Span, type_defs: &VecPerParamSpace>, @@ -4534,21 +4442,20 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, { match segment.parameters { hir::AngleBracketedParameters(ref data) => { - push_explicit_angle_bracketed_parameters_from_segment_to_substs( - fcx, space, type_defs, region_defs, data, substs); + self.push_explicit_angle_bracketed_parameters_from_segment_to_substs( + space, type_defs, region_defs, data, substs); } hir::ParenthesizedParameters(ref data) => { - span_err!(fcx.tcx().sess, span, E0238, + span_err!(self.tcx.sess, span, E0238, "parenthesized parameters may only be used with a trait"); - push_explicit_parenthesized_parameters_from_segment_to_substs( - fcx, space, span, type_defs, data, substs); + self.push_explicit_parenthesized_parameters_from_segment_to_substs( + space, span, type_defs, data, substs); } } } - fn push_explicit_angle_bracketed_parameters_from_segment_to_substs<'a, 'tcx>( - fcx: &FnCtxt<'a, 'tcx>, + fn push_explicit_angle_bracketed_parameters_from_segment_to_substs(&self, space: subst::ParamSpace, type_defs: &VecPerParamSpace>, region_defs: &VecPerParamSpace, @@ -4559,11 +4466,11 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, let type_count = type_defs.len(space); assert_eq!(substs.types.len(space), 0); for (i, typ) in data.types.iter().enumerate() { - let t = fcx.to_ty(&typ); + let t = self.to_ty(&typ); if i < type_count { substs.types.push(space, t); } else if i == type_count { - span_err!(fcx.tcx().sess, typ.span, E0087, + span_err!(self.tcx.sess, typ.span, E0087, "too many type parameters provided: \ expected at most {} parameter{}, \ found {} parameter{}", @@ -4578,7 +4485,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, } if !data.bindings.is_empty() { - span_err!(fcx.tcx().sess, data.bindings[0].span, E0182, + span_err!(self.tcx.sess, data.bindings[0].span, E0182, "unexpected binding of associated item in expression path \ (only allowed in type paths)"); } @@ -4587,11 +4494,11 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, let region_count = region_defs.len(space); assert_eq!(substs.regions.len(space), 0); for (i, lifetime) in data.lifetimes.iter().enumerate() { - let r = ast_region_to_region(fcx.tcx(), lifetime); + let r = ast_region_to_region(self.tcx, lifetime); if i < region_count { substs.regions.push(space, r); } else if i == region_count { - span_err!(fcx.tcx().sess, lifetime.span, E0088, + span_err!(self.tcx.sess, lifetime.span, E0088, "too many lifetime parameters provided: \ expected {} parameter{}, found {} parameter{}", region_count, @@ -4611,8 +4518,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, /// roughly the same thing as `Foo<(A,B),C>`. One important /// difference has to do with the treatment of anonymous /// regions, which are translated into bound regions (NYI). 
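In surface Rust the stable user of this parenthesized form is the `Fn` family of traits: the checker groups the inputs of `Fn(A, B) -> C` into a single tuple type and records the output separately, which is roughly the `Foo<(A, B), C>` reading the doc comment describes. The sketch below is illustrative only and not part of the patch.

    // `Fn(u8, u8) -> u16` constrains callables whose argument tuple is (u8, u8)
    // and whose output is u16.
    fn apply(f: &dyn Fn(u8, u8) -> u16, a: u8, b: u8) -> u16 {
        f(a, b)
    }

    fn main() {
        let sum = |a: u8, b: u8| a as u16 + b as u16;
        assert_eq!(apply(&sum, 3, 4), 7);
    }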
- fn push_explicit_parenthesized_parameters_from_segment_to_substs<'a, 'tcx>( - fcx: &FnCtxt<'a, 'tcx>, + fn push_explicit_parenthesized_parameters_from_segment_to_substs(&self, space: subst::ParamSpace, span: Span, type_defs: &VecPerParamSpace>, @@ -4621,34 +4527,33 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, { let type_count = type_defs.len(space); if type_count < 2 { - span_err!(fcx.tcx().sess, span, E0167, + span_err!(self.tcx.sess, span, E0167, "parenthesized form always supplies 2 type parameters, \ but only {} parameter(s) were expected", type_count); } let input_tys: Vec = - data.inputs.iter().map(|ty| fcx.to_ty(&ty)).collect(); + data.inputs.iter().map(|ty| self.to_ty(&ty)).collect(); - let tuple_ty = fcx.tcx().mk_tup(input_tys); + let tuple_ty = self.tcx.mk_tup(input_tys); if type_count >= 1 { substs.types.push(space, tuple_ty); } let output_ty: Option = - data.output.as_ref().map(|ty| fcx.to_ty(&ty)); + data.output.as_ref().map(|ty| self.to_ty(&ty)); let output_ty = - output_ty.unwrap_or(fcx.tcx().mk_nil()); + output_ty.unwrap_or(self.tcx.mk_nil()); if type_count >= 2 { substs.types.push(space, output_ty); } } - fn adjust_type_parameters<'a, 'tcx>( - fcx: &FnCtxt<'a, 'tcx>, + fn adjust_type_parameters(&self, span: Span, space: ParamSpace, defs: &VecPerParamSpace>, @@ -4677,7 +4582,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // everything. if provided_len == 0 && !(require_type_space && space == subst::TypeSpace) { substs.types.replace(space, Vec::new()); - fcx.infcx().type_vars_for_defs(span, space, substs, &desired[..]); + self.type_vars_for_defs(span, space, substs, &desired[..]); return; } @@ -4686,14 +4591,14 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, if provided_len < required_len { let qualifier = if desired.len() != required_len { "at least " } else { "" }; - span_err!(fcx.tcx().sess, span, E0089, + span_err!(self.tcx.sess, span, E0089, "too few type parameters provided: expected {}{} parameter{}, \ found {} parameter{}", qualifier, required_len, if required_len == 1 {""} else {"s"}, provided_len, if provided_len == 1 {""} else {"s"}); - substs.types.replace(space, vec![fcx.tcx().types.err; desired.len()]); + substs.types.replace(space, vec![self.tcx.types.err; desired.len()]); return; } @@ -4706,7 +4611,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // partial substitution that we have built up. for i in provided_len..desired.len() { let default = desired[i].default.unwrap(); - let default = default.subst_spanned(fcx.tcx(), substs, Some(span)); + let default = default.subst_spanned(self.tcx, substs, Some(span)); substs.types.push(space, default); } assert_eq!(substs.types.len(space), desired.len()); @@ -4714,8 +4619,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, debug!("Final substs: {:?}", substs); } - fn adjust_region_parameters( - fcx: &FnCtxt, + fn adjust_region_parameters(&self, span: Span, space: ParamSpace, defs: &VecPerParamSpace, @@ -4731,7 +4635,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, if provided_len == 0 { substs.regions.replace( space, - fcx.infcx().region_vars_for_defs(span, desired)); + self.region_vars_for_defs(span, desired)); return; } @@ -4742,7 +4646,7 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, // Otherwise, too few were provided. Report an error and then // use inference variables. 
- span_err!(fcx.tcx().sess, span, E0090, + span_err!(self.tcx.sess, span, E0090, "too few lifetime parameters provided: expected {} parameter{}, \ found {} parameter{}", desired.len(), @@ -4752,51 +4656,47 @@ pub fn instantiate_path<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, substs.regions.replace( space, - fcx.infcx().region_vars_for_defs(span, desired)); + self.region_vars_for_defs(span, desired)); } -} -fn structurally_resolve_type_or_else<'a, 'tcx, F>(fcx: &FnCtxt<'a, 'tcx>, - sp: Span, - ty: Ty<'tcx>, - f: F) -> Ty<'tcx> - where F: Fn() -> Ty<'tcx> -{ - let mut ty = fcx.resolve_type_vars_if_possible(ty); - - if ty.is_ty_var() { - let alternative = f(); - - // If not, error. - if alternative.is_ty_var() || alternative.references_error() { - fcx.type_error_message(sp, |_actual| { - "the type of this value must be known in this context".to_string() - }, ty, None); - demand::suptype(fcx, sp, fcx.tcx().types.err, ty); - ty = fcx.tcx().types.err; - } else { - demand::suptype(fcx, sp, alternative, ty); - ty = alternative; + fn structurally_resolve_type_or_else(&self, sp: Span, ty: Ty<'tcx>, f: F) + -> Ty<'tcx> + where F: Fn() -> Ty<'tcx> + { + let mut ty = self.resolve_type_vars_with_obligations(ty); + + if ty.is_ty_var() { + let alternative = f(); + + // If not, error. + if alternative.is_ty_var() || alternative.references_error() { + if !self.is_tainted_by_errors() { + self.type_error_message(sp, |_actual| { + "the type of this value must be known in this context".to_string() + }, ty, None); + } + self.demand_suptype(sp, self.tcx.types.err, ty); + ty = self.tcx.types.err; + } else { + self.demand_suptype(sp, alternative, ty); + ty = alternative; + } } - } - ty -} + ty + } -// Resolves `typ` by a single level if `typ` is a type variable. If no -// resolution is possible, then an error is reported. -pub fn structurally_resolved_type<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - sp: Span, - ty: Ty<'tcx>) - -> Ty<'tcx> -{ - structurally_resolve_type_or_else(fcx, sp, ty, || { - fcx.tcx().types.err - }) + // Resolves `typ` by a single level if `typ` is a type variable. If no + // resolution is possible, then an error is reported. + pub fn structurally_resolved_type(&self, sp: Span, ty: Ty<'tcx>) -> Ty<'tcx> { + self.structurally_resolve_type_or_else(sp, ty, || { + self.tcx.types.err + }) + } } // Returns true if b contains a break that can exit from b -pub fn may_break(cx: &TyCtxt, id: ast::NodeId, b: &hir::Block) -> bool { +pub fn may_break(tcx: TyCtxt, id: ast::NodeId, b: &hir::Block) -> bool { // First: is there an unlabeled break immediately // inside the loop? (loop_query(&b, |e| { @@ -4809,7 +4709,7 @@ pub fn may_break(cx: &TyCtxt, id: ast::NodeId, b: &hir::Block) -> bool { // nested anywhere inside the loop? (block_query(b, |e| { if let hir::ExprBreak(Some(_)) = e.node { - lookup_full_def(cx, e.span, e.id) == Def::Label(id) + lookup_full_def(tcx, e.span, e.id) == Def::Label(id) } else { false } diff --git a/src/librustc_typeck/check/op.rs b/src/librustc_typeck/check/op.rs index debf925875..8604dadf46 100644 --- a/src/librustc_typeck/check/op.rs +++ b/src/librustc_typeck/check/op.rs @@ -10,341 +10,340 @@ //! Code related to processing overloaded binary and unary operators. 
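Before the refactored operator checker below, a brief reminder of what "overloaded" means from the user's side; this sketch is illustrative only and not part of the patch. A `+` on non-builtin operands resolves to the `add` method of the `std::ops::Add` lang item, which is exactly the (method name, trait) pair that `name_and_trait_def_id` returns further down for `hir::BiAdd`.

    use std::ops::Add;

    #[derive(Debug, PartialEq, Clone, Copy)]
    struct Meters(f64);

    impl Add for Meters {
        type Output = Meters;
        fn add(self, rhs: Meters) -> Meters {
            Meters(self.0 + rhs.0)
        }
    }

    fn main() {
        // Resolved through the Add impl above, not a builtin operation.
        assert_eq!(Meters(1.5) + Meters(2.5), Meters(4.0));
    }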
-use super::{ - check_expr, - check_expr_coercable_to_type, - check_expr_with_lvalue_pref, - demand, - method, - FnCtxt, -}; +use super::FnCtxt; use hir::def_id::DefId; use rustc::ty::{Ty, TypeFoldable, PreferMutLvalue}; use syntax::ast; use syntax::parse::token; use rustc::hir; -/// Check a `a = b` -pub fn check_binop_assign<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, - expr: &'tcx hir::Expr, - op: hir::BinOp, - lhs_expr: &'tcx hir::Expr, - rhs_expr: &'tcx hir::Expr) -{ - check_expr_with_lvalue_pref(fcx, lhs_expr, PreferMutLvalue); - - let lhs_ty = fcx.resolve_type_vars_if_possible(fcx.expr_ty(lhs_expr)); - let (rhs_ty, return_ty) = - check_overloaded_binop(fcx, expr, lhs_expr, lhs_ty, rhs_expr, op, IsAssign::Yes); - let rhs_ty = fcx.resolve_type_vars_if_possible(rhs_ty); - - if !lhs_ty.is_ty_var() && !rhs_ty.is_ty_var() && is_builtin_binop(lhs_ty, rhs_ty, op) { - enforce_builtin_binop_types(fcx, lhs_expr, lhs_ty, rhs_expr, rhs_ty, op); - fcx.write_nil(expr.id); - } else { - fcx.write_ty(expr.id, return_ty); - } +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + /// Check a `a = b` + pub fn check_binop_assign(&self, + expr: &'gcx hir::Expr, + op: hir::BinOp, + lhs_expr: &'gcx hir::Expr, + rhs_expr: &'gcx hir::Expr) + { + self.check_expr_with_lvalue_pref(lhs_expr, PreferMutLvalue); + + let lhs_ty = self.resolve_type_vars_with_obligations(self.expr_ty(lhs_expr)); + let (rhs_ty, return_ty) = + self.check_overloaded_binop(expr, lhs_expr, lhs_ty, rhs_expr, op, IsAssign::Yes); + let rhs_ty = self.resolve_type_vars_with_obligations(rhs_ty); + + if !lhs_ty.is_ty_var() && !rhs_ty.is_ty_var() && is_builtin_binop(lhs_ty, rhs_ty, op) { + self.enforce_builtin_binop_types(lhs_expr, lhs_ty, rhs_expr, rhs_ty, op); + self.write_nil(expr.id); + } else { + self.write_ty(expr.id, return_ty); + } - let tcx = fcx.tcx(); - if !tcx.expr_is_lval(lhs_expr) { - span_err!(tcx.sess, lhs_expr.span, E0067, "invalid left-hand side expression"); + let tcx = self.tcx; + if !tcx.expr_is_lval(lhs_expr) { + span_err!(tcx.sess, lhs_expr.span, E0067, "invalid left-hand side expression"); + } } -} - -/// Check a potentially overloaded binary operator. -pub fn check_binop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &'tcx hir::Expr, - op: hir::BinOp, - lhs_expr: &'tcx hir::Expr, - rhs_expr: &'tcx hir::Expr) -{ - let tcx = fcx.ccx.tcx; - debug!("check_binop(expr.id={}, expr={:?}, op={:?}, lhs_expr={:?}, rhs_expr={:?})", - expr.id, - expr, - op, - lhs_expr, - rhs_expr); - - check_expr(fcx, lhs_expr); - let lhs_ty = fcx.resolve_type_vars_if_possible(fcx.expr_ty(lhs_expr)); - - match BinOpCategory::from(op) { - BinOpCategory::Shortcircuit => { - // && and || are a simple case. - demand::suptype(fcx, lhs_expr.span, tcx.mk_bool(), lhs_ty); - check_expr_coercable_to_type(fcx, rhs_expr, tcx.mk_bool()); - fcx.write_ty(expr.id, tcx.mk_bool()); - } - _ => { - // Otherwise, we always treat operators as if they are - // overloaded. This is the way to be most flexible w/r/t - // types that get inferred. - let (rhs_ty, return_ty) = - check_overloaded_binop(fcx, expr, lhs_expr, lhs_ty, rhs_expr, op, IsAssign::No); - - // Supply type inference hints if relevant. Probably these - // hints should be enforced during select as part of the - // `consider_unification_despite_ambiguity` routine, but this - // more convenient for now. - // - // The basic idea is to help type inference by taking - // advantage of things we know about how the impls for - // scalar types are arranged. 
This is important in a - // scenario like `1_u32 << 2`, because it lets us quickly - // deduce that the result type should be `u32`, even - // though we don't know yet what type 2 has and hence - // can't pin this down to a specific impl. - let rhs_ty = fcx.resolve_type_vars_if_possible(rhs_ty); - if - !lhs_ty.is_ty_var() && !rhs_ty.is_ty_var() && - is_builtin_binop(lhs_ty, rhs_ty, op) - { - let builtin_return_ty = - enforce_builtin_binop_types(fcx, lhs_expr, lhs_ty, rhs_expr, rhs_ty, op); - demand::suptype(fcx, expr.span, builtin_return_ty, return_ty); + /// Check a potentially overloaded binary operator. + pub fn check_binop(&self, + expr: &'gcx hir::Expr, + op: hir::BinOp, + lhs_expr: &'gcx hir::Expr, + rhs_expr: &'gcx hir::Expr) + { + let tcx = self.tcx; + + debug!("check_binop(expr.id={}, expr={:?}, op={:?}, lhs_expr={:?}, rhs_expr={:?})", + expr.id, + expr, + op, + lhs_expr, + rhs_expr); + + self.check_expr(lhs_expr); + let lhs_ty = self.resolve_type_vars_with_obligations(self.expr_ty(lhs_expr)); + + match BinOpCategory::from(op) { + BinOpCategory::Shortcircuit => { + // && and || are a simple case. + self.demand_suptype(lhs_expr.span, tcx.mk_bool(), lhs_ty); + self.check_expr_coercable_to_type(rhs_expr, tcx.mk_bool()); + self.write_ty(expr.id, tcx.mk_bool()); } + _ => { + // Otherwise, we always treat operators as if they are + // overloaded. This is the way to be most flexible w/r/t + // types that get inferred. + let (rhs_ty, return_ty) = + self.check_overloaded_binop(expr, lhs_expr, lhs_ty, + rhs_expr, op, IsAssign::No); + + // Supply type inference hints if relevant. Probably these + // hints should be enforced during select as part of the + // `consider_unification_despite_ambiguity` routine, but this + // more convenient for now. + // + // The basic idea is to help type inference by taking + // advantage of things we know about how the impls for + // scalar types are arranged. This is important in a + // scenario like `1_u32 << 2`, because it lets us quickly + // deduce that the result type should be `u32`, even + // though we don't know yet what type 2 has and hence + // can't pin this down to a specific impl. 
+ let rhs_ty = self.resolve_type_vars_with_obligations(rhs_ty); + if + !lhs_ty.is_ty_var() && !rhs_ty.is_ty_var() && + is_builtin_binop(lhs_ty, rhs_ty, op) + { + let builtin_return_ty = + self.enforce_builtin_binop_types(lhs_expr, lhs_ty, rhs_expr, rhs_ty, op); + self.demand_suptype(expr.span, builtin_return_ty, return_ty); + } - fcx.write_ty(expr.id, return_ty); + self.write_ty(expr.id, return_ty); + } } } -} -fn enforce_builtin_binop_types<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - lhs_expr: &'tcx hir::Expr, - lhs_ty: Ty<'tcx>, - rhs_expr: &'tcx hir::Expr, - rhs_ty: Ty<'tcx>, - op: hir::BinOp) - -> Ty<'tcx> -{ - debug_assert!(is_builtin_binop(lhs_ty, rhs_ty, op)); - - let tcx = fcx.tcx(); - match BinOpCategory::from(op) { - BinOpCategory::Shortcircuit => { - demand::suptype(fcx, lhs_expr.span, tcx.mk_bool(), lhs_ty); - demand::suptype(fcx, rhs_expr.span, tcx.mk_bool(), rhs_ty); - tcx.mk_bool() - } + fn enforce_builtin_binop_types(&self, + lhs_expr: &'gcx hir::Expr, + lhs_ty: Ty<'tcx>, + rhs_expr: &'gcx hir::Expr, + rhs_ty: Ty<'tcx>, + op: hir::BinOp) + -> Ty<'tcx> + { + debug_assert!(is_builtin_binop(lhs_ty, rhs_ty, op)); + + let tcx = self.tcx; + match BinOpCategory::from(op) { + BinOpCategory::Shortcircuit => { + self.demand_suptype(lhs_expr.span, tcx.mk_bool(), lhs_ty); + self.demand_suptype(rhs_expr.span, tcx.mk_bool(), rhs_ty); + tcx.mk_bool() + } - BinOpCategory::Shift => { - // result type is same as LHS always - lhs_ty - } + BinOpCategory::Shift => { + // result type is same as LHS always + lhs_ty + } - BinOpCategory::Math | - BinOpCategory::Bitwise => { - // both LHS and RHS and result will have the same type - demand::suptype(fcx, rhs_expr.span, lhs_ty, rhs_ty); - lhs_ty - } + BinOpCategory::Math | + BinOpCategory::Bitwise => { + // both LHS and RHS and result will have the same type + self.demand_suptype(rhs_expr.span, lhs_ty, rhs_ty); + lhs_ty + } - BinOpCategory::Comparison => { - // both LHS and RHS and result will have the same type - demand::suptype(fcx, rhs_expr.span, lhs_ty, rhs_ty); - tcx.mk_bool() + BinOpCategory::Comparison => { + // both LHS and RHS and result will have the same type + self.demand_suptype(rhs_expr.span, lhs_ty, rhs_ty); + tcx.mk_bool() + } } } -} -fn check_overloaded_binop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - expr: &'tcx hir::Expr, - lhs_expr: &'tcx hir::Expr, - lhs_ty: Ty<'tcx>, - rhs_expr: &'tcx hir::Expr, - op: hir::BinOp, - is_assign: IsAssign) - -> (Ty<'tcx>, Ty<'tcx>) -{ - debug!("check_overloaded_binop(expr.id={}, lhs_ty={:?}, is_assign={:?})", - expr.id, - lhs_ty, - is_assign); - - let (name, trait_def_id) = name_and_trait_def_id(fcx, op, is_assign); - - // NB: As we have not yet type-checked the RHS, we don't have the - // type at hand. Make a variable to represent it. The whole reason - // for this indirection is so that, below, we can check the expr - // using this variable as the expected type, which sometimes lets - // us do better coercions than we would be able to do otherwise, - // particularly for things like `String + &String`. 
- let rhs_ty_var = fcx.infcx().next_ty_var(); - - let return_ty = match lookup_op_method(fcx, expr, lhs_ty, vec![rhs_ty_var], - token::intern(name), trait_def_id, - lhs_expr) { - Ok(return_ty) => return_ty, - Err(()) => { - // error types are considered "builtin" - if !lhs_ty.references_error() { - if let IsAssign::Yes = is_assign { - span_err!(fcx.tcx().sess, lhs_expr.span, E0368, - "binary assignment operation `{}=` cannot be applied to type `{}`", - op.node.as_str(), - lhs_ty); - } else { - let mut err = struct_span_err!(fcx.tcx().sess, lhs_expr.span, E0369, - "binary operation `{}` cannot be applied to type `{}`", - op.node.as_str(), - lhs_ty); - let missing_trait = match op.node { - hir::BiAdd => Some("std::ops::Add"), - hir::BiSub => Some("std::ops::Sub"), - hir::BiMul => Some("std::ops::Mul"), - hir::BiDiv => Some("std::ops::Div"), - hir::BiRem => Some("std::ops::Rem"), - hir::BiBitAnd => Some("std::ops::BitAnd"), - hir::BiBitOr => Some("std::ops::BitOr"), - hir::BiShl => Some("std::ops::Shl"), - hir::BiShr => Some("std::ops::Shr"), - hir::BiEq | hir::BiNe => Some("std::cmp::PartialEq"), - hir::BiLt | hir::BiLe | hir::BiGt | hir::BiGe => - Some("std::cmp::PartialOrd"), - _ => None - }; - - if let Some(missing_trait) = missing_trait { - span_note!(&mut err, lhs_expr.span, - "an implementation of `{}` might be missing for `{}`", - missing_trait, lhs_ty); + fn check_overloaded_binop(&self, + expr: &'gcx hir::Expr, + lhs_expr: &'gcx hir::Expr, + lhs_ty: Ty<'tcx>, + rhs_expr: &'gcx hir::Expr, + op: hir::BinOp, + is_assign: IsAssign) + -> (Ty<'tcx>, Ty<'tcx>) + { + debug!("check_overloaded_binop(expr.id={}, lhs_ty={:?}, is_assign={:?})", + expr.id, + lhs_ty, + is_assign); + + let (name, trait_def_id) = self.name_and_trait_def_id(op, is_assign); + + // NB: As we have not yet type-checked the RHS, we don't have the + // type at hand. Make a variable to represent it. The whole reason + // for this indirection is so that, below, we can check the expr + // using this variable as the expected type, which sometimes lets + // us do better coercions than we would be able to do otherwise, + // particularly for things like `String + &String`. 
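Checking the RHS against a fresh inference variable rather than the declared parameter type is what makes the `String + &String` case in the comment work: the `&String` operand can still be coerced to the `&str` expected by `String`'s `Add` impl. A minimal illustrative sketch (not part of the patch):

    fn main() {
        let a = String::from("foo");
        let b = String::from("bar");
        let c = a + &b;           // `&b` is a `&String`, coerced to `&str`
        assert_eq!(c, "foobar");
    }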
+ let rhs_ty_var = self.next_ty_var(); + + let return_ty = match self.lookup_op_method(expr, lhs_ty, vec![rhs_ty_var], + token::intern(name), trait_def_id, + lhs_expr) { + Ok(return_ty) => return_ty, + Err(()) => { + // error types are considered "builtin" + if !lhs_ty.references_error() { + if let IsAssign::Yes = is_assign { + span_err!(self.tcx.sess, lhs_expr.span, E0368, + "binary assignment operation `{}=` \ + cannot be applied to type `{}`", + op.node.as_str(), + lhs_ty); + } else { + let mut err = struct_span_err!(self.tcx.sess, lhs_expr.span, E0369, + "binary operation `{}` cannot be applied to type `{}`", + op.node.as_str(), + lhs_ty); + let missing_trait = match op.node { + hir::BiAdd => Some("std::ops::Add"), + hir::BiSub => Some("std::ops::Sub"), + hir::BiMul => Some("std::ops::Mul"), + hir::BiDiv => Some("std::ops::Div"), + hir::BiRem => Some("std::ops::Rem"), + hir::BiBitAnd => Some("std::ops::BitAnd"), + hir::BiBitOr => Some("std::ops::BitOr"), + hir::BiShl => Some("std::ops::Shl"), + hir::BiShr => Some("std::ops::Shr"), + hir::BiEq | hir::BiNe => Some("std::cmp::PartialEq"), + hir::BiLt | hir::BiLe | hir::BiGt | hir::BiGe => + Some("std::cmp::PartialOrd"), + _ => None + }; + + if let Some(missing_trait) = missing_trait { + span_note!(&mut err, lhs_expr.span, + "an implementation of `{}` might be missing for `{}`", + missing_trait, lhs_ty); + } + err.emit(); } - err.emit(); } + self.tcx.types.err } - fcx.tcx().types.err - } - }; + }; - // see `NB` above - check_expr_coercable_to_type(fcx, rhs_expr, rhs_ty_var); + // see `NB` above + self.check_expr_coercable_to_type(rhs_expr, rhs_ty_var); - (rhs_ty_var, return_ty) -} + (rhs_ty_var, return_ty) + } -pub fn check_user_unop<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - op_str: &str, - mname: &str, - trait_did: Option, - ex: &'tcx hir::Expr, - operand_expr: &'tcx hir::Expr, - operand_ty: Ty<'tcx>, - op: hir::UnOp) - -> Ty<'tcx> -{ - assert!(op.is_by_value()); - match lookup_op_method(fcx, ex, operand_ty, vec![], - token::intern(mname), trait_did, - operand_expr) { - Ok(t) => t, - Err(()) => { - fcx.type_error_message(ex.span, |actual| { - format!("cannot apply unary operator `{}` to type `{}`", - op_str, actual) - }, operand_ty, None); - fcx.tcx().types.err + pub fn check_user_unop(&self, + op_str: &str, + mname: &str, + trait_did: Option, + ex: &'gcx hir::Expr, + operand_expr: &'gcx hir::Expr, + operand_ty: Ty<'tcx>, + op: hir::UnOp) + -> Ty<'tcx> + { + assert!(op.is_by_value()); + match self.lookup_op_method(ex, operand_ty, vec![], + token::intern(mname), trait_did, + operand_expr) { + Ok(t) => t, + Err(()) => { + self.type_error_message(ex.span, |actual| { + format!("cannot apply unary operator `{}` to type `{}`", + op_str, actual) + }, operand_ty, None); + self.tcx.types.err + } } } -} -fn name_and_trait_def_id(fcx: &FnCtxt, - op: hir::BinOp, - is_assign: IsAssign) - -> (&'static str, Option) { - let lang = &fcx.tcx().lang_items; - - if let IsAssign::Yes = is_assign { - match op.node { - hir::BiAdd => ("add_assign", lang.add_assign_trait()), - hir::BiSub => ("sub_assign", lang.sub_assign_trait()), - hir::BiMul => ("mul_assign", lang.mul_assign_trait()), - hir::BiDiv => ("div_assign", lang.div_assign_trait()), - hir::BiRem => ("rem_assign", lang.rem_assign_trait()), - hir::BiBitXor => ("bitxor_assign", lang.bitxor_assign_trait()), - hir::BiBitAnd => ("bitand_assign", lang.bitand_assign_trait()), - hir::BiBitOr => ("bitor_assign", lang.bitor_assign_trait()), - hir::BiShl => ("shl_assign", lang.shl_assign_trait()), - hir::BiShr => 
("shr_assign", lang.shr_assign_trait()), - hir::BiLt | hir::BiLe | hir::BiGe | hir::BiGt | hir::BiEq | hir::BiNe | hir::BiAnd | - hir::BiOr => { - span_bug!(op.span, - "impossible assignment operation: {}=", - op.node.as_str()) + fn name_and_trait_def_id(&self, + op: hir::BinOp, + is_assign: IsAssign) + -> (&'static str, Option) { + let lang = &self.tcx.lang_items; + + if let IsAssign::Yes = is_assign { + match op.node { + hir::BiAdd => ("add_assign", lang.add_assign_trait()), + hir::BiSub => ("sub_assign", lang.sub_assign_trait()), + hir::BiMul => ("mul_assign", lang.mul_assign_trait()), + hir::BiDiv => ("div_assign", lang.div_assign_trait()), + hir::BiRem => ("rem_assign", lang.rem_assign_trait()), + hir::BiBitXor => ("bitxor_assign", lang.bitxor_assign_trait()), + hir::BiBitAnd => ("bitand_assign", lang.bitand_assign_trait()), + hir::BiBitOr => ("bitor_assign", lang.bitor_assign_trait()), + hir::BiShl => ("shl_assign", lang.shl_assign_trait()), + hir::BiShr => ("shr_assign", lang.shr_assign_trait()), + hir::BiLt | hir::BiLe | + hir::BiGe | hir::BiGt | + hir::BiEq | hir::BiNe | + hir::BiAnd | hir::BiOr => { + span_bug!(op.span, + "impossible assignment operation: {}=", + op.node.as_str()) + } } - } - } else { - match op.node { - hir::BiAdd => ("add", lang.add_trait()), - hir::BiSub => ("sub", lang.sub_trait()), - hir::BiMul => ("mul", lang.mul_trait()), - hir::BiDiv => ("div", lang.div_trait()), - hir::BiRem => ("rem", lang.rem_trait()), - hir::BiBitXor => ("bitxor", lang.bitxor_trait()), - hir::BiBitAnd => ("bitand", lang.bitand_trait()), - hir::BiBitOr => ("bitor", lang.bitor_trait()), - hir::BiShl => ("shl", lang.shl_trait()), - hir::BiShr => ("shr", lang.shr_trait()), - hir::BiLt => ("lt", lang.ord_trait()), - hir::BiLe => ("le", lang.ord_trait()), - hir::BiGe => ("ge", lang.ord_trait()), - hir::BiGt => ("gt", lang.ord_trait()), - hir::BiEq => ("eq", lang.eq_trait()), - hir::BiNe => ("ne", lang.eq_trait()), - hir::BiAnd | hir::BiOr => { - span_bug!(op.span, "&& and || are not overloadable") + } else { + match op.node { + hir::BiAdd => ("add", lang.add_trait()), + hir::BiSub => ("sub", lang.sub_trait()), + hir::BiMul => ("mul", lang.mul_trait()), + hir::BiDiv => ("div", lang.div_trait()), + hir::BiRem => ("rem", lang.rem_trait()), + hir::BiBitXor => ("bitxor", lang.bitxor_trait()), + hir::BiBitAnd => ("bitand", lang.bitand_trait()), + hir::BiBitOr => ("bitor", lang.bitor_trait()), + hir::BiShl => ("shl", lang.shl_trait()), + hir::BiShr => ("shr", lang.shr_trait()), + hir::BiLt => ("lt", lang.ord_trait()), + hir::BiLe => ("le", lang.ord_trait()), + hir::BiGe => ("ge", lang.ord_trait()), + hir::BiGt => ("gt", lang.ord_trait()), + hir::BiEq => ("eq", lang.eq_trait()), + hir::BiNe => ("ne", lang.eq_trait()), + hir::BiAnd | hir::BiOr => { + span_bug!(op.span, "&& and || are not overloadable") + } } } } -} -fn lookup_op_method<'a, 'tcx>(fcx: &'a FnCtxt<'a, 'tcx>, - expr: &'tcx hir::Expr, - lhs_ty: Ty<'tcx>, - other_tys: Vec>, - opname: ast::Name, - trait_did: Option, - lhs_expr: &'a hir::Expr) - -> Result,()> -{ - debug!("lookup_op_method(expr={:?}, lhs_ty={:?}, opname={:?}, trait_did={:?}, lhs_expr={:?})", - expr, - lhs_ty, - opname, - trait_did, - lhs_expr); - - let method = match trait_did { - Some(trait_did) => { - method::lookup_in_trait_adjusted(fcx, - expr.span, - Some(lhs_expr), - opname, - trait_did, - 0, - false, - lhs_ty, - Some(other_tys)) - } - None => None - }; + fn lookup_op_method(&self, + expr: &'gcx hir::Expr, + lhs_ty: Ty<'tcx>, + other_tys: Vec>, + opname: ast::Name, + 
trait_did: Option, + lhs_expr: &'a hir::Expr) + -> Result,()> + { + debug!("lookup_op_method(expr={:?}, lhs_ty={:?}, opname={:?}, \ + trait_did={:?}, lhs_expr={:?})", + expr, + lhs_ty, + opname, + trait_did, + lhs_expr); + + let method = match trait_did { + Some(trait_did) => { + self.lookup_method_in_trait_adjusted(expr.span, + Some(lhs_expr), + opname, + trait_did, + 0, + false, + lhs_ty, + Some(other_tys)) + } + None => None + }; - match method { - Some(method) => { - let method_ty = method.ty; + match method { + Some(method) => { + let method_ty = method.ty; - // HACK(eddyb) Fully qualified path to work around a resolve bug. - let method_call = ::rustc::ty::MethodCall::expr(expr.id); - fcx.inh.tables.borrow_mut().method_map.insert(method_call, method); + // HACK(eddyb) Fully qualified path to work around a resolve bug. + let method_call = ::rustc::ty::MethodCall::expr(expr.id); + self.tables.borrow_mut().method_map.insert(method_call, method); - // extract return type for method; all late bound regions - // should have been instantiated by now - let ret_ty = method_ty.fn_ret(); - Ok(fcx.tcx().no_late_bound_regions(&ret_ty).unwrap().unwrap()) - } - None => { - Err(()) + // extract return type for method; all late bound regions + // should have been instantiated by now + let ret_ty = method_ty.fn_ret(); + Ok(self.tcx.no_late_bound_regions(&ret_ty).unwrap().unwrap()) + } + None => { + Err(()) + } } } } @@ -428,11 +427,7 @@ enum IsAssign { /// Reason #2 is the killer. I tried for a while to always use /// overloaded logic and just check the types in constants/trans after /// the fact, and it worked fine, except for SIMD types. -nmatsakis -fn is_builtin_binop<'tcx>(lhs: Ty<'tcx>, - rhs: Ty<'tcx>, - op: hir::BinOp) - -> bool -{ +fn is_builtin_binop(lhs: Ty, rhs: Ty, op: hir::BinOp) -> bool { match BinOpCategory::from(op) { BinOpCategory::Shortcircuit => { true diff --git a/src/librustc_typeck/check/regionck.rs b/src/librustc_typeck/check/regionck.rs index 2a4de6e091..7b79f2ec9b 100644 --- a/src/librustc_typeck/check/regionck.rs +++ b/src/librustc_typeck/check/regionck.rs @@ -82,7 +82,6 @@ //! relation, except that a borrowed pointer never owns its //! contents. -use astconv::AstConv; use check::dropck; use check::FnCtxt; use middle::free_region::FreeRegionMap; @@ -91,13 +90,14 @@ use middle::mem_categorization::Categorization; use middle::region::{self, CodeExtent}; use rustc::ty::subst::Substs; use rustc::traits; -use rustc::ty::{self, Ty, TyCtxt, MethodCall, TypeFoldable}; -use rustc::infer::{self, GenericKind, InferCtxt, InferOk, SubregionOrigin, TypeOrigin, VerifyBound}; +use rustc::ty::{self, Ty, MethodCall, TypeFoldable}; +use rustc::infer::{self, GenericKind, InferOk, SubregionOrigin, TypeOrigin, VerifyBound}; use hir::pat_util; use rustc::ty::adjustment; use rustc::ty::wf::ImpliedBound; use std::mem; +use std::ops::Deref; use syntax::ast; use syntax::codemap::Span; use rustc::hir::intravisit::{self, Visitor}; @@ -113,62 +113,62 @@ macro_rules! 
ignore_err { /////////////////////////////////////////////////////////////////////////// // PUBLIC ENTRY POINTS -pub fn regionck_expr(fcx: &FnCtxt, e: &hir::Expr) { - let mut rcx = Rcx::new(fcx, RepeatingScope(e.id), e.id, Subject(e.id)); - if fcx.err_count_since_creation() == 0 { - // regionck assumes typeck succeeded - rcx.visit_expr(e); - rcx.visit_region_obligations(e.id); +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + pub fn regionck_expr(&self, e: &hir::Expr) { + let mut rcx = RegionCtxt::new(self, RepeatingScope(e.id), e.id, Subject(e.id)); + if self.err_count_since_creation() == 0 { + // regionck assumes typeck succeeded + rcx.visit_expr(e); + rcx.visit_region_obligations(e.id); + } + rcx.resolve_regions_and_report_errors(); } - rcx.resolve_regions_and_report_errors(); -} - -/// Region checking during the WF phase for items. `wf_tys` are the -/// types from which we should derive implied bounds, if any. -pub fn regionck_item<'a,'tcx>(fcx: &FnCtxt<'a,'tcx>, - item_id: ast::NodeId, - span: Span, - wf_tys: &[Ty<'tcx>]) { - debug!("regionck_item(item.id={:?}, wf_tys={:?}", item_id, wf_tys); - let mut rcx = Rcx::new(fcx, RepeatingScope(item_id), item_id, Subject(item_id)); - let tcx = fcx.tcx(); - rcx.free_region_map - .relate_free_regions_from_predicates(tcx, &fcx.infcx().parameter_environment.caller_bounds); - rcx.relate_free_regions(wf_tys, item_id, span); - rcx.visit_region_obligations(item_id); - rcx.resolve_regions_and_report_errors(); -} -pub fn regionck_fn(fcx: &FnCtxt, - fn_id: ast::NodeId, - fn_span: Span, - decl: &hir::FnDecl, - blk: &hir::Block) { - debug!("regionck_fn(id={})", fn_id); - let mut rcx = Rcx::new(fcx, RepeatingScope(blk.id), blk.id, Subject(fn_id)); - - if fcx.err_count_since_creation() == 0 { - // regionck assumes typeck succeeded - rcx.visit_fn_body(fn_id, decl, blk, fn_span); + /// Region checking during the WF phase for items. `wf_tys` are the + /// types from which we should derive implied bounds, if any. + pub fn regionck_item(&self, + item_id: ast::NodeId, + span: Span, + wf_tys: &[Ty<'tcx>]) { + debug!("regionck_item(item.id={:?}, wf_tys={:?}", item_id, wf_tys); + let mut rcx = RegionCtxt::new(self, RepeatingScope(item_id), item_id, Subject(item_id)); + rcx.free_region_map.relate_free_regions_from_predicates( + &self.parameter_environment.caller_bounds); + rcx.relate_free_regions(wf_tys, item_id, span); + rcx.visit_region_obligations(item_id); + rcx.resolve_regions_and_report_errors(); } - let tcx = fcx.tcx(); - rcx.free_region_map - .relate_free_regions_from_predicates(tcx, &fcx.infcx().parameter_environment.caller_bounds); + pub fn regionck_fn(&self, + fn_id: ast::NodeId, + fn_span: Span, + decl: &hir::FnDecl, + blk: &hir::Block) { + debug!("regionck_fn(id={})", fn_id); + let mut rcx = RegionCtxt::new(self, RepeatingScope(blk.id), blk.id, Subject(fn_id)); + + if self.err_count_since_creation() == 0 { + // regionck assumes typeck succeeded + rcx.visit_fn_body(fn_id, decl, blk, fn_span); + } + + rcx.free_region_map.relate_free_regions_from_predicates( + &self.parameter_environment.caller_bounds); - rcx.resolve_regions_and_report_errors(); + rcx.resolve_regions_and_report_errors(); - // For the top-level fn, store the free-region-map. We don't store - // any map for closures; they just share the same map as the - // function that created them. - fcx.tcx().store_free_region_map(fn_id, rcx.free_region_map); + // For the top-level fn, store the free-region-map. 
We don't store + // any map for closures; they just share the same map as the + // function that created them. + self.tcx.store_free_region_map(fn_id, rcx.free_region_map); + } } /////////////////////////////////////////////////////////////////////////// // INTERNALS -pub struct Rcx<'a, 'tcx: 'a> { - pub fcx: &'a FnCtxt<'a, 'tcx>, +pub struct RegionCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + pub fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, region_bound_pairs: Vec<(ty::Region, GenericKind<'tcx>)>, @@ -188,33 +188,33 @@ pub struct Rcx<'a, 'tcx: 'a> { } +impl<'a, 'gcx, 'tcx> Deref for RegionCtxt<'a, 'gcx, 'tcx> { + type Target = FnCtxt<'a, 'gcx, 'tcx>; + fn deref(&self) -> &Self::Target { + &self.fcx + } +} + pub struct RepeatingScope(ast::NodeId); pub enum SubjectNode { Subject(ast::NodeId), None } -impl<'a, 'tcx> Rcx<'a, 'tcx> { - pub fn new(fcx: &'a FnCtxt<'a, 'tcx>, +impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { + pub fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, initial_repeating_scope: RepeatingScope, initial_body_id: ast::NodeId, - subject: SubjectNode) -> Rcx<'a, 'tcx> { + subject: SubjectNode) -> RegionCtxt<'a, 'gcx, 'tcx> { let RepeatingScope(initial_repeating_scope) = initial_repeating_scope; - Rcx { fcx: fcx, - repeating_scope: initial_repeating_scope, - body_id: initial_body_id, - call_site_scope: None, - subject: subject, - region_bound_pairs: Vec::new(), - free_region_map: FreeRegionMap::new(), + RegionCtxt { + fcx: fcx, + repeating_scope: initial_repeating_scope, + body_id: initial_body_id, + call_site_scope: None, + subject: subject, + region_bound_pairs: Vec::new(), + free_region_map: FreeRegionMap::new(), } } - pub fn tcx(&self) -> &'a TyCtxt<'tcx> { - self.fcx.ccx.tcx - } - - pub fn infcx(&self) -> &InferCtxt<'a,'tcx> { - self.fcx.infcx() - } - fn set_call_site_scope(&mut self, call_site_scope: Option) -> Option { mem::replace(&mut self.call_site_scope, call_site_scope) } @@ -251,17 +251,17 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { /// of b will be `&.i32` and then `*b` will require that `` be bigger than the let and /// the `*b` expression, so we will effectively resolve `` to be the block B. pub fn resolve_type(&self, unresolved_ty: Ty<'tcx>) -> Ty<'tcx> { - self.fcx.infcx().resolve_type_vars_if_possible(&unresolved_ty) + self.resolve_type_vars_if_possible(&unresolved_ty) } /// Try to resolve the type for the given node. 
fn resolve_node_type(&self, id: ast::NodeId) -> Ty<'tcx> { - let t = self.fcx.node_ty(id); + let t = self.node_ty(id); self.resolve_type(t) } fn resolve_method_type(&self, method_call: MethodCall) -> Option> { - let method_ty = self.fcx.inh.tables.borrow().method_map + let method_ty = self.tables.borrow().method_map .get(&method_call).map(|method| method.ty); method_ty.map(|method_ty| self.resolve_type(method_ty)) } @@ -273,8 +273,8 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { ty_unadjusted } else { ty_unadjusted.adjust( - self.fcx.tcx(), expr.span, expr.id, - self.fcx.inh.tables.borrow().adjustments.get(&expr.id), + self.tcx, expr.span, expr.id, + self.tables.borrow().adjustments.get(&expr.id), |method_call| self.resolve_method_type(method_call)) } } @@ -288,12 +288,12 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { // When we enter a function, we can derive debug!("visit_fn_body(id={})", id); - let call_site = self.fcx.tcx().region_maps.lookup_code_extent( + let call_site = self.tcx.region_maps.lookup_code_extent( region::CodeExtentData::CallSiteScope { fn_id: id, body_id: body.id }); let old_call_site_scope = self.set_call_site_scope(Some(call_site)); let fn_sig = { - let fn_sig_map = &self.infcx().tables.borrow().liberated_fn_sigs; + let fn_sig_map = &self.tables.borrow().liberated_fn_sigs; match fn_sig_map.get(&id) { Some(f) => f.clone(), None => { @@ -312,24 +312,22 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { let fn_sig_tys: Vec<_> = fn_sig.inputs.iter() .cloned() - .chain(Some(fn_sig.output.unwrap_or(self.tcx().types.bool))) + .chain(Some(fn_sig.output.unwrap_or(self.tcx.types.bool))) .collect(); let old_body_id = self.set_body_id(body.id); self.relate_free_regions(&fn_sig_tys[..], body.id, span); - link_fn_args(self, - self.tcx().region_maps.node_extent(body.id), - &fn_decl.inputs[..]); + self.link_fn_args(self.tcx.region_maps.node_extent(body.id), + &fn_decl.inputs[..]); self.visit_block(body); self.visit_region_obligations(body.id); let call_site_scope = self.call_site_scope.unwrap(); debug!("visit_fn_body body.id {} call_site_scope: {:?}", body.id, call_site_scope); - type_of_node_must_outlive(self, - infer::CallReturn(span), - body.id, - ty::ReScope(call_site_scope)); + self.type_of_node_must_outlive(infer::CallReturn(span), + body.id, + ty::ReScope(call_site_scope)); self.region_bound_pairs.truncate(old_region_bounds_pairs_len); @@ -344,14 +342,12 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { // region checking can introduce new pending obligations // which, when processed, might generate new region // obligations. So make sure we process those. - self.fcx.select_all_obligations_or_error(); + self.select_all_obligations_or_error(); // Make a copy of the region obligations vec because we'll need // to be able to borrow the fulfillment-cx below when projecting. 
let region_obligations = - self.fcx - .inh - .fulfillment_cx + self.fulfillment_cx .borrow() .region_obligations(node_id) .to_vec(); @@ -361,12 +357,12 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { r_o, r_o.cause); let sup_type = self.resolve_type(r_o.sup_type); let origin = self.code_to_origin(r_o.cause.span, sup_type, &r_o.cause.code); - type_must_outlive(self, origin, sup_type, r_o.sub_region); + self.type_must_outlive(origin, sup_type, r_o.sub_region); } // Processing the region obligations should not cause the list to grow further: assert_eq!(region_obligations.len(), - self.fcx.inh.fulfillment_cx.borrow().region_obligations(node_id).len()); + self.fulfillment_cx.borrow().region_obligations(node_id).len()); } fn code_to_origin(&self, @@ -401,7 +397,7 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { for &ty in fn_sig_tys { let ty = self.resolve_type(ty); debug!("relate_free_regions(t={:?})", ty); - let implied_bounds = ty::wf::implied_bounds(self.fcx.infcx(), body_id, ty, span); + let implied_bounds = ty::wf::implied_bounds(self, body_id, ty, span); // Record any relations between free regions that we observe into the free-region-map. self.free_region_map.relate_free_regions_from_implied_bounds(&implied_bounds); @@ -414,7 +410,7 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { match implication { ImpliedBound::RegionSubRegion(ty::ReFree(free_a), ty::ReVar(vid_b)) => { - self.fcx.inh.infcx.add_given(free_a, vid_b); + self.add_given(free_a, vid_b); } ImpliedBound::RegionSubParam(r_a, param_b) => { self.region_bound_pairs.push((r_a, GenericKind::Param(param_b))); @@ -449,12 +445,49 @@ impl<'a, 'tcx> Rcx<'a, 'tcx> { } }; - self.fcx.infcx().resolve_regions_and_report_errors(&self.free_region_map, - subject_node_id); + self.fcx.resolve_regions_and_report_errors(&self.free_region_map, + subject_node_id); + } + + fn constrain_bindings_in_pat(&mut self, pat: &hir::Pat) { + let tcx = self.tcx; + debug!("regionck::visit_pat(pat={:?})", pat); + pat_util::pat_bindings(&tcx.def_map, pat, |_, id, span, _| { + // If we have a variable that contains region'd data, that + // data will be accessible from anywhere that the variable is + // accessed. We must be wary of loops like this: + // + // // from src/test/compile-fail/borrowck-lend-flow.rs + // let mut v = box 3, w = box 4; + // let mut x = &mut w; + // loop { + // **x += 1; // (2) + // borrow(v); //~ ERROR cannot borrow + // x = &mut v; // (1) + // } + // + // Typically, we try to determine the region of a borrow from + // those points where it is dereferenced. In this case, one + // might imagine that the lifetime of `x` need only be the + // body of the loop. But of course this is incorrect because + // the pointer that is created at point (1) is consumed at + // point (2), meaning that it must be live across the loop + // iteration. The easiest way to guarantee this is to require + // that the lifetime of any regions that appear in a + // variable's type enclose at least the variable's scope. + + let var_scope = tcx.region_maps.var_scope(id); + + let origin = infer::BindingTypeIsNotValidAtDecl(span); + self.type_of_node_must_outlive(origin, id, ty::ReScope(var_scope)); + + let typ = self.resolve_node_type(id); + dropck::check_safety_of_destructor_if_necessary(self, typ, span, var_scope); + }) } } -impl<'a, 'tcx, 'v> Visitor<'v> for Rcx<'a, 'tcx> { +impl<'a, 'gcx, 'tcx, 'v> Visitor<'v> for RegionCtxt<'a, 'gcx, 'tcx> { // (..) 
FIXME(#3238) should use visit_pat, not visit_arm/visit_local, // However, right now we run into an issue whereby some free // regions are not properly related if they appear within the @@ -468,374 +501,318 @@ impl<'a, 'tcx, 'v> Visitor<'v> for Rcx<'a, 'tcx> { self.visit_fn_body(id, fd, b, span) } - fn visit_expr(&mut self, ex: &hir::Expr) { visit_expr(self, ex); } - //visit_pat: visit_pat, // (..) see above - fn visit_arm(&mut self, a: &hir::Arm) { visit_arm(self, a); } - - fn visit_local(&mut self, l: &hir::Local) { visit_local(self, l); } - - fn visit_block(&mut self, b: &hir::Block) { visit_block(self, b); } -} - -fn visit_block(rcx: &mut Rcx, b: &hir::Block) { - intravisit::walk_block(rcx, b); -} - -fn visit_arm(rcx: &mut Rcx, arm: &hir::Arm) { - // see above - for p in &arm.pats { - constrain_bindings_in_pat(&p, rcx); + fn visit_arm(&mut self, arm: &hir::Arm) { + // see above + for p in &arm.pats { + self.constrain_bindings_in_pat(p); + } + intravisit::walk_arm(self, arm); } - intravisit::walk_arm(rcx, arm); -} - -fn visit_local(rcx: &mut Rcx, l: &hir::Local) { - // see above - constrain_bindings_in_pat(&l.pat, rcx); - link_local(rcx, l); - intravisit::walk_local(rcx, l); -} - -fn constrain_bindings_in_pat(pat: &hir::Pat, rcx: &mut Rcx) { - let tcx = rcx.fcx.tcx(); - debug!("regionck::visit_pat(pat={:?})", pat); - pat_util::pat_bindings(&tcx.def_map, pat, |_, id, span, _| { - // If we have a variable that contains region'd data, that - // data will be accessible from anywhere that the variable is - // accessed. We must be wary of loops like this: - // - // // from src/test/compile-fail/borrowck-lend-flow.rs - // let mut v = box 3, w = box 4; - // let mut x = &mut w; - // loop { - // **x += 1; // (2) - // borrow(v); //~ ERROR cannot borrow - // x = &mut v; // (1) - // } - // - // Typically, we try to determine the region of a borrow from - // those points where it is dereferenced. In this case, one - // might imagine that the lifetime of `x` need only be the - // body of the loop. But of course this is incorrect because - // the pointer that is created at point (1) is consumed at - // point (2), meaning that it must be live across the loop - // iteration. The easiest way to guarantee this is to require - // that the lifetime of any regions that appear in a - // variable's type enclose at least the variable's scope. - - let var_scope = tcx.region_maps.var_scope(id); - - let origin = infer::BindingTypeIsNotValidAtDecl(span); - type_of_node_must_outlive(rcx, origin, id, ty::ReScope(var_scope)); - - let typ = rcx.resolve_node_type(id); - dropck::check_safety_of_destructor_if_necessary(rcx, typ, span, var_scope); - }) -} + fn visit_local(&mut self, l: &hir::Local) { + // see above + self.constrain_bindings_in_pat(&l.pat); + self.link_local(l); + intravisit::walk_local(self, l); + } -fn visit_expr(rcx: &mut Rcx, expr: &hir::Expr) { - debug!("regionck::visit_expr(e={:?}, repeating_scope={})", - expr, rcx.repeating_scope); - - // No matter what, the type of each expression must outlive the - // scope of that expression. This also guarantees basic WF. 
- let expr_ty = rcx.resolve_node_type(expr.id); - // the region corresponding to this expression - let expr_region = ty::ReScope(rcx.tcx().region_maps.node_extent(expr.id)); - type_must_outlive(rcx, infer::ExprTypeIsNotInScope(expr_ty, expr.span), - expr_ty, expr_region); - - let method_call = MethodCall::expr(expr.id); - let opt_method_callee = rcx.fcx.inh.tables.borrow().method_map.get(&method_call).cloned(); - let has_method_map = opt_method_callee.is_some(); - - // If we are calling a method (either explicitly or via an - // overloaded operator), check that all of the types provided as - // arguments for its type parameters are well-formed, and all the regions - // provided as arguments outlive the call. - if let Some(callee) = opt_method_callee { - let origin = match expr.node { - hir::ExprMethodCall(..) => - infer::ParameterOrigin::MethodCall, - hir::ExprUnary(op, _) if op == hir::UnDeref => - infer::ParameterOrigin::OverloadedDeref, - _ => - infer::ParameterOrigin::OverloadedOperator - }; + fn visit_expr(&mut self, expr: &hir::Expr) { + debug!("regionck::visit_expr(e={:?}, repeating_scope={})", + expr, self.repeating_scope); + + // No matter what, the type of each expression must outlive the + // scope of that expression. This also guarantees basic WF. + let expr_ty = self.resolve_node_type(expr.id); + // the region corresponding to this expression + let expr_region = ty::ReScope(self.tcx.region_maps.node_extent(expr.id)); + self.type_must_outlive(infer::ExprTypeIsNotInScope(expr_ty, expr.span), + expr_ty, expr_region); + + let method_call = MethodCall::expr(expr.id); + let opt_method_callee = self.tables.borrow().method_map.get(&method_call).cloned(); + let has_method_map = opt_method_callee.is_some(); + + // If we are calling a method (either explicitly or via an + // overloaded operator), check that all of the types provided as + // arguments for its type parameters are well-formed, and all the regions + // provided as arguments outlive the call. + if let Some(callee) = opt_method_callee { + let origin = match expr.node { + hir::ExprMethodCall(..) => + infer::ParameterOrigin::MethodCall, + hir::ExprUnary(op, _) if op == hir::UnDeref => + infer::ParameterOrigin::OverloadedDeref, + _ => + infer::ParameterOrigin::OverloadedOperator + }; - substs_wf_in_scope(rcx, origin, &callee.substs, expr.span, expr_region); - type_must_outlive(rcx, infer::ExprTypeIsNotInScope(callee.ty, expr.span), - callee.ty, expr_region); - } + self.substs_wf_in_scope(origin, &callee.substs, expr.span, expr_region); + self.type_must_outlive(infer::ExprTypeIsNotInScope(callee.ty, expr.span), + callee.ty, expr_region); + } - // Check any autoderefs or autorefs that appear. - let adjustment = rcx.fcx.inh.tables.borrow().adjustments.get(&expr.id).map(|a| a.clone()); - if let Some(adjustment) = adjustment { - debug!("adjustment={:?}", adjustment); - match adjustment { - adjustment::AdjustDerefRef(adjustment::AutoDerefRef { - autoderefs, ref autoref, .. - }) => { - let expr_ty = rcx.resolve_node_type(expr.id); - constrain_autoderefs(rcx, expr, autoderefs, expr_ty); - if let Some(ref autoref) = *autoref { - link_autoref(rcx, expr, autoderefs, autoref); - - // Require that the resulting region encompasses - // the current node. - // - // FIXME(#6268) remove to support nested method calls - type_of_node_must_outlive( - rcx, infer::AutoBorrow(expr.span), - expr.id, expr_region); + // Check any autoderefs or autorefs that appear. 
+ let adjustment = self.tables.borrow().adjustments.get(&expr.id).map(|a| a.clone()); + if let Some(adjustment) = adjustment { + debug!("adjustment={:?}", adjustment); + match adjustment { + adjustment::AdjustDerefRef(adjustment::AutoDerefRef { + autoderefs, ref autoref, .. + }) => { + let expr_ty = self.resolve_node_type(expr.id); + self.constrain_autoderefs(expr, autoderefs, expr_ty); + if let Some(ref autoref) = *autoref { + self.link_autoref(expr, autoderefs, autoref); + + // Require that the resulting region encompasses + // the current node. + // + // FIXME(#6268) remove to support nested method calls + self.type_of_node_must_outlive(infer::AutoBorrow(expr.span), + expr.id, expr_region); + } + } + /* + adjustment::AutoObject(_, ref bounds, _, _) => { + // Determine if we are casting `expr` to a trait + // instance. If so, we have to be sure that the type + // of the source obeys the new region bound. + let source_ty = self.resolve_node_type(expr.id); + self.type_must_outlive(infer::RelateObjectBound(expr.span), + source_ty, bounds.region_bound); } + */ + _ => {} } - /* - adjustment::AutoObject(_, ref bounds, _, _) => { - // Determine if we are casting `expr` to a trait - // instance. If so, we have to be sure that the type - // of the source obeys the new region bound. - let source_ty = rcx.resolve_node_type(expr.id); - type_must_outlive(rcx, infer::RelateObjectBound(expr.span), - source_ty, bounds.region_bound); + + // If necessary, constrain destructors in the unadjusted form of this + // expression. + let cmt_result = { + let mc = mc::MemCategorizationContext::new(self); + mc.cat_expr_unadjusted(expr) + }; + match cmt_result { + Ok(head_cmt) => { + self.check_safety_of_rvalue_destructor_if_necessary(head_cmt, + expr.span); + } + Err(..) => { + self.tcx.sess.delay_span_bug(expr.span, "cat_expr_unadjusted Errd"); + } } - */ - _ => {} } - // If necessary, constrain destructors in the unadjusted form of this - // expression. + // If necessary, constrain destructors in this expression. This will be + // the adjusted form if there is an adjustment. let cmt_result = { - let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx()); - mc.cat_expr_unadjusted(expr) + let mc = mc::MemCategorizationContext::new(self); + mc.cat_expr(expr) }; match cmt_result { Ok(head_cmt) => { - check_safety_of_rvalue_destructor_if_necessary(rcx, - head_cmt, - expr.span); + self.check_safety_of_rvalue_destructor_if_necessary(head_cmt, expr.span); } Err(..) => { - let tcx = rcx.fcx.tcx(); - tcx.sess.delay_span_bug(expr.span, "cat_expr_unadjusted Errd"); + self.tcx.sess.delay_span_bug(expr.span, "cat_expr Errd"); } } - } - // If necessary, constrain destructors in this expression. This will be - // the adjusted form if there is an adjustment. - let cmt_result = { - let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx()); - mc.cat_expr(expr) - }; - match cmt_result { - Ok(head_cmt) => { - check_safety_of_rvalue_destructor_if_necessary(rcx, head_cmt, expr.span); - } - Err(..) => { - let tcx = rcx.fcx.tcx(); - tcx.sess.delay_span_bug(expr.span, "cat_expr Errd"); - } - } - - debug!("regionck::visit_expr(e={:?}, repeating_scope={}) - visiting subexprs", - expr, rcx.repeating_scope); - match expr.node { - hir::ExprPath(..) 
=> { - rcx.fcx.opt_node_ty_substs(expr.id, |item_substs| { - let origin = infer::ParameterOrigin::Path; - substs_wf_in_scope(rcx, origin, &item_substs.substs, expr.span, expr_region); - }); - } - - hir::ExprCall(ref callee, ref args) => { - if has_method_map { - constrain_call(rcx, expr, Some(&callee), - args.iter().map(|e| &**e), false); - } else { - constrain_callee(rcx, callee.id, expr, &callee); - constrain_call(rcx, expr, None, - args.iter().map(|e| &**e), false); + debug!("regionck::visit_expr(e={:?}, repeating_scope={}) - visiting subexprs", + expr, self.repeating_scope); + match expr.node { + hir::ExprPath(..) => { + self.fcx.opt_node_ty_substs(expr.id, |item_substs| { + let origin = infer::ParameterOrigin::Path; + self.substs_wf_in_scope(origin, &item_substs.substs, expr.span, expr_region); + }); } - intravisit::walk_expr(rcx, expr); - } + hir::ExprCall(ref callee, ref args) => { + if has_method_map { + self.constrain_call(expr, Some(&callee), + args.iter().map(|e| &**e), false); + } else { + self.constrain_callee(callee.id, expr, &callee); + self.constrain_call(expr, None, + args.iter().map(|e| &**e), false); + } - hir::ExprMethodCall(_, _, ref args) => { - constrain_call(rcx, expr, Some(&args[0]), - args[1..].iter().map(|e| &**e), false); + intravisit::walk_expr(self, expr); + } - intravisit::walk_expr(rcx, expr); - } + hir::ExprMethodCall(_, _, ref args) => { + self.constrain_call(expr, Some(&args[0]), + args[1..].iter().map(|e| &**e), false); - hir::ExprAssignOp(_, ref lhs, ref rhs) => { - if has_method_map { - constrain_call(rcx, expr, Some(&lhs), - Some(&**rhs).into_iter(), false); + intravisit::walk_expr(self, expr); } - intravisit::walk_expr(rcx, expr); - } + hir::ExprAssignOp(_, ref lhs, ref rhs) => { + if has_method_map { + self.constrain_call(expr, Some(&lhs), + Some(&**rhs).into_iter(), false); + } - hir::ExprIndex(ref lhs, ref rhs) if has_method_map => { - constrain_call(rcx, expr, Some(&lhs), - Some(&**rhs).into_iter(), true); + intravisit::walk_expr(self, expr); + } - intravisit::walk_expr(rcx, expr); - }, + hir::ExprIndex(ref lhs, ref rhs) if has_method_map => { + self.constrain_call(expr, Some(&lhs), + Some(&**rhs).into_iter(), true); - hir::ExprBinary(op, ref lhs, ref rhs) if has_method_map => { - let implicitly_ref_args = !op.node.is_by_value(); + intravisit::walk_expr(self, expr); + }, - // As `expr_method_call`, but the call is via an - // overloaded op. Note that we (sadly) currently use an - // implicit "by ref" sort of passing style here. This - // should be converted to an adjustment! - constrain_call(rcx, expr, Some(&lhs), - Some(&**rhs).into_iter(), implicitly_ref_args); + hir::ExprBinary(op, ref lhs, ref rhs) if has_method_map => { + let implicitly_ref_args = !op.node.is_by_value(); - intravisit::walk_expr(rcx, expr); - } + // As `expr_method_call`, but the call is via an + // overloaded op. Note that we (sadly) currently use an + // implicit "by ref" sort of passing style here. This + // should be converted to an adjustment! + self.constrain_call(expr, Some(&lhs), + Some(&**rhs).into_iter(), implicitly_ref_args); - hir::ExprBinary(_, ref lhs, ref rhs) => { - // If you do `x OP y`, then the types of `x` and `y` must - // outlive the operation you are performing. 
- let lhs_ty = rcx.resolve_expr_type_adjusted(&lhs); - let rhs_ty = rcx.resolve_expr_type_adjusted(&rhs); - for &ty in &[lhs_ty, rhs_ty] { - type_must_outlive(rcx, - infer::Operand(expr.span), - ty, - expr_region); + intravisit::walk_expr(self, expr); } - intravisit::walk_expr(rcx, expr); - } - hir::ExprUnary(op, ref lhs) if has_method_map => { - let implicitly_ref_args = !op.is_by_value(); + hir::ExprBinary(_, ref lhs, ref rhs) => { + // If you do `x OP y`, then the types of `x` and `y` must + // outlive the operation you are performing. + let lhs_ty = self.resolve_expr_type_adjusted(&lhs); + let rhs_ty = self.resolve_expr_type_adjusted(&rhs); + for &ty in &[lhs_ty, rhs_ty] { + self.type_must_outlive(infer::Operand(expr.span), + ty, expr_region); + } + intravisit::walk_expr(self, expr); + } - // As above. - constrain_call(rcx, expr, Some(&lhs), - None::<hir::Expr>.iter(), implicitly_ref_args); + hir::ExprUnary(op, ref lhs) if has_method_map => { + let implicitly_ref_args = !op.is_by_value(); - intravisit::walk_expr(rcx, expr); - } + // As above. + self.constrain_call(expr, Some(&lhs), + None::<hir::Expr>.iter(), implicitly_ref_args); - hir::ExprUnary(hir::UnDeref, ref base) => { - // For *a, the lifetime of a must enclose the deref - let method_call = MethodCall::expr(expr.id); - let base_ty = match rcx.fcx.inh.tables.borrow().method_map.get(&method_call) { - Some(method) => { - constrain_call(rcx, expr, Some(&base), - None::<hir::Expr>.iter(), true); - let fn_ret = // late-bound regions in overloaded method calls are instantiated - rcx.tcx().no_late_bound_regions(&method.ty.fn_ret()).unwrap(); - fn_ret.unwrap() - } - None => rcx.resolve_node_type(base.id) - }; - if let ty::TyRef(r_ptr, _) = base_ty.sty { - mk_subregion_due_to_dereference( - rcx, expr.span, expr_region, *r_ptr); + intravisit::walk_expr(self, expr); } - intravisit::walk_expr(rcx, expr); - } + hir::ExprUnary(hir::UnDeref, ref base) => { + // For *a, the lifetime of a must enclose the deref + let method_call = MethodCall::expr(expr.id); + let base_ty = match self.tables.borrow().method_map.get(&method_call) { + Some(method) => { + self.constrain_call(expr, Some(&base), + None::<hir::Expr>.iter(), true); + // late-bound regions in overloaded method calls are instantiated + let fn_ret = self.tcx.no_late_bound_regions(&method.ty.fn_ret()); + fn_ret.unwrap().unwrap() + } + None => self.resolve_node_type(base.id) + }; + if let ty::TyRef(r_ptr, _) = base_ty.sty { + self.mk_subregion_due_to_dereference(expr.span, expr_region, *r_ptr); + } - hir::ExprIndex(ref vec_expr, _) => { - // For a[b], the lifetime of a must enclose the deref - let vec_type = rcx.resolve_expr_type_adjusted(&vec_expr); - constrain_index(rcx, expr, vec_type); + intravisit::walk_expr(self, expr); + } - intravisit::walk_expr(rcx, expr); - } + hir::ExprIndex(ref vec_expr, _) => { + // For a[b], the lifetime of a must enclose the deref + let vec_type = self.resolve_expr_type_adjusted(&vec_expr); + self.constrain_index(expr, vec_type); - hir::ExprCast(ref source, _) => { - // Determine if we are casting `source` to a trait - // instance. If so, we have to be sure that the type of - // the source obeys the trait's region bound. - constrain_cast(rcx, expr, &source); - intravisit::walk_expr(rcx, expr); - } + intravisit::walk_expr(self, expr); + } - hir::ExprAddrOf(m, ref base) => { - link_addr_of(rcx, expr, m, &base); + hir::ExprCast(ref source, _) => { + // Determine if we are casting `source` to a trait + // instance. If so, we have to be sure that the type of + // the source obeys the trait's region bound. 
+ self.constrain_cast(expr, &source); + intravisit::walk_expr(self, expr); + } - // Require that when you write a `&expr` expression, the - // resulting pointer has a lifetime that encompasses the - // `&expr` expression itself. Note that we constraining - // the type of the node expr.id here *before applying - // adjustments*. - // - // FIXME(#6268) nested method calls requires that this rule change - let ty0 = rcx.resolve_node_type(expr.id); - type_must_outlive(rcx, infer::AddrOf(expr.span), ty0, expr_region); - intravisit::walk_expr(rcx, expr); - } + hir::ExprAddrOf(m, ref base) => { + self.link_addr_of(expr, m, &base); + + // Require that when you write a `&expr` expression, the + // resulting pointer has a lifetime that encompasses the + // `&expr` expression itself. Note that we constraining + // the type of the node expr.id here *before applying + // adjustments*. + // + // FIXME(#6268) nested method calls requires that this rule change + let ty0 = self.resolve_node_type(expr.id); + self.type_must_outlive(infer::AddrOf(expr.span), ty0, expr_region); + intravisit::walk_expr(self, expr); + } - hir::ExprMatch(ref discr, ref arms, _) => { - link_match(rcx, &discr, &arms[..]); + hir::ExprMatch(ref discr, ref arms, _) => { + self.link_match(&discr, &arms[..]); - intravisit::walk_expr(rcx, expr); - } + intravisit::walk_expr(self, expr); + } - hir::ExprClosure(_, _, ref body) => { - check_expr_fn_block(rcx, expr, &body); - } + hir::ExprClosure(_, _, ref body, _) => { + self.check_expr_fn_block(expr, &body); + } - hir::ExprLoop(ref body, _) => { - let repeating_scope = rcx.set_repeating_scope(body.id); - intravisit::walk_expr(rcx, expr); - rcx.set_repeating_scope(repeating_scope); - } + hir::ExprLoop(ref body, _) => { + let repeating_scope = self.set_repeating_scope(body.id); + intravisit::walk_expr(self, expr); + self.set_repeating_scope(repeating_scope); + } - hir::ExprWhile(ref cond, ref body, _) => { - let repeating_scope = rcx.set_repeating_scope(cond.id); - rcx.visit_expr(&cond); + hir::ExprWhile(ref cond, ref body, _) => { + let repeating_scope = self.set_repeating_scope(cond.id); + self.visit_expr(&cond); - rcx.set_repeating_scope(body.id); - rcx.visit_block(&body); + self.set_repeating_scope(body.id); + self.visit_block(&body); - rcx.set_repeating_scope(repeating_scope); - } + self.set_repeating_scope(repeating_scope); + } - hir::ExprRet(Some(ref ret_expr)) => { - let call_site_scope = rcx.call_site_scope; - debug!("visit_expr ExprRet ret_expr.id {} call_site_scope: {:?}", - ret_expr.id, call_site_scope); - type_of_node_must_outlive(rcx, - infer::CallReturn(ret_expr.span), - ret_expr.id, - ty::ReScope(call_site_scope.unwrap())); - intravisit::walk_expr(rcx, expr); - } + hir::ExprRet(Some(ref ret_expr)) => { + let call_site_scope = self.call_site_scope; + debug!("visit_expr ExprRet ret_expr.id {} call_site_scope: {:?}", + ret_expr.id, call_site_scope); + self.type_of_node_must_outlive(infer::CallReturn(ret_expr.span), + ret_expr.id, + ty::ReScope(call_site_scope.unwrap())); + intravisit::walk_expr(self, expr); + } - _ => { - intravisit::walk_expr(rcx, expr); + _ => { + intravisit::walk_expr(self, expr); + } } } } -fn constrain_cast(rcx: &mut Rcx, - cast_expr: &hir::Expr, - source_expr: &hir::Expr) -{ - debug!("constrain_cast(cast_expr={:?}, source_expr={:?})", - cast_expr, - source_expr); +impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> { + fn constrain_cast(&mut self, + cast_expr: &hir::Expr, + source_expr: &hir::Expr) + { + debug!("constrain_cast(cast_expr={:?}, source_expr={:?})", 
+ cast_expr, + source_expr); - let source_ty = rcx.resolve_node_type(source_expr.id); - let target_ty = rcx.resolve_node_type(cast_expr.id); + let source_ty = self.resolve_node_type(source_expr.id); + let target_ty = self.resolve_node_type(cast_expr.id); - walk_cast(rcx, cast_expr, source_ty, target_ty); + self.walk_cast(cast_expr, source_ty, target_ty); + } - fn walk_cast<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>, - cast_expr: &hir::Expr, - from_ty: Ty<'tcx>, - to_ty: Ty<'tcx>) { + fn walk_cast(&mut self, + cast_expr: &hir::Expr, + from_ty: Ty<'tcx>, + to_ty: Ty<'tcx>) { debug!("walk_cast(from_ty={:?}, to_ty={:?})", from_ty, to_ty); @@ -843,1016 +820,1007 @@ fn constrain_cast(rcx: &mut Rcx, /*From:*/ (&ty::TyRef(from_r, ref from_mt), /*To: */ &ty::TyRef(to_r, ref to_mt)) => { // Target cannot outlive source, naturally. - rcx.fcx.mk_subr(infer::Reborrow(cast_expr.span), *to_r, *from_r); - walk_cast(rcx, cast_expr, from_mt.ty, to_mt.ty); + self.sub_regions(infer::Reborrow(cast_expr.span), *to_r, *from_r); + self.walk_cast(cast_expr, from_mt.ty, to_mt.ty); } /*From:*/ (_, /*To: */ &ty::TyTrait(box ty::TraitTy { ref bounds, .. })) => { // When T is existentially quantified as a trait // `Foo+'to`, it must outlive the region bound `'to`. - type_must_outlive(rcx, infer::RelateObjectBound(cast_expr.span), - from_ty, bounds.region_bound); + self.type_must_outlive(infer::RelateObjectBound(cast_expr.span), + from_ty, bounds.region_bound); } /*From:*/ (&ty::TyBox(from_referent_ty), /*To: */ &ty::TyBox(to_referent_ty)) => { - walk_cast(rcx, cast_expr, from_referent_ty, to_referent_ty); + self.walk_cast(cast_expr, from_referent_ty, to_referent_ty); } _ => { } } } -} -fn check_expr_fn_block(rcx: &mut Rcx, - expr: &hir::Expr, - body: &hir::Block) { - let repeating_scope = rcx.set_repeating_scope(body.id); - intravisit::walk_expr(rcx, expr); - rcx.set_repeating_scope(repeating_scope); -} + fn check_expr_fn_block(&mut self, + expr: &hir::Expr, + body: &hir::Block) { + let repeating_scope = self.set_repeating_scope(body.id); + intravisit::walk_expr(self, expr); + self.set_repeating_scope(repeating_scope); + } -fn constrain_callee(rcx: &mut Rcx, - callee_id: ast::NodeId, - _call_expr: &hir::Expr, - _callee_expr: &hir::Expr) { - let callee_ty = rcx.resolve_node_type(callee_id); - match callee_ty.sty { - ty::TyFnDef(..) | ty::TyFnPtr(_) => { } - _ => { - // this should not happen, but it does if the program is - // erroneous - // - // bug!( - // callee_expr.span, - // "Calling non-function: {}", - // callee_ty); + fn constrain_callee(&mut self, + callee_id: ast::NodeId, + _call_expr: &hir::Expr, + _callee_expr: &hir::Expr) { + let callee_ty = self.resolve_node_type(callee_id); + match callee_ty.sty { + ty::TyFnDef(..) | ty::TyFnPtr(_) => { } + _ => { + // this should not happen, but it does if the program is + // erroneous + // + // bug!( + // callee_expr.span, + // "Calling non-function: {}", + // callee_ty); + } } } -} -fn constrain_call<'a, I: Iterator<Item=&'a hir::Expr>>(rcx: &mut Rcx, - call_expr: &hir::Expr, - receiver: Option<&hir::Expr>, - arg_exprs: I, - implicitly_ref_args: bool) { - //! Invoked on every call site (i.e., normal calls, method calls, - //! and overloaded operators). Constrains the regions which appear - //! in the type of the function. Also constrains the regions that - //! appear in the arguments appropriately. 
- - debug!("constrain_call(call_expr={:?}, \ - receiver={:?}, \ - implicitly_ref_args={})", - call_expr, - receiver, - implicitly_ref_args); - - // `callee_region` is the scope representing the time in which the - // call occurs. - // - // FIXME(#6268) to support nested method calls, should be callee_id - let callee_scope = rcx.tcx().region_maps.node_extent(call_expr.id); - let callee_region = ty::ReScope(callee_scope); - - debug!("callee_region={:?}", callee_region); - - for arg_expr in arg_exprs { - debug!("Argument: {:?}", arg_expr); - - // ensure that any regions appearing in the argument type are - // valid for at least the lifetime of the function: - type_of_node_must_outlive( - rcx, infer::CallArg(arg_expr.span), - arg_expr.id, callee_region); - - // unfortunately, there are two means of taking implicit - // references, and we need to propagate constraints as a - // result. modes are going away and the "DerefArgs" code - // should be ported to use adjustments - if implicitly_ref_args { - link_by_ref(rcx, arg_expr, callee_scope); + fn constrain_call<'b, I: Iterator<Item=&'b hir::Expr>>(&mut self, + call_expr: &hir::Expr, + receiver: Option<&hir::Expr>, + arg_exprs: I, + implicitly_ref_args: bool) { + //! Invoked on every call site (i.e., normal calls, method calls, + //! and overloaded operators). Constrains the regions which appear + //! in the type of the function. Also constrains the regions that + //! appear in the arguments appropriately. + + debug!("constrain_call(call_expr={:?}, \ + receiver={:?}, \ + implicitly_ref_args={})", + call_expr, + receiver, + implicitly_ref_args); + + // `callee_region` is the scope representing the time in which the + // call occurs. + // + // FIXME(#6268) to support nested method calls, should be callee_id + let callee_scope = self.tcx.region_maps.node_extent(call_expr.id); + let callee_region = ty::ReScope(callee_scope); + + debug!("callee_region={:?}", callee_region); + + for arg_expr in arg_exprs { + debug!("Argument: {:?}", arg_expr); + + // ensure that any regions appearing in the argument type are + // valid for at least the lifetime of the function: + self.type_of_node_must_outlive(infer::CallArg(arg_expr.span), + arg_expr.id, callee_region); + + // unfortunately, there are two means of taking implicit + // references, and we need to propagate constraints as a + // result. modes are going away and the "DerefArgs" code + // should be ported to use adjustments + if implicitly_ref_args { + self.link_by_ref(arg_expr, callee_scope); + } } - } - // as loop above, but for receiver - if let Some(r) = receiver { - debug!("receiver: {:?}", r); - type_of_node_must_outlive( - rcx, infer::CallRcvr(r.span), - r.id, callee_region); - if implicitly_ref_args { - link_by_ref(rcx, &r, callee_scope); + // as loop above, but for receiver + if let Some(r) = receiver { + debug!("receiver: {:?}", r); + self.type_of_node_must_outlive(infer::CallRcvr(r.span), + r.id, callee_region); + if implicitly_ref_args { + self.link_by_ref(&r, callee_scope); + } } } -} -/// Invoked on any auto-dereference that occurs. Checks that if this is a region pointer being -/// dereferenced, the lifetime of the pointer includes the deref expr. 
-fn constrain_autoderefs<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>, - deref_expr: &hir::Expr, - derefs: usize, - mut derefd_ty: Ty<'tcx>) -{ - debug!("constrain_autoderefs(deref_expr={:?}, derefs={}, derefd_ty={:?})", - deref_expr, - derefs, - derefd_ty); - - let s_deref_expr = rcx.tcx().region_maps.node_extent(deref_expr.id); - let r_deref_expr = ty::ReScope(s_deref_expr); - for i in 0..derefs { - let method_call = MethodCall::autoderef(deref_expr.id, i as u32); - debug!("constrain_autoderefs: method_call={:?} (of {:?} total)", method_call, derefs); - - let method = rcx.fcx.inh.tables.borrow().method_map.get(&method_call).map(|m| m.clone()); - - derefd_ty = match method { - Some(method) => { - debug!("constrain_autoderefs: #{} is overloaded, method={:?}", - i, method); - - let origin = infer::ParameterOrigin::OverloadedDeref; - substs_wf_in_scope(rcx, origin, method.substs, deref_expr.span, r_deref_expr); - - // Treat overloaded autoderefs as if an AutoRef adjustment - // was applied on the base type, as that is always the case. - let fn_sig = method.ty.fn_sig(); - let fn_sig = // late-bound regions should have been instantiated - rcx.tcx().no_late_bound_regions(fn_sig).unwrap(); - let self_ty = fn_sig.inputs[0]; - let (m, r) = match self_ty.sty { - ty::TyRef(r, ref m) => (m.mutbl, r), - _ => { - span_bug!( - deref_expr.span, - "bad overloaded deref type {:?}", - method.ty) - } - }; + /// Invoked on any auto-dereference that occurs. Checks that if this is a region pointer being + /// dereferenced, the lifetime of the pointer includes the deref expr. + fn constrain_autoderefs(&mut self, + deref_expr: &hir::Expr, + derefs: usize, + mut derefd_ty: Ty<'tcx>) + { + debug!("constrain_autoderefs(deref_expr={:?}, derefs={}, derefd_ty={:?})", + deref_expr, + derefs, + derefd_ty); - debug!("constrain_autoderefs: receiver r={:?} m={:?}", - r, m); + let s_deref_expr = self.tcx.region_maps.node_extent(deref_expr.id); + let r_deref_expr = ty::ReScope(s_deref_expr); + for i in 0..derefs { + let method_call = MethodCall::autoderef(deref_expr.id, i as u32); + debug!("constrain_autoderefs: method_call={:?} (of {:?} total)", method_call, derefs); - { - let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx()); - let self_cmt = ignore_err!(mc.cat_expr_autoderefd(deref_expr, i)); - debug!("constrain_autoderefs: self_cmt={:?}", - self_cmt); - link_region(rcx, deref_expr.span, r, - ty::BorrowKind::from_mutbl(m), self_cmt); - } + let method = self.tables.borrow().method_map.get(&method_call).map(|m| m.clone()); + + derefd_ty = match method { + Some(method) => { + debug!("constrain_autoderefs: #{} is overloaded, method={:?}", + i, method); + + let origin = infer::ParameterOrigin::OverloadedDeref; + self.substs_wf_in_scope(origin, method.substs, deref_expr.span, r_deref_expr); + + // Treat overloaded autoderefs as if an AutoRef adjustment + // was applied on the base type, as that is always the case. 
+ let fn_sig = method.ty.fn_sig(); + let fn_sig = // late-bound regions should have been instantiated + self.tcx.no_late_bound_regions(fn_sig).unwrap(); + let self_ty = fn_sig.inputs[0]; + let (m, r) = match self_ty.sty { + ty::TyRef(r, ref m) => (m.mutbl, r), + _ => { + span_bug!( + deref_expr.span, + "bad overloaded deref type {:?}", + method.ty) + } + }; + + debug!("constrain_autoderefs: receiver r={:?} m={:?}", + r, m); + + { + let mc = mc::MemCategorizationContext::new(self); + let self_cmt = ignore_err!(mc.cat_expr_autoderefd(deref_expr, i)); + debug!("constrain_autoderefs: self_cmt={:?}", + self_cmt); + self.link_region(deref_expr.span, r, + ty::BorrowKind::from_mutbl(m), self_cmt); + } - // Specialized version of constrain_call. - type_must_outlive(rcx, infer::CallRcvr(deref_expr.span), - self_ty, r_deref_expr); - match fn_sig.output { - ty::FnConverging(return_type) => { - type_must_outlive(rcx, infer::CallReturn(deref_expr.span), - return_type, r_deref_expr); - return_type + // Specialized version of constrain_call. + self.type_must_outlive(infer::CallRcvr(deref_expr.span), + self_ty, r_deref_expr); + match fn_sig.output { + ty::FnConverging(return_type) => { + self.type_must_outlive(infer::CallReturn(deref_expr.span), + return_type, r_deref_expr); + return_type + } + ty::FnDiverging => bug!() } - ty::FnDiverging => bug!() } - } - None => derefd_ty - }; + None => derefd_ty + }; - if let ty::TyRef(r_ptr, _) = derefd_ty.sty { - mk_subregion_due_to_dereference(rcx, deref_expr.span, - r_deref_expr, *r_ptr); - } + if let ty::TyRef(r_ptr, _) = derefd_ty.sty { + self.mk_subregion_due_to_dereference(deref_expr.span, + r_deref_expr, *r_ptr); + } - match derefd_ty.builtin_deref(true, ty::NoPreference) { - Some(mt) => derefd_ty = mt.ty, - /* if this type can't be dereferenced, then there's already an error - in the session saying so. Just bail out for now */ - None => break + match derefd_ty.builtin_deref(true, ty::NoPreference) { + Some(mt) => derefd_ty = mt.ty, + /* if this type can't be dereferenced, then there's already an error + in the session saying so. 
Just bail out for now */ + None => break + } } } -} -pub fn mk_subregion_due_to_dereference(rcx: &mut Rcx, - deref_span: Span, - minimum_lifetime: ty::Region, - maximum_lifetime: ty::Region) { - rcx.fcx.mk_subr(infer::DerefPointer(deref_span), - minimum_lifetime, maximum_lifetime) -} + pub fn mk_subregion_due_to_dereference(&mut self, + deref_span: Span, + minimum_lifetime: ty::Region, + maximum_lifetime: ty::Region) { + self.sub_regions(infer::DerefPointer(deref_span), + minimum_lifetime, maximum_lifetime) + } -fn check_safety_of_rvalue_destructor_if_necessary<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>, - cmt: mc::cmt<'tcx>, - span: Span) { - match cmt.cat { - Categorization::Rvalue(region) => { - match region { - ty::ReScope(rvalue_scope) => { - let typ = rcx.resolve_type(cmt.ty); - dropck::check_safety_of_destructor_if_necessary(rcx, - typ, - span, - rvalue_scope); - } - ty::ReStatic => {} - region => { - span_bug!(span, - "unexpected rvalue region in rvalue \ - destructor safety checking: `{:?}`", - region); + fn check_safety_of_rvalue_destructor_if_necessary(&mut self, + cmt: mc::cmt<'tcx>, + span: Span) { + match cmt.cat { + Categorization::Rvalue(region) => { + match region { + ty::ReScope(rvalue_scope) => { + let typ = self.resolve_type(cmt.ty); + dropck::check_safety_of_destructor_if_necessary(self, + typ, + span, + rvalue_scope); + } + ty::ReStatic => {} + region => { + span_bug!(span, + "unexpected rvalue region in rvalue \ + destructor safety checking: `{:?}`", + region); + } } } + _ => {} } - _ => {} } -} -/// Invoked on any index expression that occurs. Checks that if this is a slice being indexed, the -/// lifetime of the pointer includes the deref expr. -fn constrain_index<'a, 'tcx>(rcx: &mut Rcx<'a, 'tcx>, - index_expr: &hir::Expr, - indexed_ty: Ty<'tcx>) -{ - debug!("constrain_index(index_expr=?, indexed_ty={}", - rcx.fcx.infcx().ty_to_string(indexed_ty)); - - let r_index_expr = ty::ReScope(rcx.tcx().region_maps.node_extent(index_expr.id)); - if let ty::TyRef(r_ptr, mt) = indexed_ty.sty { - match mt.ty.sty { - ty::TySlice(_) | ty::TyStr => { - rcx.fcx.mk_subr(infer::IndexSlice(index_expr.span), - r_index_expr, *r_ptr); + /// Invoked on any index expression that occurs. Checks that if this is a slice + /// being indexed, the lifetime of the pointer includes the deref expr. + fn constrain_index(&mut self, + index_expr: &hir::Expr, + indexed_ty: Ty<'tcx>) + { + debug!("constrain_index(index_expr=?, indexed_ty={}", + self.ty_to_string(indexed_ty)); + + let r_index_expr = ty::ReScope(self.tcx.region_maps.node_extent(index_expr.id)); + if let ty::TyRef(r_ptr, mt) = indexed_ty.sty { + match mt.ty.sty { + ty::TySlice(_) | ty::TyStr => { + self.sub_regions(infer::IndexSlice(index_expr.span), + r_index_expr, *r_ptr); + } + _ => {} } - _ => {} } } -} -/// Guarantees that any lifetimes which appear in the type of the node `id` (after applying -/// adjustments) are valid for at least `minimum_lifetime` -fn type_of_node_must_outlive<'a, 'tcx>( - rcx: &mut Rcx<'a, 'tcx>, - origin: infer::SubregionOrigin<'tcx>, - id: ast::NodeId, - minimum_lifetime: ty::Region) -{ - let tcx = rcx.fcx.tcx(); - - // Try to resolve the type. If we encounter an error, then typeck - // is going to fail anyway, so just stop here and let typeck - // report errors later on in the writeback phase. 
- let ty0 = rcx.resolve_node_type(id); - let ty = ty0.adjust(tcx, origin.span(), id, - rcx.fcx.inh.tables.borrow().adjustments.get(&id), - |method_call| rcx.resolve_method_type(method_call)); - debug!("constrain_regions_in_type_of_node(\ - ty={}, ty0={}, id={}, minimum_lifetime={:?})", - ty, ty0, - id, minimum_lifetime); - type_must_outlive(rcx, origin, ty, minimum_lifetime); -} + /// Guarantees that any lifetimes which appear in the type of the node `id` (after applying + /// adjustments) are valid for at least `minimum_lifetime` + fn type_of_node_must_outlive(&mut self, + origin: infer::SubregionOrigin<'tcx>, + id: ast::NodeId, + minimum_lifetime: ty::Region) + { + let tcx = self.tcx; + + // Try to resolve the type. If we encounter an error, then typeck + // is going to fail anyway, so just stop here and let typeck + // report errors later on in the writeback phase. + let ty0 = self.resolve_node_type(id); + let ty = ty0.adjust(tcx, origin.span(), id, + self.tables.borrow().adjustments.get(&id), + |method_call| self.resolve_method_type(method_call)); + debug!("constrain_regions_in_type_of_node(\ + ty={}, ty0={}, id={}, minimum_lifetime={:?})", + ty, ty0, + id, minimum_lifetime); + self.type_must_outlive(origin, ty, minimum_lifetime); + } -/// Computes the guarantor for an expression `&base` and then ensures that the lifetime of the -/// resulting pointer is linked to the lifetime of its guarantor (if any). -fn link_addr_of(rcx: &mut Rcx, expr: &hir::Expr, - mutability: hir::Mutability, base: &hir::Expr) { - debug!("link_addr_of(expr={:?}, base={:?})", expr, base); + /// Computes the guarantor for an expression `&base` and then ensures that the lifetime of the + /// resulting pointer is linked to the lifetime of its guarantor (if any). + fn link_addr_of(&mut self, expr: &hir::Expr, + mutability: hir::Mutability, base: &hir::Expr) { + debug!("link_addr_of(expr={:?}, base={:?})", expr, base); - let cmt = { - let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx()); - ignore_err!(mc.cat_expr(base)) - }; + let cmt = { + let mc = mc::MemCategorizationContext::new(self); + ignore_err!(mc.cat_expr(base)) + }; - debug!("link_addr_of: cmt={:?}", cmt); + debug!("link_addr_of: cmt={:?}", cmt); - link_region_from_node_type(rcx, expr.span, expr.id, mutability, cmt); -} + self.link_region_from_node_type(expr.span, expr.id, mutability, cmt); + } -/// Computes the guarantors for any ref bindings in a `let` and -/// then ensures that the lifetime of the resulting pointer is -/// linked to the lifetime of the initialization expression. -fn link_local(rcx: &Rcx, local: &hir::Local) { - debug!("regionck::for_local()"); - let init_expr = match local.init { - None => { return; } - Some(ref expr) => &**expr, - }; - let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx()); - let discr_cmt = ignore_err!(mc.cat_expr(init_expr)); - link_pattern(rcx, mc, discr_cmt, &local.pat); -} + /// Computes the guarantors for any ref bindings in a `let` and + /// then ensures that the lifetime of the resulting pointer is + /// linked to the lifetime of the initialization expression. 
+ fn link_local(&self, local: &hir::Local) { + debug!("regionck::for_local()"); + let init_expr = match local.init { + None => { return; } + Some(ref expr) => &**expr, + }; + let mc = mc::MemCategorizationContext::new(self); + let discr_cmt = ignore_err!(mc.cat_expr(init_expr)); + self.link_pattern(mc, discr_cmt, &local.pat); + } -/// Computes the guarantors for any ref bindings in a match and -/// then ensures that the lifetime of the resulting pointer is -/// linked to the lifetime of its guarantor (if any). -fn link_match(rcx: &Rcx, discr: &hir::Expr, arms: &[hir::Arm]) { - debug!("regionck::for_match()"); - let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx()); - let discr_cmt = ignore_err!(mc.cat_expr(discr)); - debug!("discr_cmt={:?}", discr_cmt); - for arm in arms { - for root_pat in &arm.pats { - link_pattern(rcx, mc, discr_cmt.clone(), &root_pat); + /// Computes the guarantors for any ref bindings in a match and + /// then ensures that the lifetime of the resulting pointer is + /// linked to the lifetime of its guarantor (if any). + fn link_match(&self, discr: &hir::Expr, arms: &[hir::Arm]) { + debug!("regionck::for_match()"); + let mc = mc::MemCategorizationContext::new(self); + let discr_cmt = ignore_err!(mc.cat_expr(discr)); + debug!("discr_cmt={:?}", discr_cmt); + for arm in arms { + for root_pat in &arm.pats { + self.link_pattern(mc, discr_cmt.clone(), &root_pat); + } } } -} -/// Computes the guarantors for any ref bindings in a match and -/// then ensures that the lifetime of the resulting pointer is -/// linked to the lifetime of its guarantor (if any). -fn link_fn_args(rcx: &Rcx, body_scope: CodeExtent, args: &[hir::Arg]) { - debug!("regionck::link_fn_args(body_scope={:?})", body_scope); - let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx()); - for arg in args { - let arg_ty = rcx.fcx.node_ty(arg.id); - let re_scope = ty::ReScope(body_scope); - let arg_cmt = mc.cat_rvalue(arg.id, arg.ty.span, re_scope, arg_ty); - debug!("arg_ty={:?} arg_cmt={:?} arg={:?}", - arg_ty, - arg_cmt, - arg); - link_pattern(rcx, mc, arg_cmt, &arg.pat); + /// Computes the guarantors for any ref bindings in a match and + /// then ensures that the lifetime of the resulting pointer is + /// linked to the lifetime of its guarantor (if any). + fn link_fn_args(&self, body_scope: CodeExtent, args: &[hir::Arg]) { + debug!("regionck::link_fn_args(body_scope={:?})", body_scope); + let mc = mc::MemCategorizationContext::new(self); + for arg in args { + let arg_ty = self.node_ty(arg.id); + let re_scope = ty::ReScope(body_scope); + let arg_cmt = mc.cat_rvalue(arg.id, arg.ty.span, re_scope, arg_ty); + debug!("arg_ty={:?} arg_cmt={:?} arg={:?}", + arg_ty, + arg_cmt, + arg); + self.link_pattern(mc, arg_cmt, &arg.pat); + } } -} -/// Link lifetimes of any ref bindings in `root_pat` to the pointers found in the discriminant, if -/// needed. -fn link_pattern<'t, 'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - mc: mc::MemCategorizationContext<'t, 'a, 'tcx>, - discr_cmt: mc::cmt<'tcx>, - root_pat: &hir::Pat) { - debug!("link_pattern(discr_cmt={:?}, root_pat={:?})", - discr_cmt, - root_pat); - let _ = mc.cat_pattern(discr_cmt, root_pat, |mc, sub_cmt, sub_pat| { - match sub_pat.node { - // `ref x` pattern - PatKind::Ident(hir::BindByRef(mutbl), _, _) => { - link_region_from_node_type( - rcx, sub_pat.span, sub_pat.id, - mutbl, sub_cmt); - } + /// Link lifetimes of any ref bindings in `root_pat` to the pointers found + /// in the discriminant, if needed. 
+ fn link_pattern<'t>(&self, + mc: mc::MemCategorizationContext<'a, 'gcx, 'tcx>, + discr_cmt: mc::cmt<'tcx>, + root_pat: &hir::Pat) { + debug!("link_pattern(discr_cmt={:?}, root_pat={:?})", + discr_cmt, + root_pat); + let _ = mc.cat_pattern(discr_cmt, root_pat, |mc, sub_cmt, sub_pat| { + match sub_pat.node { + // `ref x` pattern + PatKind::Ident(hir::BindByRef(mutbl), _, _) => { + self.link_region_from_node_type(sub_pat.span, sub_pat.id, + mutbl, sub_cmt); + } - // `[_, ..slice, _]` pattern - PatKind::Vec(_, Some(ref slice_pat), _) => { - match mc.cat_slice_pattern(sub_cmt, &slice_pat) { - Ok((slice_cmt, slice_mutbl, slice_r)) => { - link_region(rcx, sub_pat.span, &slice_r, - ty::BorrowKind::from_mutbl(slice_mutbl), - slice_cmt); + // `[_, ..slice, _]` pattern + PatKind::Vec(_, Some(ref slice_pat), _) => { + match mc.cat_slice_pattern(sub_cmt, &slice_pat) { + Ok((slice_cmt, slice_mutbl, slice_r)) => { + self.link_region(sub_pat.span, &slice_r, + ty::BorrowKind::from_mutbl(slice_mutbl), + slice_cmt); + } + Err(()) => {} } - Err(()) => {} } + _ => {} } - _ => {} - } - }); -} + }); + } -/// Link lifetime of borrowed pointer resulting from autoref to lifetimes in the value being -/// autoref'd. -fn link_autoref(rcx: &Rcx, - expr: &hir::Expr, - autoderefs: usize, - autoref: &adjustment::AutoRef) -{ - debug!("link_autoref(autoref={:?})", autoref); - let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx()); - let expr_cmt = ignore_err!(mc.cat_expr_autoderefd(expr, autoderefs)); - debug!("expr_cmt={:?}", expr_cmt); - - match *autoref { - adjustment::AutoPtr(r, m) => { - link_region(rcx, expr.span, r, - ty::BorrowKind::from_mutbl(m), expr_cmt); - } + /// Link lifetime of borrowed pointer resulting from autoref to lifetimes in the value being + /// autoref'd. + fn link_autoref(&self, + expr: &hir::Expr, + autoderefs: usize, + autoref: &adjustment::AutoRef) + { + debug!("link_autoref(autoref={:?})", autoref); + let mc = mc::MemCategorizationContext::new(self); + let expr_cmt = ignore_err!(mc.cat_expr_autoderefd(expr, autoderefs)); + debug!("expr_cmt={:?}", expr_cmt); + + match *autoref { + adjustment::AutoPtr(r, m) => { + self.link_region(expr.span, r, + ty::BorrowKind::from_mutbl(m), expr_cmt); + } - adjustment::AutoUnsafe(m) => { - let r = ty::ReScope(rcx.tcx().region_maps.node_extent(expr.id)); - link_region(rcx, expr.span, &r, ty::BorrowKind::from_mutbl(m), expr_cmt); + adjustment::AutoUnsafe(m) => { + let r = ty::ReScope(self.tcx.region_maps.node_extent(expr.id)); + self.link_region(expr.span, &r, ty::BorrowKind::from_mutbl(m), expr_cmt); + } } } -} -/// Computes the guarantor for cases where the `expr` is being passed by implicit reference and -/// must outlive `callee_scope`. -fn link_by_ref(rcx: &Rcx, - expr: &hir::Expr, - callee_scope: CodeExtent) { - debug!("link_by_ref(expr={:?}, callee_scope={:?})", - expr, callee_scope); - let mc = mc::MemCategorizationContext::new(rcx.fcx.infcx()); - let expr_cmt = ignore_err!(mc.cat_expr(expr)); - let borrow_region = ty::ReScope(callee_scope); - link_region(rcx, expr.span, &borrow_region, ty::ImmBorrow, expr_cmt); -} + /// Computes the guarantor for cases where the `expr` is being passed by implicit reference and + /// must outlive `callee_scope`. 
+ fn link_by_ref(&self, + expr: &hir::Expr, + callee_scope: CodeExtent) { + debug!("link_by_ref(expr={:?}, callee_scope={:?})", + expr, callee_scope); + let mc = mc::MemCategorizationContext::new(self); + let expr_cmt = ignore_err!(mc.cat_expr(expr)); + let borrow_region = ty::ReScope(callee_scope); + self.link_region(expr.span, &borrow_region, ty::ImmBorrow, expr_cmt); + } -/// Like `link_region()`, except that the region is extracted from the type of `id`, which must be -/// some reference (`&T`, `&str`, etc). -fn link_region_from_node_type<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - span: Span, - id: ast::NodeId, - mutbl: hir::Mutability, - cmt_borrowed: mc::cmt<'tcx>) { - debug!("link_region_from_node_type(id={:?}, mutbl={:?}, cmt_borrowed={:?})", - id, mutbl, cmt_borrowed); - - let rptr_ty = rcx.resolve_node_type(id); - if let ty::TyRef(&r, _) = rptr_ty.sty { - debug!("rptr_ty={}", rptr_ty); - link_region(rcx, span, &r, ty::BorrowKind::from_mutbl(mutbl), - cmt_borrowed); + /// Like `link_region()`, except that the region is extracted from the type of `id`, + /// which must be some reference (`&T`, `&str`, etc). + fn link_region_from_node_type(&self, + span: Span, + id: ast::NodeId, + mutbl: hir::Mutability, + cmt_borrowed: mc::cmt<'tcx>) { + debug!("link_region_from_node_type(id={:?}, mutbl={:?}, cmt_borrowed={:?})", + id, mutbl, cmt_borrowed); + + let rptr_ty = self.resolve_node_type(id); + if let ty::TyRef(&r, _) = rptr_ty.sty { + debug!("rptr_ty={}", rptr_ty); + self.link_region(span, &r, ty::BorrowKind::from_mutbl(mutbl), + cmt_borrowed); + } } -} -/// Informs the inference engine that `borrow_cmt` is being borrowed with kind `borrow_kind` and -/// lifetime `borrow_region`. In order to ensure borrowck is satisfied, this may create constraints -/// between regions, as explained in `link_reborrowed_region()`. -fn link_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - span: Span, - borrow_region: &ty::Region, - borrow_kind: ty::BorrowKind, - borrow_cmt: mc::cmt<'tcx>) { - let mut borrow_cmt = borrow_cmt; - let mut borrow_kind = borrow_kind; + /// Informs the inference engine that `borrow_cmt` is being borrowed with + /// kind `borrow_kind` and lifetime `borrow_region`. + /// In order to ensure borrowck is satisfied, this may create constraints + /// between regions, as explained in `link_reborrowed_region()`. 
+ fn link_region(&self, + span: Span, + borrow_region: &ty::Region, + borrow_kind: ty::BorrowKind, + borrow_cmt: mc::cmt<'tcx>) { + let mut borrow_cmt = borrow_cmt; + let mut borrow_kind = borrow_kind; + + let origin = infer::DataBorrowed(borrow_cmt.ty, span); + self.type_must_outlive(origin, borrow_cmt.ty, *borrow_region); + + loop { + debug!("link_region(borrow_region={:?}, borrow_kind={:?}, borrow_cmt={:?})", + borrow_region, + borrow_kind, + borrow_cmt); + match borrow_cmt.cat.clone() { + Categorization::Deref(ref_cmt, _, + mc::Implicit(ref_kind, ref_region)) | + Categorization::Deref(ref_cmt, _, + mc::BorrowedPtr(ref_kind, ref_region)) => { + match self.link_reborrowed_region(span, + borrow_region, borrow_kind, + ref_cmt, ref_region, ref_kind, + borrow_cmt.note) { + Some((c, k)) => { + borrow_cmt = c; + borrow_kind = k; + } + None => { + return; + } + } + } - let origin = infer::DataBorrowed(borrow_cmt.ty, span); - type_must_outlive(rcx, origin, borrow_cmt.ty, *borrow_region); + Categorization::Downcast(cmt_base, _) | + Categorization::Deref(cmt_base, _, mc::Unique) | + Categorization::Interior(cmt_base, _) => { + // Borrowing interior or owned data requires the base + // to be valid and borrowable in the same fashion. + borrow_cmt = cmt_base; + borrow_kind = borrow_kind; + } - loop { - debug!("link_region(borrow_region={:?}, borrow_kind={:?}, borrow_cmt={:?})", - borrow_region, - borrow_kind, - borrow_cmt); - match borrow_cmt.cat.clone() { - Categorization::Deref(ref_cmt, _, - mc::Implicit(ref_kind, ref_region)) | - Categorization::Deref(ref_cmt, _, - mc::BorrowedPtr(ref_kind, ref_region)) => { - match link_reborrowed_region(rcx, span, - borrow_region, borrow_kind, - ref_cmt, ref_region, ref_kind, - borrow_cmt.note) { - Some((c, k)) => { - borrow_cmt = c; - borrow_kind = k; + Categorization::Deref(_, _, mc::UnsafePtr(..)) | + Categorization::StaticItem | + Categorization::Upvar(..) | + Categorization::Local(..) | + Categorization::Rvalue(..) => { + // These are all "base cases" with independent lifetimes + // that are not subject to inference + return; + } + } + } + } + + /// This is the most complicated case: the path being borrowed is + /// itself the referent of a borrowed pointer. Let me give an + /// example fragment of code to make clear(er) the situation: + /// + /// let r: &'a mut T = ...; // the original reference "r" has lifetime 'a + /// ... + /// &'z *r // the reborrow has lifetime 'z + /// + /// Now, in this case, our primary job is to add the inference + /// constraint that `'z <= 'a`. Given this setup, let's clarify the + /// parameters in (roughly) terms of the example: + /// + /// A borrow of: `& 'z bk * r` where `r` has type `& 'a bk T` + /// borrow_region ^~ ref_region ^~ + /// borrow_kind ^~ ref_kind ^~ + /// ref_cmt ^ + /// + /// Here `bk` stands for some borrow-kind (e.g., `mut`, `uniq`, etc). + /// + /// Unfortunately, there are some complications beyond the simple + /// scenario I just painted: + /// + /// 1. The reference `r` might in fact be a "by-ref" upvar. In that + /// case, we have two jobs. First, we are inferring whether this reference + /// should be an `&T`, `&mut T`, or `&uniq T` reference, and we must + /// adjust that based on this borrow (e.g., if this is an `&mut` borrow, + /// then `r` must be an `&mut` reference). Second, whenever we link + /// two regions (here, `'z <= 'a`), we supply a *cause*, and in this + /// case we adjust the cause to indicate that the reference being + /// "reborrowed" is itself an upvar. 
This provides a nicer error message + /// should something go wrong. + /// + /// 2. There may in fact be more levels of reborrowing. In the + /// example, I said the borrow was like `&'z *r`, but it might + /// in fact be a borrow like `&'z **q` where `q` has type `&'a + /// &'b mut T`. In that case, we want to ensure that `'z <= 'a` + /// and `'z <= 'b`. This is explained more below. + /// + /// The return value of this function indicates whether we need to + /// recurse and process `ref_cmt` (see case 2 above). + fn link_reborrowed_region(&self, + span: Span, + borrow_region: &ty::Region, + borrow_kind: ty::BorrowKind, + ref_cmt: mc::cmt<'tcx>, + ref_region: ty::Region, + mut ref_kind: ty::BorrowKind, + note: mc::Note) + -> Option<(mc::cmt<'tcx>, ty::BorrowKind)> + { + // Possible upvar ID we may need later to create an entry in the + // maybe link map. + + // Detect by-ref upvar `x`: + let cause = match note { + mc::NoteUpvarRef(ref upvar_id) => { + let upvar_capture_map = &self.tables.borrow_mut().upvar_capture_map; + match upvar_capture_map.get(upvar_id) { + Some(&ty::UpvarCapture::ByRef(ref upvar_borrow)) => { + // The mutability of the upvar may have been modified + // by the above adjustment, so update our local variable. + ref_kind = upvar_borrow.kind; + + infer::ReborrowUpvar(span, *upvar_id) } - None => { - return; + _ => { + span_bug!( span, "Illegal upvar id: {:?}", upvar_id); } } } + mc::NoteClosureEnv(ref upvar_id) => { + // We don't have any mutability changes to propagate, but + // we do want to note that an upvar reborrow caused this + // link + infer::ReborrowUpvar(span, *upvar_id) + } + _ => { + infer::Reborrow(span) + } + }; - Categorization::Downcast(cmt_base, _) | - Categorization::Deref(cmt_base, _, mc::Unique) | - Categorization::Interior(cmt_base, _) => { - // Borrowing interior or owned data requires the base - // to be valid and borrowable in the same fashion. - borrow_cmt = cmt_base; - borrow_kind = borrow_kind; + debug!("link_reborrowed_region: {:?} <= {:?}", + borrow_region, + ref_region); + self.sub_regions(cause, *borrow_region, ref_region); + + // If we end up needing to recurse and establish a region link + // with `ref_cmt`, calculate what borrow kind we will end up + // needing. This will be used below. + // + // One interesting twist is that we can weaken the borrow kind + // when we recurse: to reborrow an `&mut` referent as mutable, + // borrowck requires a unique path to the `&mut` reference but not + // necessarily a *mutable* path. + let new_borrow_kind = match borrow_kind { + ty::ImmBorrow => + ty::ImmBorrow, + ty::MutBorrow | ty::UniqueImmBorrow => + ty::UniqueImmBorrow + }; + + // Decide whether we need to recurse and link any regions within + // the `ref_cmt`. This is concerned for the case where the value + // being reborrowed is in fact a borrowed pointer found within + // another borrowed pointer. For example: + // + // let p: &'b &'a mut T = ...; + // ... + // &'z **p + // + // What makes this case particularly tricky is that, if the data + // being borrowed is a `&mut` or `&uniq` borrow, borrowck requires + // not only that `'z <= 'a`, (as before) but also `'z <= 'b` + // (otherwise the user might mutate through the `&mut T` reference + // after `'b` expires and invalidate the borrow we are looking at + // now). 
+ // + // So let's re-examine our parameters in light of this more + // complicated (possible) scenario: + // + // A borrow of: `& 'z bk * * p` where `p` has type `&'b bk & 'a bk T` + // borrow_region ^~ ref_region ^~ + // borrow_kind ^~ ref_kind ^~ + // ref_cmt ^~~ + // + // (Note that since we have not examined `ref_cmt.cat`, we don't + // know whether this scenario has occurred; but I wanted to show + // how all the types get adjusted.) + match ref_kind { + ty::ImmBorrow => { + // The reference being reborrowed is a sharable ref of + // type `&'a T`. In this case, it doesn't matter where we + // *found* the `&T` pointer, the memory it references will + // be valid and immutable for `'a`. So we can stop here. + // + // (Note that the `borrow_kind` must also be ImmBorrow or + // else the user is borrowed imm memory as mut memory, + // which means they'll get an error downstream in borrowck + // anyhow.) + return None; } - Categorization::Deref(_, _, mc::UnsafePtr(..)) | - Categorization::StaticItem | - Categorization::Upvar(..) | - Categorization::Local(..) | - Categorization::Rvalue(..) => { - // These are all "base cases" with independent lifetimes - // that are not subject to inference - return; + ty::MutBorrow | ty::UniqueImmBorrow => { + // The reference being reborrowed is either an `&mut T` or + // `&uniq T`. This is the case where recursion is needed. + return Some((ref_cmt, new_borrow_kind)); } } } -} -/// This is the most complicated case: the path being borrowed is -/// itself the referent of a borrowed pointer. Let me give an -/// example fragment of code to make clear(er) the situation: -/// -/// let r: &'a mut T = ...; // the original reference "r" has lifetime 'a -/// ... -/// &'z *r // the reborrow has lifetime 'z -/// -/// Now, in this case, our primary job is to add the inference -/// constraint that `'z <= 'a`. Given this setup, let's clarify the -/// parameters in (roughly) terms of the example: -/// -/// A borrow of: `& 'z bk * r` where `r` has type `& 'a bk T` -/// borrow_region ^~ ref_region ^~ -/// borrow_kind ^~ ref_kind ^~ -/// ref_cmt ^ -/// -/// Here `bk` stands for some borrow-kind (e.g., `mut`, `uniq`, etc). -/// -/// Unfortunately, there are some complications beyond the simple -/// scenario I just painted: -/// -/// 1. The reference `r` might in fact be a "by-ref" upvar. In that -/// case, we have two jobs. First, we are inferring whether this reference -/// should be an `&T`, `&mut T`, or `&uniq T` reference, and we must -/// adjust that based on this borrow (e.g., if this is an `&mut` borrow, -/// then `r` must be an `&mut` reference). Second, whenever we link -/// two regions (here, `'z <= 'a`), we supply a *cause*, and in this -/// case we adjust the cause to indicate that the reference being -/// "reborrowed" is itself an upvar. This provides a nicer error message -/// should something go wrong. -/// -/// 2. There may in fact be more levels of reborrowing. In the -/// example, I said the borrow was like `&'z *r`, but it might -/// in fact be a borrow like `&'z **q` where `q` has type `&'a -/// &'b mut T`. In that case, we want to ensure that `'z <= 'a` -/// and `'z <= 'b`. This is explained more below. -/// -/// The return value of this function indicates whether we need to -/// recurse and process `ref_cmt` (see case 2 above). 
-fn link_reborrowed_region<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - span: Span, - borrow_region: &ty::Region, - borrow_kind: ty::BorrowKind, - ref_cmt: mc::cmt<'tcx>, - ref_region: ty::Region, - mut ref_kind: ty::BorrowKind, - note: mc::Note) - -> Option<(mc::cmt<'tcx>, ty::BorrowKind)> -{ - // Possible upvar ID we may need later to create an entry in the - // maybe link map. - - // Detect by-ref upvar `x`: - let cause = match note { - mc::NoteUpvarRef(ref upvar_id) => { - let upvar_capture_map = &rcx.fcx.inh.tables.borrow_mut().upvar_capture_map; - match upvar_capture_map.get(upvar_id) { - Some(&ty::UpvarCapture::ByRef(ref upvar_borrow)) => { - // The mutability of the upvar may have been modified - // by the above adjustment, so update our local variable. - ref_kind = upvar_borrow.kind; - - infer::ReborrowUpvar(span, *upvar_id) - } - _ => { - span_bug!( span, "Illegal upvar id: {:?}", upvar_id); - } - } - } - mc::NoteClosureEnv(ref upvar_id) => { - // We don't have any mutability changes to propagate, but - // we do want to note that an upvar reborrow caused this - // link - infer::ReborrowUpvar(span, *upvar_id) - } - _ => { - infer::Reborrow(span) - } - }; - - debug!("link_reborrowed_region: {:?} <= {:?}", - borrow_region, - ref_region); - rcx.fcx.mk_subr(cause, *borrow_region, ref_region); - - // If we end up needing to recurse and establish a region link - // with `ref_cmt`, calculate what borrow kind we will end up - // needing. This will be used below. - // - // One interesting twist is that we can weaken the borrow kind - // when we recurse: to reborrow an `&mut` referent as mutable, - // borrowck requires a unique path to the `&mut` reference but not - // necessarily a *mutable* path. - let new_borrow_kind = match borrow_kind { - ty::ImmBorrow => - ty::ImmBorrow, - ty::MutBorrow | ty::UniqueImmBorrow => - ty::UniqueImmBorrow - }; - - // Decide whether we need to recurse and link any regions within - // the `ref_cmt`. This is concerned for the case where the value - // being reborrowed is in fact a borrowed pointer found within - // another borrowed pointer. For example: - // - // let p: &'b &'a mut T = ...; - // ... - // &'z **p - // - // What makes this case particularly tricky is that, if the data - // being borrowed is a `&mut` or `&uniq` borrow, borrowck requires - // not only that `'z <= 'a`, (as before) but also `'z <= 'b` - // (otherwise the user might mutate through the `&mut T` reference - // after `'b` expires and invalidate the borrow we are looking at - // now). - // - // So let's re-examine our parameters in light of this more - // complicated (possible) scenario: - // - // A borrow of: `& 'z bk * * p` where `p` has type `&'b bk & 'a bk T` - // borrow_region ^~ ref_region ^~ - // borrow_kind ^~ ref_kind ^~ - // ref_cmt ^~~ - // - // (Note that since we have not examined `ref_cmt.cat`, we don't - // know whether this scenario has occurred; but I wanted to show - // how all the types get adjusted.) - match ref_kind { - ty::ImmBorrow => { - // The reference being reborrowed is a sharable ref of - // type `&'a T`. In this case, it doesn't matter where we - // *found* the `&T` pointer, the memory it references will - // be valid and immutable for `'a`. So we can stop here. - // - // (Note that the `borrow_kind` must also be ImmBorrow or - // else the user is borrowed imm memory as mut memory, - // which means they'll get an error downstream in borrowck - // anyhow.) 
- return None; + /// Checks that the values provided for type/region arguments in a given + /// expression are well-formed and in-scope. + fn substs_wf_in_scope(&mut self, + origin: infer::ParameterOrigin, + substs: &Substs<'tcx>, + expr_span: Span, + expr_region: ty::Region) { + debug!("substs_wf_in_scope(substs={:?}, \ + expr_region={:?}, \ + origin={:?}, \ + expr_span={:?})", + substs, expr_region, origin, expr_span); + + let origin = infer::ParameterInScope(origin, expr_span); + + for &region in &substs.regions { + self.sub_regions(origin.clone(), expr_region, region); } - ty::MutBorrow | ty::UniqueImmBorrow => { - // The reference being reborrowed is either an `&mut T` or - // `&uniq T`. This is the case where recursion is needed. - return Some((ref_cmt, new_borrow_kind)); + for &ty in &substs.types { + let ty = self.resolve_type(ty); + self.type_must_outlive(origin.clone(), ty, expr_region); } } -} -/// Checks that the values provided for type/region arguments in a given -/// expression are well-formed and in-scope. -pub fn substs_wf_in_scope<'a,'tcx>(rcx: &mut Rcx<'a,'tcx>, - origin: infer::ParameterOrigin, - substs: &Substs<'tcx>, - expr_span: Span, - expr_region: ty::Region) { - debug!("substs_wf_in_scope(substs={:?}, \ - expr_region={:?}, \ - origin={:?}, \ - expr_span={:?})", - substs, expr_region, origin, expr_span); - - let origin = infer::ParameterInScope(origin, expr_span); - - for &region in &substs.regions { - rcx.fcx.mk_subr(origin.clone(), expr_region, region); - } + /// Ensures that type is well-formed in `region`, which implies (among + /// other things) that all borrowed data reachable via `ty` outlives + /// `region`. + pub fn type_must_outlive(&self, + origin: infer::SubregionOrigin<'tcx>, + ty: Ty<'tcx>, + region: ty::Region) + { + let ty = self.resolve_type(ty); - for &ty in &substs.types { - let ty = rcx.resolve_type(ty); - type_must_outlive(rcx, origin.clone(), ty, expr_region); - } -} + debug!("type_must_outlive(ty={:?}, region={:?}, origin={:?})", + ty, + region, + origin); -/// Ensures that type is well-formed in `region`, which implies (among -/// other things) that all borrowed data reachable via `ty` outlives -/// `region`. 
-pub fn type_must_outlive<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - origin: infer::SubregionOrigin<'tcx>, - ty: Ty<'tcx>, - region: ty::Region) -{ - let ty = rcx.resolve_type(ty); - - debug!("type_must_outlive(ty={:?}, region={:?}, origin={:?})", - ty, - region, - origin); - - assert!(!ty.has_escaping_regions()); - - let components = ty::outlives::components(rcx.infcx(), ty); - components_must_outlive(rcx, origin, components, region); -} + assert!(!ty.has_escaping_regions()); -fn components_must_outlive<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - origin: infer::SubregionOrigin<'tcx>, - components: Vec<ty::outlives::Component<'tcx>>, - region: ty::Region) -{ - for component in components { - let origin = origin.clone(); - match component { - ty::outlives::Component::Region(region1) => { - rcx.fcx.mk_subr(origin, region, region1); - } - ty::outlives::Component::Param(param_ty) => { - param_ty_must_outlive(rcx, origin, region, param_ty); - } - ty::outlives::Component::Projection(projection_ty) => { - projection_must_outlive(rcx, origin, region, projection_ty); - } - ty::outlives::Component::EscapingProjection(subcomponents) => { - components_must_outlive(rcx, origin, subcomponents, region); - } - ty::outlives::Component::UnresolvedInferenceVariable(v) => { - // ignore this, we presume it will yield an error - // later, since if a type variable is not resolved by - // this point it never will be - rcx.tcx().sess.delay_span_bug( - origin.span(), - &format!("unresolved inference variable in outlives: {:?}", v)); + let components = self.outlives_components(ty); + self.components_must_outlive(origin, components, region); + } + + fn components_must_outlive(&self, + origin: infer::SubregionOrigin<'tcx>, + components: Vec<ty::outlives::Component<'tcx>>, + region: ty::Region) + { + for component in components { + let origin = origin.clone(); + match component { + ty::outlives::Component::Region(region1) => { + self.sub_regions(origin, region, region1); + } + ty::outlives::Component::Param(param_ty) => { + self.param_ty_must_outlive(origin, region, param_ty); + } + ty::outlives::Component::Projection(projection_ty) => { + self.projection_must_outlive(origin, region, projection_ty); + } + ty::outlives::Component::EscapingProjection(subcomponents) => { + self.components_must_outlive(origin, subcomponents, region); + } + ty::outlives::Component::UnresolvedInferenceVariable(v) => { + // ignore this, we presume it will yield an error + // later, since if a type variable is not resolved by + // this point it never will be + self.tcx.sess.delay_span_bug( + origin.span(), + &format!("unresolved inference variable in outlives: {:?}", v)); + } } } } -} - -fn param_ty_must_outlive<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - origin: infer::SubregionOrigin<'tcx>, - region: ty::Region, - param_ty: ty::ParamTy) { - debug!("param_ty_must_outlive(region={:?}, param_ty={:?}, origin={:?})", - region, param_ty, origin); - let verify_bound = param_bound(rcx, param_ty); - let generic = GenericKind::Param(param_ty); - rcx.fcx.infcx().verify_generic_bound(origin, generic, region, verify_bound); -} + fn param_ty_must_outlive(&self, + origin: infer::SubregionOrigin<'tcx>, + region: ty::Region, + param_ty: ty::ParamTy) { + debug!("param_ty_must_outlive(region={:?}, param_ty={:?}, origin={:?})", + region, param_ty, origin); -fn projection_must_outlive<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - origin: infer::SubregionOrigin<'tcx>, - region: ty::Region, - projection_ty: ty::ProjectionTy<'tcx>) -{ - debug!("projection_must_outlive(region={:?}, projection_ty={:?}, origin={:?})", - region, projection_ty, origin); - - // This case is 
thorny for inference. The fundamental problem is - // that there are many cases where we have choice, and inference - // doesn't like choice (the current region inference in - // particular). :) First off, we have to choose between using the - // OutlivesProjectionEnv, OutlivesProjectionTraitDef, and - // OutlivesProjectionComponent rules, any one of which is - // sufficient. If there are no inference variables involved, it's - // not hard to pick the right rule, but if there are, we're in a - // bit of a catch 22: if we picked which rule we were going to - // use, we could add constraints to the region inference graph - // that make it apply, but if we don't add those constraints, the - // rule might not apply (but another rule might). For now, we err - // on the side of adding too few edges into the graph. - - // Compute the bounds we can derive from the environment or trait - // definition. We know that the projection outlives all the - // regions in this list. - let env_bounds = projection_declared_bounds(rcx, origin.span(), projection_ty); - - debug!("projection_must_outlive: env_bounds={:?}", - env_bounds); - - // If we know that the projection outlives 'static, then we're - // done here. - if env_bounds.contains(&ty::ReStatic) { - debug!("projection_must_outlive: 'static as declared bound"); - return; + let verify_bound = self.param_bound(param_ty); + let generic = GenericKind::Param(param_ty); + self.verify_generic_bound(origin, generic, region, verify_bound); } - // If declared bounds list is empty, the only applicable rule is - // OutlivesProjectionComponent. If there are inference variables, - // then, we can break down the outlives into more primitive - // components without adding unnecessary edges. - // - // If there are *no* inference variables, however, we COULD do - // this, but we choose not to, because the error messages are less - // good. For example, a requirement like `T::Item: 'r` would be - // translated to a requirement that `T: 'r`; when this is reported - // to the user, it will thus say "T: 'r must hold so that T::Item: - // 'r holds". But that makes it sound like the only way to fix - // the problem is to add `T: 'r`, which isn't true. So, if there are no - // inference variables, we use a verify constraint instead of adding - // edges, which winds up enforcing the same condition. - let needs_infer = { - projection_ty.trait_ref.substs.types.iter().any(|t| t.needs_infer()) || - projection_ty.trait_ref.substs.regions.iter().any(|r| r.needs_infer()) - }; - if env_bounds.is_empty() && needs_infer { - debug!("projection_must_outlive: no declared bounds"); - - for &component_ty in &projection_ty.trait_ref.substs.types { - type_must_outlive(rcx, origin.clone(), component_ty, region); + fn projection_must_outlive(&self, + origin: infer::SubregionOrigin<'tcx>, + region: ty::Region, + projection_ty: ty::ProjectionTy<'tcx>) + { + debug!("projection_must_outlive(region={:?}, projection_ty={:?}, origin={:?})", + region, projection_ty, origin); + + // This case is thorny for inference. The fundamental problem is + // that there are many cases where we have choice, and inference + // doesn't like choice (the current region inference in + // particular). :) First off, we have to choose between using the + // OutlivesProjectionEnv, OutlivesProjectionTraitDef, and + // OutlivesProjectionComponent rules, any one of which is + // sufficient. 
If there are no inference variables involved, it's + // not hard to pick the right rule, but if there are, we're in a + // bit of a catch 22: if we picked which rule we were going to + // use, we could add constraints to the region inference graph + // that make it apply, but if we don't add those constraints, the + // rule might not apply (but another rule might). For now, we err + // on the side of adding too few edges into the graph. + + // Compute the bounds we can derive from the environment or trait + // definition. We know that the projection outlives all the + // regions in this list. + let env_bounds = self.projection_declared_bounds(origin.span(), projection_ty); + + debug!("projection_must_outlive: env_bounds={:?}", + env_bounds); + + // If we know that the projection outlives 'static, then we're + // done here. + if env_bounds.contains(&ty::ReStatic) { + debug!("projection_must_outlive: 'static as declared bound"); + return; } - for &r in &projection_ty.trait_ref.substs.regions { - rcx.fcx.mk_subr(origin.clone(), region, r); - } + // If declared bounds list is empty, the only applicable rule is + // OutlivesProjectionComponent. If there are inference variables, + // then, we can break down the outlives into more primitive + // components without adding unnecessary edges. + // + // If there are *no* inference variables, however, we COULD do + // this, but we choose not to, because the error messages are less + // good. For example, a requirement like `T::Item: 'r` would be + // translated to a requirement that `T: 'r`; when this is reported + // to the user, it will thus say "T: 'r must hold so that T::Item: + // 'r holds". But that makes it sound like the only way to fix + // the problem is to add `T: 'r`, which isn't true. So, if there are no + // inference variables, we use a verify constraint instead of adding + // edges, which winds up enforcing the same condition. + let needs_infer = { + projection_ty.trait_ref.substs.types.iter().any(|t| t.needs_infer()) || + projection_ty.trait_ref.substs.regions.iter().any(|r| r.needs_infer()) + }; + if env_bounds.is_empty() && needs_infer { + debug!("projection_must_outlive: no declared bounds"); - return; - } + for &component_ty in &projection_ty.trait_ref.substs.types { + self.type_must_outlive(origin.clone(), component_ty, region); + } + + for &r in &projection_ty.trait_ref.substs.regions { + self.sub_regions(origin.clone(), region, r); + } - // If we find that there is a unique declared bound `'b`, and this bound - // appears in the trait reference, then the best action is to require that `'b:'r`, - // so do that. 
This is best no matter what rule we use: - // - // - OutlivesProjectionEnv or OutlivesProjectionTraitDef: these would translate to - // the requirement that `'b:'r` - // - OutlivesProjectionComponent: this would require `'b:'r` in addition to other conditions - if !env_bounds.is_empty() && env_bounds[1..].iter().all(|b| *b == env_bounds[0]) { - let unique_bound = env_bounds[0]; - debug!("projection_must_outlive: unique declared bound = {:?}", unique_bound); - if projection_ty.trait_ref.substs.regions - .iter() - .any(|r| env_bounds.contains(r)) - { - debug!("projection_must_outlive: unique declared bound appears in trait ref"); - rcx.fcx.mk_subr(origin.clone(), region, unique_bound); return; } - } - // Fallback to verifying after the fact that there exists a - // declared bound, or that all the components appearing in the - // projection outlive; in some cases, this may add insufficient - // edges into the inference graph, leading to inference failures - // even though a satisfactory solution exists. - let verify_bound = projection_bound(rcx, origin.span(), env_bounds, projection_ty); - let generic = GenericKind::Projection(projection_ty); - rcx.fcx.infcx().verify_generic_bound(origin, generic.clone(), region, verify_bound); -} - -fn type_bound<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, span: Span, ty: Ty<'tcx>) -> VerifyBound { - match ty.sty { - ty::TyParam(p) => { - param_bound(rcx, p) - } - ty::TyProjection(data) => { - let declared_bounds = projection_declared_bounds(rcx, span, data); - projection_bound(rcx, span, declared_bounds, data) + // If we find that there is a unique declared bound `'b`, and this bound + // appears in the trait reference, then the best action is to require that `'b:'r`, + // so do that. This is best no matter what rule we use: + // + // - OutlivesProjectionEnv or OutlivesProjectionTraitDef: these would translate to + // the requirement that `'b:'r` + // - OutlivesProjectionComponent: this would require `'b:'r` in addition to + // other conditions + if !env_bounds.is_empty() && env_bounds[1..].iter().all(|b| *b == env_bounds[0]) { + let unique_bound = env_bounds[0]; + debug!("projection_must_outlive: unique declared bound = {:?}", unique_bound); + if projection_ty.trait_ref.substs.regions + .iter() + .any(|r| env_bounds.contains(r)) + { + debug!("projection_must_outlive: unique declared bound appears in trait ref"); + self.sub_regions(origin.clone(), region, unique_bound); + return; + } } - _ => { - recursive_type_bound(rcx, span, ty) + + // Fallback to verifying after the fact that there exists a + // declared bound, or that all the components appearing in the + // projection outlive; in some cases, this may add insufficient + // edges into the inference graph, leading to inference failures + // even though a satisfactory solution exists. 
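As a standalone sketch of the situation the comment above describes (the `Collection` and `First` names are invented for illustration and are not part of this patch), a type can require `T::Item: 'a` to hold without `T: 'a` ever being necessary:

```
// `T` appears only through its projection, so well-formedness of the
// reference field needs `T::Item: 'a`, not `T: 'a`.
trait Collection {
    type Item;
}

struct First<'a, T: Collection> where T::Item: 'a {
    item: &'a T::Item,
}
```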
+ let verify_bound = self.projection_bound(origin.span(), env_bounds, projection_ty); + let generic = GenericKind::Projection(projection_ty); + self.verify_generic_bound(origin, generic.clone(), region, verify_bound); + } + + fn type_bound(&self, span: Span, ty: Ty<'tcx>) -> VerifyBound { + match ty.sty { + ty::TyParam(p) => { + self.param_bound(p) + } + ty::TyProjection(data) => { + let declared_bounds = self.projection_declared_bounds(span, data); + self.projection_bound(span, declared_bounds, data) + } + _ => { + self.recursive_type_bound(span, ty) + } } } -} -fn param_bound<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, param_ty: ty::ParamTy) -> VerifyBound { - let param_env = &rcx.infcx().parameter_environment; + fn param_bound(&self, param_ty: ty::ParamTy) -> VerifyBound { + let param_env = &self.parameter_environment; - debug!("param_bound(param_ty={:?})", - param_ty); + debug!("param_bound(param_ty={:?})", + param_ty); - let mut param_bounds = declared_generic_bounds_from_env(rcx, GenericKind::Param(param_ty)); + let mut param_bounds = self.declared_generic_bounds_from_env(GenericKind::Param(param_ty)); - // Add in the default bound of fn body that applies to all in - // scope type parameters: - param_bounds.push(param_env.implicit_region_bound); + // Add in the default bound of fn body that applies to all in + // scope type parameters: + param_bounds.push(param_env.implicit_region_bound); - VerifyBound::AnyRegion(param_bounds) -} + VerifyBound::AnyRegion(param_bounds) + } -fn projection_declared_bounds<'a, 'tcx>(rcx: &Rcx<'a,'tcx>, - span: Span, - projection_ty: ty::ProjectionTy<'tcx>) - -> Vec -{ - // First assemble bounds from where clauses and traits. + fn projection_declared_bounds(&self, + span: Span, + projection_ty: ty::ProjectionTy<'tcx>) + -> Vec + { + // First assemble bounds from where clauses and traits. 
- let mut declared_bounds = - declared_generic_bounds_from_env(rcx, GenericKind::Projection(projection_ty)); + let mut declared_bounds = + self.declared_generic_bounds_from_env(GenericKind::Projection(projection_ty)); - declared_bounds.extend_from_slice( - &declared_projection_bounds_from_trait(rcx, span, projection_ty)); + declared_bounds.extend_from_slice( + &self.declared_projection_bounds_from_trait(span, projection_ty)); - declared_bounds -} + declared_bounds + } -fn projection_bound<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - span: Span, - declared_bounds: Vec, - projection_ty: ty::ProjectionTy<'tcx>) - -> VerifyBound { - debug!("projection_bound(declared_bounds={:?}, projection_ty={:?})", - declared_bounds, projection_ty); + fn projection_bound(&self, + span: Span, + declared_bounds: Vec, + projection_ty: ty::ProjectionTy<'tcx>) + -> VerifyBound { + debug!("projection_bound(declared_bounds={:?}, projection_ty={:?})", + declared_bounds, projection_ty); - // see the extensive comment in projection_must_outlive + // see the extensive comment in projection_must_outlive - let ty = rcx.tcx().mk_projection(projection_ty.trait_ref, projection_ty.item_name); - let recursive_bound = recursive_type_bound(rcx, span, ty); + let ty = self.tcx.mk_projection(projection_ty.trait_ref, projection_ty.item_name); + let recursive_bound = self.recursive_type_bound(span, ty); - VerifyBound::AnyRegion(declared_bounds).or(recursive_bound) -} + VerifyBound::AnyRegion(declared_bounds).or(recursive_bound) + } -fn recursive_type_bound<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - span: Span, - ty: Ty<'tcx>) - -> VerifyBound { - let mut bounds = vec![]; + fn recursive_type_bound(&self, span: Span, ty: Ty<'tcx>) -> VerifyBound { + let mut bounds = vec![]; - for subty in ty.walk_shallow() { - bounds.push(type_bound(rcx, span, subty)); - } + for subty in ty.walk_shallow() { + bounds.push(self.type_bound(span, subty)); + } - let mut regions = ty.regions(); - regions.retain(|r| !r.is_bound()); // ignore late-bound regions - bounds.push(VerifyBound::AllRegions(regions)); + let mut regions = ty.regions(); + regions.retain(|r| !r.is_bound()); // ignore late-bound regions + bounds.push(VerifyBound::AllRegions(regions)); - // remove bounds that must hold, since they are not interesting - bounds.retain(|b| !b.must_hold()); + // remove bounds that must hold, since they are not interesting + bounds.retain(|b| !b.must_hold()); - if bounds.len() == 1 { - bounds.pop().unwrap() - } else { - VerifyBound::AllBounds(bounds) + if bounds.len() == 1 { + bounds.pop().unwrap() + } else { + VerifyBound::AllBounds(bounds) + } } -} -fn declared_generic_bounds_from_env<'a, 'tcx>(rcx: &Rcx<'a, 'tcx>, - generic: GenericKind<'tcx>) - -> Vec -{ - let param_env = &rcx.infcx().parameter_environment; - - // To start, collect bounds from user: - let mut param_bounds = rcx.tcx().required_region_bounds(generic.to_ty(rcx.tcx()), - param_env.caller_bounds.clone()); - - // Next, collect regions we scraped from the well-formedness - // constraints in the fn signature. To do that, we walk the list - // of known relations from the fn ctxt. - // - // This is crucial because otherwise code like this fails: - // - // fn foo<'a, A>(x: &'a A) { x.bar() } - // - // The problem is that the type of `x` is `&'a A`. To be - // well-formed, then, A must be lower-generic by `'a`, but we - // don't know that this holds from first principles. 
- for &(r, p) in &rcx.region_bound_pairs { - debug!("generic={:?} p={:?}", - generic, - p); - if generic == p { - param_bounds.push(r); + fn declared_generic_bounds_from_env(&self, generic: GenericKind<'tcx>) + -> Vec + { + let param_env = &self.parameter_environment; + + // To start, collect bounds from user: + let mut param_bounds = self.tcx.required_region_bounds(generic.to_ty(self.tcx), + param_env.caller_bounds.clone()); + + // Next, collect regions we scraped from the well-formedness + // constraints in the fn signature. To do that, we walk the list + // of known relations from the fn ctxt. + // + // This is crucial because otherwise code like this fails: + // + // fn foo<'a, A>(x: &'a A) { x.bar() } + // + // The problem is that the type of `x` is `&'a A`. To be + // well-formed, then, A must be lower-generic by `'a`, but we + // don't know that this holds from first principles. + for &(r, p) in &self.region_bound_pairs { + debug!("generic={:?} p={:?}", + generic, + p); + if generic == p { + param_bounds.push(r); + } } - } - param_bounds -} + param_bounds + } -fn declared_projection_bounds_from_trait<'a,'tcx>(rcx: &Rcx<'a, 'tcx>, - span: Span, - projection_ty: ty::ProjectionTy<'tcx>) - -> Vec -{ - let fcx = rcx.fcx; - let tcx = fcx.tcx(); - let infcx = fcx.infcx(); - - debug!("projection_bounds(projection_ty={:?})", - projection_ty); - - let ty = tcx.mk_projection(projection_ty.trait_ref.clone(), projection_ty.item_name); - - // Say we have a projection `>::SomeType`. We are interested - // in looking for a trait definition like: - // - // ``` - // trait SomeTrait<'a> { - // type SomeType : 'a; - // } - // ``` - // - // we can thus deduce that `>::SomeType : 'a`. - let trait_predicates = tcx.lookup_predicates(projection_ty.trait_ref.def_id); - let predicates = trait_predicates.predicates.as_slice().to_vec(); - traits::elaborate_predicates(tcx, predicates) - .filter_map(|predicate| { - // we're only interesting in `T : 'a` style predicates: - let outlives = match predicate { - ty::Predicate::TypeOutlives(data) => data, - _ => { return None; } - }; + fn declared_projection_bounds_from_trait(&self, + span: Span, + projection_ty: ty::ProjectionTy<'tcx>) + -> Vec + { + debug!("projection_bounds(projection_ty={:?})", + projection_ty); - debug!("projection_bounds: outlives={:?} (1)", - outlives); + let ty = self.tcx.mk_projection(projection_ty.trait_ref.clone(), + projection_ty.item_name); - // apply the substitutions (and normalize any projected types) - let outlives = fcx.instantiate_type_scheme(span, - projection_ty.trait_ref.substs, - &outlives); + // Say we have a projection `>::SomeType`. We are interested + // in looking for a trait definition like: + // + // ``` + // trait SomeTrait<'a> { + // type SomeType : 'a; + // } + // ``` + // + // we can thus deduce that `>::SomeType : 'a`. 
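Spelled out as a compilable sketch (reusing the placeholder `SomeTrait`/`SomeType` names from the comment above, with the qualified projection path written in full):

```
// The trait declares that the associated type outlives `'a` ...
trait SomeTrait<'a> {
    type SomeType: 'a;
}

// ... so for any `T: SomeTrait<'a>` we may deduce
// `<T as SomeTrait<'a>>::SomeType: 'a`, which is exactly what makes
// the reference below well-formed.
fn deduce<'a, T: SomeTrait<'a>>(x: &'a <T as SomeTrait<'a>>::SomeType) {
    let _ = x;
}
```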
+ let trait_predicates = self.tcx.lookup_predicates(projection_ty.trait_ref.def_id); + let predicates = trait_predicates.predicates.as_slice().to_vec(); + traits::elaborate_predicates(self.tcx, predicates) + .filter_map(|predicate| { + // we're only interesting in `T : 'a` style predicates: + let outlives = match predicate { + ty::Predicate::TypeOutlives(data) => data, + _ => { return None; } + }; - debug!("projection_bounds: outlives={:?} (2)", - outlives); + debug!("projection_bounds: outlives={:?} (1)", + outlives); - let region_result = infcx.commit_if_ok(|_| { - let (outlives, _) = - infcx.replace_late_bound_regions_with_fresh_var( - span, - infer::AssocTypeProjection(projection_ty.item_name), - &outlives); + // apply the substitutions (and normalize any projected types) + let outlives = self.instantiate_type_scheme(span, + projection_ty.trait_ref.substs, + &outlives); - debug!("projection_bounds: outlives={:?} (3)", + debug!("projection_bounds: outlives={:?} (2)", outlives); - // check whether this predicate applies to our current projection - match infer::mk_eqty(infcx, false, TypeOrigin::Misc(span), ty, outlives.0) { - Ok(InferOk { obligations, .. }) => { - // FIXME(#32730) propagate obligations - assert!(obligations.is_empty()); - Ok(outlives.1) + let region_result = self.commit_if_ok(|_| { + let (outlives, _) = + self.replace_late_bound_regions_with_fresh_var( + span, + infer::AssocTypeProjection(projection_ty.item_name), + &outlives); + + debug!("projection_bounds: outlives={:?} (3)", + outlives); + + // check whether this predicate applies to our current projection + match self.eq_types(false, TypeOrigin::Misc(span), ty, outlives.0) { + Ok(InferOk { obligations, .. }) => { + // FIXME(#32730) propagate obligations + assert!(obligations.is_empty()); + Ok(outlives.1) + } + Err(_) => { Err(()) } } - Err(_) => { Err(()) } - } - }); + }); - debug!("projection_bounds: region_result={:?}", - region_result); + debug!("projection_bounds: region_result={:?}", + region_result); - region_result.ok() - }) - .collect() + region_result.ok() + }) + .collect() + } } diff --git a/src/librustc_typeck/check/upvar.rs b/src/librustc_typeck/check/upvar.rs index 40481cda76..19964d736f 100644 --- a/src/librustc_typeck/check/upvar.rs +++ b/src/librustc_typeck/check/upvar.rs @@ -42,12 +42,11 @@ use super::FnCtxt; -use check::demand; use middle::expr_use_visitor as euv; use middle::mem_categorization as mc; use middle::mem_categorization::Categorization; -use rustc::ty::{self, Ty, TyCtxt}; -use rustc::infer::{InferCtxt, UpvarRegion}; +use rustc::ty::{self, Ty}; +use rustc::infer::UpvarRegion; use std::collections::HashSet; use syntax::ast; use syntax::codemap::Span; @@ -57,48 +56,44 @@ use rustc::hir::intravisit::{self, Visitor}; /////////////////////////////////////////////////////////////////////////// // PUBLIC ENTRY POINTS -pub fn closure_analyze_fn(fcx: &FnCtxt, - _id: ast::NodeId, - _decl: &hir::FnDecl, - body: &hir::Block) -{ - let mut seed = SeedBorrowKind::new(fcx); - seed.visit_block(body); - let closures_with_inferred_kinds = seed.closures_with_inferred_kinds; +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + pub fn closure_analyze_fn(&self, body: &hir::Block) { + let mut seed = SeedBorrowKind::new(self); + seed.visit_block(body); + let closures_with_inferred_kinds = seed.closures_with_inferred_kinds; - let mut adjust = AdjustBorrowKind::new(fcx, &closures_with_inferred_kinds); - adjust.visit_block(body); + let mut adjust = AdjustBorrowKind::new(self, &closures_with_inferred_kinds); + 
adjust.visit_block(body); - // it's our job to process these. - assert!(fcx.inh.deferred_call_resolutions.borrow().is_empty()); -} + // it's our job to process these. + assert!(self.deferred_call_resolutions.borrow().is_empty()); + } -pub fn closure_analyze_const(fcx: &FnCtxt, - body: &hir::Expr) -{ - let mut seed = SeedBorrowKind::new(fcx); - seed.visit_expr(body); - let closures_with_inferred_kinds = seed.closures_with_inferred_kinds; + pub fn closure_analyze_const(&self, body: &hir::Expr) { + let mut seed = SeedBorrowKind::new(self); + seed.visit_expr(body); + let closures_with_inferred_kinds = seed.closures_with_inferred_kinds; - let mut adjust = AdjustBorrowKind::new(fcx, &closures_with_inferred_kinds); - adjust.visit_expr(body); + let mut adjust = AdjustBorrowKind::new(self, &closures_with_inferred_kinds); + adjust.visit_expr(body); - // it's our job to process these. - assert!(fcx.inh.deferred_call_resolutions.borrow().is_empty()); + // it's our job to process these. + assert!(self.deferred_call_resolutions.borrow().is_empty()); + } } /////////////////////////////////////////////////////////////////////////// // SEED BORROW KIND -struct SeedBorrowKind<'a,'tcx:'a> { - fcx: &'a FnCtxt<'a,'tcx>, +struct SeedBorrowKind<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, closures_with_inferred_kinds: HashSet, } -impl<'a, 'tcx, 'v> Visitor<'v> for SeedBorrowKind<'a, 'tcx> { +impl<'a, 'gcx, 'tcx, 'v> Visitor<'v> for SeedBorrowKind<'a, 'gcx, 'tcx> { fn visit_expr(&mut self, expr: &hir::Expr) { match expr.node { - hir::ExprClosure(cc, _, ref body) => { + hir::ExprClosure(cc, _, ref body, _) => { self.check_closure(expr, cc, &body); } @@ -109,34 +104,26 @@ impl<'a, 'tcx, 'v> Visitor<'v> for SeedBorrowKind<'a, 'tcx> { } } -impl<'a,'tcx> SeedBorrowKind<'a,'tcx> { - fn new(fcx: &'a FnCtxt<'a,'tcx>) -> SeedBorrowKind<'a,'tcx> { +impl<'a, 'gcx, 'tcx> SeedBorrowKind<'a, 'gcx, 'tcx> { + fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>) -> SeedBorrowKind<'a, 'gcx, 'tcx> { SeedBorrowKind { fcx: fcx, closures_with_inferred_kinds: HashSet::new() } } - fn tcx(&self) -> &'a TyCtxt<'tcx> { - self.fcx.tcx() - } - - fn infcx(&self) -> &'a InferCtxt<'a,'tcx> { - self.fcx.infcx() - } - fn check_closure(&mut self, expr: &hir::Expr, capture_clause: hir::CaptureClause, _body: &hir::Block) { - let closure_def_id = self.tcx().map.local_def_id(expr.id); - if !self.fcx.inh.tables.borrow().closure_kinds.contains_key(&closure_def_id) { + let closure_def_id = self.fcx.tcx.map.local_def_id(expr.id); + if !self.fcx.tables.borrow().closure_kinds.contains_key(&closure_def_id) { self.closures_with_inferred_kinds.insert(expr.id); - self.fcx.inh.tables.borrow_mut().closure_kinds - .insert(closure_def_id, ty::ClosureKind::Fn); + self.fcx.tables.borrow_mut().closure_kinds + .insert(closure_def_id, ty::ClosureKind::Fn); debug!("check_closure: adding closure_id={:?} to closures_with_inferred_kinds", closure_def_id); } - self.tcx().with_freevars(expr.id, |freevars| { + self.fcx.tcx.with_freevars(expr.id, |freevars| { for freevar in freevars { let var_node_id = freevar.def.var_id(); let upvar_id = ty::UpvarId { var_id: var_node_id, @@ -149,14 +136,14 @@ impl<'a,'tcx> SeedBorrowKind<'a,'tcx> { } hir::CaptureByRef => { let origin = UpvarRegion(upvar_id, expr.span); - let freevar_region = self.infcx().next_region_var(origin); + let freevar_region = self.fcx.next_region_var(origin); let upvar_borrow = ty::UpvarBorrow { kind: ty::ImmBorrow, region: freevar_region }; ty::UpvarCapture::ByRef(upvar_borrow) } }; - 
self.fcx.inh.tables.borrow_mut().upvar_capture_map.insert(upvar_id, capture_kind); + self.fcx.tables.borrow_mut().upvar_capture_map.insert(upvar_id, capture_kind); } }); } @@ -165,15 +152,15 @@ impl<'a,'tcx> SeedBorrowKind<'a,'tcx> { /////////////////////////////////////////////////////////////////////////// // ADJUST BORROW KIND -struct AdjustBorrowKind<'a,'tcx:'a> { - fcx: &'a FnCtxt<'a,'tcx>, +struct AdjustBorrowKind<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, closures_with_inferred_kinds: &'a HashSet, } -impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { - fn new(fcx: &'a FnCtxt<'a,'tcx>, +impl<'a, 'gcx, 'tcx> AdjustBorrowKind<'a, 'gcx, 'tcx> { + fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>, closures_with_inferred_kinds: &'a HashSet) - -> AdjustBorrowKind<'a,'tcx> { + -> AdjustBorrowKind<'a, 'gcx, 'tcx> { AdjustBorrowKind { fcx: fcx, closures_with_inferred_kinds: closures_with_inferred_kinds } } @@ -189,7 +176,7 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { debug!("analyze_closure(id={:?}, body.id={:?})", id, body.id); { - let mut euv = euv::ExprUseVisitor::new(self, self.fcx.infcx()); + let mut euv = euv::ExprUseVisitor::new(self, self.fcx); euv.walk_fn(decl, body); } @@ -221,12 +208,12 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { debug!("analyze_closure: id={:?} closure_substs={:?} final_upvar_tys={:?}", id, closure_substs, final_upvar_tys); for (&upvar_ty, final_upvar_ty) in closure_substs.upvar_tys.iter().zip(final_upvar_tys) { - demand::eqtype(self.fcx, span, final_upvar_ty, upvar_ty); + self.fcx.demand_eqtype(span, final_upvar_ty, upvar_ty); } // Now we must process and remove any deferred resolutions, // since we have a concrete closure kind. - let closure_def_id = self.fcx.tcx().map.local_def_id(id); + let closure_def_id = self.fcx.tcx.map.local_def_id(id); if self.closures_with_inferred_kinds.contains(&id) { let mut deferred_call_resolutions = self.fcx.remove_deferred_call_resolutions(closure_def_id); @@ -243,7 +230,7 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { // local crate or were inlined into it along with some function. // This may change if abstract return types of some sort are // implemented. - let tcx = self.fcx.tcx(); + let tcx = self.fcx.tcx; tcx.with_freevars(closure_id, |freevars| { freevars.iter() .map(|freevar| { @@ -253,7 +240,7 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { var_id: freevar_node_id, closure_expr_id: closure_id }; - let capture = self.fcx.infcx().upvar_capture(upvar_id).unwrap(); + let capture = self.fcx.upvar_capture(upvar_id).unwrap(); debug!("freevar_node_id={:?} freevar_ty={:?} capture={:?}", freevar_node_id, freevar_ty, capture); @@ -301,10 +288,11 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { upvar_id); // to move out of an upvar, this must be a FnOnce closure - self.adjust_closure_kind(upvar_id.closure_expr_id, ty::ClosureKind::FnOnce); + self.adjust_closure_kind(upvar_id.closure_expr_id, + ty::ClosureKind::FnOnce); let upvar_capture_map = - &mut self.fcx.inh.tables.borrow_mut().upvar_capture_map; + &mut self.fcx.tables.borrow_mut().upvar_capture_map; upvar_capture_map.insert(upvar_id, ty::UpvarCapture::ByValue); } mc::NoteClosureEnv(upvar_id) => { @@ -314,7 +302,8 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { // must still adjust the kind of the closure // to be a FnOnce closure to permit moves out // of the environment. 
- self.adjust_closure_kind(upvar_id.closure_expr_id, ty::ClosureKind::FnOnce); + self.adjust_closure_kind(upvar_id.closure_expr_id, + ty::ClosureKind::FnOnce); } mc::NoteNone => { } @@ -412,7 +401,7 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { // borrow_kind of the upvar to make sure it // is inferred to mutable if necessary { - let upvar_capture_map = &mut self.fcx.inh.tables.borrow_mut().upvar_capture_map; + let upvar_capture_map = &mut self.fcx.tables.borrow_mut().upvar_capture_map; let ub = upvar_capture_map.get_mut(&upvar_id).unwrap(); self.adjust_upvar_borrow_kind(upvar_id, ub, borrow_kind); } @@ -436,9 +425,10 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { } } - /// We infer the borrow_kind with which to borrow upvars in a stack closure. The borrow_kind - /// basically follows a lattice of `imm < unique-imm < mut`, moving from left to right as needed - /// (but never right to left). Here the argument `mutbl` is the borrow_kind that is required by + /// We infer the borrow_kind with which to borrow upvars in a stack closure. + /// The borrow_kind basically follows a lattice of `imm < unique-imm < mut`, + /// moving from left to right as needed (but never right to left). + /// Here the argument `mutbl` is the borrow_kind that is required by /// some particular use. fn adjust_upvar_borrow_kind(&self, upvar_id: ty::UpvarId, @@ -480,8 +470,8 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { return; } - let closure_def_id = self.fcx.tcx().map.local_def_id(closure_id); - let closure_kinds = &mut self.fcx.inh.tables.borrow_mut().closure_kinds; + let closure_def_id = self.fcx.tcx.map.local_def_id(closure_id); + let closure_kinds = &mut self.fcx.tables.borrow_mut().closure_kinds; let existing_kind = *closure_kinds.get(&closure_def_id).unwrap(); debug!("adjust_closure_kind: closure_id={}, existing_kind={:?}, new_kind={:?}", @@ -505,7 +495,7 @@ impl<'a,'tcx> AdjustBorrowKind<'a,'tcx> { } } -impl<'a, 'tcx, 'v> Visitor<'v> for AdjustBorrowKind<'a, 'tcx> { +impl<'a, 'gcx, 'tcx, 'v> Visitor<'v> for AdjustBorrowKind<'a, 'gcx, 'tcx> { fn visit_fn(&mut self, fn_kind: intravisit::FnKind<'v>, decl: &'v hir::FnDecl, @@ -518,7 +508,7 @@ impl<'a, 'tcx, 'v> Visitor<'v> for AdjustBorrowKind<'a, 'tcx> { } } -impl<'a,'tcx> euv::Delegate<'tcx> for AdjustBorrowKind<'a,'tcx> { +impl<'a, 'gcx, 'tcx> euv::Delegate<'tcx> for AdjustBorrowKind<'a, 'gcx, 'tcx> { fn consume(&mut self, _consume_id: ast::NodeId, _consume_span: Span, diff --git a/src/librustc_typeck/check/wfcheck.rs b/src/librustc_typeck/check/wfcheck.rs index 1b21e6ce9e..e0a3418977 100644 --- a/src/librustc_typeck/check/wfcheck.rs +++ b/src/librustc_typeck/check/wfcheck.rs @@ -8,8 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
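A minimal standalone sketch (ordinary user code, not compiler internals) of the capture kinds that the inference above ranks along its `imm < unique-imm < mut` lattice, plus the by-value case that forces `FnOnce`:

```
fn main() {
    let x = String::from("hi");
    let read = || println!("{}", x);   // shared (`imm`) borrow of `x`
    read();

    let mut n = 0;
    {
        let mut bump = || n += 1;      // mutation needs a `mut` borrow of `n`
        bump();
    }

    // The middle rung, `unique-imm`, arises when a closure re-borrows
    // through a captured `&mut` reference; it is not shown here.

    let s = String::from("owned");
    let consume = || drop(s);          // moving `s` out forces a by-value
    consume();                         // capture and an FnOnce closure kind
}
```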
-use astconv::AstConv; -use check::{FnCtxt, Inherited, blank_fn_ctxt, regionck}; +use check::FnCtxt; use constrained_type_params::{identify_constrained_type_params, Parameter}; use CrateCtxt; use hir::def_id::DefId; @@ -17,14 +16,12 @@ use middle::region::{CodeExtent}; use rustc::ty::subst::{self, TypeSpace, FnSpace, ParamSpace, SelfSpace}; use rustc::traits; use rustc::ty::{self, Ty, TyCtxt}; -use rustc::ty::fold::{TypeFolder}; -use std::cell::RefCell; use std::collections::HashSet; use syntax::ast; use syntax::codemap::{Span}; use syntax::errors::DiagnosticBuilder; -use syntax::parse::token::{special_idents}; +use syntax::parse::token::keywords; use rustc::hir::intravisit::{self, Visitor}; use rustc::hir; @@ -33,16 +30,46 @@ pub struct CheckTypeWellFormedVisitor<'ccx, 'tcx:'ccx> { code: traits::ObligationCauseCode<'tcx>, } -impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { - pub fn new(ccx: &'ccx CrateCtxt<'ccx, 'tcx>) - -> CheckTypeWellFormedVisitor<'ccx, 'tcx> { +/// Helper type of a temporary returned by .for_item(...). +/// Necessary because we can't write the following bound: +/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(FnCtxt<'b, 'gcx, 'tcx>). +struct CheckWfFcxBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + inherited: super::InheritedBuilder<'a, 'gcx, 'tcx>, + code: traits::ObligationCauseCode<'gcx>, + id: ast::NodeId, + span: Span +} + +impl<'a, 'gcx, 'tcx> CheckWfFcxBuilder<'a, 'gcx, 'tcx> { + fn with_fcx(&'tcx mut self, f: F) where + F: for<'b> FnOnce(&FnCtxt<'b, 'gcx, 'tcx>, + &mut CheckTypeWellFormedVisitor<'b, 'gcx>) -> Vec> + { + let code = self.code.clone(); + let id = self.id; + let span = self.span; + self.inherited.enter(|inh| { + let fcx = FnCtxt::new(&inh, ty::FnDiverging, id); + let wf_tys = f(&fcx, &mut CheckTypeWellFormedVisitor { + ccx: fcx.ccx, + code: code + }); + fcx.select_all_obligations_or_error(); + fcx.regionck_item(id, span, &wf_tys); + }); + } +} + +impl<'ccx, 'gcx> CheckTypeWellFormedVisitor<'ccx, 'gcx> { + pub fn new(ccx: &'ccx CrateCtxt<'ccx, 'gcx>) + -> CheckTypeWellFormedVisitor<'ccx, 'gcx> { CheckTypeWellFormedVisitor { ccx: ccx, code: traits::ObligationCauseCode::MiscObligation } } - fn tcx(&self) -> &TyCtxt<'tcx> { + fn tcx(&self) -> TyCtxt<'ccx, 'gcx, 'gcx> { self.ccx.tcx } @@ -110,14 +137,14 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { } hir::ItemStruct(ref struct_def, ref ast_generics) => { self.check_type_defn(item, |fcx| { - vec![struct_variant(fcx, struct_def)] + vec![fcx.struct_variant(struct_def)] }); self.check_variances_for_type_defn(item, ast_generics); } hir::ItemEnum(ref enum_def, ref ast_generics) => { self.check_type_defn(item, |fcx| { - enum_variants(fcx, enum_def) + fcx.enum_variants(enum_def) }); self.check_variances_for_type_defn(item, ast_generics); @@ -131,16 +158,16 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { fn check_trait_or_impl_item(&mut self, item_id: ast::NodeId, span: Span) { let code = self.code.clone(); - self.with_fcx(item_id, span, |fcx, this| { - let free_substs = &fcx.inh.infcx.parameter_environment.free_substs; - let free_id_outlive = fcx.inh.infcx.parameter_environment.free_id_outlive; + self.for_id(item_id, span).with_fcx(|fcx, this| { + let free_substs = &fcx.parameter_environment.free_substs; + let free_id_outlive = fcx.parameter_environment.free_id_outlive; - let item = fcx.tcx().impl_or_trait_item(fcx.tcx().map.local_def_id(item_id)); + let item = fcx.tcx.impl_or_trait_item(fcx.tcx.map.local_def_id(item_id)); let (mut implied_bounds, self_ty) = match item.container() { - 
ty::TraitContainer(_) => (vec![], fcx.tcx().mk_self_type()), - ty::ImplContainer(def_id) => (impl_implied_bounds(fcx, def_id, span), - fcx.tcx().lookup_item_type(def_id).ty) + ty::TraitContainer(_) => (vec![], fcx.tcx.mk_self_type()), + ty::ImplContainer(def_id) => (fcx.impl_implied_bounds(def_id, span), + fcx.tcx.lookup_item_type(def_id).ty) }; match item { @@ -149,7 +176,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { fcx.register_wf_obligation(ty, span, code.clone()); } ty::MethodTraitItem(method) => { - reject_shadowing_type_parameters(fcx.tcx(), span, &method.generics); + reject_shadowing_type_parameters(fcx.tcx, span, &method.generics); let method_ty = fcx.instantiate_type_scheme(span, free_substs, &method.fty); let predicates = fcx.instantiate_bounds(span, free_substs, &method.predicates); this.check_fn_or_method(fcx, span, &method_ty, &predicates, @@ -169,32 +196,28 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { }) } - fn with_item_fcx(&mut self, item: &hir::Item, f: F) where - F: for<'fcx> FnMut(&FnCtxt<'fcx, 'tcx>, - &mut CheckTypeWellFormedVisitor<'ccx,'tcx>) -> Vec>, - { - self.with_fcx(item.id, item.span, f) + fn for_item<'tcx>(&self, item: &hir::Item) + -> CheckWfFcxBuilder<'ccx, 'gcx, 'tcx> { + self.for_id(item.id, item.span) } - fn with_fcx(&mut self, id: ast::NodeId, span: Span, mut f: F) where - F: for<'fcx> FnMut(&FnCtxt<'fcx, 'tcx>, - &mut CheckTypeWellFormedVisitor<'ccx,'tcx>) -> Vec>, - { - let ccx = self.ccx; - let param_env = ty::ParameterEnvironment::for_item(ccx.tcx, id); - let tables = RefCell::new(ty::Tables::empty()); - let inh = Inherited::new(ccx.tcx, &tables, param_env); - let fcx = blank_fn_ctxt(ccx, &inh, ty::FnDiverging, id); - let wf_tys = f(&fcx, self); - fcx.select_all_obligations_or_error(); - regionck::regionck_item(&fcx, id, span, &wf_tys); + fn for_id<'tcx>(&self, id: ast::NodeId, span: Span) + -> CheckWfFcxBuilder<'ccx, 'gcx, 'tcx> { + let param_env = ty::ParameterEnvironment::for_item(self.ccx.tcx, id); + CheckWfFcxBuilder { + inherited: self.ccx.inherited(Some(param_env)), + code: self.code.clone(), + id: id, + span: span + } } /// In a type definition, we check that to ensure that the types of the fields are well-formed. 
fn check_type_defn(&mut self, item: &hir::Item, mut lookup_fields: F) where - F: for<'fcx> FnMut(&FnCtxt<'fcx, 'tcx>) -> Vec>, + F: for<'fcx, 'tcx> FnMut(&FnCtxt<'fcx, 'gcx, 'tcx>) + -> Vec> { - self.with_item_fcx(item, |fcx, this| { + self.for_item(item).with_fcx(|fcx, this| { let variants = lookup_fields(fcx); for variant in &variants { @@ -216,8 +239,8 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { } } - let free_substs = &fcx.inh.infcx.parameter_environment.free_substs; - let predicates = fcx.tcx().lookup_predicates(fcx.tcx().map.local_def_id(item.id)); + let free_substs = &fcx.parameter_environment.free_substs; + let predicates = fcx.tcx.lookup_predicates(fcx.tcx.map.local_def_id(item.id)); let predicates = fcx.instantiate_bounds(item.span, free_substs, &predicates); this.check_where_clauses(fcx, item.span, &predicates); @@ -231,15 +254,15 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { { let trait_def_id = self.tcx().map.local_def_id(item.id); - if self.ccx.tcx.trait_has_default_impl(trait_def_id) { + if self.tcx().trait_has_default_impl(trait_def_id) { if !items.is_empty() { error_380(self.ccx, item.span); } } - self.with_item_fcx(item, |fcx, this| { - let free_substs = &fcx.inh.infcx.parameter_environment.free_substs; - let predicates = fcx.tcx().lookup_predicates(trait_def_id); + self.for_item(item).with_fcx(|fcx, this| { + let free_substs = &fcx.parameter_environment.free_substs; + let predicates = fcx.tcx.lookup_predicates(trait_def_id); let predicates = fcx.instantiate_bounds(item.span, free_substs, &predicates); this.check_where_clauses(fcx, item.span, &predicates); vec![] @@ -250,9 +273,9 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { item: &hir::Item, body: &hir::Block) { - self.with_item_fcx(item, |fcx, this| { - let free_substs = &fcx.inh.infcx.parameter_environment.free_substs; - let type_scheme = fcx.tcx().lookup_item_type(fcx.tcx().map.local_def_id(item.id)); + self.for_item(item).with_fcx(|fcx, this| { + let free_substs = &fcx.parameter_environment.free_substs; + let type_scheme = fcx.tcx.lookup_item_type(fcx.tcx.map.local_def_id(item.id)); let item_ty = fcx.instantiate_type_scheme(item.span, free_substs, &type_scheme.ty); let bare_fn_ty = match item_ty.sty { ty::TyFnDef(_, _, ref bare_fn_ty) => bare_fn_ty, @@ -261,11 +284,11 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { } }; - let predicates = fcx.tcx().lookup_predicates(fcx.tcx().map.local_def_id(item.id)); + let predicates = fcx.tcx.lookup_predicates(fcx.tcx.map.local_def_id(item.id)); let predicates = fcx.instantiate_bounds(item.span, free_substs, &predicates); let mut implied_bounds = vec![]; - let free_id_outlive = fcx.tcx().region_maps.call_site_extent(item.id, body.id); + let free_id_outlive = fcx.tcx.region_maps.call_site_extent(item.id, body.id); this.check_fn_or_method(fcx, item.span, bare_fn_ty, &predicates, free_id_outlive, &mut implied_bounds); implied_bounds @@ -277,12 +300,10 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { { debug!("check_item_type: {:?}", item); - self.with_item_fcx(item, |fcx, this| { - let type_scheme = fcx.tcx().lookup_item_type(fcx.tcx().map.local_def_id(item.id)); + self.for_item(item).with_fcx(|fcx, this| { + let type_scheme = fcx.tcx.lookup_item_type(fcx.tcx.map.local_def_id(item.id)); let item_ty = fcx.instantiate_type_scheme(item.span, - &fcx.inh - .infcx - .parameter_environment + &fcx.parameter_environment .free_substs, &type_scheme.ty); @@ -299,18 +320,18 @@ impl<'ccx, 'tcx> 
CheckTypeWellFormedVisitor<'ccx, 'tcx> { { debug!("check_impl: {:?}", item); - self.with_item_fcx(item, |fcx, this| { - let free_substs = &fcx.inh.infcx.parameter_environment.free_substs; - let item_def_id = fcx.tcx().map.local_def_id(item.id); + self.for_item(item).with_fcx(|fcx, this| { + let free_substs = &fcx.parameter_environment.free_substs; + let item_def_id = fcx.tcx.map.local_def_id(item.id); match *ast_trait_ref { Some(ref ast_trait_ref) => { - let trait_ref = fcx.tcx().impl_trait_ref(item_def_id).unwrap(); + let trait_ref = fcx.tcx.impl_trait_ref(item_def_id).unwrap(); let trait_ref = fcx.instantiate_type_scheme( ast_trait_ref.path.span, free_substs, &trait_ref); let obligations = - ty::wf::trait_obligations(fcx.infcx(), + ty::wf::trait_obligations(fcx, fcx.body_id, &trait_ref, ast_trait_ref.path.span); @@ -319,29 +340,29 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { } } None => { - let self_ty = fcx.tcx().node_id_to_type(item.id); + let self_ty = fcx.tcx.node_id_to_type(item.id); let self_ty = fcx.instantiate_type_scheme(item.span, free_substs, &self_ty); fcx.register_wf_obligation(self_ty, ast_self_ty.span, this.code.clone()); } } - let predicates = fcx.tcx().lookup_predicates(item_def_id); + let predicates = fcx.tcx.lookup_predicates(item_def_id); let predicates = fcx.instantiate_bounds(item.span, free_substs, &predicates); this.check_where_clauses(fcx, item.span, &predicates); - impl_implied_bounds(fcx, fcx.tcx().map.local_def_id(item.id), item.span) + fcx.impl_implied_bounds(fcx.tcx.map.local_def_id(item.id), item.span) }); } - fn check_where_clauses<'fcx>(&mut self, - fcx: &FnCtxt<'fcx,'tcx>, - span: Span, - predicates: &ty::InstantiatedPredicates<'tcx>) + fn check_where_clauses<'fcx, 'tcx>(&mut self, + fcx: &FnCtxt<'fcx, 'gcx, 'tcx>, + span: Span, + predicates: &ty::InstantiatedPredicates<'tcx>) { let obligations = predicates.predicates .iter() - .flat_map(|p| ty::wf::predicate_obligations(fcx.infcx(), + .flat_map(|p| ty::wf::predicate_obligations(fcx, fcx.body_id, p, span)); @@ -351,17 +372,17 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { } } - fn check_fn_or_method<'fcx>(&mut self, - fcx: &FnCtxt<'fcx,'tcx>, - span: Span, - fty: &ty::BareFnTy<'tcx>, - predicates: &ty::InstantiatedPredicates<'tcx>, - free_id_outlive: CodeExtent, - implied_bounds: &mut Vec>) + fn check_fn_or_method<'fcx, 'tcx>(&mut self, + fcx: &FnCtxt<'fcx, 'gcx, 'tcx>, + span: Span, + fty: &'tcx ty::BareFnTy<'tcx>, + predicates: &ty::InstantiatedPredicates<'tcx>, + free_id_outlive: CodeExtent, + implied_bounds: &mut Vec>) { - let free_substs = &fcx.inh.infcx.parameter_environment.free_substs; - let fty = fcx.instantiate_type_scheme(span, free_substs, fty); - let sig = fcx.tcx().liberate_late_bound_regions(free_id_outlive, &fty.sig); + let free_substs = &fcx.parameter_environment.free_substs; + let fty = fcx.instantiate_type_scheme(span, free_substs, &fty); + let sig = fcx.tcx.liberate_late_bound_regions(free_id_outlive, &fty.sig); for &input_ty in &sig.inputs { fcx.register_wf_obligation(input_ty, span, self.code.clone()); @@ -381,19 +402,19 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { self.check_where_clauses(fcx, span, predicates); } - fn check_method_receiver<'fcx>(&mut self, - fcx: &FnCtxt<'fcx,'tcx>, - span: Span, - method: &ty::Method<'tcx>, - free_id_outlive: CodeExtent, - self_ty: ty::Ty<'tcx>) + fn check_method_receiver<'fcx, 'tcx>(&mut self, + fcx: &FnCtxt<'fcx, 'gcx, 'tcx>, + span: Span, + method: &ty::Method<'tcx>, + free_id_outlive: CodeExtent, 
+ self_ty: ty::Ty<'tcx>) { // check that the type of the method's receiver matches the // method's first parameter. - let free_substs = &fcx.inh.infcx.parameter_environment.free_substs; + let free_substs = &fcx.parameter_environment.free_substs; let fty = fcx.instantiate_type_scheme(span, free_substs, &method.fty); - let sig = fcx.tcx().liberate_late_bound_regions(free_id_outlive, &fty.sig); + let sig = fcx.tcx.liberate_late_bound_regions(free_id_outlive, &fty.sig); debug!("check_method_receiver({:?},cat={:?},self_ty={:?},sig={:?})", method.name, method.explicit_self, self_ty, sig); @@ -402,24 +423,21 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { ty::ExplicitSelfCategory::Static => return, ty::ExplicitSelfCategory::ByValue => self_ty, ty::ExplicitSelfCategory::ByReference(region, mutability) => { - fcx.tcx().mk_ref(fcx.tcx().mk_region(region), ty::TypeAndMut { + fcx.tcx.mk_ref(fcx.tcx.mk_region(region), ty::TypeAndMut { ty: self_ty, mutbl: mutability }) } - ty::ExplicitSelfCategory::ByBox => fcx.tcx().mk_box(self_ty) + ty::ExplicitSelfCategory::ByBox => fcx.tcx.mk_box(self_ty) }; let rcvr_ty = fcx.instantiate_type_scheme(span, free_substs, &rcvr_ty); - let rcvr_ty = fcx.tcx().liberate_late_bound_regions(free_id_outlive, - &ty::Binder(rcvr_ty)); + let rcvr_ty = fcx.tcx.liberate_late_bound_regions(free_id_outlive, + &ty::Binder(rcvr_ty)); debug!("check_method_receiver: receiver ty = {:?}", rcvr_ty); - let _ = ::require_same_types( - fcx.tcx(), Some(fcx.infcx()), false, span, - sig.inputs[0], rcvr_ty, - || "mismatched method receiver".to_owned() - ); + fcx.require_same_types(span, sig.inputs[0], rcvr_ty, + "mismatched method receiver"); } fn check_variances_for_type_defn(&self, @@ -438,8 +456,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { .map(|p| Parameter::Type(p)) .collect(); - identify_constrained_type_params(self.tcx(), - ty_predicates.predicates.as_slice(), + identify_constrained_type_params(ty_predicates.predicates.as_slice(), None, &mut constrained_parameters); @@ -472,7 +489,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { { let name = match space { TypeSpace => ast_generics.ty_params[index].name, - SelfSpace => special_idents::type_self.name, + SelfSpace => keywords::SelfType.name(), FnSpace => bug!("Fn space occupied?"), }; @@ -497,13 +514,12 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { span: Span, param_name: ast::Name) { - let mut err = error_392(self.tcx(), span, param_name); + let mut err = error_392(self.ccx, span, param_name); let suggested_marker_id = self.tcx().lang_items.phantom_data(); match suggested_marker_id { Some(def_id) => { - err.fileline_help( - span, + err.help( &format!("consider removing `{}` or using a marker such as `{}`", param_name, self.tcx().item_path_str(def_id))); @@ -516,9 +532,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> { } } -fn reject_shadowing_type_parameters<'tcx>(tcx: &TyCtxt<'tcx>, - span: Span, - generics: &ty::Generics<'tcx>) { +fn reject_shadowing_type_parameters(tcx: TyCtxt, span: Span, generics: &ty::Generics) { let impl_params = generics.types.get_slice(subst::TypeSpace).iter() .map(|tp| tp.name).collect::>(); @@ -561,75 +575,67 @@ struct AdtField<'tcx> { span: Span, } -fn struct_variant<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - struct_def: &hir::VariantData) - -> AdtVariant<'tcx> { - let fields = - struct_def.fields().iter() - .map(|field| { - let field_ty = fcx.tcx().node_id_to_type(field.id); - let field_ty = fcx.instantiate_type_scheme(field.span, - &fcx.inh - 
.infcx - .parameter_environment - .free_substs, - &field_ty); - AdtField { ty: field_ty, span: field.span } - }) - .collect(); - AdtVariant { fields: fields } -} +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + fn struct_variant(&self, struct_def: &hir::VariantData) -> AdtVariant<'tcx> { + let fields = + struct_def.fields().iter() + .map(|field| { + let field_ty = self.tcx.node_id_to_type(field.id); + let field_ty = self.instantiate_type_scheme(field.span, + &self.parameter_environment + .free_substs, + &field_ty); + AdtField { ty: field_ty, span: field.span } + }) + .collect(); + AdtVariant { fields: fields } + } -fn enum_variants<'a, 'tcx>(fcx: &FnCtxt<'a, 'tcx>, - enum_def: &hir::EnumDef) - -> Vec> { - enum_def.variants.iter() - .map(|variant| struct_variant(fcx, &variant.node.data)) - .collect() -} + fn enum_variants(&self, enum_def: &hir::EnumDef) -> Vec> { + enum_def.variants.iter() + .map(|variant| self.struct_variant(&variant.node.data)) + .collect() + } -fn impl_implied_bounds<'fcx,'tcx>(fcx: &FnCtxt<'fcx, 'tcx>, - impl_def_id: DefId, - span: Span) - -> Vec> -{ - let free_substs = &fcx.inh.infcx.parameter_environment.free_substs; - match fcx.tcx().impl_trait_ref(impl_def_id) { - Some(ref trait_ref) => { - // Trait impl: take implied bounds from all types that - // appear in the trait reference. - let trait_ref = fcx.instantiate_type_scheme(span, free_substs, trait_ref); - trait_ref.substs.types.as_slice().to_vec() - } + fn impl_implied_bounds(&self, impl_def_id: DefId, span: Span) -> Vec> { + let free_substs = &self.parameter_environment.free_substs; + match self.tcx.impl_trait_ref(impl_def_id) { + Some(ref trait_ref) => { + // Trait impl: take implied bounds from all types that + // appear in the trait reference. + let trait_ref = self.instantiate_type_scheme(span, free_substs, trait_ref); + trait_ref.substs.types.as_slice().to_vec() + } - None => { - // Inherent impl: take implied bounds from the self type. - let self_ty = fcx.tcx().lookup_item_type(impl_def_id).ty; - let self_ty = fcx.instantiate_type_scheme(span, free_substs, &self_ty); - vec![self_ty] + None => { + // Inherent impl: take implied bounds from the self type. + let self_ty = self.tcx.lookup_item_type(impl_def_id).ty; + let self_ty = self.instantiate_type_scheme(span, free_substs, &self_ty); + vec![self_ty] + } } } } -pub fn error_192<'ccx,'tcx>(ccx: &'ccx CrateCtxt<'ccx, 'tcx>, span: Span) { +fn error_192(ccx: &CrateCtxt, span: Span) { span_err!(ccx.tcx.sess, span, E0192, "negative impls are only allowed for traits with \ default impls (e.g., `Send` and `Sync`)") } -pub fn error_380<'ccx,'tcx>(ccx: &'ccx CrateCtxt<'ccx, 'tcx>, span: Span) { +fn error_380(ccx: &CrateCtxt, span: Span) { span_err!(ccx.tcx.sess, span, E0380, "traits with default impls (`e.g. 
unsafe impl \ Trait for ..`) must have no methods or associated items") } -pub fn error_392<'tcx>(tcx: &TyCtxt<'tcx>, span: Span, param_name: ast::Name) +fn error_392<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, span: Span, param_name: ast::Name) -> DiagnosticBuilder<'tcx> { - struct_span_err!(tcx.sess, span, E0392, + struct_span_err!(ccx.tcx.sess, span, E0392, "parameter `{}` is never used", param_name) } -pub fn error_194<'tcx>(tcx: &TyCtxt<'tcx>, span: Span, name: ast::Name) { +fn error_194(tcx: TyCtxt, span: Span, name: ast::Name) { span_err!(tcx.sess, span, E0194, "type parameter `{}` shadows another type parameter of the same name", name); diff --git a/src/librustc_typeck/check/writeback.rs b/src/librustc_typeck/check/writeback.rs index 68327ccd39..e6500747c0 100644 --- a/src/librustc_typeck/check/writeback.rs +++ b/src/librustc_typeck/check/writeback.rs @@ -13,14 +13,13 @@ // substitutions. use self::ResolveReason::*; -use astconv::AstConv; use check::FnCtxt; use hir::def_id::DefId; use hir::pat_util; use rustc::ty::{self, Ty, TyCtxt, MethodCall, MethodCallee}; use rustc::ty::adjustment; use rustc::ty::fold::{TypeFolder,TypeFoldable}; -use rustc::infer; +use rustc::infer::{InferCtxt, FixupError}; use write_substs_to_tcx; use write_ty_to_tcx; @@ -35,36 +34,36 @@ use rustc::hir; /////////////////////////////////////////////////////////////////////////// // Entry point functions -pub fn resolve_type_vars_in_expr(fcx: &FnCtxt, e: &hir::Expr) { - assert_eq!(fcx.writeback_errors.get(), false); - let mut wbcx = WritebackCx::new(fcx); - wbcx.visit_expr(e); - wbcx.visit_upvar_borrow_map(); - wbcx.visit_closures(); - wbcx.visit_liberated_fn_sigs(); - wbcx.visit_fru_field_types(); -} +impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> { + pub fn resolve_type_vars_in_expr(&self, e: &hir::Expr) { + assert_eq!(self.writeback_errors.get(), false); + let mut wbcx = WritebackCx::new(self); + wbcx.visit_expr(e); + wbcx.visit_upvar_borrow_map(); + wbcx.visit_closures(); + wbcx.visit_liberated_fn_sigs(); + wbcx.visit_fru_field_types(); + } -pub fn resolve_type_vars_in_fn(fcx: &FnCtxt, - decl: &hir::FnDecl, - blk: &hir::Block) { - assert_eq!(fcx.writeback_errors.get(), false); - let mut wbcx = WritebackCx::new(fcx); - wbcx.visit_block(blk); - for arg in &decl.inputs { - wbcx.visit_node_id(ResolvingPattern(arg.pat.span), arg.id); - wbcx.visit_pat(&arg.pat); - - // Privacy needs the type for the whole pattern, not just each binding - if !pat_util::pat_is_binding(&fcx.tcx().def_map.borrow(), &arg.pat) { - wbcx.visit_node_id(ResolvingPattern(arg.pat.span), - arg.pat.id); + pub fn resolve_type_vars_in_fn(&self, decl: &hir::FnDecl, blk: &hir::Block) { + assert_eq!(self.writeback_errors.get(), false); + let mut wbcx = WritebackCx::new(self); + wbcx.visit_block(blk); + for arg in &decl.inputs { + wbcx.visit_node_id(ResolvingPattern(arg.pat.span), arg.id); + wbcx.visit_pat(&arg.pat); + + // Privacy needs the type for the whole pattern, not just each binding + if !pat_util::pat_is_binding(&self.tcx.def_map.borrow(), &arg.pat) { + wbcx.visit_node_id(ResolvingPattern(arg.pat.span), + arg.pat.id); + } } + wbcx.visit_upvar_borrow_map(); + wbcx.visit_closures(); + wbcx.visit_liberated_fn_sigs(); + wbcx.visit_fru_field_types(); } - wbcx.visit_upvar_borrow_map(); - wbcx.visit_closures(); - wbcx.visit_liberated_fn_sigs(); - wbcx.visit_fru_field_types(); } /////////////////////////////////////////////////////////////////////////// @@ -75,17 +74,17 @@ pub fn resolve_type_vars_in_fn(fcx: &FnCtxt, // there, it applies a few ad-hoc checks 
that were not convenient to // do elsewhere. -struct WritebackCx<'cx, 'tcx: 'cx> { - fcx: &'cx FnCtxt<'cx, 'tcx>, +struct WritebackCx<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { + fcx: &'cx FnCtxt<'cx, 'gcx, 'tcx>, } -impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { - fn new(fcx: &'cx FnCtxt<'cx, 'tcx>) -> WritebackCx<'cx, 'tcx> { +impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> { + fn new(fcx: &'cx FnCtxt<'cx, 'gcx, 'tcx>) -> WritebackCx<'cx, 'gcx, 'tcx> { WritebackCx { fcx: fcx } } - fn tcx(&self) -> &'cx TyCtxt<'tcx> { - self.fcx.tcx() + fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> { + self.fcx.tcx } // Hacky hack: During type-checking, we treat *all* operators @@ -97,13 +96,13 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { hir::ExprBinary(ref op, ref lhs, ref rhs) | hir::ExprAssignOp(ref op, ref lhs, ref rhs) => { let lhs_ty = self.fcx.node_ty(lhs.id); - let lhs_ty = self.fcx.infcx().resolve_type_vars_if_possible(&lhs_ty); + let lhs_ty = self.fcx.resolve_type_vars_if_possible(&lhs_ty); let rhs_ty = self.fcx.node_ty(rhs.id); - let rhs_ty = self.fcx.infcx().resolve_type_vars_if_possible(&rhs_ty); + let rhs_ty = self.fcx.resolve_type_vars_if_possible(&rhs_ty); if lhs_ty.is_scalar() && rhs_ty.is_scalar() { - self.fcx.inh.tables.borrow_mut().method_map.remove(&MethodCall::expr(e.id)); + self.fcx.tables.borrow_mut().method_map.remove(&MethodCall::expr(e.id)); // weird but true: the by-ref binops put an // adjustment on the lhs but not the rhs; the @@ -112,11 +111,11 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { match e.node { hir::ExprBinary(..) => { if !op.node.is_by_value() { - self.fcx.inh.tables.borrow_mut().adjustments.remove(&lhs.id); + self.fcx.tables.borrow_mut().adjustments.remove(&lhs.id); } }, hir::ExprAssignOp(..) => { - self.fcx.inh.tables.borrow_mut().adjustments.remove(&lhs.id); + self.fcx.tables.borrow_mut().adjustments.remove(&lhs.id); }, _ => {}, } @@ -135,7 +134,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { // below. In general, a function is made into a `visitor` if it must // traffic in node-ids or update tables in the type context etc. 
-impl<'cx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'tcx> { +impl<'cx, 'gcx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'gcx, 'tcx> { fn visit_stmt(&mut self, s: &hir::Stmt) { if self.fcx.writeback_errors.get() { return; @@ -156,7 +155,7 @@ impl<'cx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'tcx> { self.visit_method_map_entry(ResolvingExpr(e.span), MethodCall::expr(e.id)); - if let hir::ExprClosure(_, ref decl, _) = e.node { + if let hir::ExprClosure(_, ref decl, _, _) = e.node { for input in &decl.inputs { self.visit_node_id(ResolvingExpr(e.span), input.id); } @@ -196,7 +195,7 @@ impl<'cx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'tcx> { let var_ty = self.fcx.local_ty(l.span, l.id); let var_ty = self.resolve(&var_ty, ResolvingLocal(l.span)); - write_ty_to_tcx(self.tcx(), l.id, var_ty); + write_ty_to_tcx(self.fcx.ccx, l.id, var_ty); intravisit::walk_local(self, l); } @@ -204,7 +203,7 @@ impl<'cx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'tcx> { match t.node { hir::TyFixedLengthVec(ref ty, ref count_expr) => { self.visit_ty(&ty); - write_ty_to_tcx(self.tcx(), count_expr.id, self.tcx().types.usize); + write_ty_to_tcx(self.fcx.ccx, count_expr.id, self.tcx().types.usize); } hir::TyBareFn(ref function_declaration) => { intravisit::walk_fn_decl_nopat(self, &function_declaration.decl); @@ -215,13 +214,13 @@ impl<'cx, 'tcx, 'v> Visitor<'v> for WritebackCx<'cx, 'tcx> { } } -impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { +impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> { fn visit_upvar_borrow_map(&self) { if self.fcx.writeback_errors.get() { return; } - for (upvar_id, upvar_capture) in self.fcx.inh.tables.borrow().upvar_capture_map.iter() { + for (upvar_id, upvar_capture) in self.fcx.tables.borrow().upvar_capture_map.iter() { let new_upvar_capture = match *upvar_capture { ty::UpvarCapture::ByValue => ty::UpvarCapture::ByValue, ty::UpvarCapture::ByRef(ref upvar_borrow) => { @@ -234,11 +233,11 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { debug!("Upvar capture for {:?} resolved to {:?}", upvar_id, new_upvar_capture); - self.fcx.tcx() - .tables - .borrow_mut() - .upvar_capture_map - .insert(*upvar_id, new_upvar_capture); + self.tcx() + .tables + .borrow_mut() + .upvar_capture_map + .insert(*upvar_id, new_upvar_capture); } } @@ -247,13 +246,13 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { return } - for (def_id, closure_ty) in self.fcx.inh.tables.borrow().closure_tys.iter() { + for (def_id, closure_ty) in self.fcx.tables.borrow().closure_tys.iter() { let closure_ty = self.resolve(closure_ty, ResolvingClosure(*def_id)); - self.fcx.tcx().tables.borrow_mut().closure_tys.insert(*def_id, closure_ty); + self.tcx().tables.borrow_mut().closure_tys.insert(*def_id, closure_ty); } - for (def_id, &closure_kind) in self.fcx.inh.tables.borrow().closure_kinds.iter() { - self.fcx.tcx().tables.borrow_mut().closure_kinds.insert(*def_id, closure_kind); + for (def_id, &closure_kind) in self.fcx.tables.borrow().closure_kinds.iter() { + self.tcx().tables.borrow_mut().closure_kinds.insert(*def_id, closure_kind); } } @@ -264,18 +263,18 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { // Resolve the type of the node with id `id` let n_ty = self.fcx.node_ty(id); let n_ty = self.resolve(&n_ty, reason); - write_ty_to_tcx(self.tcx(), id, n_ty); + write_ty_to_tcx(self.fcx.ccx, id, n_ty); debug!("Node {} has type {:?}", id, n_ty); // Resolve any substitutions self.fcx.opt_node_ty_substs(id, |item_substs| { - write_substs_to_tcx(self.tcx(), id, + write_substs_to_tcx(self.fcx.ccx, id, self.resolve(item_substs, reason)); }); } fn 
visit_adjustments(&self, reason: ResolveReason, id: ast::NodeId) { - let adjustments = self.fcx.inh.tables.borrow_mut().adjustments.remove(&id); + let adjustments = self.fcx.tables.borrow_mut().adjustments.remove(&id); match adjustments { None => { debug!("No adjustments for node {}", id); @@ -319,7 +318,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { reason: ResolveReason, method_call: MethodCall) { // Resolve any method map entry - let new_method = match self.fcx.inh.tables.borrow_mut().method_map.remove(&method_call) { + let new_method = match self.fcx.tables.borrow_mut().method_map.remove(&method_call) { Some(method) => { debug!("writeback::resolve_method_map_entry(call={:?}, entry={:?})", method_call, @@ -327,7 +326,7 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { let new_method = MethodCallee { def_id: method.def_id, ty: self.resolve(&method.ty, reason), - substs: self.tcx().mk_substs(self.resolve(method.substs, reason)), + substs: self.resolve(&method.substs, reason), }; Some(new_method) @@ -347,21 +346,29 @@ impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> { } fn visit_liberated_fn_sigs(&self) { - for (&node_id, fn_sig) in self.fcx.inh.tables.borrow().liberated_fn_sigs.iter() { + for (&node_id, fn_sig) in self.fcx.tables.borrow().liberated_fn_sigs.iter() { let fn_sig = self.resolve(fn_sig, ResolvingFnSig(node_id)); self.tcx().tables.borrow_mut().liberated_fn_sigs.insert(node_id, fn_sig.clone()); } } fn visit_fru_field_types(&self) { - for (&node_id, ftys) in self.fcx.inh.tables.borrow().fru_field_types.iter() { + for (&node_id, ftys) in self.fcx.tables.borrow().fru_field_types.iter() { let ftys = self.resolve(ftys, ResolvingFieldTypes(node_id)); self.tcx().tables.borrow_mut().fru_field_types.insert(node_id, ftys); } } - fn resolve>(&self, t: &T, reason: ResolveReason) -> T { - t.fold_with(&mut Resolver::new(self.fcx, reason)) + fn resolve(&self, x: &T, reason: ResolveReason) -> T::Lifted + where T: TypeFoldable<'tcx> + ty::Lift<'gcx> + { + let x = x.fold_with(&mut Resolver::new(self.fcx, reason)); + if let Some(lifted) = self.tcx().lift_to_global(&x) { + lifted + } else { + span_bug!(reason.span(self.tcx()), + "writeback: `{:?}` missing from the global type context", x); + } } } @@ -379,8 +386,8 @@ enum ResolveReason { ResolvingFieldTypes(ast::NodeId) } -impl ResolveReason { - fn span(&self, tcx: &TyCtxt) -> Span { +impl<'a, 'gcx, 'tcx> ResolveReason { + fn span(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Span { match *self { ResolvingExpr(s) => s, ResolvingLocal(s) => s, @@ -409,25 +416,25 @@ impl ResolveReason { // The Resolver. This is the type folding engine that detects // unresolved types and so forth. 
-struct Resolver<'cx, 'tcx: 'cx> { - tcx: &'cx TyCtxt<'tcx>, - infcx: &'cx infer::InferCtxt<'cx, 'tcx>, +struct Resolver<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> { + tcx: TyCtxt<'cx, 'gcx, 'tcx>, + infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>, writeback_errors: &'cx Cell, reason: ResolveReason, } -impl<'cx, 'tcx> Resolver<'cx, 'tcx> { - fn new(fcx: &'cx FnCtxt<'cx, 'tcx>, +impl<'cx, 'gcx, 'tcx> Resolver<'cx, 'gcx, 'tcx> { + fn new(fcx: &'cx FnCtxt<'cx, 'gcx, 'tcx>, reason: ResolveReason) - -> Resolver<'cx, 'tcx> + -> Resolver<'cx, 'gcx, 'tcx> { - Resolver::from_infcx(fcx.infcx(), &fcx.writeback_errors, reason) + Resolver::from_infcx(fcx, &fcx.writeback_errors, reason) } - fn from_infcx(infcx: &'cx infer::InferCtxt<'cx, 'tcx>, + fn from_infcx(infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>, writeback_errors: &'cx Cell, reason: ResolveReason) - -> Resolver<'cx, 'tcx> + -> Resolver<'cx, 'gcx, 'tcx> { Resolver { infcx: infcx, tcx: infcx.tcx, @@ -435,34 +442,30 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> { reason: reason } } - fn report_error(&self, e: infer::FixupError) { + fn report_error(&self, e: FixupError) { self.writeback_errors.set(true); if !self.tcx.sess.has_errors() { match self.reason { ResolvingExpr(span) => { span_err!(self.tcx.sess, span, E0101, - "cannot determine a type for this expression: {}", - infer::fixup_err_to_string(e)); + "cannot determine a type for this expression: {}", e); } ResolvingLocal(span) => { span_err!(self.tcx.sess, span, E0102, - "cannot determine a type for this local variable: {}", - infer::fixup_err_to_string(e)); + "cannot determine a type for this local variable: {}", e); } ResolvingPattern(span) => { span_err!(self.tcx.sess, span, E0103, - "cannot determine a type for this pattern binding: {}", - infer::fixup_err_to_string(e)); + "cannot determine a type for this pattern binding: {}", e); } ResolvingUpvar(upvar_id) => { let span = self.reason.span(self.tcx); span_err!(self.tcx.sess, span, E0104, "cannot resolve lifetime for captured variable `{}`: {}", - self.tcx.local_var_name_str(upvar_id.var_id).to_string(), - infer::fixup_err_to_string(e)); + self.tcx.local_var_name_str(upvar_id.var_id), e); } ResolvingClosure(_) => { @@ -485,8 +488,8 @@ impl<'cx, 'tcx> Resolver<'cx, 'tcx> { } } -impl<'cx, 'tcx> TypeFolder<'tcx> for Resolver<'cx, 'tcx> { - fn tcx<'a>(&'a self) -> &'a TyCtxt<'tcx> { +impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Resolver<'cx, 'gcx, 'tcx> { + fn tcx<'a>(&'a self) -> TyCtxt<'a, 'gcx, 'tcx> { self.tcx } diff --git a/src/librustc_typeck/check_unused.rs b/src/librustc_typeck/check_unused.rs new file mode 100644 index 0000000000..c3538ace34 --- /dev/null +++ b/src/librustc_typeck/check_unused.rs @@ -0,0 +1,64 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
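The diagnostics above drop the `infer::fixup_err_to_string(e)` helper and interpolate the error with `{}` directly, which works once the error type implements `Display`. A small illustrative sketch of that pattern; the enum and messages below are invented, not the real `FixupError`:

use std::fmt;

// Hypothetical error type standing in for FixupError.
enum ResolveError {
    UnresolvedInt,
    UnresolvedTy(u32),
}

impl fmt::Display for ResolveError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            ResolveError::UnresolvedInt => write!(f, "cannot determine the type of this integer"),
            ResolveError::UnresolvedTy(v) => write!(f, "cannot determine the type of `_{}`", v),
        }
    }
}

fn main() {
    let e = ResolveError::UnresolvedTy(3);
    // `{}` now works directly; no `fixup_err_to_string`-style helper is needed.
    println!("cannot determine a type for this expression: {}", e);
}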
+ +use lint; +use rustc::dep_graph::DepNode; +use rustc::ty::TyCtxt; + +use syntax::ast; +use syntax::codemap::{Span, DUMMY_SP}; + +use rustc::hir; +use rustc::hir::intravisit::Visitor; + +struct UnusedTraitImportVisitor<'a, 'tcx: 'a> { + tcx: TyCtxt<'a, 'tcx, 'tcx>, +} + +impl<'a, 'tcx> UnusedTraitImportVisitor<'a, 'tcx> { + fn check_import(&self, id: ast::NodeId, span: Span) { + if !self.tcx.maybe_unused_trait_imports.contains(&id) { + return; + } + if self.tcx.used_trait_imports.borrow().contains(&id) { + return; + } + self.tcx.sess.add_lint(lint::builtin::UNUSED_IMPORTS, + id, + span, + "unused import".to_string()); + } +} + +impl<'a, 'tcx, 'v> Visitor<'v> for UnusedTraitImportVisitor<'a, 'tcx> { + fn visit_item(&mut self, item: &hir::Item) { + if item.vis == hir::Public || item.span == DUMMY_SP { + return; + } + if let hir::ItemUse(ref path) = item.node { + match path.node { + hir::ViewPathSimple(..) | hir::ViewPathGlob(..) => { + self.check_import(item.id, path.span); + } + hir::ViewPathList(_, ref path_list) => { + for path_item in path_list { + self.check_import(path_item.node.id(), path_item.span); + } + } + } + } + } +} + +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { + let _task = tcx.dep_graph.in_task(DepNode::UnusedTraitCheck); + let mut visitor = UnusedTraitImportVisitor { tcx: tcx }; + tcx.map.krate().visit_all_items(&mut visitor); +} diff --git a/src/librustc_typeck/coherence/mod.rs b/src/librustc_typeck/coherence/mod.rs index 9c8c9ba505..8bee0467f1 100644 --- a/src/librustc_typeck/coherence/mod.rs +++ b/src/librustc_typeck/coherence/mod.rs @@ -31,12 +31,10 @@ use rustc::ty::TyProjection; use rustc::ty::util::CopyImplementationError; use middle::free_region::FreeRegionMap; use CrateCtxt; -use rustc::infer::{self, InferCtxt, TypeOrigin, new_infer_ctxt}; +use rustc::infer::{self, InferCtxt, TypeOrigin}; use std::cell::RefCell; use std::rc::Rc; -use syntax::ast; use syntax::codemap::Span; -use syntax::errors::DiagnosticBuilder; use util::nodemap::{DefIdMap, FnvHashMap}; use rustc::dep_graph::DepNode; use rustc::hir::map as hir_map; @@ -48,54 +46,17 @@ mod orphan; mod overlap; mod unsafety; -// Returns the def ID of the base type, if there is one. -fn get_base_type_def_id<'a, 'tcx>(inference_context: &InferCtxt<'a, 'tcx>, - span: Span, - ty: Ty<'tcx>) - -> Option { - match ty.sty { - TyEnum(def, _) | - TyStruct(def, _) => { - Some(def.did) - } - - TyTrait(ref t) => { - Some(t.principal_def_id()) - } - - TyBox(_) => { - inference_context.tcx.lang_items.owned_box() - } - - TyBool | TyChar | TyInt(..) | TyUint(..) | TyFloat(..) | - TyStr | TyArray(..) | TySlice(..) | TyFnDef(..) | TyFnPtr(_) | - TyTuple(..) | TyParam(..) | TyError | - TyRawPtr(_) | TyRef(_, _) | TyProjection(..) => { - None - } - - TyInfer(..) | TyClosure(..) 
=> { - // `ty` comes from a user declaration so we should only expect types - // that the user can type - span_bug!( - span, - "coherence encountered unexpected type searching for base type: {}", - ty); - } - } -} - -struct CoherenceChecker<'a, 'tcx: 'a> { - crate_context: &'a CrateCtxt<'a, 'tcx>, - inference_context: InferCtxt<'a, 'tcx>, +struct CoherenceChecker<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + crate_context: &'a CrateCtxt<'a, 'gcx>, + inference_context: InferCtxt<'a, 'gcx, 'tcx>, inherent_impls: RefCell>>>>, } -struct CoherenceCheckVisitor<'a, 'tcx: 'a> { - cc: &'a CoherenceChecker<'a, 'tcx> +struct CoherenceCheckVisitor<'a, 'gcx: 'a+'tcx, 'tcx: 'a> { + cc: &'a CoherenceChecker<'a, 'gcx, 'tcx> } -impl<'a, 'tcx, 'v> intravisit::Visitor<'v> for CoherenceCheckVisitor<'a, 'tcx> { +impl<'a, 'gcx, 'tcx, 'v> intravisit::Visitor<'v> for CoherenceCheckVisitor<'a, 'gcx, 'tcx> { fn visit_item(&mut self, item: &Item) { if let ItemImpl(..) = item.node { self.cc.check_implementation(item) @@ -103,7 +64,42 @@ impl<'a, 'tcx, 'v> intravisit::Visitor<'v> for CoherenceCheckVisitor<'a, 'tcx> { } } -impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { +impl<'a, 'gcx, 'tcx> CoherenceChecker<'a, 'gcx, 'tcx> { + + // Returns the def ID of the base type, if there is one. + fn get_base_type_def_id(&self, span: Span, ty: Ty<'tcx>) -> Option { + match ty.sty { + TyEnum(def, _) | + TyStruct(def, _) => { + Some(def.did) + } + + TyTrait(ref t) => { + Some(t.principal_def_id()) + } + + TyBox(_) => { + self.inference_context.tcx.lang_items.owned_box() + } + + TyBool | TyChar | TyInt(..) | TyUint(..) | TyFloat(..) | + TyStr | TyArray(..) | TySlice(..) | TyFnDef(..) | TyFnPtr(_) | + TyTuple(..) | TyParam(..) | TyError | + TyRawPtr(_) | TyRef(_, _) | TyProjection(..) => { + None + } + + TyInfer(..) | TyClosure(..) => { + // `ty` comes from a user declaration so we should only expect types + // that the user can type + span_bug!( + span, + "coherence encountered unexpected type searching for base type: {}", + ty); + } + } + } + fn check(&self) { // Check implementations and traits. This populates the tables // containing the inherent methods and extension methods. It also @@ -169,9 +165,8 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { // Add the implementation to the mapping from implementation to base // type def ID, if there is a base type for this implementation and // the implementation does not have any associated traits. 
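The check_unused.rs pass added above reports a trait import as unused only when it was recorded as a candidate in `maybe_unused_trait_imports` and never ended up in `used_trait_imports` during type checking (public and dummy-span items are skipped). A minimal sketch of that two-set idea, with plain `HashSet`s and integers standing in for node IDs:

use std::collections::HashSet;

// Hypothetical stand-ins for the two tables kept on the type context.
struct Tables {
    maybe_unused_trait_imports: HashSet<u32>, // `use some::Trait;` items seen by resolve
    used_trait_imports: HashSet<u32>,         // imports whose methods were actually used
}

fn unused_trait_imports(tables: &Tables) -> Vec<u32> {
    tables.maybe_unused_trait_imports
        .iter()
        .filter(|id| !tables.used_trait_imports.contains(*id))
        .copied()
        .collect()
}

fn main() {
    let tables = Tables {
        maybe_unused_trait_imports: [10, 11, 12].into_iter().collect(),
        used_trait_imports: [11].into_iter().collect(),
    };
    let mut unused = unused_trait_imports(&tables);
    unused.sort();
    // Imports 10 and 12 would get the `unused import` lint.
    assert_eq!(unused, vec![10, 12]);
}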
- if let Some(base_type_def_id) = get_base_type_def_id( - &self.inference_context, item.span, self_type.ty) { - self.add_inherent_impl(base_type_def_id, impl_did); + if let Some(base_def_id) = self.get_base_type_def_id(item.span, self_type.ty) { + self.add_inherent_impl(base_def_id, impl_did); } } @@ -192,7 +187,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { Rc::new(RefCell::new(vec!(impl_def_id)))); } - fn add_trait_impl(&self, impl_trait_ref: ty::TraitRef<'tcx>, impl_def_id: DefId) { + fn add_trait_impl(&self, impl_trait_ref: ty::TraitRef<'gcx>, impl_def_id: DefId) { debug!("add_trait_impl: impl_trait_ref={:?} impl_def_id={:?}", impl_trait_ref, impl_def_id); let trait_def = self.crate_context.tcx.lookup_trait_def(impl_trait_ref.def_id); @@ -307,7 +302,7 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { debug!("check_implementations_of_copy: self_type={:?} (free)", self_type); - match param_env.can_type_implement_copy(self_type, span) { + match param_env.can_type_implement_copy(tcx, self_type, span) { Ok(()) => {} Err(CopyImplementationError::InfrigingField(name)) => { span_err!(tcx.sess, span, E0204, @@ -381,116 +376,116 @@ impl<'a, 'tcx> CoherenceChecker<'a, 'tcx> { debug!("check_implementations_of_coerce_unsized: {:?} -> {:?} (free)", source, target); - let infcx = new_infer_ctxt(tcx, &tcx.tables, Some(param_env), ProjectionMode::Topmost); + tcx.infer_ctxt(None, Some(param_env), ProjectionMode::Topmost).enter(|infcx| { + let origin = TypeOrigin::Misc(span); + let check_mutbl = |mt_a: ty::TypeAndMut<'gcx>, mt_b: ty::TypeAndMut<'gcx>, + mk_ptr: &Fn(Ty<'gcx>) -> Ty<'gcx>| { + if (mt_a.mutbl, mt_b.mutbl) == (hir::MutImmutable, hir::MutMutable) { + infcx.report_mismatched_types(origin, mk_ptr(mt_b.ty), + target, ty::error::TypeError::Mutability); + } + (mt_a.ty, mt_b.ty, unsize_trait, None) + }; + let (source, target, trait_def_id, kind) = match (&source.sty, &target.sty) { + (&ty::TyBox(a), &ty::TyBox(b)) => (a, b, unsize_trait, None), + + (&ty::TyRef(r_a, mt_a), &ty::TyRef(r_b, mt_b)) => { + infcx.sub_regions(infer::RelateObjectBound(span), *r_b, *r_a); + check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ref(r_b, ty)) + } - let origin = TypeOrigin::Misc(span); - let check_mutbl = |mt_a: ty::TypeAndMut<'tcx>, mt_b: ty::TypeAndMut<'tcx>, - mk_ptr: &Fn(Ty<'tcx>) -> Ty<'tcx>| { - if (mt_a.mutbl, mt_b.mutbl) == (hir::MutImmutable, hir::MutMutable) { - infcx.report_mismatched_types(origin, mk_ptr(mt_b.ty), - target, ty::error::TypeError::Mutability); - } - (mt_a.ty, mt_b.ty, unsize_trait, None) - }; - let (source, target, trait_def_id, kind) = match (&source.sty, &target.sty) { - (&ty::TyBox(a), &ty::TyBox(b)) => (a, b, unsize_trait, None), + (&ty::TyRef(_, mt_a), &ty::TyRawPtr(mt_b)) | + (&ty::TyRawPtr(mt_a), &ty::TyRawPtr(mt_b)) => { + check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ptr(ty)) + } - (&ty::TyRef(r_a, mt_a), &ty::TyRef(r_b, mt_b)) => { - infer::mk_subr(&infcx, infer::RelateObjectBound(span), *r_b, *r_a); - check_mutbl(mt_a, mt_b, &|ty| tcx.mk_imm_ref(r_b, ty)) - } + (&ty::TyStruct(def_a, substs_a), &ty::TyStruct(def_b, substs_b)) => { + if def_a != def_b { + let source_path = tcx.item_path_str(def_a.did); + let target_path = tcx.item_path_str(def_b.did); + span_err!(tcx.sess, span, E0377, + "the trait `CoerceUnsized` may only be implemented \ + for a coercion between structures with the same \ + definition; expected {}, found {}", + source_path, target_path); + return; + } - (&ty::TyRef(_, mt_a), &ty::TyRawPtr(mt_b)) | - (&ty::TyRawPtr(mt_a), &ty::TyRawPtr(mt_b)) => { - check_mutbl(mt_a, mt_b, 
&|ty| tcx.mk_imm_ptr(ty)) - } + let fields = &def_a.struct_variant().fields; + let diff_fields = fields.iter().enumerate().filter_map(|(i, f)| { + let (a, b) = (f.ty(tcx, substs_a), f.ty(tcx, substs_b)); + + if f.unsubst_ty().is_phantom_data() { + // Ignore PhantomData fields + None + } else if infcx.sub_types(false, origin, b, a).is_ok() { + // Ignore fields that aren't significantly changed + None + } else { + // Collect up all fields that were significantly changed + // i.e. those that contain T in coerce_unsized T -> U + Some((i, a, b)) + } + }).collect::>(); + + if diff_fields.is_empty() { + span_err!(tcx.sess, span, E0374, + "the trait `CoerceUnsized` may only be implemented \ + for a coercion between structures with one field \ + being coerced, none found"); + return; + } else if diff_fields.len() > 1 { + span_err!(tcx.sess, span, E0375, + "the trait `CoerceUnsized` may only be implemented \ + for a coercion between structures with one field \ + being coerced, but {} fields need coercions: {}", + diff_fields.len(), diff_fields.iter().map(|&(i, a, b)| { + format!("{} ({} to {})", fields[i].name, a, b) + }).collect::>().join(", ")); + return; + } - (&ty::TyStruct(def_a, substs_a), &ty::TyStruct(def_b, substs_b)) => { - if def_a != def_b { - let source_path = tcx.item_path_str(def_a.did); - let target_path = tcx.item_path_str(def_b.did); - span_err!(tcx.sess, span, E0377, - "the trait `CoerceUnsized` may only be implemented \ - for a coercion between structures with the same \ - definition; expected {}, found {}", - source_path, target_path); - return; + let (i, a, b) = diff_fields[0]; + let kind = ty::adjustment::CustomCoerceUnsized::Struct(i); + (a, b, coerce_unsized_trait, Some(kind)) } - let fields = &def_a.struct_variant().fields; - let diff_fields = fields.iter().enumerate().filter_map(|(i, f)| { - let (a, b) = (f.ty(tcx, substs_a), f.ty(tcx, substs_b)); - - if f.unsubst_ty().is_phantom_data() { - // Ignore PhantomData fields - None - } else if infcx.sub_types(false, origin, b, a).is_ok() { - // Ignore fields that aren't significantly changed - None - } else { - // Collect up all fields that were significantly changed - // i.e. those that contain T in coerce_unsized T -> U - Some((i, a, b)) - } - }).collect::>(); - - if diff_fields.is_empty() { - span_err!(tcx.sess, span, E0374, - "the trait `CoerceUnsized` may only be implemented \ - for a coercion between structures with one field \ - being coerced, none found"); - return; - } else if diff_fields.len() > 1 { - span_err!(tcx.sess, span, E0375, + _ => { + span_err!(tcx.sess, span, E0376, "the trait `CoerceUnsized` may only be implemented \ - for a coercion between structures with one field \ - being coerced, but {} fields need coercions: {}", - diff_fields.len(), diff_fields.iter().map(|&(i, a, b)| { - format!("{} ({} to {})", fields[i].name, a, b) - }).collect::>().join(", ")); + for a coercion between structures"); return; } + }; - let (i, a, b) = diff_fields[0]; - let kind = ty::adjustment::CustomCoerceUnsized::Struct(i); - (a, b, coerce_unsized_trait, Some(kind)) - } - - _ => { - span_err!(tcx.sess, span, E0376, - "the trait `CoerceUnsized` may only be implemented \ - for a coercion between structures"); - return; - } - }; - - let mut fulfill_cx = traits::FulfillmentContext::new(); + let mut fulfill_cx = traits::FulfillmentContext::new(); - // Register an obligation for `A: Trait`. 
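The `CoerceUnsized` check above walks the two struct definitions field by field, ignores `PhantomData` and fields whose types do not change, and accepts the impl only when exactly one field is actually coerced. A simplified standalone sketch of that filtering, with field types reduced to strings (names and shapes invented):

// Hypothetical field representation: (name, type in the source struct, type in the target).
fn check_coerce_unsized_fields(fields: &[(&str, &str, &str)]) -> Result<usize, String> {
    // Keep only the fields whose type actually changes between source and target.
    let diff_fields: Vec<usize> = fields.iter()
        .enumerate()
        .filter(|(_, (_, a, b))| a != b)
        .map(|(i, _)| i)
        .collect();

    match diff_fields.len() {
        0 => Err("no field is being coerced".to_string()),
        1 => Ok(diff_fields[0]), // the index recorded as CustomCoerceUnsized::Struct(i)
        n => Err(format!("{} fields need coercions, expected exactly one", n)),
    }
}

fn main() {
    // e.g. struct Wrapper<T: ?Sized> { len: usize, data: T } coerced from [u8; 4] to [u8].
    let fields = [("len", "usize", "usize"), ("data", "[u8; 4]", "[u8]")];
    assert_eq!(check_coerce_unsized_fields(&fields), Ok(1));
}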
- let cause = traits::ObligationCause::misc(span, impl_node_id); - let predicate = traits::predicate_for_trait_def(tcx, cause, trait_def_id, - 0, source, vec![target]); - fulfill_cx.register_predicate_obligation(&infcx, predicate); + // Register an obligation for `A: Trait`. + let cause = traits::ObligationCause::misc(span, impl_node_id); + let predicate = tcx.predicate_for_trait_def(cause, trait_def_id, 0, + source, vec![target]); + fulfill_cx.register_predicate_obligation(&infcx, predicate); - // Check that all transitive obligations are satisfied. - if let Err(errors) = fulfill_cx.select_all_or_error(&infcx) { - traits::report_fulfillment_errors(&infcx, &errors); - } + // Check that all transitive obligations are satisfied. + if let Err(errors) = fulfill_cx.select_all_or_error(&infcx) { + infcx.report_fulfillment_errors(&errors); + } - // Finally, resolve all regions. - let mut free_regions = FreeRegionMap::new(); - free_regions.relate_free_regions_from_predicates(tcx, &infcx.parameter_environment - .caller_bounds); - infcx.resolve_regions_and_report_errors(&free_regions, impl_node_id); + // Finally, resolve all regions. + let mut free_regions = FreeRegionMap::new(); + free_regions.relate_free_regions_from_predicates( + &infcx.parameter_environment.caller_bounds); + infcx.resolve_regions_and_report_errors(&free_regions, impl_node_id); - if let Some(kind) = kind { - tcx.custom_coerce_unsized_kinds.borrow_mut().insert(impl_did, kind); - } + if let Some(kind) = kind { + tcx.custom_coerce_unsized_kinds.borrow_mut().insert(impl_did, kind); + } + }); }); } } -fn enforce_trait_manually_implementable(tcx: &TyCtxt, sp: Span, trait_def_id: DefId) { +fn enforce_trait_manually_implementable(tcx: TyCtxt, sp: Span, trait_def_id: DefId) { if tcx.sess.features.borrow().unboxed_closures { // the feature gate allows all of them return @@ -512,30 +507,20 @@ fn enforce_trait_manually_implementable(tcx: &TyCtxt, sp: Span, trait_def_id: De E0183, "manual implementations of `{}` are experimental", trait_name); - fileline_help!(&mut err, sp, - "add `#![feature(unboxed_closures)]` to the crate attributes to enable"); + help!(&mut err, "add `#![feature(unboxed_closures)]` to the crate attributes to enable"); err.emit(); } -// Factored out into helper because the error cannot be defined in multiple locations. 
-pub fn report_duplicate_item<'tcx>(tcx: &TyCtxt<'tcx>, sp: Span, name: ast::Name) - -> DiagnosticBuilder<'tcx> -{ - struct_span_err!(tcx.sess, sp, E0201, "duplicate definitions with name `{}`:", name) -} - -pub fn check_coherence(crate_context: &CrateCtxt) { - let _task = crate_context.tcx.dep_graph.in_task(DepNode::Coherence); - let infcx = new_infer_ctxt(crate_context.tcx, - &crate_context.tcx.tables, - None, - ProjectionMode::Topmost); - CoherenceChecker { - crate_context: crate_context, - inference_context: infcx, - inherent_impls: RefCell::new(FnvHashMap()), - }.check(); - unsafety::check(crate_context.tcx); - orphan::check(crate_context.tcx); - overlap::check(crate_context.tcx); +pub fn check_coherence(ccx: &CrateCtxt) { + let _task = ccx.tcx.dep_graph.in_task(DepNode::Coherence); + ccx.tcx.infer_ctxt(None, None, ProjectionMode::Topmost).enter(|infcx| { + CoherenceChecker { + crate_context: ccx, + inference_context: infcx, + inherent_impls: RefCell::new(FnvHashMap()), + }.check(); + }); + unsafety::check(ccx.tcx); + orphan::check(ccx.tcx); + overlap::check(ccx.tcx); } diff --git a/src/librustc_typeck/coherence/orphan.rs b/src/librustc_typeck/coherence/orphan.rs index f3d6395701..d9ad032220 100644 --- a/src/librustc_typeck/coherence/orphan.rs +++ b/src/librustc_typeck/coherence/orphan.rs @@ -21,13 +21,13 @@ use rustc::dep_graph::DepNode; use rustc::hir::intravisit; use rustc::hir; -pub fn check(tcx: &TyCtxt) { +pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut orphan = OrphanChecker { tcx: tcx }; tcx.visit_all_items_in_krate(DepNode::CoherenceOrphanCheck, &mut orphan); } struct OrphanChecker<'cx, 'tcx:'cx> { - tcx: &'cx TyCtxt<'tcx> + tcx: TyCtxt<'cx, 'tcx, 'tcx> } impl<'cx, 'tcx> OrphanChecker<'cx, 'tcx> { diff --git a/src/librustc_typeck/coherence/overlap.rs b/src/librustc_typeck/coherence/overlap.rs index a05167dbe4..dcaa5cfb20 100644 --- a/src/librustc_typeck/coherence/overlap.rs +++ b/src/librustc_typeck/coherence/overlap.rs @@ -12,10 +12,8 @@ //! same type. Likewise, no two inherent impls for a given type //! constructor provide a method with the same name. 
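Both `check_implementations_of_coerce_unsized` and `check_coherence` above stop building an inference context by hand and instead use `tcx.infer_ctxt(..).enter(|infcx| ...)`, which scopes the context to a closure so its per-inference data cannot outlive the check. A minimal sketch of that builder-plus-closure shape with invented types, not the real rustc signatures:

// Hypothetical scoped-context pattern: the short-lived context only exists inside
// the closure passed to `enter`, mirroring `tcx.infer_ctxt(..).enter(..)`.
struct GlobalCtxt {
    crate_name: String,
}

struct InferCtxtBuilder<'a> {
    global: &'a GlobalCtxt,
}

struct InferCtxt<'a> {
    global: &'a GlobalCtxt,
    // In the real compiler this would own per-inference tables and an arena.
    snapshot_depth: u32,
}

impl GlobalCtxt {
    fn infer_ctxt(&self) -> InferCtxtBuilder<'_> {
        InferCtxtBuilder { global: self }
    }
}

impl<'a> InferCtxtBuilder<'a> {
    fn enter<R>(self, f: impl FnOnce(InferCtxt<'a>) -> R) -> R {
        let infcx = InferCtxt { global: self.global, snapshot_depth: 0 };
        // The context is dropped as soon as the closure returns.
        f(infcx)
    }
}

fn main() {
    let tcx = GlobalCtxt { crate_name: "demo".to_string() };
    let answer = tcx.infer_ctxt().enter(|infcx| {
        format!("checking `{}` at depth {}", infcx.global.crate_name, infcx.snapshot_depth)
    });
    assert_eq!(answer, "checking `demo` at depth 0");
}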
-use middle::cstore::CrateStore; use hir::def_id::DefId; use rustc::traits::{self, ProjectionMode}; -use rustc::infer; use rustc::ty::{self, TyCtxt}; use syntax::ast; use rustc::dep_graph::DepNode; @@ -24,7 +22,7 @@ use rustc::hir::intravisit; use util::nodemap::DefIdMap; use lint; -pub fn check(tcx: &TyCtxt) { +pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut overlap = OverlapChecker { tcx: tcx, default_impls: DefIdMap() }; @@ -34,7 +32,7 @@ pub fn check(tcx: &TyCtxt) { } struct OverlapChecker<'cx, 'tcx:'cx> { - tcx: &'cx TyCtxt<'tcx>, + tcx: TyCtxt<'cx, 'tcx, 'tcx>, // maps from a trait def-id to an impl id default_impls: DefIdMap, @@ -45,8 +43,9 @@ impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> { #[derive(Copy, Clone, PartialEq)] enum Namespace { Type, Value } - fn name_and_namespace(tcx: &TyCtxt, item: &ty::ImplOrTraitItemId) - -> (ast::Name, Namespace) + fn name_and_namespace<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + item: &ty::ImplOrTraitItemId) + -> (ast::Name, Namespace) { let name = tcx.impl_or_trait_item(item.def_id()).name(); (name, match *item { @@ -59,10 +58,10 @@ impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> { let impl_items = self.tcx.impl_items.borrow(); for item1 in &impl_items[&impl1] { - let (name, namespace) = name_and_namespace(&self.tcx, item1); + let (name, namespace) = name_and_namespace(self.tcx, item1); for item2 in &impl_items[&impl2] { - if (name, namespace) == name_and_namespace(&self.tcx, item2) { + if (name, namespace) == name_and_namespace(self.tcx, item2) { let msg = format!("duplicate definitions with name `{}`", name); let node_id = self.tcx.map.as_local_node_id(item1.def_id()).unwrap(); self.tcx.sess.add_lint(lint::builtin::OVERLAPPING_INHERENT_IMPLS, @@ -85,13 +84,11 @@ impl<'cx, 'tcx> OverlapChecker<'cx, 'tcx> { for (i, &impl1_def_id) in impls.iter().enumerate() { for &impl2_def_id in &impls[(i+1)..] { - let infcx = infer::new_infer_ctxt(self.tcx, - &self.tcx.tables, - None, - ProjectionMode::Topmost); - if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() { - self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id) - } + self.tcx.infer_ctxt(None, None, ProjectionMode::Topmost).enter(|infcx| { + if traits::overlapping_impls(&infcx, impl1_def_id, impl2_def_id).is_some() { + self.check_for_common_items_in_impls(impl1_def_id, impl2_def_id) + } + }); } } } @@ -140,24 +137,12 @@ impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> { // insertion failed due to overlap if let Err(overlap) = insert_result { - // only print the Self type if it has at least some outer - // concrete shell; otherwise, it's not adding much - // information. - let self_type = { - overlap.on_trait_ref.substs.self_ty().and_then(|ty| { - if ty.has_concrete_skeleton() { - Some(format!(" for type `{}`", ty)) - } else { - None - } - }).unwrap_or(String::new()) - }; - let mut err = struct_span_err!( self.tcx.sess, self.tcx.span_of_impl(impl_def_id).unwrap(), E0119, "conflicting implementations of trait `{}`{}:", - overlap.on_trait_ref, - self_type); + overlap.trait_desc, + overlap.self_desc.map_or(String::new(), + |ty| format!(" for type `{}`", ty))); match self.tcx.span_of_impl(overlap.with_impl) { Ok(span) => { @@ -177,7 +162,7 @@ impl<'cx, 'tcx,'v> intravisit::Visitor<'v> for OverlapChecker<'cx, 'tcx> { // This is something like impl Trait1 for Trait2. Illegal // if Trait1 is a supertrait of Trait2 or Trait2 is not object safe. 
- if !traits::is_object_safe(self.tcx, data.principal_def_id()) { + if !self.tcx.is_object_safe(data.principal_def_id()) { // This is an error, but it will be // reported by wfcheck. Ignore it // here. This is tested by diff --git a/src/librustc_typeck/coherence/unsafety.rs b/src/librustc_typeck/coherence/unsafety.rs index b042e23e0a..53ec72abac 100644 --- a/src/librustc_typeck/coherence/unsafety.rs +++ b/src/librustc_typeck/coherence/unsafety.rs @@ -15,13 +15,13 @@ use rustc::ty::TyCtxt; use rustc::hir::intravisit; use rustc::hir; -pub fn check(tcx: &TyCtxt) { +pub fn check<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut orphan = UnsafetyChecker { tcx: tcx }; tcx.map.krate().visit_all_items(&mut orphan); } struct UnsafetyChecker<'cx, 'tcx:'cx> { - tcx: &'cx TyCtxt<'tcx> + tcx: TyCtxt<'cx, 'tcx, 'tcx> } impl<'cx, 'tcx, 'v> UnsafetyChecker<'cx, 'tcx> { diff --git a/src/librustc_typeck/collect.rs b/src/librustc_typeck/collect.rs index 2e1a684684..5896a34b0d 100644 --- a/src/librustc_typeck/collect.rs +++ b/src/librustc_typeck/collect.rs @@ -58,41 +58,39 @@ There are some shortcomings in this design: */ -use astconv::{self, AstConv, ty_of_arg, ast_ty_to_ty, ast_region_to_region}; +use astconv::{AstConv, ast_region_to_region, Bounds, PartitionedBounds, partition_bounds}; use lint; use hir::def::Def; use hir::def_id::DefId; use constrained_type_params as ctp; -use coherence; use middle::lang_items::SizedTraitLangItem; use middle::resolve_lifetime; use middle::const_val::ConstVal; use rustc_const_eval::EvalHint::UncheckedExprHint; -use rustc_const_eval::eval_const_expr_partial; +use rustc_const_eval::{eval_const_expr_partial, ConstEvalErr}; +use rustc_const_eval::ErrKind::ErroneousReferencedConstant; use rustc::ty::subst::{Substs, FnSpace, ParamSpace, SelfSpace, TypeSpace, VecPerParamSpace}; use rustc::ty::{ToPredicate, ImplContainer, ImplOrTraitItemContainer, TraitContainer}; use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, TypeScheme}; use rustc::ty::{VariantKind}; -use rustc::ty::fold::{TypeFolder}; use rustc::ty::util::IntTypeExt; use rscope::*; use rustc::dep_graph::DepNode; use rustc::hir::map as hir_map; use util::common::{ErrorReported, MemoizationMap}; -use util::nodemap::{FnvHashMap, FnvHashSet}; -use write_ty_to_tcx; +use util::nodemap::{NodeMap, FnvHashMap}; +use {CrateCtxt, write_ty_to_tcx}; use rustc_const_math::ConstInt; use std::cell::RefCell; use std::collections::HashSet; +use std::collections::hash_map::Entry::{Occupied, Vacant}; use std::rc::Rc; -use syntax::abi; -use syntax::ast; -use syntax::attr; +use syntax::{abi, ast, attr}; use syntax::codemap::Span; -use syntax::parse::token::special_idents; +use syntax::parse::token::keywords; use syntax::ptr::P; use rustc::hir::{self, PatKind}; use rustc::hir::intravisit; @@ -101,22 +99,13 @@ use rustc::hir::print as pprust; /////////////////////////////////////////////////////////////////////////// // Main entry point -pub fn collect_item_types(tcx: &TyCtxt) { - let ccx = &CrateCtxt { tcx: tcx, stack: RefCell::new(Vec::new()) }; - let mut visitor = CollectItemTypesVisitor{ ccx: ccx }; +pub fn collect_item_types(ccx: &CrateCtxt) { + let mut visitor = CollectItemTypesVisitor { ccx: ccx }; ccx.tcx.visit_all_items_in_krate(DepNode::CollectItem, &mut visitor); } /////////////////////////////////////////////////////////////////////////// -struct CrateCtxt<'a,'tcx:'a> { - tcx: &'a TyCtxt<'tcx>, - - // This stack is used to identify cycles in the user's source. - // Note that these cycles can cross multiple items. 
- stack: RefCell>, -} - /// Context specific to some particular item. This is what implements /// AstConv. It has information about the predicates that are defined /// on the trait. Unfortunately, this predicate information is @@ -134,7 +123,7 @@ struct ItemCtxt<'a,'tcx:'a> { } #[derive(Copy, Clone, PartialEq, Eq)] -enum AstConvRequest { +pub enum AstConvRequest { GetItemTypeScheme(DefId), GetTraitDef(DefId), EnsureSuperPredicates(DefId), @@ -158,7 +147,10 @@ impl<'a, 'tcx, 'v> intravisit::Visitor<'v> for CollectItemTypesVisitor<'a, 'tcx> impl<'a,'tcx> CrateCtxt<'a,'tcx> { fn icx(&'a self, param_bounds: &'a GetTypeParameterBounds<'tcx>) -> ItemCtxt<'a,'tcx> { - ItemCtxt { ccx: self, param_bounds: param_bounds } + ItemCtxt { + ccx: self, + param_bounds: param_bounds, + } } fn cycle_check(&self, @@ -303,12 +295,16 @@ impl<'a,'tcx> CrateCtxt<'a,'tcx> { impl<'a,'tcx> ItemCtxt<'a,'tcx> { fn to_ty(&self, rs: &RS, ast_ty: &hir::Ty) -> Ty<'tcx> { - ast_ty_to_ty(self, rs, ast_ty) + AstConv::ast_ty_to_ty(self, rs, ast_ty) } } -impl<'a, 'tcx> AstConv<'tcx> for ItemCtxt<'a, 'tcx> { - fn tcx(&self) -> &TyCtxt<'tcx> { self.ccx.tcx } +impl<'a, 'tcx> AstConv<'tcx, 'tcx> for ItemCtxt<'a, 'tcx> { + fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> { self.ccx.tcx } + + fn ast_ty_to_ty_cache(&self) -> &RefCell>> { + &self.ccx.ast_ty_to_ty_cache + } fn get_item_type_scheme(&self, span: Span, id: DefId) -> Result, ErrorReported> @@ -365,16 +361,37 @@ impl<'a, 'tcx> AstConv<'tcx> for ItemCtxt<'a, 'tcx> { } } - fn ty_infer(&self, - _ty_param_def: Option>, - _substs: Option<&mut Substs<'tcx>>, - _space: Option, - span: Span) -> Ty<'tcx> { + fn get_free_substs(&self) -> Option<&Substs<'tcx>> { + None + } + + fn ty_infer(&self, + _ty_param_def: Option>, + _substs: Option<&mut Substs<'tcx>>, + _space: Option, + span: Span) -> Ty<'tcx> { span_err!(self.tcx().sess, span, E0121, "the type placeholder `_` is not allowed within types on item signatures"); self.tcx().types.err } + fn projected_ty_from_poly_trait_ref(&self, + span: Span, + poly_trait_ref: ty::PolyTraitRef<'tcx>, + item_name: ast::Name) + -> Ty<'tcx> + { + if let Some(trait_ref) = self.tcx().no_late_bound_regions(&poly_trait_ref) { + self.projected_ty(span, trait_ref, item_name) + } else { + // no late-bound regions, we can just ignore the binder + span_err!(self.tcx().sess, span, E0212, + "cannot extract an associated type from a higher-ranked trait bound \ + in this context"); + self.tcx().types.err + } + } + fn projected_ty(&self, _span: Span, trait_ref: ty::TraitRef<'tcx>, @@ -383,13 +400,17 @@ impl<'a, 'tcx> AstConv<'tcx> for ItemCtxt<'a, 'tcx> { { self.tcx().mk_projection(trait_ref, item_name) } + + fn set_tainted_by_errors(&self) { + // no obvious place to track this, just let it go + } } /// Interface used to find the bounds on a type parameter from within /// an `ItemCtxt`. This allows us to use multiple kinds of sources. trait GetTypeParameterBounds<'tcx> { fn get_type_parameter_bounds(&self, - astconv: &AstConv<'tcx>, + astconv: &AstConv<'tcx, 'tcx>, span: Span, node_id: ast::NodeId) -> Vec>; @@ -400,7 +421,7 @@ impl<'a,'b,'tcx,A,B> GetTypeParameterBounds<'tcx> for (&'a A,&'b B) where A : GetTypeParameterBounds<'tcx>, B : GetTypeParameterBounds<'tcx> { fn get_type_parameter_bounds(&self, - astconv: &AstConv<'tcx>, + astconv: &AstConv<'tcx, 'tcx>, span: Span, node_id: ast::NodeId) -> Vec> @@ -414,7 +435,7 @@ impl<'a,'b,'tcx,A,B> GetTypeParameterBounds<'tcx> for (&'a A,&'b B) /// Empty set of bounds. 
impl<'tcx> GetTypeParameterBounds<'tcx> for () { fn get_type_parameter_bounds(&self, - _astconv: &AstConv<'tcx>, + _astconv: &AstConv<'tcx, 'tcx>, _span: Span, _node_id: ast::NodeId) -> Vec> @@ -428,7 +449,7 @@ impl<'tcx> GetTypeParameterBounds<'tcx> for () { /// from the trait/impl have been fully converted. impl<'tcx> GetTypeParameterBounds<'tcx> for ty::GenericPredicates<'tcx> { fn get_type_parameter_bounds(&self, - astconv: &AstConv<'tcx>, + astconv: &AstConv<'tcx, 'tcx>, _span: Span, node_id: ast::NodeId) -> Vec> @@ -445,10 +466,12 @@ impl<'tcx> GetTypeParameterBounds<'tcx> for ty::GenericPredicates<'tcx> { ty::Predicate::TypeOutlives(ref data) => { data.skip_binder().0.is_param(def.space, def.index) } + ty::Predicate::Rfc1592(..) | ty::Predicate::Equate(..) | ty::Predicate::RegionOutlives(..) | ty::Predicate::WellFormed(..) | ty::Predicate::ObjectSafe(..) | + ty::Predicate::ClosureKind(..) | ty::Predicate::Projection(..) => { false } @@ -465,7 +488,7 @@ impl<'tcx> GetTypeParameterBounds<'tcx> for ty::GenericPredicates<'tcx> { /// bounds for a type parameter `X` if `X::Foo` is used. impl<'tcx> GetTypeParameterBounds<'tcx> for hir::Generics { fn get_type_parameter_bounds(&self, - astconv: &AstConv<'tcx>, + astconv: &AstConv<'tcx, 'tcx>, _: Span, node_id: ast::NodeId) -> Vec> @@ -504,10 +527,10 @@ impl<'tcx> GetTypeParameterBounds<'tcx> for hir::Generics { /// parameter with id `param_id`. We use this so as to avoid running /// `ast_ty_to_ty`, because we want to avoid triggering an all-out /// conversion of the type to avoid inducing unnecessary cycles. -fn is_param<'tcx>(tcx: &TyCtxt<'tcx>, - ast_ty: &hir::Ty, - param_id: ast::NodeId) - -> bool +fn is_param<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + ast_ty: &hir::Ty, + param_id: ast::NodeId) + -> bool { if let hir::TyPath(None, _) = ast_ty.node { let path_res = *tcx.def_map.borrow().get(&ast_ty.id).unwrap(); @@ -544,11 +567,12 @@ fn convert_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, ty_generic_predicates_for_fn(ccx, &sig.generics, rcvr_ty_predicates); let (fty, explicit_self_category) = - astconv::ty_of_method(&ccx.icx(&(rcvr_ty_predicates, &sig.generics)), - sig, untransformed_rcvr_ty); + AstConv::ty_of_method(&ccx.icx(&(rcvr_ty_predicates, &sig.generics)), + sig, + untransformed_rcvr_ty); let def_id = ccx.tcx.map.local_def_id(id); - let substs = ccx.tcx.mk_substs(mk_item_substs(ccx, &ty_generics)); + let substs = mk_item_substs(ccx, &ty_generics); let ty_method = ty::Method::new(name, ty_generics, @@ -560,7 +584,7 @@ fn convert_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, def_id, container); - let fty = ccx.tcx.mk_fn_def(def_id, substs, ty_method.fty.clone()); + let fty = ccx.tcx.mk_fn_def(def_id, substs, ty_method.fty); debug!("method {} (id {}) has type {:?}", name, id, fty); ccx.tcx.register_item_type(def_id, TypeScheme { @@ -569,7 +593,7 @@ fn convert_method<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, }); ccx.tcx.predicates.borrow_mut().insert(def_id, ty_method.predicates.clone()); - write_ty_to_tcx(ccx.tcx, id, fty); + write_ty_to_tcx(ccx, id, fty); debug!("writing method type: def_id={:?} mty={:?}", def_id, ty_method); @@ -586,7 +610,7 @@ fn convert_field<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, { let tt = ccx.icx(struct_predicates).to_ty(&ExplicitRscope, &field.ty); ty_f.fulfill_ty(tt); - write_ty_to_tcx(ccx.tcx, field.id, tt); + write_ty_to_tcx(ccx, field.id, tt); /* add the field to the tcache */ ccx.tcx.register_item_type(ccx.tcx.map.local_def_id(field.id), @@ -610,7 +634,7 @@ fn convert_associated_const<'a, 'tcx>(ccx: &CrateCtxt<'a, 
'tcx>, ccx.tcx.predicates.borrow_mut().insert(ccx.tcx.map.local_def_id(id), ty::GenericPredicates::empty()); - write_ty_to_tcx(ccx.tcx, id, ty); + write_ty_to_tcx(ccx, id, ty); let associated_const = Rc::new(ty::AssociatedConst { name: name, @@ -688,7 +712,7 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { } hir::ItemEnum(ref enum_definition, _) => { let (scheme, predicates) = convert_typed_item(ccx, it); - write_ty_to_tcx(tcx, it.id, scheme.ty); + write_ty_to_tcx(ccx, it.id, scheme.ty); convert_enum_variant_types(ccx, tcx.lookup_adt_def_master(ccx.tcx.map.local_def_id(it.id)), scheme, @@ -697,7 +721,7 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { }, hir::ItemDefaultImpl(_, ref ast_trait_ref) => { let trait_ref = - astconv::instantiate_mono_trait_ref(&ccx.icx(&()), + AstConv::instantiate_mono_trait_ref(&ccx.icx(&()), &ExplicitRscope, ast_trait_ref, None); @@ -715,41 +739,52 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { // Create generics from the generics specified in the impl head. debug!("convert: ast_generics={:?}", generics); let def_id = ccx.tcx.map.local_def_id(it.id); - let ty_generics = ty_generics_for_type_or_impl(ccx, generics); + let ty_generics = ty_generics_for_impl(ccx, generics); let mut ty_predicates = ty_generic_predicates_for_type_or_impl(ccx, generics); debug!("convert: impl_bounds={:?}", ty_predicates); let selfty = ccx.icx(&ty_predicates).to_ty(&ExplicitRscope, &selfty); - write_ty_to_tcx(tcx, it.id, selfty); + write_ty_to_tcx(ccx, it.id, selfty); tcx.register_item_type(def_id, TypeScheme { generics: ty_generics.clone(), ty: selfty }); let trait_ref = opt_trait_ref.as_ref().map(|ast_trait_ref| { - astconv::instantiate_mono_trait_ref(&ccx.icx(&ty_predicates), + AstConv::instantiate_mono_trait_ref(&ccx.icx(&ty_predicates), &ExplicitRscope, ast_trait_ref, Some(selfty)) }); tcx.impl_trait_refs.borrow_mut().insert(def_id, trait_ref); - enforce_impl_params_are_constrained(tcx, generics, &mut ty_predicates, def_id); + enforce_impl_params_are_constrained(ccx, generics, &mut ty_predicates, def_id); tcx.predicates.borrow_mut().insert(def_id, ty_predicates.clone()); // Convert all the associated consts. 
// Also, check if there are any duplicate associated items - let mut seen_type_items = FnvHashSet(); - let mut seen_value_items = FnvHashSet(); + let mut seen_type_items = FnvHashMap(); + let mut seen_value_items = FnvHashMap(); for impl_item in impl_items { let seen_items = match impl_item.node { hir::ImplItemKind::Type(_) => &mut seen_type_items, _ => &mut seen_value_items, }; - if !seen_items.insert(impl_item.name) { - coherence::report_duplicate_item(tcx, impl_item.span, impl_item.name).emit(); + match seen_items.entry(impl_item.name) { + Occupied(entry) => { + let mut err = struct_span_err!(tcx.sess, impl_item.span, E0201, + "duplicate definitions with name `{}`:", + impl_item.name); + span_note!(&mut err, *entry.get(), + "previous definition of `{}` here", + impl_item.name); + err.emit(); + } + Vacant(entry) => { + entry.insert(impl_item.span); + } } if let hir::ImplItemKind::Const(ref ty, _) = impl_item.node { @@ -800,7 +835,7 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { } } - enforce_impl_lifetimes_are_constrained(tcx, generics, def_id, impl_items); + enforce_impl_lifetimes_are_constrained(ccx, generics, def_id, impl_items); }, hir::ItemTrait(_, _, _, ref trait_items) => { let trait_def = trait_def_of_item(ccx, it); @@ -884,7 +919,7 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { }, hir::ItemStruct(ref struct_def, _) => { let (scheme, predicates) = convert_typed_item(ccx, it); - write_ty_to_tcx(tcx, it.id, scheme.ty); + write_ty_to_tcx(ccx, it.id, scheme.ty); let it_def_id = ccx.tcx.map.local_def_id(it.id); let variant = tcx.lookup_adt_def_master(it_def_id).struct_variant(); @@ -900,14 +935,14 @@ fn convert_item(ccx: &CrateCtxt, it: &hir::Item) { hir::ItemTy(_, ref generics) => { ensure_no_ty_param_bounds(ccx, it.span, generics, "type"); let (scheme, _) = convert_typed_item(ccx, it); - write_ty_to_tcx(tcx, it.id, scheme.ty); + write_ty_to_tcx(ccx, it.id, scheme.ty); }, _ => { // This call populates the type cache with the converted type // of the item in passing. All we have to do here is to write // it into the node type table. 
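The duplicate-item check above swaps `FnvHashSet::insert` for a map from item name to the span of the first definition, so the E0201 error can also point back at the earlier item. A standalone sketch of that `Entry`-based shape, with an ordinary `HashMap` and `(line, column)` pairs standing in for spans:

use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::HashMap;

type Span = (u32, u32); // hypothetical (line, column) stand-in for a real span

fn check_duplicates(items: &[(&str, Span)]) -> Vec<String> {
    let mut seen: HashMap<&str, Span> = HashMap::new();
    let mut errors = Vec::new();
    for &(name, span) in items {
        match seen.entry(name) {
            Occupied(entry) => {
                // Report the duplicate and point back at the first definition.
                errors.push(format!(
                    "duplicate definitions with name `{}` at {:?}; previous definition at {:?}",
                    name, span, entry.get()
                ));
            }
            Vacant(entry) => {
                entry.insert(span);
            }
        }
    }
    errors
}

fn main() {
    let items = [("len", (3, 4)), ("push", (7, 4)), ("len", (12, 4))];
    let errors = check_duplicates(&items);
    assert_eq!(errors.len(), 1);
    assert!(errors[0].contains("`len`"));
}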
let (scheme, _) = convert_typed_item(ccx, it); - write_ty_to_tcx(tcx, it.id, scheme.ty); + write_ty_to_tcx(ccx, it.id, scheme.ty); }, } } @@ -927,8 +962,8 @@ fn convert_variant_ctor<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, .map(|field| field.unsubst_ty()) .collect(); let def_id = tcx.map.local_def_id(ctor_id); - let substs = tcx.mk_substs(mk_item_substs(ccx, &scheme.generics)); - tcx.mk_fn_def(def_id, substs, ty::BareFnTy { + let substs = mk_item_substs(ccx, &scheme.generics); + tcx.mk_fn_def(def_id, substs, tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Normal, abi: abi::Abi::Rust, sig: ty::Binder(ty::FnSig { @@ -936,10 +971,10 @@ fn convert_variant_ctor<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, output: ty::FnConverging(scheme.ty), variadic: false }) - }) + })) } }; - write_ty_to_tcx(tcx, ctor_id, ctor_ty); + write_ty_to_tcx(ccx, ctor_id, ctor_ty); tcx.predicates.borrow_mut().insert(tcx.map.local_def_id(ctor_id), predicates); tcx.register_item_type(tcx.map.local_def_id(ctor_id), TypeScheme { @@ -971,18 +1006,19 @@ fn convert_enum_variant_types<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, } } -fn convert_struct_variant<'tcx>(tcx: &TyCtxt<'tcx>, - did: DefId, - name: ast::Name, - disr_val: ty::Disr, - def: &hir::VariantData) -> ty::VariantDefData<'tcx, 'tcx> { +fn convert_struct_variant<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + did: DefId, + name: ast::Name, + disr_val: ty::Disr, + def: &hir::VariantData) + -> ty::VariantDefData<'tcx, 'tcx> { let mut seen_fields: FnvHashMap = FnvHashMap(); - let node_id = tcx.map.as_local_node_id(did).unwrap(); + let node_id = ccx.tcx.map.as_local_node_id(did).unwrap(); let fields = def.fields().iter().map(|f| { - let fid = tcx.map.local_def_id(f.id); + let fid = ccx.tcx.map.local_def_id(f.id); let dup_span = seen_fields.get(&f.name).cloned(); if let Some(prev_span) = dup_span { - let mut err = struct_span_err!(tcx.sess, f.span, E0124, + let mut err = struct_span_err!(ccx.tcx.sess, f.span, E0124, "field `{}` is already declared", f.name); span_note!(&mut err, prev_span, "previously declared here"); @@ -991,7 +1027,8 @@ fn convert_struct_variant<'tcx>(tcx: &TyCtxt<'tcx>, seen_fields.insert(f.name, f.span); } - ty::FieldDefData::new(fid, f.name, ty::Visibility::from_hir(&f.vis, node_id, tcx)) + ty::FieldDefData::new(fid, f.name, + ty::Visibility::from_hir(&f.vis, node_id, ccx.tcx)) }).collect(); ty::VariantDefData { did: did, @@ -1002,67 +1039,63 @@ fn convert_struct_variant<'tcx>(tcx: &TyCtxt<'tcx>, } } -fn convert_struct_def<'tcx>(tcx: &TyCtxt<'tcx>, - it: &hir::Item, - def: &hir::VariantData) - -> ty::AdtDefMaster<'tcx> +fn convert_struct_def<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + it: &hir::Item, + def: &hir::VariantData) + -> ty::AdtDefMaster<'tcx> { - let did = tcx.map.local_def_id(it.id); + let did = ccx.tcx.map.local_def_id(it.id); let ctor_id = if !def.is_struct() { - tcx.map.local_def_id(def.id()) + ccx.tcx.map.local_def_id(def.id()) } else { did }; - tcx.intern_adt_def( - did, - ty::AdtKind::Struct, - vec![convert_struct_variant(tcx, ctor_id, it.name, ConstInt::Infer(0), def)] - ) + ccx.tcx.intern_adt_def(did, ty::AdtKind::Struct, + vec![convert_struct_variant(ccx, ctor_id, it.name, ConstInt::Infer(0), def)]) } -fn convert_enum_def<'tcx>(tcx: &TyCtxt<'tcx>, - it: &hir::Item, - def: &hir::EnumDef) - -> ty::AdtDefMaster<'tcx> -{ - fn print_err(tcx: &TyCtxt, span: Span, ty: ty::Ty, cv: ConstVal) { - span_err!(tcx.sess, span, E0079, "mismatched types: expected `{}` got `{}`", - ty, cv.description()); - } - fn evaluate_disr_expr<'tcx>(tcx: &TyCtxt<'tcx>, - repr_ty: 
attr::IntType, - e: &hir::Expr) -> Option { + fn evaluate_disr_expr(ccx: &CrateCtxt, repr_ty: attr::IntType, e: &hir::Expr) + -> Option { debug!("disr expr, checking {}", pprust::expr_to_string(e)); - let ty_hint = repr_ty.to_ty(tcx); + let ty_hint = repr_ty.to_ty(ccx.tcx); + let print_err = |cv: ConstVal| { + struct_span_err!(ccx.tcx.sess, e.span, E0079, "mismatched types") + .note_expected_found(&"type", &ty_hint, &format!("{}", cv.description())) + .emit(); + }; + let hint = UncheckedExprHint(ty_hint); - match eval_const_expr_partial(tcx, e, hint, None) { + match eval_const_expr_partial(ccx.tcx, e, hint, None) { Ok(ConstVal::Integral(i)) => { // FIXME: eval_const_expr_partial should return an error if the hint is wrong match (repr_ty, i) { - (attr::SignedInt(ast::IntTy::I8), ConstInt::I8(_)) => Some(i), - (attr::SignedInt(ast::IntTy::I16), ConstInt::I16(_)) => Some(i), - (attr::SignedInt(ast::IntTy::I32), ConstInt::I32(_)) => Some(i), - (attr::SignedInt(ast::IntTy::I64), ConstInt::I64(_)) => Some(i), - (attr::SignedInt(ast::IntTy::Is), ConstInt::Isize(_)) => Some(i), - (attr::UnsignedInt(ast::UintTy::U8), ConstInt::U8(_)) => Some(i), - (attr::UnsignedInt(ast::UintTy::U16), ConstInt::U16(_)) => Some(i), - (attr::UnsignedInt(ast::UintTy::U32), ConstInt::U32(_)) => Some(i), - (attr::UnsignedInt(ast::UintTy::U64), ConstInt::U64(_)) => Some(i), + (attr::SignedInt(ast::IntTy::I8), ConstInt::I8(_)) | + (attr::SignedInt(ast::IntTy::I16), ConstInt::I16(_)) | + (attr::SignedInt(ast::IntTy::I32), ConstInt::I32(_)) | + (attr::SignedInt(ast::IntTy::I64), ConstInt::I64(_)) | + (attr::SignedInt(ast::IntTy::Is), ConstInt::Isize(_)) | + (attr::UnsignedInt(ast::UintTy::U8), ConstInt::U8(_)) | + (attr::UnsignedInt(ast::UintTy::U16), ConstInt::U16(_)) | + (attr::UnsignedInt(ast::UintTy::U32), ConstInt::U32(_)) | + (attr::UnsignedInt(ast::UintTy::U64), ConstInt::U64(_)) | (attr::UnsignedInt(ast::UintTy::Us), ConstInt::Usize(_)) => Some(i), (_, i) => { - print_err(tcx, e.span, ty_hint, ConstVal::Integral(i)); + print_err(ConstVal::Integral(i)); None }, } }, Ok(cv) => { - print_err(tcx, e.span, ty_hint, cv); + print_err(cv); None }, + // enum variant evaluation happens before the global constant check + // so we need to report the real error + Err(ConstEvalErr { kind: ErroneousReferencedConstant(box err), ..}) | Err(err) => { - let mut diag = struct_span_err!(tcx.sess, err.span, E0080, + let mut diag = struct_span_err!(ccx.tcx.sess, err.span, E0080, "constant evaluation error: {}", err.description()); if !e.span.contains(err.span) { @@ -1074,54 +1107,34 @@ fn convert_enum_def<'tcx>(tcx: &TyCtxt<'tcx>, } } - fn report_discrim_overflow(tcx: &TyCtxt, - variant_span: Span, - variant_name: &str, - prev_val: ty::Disr) { - span_err!(tcx.sess, variant_span, E0370, - "enum discriminant overflowed on value after {}; \ - set explicitly via {} = {} if that is desired outcome", - prev_val, variant_name, prev_val.wrap_incr()); - } - - fn next_disr(tcx: &TyCtxt, - v: &hir::Variant, - repr_type: attr::IntType, - prev_disr_val: Option) -> Option { - if let Some(prev_disr_val) = prev_disr_val { - let result = repr_type.disr_incr(prev_disr_val); - if let None = result { - report_discrim_overflow(tcx, v.span, &v.node.name.as_str(), prev_disr_val); - } - result - } else { - Some(repr_type.initial_discriminant(tcx)) - } - } - fn convert_enum_variant<'tcx>(tcx: &TyCtxt<'tcx>, - v: &hir::Variant, - disr: ty::Disr) - -> ty::VariantDefData<'tcx, 'tcx> - { - let did = tcx.map.local_def_id(v.node.data.id()); - let name = v.node.name; - 
convert_struct_variant(tcx, did, name, disr, &v.node.data) - } +fn convert_enum_def<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + it: &hir::Item, + def: &hir::EnumDef) + -> ty::AdtDefMaster<'tcx> +{ + let tcx = ccx.tcx; let did = tcx.map.local_def_id(it.id); let repr_hints = tcx.lookup_repr_hints(did); let repr_type = tcx.enum_repr_type(repr_hints.get(0)); - let mut prev_disr = None; + let initial = repr_type.initial_discriminant(tcx); + let mut prev_disr = None::; let variants = def.variants.iter().map(|v| { - let disr = match v.node.disr_expr { - Some(ref e) => evaluate_disr_expr(tcx, repr_type, e), - None => next_disr(tcx, v, repr_type, prev_disr) - }.unwrap_or_else(|| { - prev_disr.map(ty::Disr::wrap_incr) - .unwrap_or(repr_type.initial_discriminant(tcx)) - }); - + let wrapped_disr = prev_disr.map_or(initial, |d| d.wrap_incr()); + let disr = if let Some(ref e) = v.node.disr_expr { + evaluate_disr_expr(ccx, repr_type, e) + } else if let Some(disr) = repr_type.disr_incr(tcx, prev_disr) { + Some(disr) + } else { + span_err!(tcx.sess, v.span, E0370, + "enum discriminant overflowed on value after {}; \ + set explicitly via {} = {} if that is desired outcome", + prev_disr.unwrap(), v.node.name, wrapped_disr); + None + }.unwrap_or(wrapped_disr); prev_disr = Some(disr); - convert_enum_variant(tcx, v, disr) + + let did = tcx.map.local_def_id(v.node.data.id()); + convert_struct_variant(ccx, did, v.node.name, disr, &v.node.data) }).collect(); tcx.intern_adt_def(tcx.map.local_def_id(it.id), ty::AdtKind::Enum, variants) } @@ -1237,9 +1250,9 @@ fn trait_def_of_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, it.span, "the `#[rustc_paren_sugar]` attribute is a temporary means of controlling \ which traits can use parenthetical notation"); - fileline_help!(&mut err, it.span, - "add `#![feature(unboxed_closures)]` to \ - the crate attributes to use it"); + help!(&mut err, + "add `#![feature(unboxed_closures)]` to \ + the crate attributes to use it"); err.emit(); } @@ -1448,29 +1461,29 @@ fn compute_type_scheme_of_item<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, } hir::ItemFn(ref decl, unsafety, _, abi, ref generics, _) => { let ty_generics = ty_generics_for_fn(ccx, generics, &ty::Generics::empty()); - let tofd = astconv::ty_of_bare_fn(&ccx.icx(generics), unsafety, abi, &decl); + let tofd = AstConv::ty_of_bare_fn(&ccx.icx(generics), unsafety, abi, &decl); let def_id = ccx.tcx.map.local_def_id(it.id); - let substs = tcx.mk_substs(mk_item_substs(ccx, &ty_generics)); + let substs = mk_item_substs(ccx, &ty_generics); let ty = tcx.mk_fn_def(def_id, substs, tofd); ty::TypeScheme { ty: ty, generics: ty_generics } } hir::ItemTy(ref t, ref generics) => { - let ty_generics = ty_generics_for_type_or_impl(ccx, generics); + let ty_generics = ty_generics_for_type(ccx, generics); let ty = ccx.icx(generics).to_ty(&ExplicitRscope, &t); ty::TypeScheme { ty: ty, generics: ty_generics } } hir::ItemEnum(ref ei, ref generics) => { - let ty_generics = ty_generics_for_type_or_impl(ccx, generics); + let def = convert_enum_def(ccx, it, ei); + let ty_generics = ty_generics_for_type(ccx, generics); let substs = mk_item_substs(ccx, &ty_generics); - let def = convert_enum_def(tcx, it, ei); - let t = tcx.mk_enum(def, tcx.mk_substs(substs)); + let t = tcx.mk_enum(def, substs); ty::TypeScheme { ty: t, generics: ty_generics } } hir::ItemStruct(ref si, ref generics) => { - let ty_generics = ty_generics_for_type_or_impl(ccx, generics); + let def = convert_struct_def(ccx, it, si); + let ty_generics = ty_generics_for_type(ccx, generics); let substs = 
mk_item_substs(ccx, &ty_generics); - let def = convert_struct_def(tcx, it, si); - let t = tcx.mk_struct(def, tcx.mk_substs(substs)); + let t = tcx.mk_struct(def, substs); ty::TypeScheme { ty: t, generics: ty_generics } } hir::ItemDefaultImpl(..) | @@ -1578,7 +1591,7 @@ fn compute_type_scheme_of_foreign_item<'a, 'tcx>( hir::ForeignItemStatic(ref t, _) => { ty::TypeScheme { generics: ty::Generics::empty(), - ty: ast_ty_to_ty(&ccx.icx(&()), &ExplicitRscope, t) + ty: AstConv::ast_ty_to_ty(&ccx.icx(&()), &ExplicitRscope, t) } } } @@ -1595,7 +1608,7 @@ fn convert_foreign_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let abi = tcx.map.get_foreign_abi(it.id); let scheme = type_scheme_of_foreign_item(ccx, it, abi); - write_ty_to_tcx(ccx.tcx, it.id, scheme.ty); + write_ty_to_tcx(ccx, it.id, scheme.ty); let predicates = match it.node { hir::ForeignItemFn(_, ref generics) => { @@ -1611,10 +1624,14 @@ fn convert_foreign_item<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, assert!(prev_predicates.is_none()); } -fn ty_generics_for_type_or_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, - generics: &hir::Generics) - -> ty::Generics<'tcx> { - ty_generics(ccx, TypeSpace, generics, &ty::Generics::empty()) +fn ty_generics_for_type<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, generics: &hir::Generics) + -> ty::Generics<'tcx> { + ty_generics(ccx, TypeSpace, generics, &ty::Generics::empty(), true) +} + +fn ty_generics_for_impl<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, generics: &hir::Generics) + -> ty::Generics<'tcx> { + ty_generics(ccx, TypeSpace, generics, &ty::Generics::empty(), false) } fn ty_generic_predicates_for_type_or_impl<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, @@ -1633,7 +1650,7 @@ fn ty_generics_for_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, debug!("ty_generics_for_trait(trait_id={:?}, substs={:?})", ccx.tcx.map.local_def_id(trait_id), substs); - let mut generics = ty_generics_for_type_or_impl(ccx, ast_generics); + let mut generics = ty_generics_for_type(ccx, ast_generics); // Add in the self type parameter. // @@ -1646,7 +1663,7 @@ fn ty_generics_for_trait<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, let def = ty::TypeParameterDef { space: SelfSpace, index: 0, - name: special_idents::type_self.name, + name: keywords::SelfType.name(), def_id: ccx.tcx.map.local_def_id(param_id), default_def_id: ccx.tcx.map.local_def_id(parent), default: None, @@ -1665,7 +1682,7 @@ fn ty_generics_for_fn<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, base_generics: &ty::Generics<'tcx>) -> ty::Generics<'tcx> { - ty_generics(ccx, FnSpace, generics, base_generics) + ty_generics(ccx, FnSpace, generics, base_generics, false) } fn ty_generic_predicates_for_fn<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, @@ -1677,7 +1694,7 @@ fn ty_generic_predicates_for_fn<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, } // Add the Sized bound, unless the type parameter is marked as `?Sized`. 
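The rewritten `convert_enum_def` above gives each variant either its explicit discriminant expression or the previous value plus one, reporting overflow and then falling back to the wrapped value so checking can continue. A simplified standalone sketch of that assignment loop over `u8` discriminants, with error reporting reduced to collecting messages:

// Hypothetical variant: a name and an optional explicit discriminant.
struct Variant<'a> {
    name: &'a str,
    disr_expr: Option<u8>,
}

fn assign_discriminants(variants: &[Variant]) -> (Vec<u8>, Vec<String>) {
    let initial: u8 = 0;
    let mut prev_disr: Option<u8> = None;
    let mut out = Vec::new();
    let mut errors = Vec::new();

    for v in variants {
        let wrapped_disr = prev_disr.map_or(initial, |d| d.wrapping_add(1));
        let computed = if let Some(e) = v.disr_expr {
            Some(e)
        } else if let Some(d) = prev_disr.map_or(Some(initial), |d| d.checked_add(1)) {
            Some(d)
        } else {
            errors.push(format!(
                "enum discriminant overflowed on value after {}; \
                 set explicitly via {} = {} if that is desired outcome",
                prev_disr.unwrap(), v.name, wrapped_disr
            ));
            None
        };
        let disr = computed.unwrap_or(wrapped_disr);
        prev_disr = Some(disr);
        out.push(disr);
    }
    (out, errors)
}

fn main() {
    let variants = [
        Variant { name: "A", disr_expr: None },
        Variant { name: "B", disr_expr: Some(254) },
        Variant { name: "C", disr_expr: None },
        Variant { name: "D", disr_expr: None }, // would overflow past u8::MAX
    ];
    let (disrs, errors) = assign_discriminants(&variants);
    assert_eq!(disrs, vec![0, 254, 255, 0]);
    assert_eq!(errors.len(), 1);
}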
-fn add_unsized_bound<'tcx>(astconv: &AstConv<'tcx>, +fn add_unsized_bound<'tcx>(astconv: &AstConv<'tcx, 'tcx>, bounds: &mut ty::BuiltinBounds, ast_bounds: &[hir::TyParamBound], span: Span) @@ -1785,9 +1802,9 @@ fn ty_generic_predicates<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, for predicate in &where_clause.predicates { match predicate { &hir::WherePredicate::BoundPredicate(ref bound_pred) => { - let ty = ast_ty_to_ty(&ccx.icx(&(base_predicates, ast_generics)), - &ExplicitRscope, - &bound_pred.bounded_ty); + let ty = AstConv::ast_ty_to_ty(&ccx.icx(&(base_predicates, ast_generics)), + &ExplicitRscope, + &bound_pred.bounded_ty); for bound in bound_pred.bounds.iter() { match bound { @@ -1840,7 +1857,8 @@ fn ty_generic_predicates<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, fn ty_generics<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, space: ParamSpace, ast_generics: &hir::Generics, - base_generics: &ty::Generics<'tcx>) + base_generics: &ty::Generics<'tcx>, + allow_defaults: bool) -> ty::Generics<'tcx> { let tcx = ccx.tcx; @@ -1863,7 +1881,8 @@ fn ty_generics<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, // Now create the real type parameters. for i in 0..ast_generics.ty_params.len() { - let def = get_or_create_type_parameter_def(ccx, ast_generics, space, i as u32); + let def = + get_or_create_type_parameter_def(ccx, ast_generics, space, i as u32, allow_defaults); debug!("ty_generics: def for type param: {:?}, {:?}", def, space); result.types.push(space, def); } @@ -1877,7 +1896,7 @@ fn convert_default_type_parameter<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, index: u32) -> Ty<'tcx> { - let ty = ast_ty_to_ty(&ccx.icx(&()), &ExplicitRscope, &path); + let ty = AstConv::ast_ty_to_ty(&ccx.icx(&()), &ExplicitRscope, &path); for leaf_ty in ty.walk() { if let ty::TyParam(p) = leaf_ty.sty { @@ -1897,7 +1916,8 @@ fn convert_default_type_parameter<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, fn get_or_create_type_parameter_def<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, ast_generics: &hir::Generics, space: ParamSpace, - index: u32) + index: u32, + allow_defaults: bool) -> ty::TypeParameterDef<'tcx> { let param = &ast_generics.ty_params[index as usize]; @@ -1918,7 +1938,7 @@ fn get_or_create_type_parameter_def<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, let parent = tcx.map.get_parent(param.id); - if space != TypeSpace && default.is_some() { + if !allow_defaults && default.is_some() { if !tcx.sess.features.borrow().default_type_parameter_fallback { tcx.sess.add_lint( lint::builtin::INVALID_TYPE_PARAM_DEFAULT, @@ -1980,7 +2000,7 @@ fn compute_object_lifetime_default<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, hir::TraitTyParamBound(..) => None, hir::RegionTyParamBound(ref lifetime) => - Some(astconv::ast_region_to_region(ccx.tcx, lifetime)), + Some(ast_region_to_region(ccx.tcx, lifetime)), } }) .collect() @@ -2018,12 +2038,12 @@ enum SizedByDefault { Yes, No, } /// Translate the AST's notion of ty param bounds (which are an enum consisting of a newtyped Ty or /// a region) to ty's notion of ty param bounds, which can either be user-defined traits, or the /// built-in trait (formerly known as kind): Send. 
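`ty_generics` above now takes an explicit `allow_defaults` flag: the type and trait paths pass `true`, while the impl and fn paths pass `false`, so a default on a type parameter only goes unflagged (absent the `default_type_parameter_fallback` feature) where the language accepts it. A tiny sketch of gating per-parameter defaults on the position they appear in; the names and messages here are invented:

// Hypothetical, simplified view of a declared type parameter.
struct TyParam<'a> {
    name: &'a str,
    has_default: bool,
}

#[derive(Clone, Copy)]
enum Position { Type, Trait, Impl, Fn }

fn check_defaults(position: Position, params: &[TyParam]) -> Vec<String> {
    // Defaults are accepted on types and traits; impls and fns reject them here.
    let allow_defaults = matches!(position, Position::Type | Position::Trait);
    params.iter()
        .filter(|p| p.has_default && !allow_defaults)
        .map(|p| format!("defaults for type parameters are only allowed on \
                          type or trait definitions: `{}`", p.name))
        .collect()
}

fn main() {
    let params = [TyParam { name: "T", has_default: true }];
    assert!(check_defaults(Position::Type, &params).is_empty());
    assert_eq!(check_defaults(Position::Impl, &params).len(), 1);
}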
-fn compute_bounds<'tcx>(astconv: &AstConv<'tcx>, +fn compute_bounds<'tcx>(astconv: &AstConv<'tcx, 'tcx>, param_ty: ty::Ty<'tcx>, ast_bounds: &[hir::TyParamBound], sized_by_default: SizedByDefault, span: Span) - -> astconv::Bounds<'tcx> + -> Bounds<'tcx> { let mut bounds = conv_param_bounds(astconv, @@ -2048,7 +2068,7 @@ fn compute_bounds<'tcx>(astconv: &AstConv<'tcx>, /// because this can be anywhere from 0 predicates (`T:?Sized` adds no /// predicates) to 1 (`T:Foo`) to many (`T:Bar` adds `T:Bar` /// and `::X == i32`). -fn predicates_from_bound<'tcx>(astconv: &AstConv<'tcx>, +fn predicates_from_bound<'tcx>(astconv: &AstConv<'tcx, 'tcx>, param_ty: Ty<'tcx>, bound: &hir::TyParamBound) -> Vec> @@ -2073,31 +2093,31 @@ fn predicates_from_bound<'tcx>(astconv: &AstConv<'tcx>, } } -fn conv_poly_trait_ref<'tcx>(astconv: &AstConv<'tcx>, +fn conv_poly_trait_ref<'tcx>(astconv: &AstConv<'tcx, 'tcx>, param_ty: Ty<'tcx>, trait_ref: &hir::PolyTraitRef, projections: &mut Vec>) -> ty::PolyTraitRef<'tcx> { - astconv::instantiate_poly_trait_ref(astconv, + AstConv::instantiate_poly_trait_ref(astconv, &ExplicitRscope, trait_ref, Some(param_ty), projections) } -fn conv_param_bounds<'a,'tcx>(astconv: &AstConv<'tcx>, +fn conv_param_bounds<'a,'tcx>(astconv: &AstConv<'tcx, 'tcx>, span: Span, param_ty: ty::Ty<'tcx>, ast_bounds: &[hir::TyParamBound]) - -> astconv::Bounds<'tcx> + -> Bounds<'tcx> { let tcx = astconv.tcx(); - let astconv::PartitionedBounds { + let PartitionedBounds { builtin_bounds, trait_bounds, region_bounds - } = astconv::partition_bounds(tcx, span, &ast_bounds); + } = partition_bounds(tcx, span, &ast_bounds); let mut projection_bounds = Vec::new(); @@ -2114,7 +2134,7 @@ fn conv_param_bounds<'a,'tcx>(astconv: &AstConv<'tcx>, .map(|r| ast_region_to_region(tcx, r)) .collect(); - astconv::Bounds { + Bounds { region_bounds: region_bounds, builtin_bounds: builtin_bounds, trait_bounds: trait_bounds, @@ -2146,12 +2166,12 @@ fn compute_type_scheme_of_foreign_fn_decl<'a, 'tcx>( let rb = BindingRscope::new(); let input_tys = decl.inputs .iter() - .map(|a| ty_of_arg(&ccx.icx(ast_generics), &rb, a, None)) + .map(|a| AstConv::ty_of_arg(&ccx.icx(ast_generics), &rb, a, None)) .collect::>(); let output = match decl.output { hir::Return(ref ty) => - ty::FnConverging(ast_ty_to_ty(&ccx.icx(ast_generics), &rb, &ty)), + ty::FnConverging(AstConv::ast_ty_to_ty(&ccx.icx(ast_generics), &rb, &ty)), hir::DefaultReturn(..) => ty::FnConverging(ccx.tcx.mk_nil()), hir::NoReturn(..) 
=> @@ -2168,8 +2188,7 @@ fn compute_type_scheme_of_foreign_fn_decl<'a, 'tcx>( &format!("use of SIMD type `{}` in FFI is highly experimental and \ may result in invalid code", pprust::ty_to_string(ast_ty))) - .fileline_help(ast_ty.span, - "add #![feature(simd_ffi)] to the crate attributes to enable") + .help("add #![feature(simd_ffi)] to the crate attributes to enable") .emit(); } }; @@ -2181,14 +2200,14 @@ fn compute_type_scheme_of_foreign_fn_decl<'a, 'tcx>( } } - let substs = ccx.tcx.mk_substs(mk_item_substs(ccx, &ty_generics)); - let t_fn = ccx.tcx.mk_fn_def(id, substs, ty::BareFnTy { + let substs = mk_item_substs(ccx, &ty_generics); + let t_fn = ccx.tcx.mk_fn_def(id, substs, ccx.tcx.mk_bare_fn(ty::BareFnTy { abi: abi, unsafety: hir::Unsafety::Unsafe, sig: ty::Binder(ty::FnSig {inputs: input_tys, output: output, variadic: decl.variadic}), - }); + })); ty::TypeScheme { generics: ty_generics, @@ -2198,7 +2217,7 @@ fn compute_type_scheme_of_foreign_fn_decl<'a, 'tcx>( fn mk_item_substs<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, ty_generics: &ty::Generics<'tcx>) - -> Substs<'tcx> + -> &'tcx Substs<'tcx> { let types = ty_generics.types.map( @@ -2208,17 +2227,17 @@ fn mk_item_substs<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, ty_generics.regions.map( |def| def.to_early_bound_region()); - Substs::new(types, regions) + ccx.tcx.mk_substs(Substs::new(types, regions)) } /// Checks that all the type parameters on an impl -fn enforce_impl_params_are_constrained<'tcx>(tcx: &TyCtxt<'tcx>, - ast_generics: &hir::Generics, - impl_predicates: &mut ty::GenericPredicates<'tcx>, - impl_def_id: DefId) +fn enforce_impl_params_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + ast_generics: &hir::Generics, + impl_predicates: &mut ty::GenericPredicates<'tcx>, + impl_def_id: DefId) { - let impl_scheme = tcx.lookup_item_type(impl_def_id); - let impl_trait_ref = tcx.impl_trait_ref(impl_def_id); + let impl_scheme = ccx.tcx.lookup_item_type(impl_def_id); + let impl_trait_ref = ccx.tcx.impl_trait_ref(impl_def_id); assert!(impl_predicates.predicates.is_empty_in(FnSpace)); assert!(impl_predicates.predicates.is_empty_in(SelfSpace)); @@ -2232,8 +2251,7 @@ fn enforce_impl_params_are_constrained<'tcx>(tcx: &TyCtxt<'tcx>, input_parameters.extend(ctp::parameters_for_trait_ref(trait_ref, false)); } - ctp::setup_constraining_predicates(tcx, - impl_predicates.predicates.get_mut_slice(TypeSpace), + ctp::setup_constraining_predicates(impl_predicates.predicates.get_mut_slice(TypeSpace), impl_trait_ref, &mut input_parameters); @@ -2242,42 +2260,41 @@ fn enforce_impl_params_are_constrained<'tcx>(tcx: &TyCtxt<'tcx>, idx: index as u32, name: ty_param.name }; if !input_parameters.contains(&ctp::Parameter::Type(param_ty)) { - report_unused_parameter(tcx, ty_param.span, "type", ¶m_ty.to_string()); + report_unused_parameter(ccx, ty_param.span, "type", ¶m_ty.to_string()); } } } -fn enforce_impl_lifetimes_are_constrained<'tcx>(tcx: &TyCtxt<'tcx>, - ast_generics: &hir::Generics, - impl_def_id: DefId, - impl_items: &[hir::ImplItem]) +fn enforce_impl_lifetimes_are_constrained<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + ast_generics: &hir::Generics, + impl_def_id: DefId, + impl_items: &[hir::ImplItem]) { // Every lifetime used in an associated type must be constrained. 
- let impl_scheme = tcx.lookup_item_type(impl_def_id); - let impl_predicates = tcx.lookup_predicates(impl_def_id); - let impl_trait_ref = tcx.impl_trait_ref(impl_def_id); + let impl_scheme = ccx.tcx.lookup_item_type(impl_def_id); + let impl_predicates = ccx.tcx.lookup_predicates(impl_def_id); + let impl_trait_ref = ccx.tcx.impl_trait_ref(impl_def_id); let mut input_parameters: HashSet<_> = ctp::parameters_for_type(impl_scheme.ty, false).into_iter().collect(); if let Some(ref trait_ref) = impl_trait_ref { input_parameters.extend(ctp::parameters_for_trait_ref(trait_ref, false)); } - ctp::identify_constrained_type_params(tcx, + ctp::identify_constrained_type_params( &impl_predicates.predicates.as_slice(), impl_trait_ref, &mut input_parameters); - let lifetimes_in_associated_types: HashSet<_> = - impl_items.iter() - .map(|item| tcx.impl_or_trait_item(tcx.map.local_def_id(item.id))) - .filter_map(|item| match item { - ty::TypeTraitItem(ref assoc_ty) => assoc_ty.ty, - ty::ConstTraitItem(..) | ty::MethodTraitItem(..) => None - }) - .flat_map(|ty| ctp::parameters_for_type(ty, true)) - .filter_map(|p| match p { - ctp::Parameter::Type(_) => None, - ctp::Parameter::Region(r) => Some(r), - }) - .collect(); + let lifetimes_in_associated_types: HashSet<_> = impl_items.iter() + .map(|item| ccx.tcx.impl_or_trait_item(ccx.tcx.map.local_def_id(item.id))) + .filter_map(|item| match item { + ty::TypeTraitItem(ref assoc_ty) => assoc_ty.ty, + ty::ConstTraitItem(..) | ty::MethodTraitItem(..) => None + }) + .flat_map(|ty| ctp::parameters_for_type(ty, true)) + .filter_map(|p| match p { + ctp::Parameter::Type(_) => None, + ctp::Parameter::Region(r) => Some(r), + }) + .collect(); for (index, lifetime_def) in ast_generics.lifetimes.iter().enumerate() { let region = ty::EarlyBoundRegion { space: TypeSpace, @@ -2287,7 +2304,7 @@ fn enforce_impl_lifetimes_are_constrained<'tcx>(tcx: &TyCtxt<'tcx>, lifetimes_in_associated_types.contains(®ion) && // (*) !input_parameters.contains(&ctp::Parameter::Region(region)) { - report_unused_parameter(tcx, lifetime_def.lifetime.span, + report_unused_parameter(ccx, lifetime_def.lifetime.span, "lifetime", ®ion.name.to_string()); } } @@ -2312,12 +2329,12 @@ fn enforce_impl_lifetimes_are_constrained<'tcx>(tcx: &TyCtxt<'tcx>, // used elsewhere are not projected back out. } -fn report_unused_parameter(tcx: &TyCtxt, +fn report_unused_parameter(ccx: &CrateCtxt, span: Span, kind: &str, name: &str) { - span_err!(tcx.sess, span, E0207, + span_err!(ccx.tcx.sess, span, E0207, "the {} parameter `{}` is not constrained by the \ impl trait, self type, or predicates", kind, name); diff --git a/src/librustc_typeck/constrained_type_params.rs b/src/librustc_typeck/constrained_type_params.rs index 7e8b08c585..08c1b5fcc8 100644 --- a/src/librustc_typeck/constrained_type_params.rs +++ b/src/librustc_typeck/constrained_type_params.rs @@ -8,8 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
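For context on the constraint analysis that follows: `report_unused_parameter` above emits E0207 for any impl type parameter or lifetime that is not constrained by the trait reference, self type, or predicates, using the reachability computed in `constrained_type_params.rs`. A hedged sketch of the kind of user code involved (hypothetical items, not taken from the patch):

```rust
// Accepted: `T` is constrained because it appears in the self type.
struct Holder<T>(T);

impl<T> Holder<T> {
    fn get(self) -> T {
        self.0
    }
}

// Rejected with E0207 (left commented out so the sketch compiles):
// `T` appears in neither the self type, the trait reference, nor any
// predicate, so nothing constrains it.
//
//     struct Opaque;
//     impl<T> Default for Opaque {
//         fn default() -> Opaque { Opaque }
//     }

fn main() {
    let h = Holder(7);
    println!("{}", h.get());
}
```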
-use rustc::ty::subst; -use rustc::ty::{self, Ty, TyCtxt}; +use rustc::ty::{self, subst, Ty}; use std::collections::HashSet; @@ -94,13 +93,12 @@ fn parameters_for_region(region: &ty::Region) -> Option { } } -pub fn identify_constrained_type_params<'tcx>(_tcx: &TyCtxt<'tcx>, - predicates: &[ty::Predicate<'tcx>], +pub fn identify_constrained_type_params<'tcx>(predicates: &[ty::Predicate<'tcx>], impl_trait_ref: Option>, input_parameters: &mut HashSet) { let mut predicates = predicates.to_owned(); - setup_constraining_predicates(_tcx, &mut predicates, impl_trait_ref, input_parameters); + setup_constraining_predicates(&mut predicates, impl_trait_ref, input_parameters); } @@ -144,8 +142,7 @@ pub fn identify_constrained_type_params<'tcx>(_tcx: &TyCtxt<'tcx>, /// which is determined by 1, which requires `U`, that is determined /// by 0. I should probably pick a less tangled example, but I can't /// think of any. -pub fn setup_constraining_predicates<'tcx>(_tcx: &TyCtxt<'tcx>, - predicates: &mut [ty::Predicate<'tcx>], +pub fn setup_constraining_predicates<'tcx>(predicates: &mut [ty::Predicate<'tcx>], impl_trait_ref: Option>, input_parameters: &mut HashSet) { diff --git a/src/librustc_typeck/diagnostics.rs b/src/librustc_typeck/diagnostics.rs index 8f76bf92ef..45aec9558f 100644 --- a/src/librustc_typeck/diagnostics.rs +++ b/src/librustc_typeck/diagnostics.rs @@ -53,8 +53,8 @@ let x = Fruit::Apple(String::new(), String::new()); // Incorrect. match x { - Apple(a) => {}, - Apple(a, b, c) => {}, + Fruit::Apple(a) => {}, + Fruit::Apple(a, b, c) => {}, } ``` @@ -77,8 +77,8 @@ enum Number { // Assuming x is a Number we can pattern match on its contents. match x { - Zero(inside) => {}, - One(inside) => {}, + Number::Zero(inside) => {}, + Number::One(inside) => {}, } ``` @@ -632,7 +632,7 @@ recursion limit (which can be set via the `recursion_limit` attribute). For a somewhat artificial example: -```compile_fail +```compile_fail,ignore #![recursion_limit="2"] struct Foo; @@ -742,7 +742,7 @@ fn f(a: u16, b: &str) {} Must always be called with exactly two arguments, e.g. `f(2, "test")`. -Note, that Rust does not have a notion of optional function arguments or +Note that Rust does not have a notion of optional function arguments or variadic functions (except for its C-FFI). "##, @@ -1002,18 +1002,18 @@ operate on. This will cause an error: ```compile_fail -#![feature(simd)] +#![feature(repr_simd)] -#[simd] +#[repr(simd)] struct Bad; ``` This will not: ``` -#![feature(simd)] +#![feature(repr_simd)] -#[simd] +#[repr(simd)] struct Good(u32); ``` "##, @@ -1420,45 +1420,24 @@ fn main() { "##, E0102: r##" -You hit this error because the compiler lacks information to -determine a type for this variable. Erroneous code example: +You hit this error because the compiler lacks the information to +determine the type of this variable. Erroneous code example: ```compile_fail -fn demo(devil: fn () -> !) { - let x: &_ = devil(); - // error: cannot determine a type for this local variable -} - -fn oh_no() -> ! { panic!("the devil is in the details") } - fn main() { - demo(oh_no); + // could be an array of anything + let x = []; // error: cannot determine a type for this local variable } ``` To solve this situation, constrain the type of the variable. Examples: -```no_run +``` #![allow(unused_variables)] -fn some_func(x: &u32) { - // some code -} - -fn demo(devil: fn () -> !) 
{ - let x: &u32 = devil(); - // Here we defined the type at the variable creation - - let x: &_ = devil(); - some_func(x); - // Here, the type is determined by the function argument type -} - -fn oh_no() -> ! { panic!("the devil is in the details") } - fn main() { - demo(oh_no); + let x: [u8; 0] = []; } ``` "##, @@ -2717,7 +2696,7 @@ Rust does not currently support this. A simple example that causes this error: ```compile_fail fn main() { - let _: Box; + let _: Box; } ``` @@ -2727,7 +2706,7 @@ following compiles correctly: ``` fn main() { - let _: Box; + let _: Box; } ``` "##, @@ -3305,6 +3284,164 @@ impl Baz for Bar { } // Note: This is OK ``` "##, +E0374: r##" +A struct without a field containing an unsized type cannot implement +`CoerceUnsized`. An +[unsized type](https://doc.rust-lang.org/book/unsized-types.html) +is any type that the compiler doesn't know the length or alignment of at +compile time. Any struct containing an unsized type is also unsized. + +Example of erroneous code: + +```compile_fail +#![feature(coerce_unsized)] +use std::ops::CoerceUnsized; + +struct Foo { + a: i32, +} + +// error: Struct `Foo` has no unsized fields that need `CoerceUnsized`. +impl CoerceUnsized> for Foo + where T: CoerceUnsized {} +``` + +`CoerceUnsized` is used to coerce one struct containing an unsized type +into another struct containing a different unsized type. If the struct +doesn't have any fields of unsized types then you don't need explicit +coercion to get the types you want. To fix this you can either +not try to implement `CoerceUnsized` or you can add a field that is +unsized to the struct. + +Example: + +``` +#![feature(coerce_unsized)] +use std::ops::CoerceUnsized; + +// We don't need to impl `CoerceUnsized` here. +struct Foo { + a: i32, +} + +// We add the unsized type field to the struct. +struct Bar { + a: i32, + b: T, +} + +// The struct has an unsized field so we can implement +// `CoerceUnsized` for it. +impl CoerceUnsized> for Bar + where T: CoerceUnsized {} +``` + +Note that `CoerceUnsized` is mainly used by smart pointers like `Box`, `Rc` +and `Arc` to be able to mark that they can coerce unsized types that they +are pointing at. +"##, + +E0375: r##" +A struct with more than one field containing an unsized type cannot implement +`CoerceUnsized`. This only occurs when you are trying to coerce one of the +types in your struct to another type in the struct. In this case we try to +impl `CoerceUnsized` from `T` to `U` which are both types that the struct +takes. An [unsized type](https://doc.rust-lang.org/book/unsized-types.html) +is any type that the compiler doesn't know the length or alignment of at +compile time. Any struct containing an unsized type is also unsized. + +Example of erroneous code: + +```compile_fail +#![feature(coerce_unsized)] +use std::ops::CoerceUnsized; + +struct Foo { + a: i32, + b: T, + c: U, +} + +// error: Struct `Foo` has more than one unsized field. +impl CoerceUnsized> for Foo {} +``` + +`CoerceUnsized` only allows for coercion from a structure with a single +unsized type field to another struct with a single unsized type field. +In fact Rust only allows for a struct to have one unsized type in a struct +and that unsized type must be the last field in the struct. So having two +unsized types in a single struct is not allowed by the compiler. To fix this +use only one field containing an unsized type in the struct and then use +multiple structs to manage each unsized type field you need. 
+ +Example: + +``` +#![feature(coerce_unsized)] +use std::ops::CoerceUnsized; + +struct Foo { + a: i32, + b: T, +} + +impl CoerceUnsized> for Foo + where T: CoerceUnsized {} + +fn coerce_foo, U>(t: T) -> Foo { + Foo { a: 12i32, b: t } // we use coercion to get the `Foo` type we need +} +``` + +"##, + +E0376: r##" +The type you are trying to impl `CoerceUnsized` for is not a struct. +`CoerceUnsized` can only be implemented for a struct. Unsized types are +already able to be coerced without an implementation of `CoerceUnsized` +whereas a struct containing an unsized type needs to know the unsized type +field it's containing is able to be coerced. An +[unsized type](https://doc.rust-lang.org/book/unsized-types.html) +is any type that the compiler doesn't know the length or alignment of at +compile time. Any struct containing an unsized type is also unsized. + +Example of erroneous code: + +```compile_fail +#![feature(coerce_unsized)] +use std::ops::CoerceUnsized; + +struct Foo { + a: T, +} + +// error: The type `U` is not a struct +impl CoerceUnsized for Foo {} +``` + +The `CoerceUnsized` trait takes a struct type. Make sure the type you are +providing to `CoerceUnsized` is a struct with only the last field containing an +unsized type. + +Example: + +``` +#![feature(coerce_unsized)] +use std::ops::CoerceUnsized; + +struct Foo { + a: T, +} + +// The `Foo` is a struct so `CoerceUnsized` can be implemented +impl CoerceUnsized> for Foo where T: CoerceUnsized {} +``` + +Note that in Rust, structs can only contain an unsized type if the field +containing the unsized type is the last and only unsized type field in the +struct. +"##, + E0379: r##" Trait methods cannot be declared `const` by design. For more information, see [RFC 911]. @@ -3426,6 +3563,37 @@ parameters. You can read more about it in the API documentation: https://doc.rust-lang.org/std/marker/struct.PhantomData.html "##, +E0393: r##" +A type parameter which references `Self` in its default value was not specified. +Example of erroneous code: + +```compile_fail +trait A {} + +fn together_we_will_rule_the_galaxy(son: &A) {} +// error: the type parameter `T` must be explicitly specified in an +// object type because its default value `Self` references the +// type `Self` +``` + +A trait object is defined over a single, fully-defined trait. With a regular +default parameter, this parameter can just be substituted in. However, if the +default parameter is `Self`, the trait changes for each concrete type; i.e. +`i32` will be expected to implement `A`, `bool` will be expected to +implement `A`, etc... These types will not share an implementation of a +fully-defined trait; instead they share implementations of a trait with +different parameters substituted in for each implementation. This is +irreconcilable with what we need to make a trait object work, and is thus +disallowed. Making the trait concrete by explicitly specifying the value of the +defaulted parameter will fix this issue. Fixed example: + +``` +trait A {} + +fn together_we_will_rule_the_galaxy(son: &A) {} // Ok! +``` +"##, + E0439: r##" The length of the platform-intrinsic function `simd_shuffle` wasn't specified. Erroneous code example: @@ -3767,17 +3935,8 @@ register_diagnostics! 
{ E0320, // recursive overflow during dropck E0328, // cannot implement Unsize explicitly // E0372, // coherence not object safe - E0374, // the trait `CoerceUnsized` may only be implemented for a coercion - // between structures with one field being coerced, none found - E0375, // the trait `CoerceUnsized` may only be implemented for a coercion - // between structures with one field being coerced, but multiple - // fields need coercions - E0376, // the trait `CoerceUnsized` may only be implemented for a coercion - // between structures E0377, // the trait `CoerceUnsized` may only be implemented for a coercion // between structures with the same definition - E0393, // the type parameter `{}` must be explicitly specified in an object - // type because its default value `{}` references the type `Self`" E0399, // trait items need to be implemented because the associated // type `{}` was overridden E0436, // functional record update requires a struct diff --git a/src/librustc_typeck/lib.rs b/src/librustc_typeck/lib.rs index 7f27d10ce1..0b23951db3 100644 --- a/src/librustc_typeck/lib.rs +++ b/src/librustc_typeck/lib.rs @@ -104,7 +104,7 @@ pub use rustc::util; use dep_graph::DepNode; use hir::map as hir_map; use hir::def::Def; -use rustc::infer::{self, TypeOrigin}; +use rustc::infer::TypeOrigin; use rustc::ty::subst::Substs; use rustc::ty::{self, Ty, TyCtxt, TypeFoldable}; use rustc::traits::ProjectionMode; @@ -116,12 +116,14 @@ use syntax::ast; use syntax::abi::Abi; use std::cell::RefCell; +use util::nodemap::NodeMap; // NB: This module needs to be declared first so diagnostics are // registered before they are used. pub mod diagnostics; pub mod check; +pub mod check_unused; mod rscope; mod astconv; pub mod collect; @@ -135,24 +137,32 @@ pub struct TypeAndSubsts<'tcx> { } pub struct CrateCtxt<'a, 'tcx: 'a> { - // A mapping from method call sites to traits that have that method. + ast_ty_to_ty_cache: RefCell>>, + + /// A mapping from method call sites to traits that have that method. pub trait_map: hir::TraitMap, + /// A vector of every trait accessible in the whole crate /// (i.e. including those from subcrates). This is used only for /// error reporting, and so is lazily initialised and generally /// shouldn't taint the common path (hence the RefCell). pub all_traits: RefCell>, - pub tcx: &'a TyCtxt<'tcx>, + + /// This stack is used to identify cycles in the user's source. + /// Note that these cycles can cross multiple items. 
+ pub stack: RefCell>, + + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, } // Functions that write types into the node type table -fn write_ty_to_tcx<'tcx>(tcx: &TyCtxt<'tcx>, node_id: ast::NodeId, ty: Ty<'tcx>) { +fn write_ty_to_tcx<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, node_id: ast::NodeId, ty: Ty<'tcx>) { debug!("write_ty_to_tcx({}, {:?})", node_id, ty); assert!(!ty.needs_infer()); - tcx.node_type_insert(node_id, ty); + ccx.tcx.node_type_insert(node_id, ty); } -fn write_substs_to_tcx<'tcx>(tcx: &TyCtxt<'tcx>, +fn write_substs_to_tcx<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, node_id: ast::NodeId, item_substs: ty::ItemSubsts<'tcx>) { if !item_substs.is_noop() { @@ -162,11 +172,11 @@ fn write_substs_to_tcx<'tcx>(tcx: &TyCtxt<'tcx>, assert!(!item_substs.substs.types.needs_infer()); - tcx.tables.borrow_mut().item_substs.insert(node_id, item_substs); + ccx.tcx.tables.borrow_mut().item_substs.insert(node_id, item_substs); } } -fn lookup_full_def(tcx: &TyCtxt, sp: Span, id: ast::NodeId) -> Def { +fn lookup_full_def(tcx: TyCtxt, sp: Span, id: ast::NodeId) -> Def { match tcx.def_map.borrow().get(&id) { Some(x) => x.full_def(), None => { @@ -175,7 +185,7 @@ fn lookup_full_def(tcx: &TyCtxt, sp: Span, id: ast::NodeId) -> Def { } } -fn require_c_abi_if_variadic(tcx: &TyCtxt, +fn require_c_abi_if_variadic(tcx: TyCtxt, decl: &hir::FnDecl, abi: Abi, span: Span) { @@ -185,35 +195,33 @@ fn require_c_abi_if_variadic(tcx: &TyCtxt, } } -fn require_same_types<'a, 'tcx, M>(tcx: &TyCtxt<'tcx>, - maybe_infcx: Option<&infer::InferCtxt<'a, 'tcx>>, - t1_is_expected: bool, - span: Span, - t1: Ty<'tcx>, - t2: Ty<'tcx>, - msg: M) - -> bool where - M: FnOnce() -> String, -{ - let result = match maybe_infcx { - None => { - let infcx = infer::new_infer_ctxt(tcx, &tcx.tables, None, ProjectionMode::AnyFinal); - infer::mk_eqty(&infcx, t1_is_expected, TypeOrigin::Misc(span), t1, t2) - } - Some(infcx) => { - infer::mk_eqty(infcx, t1_is_expected, TypeOrigin::Misc(span), t1, t2) - } - }; +pub fn emit_type_err<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>, + span: Span, + found_ty: Ty<'tcx>, + expected_ty: Ty<'tcx>, + terr: &ty::error::TypeError<'tcx>, + msg: &str) { + let mut err = struct_span_err!(tcx.sess, span, E0211, "{}", msg); + err.span_label(span, &terr); + err.note_expected_found(&"type", &expected_ty, &found_ty); + tcx.note_and_explain_type_err(&mut err, terr, span); + err.emit(); +} - match result { - Ok(_) => true, - Err(ref terr) => { - let mut err = struct_span_err!(tcx.sess, span, E0211, "{}: {}", msg(), terr); - tcx.note_and_explain_type_err(&mut err, terr, span); - err.emit(); +fn require_same_types<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>, + span: Span, + t1: Ty<'tcx>, + t2: Ty<'tcx>, + msg: &str) + -> bool { + ccx.tcx.infer_ctxt(None, None, ProjectionMode::AnyFinal).enter(|infcx| { + if let Err(err) = infcx.eq_types(false, TypeOrigin::Misc(span), t1, t2) { + emit_type_err(infcx.tcx, span, t1, t2, &err, msg); false + } else { + true } - } + }) } fn check_main_fn_ty(ccx: &CrateCtxt, @@ -239,7 +247,8 @@ fn check_main_fn_ty(ccx: &CrateCtxt, } let main_def_id = tcx.map.local_def_id(main_id); let substs = tcx.mk_substs(Substs::empty()); - let se_ty = tcx.mk_fn_def(main_def_id, substs, ty::BareFnTy { + let se_ty = tcx.mk_fn_def(main_def_id, substs, + tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Normal, abi: Abi::Rust, sig: ty::Binder(ty::FnSig { @@ -247,13 +256,10 @@ fn check_main_fn_ty(ccx: &CrateCtxt, output: ty::FnConverging(tcx.mk_nil()), variadic: false }) - }); + })); - require_same_types(tcx, None, false, main_span, main_t, 
se_ty, - || { - format!("main function expects type: `{}`", - se_ty) - }); + require_same_types(ccx, main_span, main_t, se_ty, + "main function has wrong type"); } _ => { span_bug!(main_span, @@ -287,7 +293,8 @@ fn check_start_fn_ty(ccx: &CrateCtxt, let start_def_id = ccx.tcx.map.local_def_id(start_id); let substs = tcx.mk_substs(Substs::empty()); - let se_ty = tcx.mk_fn_def(start_def_id, substs, ty::BareFnTy { + let se_ty = tcx.mk_fn_def(start_def_id, substs, + tcx.mk_bare_fn(ty::BareFnTy { unsafety: hir::Unsafety::Normal, abi: Abi::Rust, sig: ty::Binder(ty::FnSig { @@ -298,14 +305,10 @@ fn check_start_fn_ty(ccx: &CrateCtxt, output: ty::FnConverging(tcx.types.isize), variadic: false, }), - }); - - require_same_types(tcx, None, false, start_span, start_t, se_ty, - || { - format!("start function expects type: `{}`", - se_ty) - }); + })); + require_same_types(ccx, start_span, start_t, se_ty, + "start function has wrong type"); } _ => { span_bug!(start_span, @@ -329,11 +332,15 @@ fn check_for_entry_fn(ccx: &CrateCtxt) { } } -pub fn check_crate(tcx: &TyCtxt, trait_map: hir::TraitMap) -> CompileResult { +pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, + trait_map: hir::TraitMap) + -> CompileResult { let time_passes = tcx.sess.time_passes(); let ccx = CrateCtxt { + ast_ty_to_ty_cache: RefCell::new(NodeMap()), trait_map: trait_map, all_traits: RefCell::new(None), + stack: RefCell::new(Vec::new()), tcx: tcx }; @@ -341,7 +348,7 @@ pub fn check_crate(tcx: &TyCtxt, trait_map: hir::TraitMap) -> CompileResult { // have valid types and not error tcx.sess.track_errors(|| { time(time_passes, "type collecting", || - collect::collect_item_types(tcx)); + collect::collect_item_types(&ccx)); })?; @@ -361,6 +368,7 @@ pub fn check_crate(tcx: &TyCtxt, trait_map: hir::TraitMap) -> CompileResult { time(time_passes, "drop-impl checking", || check::check_drop_impls(&ccx))?; + check_unused::check_crate(tcx); check_for_entry_fn(&ccx); let err_count = tcx.sess.err_count(); diff --git a/src/librustc_typeck/variance/constraints.rs b/src/librustc_typeck/variance/constraints.rs index 3b03a713a5..a532f9744f 100644 --- a/src/librustc_typeck/variance/constraints.rs +++ b/src/librustc_typeck/variance/constraints.rs @@ -127,7 +127,7 @@ fn is_lifetime(map: &hir_map::Map, param_id: ast::NodeId) -> bool { } impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { - fn tcx(&self) -> &'a TyCtxt<'tcx> { + fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { self.terms_cx.tcx } @@ -345,7 +345,7 @@ impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { self.add_constraints_from_mt(generics, mt, variance); } - ty::TyTuple(ref subtys) => { + ty::TyTuple(subtys) => { for &subty in subtys { self.add_constraints_from_ty(generics, subty, variance); } diff --git a/src/librustc_typeck/variance/mod.rs b/src/librustc_typeck/variance/mod.rs index ee9f317f20..13ed6cf764 100644 --- a/src/librustc_typeck/variance/mod.rs +++ b/src/librustc_typeck/variance/mod.rs @@ -27,7 +27,7 @@ mod solve; /// Code for transforming variances. 
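Regarding the `check_main_fn_ty` / `require_same_types` hunks earlier in this file: the rewritten check compares the declared entry point against the expected signature (no arguments, unit return, per the `FnConverging(tcx.mk_nil())` above) and now reports the plain message "main function has wrong type" via `emit_type_err`. A simplified user-level sketch of the two cases (illustrative only):

```rust
// Accepted: matches the zero-argument, unit-returning signature
// that check_main_fn_ty constructs.
fn main() {}

// Rejected with "main function has wrong type":
// fn main(args: Vec<String>) -> bool { args.is_empty() }
```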
mod xform; -pub fn infer_variance(tcx: &TyCtxt) { +pub fn infer_variance<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) { let mut arena = arena::TypedArena::new(); let terms_cx = terms::determine_parameters_to_be_inferred(tcx, &mut arena); let constraints_cx = constraints::add_constraints_from_crate(terms_cx); diff --git a/src/librustc_typeck/variance/terms.rs b/src/librustc_typeck/variance/terms.rs index 413dc83e63..d9e7e8cbf7 100644 --- a/src/librustc_typeck/variance/terms.rs +++ b/src/librustc_typeck/variance/terms.rs @@ -59,7 +59,7 @@ impl<'a> fmt::Debug for VarianceTerm<'a> { // The first pass over the crate simply builds up the set of inferreds. pub struct TermsContext<'a, 'tcx: 'a> { - pub tcx: &'a TyCtxt<'tcx>, + pub tcx: TyCtxt<'a, 'tcx, 'tcx>, pub arena: &'a TypedArena>, pub empty_variances: Rc, @@ -98,7 +98,7 @@ pub struct InferredInfo<'a> { } pub fn determine_parameters_to_be_inferred<'a, 'tcx>( - tcx: &'a TyCtxt<'tcx>, + tcx: TyCtxt<'a, 'tcx, 'tcx>, arena: &'a mut TypedArena>) -> TermsContext<'a, 'tcx> { @@ -125,7 +125,7 @@ pub fn determine_parameters_to_be_inferred<'a, 'tcx>( terms_cx } -fn lang_items(tcx: &TyCtxt) -> Vec<(ast::NodeId,Vec)> { +fn lang_items(tcx: TyCtxt) -> Vec<(ast::NodeId,Vec)> { let all = vec![ (tcx.lang_items.phantom_data(), vec![ty::Covariant]), (tcx.lang_items.unsafe_cell_type(), vec![ty::Invariant]), diff --git a/src/librustc_unicode/tables.rs b/src/librustc_unicode/tables.rs index ad17016eae..43e7c26fd7 100644 --- a/src/librustc_unicode/tables.rs +++ b/src/librustc_unicode/tables.rs @@ -16,1187 +16,1306 @@ /// that the unicode parts of `CharExt` and `UnicodeStrPrelude` traits are based on. pub const UNICODE_VERSION: (u64, u64, u64) = (8, 0, 0); -fn bsearch_range_table(c: char, r: &'static [(char, char)]) -> bool { - use core::cmp::Ordering::{Equal, Less, Greater}; - r.binary_search_by(|&(lo, hi)| { - if c < lo { - Greater - } else if hi < c { - Less - } else { - Equal - } - }) - .is_ok() + +// BoolTrie is a trie for representing a set of Unicode codepoints. It is +// implemented with postfix compression (sharing of identical child nodes), +// which gives both compact size and fast lookup. +// +// The space of Unicode codepoints is divided into 3 subareas, each +// represented by a trie with different depth. In the first (0..0x800), there +// is no trie structure at all; each u64 entry corresponds to a bitvector +// effectively holding 64 bool values. +// +// In the second (0x800..0x10000), each child of the root node represents a +// 64-wide subrange, but instead of storing the full 64-bit value of the leaf, +// the trie stores an 8-bit index into a shared table of leaf values. This +// exploits the fact that in reasonable sets, many such leaves can be shared. +// +// In the third (0x10000..0x110000), each child of the root node represents a +// 4096-wide subrange, and the trie stores an 8-bit index into a 64-byte slice +// of a child tree. Each of these 64 bytes represents an index into the table +// of shared 64-bit leaf values. This exploits the sparse structure in the +// non-BMP range of most Unicode sets. 
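A worked sketch of the index arithmetic this comment describes, one codepoint per area; it is standalone, uses arbitrarily chosen characters, and only prints which table slots `trie_lookup_range_table` would consult:

```rust
// Worked example (not part of the patch) of the three-level BoolTrie layout:
// area 1 indexes r1 directly, area 2 goes r2 -> r3, area 3 goes r4 -> r5 -> r6.
fn main() {
    for &ch in &['é', '中', '𐍈'] {
        let c = ch as usize;
        if c < 0x800 {
            // area 1: word r1[c >> 6], bit c & 63
            println!("U+{:04X}: r1[{}], bit {}", c, c >> 6, c & 63);
        } else if c < 0x10000 {
            // area 2: r2 picks a shared leaf index, r3 holds the 64-bit leaf
            println!("U+{:04X}: r3[r2[{}]], bit {}", c, (c >> 6) - 0x20, c & 63);
        } else {
            // area 3: r4 selects a 64-byte chunk of r5, which indexes an r6 leaf
            println!("U+{:04X}: r6[r5[(r4[{}] << 6) + {}]], bit {}",
                     c, (c >> 12) - 0x10, (c >> 6) & 0x3f, c & 63);
        }
    }
}
```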
+pub struct BoolTrie { + // 0..0x800 (corresponding to 1 and 2 byte utf-8 sequences) + r1: [u64; 32], // leaves + + // 0x800..0x10000 (corresponding to 3 byte utf-8 sequences) + r2: [u8; 992], // first level + r3: &'static [u64], // leaves + + // 0x10000..0x110000 (corresponding to 4 byte utf-8 sequences) + r4: [u8; 256], // first level + r5: &'static [u8], // second level + r6: &'static [u64], // leaves +} + +fn trie_range_leaf(c: usize, bitmap_chunk: u64) -> bool { + ((bitmap_chunk >> (c & 63)) & 1) != 0 +} + +fn trie_lookup_range_table(c: char, r: &'static BoolTrie) -> bool { + let c = c as usize; + if c < 0x800 { + trie_range_leaf(c, r.r1[c >> 6]) + } else if c < 0x10000 { + let child = r.r2[(c >> 6) - 0x20]; + trie_range_leaf(c, r.r3[child as usize]) + } else { + let child = r.r4[(c >> 12) - 0x10]; + let leaf = r.r5[((child as usize) << 6) + ((c >> 6) & 0x3f)]; + trie_range_leaf(c, r.r6[leaf as usize]) + } } pub mod general_category { - pub const Cc_table: &'static [(char, char)] = &[ - ('\0', '\u{1f}'), ('\u{7f}', '\u{9f}') - ]; + pub const Cc_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x00000000ffffffff, 0x8000000000000000, 0x00000000ffffffff, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000 + ], + r2: [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0 + ], + r3: &[ + 0x0000000000000000 + ], + r4: [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ], + r5: &[ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0 + ], + r6: &[ + 0x0000000000000000 + ], + }; pub fn Cc(c: char) -> bool { - super::bsearch_range_table(c, Cc_table) + super::trie_lookup_range_table(c, Cc_table) } - pub const N_table: &'static [(char, char)] = &[ - ('\u{30}', '\u{39}'), ('\u{660}', '\u{669}'), ('\u{6f0}', '\u{6f9}'), ('\u{7c0}', - '\u{7c9}'), ('\u{966}', '\u{96f}'), ('\u{9e6}', '\u{9ef}'), ('\u{a66}', '\u{a6f}'), - ('\u{ae6}', '\u{aef}'), ('\u{b66}', '\u{b6f}'), ('\u{be6}', '\u{bef}'), ('\u{c66}', - '\u{c6f}'), ('\u{ce6}', '\u{cef}'), ('\u{d66}', '\u{d6f}'), ('\u{de6}', '\u{def}'), - ('\u{e50}', '\u{e59}'), ('\u{ed0}', '\u{ed9}'), ('\u{f20}', '\u{f29}'), ('\u{1040}', - '\u{1049}'), ('\u{1090}', '\u{1099}'), ('\u{16ee}', '\u{16f0}'), ('\u{17e0}', '\u{17e9}'), - ('\u{1810}', '\u{1819}'), ('\u{1946}', '\u{194f}'), ('\u{19d0}', '\u{19d9}'), ('\u{1a80}', - '\u{1a89}'), ('\u{1a90}', '\u{1a99}'), ('\u{1b50}', '\u{1b59}'), ('\u{1bb0}', '\u{1bb9}'), - ('\u{1c40}', '\u{1c49}'), ('\u{1c50}', '\u{1c59}'), ('\u{2160}', '\u{2182}'), ('\u{2185}', - '\u{2188}'), ('\u{3007}', '\u{3007}'), ('\u{3021}', '\u{3029}'), ('\u{3038}', '\u{303a}'), - ('\u{a620}', '\u{a629}'), ('\u{a6e6}', '\u{a6ef}'), ('\u{a8d0}', '\u{a8d9}'), ('\u{a900}', - '\u{a909}'), 
('\u{a9d0}', '\u{a9d9}'), ('\u{a9f0}', '\u{a9f9}'), ('\u{aa50}', '\u{aa59}'), - ('\u{abf0}', '\u{abf9}'), ('\u{ff10}', '\u{ff19}'), ('\u{10140}', '\u{10174}'), - ('\u{10341}', '\u{10341}'), ('\u{1034a}', '\u{1034a}'), ('\u{103d1}', '\u{103d5}'), - ('\u{104a0}', '\u{104a9}'), ('\u{11066}', '\u{1106f}'), ('\u{110f0}', '\u{110f9}'), - ('\u{11136}', '\u{1113f}'), ('\u{111d0}', '\u{111d9}'), ('\u{112f0}', '\u{112f9}'), - ('\u{114d0}', '\u{114d9}'), ('\u{11650}', '\u{11659}'), ('\u{116c0}', '\u{116c9}'), - ('\u{11730}', '\u{11739}'), ('\u{118e0}', '\u{118e9}'), ('\u{12400}', '\u{1246e}'), - ('\u{16a60}', '\u{16a69}'), ('\u{16b50}', '\u{16b59}'), ('\u{1d7ce}', '\u{1d7ff}') - ]; + pub const N_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x03ff000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x000003ff00000000, 0x0000000000000000, 0x03ff000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x00000000000003ff + ], + r2: [ + 0, 0, 0, 0, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 1, 0, 2, 0, 2, 3, + 0, 0, 0, 0, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 5, 0, 0, 0, 3, 2, 0, 0, 0, 0, 6, 0, 2, 0, 0, 7, 0, 0, 2, 8, 0, 0, 7, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 1, 0, 0, + 0, 0, 0, 0, 0, 2, 4, 0, 0, 12, 0, 2, 0, 0, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 2, 0, 0, 0 + ], + r3: &[ + 0x0000000000000000, 0x0000ffc000000000, 0x0000000003ff0000, 0x000003ff00000000, + 0x00000000000003ff, 0x0001c00000000000, 0x000000000000ffc0, 0x0000000003ff03ff, + 0x03ff000000000000, 0xffffffff00000000, 0x00000000000001e7, 0x070003fe00000080, + 0x03ff000003ff0000 + ], + r4: [ + 0, 1, 2, 3, 3, 3, 4, 3, 3, 3, 3, 3, 3, 5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3 + ], + r5: &[ + 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 2, 0, 3, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 5, 0, 6, 7, 0, 0, 8, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 8, 0, 0, 0, + 0, 0, 8, 0, 9, 6, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, + 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0 + ], + r6: &[ + 0x0000000000000000, 0x001fffffffffffff, 0x0000000000000402, 0x00000000003e0000, + 0x000003ff00000000, 0x0000ffc000000000, 0x03ff000000000000, 0xffc0000000000000, + 0x0000000003ff0000, 0x00000000000003ff, 0xffffffffffffffff, 0x00007fffffffffff, + 0xffffffffffffc000 + ], + }; pub fn N(c: char) -> bool { - super::bsearch_range_table(c, N_table) + 
super::trie_lookup_range_table(c, N_table) } } pub mod derived_property { - pub const Alphabetic_table: &'static [(char, char)] = &[ - ('\u{41}', '\u{5a}'), ('\u{61}', '\u{7a}'), ('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'), - ('\u{ba}', '\u{ba}'), ('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{2c1}'), - ('\u{2c6}', '\u{2d1}'), ('\u{2e0}', '\u{2e4}'), ('\u{2ec}', '\u{2ec}'), ('\u{2ee}', - '\u{2ee}'), ('\u{345}', '\u{345}'), ('\u{370}', '\u{374}'), ('\u{376}', '\u{377}'), - ('\u{37a}', '\u{37d}'), ('\u{37f}', '\u{37f}'), ('\u{386}', '\u{386}'), ('\u{388}', - '\u{38a}'), ('\u{38c}', '\u{38c}'), ('\u{38e}', '\u{3a1}'), ('\u{3a3}', '\u{3f5}'), - ('\u{3f7}', '\u{481}'), ('\u{48a}', '\u{52f}'), ('\u{531}', '\u{556}'), ('\u{559}', - '\u{559}'), ('\u{561}', '\u{587}'), ('\u{5b0}', '\u{5bd}'), ('\u{5bf}', '\u{5bf}'), - ('\u{5c1}', '\u{5c2}'), ('\u{5c4}', '\u{5c5}'), ('\u{5c7}', '\u{5c7}'), ('\u{5d0}', - '\u{5ea}'), ('\u{5f0}', '\u{5f2}'), ('\u{610}', '\u{61a}'), ('\u{620}', '\u{657}'), - ('\u{659}', '\u{65f}'), ('\u{66e}', '\u{6d3}'), ('\u{6d5}', '\u{6dc}'), ('\u{6e1}', - '\u{6e8}'), ('\u{6ed}', '\u{6ef}'), ('\u{6fa}', '\u{6fc}'), ('\u{6ff}', '\u{6ff}'), - ('\u{710}', '\u{73f}'), ('\u{74d}', '\u{7b1}'), ('\u{7ca}', '\u{7ea}'), ('\u{7f4}', - '\u{7f5}'), ('\u{7fa}', '\u{7fa}'), ('\u{800}', '\u{817}'), ('\u{81a}', '\u{82c}'), - ('\u{840}', '\u{858}'), ('\u{8a0}', '\u{8b4}'), ('\u{8e3}', '\u{8e9}'), ('\u{8f0}', - '\u{93b}'), ('\u{93d}', '\u{94c}'), ('\u{94e}', '\u{950}'), ('\u{955}', '\u{963}'), - ('\u{971}', '\u{983}'), ('\u{985}', '\u{98c}'), ('\u{98f}', '\u{990}'), ('\u{993}', - '\u{9a8}'), ('\u{9aa}', '\u{9b0}'), ('\u{9b2}', '\u{9b2}'), ('\u{9b6}', '\u{9b9}'), - ('\u{9bd}', '\u{9c4}'), ('\u{9c7}', '\u{9c8}'), ('\u{9cb}', '\u{9cc}'), ('\u{9ce}', - '\u{9ce}'), ('\u{9d7}', '\u{9d7}'), ('\u{9dc}', '\u{9dd}'), ('\u{9df}', '\u{9e3}'), - ('\u{9f0}', '\u{9f1}'), ('\u{a01}', '\u{a03}'), ('\u{a05}', '\u{a0a}'), ('\u{a0f}', - '\u{a10}'), ('\u{a13}', '\u{a28}'), ('\u{a2a}', '\u{a30}'), ('\u{a32}', '\u{a33}'), - ('\u{a35}', '\u{a36}'), ('\u{a38}', '\u{a39}'), ('\u{a3e}', '\u{a42}'), ('\u{a47}', - '\u{a48}'), ('\u{a4b}', '\u{a4c}'), ('\u{a51}', '\u{a51}'), ('\u{a59}', '\u{a5c}'), - ('\u{a5e}', '\u{a5e}'), ('\u{a70}', '\u{a75}'), ('\u{a81}', '\u{a83}'), ('\u{a85}', - '\u{a8d}'), ('\u{a8f}', '\u{a91}'), ('\u{a93}', '\u{aa8}'), ('\u{aaa}', '\u{ab0}'), - ('\u{ab2}', '\u{ab3}'), ('\u{ab5}', '\u{ab9}'), ('\u{abd}', '\u{ac5}'), ('\u{ac7}', - '\u{ac9}'), ('\u{acb}', '\u{acc}'), ('\u{ad0}', '\u{ad0}'), ('\u{ae0}', '\u{ae3}'), - ('\u{af9}', '\u{af9}'), ('\u{b01}', '\u{b03}'), ('\u{b05}', '\u{b0c}'), ('\u{b0f}', - '\u{b10}'), ('\u{b13}', '\u{b28}'), ('\u{b2a}', '\u{b30}'), ('\u{b32}', '\u{b33}'), - ('\u{b35}', '\u{b39}'), ('\u{b3d}', '\u{b44}'), ('\u{b47}', '\u{b48}'), ('\u{b4b}', - '\u{b4c}'), ('\u{b56}', '\u{b57}'), ('\u{b5c}', '\u{b5d}'), ('\u{b5f}', '\u{b63}'), - ('\u{b71}', '\u{b71}'), ('\u{b82}', '\u{b83}'), ('\u{b85}', '\u{b8a}'), ('\u{b8e}', - '\u{b90}'), ('\u{b92}', '\u{b95}'), ('\u{b99}', '\u{b9a}'), ('\u{b9c}', '\u{b9c}'), - ('\u{b9e}', '\u{b9f}'), ('\u{ba3}', '\u{ba4}'), ('\u{ba8}', '\u{baa}'), ('\u{bae}', - '\u{bb9}'), ('\u{bbe}', '\u{bc2}'), ('\u{bc6}', '\u{bc8}'), ('\u{bca}', '\u{bcc}'), - ('\u{bd0}', '\u{bd0}'), ('\u{bd7}', '\u{bd7}'), ('\u{c00}', '\u{c03}'), ('\u{c05}', - '\u{c0c}'), ('\u{c0e}', '\u{c10}'), ('\u{c12}', '\u{c28}'), ('\u{c2a}', '\u{c39}'), - ('\u{c3d}', '\u{c44}'), ('\u{c46}', '\u{c48}'), ('\u{c4a}', '\u{c4c}'), ('\u{c55}', - '\u{c56}'), ('\u{c58}', '\u{c5a}'), ('\u{c60}', 
'\u{c63}'), ('\u{c81}', '\u{c83}'), - ('\u{c85}', '\u{c8c}'), ('\u{c8e}', '\u{c90}'), ('\u{c92}', '\u{ca8}'), ('\u{caa}', - '\u{cb3}'), ('\u{cb5}', '\u{cb9}'), ('\u{cbd}', '\u{cc4}'), ('\u{cc6}', '\u{cc8}'), - ('\u{cca}', '\u{ccc}'), ('\u{cd5}', '\u{cd6}'), ('\u{cde}', '\u{cde}'), ('\u{ce0}', - '\u{ce3}'), ('\u{cf1}', '\u{cf2}'), ('\u{d01}', '\u{d03}'), ('\u{d05}', '\u{d0c}'), - ('\u{d0e}', '\u{d10}'), ('\u{d12}', '\u{d3a}'), ('\u{d3d}', '\u{d44}'), ('\u{d46}', - '\u{d48}'), ('\u{d4a}', '\u{d4c}'), ('\u{d4e}', '\u{d4e}'), ('\u{d57}', '\u{d57}'), - ('\u{d5f}', '\u{d63}'), ('\u{d7a}', '\u{d7f}'), ('\u{d82}', '\u{d83}'), ('\u{d85}', - '\u{d96}'), ('\u{d9a}', '\u{db1}'), ('\u{db3}', '\u{dbb}'), ('\u{dbd}', '\u{dbd}'), - ('\u{dc0}', '\u{dc6}'), ('\u{dcf}', '\u{dd4}'), ('\u{dd6}', '\u{dd6}'), ('\u{dd8}', - '\u{ddf}'), ('\u{df2}', '\u{df3}'), ('\u{e01}', '\u{e3a}'), ('\u{e40}', '\u{e46}'), - ('\u{e4d}', '\u{e4d}'), ('\u{e81}', '\u{e82}'), ('\u{e84}', '\u{e84}'), ('\u{e87}', - '\u{e88}'), ('\u{e8a}', '\u{e8a}'), ('\u{e8d}', '\u{e8d}'), ('\u{e94}', '\u{e97}'), - ('\u{e99}', '\u{e9f}'), ('\u{ea1}', '\u{ea3}'), ('\u{ea5}', '\u{ea5}'), ('\u{ea7}', - '\u{ea7}'), ('\u{eaa}', '\u{eab}'), ('\u{ead}', '\u{eb9}'), ('\u{ebb}', '\u{ebd}'), - ('\u{ec0}', '\u{ec4}'), ('\u{ec6}', '\u{ec6}'), ('\u{ecd}', '\u{ecd}'), ('\u{edc}', - '\u{edf}'), ('\u{f00}', '\u{f00}'), ('\u{f40}', '\u{f47}'), ('\u{f49}', '\u{f6c}'), - ('\u{f71}', '\u{f81}'), ('\u{f88}', '\u{f97}'), ('\u{f99}', '\u{fbc}'), ('\u{1000}', - '\u{1036}'), ('\u{1038}', '\u{1038}'), ('\u{103b}', '\u{103f}'), ('\u{1050}', '\u{1062}'), - ('\u{1065}', '\u{1068}'), ('\u{106e}', '\u{1086}'), ('\u{108e}', '\u{108e}'), ('\u{109c}', - '\u{109d}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'), ('\u{10cd}', '\u{10cd}'), - ('\u{10d0}', '\u{10fa}'), ('\u{10fc}', '\u{1248}'), ('\u{124a}', '\u{124d}'), ('\u{1250}', - '\u{1256}'), ('\u{1258}', '\u{1258}'), ('\u{125a}', '\u{125d}'), ('\u{1260}', '\u{1288}'), - ('\u{128a}', '\u{128d}'), ('\u{1290}', '\u{12b0}'), ('\u{12b2}', '\u{12b5}'), ('\u{12b8}', - '\u{12be}'), ('\u{12c0}', '\u{12c0}'), ('\u{12c2}', '\u{12c5}'), ('\u{12c8}', '\u{12d6}'), - ('\u{12d8}', '\u{1310}'), ('\u{1312}', '\u{1315}'), ('\u{1318}', '\u{135a}'), ('\u{135f}', - '\u{135f}'), ('\u{1380}', '\u{138f}'), ('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), - ('\u{1401}', '\u{166c}'), ('\u{166f}', '\u{167f}'), ('\u{1681}', '\u{169a}'), ('\u{16a0}', - '\u{16ea}'), ('\u{16ee}', '\u{16f8}'), ('\u{1700}', '\u{170c}'), ('\u{170e}', '\u{1713}'), - ('\u{1720}', '\u{1733}'), ('\u{1740}', '\u{1753}'), ('\u{1760}', '\u{176c}'), ('\u{176e}', - '\u{1770}'), ('\u{1772}', '\u{1773}'), ('\u{1780}', '\u{17b3}'), ('\u{17b6}', '\u{17c8}'), - ('\u{17d7}', '\u{17d7}'), ('\u{17dc}', '\u{17dc}'), ('\u{1820}', '\u{1877}'), ('\u{1880}', - '\u{18aa}'), ('\u{18b0}', '\u{18f5}'), ('\u{1900}', '\u{191e}'), ('\u{1920}', '\u{192b}'), - ('\u{1930}', '\u{1938}'), ('\u{1950}', '\u{196d}'), ('\u{1970}', '\u{1974}'), ('\u{1980}', - '\u{19ab}'), ('\u{19b0}', '\u{19c9}'), ('\u{1a00}', '\u{1a1b}'), ('\u{1a20}', '\u{1a5e}'), - ('\u{1a61}', '\u{1a74}'), ('\u{1aa7}', '\u{1aa7}'), ('\u{1b00}', '\u{1b33}'), ('\u{1b35}', - '\u{1b43}'), ('\u{1b45}', '\u{1b4b}'), ('\u{1b80}', '\u{1ba9}'), ('\u{1bac}', '\u{1baf}'), - ('\u{1bba}', '\u{1be5}'), ('\u{1be7}', '\u{1bf1}'), ('\u{1c00}', '\u{1c35}'), ('\u{1c4d}', - '\u{1c4f}'), ('\u{1c5a}', '\u{1c7d}'), ('\u{1ce9}', '\u{1cec}'), ('\u{1cee}', '\u{1cf3}'), - ('\u{1cf5}', '\u{1cf6}'), ('\u{1d00}', '\u{1dbf}'), ('\u{1de7}', '\u{1df4}'), ('\u{1e00}', - 
'\u{1f15}'), ('\u{1f18}', '\u{1f1d}'), ('\u{1f20}', '\u{1f45}'), ('\u{1f48}', '\u{1f4d}'), - ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', '\u{1f59}'), ('\u{1f5b}', '\u{1f5b}'), ('\u{1f5d}', - '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}', '\u{1fb4}'), ('\u{1fb6}', '\u{1fbc}'), - ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'), ('\u{1fc6}', '\u{1fcc}'), ('\u{1fd0}', - '\u{1fd3}'), ('\u{1fd6}', '\u{1fdb}'), ('\u{1fe0}', '\u{1fec}'), ('\u{1ff2}', '\u{1ff4}'), - ('\u{1ff6}', '\u{1ffc}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'), ('\u{2090}', - '\u{209c}'), ('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), ('\u{210a}', '\u{2113}'), - ('\u{2115}', '\u{2115}'), ('\u{2119}', '\u{211d}'), ('\u{2124}', '\u{2124}'), ('\u{2126}', - '\u{2126}'), ('\u{2128}', '\u{2128}'), ('\u{212a}', '\u{212d}'), ('\u{212f}', '\u{2139}'), - ('\u{213c}', '\u{213f}'), ('\u{2145}', '\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2160}', - '\u{2188}'), ('\u{24b6}', '\u{24e9}'), ('\u{2c00}', '\u{2c2e}'), ('\u{2c30}', '\u{2c5e}'), - ('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}', '\u{2cee}'), ('\u{2cf2}', '\u{2cf3}'), ('\u{2d00}', - '\u{2d25}'), ('\u{2d27}', '\u{2d27}'), ('\u{2d2d}', '\u{2d2d}'), ('\u{2d30}', '\u{2d67}'), - ('\u{2d6f}', '\u{2d6f}'), ('\u{2d80}', '\u{2d96}'), ('\u{2da0}', '\u{2da6}'), ('\u{2da8}', - '\u{2dae}'), ('\u{2db0}', '\u{2db6}'), ('\u{2db8}', '\u{2dbe}'), ('\u{2dc0}', '\u{2dc6}'), - ('\u{2dc8}', '\u{2dce}'), ('\u{2dd0}', '\u{2dd6}'), ('\u{2dd8}', '\u{2dde}'), ('\u{2de0}', - '\u{2dff}'), ('\u{2e2f}', '\u{2e2f}'), ('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{3029}'), - ('\u{3031}', '\u{3035}'), ('\u{3038}', '\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{309d}', - '\u{309f}'), ('\u{30a1}', '\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312d}'), - ('\u{3131}', '\u{318e}'), ('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}', - '\u{4db5}'), ('\u{4e00}', '\u{9fd5}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'), - ('\u{a500}', '\u{a60c}'), ('\u{a610}', '\u{a61f}'), ('\u{a62a}', '\u{a62b}'), ('\u{a640}', - '\u{a66e}'), ('\u{a674}', '\u{a67b}'), ('\u{a67f}', '\u{a6ef}'), ('\u{a717}', '\u{a71f}'), - ('\u{a722}', '\u{a788}'), ('\u{a78b}', '\u{a7ad}'), ('\u{a7b0}', '\u{a7b7}'), ('\u{a7f7}', - '\u{a801}'), ('\u{a803}', '\u{a805}'), ('\u{a807}', '\u{a80a}'), ('\u{a80c}', '\u{a827}'), - ('\u{a840}', '\u{a873}'), ('\u{a880}', '\u{a8c3}'), ('\u{a8f2}', '\u{a8f7}'), ('\u{a8fb}', - '\u{a8fb}'), ('\u{a8fd}', '\u{a8fd}'), ('\u{a90a}', '\u{a92a}'), ('\u{a930}', '\u{a952}'), - ('\u{a960}', '\u{a97c}'), ('\u{a980}', '\u{a9b2}'), ('\u{a9b4}', '\u{a9bf}'), ('\u{a9cf}', - '\u{a9cf}'), ('\u{a9e0}', '\u{a9e4}'), ('\u{a9e6}', '\u{a9ef}'), ('\u{a9fa}', '\u{a9fe}'), - ('\u{aa00}', '\u{aa36}'), ('\u{aa40}', '\u{aa4d}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}', - '\u{aa7a}'), ('\u{aa7e}', '\u{aabe}'), ('\u{aac0}', '\u{aac0}'), ('\u{aac2}', '\u{aac2}'), - ('\u{aadb}', '\u{aadd}'), ('\u{aae0}', '\u{aaef}'), ('\u{aaf2}', '\u{aaf5}'), ('\u{ab01}', - '\u{ab06}'), ('\u{ab09}', '\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), - ('\u{ab28}', '\u{ab2e}'), ('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab65}'), ('\u{ab70}', - '\u{abea}'), ('\u{ac00}', '\u{d7a3}'), ('\u{d7b0}', '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), - ('\u{f900}', '\u{fa6d}'), ('\u{fa70}', '\u{fad9}'), ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', - '\u{fb17}'), ('\u{fb1d}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'), ('\u{fb38}', '\u{fb3c}'), - ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}', '\u{fb44}'), ('\u{fb46}', - '\u{fbb1}'), 
('\u{fbd3}', '\u{fd3d}'), ('\u{fd50}', '\u{fd8f}'), ('\u{fd92}', '\u{fdc7}'), - ('\u{fdf0}', '\u{fdfb}'), ('\u{fe70}', '\u{fe74}'), ('\u{fe76}', '\u{fefc}'), ('\u{ff21}', - '\u{ff3a}'), ('\u{ff41}', '\u{ff5a}'), ('\u{ff66}', '\u{ffbe}'), ('\u{ffc2}', '\u{ffc7}'), - ('\u{ffca}', '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'), ('\u{10000}', - '\u{1000b}'), ('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'), ('\u{1003c}', - '\u{1003d}'), ('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'), ('\u{10080}', - '\u{100fa}'), ('\u{10140}', '\u{10174}'), ('\u{10280}', '\u{1029c}'), ('\u{102a0}', - '\u{102d0}'), ('\u{10300}', '\u{1031f}'), ('\u{10330}', '\u{1034a}'), ('\u{10350}', - '\u{1037a}'), ('\u{10380}', '\u{1039d}'), ('\u{103a0}', '\u{103c3}'), ('\u{103c8}', - '\u{103cf}'), ('\u{103d1}', '\u{103d5}'), ('\u{10400}', '\u{1049d}'), ('\u{10500}', - '\u{10527}'), ('\u{10530}', '\u{10563}'), ('\u{10600}', '\u{10736}'), ('\u{10740}', - '\u{10755}'), ('\u{10760}', '\u{10767}'), ('\u{10800}', '\u{10805}'), ('\u{10808}', - '\u{10808}'), ('\u{1080a}', '\u{10835}'), ('\u{10837}', '\u{10838}'), ('\u{1083c}', - '\u{1083c}'), ('\u{1083f}', '\u{10855}'), ('\u{10860}', '\u{10876}'), ('\u{10880}', - '\u{1089e}'), ('\u{108e0}', '\u{108f2}'), ('\u{108f4}', '\u{108f5}'), ('\u{10900}', - '\u{10915}'), ('\u{10920}', '\u{10939}'), ('\u{10980}', '\u{109b7}'), ('\u{109be}', - '\u{109bf}'), ('\u{10a00}', '\u{10a03}'), ('\u{10a05}', '\u{10a06}'), ('\u{10a0c}', - '\u{10a13}'), ('\u{10a15}', '\u{10a17}'), ('\u{10a19}', '\u{10a33}'), ('\u{10a60}', - '\u{10a7c}'), ('\u{10a80}', '\u{10a9c}'), ('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}', - '\u{10ae4}'), ('\u{10b00}', '\u{10b35}'), ('\u{10b40}', '\u{10b55}'), ('\u{10b60}', - '\u{10b72}'), ('\u{10b80}', '\u{10b91}'), ('\u{10c00}', '\u{10c48}'), ('\u{10c80}', - '\u{10cb2}'), ('\u{10cc0}', '\u{10cf2}'), ('\u{11000}', '\u{11045}'), ('\u{11082}', - '\u{110b8}'), ('\u{110d0}', '\u{110e8}'), ('\u{11100}', '\u{11132}'), ('\u{11150}', - '\u{11172}'), ('\u{11176}', '\u{11176}'), ('\u{11180}', '\u{111bf}'), ('\u{111c1}', - '\u{111c4}'), ('\u{111da}', '\u{111da}'), ('\u{111dc}', '\u{111dc}'), ('\u{11200}', - '\u{11211}'), ('\u{11213}', '\u{11234}'), ('\u{11237}', '\u{11237}'), ('\u{11280}', - '\u{11286}'), ('\u{11288}', '\u{11288}'), ('\u{1128a}', '\u{1128d}'), ('\u{1128f}', - '\u{1129d}'), ('\u{1129f}', '\u{112a8}'), ('\u{112b0}', '\u{112e8}'), ('\u{11300}', - '\u{11303}'), ('\u{11305}', '\u{1130c}'), ('\u{1130f}', '\u{11310}'), ('\u{11313}', - '\u{11328}'), ('\u{1132a}', '\u{11330}'), ('\u{11332}', '\u{11333}'), ('\u{11335}', - '\u{11339}'), ('\u{1133d}', '\u{11344}'), ('\u{11347}', '\u{11348}'), ('\u{1134b}', - '\u{1134c}'), ('\u{11350}', '\u{11350}'), ('\u{11357}', '\u{11357}'), ('\u{1135d}', - '\u{11363}'), ('\u{11480}', '\u{114c1}'), ('\u{114c4}', '\u{114c5}'), ('\u{114c7}', - '\u{114c7}'), ('\u{11580}', '\u{115b5}'), ('\u{115b8}', '\u{115be}'), ('\u{115d8}', - '\u{115dd}'), ('\u{11600}', '\u{1163e}'), ('\u{11640}', '\u{11640}'), ('\u{11644}', - '\u{11644}'), ('\u{11680}', '\u{116b5}'), ('\u{11700}', '\u{11719}'), ('\u{1171d}', - '\u{1172a}'), ('\u{118a0}', '\u{118df}'), ('\u{118ff}', '\u{118ff}'), ('\u{11ac0}', - '\u{11af8}'), ('\u{12000}', '\u{12399}'), ('\u{12400}', '\u{1246e}'), ('\u{12480}', - '\u{12543}'), ('\u{13000}', '\u{1342e}'), ('\u{14400}', '\u{14646}'), ('\u{16800}', - '\u{16a38}'), ('\u{16a40}', '\u{16a5e}'), ('\u{16ad0}', '\u{16aed}'), ('\u{16b00}', - '\u{16b36}'), ('\u{16b40}', '\u{16b43}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', - 
'\u{16b8f}'), ('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f7e}'), ('\u{16f93}', - '\u{16f9f}'), ('\u{1b000}', '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', - '\u{1bc7c}'), ('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'), ('\u{1bc9e}', - '\u{1bc9e}'), ('\u{1d400}', '\u{1d454}'), ('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', - '\u{1d49f}'), ('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', - '\u{1d4ac}'), ('\u{1d4ae}', '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', - '\u{1d4c3}'), ('\u{1d4c5}', '\u{1d505}'), ('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', - '\u{1d514}'), ('\u{1d516}', '\u{1d51c}'), ('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', - '\u{1d53e}'), ('\u{1d540}', '\u{1d544}'), ('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', - '\u{1d550}'), ('\u{1d552}', '\u{1d6a5}'), ('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', - '\u{1d6da}'), ('\u{1d6dc}', '\u{1d6fa}'), ('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', - '\u{1d734}'), ('\u{1d736}', '\u{1d74e}'), ('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', - '\u{1d788}'), ('\u{1d78a}', '\u{1d7a8}'), ('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', - '\u{1d7cb}'), ('\u{1e800}', '\u{1e8c4}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}', - '\u{1ee1f}'), ('\u{1ee21}', '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}', - '\u{1ee27}'), ('\u{1ee29}', '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}', - '\u{1ee39}'), ('\u{1ee3b}', '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}', - '\u{1ee47}'), ('\u{1ee49}', '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}', - '\u{1ee4f}'), ('\u{1ee51}', '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}', - '\u{1ee57}'), ('\u{1ee59}', '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}', - '\u{1ee5d}'), ('\u{1ee5f}', '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}', - '\u{1ee64}'), ('\u{1ee67}', '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}', - '\u{1ee77}'), ('\u{1ee79}', '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}', - '\u{1ee89}'), ('\u{1ee8b}', '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}', - '\u{1eea9}'), ('\u{1eeab}', '\u{1eebb}'), ('\u{1f130}', '\u{1f149}'), ('\u{1f150}', - '\u{1f169}'), ('\u{1f170}', '\u{1f189}'), ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}', - '\u{2b734}'), ('\u{2b740}', '\u{2b81d}'), ('\u{2b820}', '\u{2cea1}'), ('\u{2f800}', - '\u{2fa1d}') - ]; + pub const Alphabetic_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x0000000000000000, 0x07fffffe07fffffe, 0x0420040000000000, 0xff7fffffff7fffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0x0000501f0003ffc3, + 0x0000000000000000, 0xbcdf000000000020, 0xfffffffbffffd740, 0xffbfffffffffffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xfffffffffffffc03, 0xffffffffffffffff, + 0xfffeffffffffffff, 0xfffffffe027fffff, 0xbfff0000000000ff, 0x000707ffffff00b6, + 0xffffffff07ff0000, 0xffffc000feffffff, 0xffffffffffffffff, 0x9c00e1fe1fefffff, + 0xffffffffffff0000, 0xffffffffffffe000, 0x0003ffffffffffff, 0x043007fffffffc00 + ], + r2: [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 36, 36, 36, 36, 37, 38, 39, 40, 41, + 42, 43, 44, 36, 36, 36, 36, 36, 36, 36, 36, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, + 56, 57, 58, 59, 60, 61, 62, 31, 63, 64, 65, 66, 55, 67, 31, 68, 36, 36, 36, 69, 36, 36, + 36, 36, 70, 71, 72, 73, 31, 74, 75, 31, 76, 77, 78, 31, 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 79, 
80, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 81, 82, 36, 83, 84, 85, 86, 87, 88, 31, 31, 31, + 31, 31, 31, 31, 89, 44, 90, 91, 92, 36, 93, 94, 31, 31, 31, 31, 31, 31, 31, 31, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 55, 31, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 95, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 96, 97, 36, 36, 36, 36, 98, 99, 36, 100, 101, 36, 102, + 103, 104, 105, 36, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 36, 117, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, + 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 36, 118, 119, + 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, + 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, 31, + 36, 36, 36, 36, 36, 120, 36, 121, 122, 123, 124, 125, 36, 
36, 36, 36, 126, 127, 128, + 129, 31, 130, 36, 131, 132, 133, 113, 134 + ], + r3: &[ + 0x00001ffffcffffff, 0x0000000001ffffff, 0x001fffff00000000, 0xffff03f800000000, + 0xefffffffffffffff, 0xfffe000fffe1dfff, 0xe3c5fdfffff99fef, 0x0003000fb080599f, + 0xc36dfdfffff987ee, 0x003f00005e021987, 0xe3edfdfffffbbfee, 0x0200000f00011bbf, + 0xe3edfdfffff99fee, 0x0002000fb0c0199f, 0xc3ffc718d63dc7ec, 0x0000000000811dc7, + 0xe3fffdfffffddfef, 0x0000000f07601ddf, 0xe3effdfffffddfee, 0x0006000f40601ddf, + 0xe7fffffffffddfee, 0xfc00000f80805ddf, 0x2ffbfffffc7fffec, 0x000c0000ff5f807f, + 0x07fffffffffffffe, 0x000000000000207f, 0x3bffecaefef02596, 0x00000000f000205f, + 0x0000000000000001, 0xfffe1ffffffffeff, 0x1ffffffffeffff03, 0x0000000000000000, + 0xf97fffffffffffff, 0xffffc1e7ffff0000, 0xffffffff3000407f, 0xf7ffffffffff20bf, + 0xffffffffffffffff, 0xffffffff3d7f3dff, 0x7f3dffffffff3dff, 0xffffffffff7fff3d, + 0xffffffffff3dffff, 0x0000000087ffffff, 0xffffffff0000ffff, 0x3f3fffffffffffff, + 0xfffffffffffffffe, 0xffff9fffffffffff, 0xffffffff07fffffe, 0x01ffc7ffffffffff, + 0x000fffff000fdfff, 0x000ddfff000fffff, 0xffcfffffffffffff, 0x00000000108001ff, + 0xffffffff00000000, 0x00ffffffffffffff, 0xffff07ffffffffff, 0x003fffffffffffff, + 0x01ff0fff7fffffff, 0x001f3fffffff0000, 0xffff0fffffffffff, 0x00000000000003ff, + 0xffffffff0fffffff, 0x001ffffe7fffffff, 0x0000008000000000, 0xffefffffffffffff, + 0x0000000000000fef, 0xfc00f3ffffffffff, 0x0003ffbfffffffff, 0x3ffffffffc00e000, + 0x006fde0000000000, 0x001fff8000000000, 0xffffffff3f3fffff, 0x3fffffffaaff3f3f, + 0x5fdfffffffffffff, 0x1fdc1fff0fcf1fdc, 0x8002000000000000, 0x000000001fff0000, + 0xf3ffbd503e2ffc84, 0xffffffff000043e0, 0x00000000000001ff, 0xffc0000000000000, + 0x000003ffffffffff, 0xffff7fffffffffff, 0xffffffff7fffffff, 0x000c781fffffffff, + 0xffff20bfffffffff, 0x000080ffffffffff, 0x7f7f7f7f007fffff, 0xffffffff7f7f7f7f, + 0x0000800000000000, 0x1f3e03fe000000e0, 0xfffffffee07fffff, 0xf7ffffffffffffff, + 0xfffe3fffffffffe0, 0x07ffffff00007fff, 0xffff000000000000, 0x00000000003fffff, + 0x0000000000001fff, 0x3fffffffffff0000, 0x00000c00ffff1fff, 0x8ff07fffffffffff, + 0x0000ffffffffffff, 0xfffffffcff800000, 0x00ff3ffffffff9ff, 0xff80000000000000, + 0x000000fffffff7bb, 0x000fffffffffffff, 0x28fc00000000000f, 0xffff07fffffffc00, + 0x1fffffff0007ffff, 0xfff7ffffffffffff, 0x7c00ffdf00008000, 0x007fffffffffffff, + 0xc47fffff00003fff, 0x7fffffffffffffff, 0x003cffff38000005, 0xffff7f7f007e7e7e, + 0xffff003ff7ffffff, 0x000007ffffffffff, 0xffff000fffffffff, 0x0ffffffffffff87f, + 0xffff3fffffffffff, 0x0000000003ffffff, 0x5f7ffdffe0f8007f, 0xffffffffffffffdb, + 0x0003ffffffffffff, 0xfffffffffff80000, 0x3fffffffffffffff, 0xffffffffffff0000, + 0xfffffffffffcffff, 0x0fff0000000000ff, 0xffdf000000000000, 0x1fffffffffffffff, + 0x07fffffe00000000, 0xffffffc007fffffe, 0x000000001cfcfcfc + ], + r4: [ + 0, 1, 2, 3, 4, 5, 6, 5, 5, 5, 5, 7, 5, 8, 9, 10, 11, 11, 11, 11, 11, 11, 11, 11, 11, 11, + 12, 13, 14, 5, 5, 15, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 
5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 + ], + r5: &[ + 0, 1, 2, 3, 4, 5, 4, 4, 4, 4, 6, 7, 8, 9, 10, 11, 2, 2, 12, 4, 13, 14, 4, 4, 2, 2, 2, 2, + 15, 16, 4, 4, 17, 18, 19, 20, 21, 4, 22, 4, 23, 24, 25, 26, 27, 28, 29, 4, 2, 30, 31, + 31, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 32, 33, 34, 31, 35, 2, 36, 37, 4, 38, 39, 40, + 41, 4, 4, 4, 4, 2, 42, 4, 4, 43, 44, 45, 46, 27, 4, 47, 4, 4, 4, 4, 4, 48, 49, 4, 4, 4, + 4, 4, 4, 4, 50, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 51, 4, 2, 52, 2, 2, 2, 53, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 52, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 54, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, + 2, 2, 2, 50, 19, 4, 55, 15, 56, 57, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 58, 59, 4, + 60, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 61, 62, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 63, 64, 65, 66, 67, + 2, 2, 2, 2, 68, 69, 70, 71, 72, 73, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 74, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 75, 76, 77, 4, 4, 4, 4, 4, 4, 4, 4, 4, 78, 79, + 80, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 81, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, 2, 2, 2, 10, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 82, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, + 2, 2, 2, 2, 2, 2, 12, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4 + ], + r6: &[ + 0xb7ffff7fffffefff, 0x000000003fff3fff, 0xffffffffffffffff, 0x07ffffffffffffff, + 0x0000000000000000, 0x001fffffffffffff, 0xffffffff1fffffff, 0x000000000001ffff, + 0xffff0000ffffffff, 
0x07ffffffffff07ff, 0xffffffff3fffffff, 0x00000000003eff0f, + 0x000000003fffffff, 0xffff00ffffffffff, 0x0000000fffffffff, 0x007fffffffffffff, + 0x000000ff003fffff, 0x91bffffffffffd3f, 0x007fffff003fffff, 0x000000007fffffff, + 0x0037ffff00000000, 0x03ffffff003fffff, 0xc0ffffffffffffff, 0x000ffffffeeff06f, + 0x1fffffff00000000, 0x000000001fffffff, 0x0000001ffffffeff, 0x003fffffffffffff, + 0x0007ffff003fffff, 0x000000000003ffff, 0x00000000000001ff, 0x0007ffffffffffff, + 0x000000000000003f, 0x01fffffffffffffc, 0x000001ffffff0000, 0x0047ffffffff0000, + 0x000000001400001e, 0x009ffffffffbffff, 0xffff01ffbfffbd7f, 0x000001ffffffffff, + 0xe3edfdfffff99fef, 0x0000000fe081199f, 0x00000000000000b3, 0x7f3fffffffffffff, + 0x000000003f000000, 0x7fffffffffffffff, 0x0000000000000011, 0x000007ffe3ffffff, + 0xffffffff00000000, 0x80000000ffffffff, 0x01ffffffffffffff, 0x0000000003ffffff, + 0x00007fffffffffff, 0x000000000000000f, 0x000000000000007f, 0x00003fffffff0000, + 0xe0fffff80000000f, 0x000000000000ffff, 0x7fffffffffff001f, 0x00000000fff80000, + 0x0000000000000003, 0x1fff07ffffffffff, 0x0000000043ff01ff, 0xffffffffffdfffff, + 0xebffde64dfffffff, 0xffffffffffffffef, 0x7bffffffdfdfe7bf, 0xfffffffffffdfc5f, + 0xffffff3fffffffff, 0xf7fffffff7fffffd, 0xffdfffffffdfffff, 0xffff7fffffff7fff, + 0xfffffdfffffffdff, 0x0000000000000ff7, 0x000000000000001f, 0x0af7fe96ffffffef, + 0x5ef7f796aa96ea84, 0x0ffffbee0ffffbff, 0xffff000000000000, 0xffff03ffffff03ff, + 0x00000000000003ff, 0x00000000007fffff, 0x00000003ffffffff + ], + }; pub fn Alphabetic(c: char) -> bool { - super::bsearch_range_table(c, Alphabetic_table) + super::trie_lookup_range_table(c, Alphabetic_table) } - pub const Case_Ignorable_table: &'static [(char, char)] = &[ - ('\u{27}', '\u{27}'), ('\u{2e}', '\u{2e}'), ('\u{3a}', '\u{3a}'), ('\u{5e}', '\u{5e}'), - ('\u{60}', '\u{60}'), ('\u{a8}', '\u{a8}'), ('\u{ad}', '\u{ad}'), ('\u{af}', '\u{af}'), - ('\u{b4}', '\u{b4}'), ('\u{b7}', '\u{b8}'), ('\u{2b0}', '\u{36f}'), ('\u{374}', '\u{375}'), - ('\u{37a}', '\u{37a}'), ('\u{384}', '\u{385}'), ('\u{387}', '\u{387}'), ('\u{483}', - '\u{489}'), ('\u{559}', '\u{559}'), ('\u{591}', '\u{5bd}'), ('\u{5bf}', '\u{5bf}'), - ('\u{5c1}', '\u{5c2}'), ('\u{5c4}', '\u{5c5}'), ('\u{5c7}', '\u{5c7}'), ('\u{5f4}', - '\u{5f4}'), ('\u{600}', '\u{605}'), ('\u{610}', '\u{61a}'), ('\u{61c}', '\u{61c}'), - ('\u{640}', '\u{640}'), ('\u{64b}', '\u{65f}'), ('\u{670}', '\u{670}'), ('\u{6d6}', - '\u{6dd}'), ('\u{6df}', '\u{6e8}'), ('\u{6ea}', '\u{6ed}'), ('\u{70f}', '\u{70f}'), - ('\u{711}', '\u{711}'), ('\u{730}', '\u{74a}'), ('\u{7a6}', '\u{7b0}'), ('\u{7eb}', - '\u{7f5}'), ('\u{7fa}', '\u{7fa}'), ('\u{816}', '\u{82d}'), ('\u{859}', '\u{85b}'), - ('\u{8e3}', '\u{902}'), ('\u{93a}', '\u{93a}'), ('\u{93c}', '\u{93c}'), ('\u{941}', - '\u{948}'), ('\u{94d}', '\u{94d}'), ('\u{951}', '\u{957}'), ('\u{962}', '\u{963}'), - ('\u{971}', '\u{971}'), ('\u{981}', '\u{981}'), ('\u{9bc}', '\u{9bc}'), ('\u{9c1}', - '\u{9c4}'), ('\u{9cd}', '\u{9cd}'), ('\u{9e2}', '\u{9e3}'), ('\u{a01}', '\u{a02}'), - ('\u{a3c}', '\u{a3c}'), ('\u{a41}', '\u{a42}'), ('\u{a47}', '\u{a48}'), ('\u{a4b}', - '\u{a4d}'), ('\u{a51}', '\u{a51}'), ('\u{a70}', '\u{a71}'), ('\u{a75}', '\u{a75}'), - ('\u{a81}', '\u{a82}'), ('\u{abc}', '\u{abc}'), ('\u{ac1}', '\u{ac5}'), ('\u{ac7}', - '\u{ac8}'), ('\u{acd}', '\u{acd}'), ('\u{ae2}', '\u{ae3}'), ('\u{b01}', '\u{b01}'), - ('\u{b3c}', '\u{b3c}'), ('\u{b3f}', '\u{b3f}'), ('\u{b41}', '\u{b44}'), ('\u{b4d}', - '\u{b4d}'), ('\u{b56}', '\u{b56}'), ('\u{b62}', '\u{b63}'), ('\u{b82}', '\u{b82}'), - 
('\u{bc0}', '\u{bc0}'), ('\u{bcd}', '\u{bcd}'), ('\u{c00}', '\u{c00}'), ('\u{c3e}', - '\u{c40}'), ('\u{c46}', '\u{c48}'), ('\u{c4a}', '\u{c4d}'), ('\u{c55}', '\u{c56}'), - ('\u{c62}', '\u{c63}'), ('\u{c81}', '\u{c81}'), ('\u{cbc}', '\u{cbc}'), ('\u{cbf}', - '\u{cbf}'), ('\u{cc6}', '\u{cc6}'), ('\u{ccc}', '\u{ccd}'), ('\u{ce2}', '\u{ce3}'), - ('\u{d01}', '\u{d01}'), ('\u{d41}', '\u{d44}'), ('\u{d4d}', '\u{d4d}'), ('\u{d62}', - '\u{d63}'), ('\u{dca}', '\u{dca}'), ('\u{dd2}', '\u{dd4}'), ('\u{dd6}', '\u{dd6}'), - ('\u{e31}', '\u{e31}'), ('\u{e34}', '\u{e3a}'), ('\u{e46}', '\u{e4e}'), ('\u{eb1}', - '\u{eb1}'), ('\u{eb4}', '\u{eb9}'), ('\u{ebb}', '\u{ebc}'), ('\u{ec6}', '\u{ec6}'), - ('\u{ec8}', '\u{ecd}'), ('\u{f18}', '\u{f19}'), ('\u{f35}', '\u{f35}'), ('\u{f37}', - '\u{f37}'), ('\u{f39}', '\u{f39}'), ('\u{f71}', '\u{f7e}'), ('\u{f80}', '\u{f84}'), - ('\u{f86}', '\u{f87}'), ('\u{f8d}', '\u{f97}'), ('\u{f99}', '\u{fbc}'), ('\u{fc6}', - '\u{fc6}'), ('\u{102d}', '\u{1030}'), ('\u{1032}', '\u{1037}'), ('\u{1039}', '\u{103a}'), - ('\u{103d}', '\u{103e}'), ('\u{1058}', '\u{1059}'), ('\u{105e}', '\u{1060}'), ('\u{1071}', - '\u{1074}'), ('\u{1082}', '\u{1082}'), ('\u{1085}', '\u{1086}'), ('\u{108d}', '\u{108d}'), - ('\u{109d}', '\u{109d}'), ('\u{10fc}', '\u{10fc}'), ('\u{135d}', '\u{135f}'), ('\u{1712}', - '\u{1714}'), ('\u{1732}', '\u{1734}'), ('\u{1752}', '\u{1753}'), ('\u{1772}', '\u{1773}'), - ('\u{17b4}', '\u{17b5}'), ('\u{17b7}', '\u{17bd}'), ('\u{17c6}', '\u{17c6}'), ('\u{17c9}', - '\u{17d3}'), ('\u{17d7}', '\u{17d7}'), ('\u{17dd}', '\u{17dd}'), ('\u{180b}', '\u{180e}'), - ('\u{1843}', '\u{1843}'), ('\u{18a9}', '\u{18a9}'), ('\u{1920}', '\u{1922}'), ('\u{1927}', - '\u{1928}'), ('\u{1932}', '\u{1932}'), ('\u{1939}', '\u{193b}'), ('\u{1a17}', '\u{1a18}'), - ('\u{1a1b}', '\u{1a1b}'), ('\u{1a56}', '\u{1a56}'), ('\u{1a58}', '\u{1a5e}'), ('\u{1a60}', - '\u{1a60}'), ('\u{1a62}', '\u{1a62}'), ('\u{1a65}', '\u{1a6c}'), ('\u{1a73}', '\u{1a7c}'), - ('\u{1a7f}', '\u{1a7f}'), ('\u{1aa7}', '\u{1aa7}'), ('\u{1ab0}', '\u{1abe}'), ('\u{1b00}', - '\u{1b03}'), ('\u{1b34}', '\u{1b34}'), ('\u{1b36}', '\u{1b3a}'), ('\u{1b3c}', '\u{1b3c}'), - ('\u{1b42}', '\u{1b42}'), ('\u{1b6b}', '\u{1b73}'), ('\u{1b80}', '\u{1b81}'), ('\u{1ba2}', - '\u{1ba5}'), ('\u{1ba8}', '\u{1ba9}'), ('\u{1bab}', '\u{1bad}'), ('\u{1be6}', '\u{1be6}'), - ('\u{1be8}', '\u{1be9}'), ('\u{1bed}', '\u{1bed}'), ('\u{1bef}', '\u{1bf1}'), ('\u{1c2c}', - '\u{1c33}'), ('\u{1c36}', '\u{1c37}'), ('\u{1c78}', '\u{1c7d}'), ('\u{1cd0}', '\u{1cd2}'), - ('\u{1cd4}', '\u{1ce0}'), ('\u{1ce2}', '\u{1ce8}'), ('\u{1ced}', '\u{1ced}'), ('\u{1cf4}', - '\u{1cf4}'), ('\u{1cf8}', '\u{1cf9}'), ('\u{1d2c}', '\u{1d6a}'), ('\u{1d78}', '\u{1d78}'), - ('\u{1d9b}', '\u{1df5}'), ('\u{1dfc}', '\u{1dff}'), ('\u{1fbd}', '\u{1fbd}'), ('\u{1fbf}', - '\u{1fc1}'), ('\u{1fcd}', '\u{1fcf}'), ('\u{1fdd}', '\u{1fdf}'), ('\u{1fed}', '\u{1fef}'), - ('\u{1ffd}', '\u{1ffe}'), ('\u{200b}', '\u{200f}'), ('\u{2018}', '\u{2019}'), ('\u{2024}', - '\u{2024}'), ('\u{2027}', '\u{2027}'), ('\u{202a}', '\u{202e}'), ('\u{2060}', '\u{2064}'), - ('\u{2066}', '\u{206f}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'), ('\u{2090}', - '\u{209c}'), ('\u{20d0}', '\u{20f0}'), ('\u{2c7c}', '\u{2c7d}'), ('\u{2cef}', '\u{2cf1}'), - ('\u{2d6f}', '\u{2d6f}'), ('\u{2d7f}', '\u{2d7f}'), ('\u{2de0}', '\u{2dff}'), ('\u{2e2f}', - '\u{2e2f}'), ('\u{3005}', '\u{3005}'), ('\u{302a}', '\u{302d}'), ('\u{3031}', '\u{3035}'), - ('\u{303b}', '\u{303b}'), ('\u{3099}', '\u{309e}'), ('\u{30fc}', '\u{30fe}'), ('\u{a015}', - 
'\u{a015}'), ('\u{a4f8}', '\u{a4fd}'), ('\u{a60c}', '\u{a60c}'), ('\u{a66f}', '\u{a672}'), - ('\u{a674}', '\u{a67d}'), ('\u{a67f}', '\u{a67f}'), ('\u{a69c}', '\u{a69f}'), ('\u{a6f0}', - '\u{a6f1}'), ('\u{a700}', '\u{a721}'), ('\u{a770}', '\u{a770}'), ('\u{a788}', '\u{a78a}'), - ('\u{a7f8}', '\u{a7f9}'), ('\u{a802}', '\u{a802}'), ('\u{a806}', '\u{a806}'), ('\u{a80b}', - '\u{a80b}'), ('\u{a825}', '\u{a826}'), ('\u{a8c4}', '\u{a8c4}'), ('\u{a8e0}', '\u{a8f1}'), - ('\u{a926}', '\u{a92d}'), ('\u{a947}', '\u{a951}'), ('\u{a980}', '\u{a982}'), ('\u{a9b3}', - '\u{a9b3}'), ('\u{a9b6}', '\u{a9b9}'), ('\u{a9bc}', '\u{a9bc}'), ('\u{a9cf}', '\u{a9cf}'), - ('\u{a9e5}', '\u{a9e6}'), ('\u{aa29}', '\u{aa2e}'), ('\u{aa31}', '\u{aa32}'), ('\u{aa35}', - '\u{aa36}'), ('\u{aa43}', '\u{aa43}'), ('\u{aa4c}', '\u{aa4c}'), ('\u{aa70}', '\u{aa70}'), - ('\u{aa7c}', '\u{aa7c}'), ('\u{aab0}', '\u{aab0}'), ('\u{aab2}', '\u{aab4}'), ('\u{aab7}', - '\u{aab8}'), ('\u{aabe}', '\u{aabf}'), ('\u{aac1}', '\u{aac1}'), ('\u{aadd}', '\u{aadd}'), - ('\u{aaec}', '\u{aaed}'), ('\u{aaf3}', '\u{aaf4}'), ('\u{aaf6}', '\u{aaf6}'), ('\u{ab5b}', - '\u{ab5f}'), ('\u{abe5}', '\u{abe5}'), ('\u{abe8}', '\u{abe8}'), ('\u{abed}', '\u{abed}'), - ('\u{fb1e}', '\u{fb1e}'), ('\u{fbb2}', '\u{fbc1}'), ('\u{fe00}', '\u{fe0f}'), ('\u{fe13}', - '\u{fe13}'), ('\u{fe20}', '\u{fe2f}'), ('\u{fe52}', '\u{fe52}'), ('\u{fe55}', '\u{fe55}'), - ('\u{feff}', '\u{feff}'), ('\u{ff07}', '\u{ff07}'), ('\u{ff0e}', '\u{ff0e}'), ('\u{ff1a}', - '\u{ff1a}'), ('\u{ff3e}', '\u{ff3e}'), ('\u{ff40}', '\u{ff40}'), ('\u{ff70}', '\u{ff70}'), - ('\u{ff9e}', '\u{ff9f}'), ('\u{ffe3}', '\u{ffe3}'), ('\u{fff9}', '\u{fffb}'), ('\u{101fd}', - '\u{101fd}'), ('\u{102e0}', '\u{102e0}'), ('\u{10376}', '\u{1037a}'), ('\u{10a01}', - '\u{10a03}'), ('\u{10a05}', '\u{10a06}'), ('\u{10a0c}', '\u{10a0f}'), ('\u{10a38}', - '\u{10a3a}'), ('\u{10a3f}', '\u{10a3f}'), ('\u{10ae5}', '\u{10ae6}'), ('\u{11001}', - '\u{11001}'), ('\u{11038}', '\u{11046}'), ('\u{1107f}', '\u{11081}'), ('\u{110b3}', - '\u{110b6}'), ('\u{110b9}', '\u{110ba}'), ('\u{110bd}', '\u{110bd}'), ('\u{11100}', - '\u{11102}'), ('\u{11127}', '\u{1112b}'), ('\u{1112d}', '\u{11134}'), ('\u{11173}', - '\u{11173}'), ('\u{11180}', '\u{11181}'), ('\u{111b6}', '\u{111be}'), ('\u{111ca}', - '\u{111cc}'), ('\u{1122f}', '\u{11231}'), ('\u{11234}', '\u{11234}'), ('\u{11236}', - '\u{11237}'), ('\u{112df}', '\u{112df}'), ('\u{112e3}', '\u{112ea}'), ('\u{11300}', - '\u{11301}'), ('\u{1133c}', '\u{1133c}'), ('\u{11340}', '\u{11340}'), ('\u{11366}', - '\u{1136c}'), ('\u{11370}', '\u{11374}'), ('\u{114b3}', '\u{114b8}'), ('\u{114ba}', - '\u{114ba}'), ('\u{114bf}', '\u{114c0}'), ('\u{114c2}', '\u{114c3}'), ('\u{115b2}', - '\u{115b5}'), ('\u{115bc}', '\u{115bd}'), ('\u{115bf}', '\u{115c0}'), ('\u{115dc}', - '\u{115dd}'), ('\u{11633}', '\u{1163a}'), ('\u{1163d}', '\u{1163d}'), ('\u{1163f}', - '\u{11640}'), ('\u{116ab}', '\u{116ab}'), ('\u{116ad}', '\u{116ad}'), ('\u{116b0}', - '\u{116b5}'), ('\u{116b7}', '\u{116b7}'), ('\u{1171d}', '\u{1171f}'), ('\u{11722}', - '\u{11725}'), ('\u{11727}', '\u{1172b}'), ('\u{16af0}', '\u{16af4}'), ('\u{16b30}', - '\u{16b36}'), ('\u{16b40}', '\u{16b43}'), ('\u{16f8f}', '\u{16f9f}'), ('\u{1bc9d}', - '\u{1bc9e}'), ('\u{1bca0}', '\u{1bca3}'), ('\u{1d167}', '\u{1d169}'), ('\u{1d173}', - '\u{1d182}'), ('\u{1d185}', '\u{1d18b}'), ('\u{1d1aa}', '\u{1d1ad}'), ('\u{1d242}', - '\u{1d244}'), ('\u{1da00}', '\u{1da36}'), ('\u{1da3b}', '\u{1da6c}'), ('\u{1da75}', - '\u{1da75}'), ('\u{1da84}', '\u{1da84}'), ('\u{1da9b}', '\u{1da9f}'), 
('\u{1daa1}', - '\u{1daaf}'), ('\u{1e8d0}', '\u{1e8d6}'), ('\u{1f3fb}', '\u{1f3ff}'), ('\u{e0001}', - '\u{e0001}'), ('\u{e0020}', '\u{e007f}'), ('\u{e0100}', '\u{e01ef}') - ]; + pub const Case_Ignorable_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x0400408000000000, 0x0000000140000000, 0x0190a10000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0xffff000000000000, 0xffffffffffffffff, + 0xffffffffffffffff, 0x0430ffffffffffff, 0x00000000000000b0, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x00000000000003f8, 0x0000000000000000, + 0x0000000000000000, 0x0000000002000000, 0xbffffffffffe0000, 0x00100000000000b6, + 0x0000000017ff003f, 0x00010000fffff801, 0x0000000000000000, 0x00003dffbfc00000, + 0xffff000000028000, 0x00000000000007ff, 0x0001ffc000000000, 0x043ff80000000000 + ], + r2: [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 8, 10, 11, 12, 13, 14, 15, 16, 11, 17, 18, 7, 2, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 2, 2, 2, 2, 2, 2, 2, 2, 2, 32, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 33, 34, 35, 36, 37, 38, 39, 2, 40, 2, 2, 2, 41, 42, 43, 2, + 44, 45, 46, 47, 48, 49, 2, 50, 51, 52, 53, 54, 2, 2, 2, 2, 2, 2, 55, 56, 57, 58, 59, 60, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 61, 2, 62, 2, 63, 2, 64, 65, 2, 2, 2, 2, + 2, 2, 2, 66, 2, 67, 68, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 69, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 49, 2, 2, 2, 2, 70, 71, 72, 73, 74, 75, 76, 77, 78, 2, 2, 79, 80, + 81, 82, 83, 84, 85, 86, 87, 2, 88, 2, 89, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 90, 2, 91, 92, 2, 2, 2, 2, 2, 2, 2, 2, 93, 94, 2, 95, + 96, 97, 98, 99 + ], + r3: &[ + 0x00003fffffc00000, 0x000000000e000000, 0x0000000000000000, 0xfffffff800000000, + 0x1400000000000007, 0x0002000c00fe21fe, 0x1000000000000002, 0x0000000c0000201e, + 0x1000000000000006, 0x0023000000023986, 0x0000000c000021be, 0x9000000000000002, + 0x0000000c0040201e, 0x0000000000000004, 0x0000000000002001, 0xc000000000000001, + 0x0000000c00603dc1, 0x0000000c00003040, 0x0000000000000002, 0x00000000005c0400, + 0x07f2000000000000, 0x0000000000007fc0, 0x1bf2000000000000, 0x0000000000003f40, + 0x02a0000003000000, 0x7ffe000000000000, 0x1ffffffffeffe0df, 0x0000000000000040, + 0x66fde00000000000, 0x001e0001c3000000, 0x0000000020002064, 0x1000000000000000, + 0x00000000e0000000, 0x001c0000001c0000, 0x000c0000000c0000, 0x3fb0000000000000, + 0x00000000208ffe40, 0x0000000000007800, 0x0000000000000008, 0x0000020000000000, + 0x0e04018700000000, 0x0000000009800000, 0x9ff81fe57f400000, 0x7fff008000000000, + 0x17d000000000000f, 0x000ff80000000004, 0x00003b3c00000003, 0x0003a34000000000, + 0x00cff00000000000, 0x3f00000000000000, 0x031021fdfff70000, 0xfffff00000000000, + 0x010007ffffffffff, 0xfffffffff8000000, 0xf03fffffffffffff, 0xa000000000000000, + 0x6000e000e000e003, 0x00007c900300f800, 0x8002ffdf00000000, 0x000000001fff0000, + 0x0001ffffffff0000, 0x3000000000000000, 0x0003800000000000, 0x8000800000000000, + 0xffffffff00000000, 0x0000800000000000, 0x083e3c0000000020, 0x000000007e000000, + 0x7000000000000000, 0x0000000000200000, 0x0000000000001000, 0xbff7800000000000, + 0x00000000f0000000, 0x0003000000000000, 0x00000003ffffffff, 0x0001000000000000, + 0x0000000000000700, 0x0300000000000000, 0x0000006000000844, 0x0003ffff00000010, + 0x00003fc000000000, 0x000000000003ff80, 0x13c8000000000007, 0x0000006000008000, + 0x00667e0000000000, 0x1001000000001008, 0xc19d000000000000, 0x0058300020000002, + 0x00000000f8000000, 0x0000212000000000, 0x0000000040000000, 0xfffc000000000000, + 0x0000000000000003, 0x0000ffff0008ffff, 0x0000000000240000, 0x8000000000000000, + 0x4000000004004080, 0x0001000000000001, 0x00000000c0000000, 0x0e00000800000000 + ], + r4: [ + 0, 1, 2, 2, 2, 2, 3, 2, 2, 2, 2, 4, 2, 5, 6, 7, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 8, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 + ], + r5: &[ + 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 2, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 6, 7, 8, 0, 9, 10, 11, 12, 13, 0, 0, 14, 15, 16, 0, 0, 0, 0, 17, 18, + 0, 0, 19, 20, 21, 22, 23, 0, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 25, 26, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 28, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 31, 0, 0, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 33, 34, 35, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 36, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 37, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 38, 39, + 0, 0, 39, 39, 39, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0 + ], + r6: &[ + 0x0000000000000000, 0x2000000000000000, 0x0000000100000000, 0x07c0000000000000, + 0x870000000000f06e, 0x0000006000000000, 0xff00000000000002, 0x800000000000007f, + 0x2678000000000003, 0x001fef8000000007, 0x0008000000000000, 0x7fc0000000000003, + 0x0000000000001c00, 0x00d3800000000000, 0x000007f880000000, 0x1000000000000003, + 0x001f1fc000000001, 0x85f8000000000000, 0x000000000000000d, 0xb03c000000000000, + 0x0000000030000001, 0xa7f8000000000000, 0x0000000000000001, 0x00bf280000000000, + 0x00000fbce0000000, 0x001f000000000000, 0x007f000000000000, 0x000000000000000f, + 0x00000000ffff8000, 0x0000000f60000000, 0xfff8038000000000, 0x00003c0000000fe7, + 0x000000000000001c, 0xf87fffffffffffff, 0x00201fffffffffff, 0x0000fffef8000010, + 0x00000000007f0000, 0xf800000000000000, 0xffffffff00000002, 0xffffffffffffffff, + 0x0000ffffffffffff + ], + }; pub fn Case_Ignorable(c: char) -> bool { - super::bsearch_range_table(c, Case_Ignorable_table) + super::trie_lookup_range_table(c, Case_Ignorable_table) } - pub const Cased_table: &'static [(char, char)] = &[ - ('\u{41}', '\u{5a}'), ('\u{61}', '\u{7a}'), ('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'), - ('\u{ba}', '\u{ba}'), ('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{1ba}'), - ('\u{1bc}', '\u{1bf}'), ('\u{1c4}', '\u{293}'), ('\u{295}', '\u{2b8}'), ('\u{2c0}', - '\u{2c1}'), ('\u{2e0}', '\u{2e4}'), ('\u{345}', '\u{345}'), ('\u{370}', '\u{373}'), - ('\u{376}', '\u{377}'), ('\u{37a}', '\u{37d}'), ('\u{37f}', '\u{37f}'), ('\u{386}', - '\u{386}'), ('\u{388}', '\u{38a}'), ('\u{38c}', '\u{38c}'), ('\u{38e}', '\u{3a1}'), - ('\u{3a3}', '\u{3f5}'), ('\u{3f7}', '\u{481}'), ('\u{48a}', '\u{52f}'), ('\u{531}', - '\u{556}'), ('\u{561}', '\u{587}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'), - ('\u{10cd}', '\u{10cd}'), ('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), ('\u{1d00}', - '\u{1dbf}'), ('\u{1e00}', '\u{1f15}'), 
('\u{1f18}', '\u{1f1d}'), ('\u{1f20}', '\u{1f45}'), - ('\u{1f48}', '\u{1f4d}'), ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', '\u{1f59}'), ('\u{1f5b}', - '\u{1f5b}'), ('\u{1f5d}', '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}', '\u{1fb4}'), - ('\u{1fb6}', '\u{1fbc}'), ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'), ('\u{1fc6}', - '\u{1fcc}'), ('\u{1fd0}', '\u{1fd3}'), ('\u{1fd6}', '\u{1fdb}'), ('\u{1fe0}', '\u{1fec}'), - ('\u{1ff2}', '\u{1ff4}'), ('\u{1ff6}', '\u{1ffc}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', - '\u{207f}'), ('\u{2090}', '\u{209c}'), ('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), - ('\u{210a}', '\u{2113}'), ('\u{2115}', '\u{2115}'), ('\u{2119}', '\u{211d}'), ('\u{2124}', - '\u{2124}'), ('\u{2126}', '\u{2126}'), ('\u{2128}', '\u{2128}'), ('\u{212a}', '\u{212d}'), - ('\u{212f}', '\u{2134}'), ('\u{2139}', '\u{2139}'), ('\u{213c}', '\u{213f}'), ('\u{2145}', - '\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2160}', '\u{217f}'), ('\u{2183}', '\u{2184}'), - ('\u{24b6}', '\u{24e9}'), ('\u{2c00}', '\u{2c2e}'), ('\u{2c30}', '\u{2c5e}'), ('\u{2c60}', - '\u{2ce4}'), ('\u{2ceb}', '\u{2cee}'), ('\u{2cf2}', '\u{2cf3}'), ('\u{2d00}', '\u{2d25}'), - ('\u{2d27}', '\u{2d27}'), ('\u{2d2d}', '\u{2d2d}'), ('\u{a640}', '\u{a66d}'), ('\u{a680}', - '\u{a69d}'), ('\u{a722}', '\u{a787}'), ('\u{a78b}', '\u{a78e}'), ('\u{a790}', '\u{a7ad}'), - ('\u{a7b0}', '\u{a7b7}'), ('\u{a7f8}', '\u{a7fa}'), ('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', - '\u{ab65}'), ('\u{ab70}', '\u{abbf}'), ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'), - ('\u{ff21}', '\u{ff3a}'), ('\u{ff41}', '\u{ff5a}'), ('\u{10400}', '\u{1044f}'), - ('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}', '\u{10cf2}'), ('\u{118a0}', '\u{118df}'), - ('\u{1d400}', '\u{1d454}'), ('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'), - ('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'), - ('\u{1d4ae}', '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', '\u{1d4c3}'), - ('\u{1d4c5}', '\u{1d505}'), ('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'), - ('\u{1d516}', '\u{1d51c}'), ('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'), - ('\u{1d540}', '\u{1d544}'), ('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'), - ('\u{1d552}', '\u{1d6a5}'), ('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'), - ('\u{1d6dc}', '\u{1d6fa}'), ('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d734}'), - ('\u{1d736}', '\u{1d74e}'), ('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', '\u{1d788}'), - ('\u{1d78a}', '\u{1d7a8}'), ('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'), - ('\u{1f130}', '\u{1f149}'), ('\u{1f150}', '\u{1f169}'), ('\u{1f170}', '\u{1f189}') - ]; + pub const Cased_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x0000000000000000, 0x07fffffe07fffffe, 0x0420040000000000, 0xff7fffffff7fffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xf7ffffffffffffff, 0xfffffffffffffff0, + 0xffffffffffffffff, 0xffffffffffffffff, 0x01ffffffffefffff, 0x0000001f00000003, + 0x0000000000000000, 0xbccf000000000020, 0xfffffffbffffd740, 0xffbfffffffffffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xfffffffffffffc03, 0xffffffffffffffff, + 0xfffeffffffffffff, 0xfffffffe007fffff, 0x00000000000000ff, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000 + ], + r2: [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, + 0, 4, 4, 4, 4, 5, 6, 7, 8, 0, 9, 10, 0, 11, 12, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14, + 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 16, 17, 4, 18, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 21, 0, + 22, 4, 23, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 25, 26, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 28, 29, 0, 0 + ], + r3: &[ + 0x0000000000000000, 0xffffffff00000000, 0x00000000000020bf, 0x3f3fffffffffffff, + 0xffffffffffffffff, 0xffffffff3f3fffff, 0x3fffffffaaff3f3f, 0x5fdfffffffffffff, + 0x1fdc1fff0fcf1fdc, 0x8002000000000000, 0x000000001fff0000, 0xf21fbd503e2ffc84, + 0xffffffff000043e0, 0x0000000000000018, 0xffc0000000000000, 0x000003ffffffffff, + 0xffff7fffffffffff, 0xffffffff7fffffff, 0x000c781fffffffff, 0x000020bfffffffff, + 0x00003fffffffffff, 0x000000003fffffff, 0xfffffffc00000000, 0x00ff3fffffff78ff, + 0x0700000000000000, 0xffff000000000000, 0xffff003ff7ffffff, 0x0000000000f8007f, + 0x07fffffe00000000, 0x0000000007fffffe + ], + 
r4: [ + 0, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 + ], + r5: &[ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 1, 6, 7, 8, 9, 10, 1, 1, 1, 1, 11, 12, 13, 14, 15, 16, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 17, 18, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0 + ], + r6: &[ + 0x0000000000000000, 0xffffffffffffffff, 0x000000000000ffff, 0x0007ffffffffffff, + 0xffffffff00000000, 0x00000000ffffffff, 0xffffffffffdfffff, 0xebffde64dfffffff, + 0xffffffffffffffef, 0x7bffffffdfdfe7bf, 0xfffffffffffdfc5f, 0xffffff3fffffffff, + 0xf7fffffff7fffffd, 0xffdfffffffdfffff, 0xffff7fffffff7fff, 0xfffffdfffffffdff, + 0x0000000000000ff7, 0xffff000000000000, 0xffff03ffffff03ff, 0x00000000000003ff + ], + }; pub fn Cased(c: char) -> bool { - super::bsearch_range_table(c, Cased_table) + super::trie_lookup_range_table(c, Cased_table) } - pub const Lowercase_table: &'static [(char, char)] = &[ - ('\u{61}', '\u{7a}'), ('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'), ('\u{ba}', '\u{ba}'), - ('\u{df}', '\u{f6}'), ('\u{f8}', '\u{ff}'), ('\u{101}', '\u{101}'), ('\u{103}', '\u{103}'), - ('\u{105}', '\u{105}'), ('\u{107}', '\u{107}'), ('\u{109}', '\u{109}'), ('\u{10b}', - '\u{10b}'), ('\u{10d}', '\u{10d}'), ('\u{10f}', '\u{10f}'), ('\u{111}', '\u{111}'), - ('\u{113}', '\u{113}'), ('\u{115}', '\u{115}'), ('\u{117}', '\u{117}'), ('\u{119}', - '\u{119}'), ('\u{11b}', '\u{11b}'), ('\u{11d}', '\u{11d}'), ('\u{11f}', '\u{11f}'), - ('\u{121}', '\u{121}'), ('\u{123}', '\u{123}'), ('\u{125}', '\u{125}'), ('\u{127}', - '\u{127}'), ('\u{129}', '\u{129}'), ('\u{12b}', '\u{12b}'), ('\u{12d}', '\u{12d}'), - ('\u{12f}', '\u{12f}'), ('\u{131}', '\u{131}'), ('\u{133}', '\u{133}'), ('\u{135}', - '\u{135}'), ('\u{137}', '\u{138}'), ('\u{13a}', '\u{13a}'), ('\u{13c}', '\u{13c}'), - ('\u{13e}', '\u{13e}'), ('\u{140}', '\u{140}'), ('\u{142}', '\u{142}'), ('\u{144}', - '\u{144}'), ('\u{146}', '\u{146}'), ('\u{148}', '\u{149}'), ('\u{14b}', '\u{14b}'), - ('\u{14d}', '\u{14d}'), ('\u{14f}', '\u{14f}'), ('\u{151}', '\u{151}'), ('\u{153}', - 
'\u{153}'), ('\u{155}', '\u{155}'), ('\u{157}', '\u{157}'), ('\u{159}', '\u{159}'), - ('\u{15b}', '\u{15b}'), ('\u{15d}', '\u{15d}'), ('\u{15f}', '\u{15f}'), ('\u{161}', - '\u{161}'), ('\u{163}', '\u{163}'), ('\u{165}', '\u{165}'), ('\u{167}', '\u{167}'), - ('\u{169}', '\u{169}'), ('\u{16b}', '\u{16b}'), ('\u{16d}', '\u{16d}'), ('\u{16f}', - '\u{16f}'), ('\u{171}', '\u{171}'), ('\u{173}', '\u{173}'), ('\u{175}', '\u{175}'), - ('\u{177}', '\u{177}'), ('\u{17a}', '\u{17a}'), ('\u{17c}', '\u{17c}'), ('\u{17e}', - '\u{180}'), ('\u{183}', '\u{183}'), ('\u{185}', '\u{185}'), ('\u{188}', '\u{188}'), - ('\u{18c}', '\u{18d}'), ('\u{192}', '\u{192}'), ('\u{195}', '\u{195}'), ('\u{199}', - '\u{19b}'), ('\u{19e}', '\u{19e}'), ('\u{1a1}', '\u{1a1}'), ('\u{1a3}', '\u{1a3}'), - ('\u{1a5}', '\u{1a5}'), ('\u{1a8}', '\u{1a8}'), ('\u{1aa}', '\u{1ab}'), ('\u{1ad}', - '\u{1ad}'), ('\u{1b0}', '\u{1b0}'), ('\u{1b4}', '\u{1b4}'), ('\u{1b6}', '\u{1b6}'), - ('\u{1b9}', '\u{1ba}'), ('\u{1bd}', '\u{1bf}'), ('\u{1c6}', '\u{1c6}'), ('\u{1c9}', - '\u{1c9}'), ('\u{1cc}', '\u{1cc}'), ('\u{1ce}', '\u{1ce}'), ('\u{1d0}', '\u{1d0}'), - ('\u{1d2}', '\u{1d2}'), ('\u{1d4}', '\u{1d4}'), ('\u{1d6}', '\u{1d6}'), ('\u{1d8}', - '\u{1d8}'), ('\u{1da}', '\u{1da}'), ('\u{1dc}', '\u{1dd}'), ('\u{1df}', '\u{1df}'), - ('\u{1e1}', '\u{1e1}'), ('\u{1e3}', '\u{1e3}'), ('\u{1e5}', '\u{1e5}'), ('\u{1e7}', - '\u{1e7}'), ('\u{1e9}', '\u{1e9}'), ('\u{1eb}', '\u{1eb}'), ('\u{1ed}', '\u{1ed}'), - ('\u{1ef}', '\u{1f0}'), ('\u{1f3}', '\u{1f3}'), ('\u{1f5}', '\u{1f5}'), ('\u{1f9}', - '\u{1f9}'), ('\u{1fb}', '\u{1fb}'), ('\u{1fd}', '\u{1fd}'), ('\u{1ff}', '\u{1ff}'), - ('\u{201}', '\u{201}'), ('\u{203}', '\u{203}'), ('\u{205}', '\u{205}'), ('\u{207}', - '\u{207}'), ('\u{209}', '\u{209}'), ('\u{20b}', '\u{20b}'), ('\u{20d}', '\u{20d}'), - ('\u{20f}', '\u{20f}'), ('\u{211}', '\u{211}'), ('\u{213}', '\u{213}'), ('\u{215}', - '\u{215}'), ('\u{217}', '\u{217}'), ('\u{219}', '\u{219}'), ('\u{21b}', '\u{21b}'), - ('\u{21d}', '\u{21d}'), ('\u{21f}', '\u{21f}'), ('\u{221}', '\u{221}'), ('\u{223}', - '\u{223}'), ('\u{225}', '\u{225}'), ('\u{227}', '\u{227}'), ('\u{229}', '\u{229}'), - ('\u{22b}', '\u{22b}'), ('\u{22d}', '\u{22d}'), ('\u{22f}', '\u{22f}'), ('\u{231}', - '\u{231}'), ('\u{233}', '\u{239}'), ('\u{23c}', '\u{23c}'), ('\u{23f}', '\u{240}'), - ('\u{242}', '\u{242}'), ('\u{247}', '\u{247}'), ('\u{249}', '\u{249}'), ('\u{24b}', - '\u{24b}'), ('\u{24d}', '\u{24d}'), ('\u{24f}', '\u{293}'), ('\u{295}', '\u{2b8}'), - ('\u{2c0}', '\u{2c1}'), ('\u{2e0}', '\u{2e4}'), ('\u{345}', '\u{345}'), ('\u{371}', - '\u{371}'), ('\u{373}', '\u{373}'), ('\u{377}', '\u{377}'), ('\u{37a}', '\u{37d}'), - ('\u{390}', '\u{390}'), ('\u{3ac}', '\u{3ce}'), ('\u{3d0}', '\u{3d1}'), ('\u{3d5}', - '\u{3d7}'), ('\u{3d9}', '\u{3d9}'), ('\u{3db}', '\u{3db}'), ('\u{3dd}', '\u{3dd}'), - ('\u{3df}', '\u{3df}'), ('\u{3e1}', '\u{3e1}'), ('\u{3e3}', '\u{3e3}'), ('\u{3e5}', - '\u{3e5}'), ('\u{3e7}', '\u{3e7}'), ('\u{3e9}', '\u{3e9}'), ('\u{3eb}', '\u{3eb}'), - ('\u{3ed}', '\u{3ed}'), ('\u{3ef}', '\u{3f3}'), ('\u{3f5}', '\u{3f5}'), ('\u{3f8}', - '\u{3f8}'), ('\u{3fb}', '\u{3fc}'), ('\u{430}', '\u{45f}'), ('\u{461}', '\u{461}'), - ('\u{463}', '\u{463}'), ('\u{465}', '\u{465}'), ('\u{467}', '\u{467}'), ('\u{469}', - '\u{469}'), ('\u{46b}', '\u{46b}'), ('\u{46d}', '\u{46d}'), ('\u{46f}', '\u{46f}'), - ('\u{471}', '\u{471}'), ('\u{473}', '\u{473}'), ('\u{475}', '\u{475}'), ('\u{477}', - '\u{477}'), ('\u{479}', '\u{479}'), ('\u{47b}', '\u{47b}'), ('\u{47d}', '\u{47d}'), - ('\u{47f}', '\u{47f}'), 
('\u{481}', '\u{481}'), ('\u{48b}', '\u{48b}'), ('\u{48d}', - '\u{48d}'), ('\u{48f}', '\u{48f}'), ('\u{491}', '\u{491}'), ('\u{493}', '\u{493}'), - ('\u{495}', '\u{495}'), ('\u{497}', '\u{497}'), ('\u{499}', '\u{499}'), ('\u{49b}', - '\u{49b}'), ('\u{49d}', '\u{49d}'), ('\u{49f}', '\u{49f}'), ('\u{4a1}', '\u{4a1}'), - ('\u{4a3}', '\u{4a3}'), ('\u{4a5}', '\u{4a5}'), ('\u{4a7}', '\u{4a7}'), ('\u{4a9}', - '\u{4a9}'), ('\u{4ab}', '\u{4ab}'), ('\u{4ad}', '\u{4ad}'), ('\u{4af}', '\u{4af}'), - ('\u{4b1}', '\u{4b1}'), ('\u{4b3}', '\u{4b3}'), ('\u{4b5}', '\u{4b5}'), ('\u{4b7}', - '\u{4b7}'), ('\u{4b9}', '\u{4b9}'), ('\u{4bb}', '\u{4bb}'), ('\u{4bd}', '\u{4bd}'), - ('\u{4bf}', '\u{4bf}'), ('\u{4c2}', '\u{4c2}'), ('\u{4c4}', '\u{4c4}'), ('\u{4c6}', - '\u{4c6}'), ('\u{4c8}', '\u{4c8}'), ('\u{4ca}', '\u{4ca}'), ('\u{4cc}', '\u{4cc}'), - ('\u{4ce}', '\u{4cf}'), ('\u{4d1}', '\u{4d1}'), ('\u{4d3}', '\u{4d3}'), ('\u{4d5}', - '\u{4d5}'), ('\u{4d7}', '\u{4d7}'), ('\u{4d9}', '\u{4d9}'), ('\u{4db}', '\u{4db}'), - ('\u{4dd}', '\u{4dd}'), ('\u{4df}', '\u{4df}'), ('\u{4e1}', '\u{4e1}'), ('\u{4e3}', - '\u{4e3}'), ('\u{4e5}', '\u{4e5}'), ('\u{4e7}', '\u{4e7}'), ('\u{4e9}', '\u{4e9}'), - ('\u{4eb}', '\u{4eb}'), ('\u{4ed}', '\u{4ed}'), ('\u{4ef}', '\u{4ef}'), ('\u{4f1}', - '\u{4f1}'), ('\u{4f3}', '\u{4f3}'), ('\u{4f5}', '\u{4f5}'), ('\u{4f7}', '\u{4f7}'), - ('\u{4f9}', '\u{4f9}'), ('\u{4fb}', '\u{4fb}'), ('\u{4fd}', '\u{4fd}'), ('\u{4ff}', - '\u{4ff}'), ('\u{501}', '\u{501}'), ('\u{503}', '\u{503}'), ('\u{505}', '\u{505}'), - ('\u{507}', '\u{507}'), ('\u{509}', '\u{509}'), ('\u{50b}', '\u{50b}'), ('\u{50d}', - '\u{50d}'), ('\u{50f}', '\u{50f}'), ('\u{511}', '\u{511}'), ('\u{513}', '\u{513}'), - ('\u{515}', '\u{515}'), ('\u{517}', '\u{517}'), ('\u{519}', '\u{519}'), ('\u{51b}', - '\u{51b}'), ('\u{51d}', '\u{51d}'), ('\u{51f}', '\u{51f}'), ('\u{521}', '\u{521}'), - ('\u{523}', '\u{523}'), ('\u{525}', '\u{525}'), ('\u{527}', '\u{527}'), ('\u{529}', - '\u{529}'), ('\u{52b}', '\u{52b}'), ('\u{52d}', '\u{52d}'), ('\u{52f}', '\u{52f}'), - ('\u{561}', '\u{587}'), ('\u{13f8}', '\u{13fd}'), ('\u{1d00}', '\u{1dbf}'), ('\u{1e01}', - '\u{1e01}'), ('\u{1e03}', '\u{1e03}'), ('\u{1e05}', '\u{1e05}'), ('\u{1e07}', '\u{1e07}'), - ('\u{1e09}', '\u{1e09}'), ('\u{1e0b}', '\u{1e0b}'), ('\u{1e0d}', '\u{1e0d}'), ('\u{1e0f}', - '\u{1e0f}'), ('\u{1e11}', '\u{1e11}'), ('\u{1e13}', '\u{1e13}'), ('\u{1e15}', '\u{1e15}'), - ('\u{1e17}', '\u{1e17}'), ('\u{1e19}', '\u{1e19}'), ('\u{1e1b}', '\u{1e1b}'), ('\u{1e1d}', - '\u{1e1d}'), ('\u{1e1f}', '\u{1e1f}'), ('\u{1e21}', '\u{1e21}'), ('\u{1e23}', '\u{1e23}'), - ('\u{1e25}', '\u{1e25}'), ('\u{1e27}', '\u{1e27}'), ('\u{1e29}', '\u{1e29}'), ('\u{1e2b}', - '\u{1e2b}'), ('\u{1e2d}', '\u{1e2d}'), ('\u{1e2f}', '\u{1e2f}'), ('\u{1e31}', '\u{1e31}'), - ('\u{1e33}', '\u{1e33}'), ('\u{1e35}', '\u{1e35}'), ('\u{1e37}', '\u{1e37}'), ('\u{1e39}', - '\u{1e39}'), ('\u{1e3b}', '\u{1e3b}'), ('\u{1e3d}', '\u{1e3d}'), ('\u{1e3f}', '\u{1e3f}'), - ('\u{1e41}', '\u{1e41}'), ('\u{1e43}', '\u{1e43}'), ('\u{1e45}', '\u{1e45}'), ('\u{1e47}', - '\u{1e47}'), ('\u{1e49}', '\u{1e49}'), ('\u{1e4b}', '\u{1e4b}'), ('\u{1e4d}', '\u{1e4d}'), - ('\u{1e4f}', '\u{1e4f}'), ('\u{1e51}', '\u{1e51}'), ('\u{1e53}', '\u{1e53}'), ('\u{1e55}', - '\u{1e55}'), ('\u{1e57}', '\u{1e57}'), ('\u{1e59}', '\u{1e59}'), ('\u{1e5b}', '\u{1e5b}'), - ('\u{1e5d}', '\u{1e5d}'), ('\u{1e5f}', '\u{1e5f}'), ('\u{1e61}', '\u{1e61}'), ('\u{1e63}', - '\u{1e63}'), ('\u{1e65}', '\u{1e65}'), ('\u{1e67}', '\u{1e67}'), ('\u{1e69}', '\u{1e69}'), - ('\u{1e6b}', '\u{1e6b}'), 
('\u{1e6d}', '\u{1e6d}'), ('\u{1e6f}', '\u{1e6f}'), ('\u{1e71}', - '\u{1e71}'), ('\u{1e73}', '\u{1e73}'), ('\u{1e75}', '\u{1e75}'), ('\u{1e77}', '\u{1e77}'), - ('\u{1e79}', '\u{1e79}'), ('\u{1e7b}', '\u{1e7b}'), ('\u{1e7d}', '\u{1e7d}'), ('\u{1e7f}', - '\u{1e7f}'), ('\u{1e81}', '\u{1e81}'), ('\u{1e83}', '\u{1e83}'), ('\u{1e85}', '\u{1e85}'), - ('\u{1e87}', '\u{1e87}'), ('\u{1e89}', '\u{1e89}'), ('\u{1e8b}', '\u{1e8b}'), ('\u{1e8d}', - '\u{1e8d}'), ('\u{1e8f}', '\u{1e8f}'), ('\u{1e91}', '\u{1e91}'), ('\u{1e93}', '\u{1e93}'), - ('\u{1e95}', '\u{1e9d}'), ('\u{1e9f}', '\u{1e9f}'), ('\u{1ea1}', '\u{1ea1}'), ('\u{1ea3}', - '\u{1ea3}'), ('\u{1ea5}', '\u{1ea5}'), ('\u{1ea7}', '\u{1ea7}'), ('\u{1ea9}', '\u{1ea9}'), - ('\u{1eab}', '\u{1eab}'), ('\u{1ead}', '\u{1ead}'), ('\u{1eaf}', '\u{1eaf}'), ('\u{1eb1}', - '\u{1eb1}'), ('\u{1eb3}', '\u{1eb3}'), ('\u{1eb5}', '\u{1eb5}'), ('\u{1eb7}', '\u{1eb7}'), - ('\u{1eb9}', '\u{1eb9}'), ('\u{1ebb}', '\u{1ebb}'), ('\u{1ebd}', '\u{1ebd}'), ('\u{1ebf}', - '\u{1ebf}'), ('\u{1ec1}', '\u{1ec1}'), ('\u{1ec3}', '\u{1ec3}'), ('\u{1ec5}', '\u{1ec5}'), - ('\u{1ec7}', '\u{1ec7}'), ('\u{1ec9}', '\u{1ec9}'), ('\u{1ecb}', '\u{1ecb}'), ('\u{1ecd}', - '\u{1ecd}'), ('\u{1ecf}', '\u{1ecf}'), ('\u{1ed1}', '\u{1ed1}'), ('\u{1ed3}', '\u{1ed3}'), - ('\u{1ed5}', '\u{1ed5}'), ('\u{1ed7}', '\u{1ed7}'), ('\u{1ed9}', '\u{1ed9}'), ('\u{1edb}', - '\u{1edb}'), ('\u{1edd}', '\u{1edd}'), ('\u{1edf}', '\u{1edf}'), ('\u{1ee1}', '\u{1ee1}'), - ('\u{1ee3}', '\u{1ee3}'), ('\u{1ee5}', '\u{1ee5}'), ('\u{1ee7}', '\u{1ee7}'), ('\u{1ee9}', - '\u{1ee9}'), ('\u{1eeb}', '\u{1eeb}'), ('\u{1eed}', '\u{1eed}'), ('\u{1eef}', '\u{1eef}'), - ('\u{1ef1}', '\u{1ef1}'), ('\u{1ef3}', '\u{1ef3}'), ('\u{1ef5}', '\u{1ef5}'), ('\u{1ef7}', - '\u{1ef7}'), ('\u{1ef9}', '\u{1ef9}'), ('\u{1efb}', '\u{1efb}'), ('\u{1efd}', '\u{1efd}'), - ('\u{1eff}', '\u{1f07}'), ('\u{1f10}', '\u{1f15}'), ('\u{1f20}', '\u{1f27}'), ('\u{1f30}', - '\u{1f37}'), ('\u{1f40}', '\u{1f45}'), ('\u{1f50}', '\u{1f57}'), ('\u{1f60}', '\u{1f67}'), - ('\u{1f70}', '\u{1f7d}'), ('\u{1f80}', '\u{1f87}'), ('\u{1f90}', '\u{1f97}'), ('\u{1fa0}', - '\u{1fa7}'), ('\u{1fb0}', '\u{1fb4}'), ('\u{1fb6}', '\u{1fb7}'), ('\u{1fbe}', '\u{1fbe}'), - ('\u{1fc2}', '\u{1fc4}'), ('\u{1fc6}', '\u{1fc7}'), ('\u{1fd0}', '\u{1fd3}'), ('\u{1fd6}', - '\u{1fd7}'), ('\u{1fe0}', '\u{1fe7}'), ('\u{1ff2}', '\u{1ff4}'), ('\u{1ff6}', '\u{1ff7}'), - ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'), ('\u{2090}', '\u{209c}'), ('\u{210a}', - '\u{210a}'), ('\u{210e}', '\u{210f}'), ('\u{2113}', '\u{2113}'), ('\u{212f}', '\u{212f}'), - ('\u{2134}', '\u{2134}'), ('\u{2139}', '\u{2139}'), ('\u{213c}', '\u{213d}'), ('\u{2146}', - '\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2170}', '\u{217f}'), ('\u{2184}', '\u{2184}'), - ('\u{24d0}', '\u{24e9}'), ('\u{2c30}', '\u{2c5e}'), ('\u{2c61}', '\u{2c61}'), ('\u{2c65}', - '\u{2c66}'), ('\u{2c68}', '\u{2c68}'), ('\u{2c6a}', '\u{2c6a}'), ('\u{2c6c}', '\u{2c6c}'), - ('\u{2c71}', '\u{2c71}'), ('\u{2c73}', '\u{2c74}'), ('\u{2c76}', '\u{2c7d}'), ('\u{2c81}', - '\u{2c81}'), ('\u{2c83}', '\u{2c83}'), ('\u{2c85}', '\u{2c85}'), ('\u{2c87}', '\u{2c87}'), - ('\u{2c89}', '\u{2c89}'), ('\u{2c8b}', '\u{2c8b}'), ('\u{2c8d}', '\u{2c8d}'), ('\u{2c8f}', - '\u{2c8f}'), ('\u{2c91}', '\u{2c91}'), ('\u{2c93}', '\u{2c93}'), ('\u{2c95}', '\u{2c95}'), - ('\u{2c97}', '\u{2c97}'), ('\u{2c99}', '\u{2c99}'), ('\u{2c9b}', '\u{2c9b}'), ('\u{2c9d}', - '\u{2c9d}'), ('\u{2c9f}', '\u{2c9f}'), ('\u{2ca1}', '\u{2ca1}'), ('\u{2ca3}', '\u{2ca3}'), - ('\u{2ca5}', '\u{2ca5}'), ('\u{2ca7}', 
'\u{2ca7}'), ('\u{2ca9}', '\u{2ca9}'), ('\u{2cab}', - '\u{2cab}'), ('\u{2cad}', '\u{2cad}'), ('\u{2caf}', '\u{2caf}'), ('\u{2cb1}', '\u{2cb1}'), - ('\u{2cb3}', '\u{2cb3}'), ('\u{2cb5}', '\u{2cb5}'), ('\u{2cb7}', '\u{2cb7}'), ('\u{2cb9}', - '\u{2cb9}'), ('\u{2cbb}', '\u{2cbb}'), ('\u{2cbd}', '\u{2cbd}'), ('\u{2cbf}', '\u{2cbf}'), - ('\u{2cc1}', '\u{2cc1}'), ('\u{2cc3}', '\u{2cc3}'), ('\u{2cc5}', '\u{2cc5}'), ('\u{2cc7}', - '\u{2cc7}'), ('\u{2cc9}', '\u{2cc9}'), ('\u{2ccb}', '\u{2ccb}'), ('\u{2ccd}', '\u{2ccd}'), - ('\u{2ccf}', '\u{2ccf}'), ('\u{2cd1}', '\u{2cd1}'), ('\u{2cd3}', '\u{2cd3}'), ('\u{2cd5}', - '\u{2cd5}'), ('\u{2cd7}', '\u{2cd7}'), ('\u{2cd9}', '\u{2cd9}'), ('\u{2cdb}', '\u{2cdb}'), - ('\u{2cdd}', '\u{2cdd}'), ('\u{2cdf}', '\u{2cdf}'), ('\u{2ce1}', '\u{2ce1}'), ('\u{2ce3}', - '\u{2ce4}'), ('\u{2cec}', '\u{2cec}'), ('\u{2cee}', '\u{2cee}'), ('\u{2cf3}', '\u{2cf3}'), - ('\u{2d00}', '\u{2d25}'), ('\u{2d27}', '\u{2d27}'), ('\u{2d2d}', '\u{2d2d}'), ('\u{a641}', - '\u{a641}'), ('\u{a643}', '\u{a643}'), ('\u{a645}', '\u{a645}'), ('\u{a647}', '\u{a647}'), - ('\u{a649}', '\u{a649}'), ('\u{a64b}', '\u{a64b}'), ('\u{a64d}', '\u{a64d}'), ('\u{a64f}', - '\u{a64f}'), ('\u{a651}', '\u{a651}'), ('\u{a653}', '\u{a653}'), ('\u{a655}', '\u{a655}'), - ('\u{a657}', '\u{a657}'), ('\u{a659}', '\u{a659}'), ('\u{a65b}', '\u{a65b}'), ('\u{a65d}', - '\u{a65d}'), ('\u{a65f}', '\u{a65f}'), ('\u{a661}', '\u{a661}'), ('\u{a663}', '\u{a663}'), - ('\u{a665}', '\u{a665}'), ('\u{a667}', '\u{a667}'), ('\u{a669}', '\u{a669}'), ('\u{a66b}', - '\u{a66b}'), ('\u{a66d}', '\u{a66d}'), ('\u{a681}', '\u{a681}'), ('\u{a683}', '\u{a683}'), - ('\u{a685}', '\u{a685}'), ('\u{a687}', '\u{a687}'), ('\u{a689}', '\u{a689}'), ('\u{a68b}', - '\u{a68b}'), ('\u{a68d}', '\u{a68d}'), ('\u{a68f}', '\u{a68f}'), ('\u{a691}', '\u{a691}'), - ('\u{a693}', '\u{a693}'), ('\u{a695}', '\u{a695}'), ('\u{a697}', '\u{a697}'), ('\u{a699}', - '\u{a699}'), ('\u{a69b}', '\u{a69d}'), ('\u{a723}', '\u{a723}'), ('\u{a725}', '\u{a725}'), - ('\u{a727}', '\u{a727}'), ('\u{a729}', '\u{a729}'), ('\u{a72b}', '\u{a72b}'), ('\u{a72d}', - '\u{a72d}'), ('\u{a72f}', '\u{a731}'), ('\u{a733}', '\u{a733}'), ('\u{a735}', '\u{a735}'), - ('\u{a737}', '\u{a737}'), ('\u{a739}', '\u{a739}'), ('\u{a73b}', '\u{a73b}'), ('\u{a73d}', - '\u{a73d}'), ('\u{a73f}', '\u{a73f}'), ('\u{a741}', '\u{a741}'), ('\u{a743}', '\u{a743}'), - ('\u{a745}', '\u{a745}'), ('\u{a747}', '\u{a747}'), ('\u{a749}', '\u{a749}'), ('\u{a74b}', - '\u{a74b}'), ('\u{a74d}', '\u{a74d}'), ('\u{a74f}', '\u{a74f}'), ('\u{a751}', '\u{a751}'), - ('\u{a753}', '\u{a753}'), ('\u{a755}', '\u{a755}'), ('\u{a757}', '\u{a757}'), ('\u{a759}', - '\u{a759}'), ('\u{a75b}', '\u{a75b}'), ('\u{a75d}', '\u{a75d}'), ('\u{a75f}', '\u{a75f}'), - ('\u{a761}', '\u{a761}'), ('\u{a763}', '\u{a763}'), ('\u{a765}', '\u{a765}'), ('\u{a767}', - '\u{a767}'), ('\u{a769}', '\u{a769}'), ('\u{a76b}', '\u{a76b}'), ('\u{a76d}', '\u{a76d}'), - ('\u{a76f}', '\u{a778}'), ('\u{a77a}', '\u{a77a}'), ('\u{a77c}', '\u{a77c}'), ('\u{a77f}', - '\u{a77f}'), ('\u{a781}', '\u{a781}'), ('\u{a783}', '\u{a783}'), ('\u{a785}', '\u{a785}'), - ('\u{a787}', '\u{a787}'), ('\u{a78c}', '\u{a78c}'), ('\u{a78e}', '\u{a78e}'), ('\u{a791}', - '\u{a791}'), ('\u{a793}', '\u{a795}'), ('\u{a797}', '\u{a797}'), ('\u{a799}', '\u{a799}'), - ('\u{a79b}', '\u{a79b}'), ('\u{a79d}', '\u{a79d}'), ('\u{a79f}', '\u{a79f}'), ('\u{a7a1}', - '\u{a7a1}'), ('\u{a7a3}', '\u{a7a3}'), ('\u{a7a5}', '\u{a7a5}'), ('\u{a7a7}', '\u{a7a7}'), - ('\u{a7a9}', '\u{a7a9}'), ('\u{a7b5}', '\u{a7b5}'), 
('\u{a7b7}', '\u{a7b7}'), ('\u{a7f8}', - '\u{a7fa}'), ('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab65}'), ('\u{ab70}', '\u{abbf}'), - ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'), ('\u{ff41}', '\u{ff5a}'), ('\u{10428}', - '\u{1044f}'), ('\u{10cc0}', '\u{10cf2}'), ('\u{118c0}', '\u{118df}'), ('\u{1d41a}', - '\u{1d433}'), ('\u{1d44e}', '\u{1d454}'), ('\u{1d456}', '\u{1d467}'), ('\u{1d482}', - '\u{1d49b}'), ('\u{1d4b6}', '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', - '\u{1d4c3}'), ('\u{1d4c5}', '\u{1d4cf}'), ('\u{1d4ea}', '\u{1d503}'), ('\u{1d51e}', - '\u{1d537}'), ('\u{1d552}', '\u{1d56b}'), ('\u{1d586}', '\u{1d59f}'), ('\u{1d5ba}', - '\u{1d5d3}'), ('\u{1d5ee}', '\u{1d607}'), ('\u{1d622}', '\u{1d63b}'), ('\u{1d656}', - '\u{1d66f}'), ('\u{1d68a}', '\u{1d6a5}'), ('\u{1d6c2}', '\u{1d6da}'), ('\u{1d6dc}', - '\u{1d6e1}'), ('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d71b}'), ('\u{1d736}', - '\u{1d74e}'), ('\u{1d750}', '\u{1d755}'), ('\u{1d770}', '\u{1d788}'), ('\u{1d78a}', - '\u{1d78f}'), ('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7c9}'), ('\u{1d7cb}', - '\u{1d7cb}') - ]; + pub const Lowercase_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x0000000000000000, 0x07fffffe00000000, 0x0420040000000000, 0xff7fffff80000000, + 0x55aaaaaaaaaaaaaa, 0xd4aaaaaaaaaaab55, 0xe6512d2a4e243129, 0xaa29aaaab5555240, + 0x93faaaaaaaaaaaaa, 0xffffffffffffaa85, 0x01ffffffffefffff, 0x0000001f00000003, + 0x0000000000000000, 0x3c8a000000000020, 0xfffff00000010000, 0x192faaaaaae37fff, + 0xffff000000000000, 0xaaaaaaaaffffffff, 0xaaaaaaaaaaaaa802, 0xaaaaaaaaaaaad554, + 0x0000aaaaaaaaaaaa, 0xfffffffe00000000, 0x00000000000000ff, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000 + ], + r2: [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, + 0, 3, 3, 4, 3, 5, 6, 7, 8, 0, 9, 10, 0, 11, 12, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15, 16, 3, 17, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 20, 0, + 21, 22, 23, 24, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 25, 2, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 27, 0, 0 + ], + r3: &[ + 0x0000000000000000, 0x3f00000000000000, 0xffffffffffffffff, 0xaaaaaaaaaaaaaaaa, + 0xaaaaaaaabfeaaaaa, 0x00ff00ff003f00ff, 0x3fff00ff00ff003f, 0x40df00ff00ff00ff, + 0x00dc00ff00cf00dc, 0x8002000000000000, 0x000000001fff0000, 0x321080000008c400, + 0xffff0000000043c0, 0x0000000000000010, 0x000003ffffff0000, 0xffff000000000000, + 0x3fda15627fffffff, 0x0008501aaaaaaaaa, 0x000020bfffffffff, 0x00002aaaaaaaaaaa, + 0x000000003aaaaaaa, 0xaaabaaa800000000, 0x95ffaaaaaaaaaaaa, 0x00a002aaaaba50aa, + 0x0700000000000000, 0xffff003ff7ffffff, 0x0000000000f8007f, 0x0000000007fffffe + ], + r4: [ + 0, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 + ], + r5: &[ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0 + ], + r6: &[ + 0x0000000000000000, 0xffffff0000000000, 0x000000000000ffff, 0x0007ffffffffffff, + 0x00000000ffffffff, 0x000ffffffc000000, 0x000000ffffdfc000, 0xebc000000ffffffc, + 0xfffffc000000ffef, 0x00ffffffc000000f, 0x00000ffffffc0000, 0xfc000000ffffffc0, + 0xffffc000000fffff, 0x0ffffffc000000ff, 0x0000ffffffc00000, 0x0000003ffffffc00, + 0xf0000003f7fffffc, 0xffc000000fdfffff, 0xffff0000003f7fff, 0xfffffc000000fdff, + 0x0000000000000bf7 + ], + }; pub fn Lowercase(c: char) -> bool { - super::bsearch_range_table(c, Lowercase_table) + super::trie_lookup_range_table(c, Lowercase_table) } - pub const Uppercase_table: &'static [(char, char)] = &[ - ('\u{41}', '\u{5a}'), ('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{de}'), ('\u{100}', '\u{100}'), - ('\u{102}', '\u{102}'), ('\u{104}', '\u{104}'), ('\u{106}', '\u{106}'), ('\u{108}', - '\u{108}'), ('\u{10a}', '\u{10a}'), ('\u{10c}', '\u{10c}'), ('\u{10e}', '\u{10e}'), - ('\u{110}', '\u{110}'), ('\u{112}', '\u{112}'), ('\u{114}', '\u{114}'), ('\u{116}', - '\u{116}'), ('\u{118}', '\u{118}'), ('\u{11a}', '\u{11a}'), ('\u{11c}', '\u{11c}'), - ('\u{11e}', '\u{11e}'), ('\u{120}', '\u{120}'), ('\u{122}', '\u{122}'), ('\u{124}', - '\u{124}'), ('\u{126}', '\u{126}'), ('\u{128}', '\u{128}'), ('\u{12a}', '\u{12a}'), - ('\u{12c}', '\u{12c}'), ('\u{12e}', '\u{12e}'), ('\u{130}', '\u{130}'), ('\u{132}', - '\u{132}'), ('\u{134}', '\u{134}'), ('\u{136}', '\u{136}'), ('\u{139}', '\u{139}'), - ('\u{13b}', '\u{13b}'), ('\u{13d}', '\u{13d}'), ('\u{13f}', '\u{13f}'), ('\u{141}', - '\u{141}'), ('\u{143}', '\u{143}'), ('\u{145}', '\u{145}'), ('\u{147}', '\u{147}'), - ('\u{14a}', '\u{14a}'), ('\u{14c}', '\u{14c}'), ('\u{14e}', '\u{14e}'), ('\u{150}', - '\u{150}'), ('\u{152}', '\u{152}'), ('\u{154}', '\u{154}'), ('\u{156}', '\u{156}'), - ('\u{158}', '\u{158}'), ('\u{15a}', '\u{15a}'), ('\u{15c}', '\u{15c}'), ('\u{15e}', - '\u{15e}'), ('\u{160}', '\u{160}'), ('\u{162}', '\u{162}'), ('\u{164}', '\u{164}'), - ('\u{166}', '\u{166}'), ('\u{168}', '\u{168}'), ('\u{16a}', '\u{16a}'), ('\u{16c}', - '\u{16c}'), ('\u{16e}', '\u{16e}'), ('\u{170}', '\u{170}'), ('\u{172}', '\u{172}'), - ('\u{174}', '\u{174}'), ('\u{176}', '\u{176}'), ('\u{178}', '\u{179}'), ('\u{17b}', - '\u{17b}'), ('\u{17d}', '\u{17d}'), ('\u{181}', '\u{182}'), ('\u{184}', '\u{184}'), - ('\u{186}', '\u{187}'), ('\u{189}', '\u{18b}'), ('\u{18e}', '\u{191}'), ('\u{193}', - '\u{194}'), ('\u{196}', '\u{198}'), ('\u{19c}', '\u{19d}'), ('\u{19f}', '\u{1a0}'), - ('\u{1a2}', '\u{1a2}'), ('\u{1a4}', '\u{1a4}'), ('\u{1a6}', '\u{1a7}'), ('\u{1a9}', - '\u{1a9}'), ('\u{1ac}', '\u{1ac}'), ('\u{1ae}', '\u{1af}'), ('\u{1b1}', '\u{1b3}'), - ('\u{1b5}', '\u{1b5}'), ('\u{1b7}', '\u{1b8}'), ('\u{1bc}', '\u{1bc}'), ('\u{1c4}', - '\u{1c4}'), ('\u{1c7}', '\u{1c7}'), ('\u{1ca}', '\u{1ca}'), ('\u{1cd}', '\u{1cd}'), - ('\u{1cf}', '\u{1cf}'), ('\u{1d1}', '\u{1d1}'), ('\u{1d3}', '\u{1d3}'), ('\u{1d5}', - '\u{1d5}'), ('\u{1d7}', '\u{1d7}'), ('\u{1d9}', '\u{1d9}'), ('\u{1db}', '\u{1db}'), - ('\u{1de}', '\u{1de}'), ('\u{1e0}', '\u{1e0}'), ('\u{1e2}', '\u{1e2}'), ('\u{1e4}', - '\u{1e4}'), ('\u{1e6}', '\u{1e6}'), ('\u{1e8}', '\u{1e8}'), ('\u{1ea}', '\u{1ea}'), - ('\u{1ec}', '\u{1ec}'), ('\u{1ee}', '\u{1ee}'), ('\u{1f1}', '\u{1f1}'), ('\u{1f4}', - '\u{1f4}'), ('\u{1f6}', '\u{1f8}'), ('\u{1fa}', '\u{1fa}'), ('\u{1fc}', '\u{1fc}'), - ('\u{1fe}', '\u{1fe}'), ('\u{200}', '\u{200}'), ('\u{202}', '\u{202}'), ('\u{204}', - '\u{204}'), ('\u{206}', '\u{206}'), ('\u{208}', '\u{208}'), ('\u{20a}', '\u{20a}'), - ('\u{20c}', '\u{20c}'), ('\u{20e}', '\u{20e}'), 
('\u{210}', '\u{210}'), ('\u{212}', - '\u{212}'), ('\u{214}', '\u{214}'), ('\u{216}', '\u{216}'), ('\u{218}', '\u{218}'), - ('\u{21a}', '\u{21a}'), ('\u{21c}', '\u{21c}'), ('\u{21e}', '\u{21e}'), ('\u{220}', - '\u{220}'), ('\u{222}', '\u{222}'), ('\u{224}', '\u{224}'), ('\u{226}', '\u{226}'), - ('\u{228}', '\u{228}'), ('\u{22a}', '\u{22a}'), ('\u{22c}', '\u{22c}'), ('\u{22e}', - '\u{22e}'), ('\u{230}', '\u{230}'), ('\u{232}', '\u{232}'), ('\u{23a}', '\u{23b}'), - ('\u{23d}', '\u{23e}'), ('\u{241}', '\u{241}'), ('\u{243}', '\u{246}'), ('\u{248}', - '\u{248}'), ('\u{24a}', '\u{24a}'), ('\u{24c}', '\u{24c}'), ('\u{24e}', '\u{24e}'), - ('\u{370}', '\u{370}'), ('\u{372}', '\u{372}'), ('\u{376}', '\u{376}'), ('\u{37f}', - '\u{37f}'), ('\u{386}', '\u{386}'), ('\u{388}', '\u{38a}'), ('\u{38c}', '\u{38c}'), - ('\u{38e}', '\u{38f}'), ('\u{391}', '\u{3a1}'), ('\u{3a3}', '\u{3ab}'), ('\u{3cf}', - '\u{3cf}'), ('\u{3d2}', '\u{3d4}'), ('\u{3d8}', '\u{3d8}'), ('\u{3da}', '\u{3da}'), - ('\u{3dc}', '\u{3dc}'), ('\u{3de}', '\u{3de}'), ('\u{3e0}', '\u{3e0}'), ('\u{3e2}', - '\u{3e2}'), ('\u{3e4}', '\u{3e4}'), ('\u{3e6}', '\u{3e6}'), ('\u{3e8}', '\u{3e8}'), - ('\u{3ea}', '\u{3ea}'), ('\u{3ec}', '\u{3ec}'), ('\u{3ee}', '\u{3ee}'), ('\u{3f4}', - '\u{3f4}'), ('\u{3f7}', '\u{3f7}'), ('\u{3f9}', '\u{3fa}'), ('\u{3fd}', '\u{42f}'), - ('\u{460}', '\u{460}'), ('\u{462}', '\u{462}'), ('\u{464}', '\u{464}'), ('\u{466}', - '\u{466}'), ('\u{468}', '\u{468}'), ('\u{46a}', '\u{46a}'), ('\u{46c}', '\u{46c}'), - ('\u{46e}', '\u{46e}'), ('\u{470}', '\u{470}'), ('\u{472}', '\u{472}'), ('\u{474}', - '\u{474}'), ('\u{476}', '\u{476}'), ('\u{478}', '\u{478}'), ('\u{47a}', '\u{47a}'), - ('\u{47c}', '\u{47c}'), ('\u{47e}', '\u{47e}'), ('\u{480}', '\u{480}'), ('\u{48a}', - '\u{48a}'), ('\u{48c}', '\u{48c}'), ('\u{48e}', '\u{48e}'), ('\u{490}', '\u{490}'), - ('\u{492}', '\u{492}'), ('\u{494}', '\u{494}'), ('\u{496}', '\u{496}'), ('\u{498}', - '\u{498}'), ('\u{49a}', '\u{49a}'), ('\u{49c}', '\u{49c}'), ('\u{49e}', '\u{49e}'), - ('\u{4a0}', '\u{4a0}'), ('\u{4a2}', '\u{4a2}'), ('\u{4a4}', '\u{4a4}'), ('\u{4a6}', - '\u{4a6}'), ('\u{4a8}', '\u{4a8}'), ('\u{4aa}', '\u{4aa}'), ('\u{4ac}', '\u{4ac}'), - ('\u{4ae}', '\u{4ae}'), ('\u{4b0}', '\u{4b0}'), ('\u{4b2}', '\u{4b2}'), ('\u{4b4}', - '\u{4b4}'), ('\u{4b6}', '\u{4b6}'), ('\u{4b8}', '\u{4b8}'), ('\u{4ba}', '\u{4ba}'), - ('\u{4bc}', '\u{4bc}'), ('\u{4be}', '\u{4be}'), ('\u{4c0}', '\u{4c1}'), ('\u{4c3}', - '\u{4c3}'), ('\u{4c5}', '\u{4c5}'), ('\u{4c7}', '\u{4c7}'), ('\u{4c9}', '\u{4c9}'), - ('\u{4cb}', '\u{4cb}'), ('\u{4cd}', '\u{4cd}'), ('\u{4d0}', '\u{4d0}'), ('\u{4d2}', - '\u{4d2}'), ('\u{4d4}', '\u{4d4}'), ('\u{4d6}', '\u{4d6}'), ('\u{4d8}', '\u{4d8}'), - ('\u{4da}', '\u{4da}'), ('\u{4dc}', '\u{4dc}'), ('\u{4de}', '\u{4de}'), ('\u{4e0}', - '\u{4e0}'), ('\u{4e2}', '\u{4e2}'), ('\u{4e4}', '\u{4e4}'), ('\u{4e6}', '\u{4e6}'), - ('\u{4e8}', '\u{4e8}'), ('\u{4ea}', '\u{4ea}'), ('\u{4ec}', '\u{4ec}'), ('\u{4ee}', - '\u{4ee}'), ('\u{4f0}', '\u{4f0}'), ('\u{4f2}', '\u{4f2}'), ('\u{4f4}', '\u{4f4}'), - ('\u{4f6}', '\u{4f6}'), ('\u{4f8}', '\u{4f8}'), ('\u{4fa}', '\u{4fa}'), ('\u{4fc}', - '\u{4fc}'), ('\u{4fe}', '\u{4fe}'), ('\u{500}', '\u{500}'), ('\u{502}', '\u{502}'), - ('\u{504}', '\u{504}'), ('\u{506}', '\u{506}'), ('\u{508}', '\u{508}'), ('\u{50a}', - '\u{50a}'), ('\u{50c}', '\u{50c}'), ('\u{50e}', '\u{50e}'), ('\u{510}', '\u{510}'), - ('\u{512}', '\u{512}'), ('\u{514}', '\u{514}'), ('\u{516}', '\u{516}'), ('\u{518}', - '\u{518}'), ('\u{51a}', '\u{51a}'), ('\u{51c}', '\u{51c}'), ('\u{51e}', 
'\u{51e}'), - ('\u{520}', '\u{520}'), ('\u{522}', '\u{522}'), ('\u{524}', '\u{524}'), ('\u{526}', - '\u{526}'), ('\u{528}', '\u{528}'), ('\u{52a}', '\u{52a}'), ('\u{52c}', '\u{52c}'), - ('\u{52e}', '\u{52e}'), ('\u{531}', '\u{556}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', - '\u{10c7}'), ('\u{10cd}', '\u{10cd}'), ('\u{13a0}', '\u{13f5}'), ('\u{1e00}', '\u{1e00}'), - ('\u{1e02}', '\u{1e02}'), ('\u{1e04}', '\u{1e04}'), ('\u{1e06}', '\u{1e06}'), ('\u{1e08}', - '\u{1e08}'), ('\u{1e0a}', '\u{1e0a}'), ('\u{1e0c}', '\u{1e0c}'), ('\u{1e0e}', '\u{1e0e}'), - ('\u{1e10}', '\u{1e10}'), ('\u{1e12}', '\u{1e12}'), ('\u{1e14}', '\u{1e14}'), ('\u{1e16}', - '\u{1e16}'), ('\u{1e18}', '\u{1e18}'), ('\u{1e1a}', '\u{1e1a}'), ('\u{1e1c}', '\u{1e1c}'), - ('\u{1e1e}', '\u{1e1e}'), ('\u{1e20}', '\u{1e20}'), ('\u{1e22}', '\u{1e22}'), ('\u{1e24}', - '\u{1e24}'), ('\u{1e26}', '\u{1e26}'), ('\u{1e28}', '\u{1e28}'), ('\u{1e2a}', '\u{1e2a}'), - ('\u{1e2c}', '\u{1e2c}'), ('\u{1e2e}', '\u{1e2e}'), ('\u{1e30}', '\u{1e30}'), ('\u{1e32}', - '\u{1e32}'), ('\u{1e34}', '\u{1e34}'), ('\u{1e36}', '\u{1e36}'), ('\u{1e38}', '\u{1e38}'), - ('\u{1e3a}', '\u{1e3a}'), ('\u{1e3c}', '\u{1e3c}'), ('\u{1e3e}', '\u{1e3e}'), ('\u{1e40}', - '\u{1e40}'), ('\u{1e42}', '\u{1e42}'), ('\u{1e44}', '\u{1e44}'), ('\u{1e46}', '\u{1e46}'), - ('\u{1e48}', '\u{1e48}'), ('\u{1e4a}', '\u{1e4a}'), ('\u{1e4c}', '\u{1e4c}'), ('\u{1e4e}', - '\u{1e4e}'), ('\u{1e50}', '\u{1e50}'), ('\u{1e52}', '\u{1e52}'), ('\u{1e54}', '\u{1e54}'), - ('\u{1e56}', '\u{1e56}'), ('\u{1e58}', '\u{1e58}'), ('\u{1e5a}', '\u{1e5a}'), ('\u{1e5c}', - '\u{1e5c}'), ('\u{1e5e}', '\u{1e5e}'), ('\u{1e60}', '\u{1e60}'), ('\u{1e62}', '\u{1e62}'), - ('\u{1e64}', '\u{1e64}'), ('\u{1e66}', '\u{1e66}'), ('\u{1e68}', '\u{1e68}'), ('\u{1e6a}', - '\u{1e6a}'), ('\u{1e6c}', '\u{1e6c}'), ('\u{1e6e}', '\u{1e6e}'), ('\u{1e70}', '\u{1e70}'), - ('\u{1e72}', '\u{1e72}'), ('\u{1e74}', '\u{1e74}'), ('\u{1e76}', '\u{1e76}'), ('\u{1e78}', - '\u{1e78}'), ('\u{1e7a}', '\u{1e7a}'), ('\u{1e7c}', '\u{1e7c}'), ('\u{1e7e}', '\u{1e7e}'), - ('\u{1e80}', '\u{1e80}'), ('\u{1e82}', '\u{1e82}'), ('\u{1e84}', '\u{1e84}'), ('\u{1e86}', - '\u{1e86}'), ('\u{1e88}', '\u{1e88}'), ('\u{1e8a}', '\u{1e8a}'), ('\u{1e8c}', '\u{1e8c}'), - ('\u{1e8e}', '\u{1e8e}'), ('\u{1e90}', '\u{1e90}'), ('\u{1e92}', '\u{1e92}'), ('\u{1e94}', - '\u{1e94}'), ('\u{1e9e}', '\u{1e9e}'), ('\u{1ea0}', '\u{1ea0}'), ('\u{1ea2}', '\u{1ea2}'), - ('\u{1ea4}', '\u{1ea4}'), ('\u{1ea6}', '\u{1ea6}'), ('\u{1ea8}', '\u{1ea8}'), ('\u{1eaa}', - '\u{1eaa}'), ('\u{1eac}', '\u{1eac}'), ('\u{1eae}', '\u{1eae}'), ('\u{1eb0}', '\u{1eb0}'), - ('\u{1eb2}', '\u{1eb2}'), ('\u{1eb4}', '\u{1eb4}'), ('\u{1eb6}', '\u{1eb6}'), ('\u{1eb8}', - '\u{1eb8}'), ('\u{1eba}', '\u{1eba}'), ('\u{1ebc}', '\u{1ebc}'), ('\u{1ebe}', '\u{1ebe}'), - ('\u{1ec0}', '\u{1ec0}'), ('\u{1ec2}', '\u{1ec2}'), ('\u{1ec4}', '\u{1ec4}'), ('\u{1ec6}', - '\u{1ec6}'), ('\u{1ec8}', '\u{1ec8}'), ('\u{1eca}', '\u{1eca}'), ('\u{1ecc}', '\u{1ecc}'), - ('\u{1ece}', '\u{1ece}'), ('\u{1ed0}', '\u{1ed0}'), ('\u{1ed2}', '\u{1ed2}'), ('\u{1ed4}', - '\u{1ed4}'), ('\u{1ed6}', '\u{1ed6}'), ('\u{1ed8}', '\u{1ed8}'), ('\u{1eda}', '\u{1eda}'), - ('\u{1edc}', '\u{1edc}'), ('\u{1ede}', '\u{1ede}'), ('\u{1ee0}', '\u{1ee0}'), ('\u{1ee2}', - '\u{1ee2}'), ('\u{1ee4}', '\u{1ee4}'), ('\u{1ee6}', '\u{1ee6}'), ('\u{1ee8}', '\u{1ee8}'), - ('\u{1eea}', '\u{1eea}'), ('\u{1eec}', '\u{1eec}'), ('\u{1eee}', '\u{1eee}'), ('\u{1ef0}', - '\u{1ef0}'), ('\u{1ef2}', '\u{1ef2}'), ('\u{1ef4}', '\u{1ef4}'), ('\u{1ef6}', '\u{1ef6}'), - ('\u{1ef8}', 
'\u{1ef8}'), ('\u{1efa}', '\u{1efa}'), ('\u{1efc}', '\u{1efc}'), ('\u{1efe}', - '\u{1efe}'), ('\u{1f08}', '\u{1f0f}'), ('\u{1f18}', '\u{1f1d}'), ('\u{1f28}', '\u{1f2f}'), - ('\u{1f38}', '\u{1f3f}'), ('\u{1f48}', '\u{1f4d}'), ('\u{1f59}', '\u{1f59}'), ('\u{1f5b}', - '\u{1f5b}'), ('\u{1f5d}', '\u{1f5d}'), ('\u{1f5f}', '\u{1f5f}'), ('\u{1f68}', '\u{1f6f}'), - ('\u{1fb8}', '\u{1fbb}'), ('\u{1fc8}', '\u{1fcb}'), ('\u{1fd8}', '\u{1fdb}'), ('\u{1fe8}', - '\u{1fec}'), ('\u{1ff8}', '\u{1ffb}'), ('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), - ('\u{210b}', '\u{210d}'), ('\u{2110}', '\u{2112}'), ('\u{2115}', '\u{2115}'), ('\u{2119}', - '\u{211d}'), ('\u{2124}', '\u{2124}'), ('\u{2126}', '\u{2126}'), ('\u{2128}', '\u{2128}'), - ('\u{212a}', '\u{212d}'), ('\u{2130}', '\u{2133}'), ('\u{213e}', '\u{213f}'), ('\u{2145}', - '\u{2145}'), ('\u{2160}', '\u{216f}'), ('\u{2183}', '\u{2183}'), ('\u{24b6}', '\u{24cf}'), - ('\u{2c00}', '\u{2c2e}'), ('\u{2c60}', '\u{2c60}'), ('\u{2c62}', '\u{2c64}'), ('\u{2c67}', - '\u{2c67}'), ('\u{2c69}', '\u{2c69}'), ('\u{2c6b}', '\u{2c6b}'), ('\u{2c6d}', '\u{2c70}'), - ('\u{2c72}', '\u{2c72}'), ('\u{2c75}', '\u{2c75}'), ('\u{2c7e}', '\u{2c80}'), ('\u{2c82}', - '\u{2c82}'), ('\u{2c84}', '\u{2c84}'), ('\u{2c86}', '\u{2c86}'), ('\u{2c88}', '\u{2c88}'), - ('\u{2c8a}', '\u{2c8a}'), ('\u{2c8c}', '\u{2c8c}'), ('\u{2c8e}', '\u{2c8e}'), ('\u{2c90}', - '\u{2c90}'), ('\u{2c92}', '\u{2c92}'), ('\u{2c94}', '\u{2c94}'), ('\u{2c96}', '\u{2c96}'), - ('\u{2c98}', '\u{2c98}'), ('\u{2c9a}', '\u{2c9a}'), ('\u{2c9c}', '\u{2c9c}'), ('\u{2c9e}', - '\u{2c9e}'), ('\u{2ca0}', '\u{2ca0}'), ('\u{2ca2}', '\u{2ca2}'), ('\u{2ca4}', '\u{2ca4}'), - ('\u{2ca6}', '\u{2ca6}'), ('\u{2ca8}', '\u{2ca8}'), ('\u{2caa}', '\u{2caa}'), ('\u{2cac}', - '\u{2cac}'), ('\u{2cae}', '\u{2cae}'), ('\u{2cb0}', '\u{2cb0}'), ('\u{2cb2}', '\u{2cb2}'), - ('\u{2cb4}', '\u{2cb4}'), ('\u{2cb6}', '\u{2cb6}'), ('\u{2cb8}', '\u{2cb8}'), ('\u{2cba}', - '\u{2cba}'), ('\u{2cbc}', '\u{2cbc}'), ('\u{2cbe}', '\u{2cbe}'), ('\u{2cc0}', '\u{2cc0}'), - ('\u{2cc2}', '\u{2cc2}'), ('\u{2cc4}', '\u{2cc4}'), ('\u{2cc6}', '\u{2cc6}'), ('\u{2cc8}', - '\u{2cc8}'), ('\u{2cca}', '\u{2cca}'), ('\u{2ccc}', '\u{2ccc}'), ('\u{2cce}', '\u{2cce}'), - ('\u{2cd0}', '\u{2cd0}'), ('\u{2cd2}', '\u{2cd2}'), ('\u{2cd4}', '\u{2cd4}'), ('\u{2cd6}', - '\u{2cd6}'), ('\u{2cd8}', '\u{2cd8}'), ('\u{2cda}', '\u{2cda}'), ('\u{2cdc}', '\u{2cdc}'), - ('\u{2cde}', '\u{2cde}'), ('\u{2ce0}', '\u{2ce0}'), ('\u{2ce2}', '\u{2ce2}'), ('\u{2ceb}', - '\u{2ceb}'), ('\u{2ced}', '\u{2ced}'), ('\u{2cf2}', '\u{2cf2}'), ('\u{a640}', '\u{a640}'), - ('\u{a642}', '\u{a642}'), ('\u{a644}', '\u{a644}'), ('\u{a646}', '\u{a646}'), ('\u{a648}', - '\u{a648}'), ('\u{a64a}', '\u{a64a}'), ('\u{a64c}', '\u{a64c}'), ('\u{a64e}', '\u{a64e}'), - ('\u{a650}', '\u{a650}'), ('\u{a652}', '\u{a652}'), ('\u{a654}', '\u{a654}'), ('\u{a656}', - '\u{a656}'), ('\u{a658}', '\u{a658}'), ('\u{a65a}', '\u{a65a}'), ('\u{a65c}', '\u{a65c}'), - ('\u{a65e}', '\u{a65e}'), ('\u{a660}', '\u{a660}'), ('\u{a662}', '\u{a662}'), ('\u{a664}', - '\u{a664}'), ('\u{a666}', '\u{a666}'), ('\u{a668}', '\u{a668}'), ('\u{a66a}', '\u{a66a}'), - ('\u{a66c}', '\u{a66c}'), ('\u{a680}', '\u{a680}'), ('\u{a682}', '\u{a682}'), ('\u{a684}', - '\u{a684}'), ('\u{a686}', '\u{a686}'), ('\u{a688}', '\u{a688}'), ('\u{a68a}', '\u{a68a}'), - ('\u{a68c}', '\u{a68c}'), ('\u{a68e}', '\u{a68e}'), ('\u{a690}', '\u{a690}'), ('\u{a692}', - '\u{a692}'), ('\u{a694}', '\u{a694}'), ('\u{a696}', '\u{a696}'), ('\u{a698}', '\u{a698}'), - ('\u{a69a}', '\u{a69a}'), 
('\u{a722}', '\u{a722}'), ('\u{a724}', '\u{a724}'), ('\u{a726}', - '\u{a726}'), ('\u{a728}', '\u{a728}'), ('\u{a72a}', '\u{a72a}'), ('\u{a72c}', '\u{a72c}'), - ('\u{a72e}', '\u{a72e}'), ('\u{a732}', '\u{a732}'), ('\u{a734}', '\u{a734}'), ('\u{a736}', - '\u{a736}'), ('\u{a738}', '\u{a738}'), ('\u{a73a}', '\u{a73a}'), ('\u{a73c}', '\u{a73c}'), - ('\u{a73e}', '\u{a73e}'), ('\u{a740}', '\u{a740}'), ('\u{a742}', '\u{a742}'), ('\u{a744}', - '\u{a744}'), ('\u{a746}', '\u{a746}'), ('\u{a748}', '\u{a748}'), ('\u{a74a}', '\u{a74a}'), - ('\u{a74c}', '\u{a74c}'), ('\u{a74e}', '\u{a74e}'), ('\u{a750}', '\u{a750}'), ('\u{a752}', - '\u{a752}'), ('\u{a754}', '\u{a754}'), ('\u{a756}', '\u{a756}'), ('\u{a758}', '\u{a758}'), - ('\u{a75a}', '\u{a75a}'), ('\u{a75c}', '\u{a75c}'), ('\u{a75e}', '\u{a75e}'), ('\u{a760}', - '\u{a760}'), ('\u{a762}', '\u{a762}'), ('\u{a764}', '\u{a764}'), ('\u{a766}', '\u{a766}'), - ('\u{a768}', '\u{a768}'), ('\u{a76a}', '\u{a76a}'), ('\u{a76c}', '\u{a76c}'), ('\u{a76e}', - '\u{a76e}'), ('\u{a779}', '\u{a779}'), ('\u{a77b}', '\u{a77b}'), ('\u{a77d}', '\u{a77e}'), - ('\u{a780}', '\u{a780}'), ('\u{a782}', '\u{a782}'), ('\u{a784}', '\u{a784}'), ('\u{a786}', - '\u{a786}'), ('\u{a78b}', '\u{a78b}'), ('\u{a78d}', '\u{a78d}'), ('\u{a790}', '\u{a790}'), - ('\u{a792}', '\u{a792}'), ('\u{a796}', '\u{a796}'), ('\u{a798}', '\u{a798}'), ('\u{a79a}', - '\u{a79a}'), ('\u{a79c}', '\u{a79c}'), ('\u{a79e}', '\u{a79e}'), ('\u{a7a0}', '\u{a7a0}'), - ('\u{a7a2}', '\u{a7a2}'), ('\u{a7a4}', '\u{a7a4}'), ('\u{a7a6}', '\u{a7a6}'), ('\u{a7a8}', - '\u{a7a8}'), ('\u{a7aa}', '\u{a7ad}'), ('\u{a7b0}', '\u{a7b4}'), ('\u{a7b6}', '\u{a7b6}'), - ('\u{ff21}', '\u{ff3a}'), ('\u{10400}', '\u{10427}'), ('\u{10c80}', '\u{10cb2}'), - ('\u{118a0}', '\u{118bf}'), ('\u{1d400}', '\u{1d419}'), ('\u{1d434}', '\u{1d44d}'), - ('\u{1d468}', '\u{1d481}'), ('\u{1d49c}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'), - ('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'), - ('\u{1d4ae}', '\u{1d4b5}'), ('\u{1d4d0}', '\u{1d4e9}'), ('\u{1d504}', '\u{1d505}'), - ('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'), ('\u{1d516}', '\u{1d51c}'), - ('\u{1d538}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'), ('\u{1d540}', '\u{1d544}'), - ('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'), ('\u{1d56c}', '\u{1d585}'), - ('\u{1d5a0}', '\u{1d5b9}'), ('\u{1d5d4}', '\u{1d5ed}'), ('\u{1d608}', '\u{1d621}'), - ('\u{1d63c}', '\u{1d655}'), ('\u{1d670}', '\u{1d689}'), ('\u{1d6a8}', '\u{1d6c0}'), - ('\u{1d6e2}', '\u{1d6fa}'), ('\u{1d71c}', '\u{1d734}'), ('\u{1d756}', '\u{1d76e}'), - ('\u{1d790}', '\u{1d7a8}'), ('\u{1d7ca}', '\u{1d7ca}'), ('\u{1f130}', '\u{1f149}'), - ('\u{1f150}', '\u{1f169}'), ('\u{1f170}', '\u{1f189}') - ]; + pub const Uppercase_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x0000000000000000, 0x0000000007fffffe, 0x0000000000000000, 0x000000007f7fffff, + 0xaa55555555555555, 0x2b555555555554aa, 0x11aed2d5b1dbced6, 0x55d255554aaaa490, + 0x6c05555555555555, 0x000000000000557a, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x8045000000000000, 0x00000ffbfffed740, 0xe6905555551c8000, + 0x0000ffffffffffff, 0x5555555500000000, 0x5555555555555401, 0x5555555555552aab, + 0xfffe555555555555, 0x00000000007fffff, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000 + ], + r2: [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 1, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 4, 4, 5, 4, 6, 7, 8, 9, 0, 0, 0, 0, 10, 11, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, + 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15, 16, 4, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 19, 0, + 20, 21, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 23, 0, 0, 0 + ], + r3: &[ + 0x0000000000000000, 0xffffffff00000000, 0x00000000000020bf, 0x003fffffffffffff, + 0x5555555555555555, 0x5555555540155555, 0xff00ff003f00ff00, 0x0000ff00aa003f00, + 0x0f00000000000000, 0x0f001f000f000f00, 0xc00f3d503e273884, 0x0000ffff00000020, + 0x0000000000000008, 0xffc0000000000000, 0x000000000000ffff, 0x00007fffffffffff, + 0xc025ea9d00000000, 0x0004280555555555, 0x0000155555555555, 0x0000000005555555, + 0x5554555400000000, 0x6a00555555555555, 0x005f3d5555452855, 0x07fffffe00000000 + ], + r4: [ + 0, 1, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 
3, 2, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2 + ], + r5: &[ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 20, 21, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0 + ], + r6: &[ + 0x0000000000000000, 0x000000ffffffffff, 0x0007ffffffffffff, 0xffffffff00000000, + 0xfff0000003ffffff, 0xffffff0000003fff, 0x003fde64d0000003, 0x000003ffffff0000, + 0x7b0000001fdfe7b0, 0xfffff0000001fc5f, 0x03ffffff0000003f, 0x00003ffffff00000, + 0xf0000003ffffff00, 0xffff0000003fffff, 0xffffff00000003ff, 0x07fffffc00000001, + 0x001ffffff0000000, 0x00007fffffc00000, 0x000001ffffff0000, 0x0000000000000400, + 0xffff000000000000, 0xffff03ffffff03ff, 0x00000000000003ff + ], + }; pub fn Uppercase(c: char) -> bool { - super::bsearch_range_table(c, Uppercase_table) + super::trie_lookup_range_table(c, Uppercase_table) } - pub const XID_Continue_table: &'static [(char, char)] = &[ - ('\u{30}', '\u{39}'), ('\u{41}', '\u{5a}'), ('\u{5f}', '\u{5f}'), ('\u{61}', '\u{7a}'), - ('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'), ('\u{b7}', '\u{b7}'), ('\u{ba}', '\u{ba}'), - ('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{2c1}'), ('\u{2c6}', '\u{2d1}'), - ('\u{2e0}', '\u{2e4}'), ('\u{2ec}', '\u{2ec}'), ('\u{2ee}', '\u{2ee}'), ('\u{300}', - '\u{374}'), ('\u{376}', '\u{377}'), ('\u{37b}', '\u{37d}'), ('\u{37f}', '\u{37f}'), - ('\u{386}', '\u{38a}'), ('\u{38c}', '\u{38c}'), ('\u{38e}', '\u{3a1}'), ('\u{3a3}', - '\u{3f5}'), ('\u{3f7}', '\u{481}'), ('\u{483}', '\u{487}'), ('\u{48a}', '\u{52f}'), - ('\u{531}', '\u{556}'), ('\u{559}', '\u{559}'), ('\u{561}', '\u{587}'), ('\u{591}', - '\u{5bd}'), ('\u{5bf}', '\u{5bf}'), ('\u{5c1}', '\u{5c2}'), ('\u{5c4}', '\u{5c5}'), - ('\u{5c7}', '\u{5c7}'), ('\u{5d0}', '\u{5ea}'), ('\u{5f0}', '\u{5f2}'), ('\u{610}', - '\u{61a}'), ('\u{620}', '\u{669}'), ('\u{66e}', '\u{6d3}'), ('\u{6d5}', '\u{6dc}'), - ('\u{6df}', '\u{6e8}'), ('\u{6ea}', '\u{6fc}'), ('\u{6ff}', '\u{6ff}'), ('\u{710}', - '\u{74a}'), ('\u{74d}', '\u{7b1}'), ('\u{7c0}', 
'\u{7f5}'), ('\u{7fa}', '\u{7fa}'), - ('\u{800}', '\u{82d}'), ('\u{840}', '\u{85b}'), ('\u{8a0}', '\u{8b4}'), ('\u{8e3}', - '\u{963}'), ('\u{966}', '\u{96f}'), ('\u{971}', '\u{983}'), ('\u{985}', '\u{98c}'), - ('\u{98f}', '\u{990}'), ('\u{993}', '\u{9a8}'), ('\u{9aa}', '\u{9b0}'), ('\u{9b2}', - '\u{9b2}'), ('\u{9b6}', '\u{9b9}'), ('\u{9bc}', '\u{9c4}'), ('\u{9c7}', '\u{9c8}'), - ('\u{9cb}', '\u{9ce}'), ('\u{9d7}', '\u{9d7}'), ('\u{9dc}', '\u{9dd}'), ('\u{9df}', - '\u{9e3}'), ('\u{9e6}', '\u{9f1}'), ('\u{a01}', '\u{a03}'), ('\u{a05}', '\u{a0a}'), - ('\u{a0f}', '\u{a10}'), ('\u{a13}', '\u{a28}'), ('\u{a2a}', '\u{a30}'), ('\u{a32}', - '\u{a33}'), ('\u{a35}', '\u{a36}'), ('\u{a38}', '\u{a39}'), ('\u{a3c}', '\u{a3c}'), - ('\u{a3e}', '\u{a42}'), ('\u{a47}', '\u{a48}'), ('\u{a4b}', '\u{a4d}'), ('\u{a51}', - '\u{a51}'), ('\u{a59}', '\u{a5c}'), ('\u{a5e}', '\u{a5e}'), ('\u{a66}', '\u{a75}'), - ('\u{a81}', '\u{a83}'), ('\u{a85}', '\u{a8d}'), ('\u{a8f}', '\u{a91}'), ('\u{a93}', - '\u{aa8}'), ('\u{aaa}', '\u{ab0}'), ('\u{ab2}', '\u{ab3}'), ('\u{ab5}', '\u{ab9}'), - ('\u{abc}', '\u{ac5}'), ('\u{ac7}', '\u{ac9}'), ('\u{acb}', '\u{acd}'), ('\u{ad0}', - '\u{ad0}'), ('\u{ae0}', '\u{ae3}'), ('\u{ae6}', '\u{aef}'), ('\u{af9}', '\u{af9}'), - ('\u{b01}', '\u{b03}'), ('\u{b05}', '\u{b0c}'), ('\u{b0f}', '\u{b10}'), ('\u{b13}', - '\u{b28}'), ('\u{b2a}', '\u{b30}'), ('\u{b32}', '\u{b33}'), ('\u{b35}', '\u{b39}'), - ('\u{b3c}', '\u{b44}'), ('\u{b47}', '\u{b48}'), ('\u{b4b}', '\u{b4d}'), ('\u{b56}', - '\u{b57}'), ('\u{b5c}', '\u{b5d}'), ('\u{b5f}', '\u{b63}'), ('\u{b66}', '\u{b6f}'), - ('\u{b71}', '\u{b71}'), ('\u{b82}', '\u{b83}'), ('\u{b85}', '\u{b8a}'), ('\u{b8e}', - '\u{b90}'), ('\u{b92}', '\u{b95}'), ('\u{b99}', '\u{b9a}'), ('\u{b9c}', '\u{b9c}'), - ('\u{b9e}', '\u{b9f}'), ('\u{ba3}', '\u{ba4}'), ('\u{ba8}', '\u{baa}'), ('\u{bae}', - '\u{bb9}'), ('\u{bbe}', '\u{bc2}'), ('\u{bc6}', '\u{bc8}'), ('\u{bca}', '\u{bcd}'), - ('\u{bd0}', '\u{bd0}'), ('\u{bd7}', '\u{bd7}'), ('\u{be6}', '\u{bef}'), ('\u{c00}', - '\u{c03}'), ('\u{c05}', '\u{c0c}'), ('\u{c0e}', '\u{c10}'), ('\u{c12}', '\u{c28}'), - ('\u{c2a}', '\u{c39}'), ('\u{c3d}', '\u{c44}'), ('\u{c46}', '\u{c48}'), ('\u{c4a}', - '\u{c4d}'), ('\u{c55}', '\u{c56}'), ('\u{c58}', '\u{c5a}'), ('\u{c60}', '\u{c63}'), - ('\u{c66}', '\u{c6f}'), ('\u{c81}', '\u{c83}'), ('\u{c85}', '\u{c8c}'), ('\u{c8e}', - '\u{c90}'), ('\u{c92}', '\u{ca8}'), ('\u{caa}', '\u{cb3}'), ('\u{cb5}', '\u{cb9}'), - ('\u{cbc}', '\u{cc4}'), ('\u{cc6}', '\u{cc8}'), ('\u{cca}', '\u{ccd}'), ('\u{cd5}', - '\u{cd6}'), ('\u{cde}', '\u{cde}'), ('\u{ce0}', '\u{ce3}'), ('\u{ce6}', '\u{cef}'), - ('\u{cf1}', '\u{cf2}'), ('\u{d01}', '\u{d03}'), ('\u{d05}', '\u{d0c}'), ('\u{d0e}', - '\u{d10}'), ('\u{d12}', '\u{d3a}'), ('\u{d3d}', '\u{d44}'), ('\u{d46}', '\u{d48}'), - ('\u{d4a}', '\u{d4e}'), ('\u{d57}', '\u{d57}'), ('\u{d5f}', '\u{d63}'), ('\u{d66}', - '\u{d6f}'), ('\u{d7a}', '\u{d7f}'), ('\u{d82}', '\u{d83}'), ('\u{d85}', '\u{d96}'), - ('\u{d9a}', '\u{db1}'), ('\u{db3}', '\u{dbb}'), ('\u{dbd}', '\u{dbd}'), ('\u{dc0}', - '\u{dc6}'), ('\u{dca}', '\u{dca}'), ('\u{dcf}', '\u{dd4}'), ('\u{dd6}', '\u{dd6}'), - ('\u{dd8}', '\u{ddf}'), ('\u{de6}', '\u{def}'), ('\u{df2}', '\u{df3}'), ('\u{e01}', - '\u{e3a}'), ('\u{e40}', '\u{e4e}'), ('\u{e50}', '\u{e59}'), ('\u{e81}', '\u{e82}'), - ('\u{e84}', '\u{e84}'), ('\u{e87}', '\u{e88}'), ('\u{e8a}', '\u{e8a}'), ('\u{e8d}', - '\u{e8d}'), ('\u{e94}', '\u{e97}'), ('\u{e99}', '\u{e9f}'), ('\u{ea1}', '\u{ea3}'), - ('\u{ea5}', '\u{ea5}'), ('\u{ea7}', '\u{ea7}'), ('\u{eaa}', '\u{eab}'), 
('\u{ead}', - '\u{eb9}'), ('\u{ebb}', '\u{ebd}'), ('\u{ec0}', '\u{ec4}'), ('\u{ec6}', '\u{ec6}'), - ('\u{ec8}', '\u{ecd}'), ('\u{ed0}', '\u{ed9}'), ('\u{edc}', '\u{edf}'), ('\u{f00}', - '\u{f00}'), ('\u{f18}', '\u{f19}'), ('\u{f20}', '\u{f29}'), ('\u{f35}', '\u{f35}'), - ('\u{f37}', '\u{f37}'), ('\u{f39}', '\u{f39}'), ('\u{f3e}', '\u{f47}'), ('\u{f49}', - '\u{f6c}'), ('\u{f71}', '\u{f84}'), ('\u{f86}', '\u{f97}'), ('\u{f99}', '\u{fbc}'), - ('\u{fc6}', '\u{fc6}'), ('\u{1000}', '\u{1049}'), ('\u{1050}', '\u{109d}'), ('\u{10a0}', - '\u{10c5}'), ('\u{10c7}', '\u{10c7}'), ('\u{10cd}', '\u{10cd}'), ('\u{10d0}', '\u{10fa}'), - ('\u{10fc}', '\u{1248}'), ('\u{124a}', '\u{124d}'), ('\u{1250}', '\u{1256}'), ('\u{1258}', - '\u{1258}'), ('\u{125a}', '\u{125d}'), ('\u{1260}', '\u{1288}'), ('\u{128a}', '\u{128d}'), - ('\u{1290}', '\u{12b0}'), ('\u{12b2}', '\u{12b5}'), ('\u{12b8}', '\u{12be}'), ('\u{12c0}', - '\u{12c0}'), ('\u{12c2}', '\u{12c5}'), ('\u{12c8}', '\u{12d6}'), ('\u{12d8}', '\u{1310}'), - ('\u{1312}', '\u{1315}'), ('\u{1318}', '\u{135a}'), ('\u{135d}', '\u{135f}'), ('\u{1369}', - '\u{1371}'), ('\u{1380}', '\u{138f}'), ('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), - ('\u{1401}', '\u{166c}'), ('\u{166f}', '\u{167f}'), ('\u{1681}', '\u{169a}'), ('\u{16a0}', - '\u{16ea}'), ('\u{16ee}', '\u{16f8}'), ('\u{1700}', '\u{170c}'), ('\u{170e}', '\u{1714}'), - ('\u{1720}', '\u{1734}'), ('\u{1740}', '\u{1753}'), ('\u{1760}', '\u{176c}'), ('\u{176e}', - '\u{1770}'), ('\u{1772}', '\u{1773}'), ('\u{1780}', '\u{17d3}'), ('\u{17d7}', '\u{17d7}'), - ('\u{17dc}', '\u{17dd}'), ('\u{17e0}', '\u{17e9}'), ('\u{180b}', '\u{180d}'), ('\u{1810}', - '\u{1819}'), ('\u{1820}', '\u{1877}'), ('\u{1880}', '\u{18aa}'), ('\u{18b0}', '\u{18f5}'), - ('\u{1900}', '\u{191e}'), ('\u{1920}', '\u{192b}'), ('\u{1930}', '\u{193b}'), ('\u{1946}', - '\u{196d}'), ('\u{1970}', '\u{1974}'), ('\u{1980}', '\u{19ab}'), ('\u{19b0}', '\u{19c9}'), - ('\u{19d0}', '\u{19da}'), ('\u{1a00}', '\u{1a1b}'), ('\u{1a20}', '\u{1a5e}'), ('\u{1a60}', - '\u{1a7c}'), ('\u{1a7f}', '\u{1a89}'), ('\u{1a90}', '\u{1a99}'), ('\u{1aa7}', '\u{1aa7}'), - ('\u{1ab0}', '\u{1abd}'), ('\u{1b00}', '\u{1b4b}'), ('\u{1b50}', '\u{1b59}'), ('\u{1b6b}', - '\u{1b73}'), ('\u{1b80}', '\u{1bf3}'), ('\u{1c00}', '\u{1c37}'), ('\u{1c40}', '\u{1c49}'), - ('\u{1c4d}', '\u{1c7d}'), ('\u{1cd0}', '\u{1cd2}'), ('\u{1cd4}', '\u{1cf6}'), ('\u{1cf8}', - '\u{1cf9}'), ('\u{1d00}', '\u{1df5}'), ('\u{1dfc}', '\u{1f15}'), ('\u{1f18}', '\u{1f1d}'), - ('\u{1f20}', '\u{1f45}'), ('\u{1f48}', '\u{1f4d}'), ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', - '\u{1f59}'), ('\u{1f5b}', '\u{1f5b}'), ('\u{1f5d}', '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), - ('\u{1f80}', '\u{1fb4}'), ('\u{1fb6}', '\u{1fbc}'), ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', - '\u{1fc4}'), ('\u{1fc6}', '\u{1fcc}'), ('\u{1fd0}', '\u{1fd3}'), ('\u{1fd6}', '\u{1fdb}'), - ('\u{1fe0}', '\u{1fec}'), ('\u{1ff2}', '\u{1ff4}'), ('\u{1ff6}', '\u{1ffc}'), ('\u{203f}', - '\u{2040}'), ('\u{2054}', '\u{2054}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'), - ('\u{2090}', '\u{209c}'), ('\u{20d0}', '\u{20dc}'), ('\u{20e1}', '\u{20e1}'), ('\u{20e5}', - '\u{20f0}'), ('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), ('\u{210a}', '\u{2113}'), - ('\u{2115}', '\u{2115}'), ('\u{2118}', '\u{211d}'), ('\u{2124}', '\u{2124}'), ('\u{2126}', - '\u{2126}'), ('\u{2128}', '\u{2128}'), ('\u{212a}', '\u{2139}'), ('\u{213c}', '\u{213f}'), - ('\u{2145}', '\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2160}', '\u{2188}'), ('\u{2c00}', - '\u{2c2e}'), ('\u{2c30}', '\u{2c5e}'), 
('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}', '\u{2cf3}'), - ('\u{2d00}', '\u{2d25}'), ('\u{2d27}', '\u{2d27}'), ('\u{2d2d}', '\u{2d2d}'), ('\u{2d30}', - '\u{2d67}'), ('\u{2d6f}', '\u{2d6f}'), ('\u{2d7f}', '\u{2d96}'), ('\u{2da0}', '\u{2da6}'), - ('\u{2da8}', '\u{2dae}'), ('\u{2db0}', '\u{2db6}'), ('\u{2db8}', '\u{2dbe}'), ('\u{2dc0}', - '\u{2dc6}'), ('\u{2dc8}', '\u{2dce}'), ('\u{2dd0}', '\u{2dd6}'), ('\u{2dd8}', '\u{2dde}'), - ('\u{2de0}', '\u{2dff}'), ('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{302f}'), ('\u{3031}', - '\u{3035}'), ('\u{3038}', '\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{3099}', '\u{309a}'), - ('\u{309d}', '\u{309f}'), ('\u{30a1}', '\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', - '\u{312d}'), ('\u{3131}', '\u{318e}'), ('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), - ('\u{3400}', '\u{4db5}'), ('\u{4e00}', '\u{9fd5}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', - '\u{a4fd}'), ('\u{a500}', '\u{a60c}'), ('\u{a610}', '\u{a62b}'), ('\u{a640}', '\u{a66f}'), - ('\u{a674}', '\u{a67d}'), ('\u{a67f}', '\u{a6f1}'), ('\u{a717}', '\u{a71f}'), ('\u{a722}', - '\u{a788}'), ('\u{a78b}', '\u{a7ad}'), ('\u{a7b0}', '\u{a7b7}'), ('\u{a7f7}', '\u{a827}'), - ('\u{a840}', '\u{a873}'), ('\u{a880}', '\u{a8c4}'), ('\u{a8d0}', '\u{a8d9}'), ('\u{a8e0}', - '\u{a8f7}'), ('\u{a8fb}', '\u{a8fb}'), ('\u{a8fd}', '\u{a8fd}'), ('\u{a900}', '\u{a92d}'), - ('\u{a930}', '\u{a953}'), ('\u{a960}', '\u{a97c}'), ('\u{a980}', '\u{a9c0}'), ('\u{a9cf}', - '\u{a9d9}'), ('\u{a9e0}', '\u{a9fe}'), ('\u{aa00}', '\u{aa36}'), ('\u{aa40}', '\u{aa4d}'), - ('\u{aa50}', '\u{aa59}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}', '\u{aac2}'), ('\u{aadb}', - '\u{aadd}'), ('\u{aae0}', '\u{aaef}'), ('\u{aaf2}', '\u{aaf6}'), ('\u{ab01}', '\u{ab06}'), - ('\u{ab09}', '\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}', - '\u{ab2e}'), ('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab65}'), ('\u{ab70}', '\u{abea}'), - ('\u{abec}', '\u{abed}'), ('\u{abf0}', '\u{abf9}'), ('\u{ac00}', '\u{d7a3}'), ('\u{d7b0}', - '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}', '\u{fa6d}'), ('\u{fa70}', '\u{fad9}'), - ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'), ('\u{fb1d}', '\u{fb28}'), ('\u{fb2a}', - '\u{fb36}'), ('\u{fb38}', '\u{fb3c}'), ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), - ('\u{fb43}', '\u{fb44}'), ('\u{fb46}', '\u{fbb1}'), ('\u{fbd3}', '\u{fc5d}'), ('\u{fc64}', - '\u{fd3d}'), ('\u{fd50}', '\u{fd8f}'), ('\u{fd92}', '\u{fdc7}'), ('\u{fdf0}', '\u{fdf9}'), - ('\u{fe00}', '\u{fe0f}'), ('\u{fe20}', '\u{fe2f}'), ('\u{fe33}', '\u{fe34}'), ('\u{fe4d}', - '\u{fe4f}'), ('\u{fe71}', '\u{fe71}'), ('\u{fe73}', '\u{fe73}'), ('\u{fe77}', '\u{fe77}'), - ('\u{fe79}', '\u{fe79}'), ('\u{fe7b}', '\u{fe7b}'), ('\u{fe7d}', '\u{fe7d}'), ('\u{fe7f}', - '\u{fefc}'), ('\u{ff10}', '\u{ff19}'), ('\u{ff21}', '\u{ff3a}'), ('\u{ff3f}', '\u{ff3f}'), - ('\u{ff41}', '\u{ff5a}'), ('\u{ff66}', '\u{ffbe}'), ('\u{ffc2}', '\u{ffc7}'), ('\u{ffca}', - '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'), ('\u{10000}', '\u{1000b}'), - ('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'), ('\u{1003c}', '\u{1003d}'), - ('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'), ('\u{10080}', '\u{100fa}'), - ('\u{10140}', '\u{10174}'), ('\u{101fd}', '\u{101fd}'), ('\u{10280}', '\u{1029c}'), - ('\u{102a0}', '\u{102d0}'), ('\u{102e0}', '\u{102e0}'), ('\u{10300}', '\u{1031f}'), - ('\u{10330}', '\u{1034a}'), ('\u{10350}', '\u{1037a}'), ('\u{10380}', '\u{1039d}'), - ('\u{103a0}', '\u{103c3}'), ('\u{103c8}', '\u{103cf}'), ('\u{103d1}', '\u{103d5}'), - 
('\u{10400}', '\u{1049d}'), ('\u{104a0}', '\u{104a9}'), ('\u{10500}', '\u{10527}'), - ('\u{10530}', '\u{10563}'), ('\u{10600}', '\u{10736}'), ('\u{10740}', '\u{10755}'), - ('\u{10760}', '\u{10767}'), ('\u{10800}', '\u{10805}'), ('\u{10808}', '\u{10808}'), - ('\u{1080a}', '\u{10835}'), ('\u{10837}', '\u{10838}'), ('\u{1083c}', '\u{1083c}'), - ('\u{1083f}', '\u{10855}'), ('\u{10860}', '\u{10876}'), ('\u{10880}', '\u{1089e}'), - ('\u{108e0}', '\u{108f2}'), ('\u{108f4}', '\u{108f5}'), ('\u{10900}', '\u{10915}'), - ('\u{10920}', '\u{10939}'), ('\u{10980}', '\u{109b7}'), ('\u{109be}', '\u{109bf}'), - ('\u{10a00}', '\u{10a03}'), ('\u{10a05}', '\u{10a06}'), ('\u{10a0c}', '\u{10a13}'), - ('\u{10a15}', '\u{10a17}'), ('\u{10a19}', '\u{10a33}'), ('\u{10a38}', '\u{10a3a}'), - ('\u{10a3f}', '\u{10a3f}'), ('\u{10a60}', '\u{10a7c}'), ('\u{10a80}', '\u{10a9c}'), - ('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}', '\u{10ae6}'), ('\u{10b00}', '\u{10b35}'), - ('\u{10b40}', '\u{10b55}'), ('\u{10b60}', '\u{10b72}'), ('\u{10b80}', '\u{10b91}'), - ('\u{10c00}', '\u{10c48}'), ('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}', '\u{10cf2}'), - ('\u{11000}', '\u{11046}'), ('\u{11066}', '\u{1106f}'), ('\u{1107f}', '\u{110ba}'), - ('\u{110d0}', '\u{110e8}'), ('\u{110f0}', '\u{110f9}'), ('\u{11100}', '\u{11134}'), - ('\u{11136}', '\u{1113f}'), ('\u{11150}', '\u{11173}'), ('\u{11176}', '\u{11176}'), - ('\u{11180}', '\u{111c4}'), ('\u{111ca}', '\u{111cc}'), ('\u{111d0}', '\u{111da}'), - ('\u{111dc}', '\u{111dc}'), ('\u{11200}', '\u{11211}'), ('\u{11213}', '\u{11237}'), - ('\u{11280}', '\u{11286}'), ('\u{11288}', '\u{11288}'), ('\u{1128a}', '\u{1128d}'), - ('\u{1128f}', '\u{1129d}'), ('\u{1129f}', '\u{112a8}'), ('\u{112b0}', '\u{112ea}'), - ('\u{112f0}', '\u{112f9}'), ('\u{11300}', '\u{11303}'), ('\u{11305}', '\u{1130c}'), - ('\u{1130f}', '\u{11310}'), ('\u{11313}', '\u{11328}'), ('\u{1132a}', '\u{11330}'), - ('\u{11332}', '\u{11333}'), ('\u{11335}', '\u{11339}'), ('\u{1133c}', '\u{11344}'), - ('\u{11347}', '\u{11348}'), ('\u{1134b}', '\u{1134d}'), ('\u{11350}', '\u{11350}'), - ('\u{11357}', '\u{11357}'), ('\u{1135d}', '\u{11363}'), ('\u{11366}', '\u{1136c}'), - ('\u{11370}', '\u{11374}'), ('\u{11480}', '\u{114c5}'), ('\u{114c7}', '\u{114c7}'), - ('\u{114d0}', '\u{114d9}'), ('\u{11580}', '\u{115b5}'), ('\u{115b8}', '\u{115c0}'), - ('\u{115d8}', '\u{115dd}'), ('\u{11600}', '\u{11640}'), ('\u{11644}', '\u{11644}'), - ('\u{11650}', '\u{11659}'), ('\u{11680}', '\u{116b7}'), ('\u{116c0}', '\u{116c9}'), - ('\u{11700}', '\u{11719}'), ('\u{1171d}', '\u{1172b}'), ('\u{11730}', '\u{11739}'), - ('\u{118a0}', '\u{118e9}'), ('\u{118ff}', '\u{118ff}'), ('\u{11ac0}', '\u{11af8}'), - ('\u{12000}', '\u{12399}'), ('\u{12400}', '\u{1246e}'), ('\u{12480}', '\u{12543}'), - ('\u{13000}', '\u{1342e}'), ('\u{14400}', '\u{14646}'), ('\u{16800}', '\u{16a38}'), - ('\u{16a40}', '\u{16a5e}'), ('\u{16a60}', '\u{16a69}'), ('\u{16ad0}', '\u{16aed}'), - ('\u{16af0}', '\u{16af4}'), ('\u{16b00}', '\u{16b36}'), ('\u{16b40}', '\u{16b43}'), - ('\u{16b50}', '\u{16b59}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', '\u{16b8f}'), - ('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f7e}'), ('\u{16f8f}', '\u{16f9f}'), - ('\u{1b000}', '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', '\u{1bc7c}'), - ('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'), ('\u{1bc9d}', '\u{1bc9e}'), - ('\u{1d165}', '\u{1d169}'), ('\u{1d16d}', '\u{1d172}'), ('\u{1d17b}', '\u{1d182}'), - ('\u{1d185}', '\u{1d18b}'), ('\u{1d1aa}', '\u{1d1ad}'), ('\u{1d242}', '\u{1d244}'), - ('\u{1d400}', '\u{1d454}'), 
('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'), - ('\u{1d4a2}', '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'), - ('\u{1d4ae}', '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'), ('\u{1d4bd}', '\u{1d4c3}'), - ('\u{1d4c5}', '\u{1d505}'), ('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'), - ('\u{1d516}', '\u{1d51c}'), ('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'), - ('\u{1d540}', '\u{1d544}'), ('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'), - ('\u{1d552}', '\u{1d6a5}'), ('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'), - ('\u{1d6dc}', '\u{1d6fa}'), ('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d734}'), - ('\u{1d736}', '\u{1d74e}'), ('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', '\u{1d788}'), - ('\u{1d78a}', '\u{1d7a8}'), ('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'), - ('\u{1d7ce}', '\u{1d7ff}'), ('\u{1da00}', '\u{1da36}'), ('\u{1da3b}', '\u{1da6c}'), - ('\u{1da75}', '\u{1da75}'), ('\u{1da84}', '\u{1da84}'), ('\u{1da9b}', '\u{1da9f}'), - ('\u{1daa1}', '\u{1daaf}'), ('\u{1e800}', '\u{1e8c4}'), ('\u{1e8d0}', '\u{1e8d6}'), - ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}', '\u{1ee1f}'), ('\u{1ee21}', '\u{1ee22}'), - ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}', '\u{1ee27}'), ('\u{1ee29}', '\u{1ee32}'), - ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}', '\u{1ee39}'), ('\u{1ee3b}', '\u{1ee3b}'), - ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}', '\u{1ee47}'), ('\u{1ee49}', '\u{1ee49}'), - ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}', '\u{1ee4f}'), ('\u{1ee51}', '\u{1ee52}'), - ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}', '\u{1ee57}'), ('\u{1ee59}', '\u{1ee59}'), - ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}', '\u{1ee5d}'), ('\u{1ee5f}', '\u{1ee5f}'), - ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}', '\u{1ee64}'), ('\u{1ee67}', '\u{1ee6a}'), - ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}', '\u{1ee77}'), ('\u{1ee79}', '\u{1ee7c}'), - ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}', '\u{1ee89}'), ('\u{1ee8b}', '\u{1ee9b}'), - ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}', '\u{1eea9}'), ('\u{1eeab}', '\u{1eebb}'), - ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}', '\u{2b734}'), ('\u{2b740}', '\u{2b81d}'), - ('\u{2b820}', '\u{2cea1}'), ('\u{2f800}', '\u{2fa1d}'), ('\u{e0100}', '\u{e01ef}') - ]; + pub const XID_Continue_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x03ff000000000000, 0x07fffffe87fffffe, 0x04a0040000000000, 0xff7fffffff7fffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0x0000501f0003ffc3, + 0xffffffffffffffff, 0xb8dfffffffffffff, 0xfffffffbffffd7c0, 0xffbfffffffffffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xfffffffffffffcfb, 0xffffffffffffffff, + 0xfffeffffffffffff, 0xfffffffe027fffff, 0xbffffffffffe00ff, 0x000707ffffff00b6, + 0xffffffff07ff0000, 0xffffc3ffffffffff, 0xffffffffffffffff, 0x9ffffdff9fefffff, + 0xffffffffffff0000, 0xffffffffffffe7ff, 0x0003ffffffffffff, 0x043fffffffffffff + ], + r2: [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, + 24, 25, 26, 27, 28, 29, 30, 31, 4, 32, 33, 34, 4, 4, 4, 4, 4, 35, 36, 37, 38, 39, 40, + 41, 42, 4, 4, 4, 4, 4, 4, 4, 4, 43, 44, 45, 46, 47, 4, 48, 49, 50, 51, 52, 53, 54, 55, + 56, 57, 58, 59, 60, 4, 61, 4, 62, 50, 63, 60, 64, 4, 4, 4, 65, 4, 4, 4, 4, 66, 67, 68, + 69, 70, 71, 72, 73, 74, 75, 76, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, + 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, + 60, 60, 60, 60, 60, 77, 78, 4, 79, 80, 81, 82, 83, 60, 
60, 60, 60, 60, 60, 60, 60, 84, + 42, 85, 86, 87, 4, 88, 89, 60, 60, 60, 60, 60, 60, 60, 60, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 52, 60, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 90, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 91, 92, 4, 4, 4, 4, 93, 94, 4, 95, 96, 4, 97, 98, 99, 62, 4, 100, 101, + 102, 4, 103, 104, 105, 4, 106, 107, 108, 4, 109, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 110, 111, 60, 60, 60, 60, 60, 60, 60, + 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, + 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, + 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, + 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, + 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, + 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 60, 4, 4, 4, 4, 4, 101, 4, 112, + 113, 114, 95, 115, 4, 116, 4, 4, 117, 118, 119, 120, 121, 122, 4, 123, 124, 125, 126, + 127 + ], + r3: &[ + 0x00003fffffffffff, 0x000000000fffffff, 0x001fffff00000000, 0xfffffff800000000, + 0xffffffffffffffff, 0xfffeffcfffffffff, 0xf3c5fdfffff99fef, 0x0003ffcfb080799f, + 0xd36dfdfffff987ee, 0x003fffc05e023987, 0xf3edfdfffffbbfee, 0x0200ffcf00013bbf, + 0xf3edfdfffff99fee, 0x0002ffcfb0c0399f, 0xc3ffc718d63dc7ec, 0x0000ffc000813dc7, + 0xe3fffdfffffddfef, 0x0000ffcf07603ddf, 0xf3effdfffffddfee, 0x0006ffcf40603ddf, + 0xe7fffffffffddfee, 0xfc00ffcf80807ddf, 0x2ffbfffffc7fffec, 0x000cffc0ff5f847f, + 0x07fffffffffffffe, 0x0000000003ff7fff, 0x3bffecaefef02596, 0x00000000f3ff3f5f, + 0xc2a003ff03000001, 0xfffe1ffffffffeff, 0x1ffffffffeffffdf, 0x0000000000000040, + 0xffffffffffff03ff, 0xffffffff3fffffff, 0xf7ffffffffff20bf, 
0xffffffff3d7f3dff, + 0x7f3dffffffff3dff, 0xffffffffff7fff3d, 0xffffffffff3dffff, 0x0003fe00e7ffffff, + 0xffffffff0000ffff, 0x3f3fffffffffffff, 0xfffffffffffffffe, 0xffff9fffffffffff, + 0xffffffff07fffffe, 0x01ffc7ffffffffff, 0x001fffff001fdfff, 0x000ddfff000fffff, + 0x000003ff308fffff, 0xffffffff03ff3800, 0x00ffffffffffffff, 0xffff07ffffffffff, + 0x003fffffffffffff, 0x0fff0fff7fffffff, 0x001f3fffffffffc0, 0xffff0fffffffffff, + 0x0000000007ff03ff, 0xffffffff0fffffff, 0x9fffffff7fffffff, 0x3fff008003ff03ff, + 0x0000000000000000, 0x000ff80003ff0fff, 0x000fffffffffffff, 0x3fffffffffffe3ff, + 0x037ffffffff70000, 0xf03fffffffffffff, 0xffffffff3f3fffff, 0x3fffffffaaff3f3f, + 0x5fdfffffffffffff, 0x1fdc1fff0fcf1fdc, 0x8000000000000000, 0x8002000000100001, + 0x000000001fff0000, 0x0001ffe21fff0000, 0xf3fffd503f2ffc84, 0xffffffff000043e0, + 0x00000000000001ff, 0xffff7fffffffffff, 0xffffffff7fffffff, 0x000ff81fffffffff, + 0xffff20bfffffffff, 0x800080ffffffffff, 0x7f7f7f7f007fffff, 0xffffffff7f7f7f7f, + 0x1f3efffe000000e0, 0xfffffffee67fffff, 0xf7ffffffffffffff, 0xfffe3fffffffffe0, + 0x07ffffff00007fff, 0xffff000000000000, 0x00000000003fffff, 0x0000000000001fff, + 0x3fffffffffff0000, 0x00000fffffff1fff, 0xbff0ffffffffffff, 0x0003ffffffffffff, + 0xfffffffcff800000, 0x00ff3ffffffff9ff, 0xff80000000000000, 0x000000ffffffffff, + 0x28ffffff03ff001f, 0xffff3fffffffffff, 0x1fffffff000fffff, 0x7fffffff03ff8001, + 0x007fffffffffffff, 0xfc7fffff03ff3fff, 0x007cffff38000007, 0xffff7f7f007e7e7e, + 0xffff003ff7ffffff, 0x03ff37ffffffffff, 0xffff000fffffffff, 0x0ffffffffffff87f, + 0x0000000003ffffff, 0x5f7ffdffe0f8007f, 0xffffffffffffffdb, 0xfffffffffff80000, + 0xfffffff03fffffff, 0x3fffffffffffffff, 0xffffffffffff0000, 0xfffffffffffcffff, + 0x03ff0000000000ff, 0x0018ffff0000ffff, 0xaa8a00000000e000, 0x1fffffffffffffff, + 0x87fffffe03ff0000, 0xffffffc007fffffe, 0x7fffffffffffffff, 0x000000001cfcfcfc + ], + r4: [ + 0, 1, 2, 3, 4, 5, 6, 5, 5, 5, 5, 7, 5, 8, 9, 5, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, + 11, 12, 13, 5, 5, 14, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 15, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 + ], + r5: &[ + 0, 1, 2, 3, 4, 5, 4, 6, 4, 4, 7, 8, 9, 10, 11, 12, 2, 2, 13, 4, 14, 15, 4, 4, 2, 2, 2, + 2, 16, 17, 4, 4, 18, 19, 20, 21, 22, 4, 23, 4, 24, 25, 26, 27, 28, 29, 30, 4, 2, 31, 32, + 32, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 33, 3, 34, 35, 36, 2, 37, 38, 4, 39, 40, 41, + 42, 4, 4, 4, 4, 2, 43, 4, 4, 44, 45, 2, 46, 47, 48, 49, 4, 4, 4, 4, 4, 50, 51, 4, 4, 4, + 4, 4, 4, 4, 52, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 53, 4, 2, 54, 2, 2, 2, 55, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 54, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 
4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 56, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, + 2, 2, 2, 52, 57, 4, 58, 16, 59, 60, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 61, 62, 4, + 63, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 64, 65, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 66, 67, 4, 4, 68, 4, 4, 4, 4, 4, 4, 2, 69, 70, 71, 72, + 73, 2, 2, 2, 2, 74, 75, 76, 77, 78, 79, 4, 4, 4, 4, 4, 4, 4, 4, 80, 81, 82, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 83, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 84, 85, 86, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 87, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 5, 2, 2, 2, 11, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 88, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, + 2, 89, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 2, 2, 2, 90, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4 + ], + r6: &[ + 0xb7ffff7fffffefff, 0x000000003fff3fff, 0xffffffffffffffff, 0x07ffffffffffffff, + 0x0000000000000000, 0x001fffffffffffff, 0x2000000000000000, 0xffffffff1fffffff, + 0x000000010001ffff, 0xffff0000ffffffff, 0x07ffffffffff07ff, 0xffffffff3fffffff, + 0x00000000003eff0f, 0x000003ff3fffffff, 0xffff00ffffffffff, 0x0000000fffffffff, + 0x007fffffffffffff, 0x000000ff003fffff, 0x91bffffffffffd3f, 0x007fffff003fffff, + 0x000000007fffffff, 0x0037ffff00000000, 0x03ffffff003fffff, 0xc0ffffffffffffff, + 0x870ffffffeeff06f, 0x1fffffff00000000, 0x000000001fffffff, 0x0000007ffffffeff, + 0x003fffffffffffff, 0x0007ffff003fffff, 0x000000000003ffff, 0x00000000000001ff, + 0x0007ffffffffffff, 0x8000ffc00000007f, 0x03ff01ffffff0000, 0xffdfffffffffffff, + 0x004fffffffff0000, 0x0000000017ff1c1f, 0x00fffffffffbffff, 0xffff01ffbfffbd7f, + 0x03ff07ffffffffff, 0xf3edfdfffff99fef, 0x001f1fcfe081399f, 0x0000000003ff00bf, + 0xff3fffffffffffff, 0x000000003f000001, 0x0000000003ff0011, 0x00ffffffffffffff, + 0x00000000000003ff, 0x03ff0fffe3ffffff, 0xffffffff00000000, 0x800003ffffffffff, + 0x01ffffffffffffff, 0x0000000003ffffff, 0x00007fffffffffff, 
0x000000000000000f, + 0x000000000000007f, 0x000003ff7fffffff, 0x001f3fffffff0000, 0xe0fffff803ff000f, + 0x000000000000ffff, 0x7fffffffffff001f, 0x00000000ffff8000, 0x0000000000000003, + 0x1fff07ffffffffff, 0x0000000063ff01ff, 0xf807e3e000000000, 0x00003c0000000fe7, + 0x000000000000001c, 0xffffffffffdfffff, 0xebffde64dfffffff, 0xffffffffffffffef, + 0x7bffffffdfdfe7bf, 0xfffffffffffdfc5f, 0xffffff3fffffffff, 0xf7fffffff7fffffd, + 0xffdfffffffdfffff, 0xffff7fffffff7fff, 0xfffffdfffffffdff, 0xffffffffffffcff7, + 0xf87fffffffffffff, 0x00201fffffffffff, 0x0000fffef8000010, 0x00000000007f001f, + 0x0af7fe96ffffffef, 0x5ef7f796aa96ea84, 0x0ffffbee0ffffbff, 0x00000000007fffff, + 0x00000003ffffffff, 0x000000003fffffff, 0x0000ffffffffffff + ], + }; pub fn XID_Continue(c: char) -> bool { - super::bsearch_range_table(c, XID_Continue_table) + super::trie_lookup_range_table(c, XID_Continue_table) } - pub const XID_Start_table: &'static [(char, char)] = &[ - ('\u{41}', '\u{5a}'), ('\u{61}', '\u{7a}'), ('\u{aa}', '\u{aa}'), ('\u{b5}', '\u{b5}'), - ('\u{ba}', '\u{ba}'), ('\u{c0}', '\u{d6}'), ('\u{d8}', '\u{f6}'), ('\u{f8}', '\u{2c1}'), - ('\u{2c6}', '\u{2d1}'), ('\u{2e0}', '\u{2e4}'), ('\u{2ec}', '\u{2ec}'), ('\u{2ee}', - '\u{2ee}'), ('\u{370}', '\u{374}'), ('\u{376}', '\u{377}'), ('\u{37b}', '\u{37d}'), - ('\u{37f}', '\u{37f}'), ('\u{386}', '\u{386}'), ('\u{388}', '\u{38a}'), ('\u{38c}', - '\u{38c}'), ('\u{38e}', '\u{3a1}'), ('\u{3a3}', '\u{3f5}'), ('\u{3f7}', '\u{481}'), - ('\u{48a}', '\u{52f}'), ('\u{531}', '\u{556}'), ('\u{559}', '\u{559}'), ('\u{561}', - '\u{587}'), ('\u{5d0}', '\u{5ea}'), ('\u{5f0}', '\u{5f2}'), ('\u{620}', '\u{64a}'), - ('\u{66e}', '\u{66f}'), ('\u{671}', '\u{6d3}'), ('\u{6d5}', '\u{6d5}'), ('\u{6e5}', - '\u{6e6}'), ('\u{6ee}', '\u{6ef}'), ('\u{6fa}', '\u{6fc}'), ('\u{6ff}', '\u{6ff}'), - ('\u{710}', '\u{710}'), ('\u{712}', '\u{72f}'), ('\u{74d}', '\u{7a5}'), ('\u{7b1}', - '\u{7b1}'), ('\u{7ca}', '\u{7ea}'), ('\u{7f4}', '\u{7f5}'), ('\u{7fa}', '\u{7fa}'), - ('\u{800}', '\u{815}'), ('\u{81a}', '\u{81a}'), ('\u{824}', '\u{824}'), ('\u{828}', - '\u{828}'), ('\u{840}', '\u{858}'), ('\u{8a0}', '\u{8b4}'), ('\u{904}', '\u{939}'), - ('\u{93d}', '\u{93d}'), ('\u{950}', '\u{950}'), ('\u{958}', '\u{961}'), ('\u{971}', - '\u{980}'), ('\u{985}', '\u{98c}'), ('\u{98f}', '\u{990}'), ('\u{993}', '\u{9a8}'), - ('\u{9aa}', '\u{9b0}'), ('\u{9b2}', '\u{9b2}'), ('\u{9b6}', '\u{9b9}'), ('\u{9bd}', - '\u{9bd}'), ('\u{9ce}', '\u{9ce}'), ('\u{9dc}', '\u{9dd}'), ('\u{9df}', '\u{9e1}'), - ('\u{9f0}', '\u{9f1}'), ('\u{a05}', '\u{a0a}'), ('\u{a0f}', '\u{a10}'), ('\u{a13}', - '\u{a28}'), ('\u{a2a}', '\u{a30}'), ('\u{a32}', '\u{a33}'), ('\u{a35}', '\u{a36}'), - ('\u{a38}', '\u{a39}'), ('\u{a59}', '\u{a5c}'), ('\u{a5e}', '\u{a5e}'), ('\u{a72}', - '\u{a74}'), ('\u{a85}', '\u{a8d}'), ('\u{a8f}', '\u{a91}'), ('\u{a93}', '\u{aa8}'), - ('\u{aaa}', '\u{ab0}'), ('\u{ab2}', '\u{ab3}'), ('\u{ab5}', '\u{ab9}'), ('\u{abd}', - '\u{abd}'), ('\u{ad0}', '\u{ad0}'), ('\u{ae0}', '\u{ae1}'), ('\u{af9}', '\u{af9}'), - ('\u{b05}', '\u{b0c}'), ('\u{b0f}', '\u{b10}'), ('\u{b13}', '\u{b28}'), ('\u{b2a}', - '\u{b30}'), ('\u{b32}', '\u{b33}'), ('\u{b35}', '\u{b39}'), ('\u{b3d}', '\u{b3d}'), - ('\u{b5c}', '\u{b5d}'), ('\u{b5f}', '\u{b61}'), ('\u{b71}', '\u{b71}'), ('\u{b83}', - '\u{b83}'), ('\u{b85}', '\u{b8a}'), ('\u{b8e}', '\u{b90}'), ('\u{b92}', '\u{b95}'), - ('\u{b99}', '\u{b9a}'), ('\u{b9c}', '\u{b9c}'), ('\u{b9e}', '\u{b9f}'), ('\u{ba3}', - '\u{ba4}'), ('\u{ba8}', '\u{baa}'), ('\u{bae}', '\u{bb9}'), ('\u{bd0}', '\u{bd0}'), - 
('\u{c05}', '\u{c0c}'), ('\u{c0e}', '\u{c10}'), ('\u{c12}', '\u{c28}'), ('\u{c2a}', - '\u{c39}'), ('\u{c3d}', '\u{c3d}'), ('\u{c58}', '\u{c5a}'), ('\u{c60}', '\u{c61}'), - ('\u{c85}', '\u{c8c}'), ('\u{c8e}', '\u{c90}'), ('\u{c92}', '\u{ca8}'), ('\u{caa}', - '\u{cb3}'), ('\u{cb5}', '\u{cb9}'), ('\u{cbd}', '\u{cbd}'), ('\u{cde}', '\u{cde}'), - ('\u{ce0}', '\u{ce1}'), ('\u{cf1}', '\u{cf2}'), ('\u{d05}', '\u{d0c}'), ('\u{d0e}', - '\u{d10}'), ('\u{d12}', '\u{d3a}'), ('\u{d3d}', '\u{d3d}'), ('\u{d4e}', '\u{d4e}'), - ('\u{d5f}', '\u{d61}'), ('\u{d7a}', '\u{d7f}'), ('\u{d85}', '\u{d96}'), ('\u{d9a}', - '\u{db1}'), ('\u{db3}', '\u{dbb}'), ('\u{dbd}', '\u{dbd}'), ('\u{dc0}', '\u{dc6}'), - ('\u{e01}', '\u{e30}'), ('\u{e32}', '\u{e32}'), ('\u{e40}', '\u{e46}'), ('\u{e81}', - '\u{e82}'), ('\u{e84}', '\u{e84}'), ('\u{e87}', '\u{e88}'), ('\u{e8a}', '\u{e8a}'), - ('\u{e8d}', '\u{e8d}'), ('\u{e94}', '\u{e97}'), ('\u{e99}', '\u{e9f}'), ('\u{ea1}', - '\u{ea3}'), ('\u{ea5}', '\u{ea5}'), ('\u{ea7}', '\u{ea7}'), ('\u{eaa}', '\u{eab}'), - ('\u{ead}', '\u{eb0}'), ('\u{eb2}', '\u{eb2}'), ('\u{ebd}', '\u{ebd}'), ('\u{ec0}', - '\u{ec4}'), ('\u{ec6}', '\u{ec6}'), ('\u{edc}', '\u{edf}'), ('\u{f00}', '\u{f00}'), - ('\u{f40}', '\u{f47}'), ('\u{f49}', '\u{f6c}'), ('\u{f88}', '\u{f8c}'), ('\u{1000}', - '\u{102a}'), ('\u{103f}', '\u{103f}'), ('\u{1050}', '\u{1055}'), ('\u{105a}', '\u{105d}'), - ('\u{1061}', '\u{1061}'), ('\u{1065}', '\u{1066}'), ('\u{106e}', '\u{1070}'), ('\u{1075}', - '\u{1081}'), ('\u{108e}', '\u{108e}'), ('\u{10a0}', '\u{10c5}'), ('\u{10c7}', '\u{10c7}'), - ('\u{10cd}', '\u{10cd}'), ('\u{10d0}', '\u{10fa}'), ('\u{10fc}', '\u{1248}'), ('\u{124a}', - '\u{124d}'), ('\u{1250}', '\u{1256}'), ('\u{1258}', '\u{1258}'), ('\u{125a}', '\u{125d}'), - ('\u{1260}', '\u{1288}'), ('\u{128a}', '\u{128d}'), ('\u{1290}', '\u{12b0}'), ('\u{12b2}', - '\u{12b5}'), ('\u{12b8}', '\u{12be}'), ('\u{12c0}', '\u{12c0}'), ('\u{12c2}', '\u{12c5}'), - ('\u{12c8}', '\u{12d6}'), ('\u{12d8}', '\u{1310}'), ('\u{1312}', '\u{1315}'), ('\u{1318}', - '\u{135a}'), ('\u{1380}', '\u{138f}'), ('\u{13a0}', '\u{13f5}'), ('\u{13f8}', '\u{13fd}'), - ('\u{1401}', '\u{166c}'), ('\u{166f}', '\u{167f}'), ('\u{1681}', '\u{169a}'), ('\u{16a0}', - '\u{16ea}'), ('\u{16ee}', '\u{16f8}'), ('\u{1700}', '\u{170c}'), ('\u{170e}', '\u{1711}'), - ('\u{1720}', '\u{1731}'), ('\u{1740}', '\u{1751}'), ('\u{1760}', '\u{176c}'), ('\u{176e}', - '\u{1770}'), ('\u{1780}', '\u{17b3}'), ('\u{17d7}', '\u{17d7}'), ('\u{17dc}', '\u{17dc}'), - ('\u{1820}', '\u{1877}'), ('\u{1880}', '\u{18a8}'), ('\u{18aa}', '\u{18aa}'), ('\u{18b0}', - '\u{18f5}'), ('\u{1900}', '\u{191e}'), ('\u{1950}', '\u{196d}'), ('\u{1970}', '\u{1974}'), - ('\u{1980}', '\u{19ab}'), ('\u{19b0}', '\u{19c9}'), ('\u{1a00}', '\u{1a16}'), ('\u{1a20}', - '\u{1a54}'), ('\u{1aa7}', '\u{1aa7}'), ('\u{1b05}', '\u{1b33}'), ('\u{1b45}', '\u{1b4b}'), - ('\u{1b83}', '\u{1ba0}'), ('\u{1bae}', '\u{1baf}'), ('\u{1bba}', '\u{1be5}'), ('\u{1c00}', - '\u{1c23}'), ('\u{1c4d}', '\u{1c4f}'), ('\u{1c5a}', '\u{1c7d}'), ('\u{1ce9}', '\u{1cec}'), - ('\u{1cee}', '\u{1cf1}'), ('\u{1cf5}', '\u{1cf6}'), ('\u{1d00}', '\u{1dbf}'), ('\u{1e00}', - '\u{1f15}'), ('\u{1f18}', '\u{1f1d}'), ('\u{1f20}', '\u{1f45}'), ('\u{1f48}', '\u{1f4d}'), - ('\u{1f50}', '\u{1f57}'), ('\u{1f59}', '\u{1f59}'), ('\u{1f5b}', '\u{1f5b}'), ('\u{1f5d}', - '\u{1f5d}'), ('\u{1f5f}', '\u{1f7d}'), ('\u{1f80}', '\u{1fb4}'), ('\u{1fb6}', '\u{1fbc}'), - ('\u{1fbe}', '\u{1fbe}'), ('\u{1fc2}', '\u{1fc4}'), ('\u{1fc6}', '\u{1fcc}'), ('\u{1fd0}', - '\u{1fd3}'), ('\u{1fd6}', 
'\u{1fdb}'), ('\u{1fe0}', '\u{1fec}'), ('\u{1ff2}', '\u{1ff4}'), - ('\u{1ff6}', '\u{1ffc}'), ('\u{2071}', '\u{2071}'), ('\u{207f}', '\u{207f}'), ('\u{2090}', - '\u{209c}'), ('\u{2102}', '\u{2102}'), ('\u{2107}', '\u{2107}'), ('\u{210a}', '\u{2113}'), - ('\u{2115}', '\u{2115}'), ('\u{2118}', '\u{211d}'), ('\u{2124}', '\u{2124}'), ('\u{2126}', - '\u{2126}'), ('\u{2128}', '\u{2128}'), ('\u{212a}', '\u{2139}'), ('\u{213c}', '\u{213f}'), - ('\u{2145}', '\u{2149}'), ('\u{214e}', '\u{214e}'), ('\u{2160}', '\u{2188}'), ('\u{2c00}', - '\u{2c2e}'), ('\u{2c30}', '\u{2c5e}'), ('\u{2c60}', '\u{2ce4}'), ('\u{2ceb}', '\u{2cee}'), - ('\u{2cf2}', '\u{2cf3}'), ('\u{2d00}', '\u{2d25}'), ('\u{2d27}', '\u{2d27}'), ('\u{2d2d}', - '\u{2d2d}'), ('\u{2d30}', '\u{2d67}'), ('\u{2d6f}', '\u{2d6f}'), ('\u{2d80}', '\u{2d96}'), - ('\u{2da0}', '\u{2da6}'), ('\u{2da8}', '\u{2dae}'), ('\u{2db0}', '\u{2db6}'), ('\u{2db8}', - '\u{2dbe}'), ('\u{2dc0}', '\u{2dc6}'), ('\u{2dc8}', '\u{2dce}'), ('\u{2dd0}', '\u{2dd6}'), - ('\u{2dd8}', '\u{2dde}'), ('\u{3005}', '\u{3007}'), ('\u{3021}', '\u{3029}'), ('\u{3031}', - '\u{3035}'), ('\u{3038}', '\u{303c}'), ('\u{3041}', '\u{3096}'), ('\u{309d}', '\u{309f}'), - ('\u{30a1}', '\u{30fa}'), ('\u{30fc}', '\u{30ff}'), ('\u{3105}', '\u{312d}'), ('\u{3131}', - '\u{318e}'), ('\u{31a0}', '\u{31ba}'), ('\u{31f0}', '\u{31ff}'), ('\u{3400}', '\u{4db5}'), - ('\u{4e00}', '\u{9fd5}'), ('\u{a000}', '\u{a48c}'), ('\u{a4d0}', '\u{a4fd}'), ('\u{a500}', - '\u{a60c}'), ('\u{a610}', '\u{a61f}'), ('\u{a62a}', '\u{a62b}'), ('\u{a640}', '\u{a66e}'), - ('\u{a67f}', '\u{a69d}'), ('\u{a6a0}', '\u{a6ef}'), ('\u{a717}', '\u{a71f}'), ('\u{a722}', - '\u{a788}'), ('\u{a78b}', '\u{a7ad}'), ('\u{a7b0}', '\u{a7b7}'), ('\u{a7f7}', '\u{a801}'), - ('\u{a803}', '\u{a805}'), ('\u{a807}', '\u{a80a}'), ('\u{a80c}', '\u{a822}'), ('\u{a840}', - '\u{a873}'), ('\u{a882}', '\u{a8b3}'), ('\u{a8f2}', '\u{a8f7}'), ('\u{a8fb}', '\u{a8fb}'), - ('\u{a8fd}', '\u{a8fd}'), ('\u{a90a}', '\u{a925}'), ('\u{a930}', '\u{a946}'), ('\u{a960}', - '\u{a97c}'), ('\u{a984}', '\u{a9b2}'), ('\u{a9cf}', '\u{a9cf}'), ('\u{a9e0}', '\u{a9e4}'), - ('\u{a9e6}', '\u{a9ef}'), ('\u{a9fa}', '\u{a9fe}'), ('\u{aa00}', '\u{aa28}'), ('\u{aa40}', - '\u{aa42}'), ('\u{aa44}', '\u{aa4b}'), ('\u{aa60}', '\u{aa76}'), ('\u{aa7a}', '\u{aa7a}'), - ('\u{aa7e}', '\u{aaaf}'), ('\u{aab1}', '\u{aab1}'), ('\u{aab5}', '\u{aab6}'), ('\u{aab9}', - '\u{aabd}'), ('\u{aac0}', '\u{aac0}'), ('\u{aac2}', '\u{aac2}'), ('\u{aadb}', '\u{aadd}'), - ('\u{aae0}', '\u{aaea}'), ('\u{aaf2}', '\u{aaf4}'), ('\u{ab01}', '\u{ab06}'), ('\u{ab09}', - '\u{ab0e}'), ('\u{ab11}', '\u{ab16}'), ('\u{ab20}', '\u{ab26}'), ('\u{ab28}', '\u{ab2e}'), - ('\u{ab30}', '\u{ab5a}'), ('\u{ab5c}', '\u{ab65}'), ('\u{ab70}', '\u{abe2}'), ('\u{ac00}', - '\u{d7a3}'), ('\u{d7b0}', '\u{d7c6}'), ('\u{d7cb}', '\u{d7fb}'), ('\u{f900}', '\u{fa6d}'), - ('\u{fa70}', '\u{fad9}'), ('\u{fb00}', '\u{fb06}'), ('\u{fb13}', '\u{fb17}'), ('\u{fb1d}', - '\u{fb1d}'), ('\u{fb1f}', '\u{fb28}'), ('\u{fb2a}', '\u{fb36}'), ('\u{fb38}', '\u{fb3c}'), - ('\u{fb3e}', '\u{fb3e}'), ('\u{fb40}', '\u{fb41}'), ('\u{fb43}', '\u{fb44}'), ('\u{fb46}', - '\u{fbb1}'), ('\u{fbd3}', '\u{fc5d}'), ('\u{fc64}', '\u{fd3d}'), ('\u{fd50}', '\u{fd8f}'), - ('\u{fd92}', '\u{fdc7}'), ('\u{fdf0}', '\u{fdf9}'), ('\u{fe71}', '\u{fe71}'), ('\u{fe73}', - '\u{fe73}'), ('\u{fe77}', '\u{fe77}'), ('\u{fe79}', '\u{fe79}'), ('\u{fe7b}', '\u{fe7b}'), - ('\u{fe7d}', '\u{fe7d}'), ('\u{fe7f}', '\u{fefc}'), ('\u{ff21}', '\u{ff3a}'), ('\u{ff41}', - '\u{ff5a}'), ('\u{ff66}', '\u{ff9d}'), 
('\u{ffa0}', '\u{ffbe}'), ('\u{ffc2}', '\u{ffc7}'), - ('\u{ffca}', '\u{ffcf}'), ('\u{ffd2}', '\u{ffd7}'), ('\u{ffda}', '\u{ffdc}'), ('\u{10000}', - '\u{1000b}'), ('\u{1000d}', '\u{10026}'), ('\u{10028}', '\u{1003a}'), ('\u{1003c}', - '\u{1003d}'), ('\u{1003f}', '\u{1004d}'), ('\u{10050}', '\u{1005d}'), ('\u{10080}', - '\u{100fa}'), ('\u{10140}', '\u{10174}'), ('\u{10280}', '\u{1029c}'), ('\u{102a0}', - '\u{102d0}'), ('\u{10300}', '\u{1031f}'), ('\u{10330}', '\u{1034a}'), ('\u{10350}', - '\u{10375}'), ('\u{10380}', '\u{1039d}'), ('\u{103a0}', '\u{103c3}'), ('\u{103c8}', - '\u{103cf}'), ('\u{103d1}', '\u{103d5}'), ('\u{10400}', '\u{1049d}'), ('\u{10500}', - '\u{10527}'), ('\u{10530}', '\u{10563}'), ('\u{10600}', '\u{10736}'), ('\u{10740}', - '\u{10755}'), ('\u{10760}', '\u{10767}'), ('\u{10800}', '\u{10805}'), ('\u{10808}', - '\u{10808}'), ('\u{1080a}', '\u{10835}'), ('\u{10837}', '\u{10838}'), ('\u{1083c}', - '\u{1083c}'), ('\u{1083f}', '\u{10855}'), ('\u{10860}', '\u{10876}'), ('\u{10880}', - '\u{1089e}'), ('\u{108e0}', '\u{108f2}'), ('\u{108f4}', '\u{108f5}'), ('\u{10900}', - '\u{10915}'), ('\u{10920}', '\u{10939}'), ('\u{10980}', '\u{109b7}'), ('\u{109be}', - '\u{109bf}'), ('\u{10a00}', '\u{10a00}'), ('\u{10a10}', '\u{10a13}'), ('\u{10a15}', - '\u{10a17}'), ('\u{10a19}', '\u{10a33}'), ('\u{10a60}', '\u{10a7c}'), ('\u{10a80}', - '\u{10a9c}'), ('\u{10ac0}', '\u{10ac7}'), ('\u{10ac9}', '\u{10ae4}'), ('\u{10b00}', - '\u{10b35}'), ('\u{10b40}', '\u{10b55}'), ('\u{10b60}', '\u{10b72}'), ('\u{10b80}', - '\u{10b91}'), ('\u{10c00}', '\u{10c48}'), ('\u{10c80}', '\u{10cb2}'), ('\u{10cc0}', - '\u{10cf2}'), ('\u{11003}', '\u{11037}'), ('\u{11083}', '\u{110af}'), ('\u{110d0}', - '\u{110e8}'), ('\u{11103}', '\u{11126}'), ('\u{11150}', '\u{11172}'), ('\u{11176}', - '\u{11176}'), ('\u{11183}', '\u{111b2}'), ('\u{111c1}', '\u{111c4}'), ('\u{111da}', - '\u{111da}'), ('\u{111dc}', '\u{111dc}'), ('\u{11200}', '\u{11211}'), ('\u{11213}', - '\u{1122b}'), ('\u{11280}', '\u{11286}'), ('\u{11288}', '\u{11288}'), ('\u{1128a}', - '\u{1128d}'), ('\u{1128f}', '\u{1129d}'), ('\u{1129f}', '\u{112a8}'), ('\u{112b0}', - '\u{112de}'), ('\u{11305}', '\u{1130c}'), ('\u{1130f}', '\u{11310}'), ('\u{11313}', - '\u{11328}'), ('\u{1132a}', '\u{11330}'), ('\u{11332}', '\u{11333}'), ('\u{11335}', - '\u{11339}'), ('\u{1133d}', '\u{1133d}'), ('\u{11350}', '\u{11350}'), ('\u{1135d}', - '\u{11361}'), ('\u{11480}', '\u{114af}'), ('\u{114c4}', '\u{114c5}'), ('\u{114c7}', - '\u{114c7}'), ('\u{11580}', '\u{115ae}'), ('\u{115d8}', '\u{115db}'), ('\u{11600}', - '\u{1162f}'), ('\u{11644}', '\u{11644}'), ('\u{11680}', '\u{116aa}'), ('\u{11700}', - '\u{11719}'), ('\u{118a0}', '\u{118df}'), ('\u{118ff}', '\u{118ff}'), ('\u{11ac0}', - '\u{11af8}'), ('\u{12000}', '\u{12399}'), ('\u{12400}', '\u{1246e}'), ('\u{12480}', - '\u{12543}'), ('\u{13000}', '\u{1342e}'), ('\u{14400}', '\u{14646}'), ('\u{16800}', - '\u{16a38}'), ('\u{16a40}', '\u{16a5e}'), ('\u{16ad0}', '\u{16aed}'), ('\u{16b00}', - '\u{16b2f}'), ('\u{16b40}', '\u{16b43}'), ('\u{16b63}', '\u{16b77}'), ('\u{16b7d}', - '\u{16b8f}'), ('\u{16f00}', '\u{16f44}'), ('\u{16f50}', '\u{16f50}'), ('\u{16f93}', - '\u{16f9f}'), ('\u{1b000}', '\u{1b001}'), ('\u{1bc00}', '\u{1bc6a}'), ('\u{1bc70}', - '\u{1bc7c}'), ('\u{1bc80}', '\u{1bc88}'), ('\u{1bc90}', '\u{1bc99}'), ('\u{1d400}', - '\u{1d454}'), ('\u{1d456}', '\u{1d49c}'), ('\u{1d49e}', '\u{1d49f}'), ('\u{1d4a2}', - '\u{1d4a2}'), ('\u{1d4a5}', '\u{1d4a6}'), ('\u{1d4a9}', '\u{1d4ac}'), ('\u{1d4ae}', - '\u{1d4b9}'), ('\u{1d4bb}', '\u{1d4bb}'), 
('\u{1d4bd}', '\u{1d4c3}'), ('\u{1d4c5}', - '\u{1d505}'), ('\u{1d507}', '\u{1d50a}'), ('\u{1d50d}', '\u{1d514}'), ('\u{1d516}', - '\u{1d51c}'), ('\u{1d51e}', '\u{1d539}'), ('\u{1d53b}', '\u{1d53e}'), ('\u{1d540}', - '\u{1d544}'), ('\u{1d546}', '\u{1d546}'), ('\u{1d54a}', '\u{1d550}'), ('\u{1d552}', - '\u{1d6a5}'), ('\u{1d6a8}', '\u{1d6c0}'), ('\u{1d6c2}', '\u{1d6da}'), ('\u{1d6dc}', - '\u{1d6fa}'), ('\u{1d6fc}', '\u{1d714}'), ('\u{1d716}', '\u{1d734}'), ('\u{1d736}', - '\u{1d74e}'), ('\u{1d750}', '\u{1d76e}'), ('\u{1d770}', '\u{1d788}'), ('\u{1d78a}', - '\u{1d7a8}'), ('\u{1d7aa}', '\u{1d7c2}'), ('\u{1d7c4}', '\u{1d7cb}'), ('\u{1e800}', - '\u{1e8c4}'), ('\u{1ee00}', '\u{1ee03}'), ('\u{1ee05}', '\u{1ee1f}'), ('\u{1ee21}', - '\u{1ee22}'), ('\u{1ee24}', '\u{1ee24}'), ('\u{1ee27}', '\u{1ee27}'), ('\u{1ee29}', - '\u{1ee32}'), ('\u{1ee34}', '\u{1ee37}'), ('\u{1ee39}', '\u{1ee39}'), ('\u{1ee3b}', - '\u{1ee3b}'), ('\u{1ee42}', '\u{1ee42}'), ('\u{1ee47}', '\u{1ee47}'), ('\u{1ee49}', - '\u{1ee49}'), ('\u{1ee4b}', '\u{1ee4b}'), ('\u{1ee4d}', '\u{1ee4f}'), ('\u{1ee51}', - '\u{1ee52}'), ('\u{1ee54}', '\u{1ee54}'), ('\u{1ee57}', '\u{1ee57}'), ('\u{1ee59}', - '\u{1ee59}'), ('\u{1ee5b}', '\u{1ee5b}'), ('\u{1ee5d}', '\u{1ee5d}'), ('\u{1ee5f}', - '\u{1ee5f}'), ('\u{1ee61}', '\u{1ee62}'), ('\u{1ee64}', '\u{1ee64}'), ('\u{1ee67}', - '\u{1ee6a}'), ('\u{1ee6c}', '\u{1ee72}'), ('\u{1ee74}', '\u{1ee77}'), ('\u{1ee79}', - '\u{1ee7c}'), ('\u{1ee7e}', '\u{1ee7e}'), ('\u{1ee80}', '\u{1ee89}'), ('\u{1ee8b}', - '\u{1ee9b}'), ('\u{1eea1}', '\u{1eea3}'), ('\u{1eea5}', '\u{1eea9}'), ('\u{1eeab}', - '\u{1eebb}'), ('\u{20000}', '\u{2a6d6}'), ('\u{2a700}', '\u{2b734}'), ('\u{2b740}', - '\u{2b81d}'), ('\u{2b820}', '\u{2cea1}'), ('\u{2f800}', '\u{2fa1d}') - ]; + pub const XID_Start_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x0000000000000000, 0x07fffffe07fffffe, 0x0420040000000000, 0xff7fffffff7fffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xffffffffffffffff, 0x0000501f0003ffc3, + 0x0000000000000000, 0xb8df000000000000, 0xfffffffbffffd740, 0xffbfffffffffffff, + 0xffffffffffffffff, 0xffffffffffffffff, 0xfffffffffffffc03, 0xffffffffffffffff, + 0xfffeffffffffffff, 0xfffffffe027fffff, 0x00000000000000ff, 0x000707ffffff0000, + 0xffffffff00000000, 0xfffec000000007ff, 0xffffffffffffffff, 0x9c00c060002fffff, + 0x0000fffffffd0000, 0xffffffffffffe000, 0x0002003fffffffff, 0x043007fffffffc00 + ], + r2: [ + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, + 24, 23, 25, 26, 27, 28, 29, 3, 30, 31, 32, 33, 34, 34, 34, 34, 34, 35, 36, 37, 38, 39, + 40, 41, 42, 34, 34, 34, 34, 34, 34, 34, 34, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, + 54, 55, 56, 57, 58, 59, 60, 3, 61, 62, 63, 64, 65, 66, 3, 67, 34, 34, 34, 3, 34, 34, 34, + 34, 68, 69, 70, 71, 3, 72, 73, 3, 74, 75, 76, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 77, 78, + 34, 79, 80, 81, 82, 83, 3, 3, 3, 3, 3, 3, 3, 3, 84, 42, 85, 86, 87, 34, 88, 89, 3, 3, 3, + 3, 3, 3, 3, 3, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 
34, 34, 34, 53, 3, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 90, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 91, 92, 34, 34, 34, 34, 93, 94, + 95, 96, 97, 34, 98, 99, 100, 48, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, + 112, 34, 113, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, 34, + 34, 34, 114, 115, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, + 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 34, 34, 34, 34, 34, + 116, 34, 117, 118, 119, 120, 121, 34, 122, 34, 34, 123, 124, 125, 126, 3, 127, 34, 128, + 129, 130, 131, 132 + ], + r3: &[ + 0x00000110043fffff, 0x0000000001ffffff, 0x001fffff00000000, 0x0000000000000000, + 0x23fffffffffffff0, 0xfffe0003ff010000, 0x23c5fdfffff99fe1, 0x00030003b0004000, + 0x036dfdfffff987e0, 0x001c00005e000000, 0x23edfdfffffbbfe0, 0x0200000300010000, + 0x23edfdfffff99fe0, 0x00020003b0000000, 0x03ffc718d63dc7e8, 0x0000000000010000, + 0x23fffdfffffddfe0, 0x0000000307000000, 0x23effdfffffddfe0, 0x0006000340000000, + 0x27fffffffffddfe0, 0xfc00000380004000, 0x2ffbfffffc7fffe0, 0x000000000000007f, + 0x0005fffffffffffe, 0x2005ecaefef02596, 0x00000000f000005f, 0x0000000000000001, + 0x00001ffffffffeff, 0x0000000000001f00, 0x800007ffffffffff, 0xffe1c0623c3f0000, + 0xffffffff00004003, 0xf7ffffffffff20bf, 
0xffffffffffffffff, 0xffffffff3d7f3dff, + 0x7f3dffffffff3dff, 0xffffffffff7fff3d, 0xffffffffff3dffff, 0x0000000007ffffff, + 0xffffffff0000ffff, 0x3f3fffffffffffff, 0xfffffffffffffffe, 0xffff9fffffffffff, + 0xffffffff07fffffe, 0x01ffc7ffffffffff, 0x0003ffff0003dfff, 0x0001dfff0003ffff, + 0x000fffffffffffff, 0x0000000010800000, 0xffffffff00000000, 0x00ffffffffffffff, + 0xffff05ffffffffff, 0x003fffffffffffff, 0x000000007fffffff, 0x001f3fffffff0000, + 0xffff0fffffffffff, 0x00000000000003ff, 0xffffffff007fffff, 0x00000000001fffff, + 0x0000008000000000, 0x000fffffffffffe0, 0x0000000000000fe0, 0xfc00c001fffffff8, + 0x0000003fffffffff, 0x0000000fffffffff, 0x3ffffffffc00e000, 0x0063de0000000000, + 0xffffffff3f3fffff, 0x3fffffffaaff3f3f, 0x5fdfffffffffffff, 0x1fdc1fff0fcf1fdc, + 0x8002000000000000, 0x000000001fff0000, 0xf3fffd503f2ffc84, 0xffffffff000043e0, + 0x00000000000001ff, 0xffff7fffffffffff, 0xffffffff7fffffff, 0x000c781fffffffff, + 0xffff20bfffffffff, 0x000080ffffffffff, 0x7f7f7f7f007fffff, 0x000000007f7f7f7f, + 0x1f3e03fe000000e0, 0xfffffffee07fffff, 0xf7ffffffffffffff, 0xfffe3fffffffffe0, + 0x07ffffff00007fff, 0xffff000000000000, 0x00000000003fffff, 0x0000000000001fff, + 0x3fffffffffff0000, 0x00000c00ffff1fff, 0x80007fffffffffff, 0xffffffff3fffffff, + 0x0000ffffffffffff, 0xfffffffcff800000, 0x00ff3ffffffff9ff, 0xff80000000000000, + 0x00000007fffff7bb, 0x000ffffffffffffc, 0x28fc000000000000, 0xffff003ffffffc00, + 0x1fffffff0000007f, 0x0007fffffffffff0, 0x7c00ffdf00008000, 0x000001ffffffffff, + 0xc47fffff00000ff7, 0x3e62ffffffffffff, 0x001c07ff38000005, 0xffff7f7f007e7e7e, + 0xffff003ff7ffffff, 0x00000007ffffffff, 0xffff000fffffffff, 0x0ffffffffffff87f, + 0xffff3fffffffffff, 0x0000000003ffffff, 0x5f7ffdffa0f8007f, 0xffffffffffffffdb, + 0x0003ffffffffffff, 0xfffffffffff80000, 0xfffffff03fffffff, 0x3fffffffffffffff, + 0xffffffffffff0000, 0xfffffffffffcffff, 0x03ff0000000000ff, 0xaa8a000000000000, + 0x1fffffffffffffff, 0x07fffffe00000000, 0xffffffc007fffffe, 0x7fffffff3fffffff, + 0x000000001cfcfcfc + ], + r4: [ + 0, 1, 2, 3, 4, 5, 6, 5, 5, 5, 5, 7, 5, 8, 9, 5, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, + 11, 12, 13, 5, 5, 14, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, + 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5 + ], + r5: &[ + 0, 1, 2, 3, 4, 5, 4, 4, 4, 4, 6, 7, 8, 9, 10, 11, 2, 2, 12, 4, 13, 14, 4, 4, 2, 2, 2, 2, + 15, 16, 4, 4, 17, 18, 19, 20, 21, 4, 22, 4, 23, 24, 25, 26, 27, 28, 29, 4, 2, 30, 31, + 31, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 32, 4, 33, 34, 35, 36, 37, 38, 39, 4, 40, 19, + 41, 42, 4, 4, 4, 4, 43, 44, 4, 4, 45, 46, 43, 47, 48, 4, 49, 4, 4, 4, 4, 4, 50, 51, 4, + 4, 4, 4, 4, 4, 4, 52, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 49, 4, 2, 45, 2, 2, 2, 53, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 45, 4, 4, 
4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 54, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, + 2, 2, 2, 2, 2, 52, 19, 4, 55, 43, 56, 57, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 58, + 59, 4, 60, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 61, 62, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 63, 64, 65, + 66, 67, 2, 2, 2, 2, 68, 69, 70, 71, 72, 73, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 74, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 75, 76, 77, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 78, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 5, 2, 2, 2, 10, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, + 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 79, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, + 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, + 2, 12, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4 + ], + r6: &[ + 0xb7ffff7fffffefff, 0x000000003fff3fff, 0xffffffffffffffff, 0x07ffffffffffffff, + 0x0000000000000000, 0x001fffffffffffff, 0xffffffff1fffffff, 0x000000000001ffff, + 0xffff0000ffffffff, 0x003fffffffff07ff, 0xffffffff3fffffff, 0x00000000003eff0f, + 0x000000003fffffff, 0xffff00ffffffffff, 0x0000000fffffffff, 0x007fffffffffffff, + 0x000000ff003fffff, 0x91bffffffffffd3f, 0x007fffff003fffff, 0x000000007fffffff, + 0x0037ffff00000000, 0x03ffffff003fffff, 0xc0ffffffffffffff, 0x000ffffffeef0001, + 0x1fffffff00000000, 0x000000001fffffff, 0x0000001ffffffeff, 0x003fffffffffffff, + 0x0007ffff003fffff, 0x000000000003ffff, 0x00000000000001ff, 0x0007ffffffffffff, + 0x00fffffffffffff8, 0x0000fffffffffff8, 0x000001ffffff0000, 0x0000007ffffffff8, + 0x0047ffffffff0000, 0x0007fffffffffff8, 0x000000001400001e, 0x00000ffffffbffff, + 0xffff01ffbfffbd7f, 0x23edfdfffff99fe0, 0x00000003e0010000, 0x0000ffffffffffff, + 0x00000000000000b0, 0x00007fffffffffff, 0x000000000f000000, 0x0000000000000010, + 0x000007ffffffffff, 0x0000000003ffffff, 0xffffffff00000000, 0x80000000ffffffff, + 0x01ffffffffffffff, 0x000000000000000f, 0x000000000000007f, 0x00003fffffff0000, + 0xe0fffff80000000f, 0x000000000000ffff, 0x000000000001001f, 
0x00000000fff80000, + 0x0000000000000003, 0x1fff07ffffffffff, 0x0000000003ff01ff, 0xffffffffffdfffff, + 0xebffde64dfffffff, 0xffffffffffffffef, 0x7bffffffdfdfe7bf, 0xfffffffffffdfc5f, + 0xffffff3fffffffff, 0xf7fffffff7fffffd, 0xffdfffffffdfffff, 0xffff7fffffff7fff, + 0xfffffdfffffffdff, 0x0000000000000ff7, 0x000000000000001f, 0x0af7fe96ffffffef, + 0x5ef7f796aa96ea84, 0x0ffffbee0ffffbff, 0x00000000007fffff, 0x00000003ffffffff + ], + }; pub fn XID_Start(c: char) -> bool { - super::bsearch_range_table(c, XID_Start_table) + super::trie_lookup_range_table(c, XID_Start_table) } } pub mod property { - pub const Pattern_White_Space_table: &'static [(char, char)] = &[ - ('\u{9}', '\u{d}'), ('\u{20}', '\u{20}'), ('\u{85}', '\u{85}'), ('\u{200e}', '\u{200f}'), - ('\u{2028}', '\u{2029}') - ]; + pub const Pattern_White_Space_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x0000000100003e00, 0x0000000000000000, 0x0000000000000020, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000 + ], + r2: [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0 + ], + r3: &[ + 0x0000000000000000, 0x000003000000c000 + ], + r4: [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ], + r5: &[ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0 + ], + r6: &[ + 0x0000000000000000 + ], + }; pub fn Pattern_White_Space(c: char) -> bool { - super::bsearch_range_table(c, Pattern_White_Space_table) + super::trie_lookup_range_table(c, Pattern_White_Space_table) } - pub const White_Space_table: &'static [(char, char)] = &[ - ('\u{9}', '\u{d}'), ('\u{20}', '\u{20}'), ('\u{85}', '\u{85}'), ('\u{a0}', '\u{a0}'), - ('\u{1680}', '\u{1680}'), ('\u{2000}', '\u{200a}'), ('\u{2028}', '\u{2029}'), ('\u{202f}', - '\u{202f}'), ('\u{205f}', '\u{205f}'), ('\u{3000}', '\u{3000}') - ]; + pub const White_Space_table: &'static super::BoolTrie = &super::BoolTrie { + r1: [ + 0x0000000100003e00, 0x0000000000000000, 0x0000000100000020, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, + 0x0000000000000000, 0x0000000000000000, 0x0000000000000000, 0x0000000000000000 + ], + r2: [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0 + ], + r3: &[ + 0x0000000000000000, 0x0000000000000001, 0x00008300000007ff, 0x0000000080000000 + ], + r4: [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + ], + r5: &[ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0 + ], + r6: &[ + 0x0000000000000000 + ], + }; pub fn White_Space(c: char) -> bool { - super::bsearch_range_table(c, White_Space_table) + super::trie_lookup_range_table(c, White_Space_table) } } diff --git a/src/librustdoc/clean/inline.rs b/src/librustdoc/clean/inline.rs index 8509754982..f5d54123f3 100644 --- a/src/librustdoc/clean/inline.rs +++ b/src/librustdoc/clean/inline.rs @@ -14,23 +14,22 @@ use std::collections::HashSet; use std::iter::once; use syntax::ast; -use syntax::attr::AttrMetaMethods; use rustc::hir; -use rustc::middle::cstore::{self, CrateStore}; +use rustc::middle::cstore; use rustc::hir::def::Def; use rustc::hir::def_id::DefId; +use rustc::hir::print as pprust; use rustc::ty::{self, TyCtxt}; use rustc::ty::subst; -use rustc::middle::stability; use rustc_const_eval::lookup_const_by_id; -use core::DocContext; +use core::{DocContext, DocAccessLevels}; use doctree; -use clean::{self, Attributes, GetDefId}; +use clean::{self, GetDefId}; -use super::{Clean, ToSource}; +use super::Clean; /// Attempt to inline the definition of a local node id into this AST. /// @@ -69,13 +68,14 @@ pub fn try_inline(cx: &DocContext, id: ast::NodeId, into: Option) }) } -fn try_inline_def(cx: &DocContext, tcx: &TyCtxt, - def: Def) -> Option> { +fn try_inline_def<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + def: Def) -> Option> { let mut ret = Vec::new(); let did = def.def_id(); let inner = match def { Def::Trait(did) => { record_extern_fqn(cx, did, clean::TypeTrait); + ret.extend(build_impls(cx, tcx, did)); clean::TraitItem(build_external_trait(cx, tcx, did)) } Def::Fn(did) => { @@ -116,22 +116,22 @@ fn try_inline_def(cx: &DocContext, tcx: &TyCtxt, } _ => return None, }; - cx.inlined.borrow_mut().as_mut().unwrap().insert(did); + cx.renderinfo.borrow_mut().inlined.insert(did); ret.push(clean::Item { source: clean::Span::empty(), name: Some(tcx.item_name(did).to_string()), attrs: load_attrs(cx, tcx, did), inner: inner, - visibility: Some(hir::Public), - stability: stability::lookup_stability(tcx, did).clean(cx), - deprecation: stability::lookup_deprecation(tcx, did).clean(cx), + visibility: Some(clean::Public), + stability: tcx.lookup_stability(did).clean(cx), + deprecation: tcx.lookup_deprecation(did).clean(cx), def_id: did, }); Some(ret) } -pub fn load_attrs(cx: &DocContext, tcx: &TyCtxt, - did: DefId) -> Vec { +pub fn load_attrs<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId) -> Vec { tcx.get_attrs(did).iter().map(|a| a.clean(cx)).collect() } @@ -146,12 +146,12 @@ pub fn record_extern_fqn(cx: &DocContext, did: DefId, kind: clean::TypeKind) { elem.data.to_string() }); let fqn = once(crate_name).chain(relative).collect(); - cx.external_paths.borrow_mut().as_mut().unwrap().insert(did, (fqn, kind)); + cx.renderinfo.borrow_mut().external_paths.insert(did, (fqn, kind)); } } -pub fn build_external_trait(cx: &DocContext, tcx: &TyCtxt, - did: DefId) -> clean::Trait { +pub fn build_external_trait<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId) -> clean::Trait { let def = tcx.lookup_trait_def(did); let trait_items = 
tcx.trait_items(did).clean(cx); let predicates = tcx.lookup_predicates(did); @@ -166,7 +166,8 @@ pub fn build_external_trait(cx: &DocContext, tcx: &TyCtxt, } } -fn build_external_function(cx: &DocContext, tcx: &TyCtxt, did: DefId) -> clean::Function { +fn build_external_function<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId) -> clean::Function { let t = tcx.lookup_item_type(did); let (decl, style, abi) = match t.ty.sty { ty::TyFnDef(_, _, ref f) => ((did, &f.sig).clean(cx), f.unsafety, f.abi), @@ -189,7 +190,8 @@ fn build_external_function(cx: &DocContext, tcx: &TyCtxt, did: DefId) -> clean:: } } -fn build_struct(cx: &DocContext, tcx: &TyCtxt, did: DefId) -> clean::Struct { +fn build_struct<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId) -> clean::Struct { let t = tcx.lookup_item_type(did); let predicates = tcx.lookup_predicates(did); let variant = tcx.lookup_adt_def(did).struct_variant(); @@ -207,7 +209,8 @@ fn build_struct(cx: &DocContext, tcx: &TyCtxt, did: DefId) -> clean::Struct { } } -fn build_type(cx: &DocContext, tcx: &TyCtxt, did: DefId) -> clean::ItemEnum { +fn build_type<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId) -> clean::ItemEnum { let t = tcx.lookup_item_type(did); let predicates = tcx.lookup_predicates(did); match t.ty.sty { @@ -227,9 +230,9 @@ fn build_type(cx: &DocContext, tcx: &TyCtxt, did: DefId) -> clean::ItemEnum { }, false) } -pub fn build_impls(cx: &DocContext, - tcx: &TyCtxt, - did: DefId) -> Vec { +pub fn build_impls<'a, 'tcx>(cx: &DocContext, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId) -> Vec { tcx.populate_inherent_implementations_for_type_if_necessary(did); let mut impls = Vec::new(); @@ -247,24 +250,17 @@ pub fn build_impls(cx: &DocContext, // Primarily, the impls will be used to populate the documentation for this // type being inlined, but impls can also be used when generating // documentation for primitives (no way to find those specifically). 
- if !cx.all_crate_impls.borrow_mut().contains_key(&did.krate) { - let mut impls = Vec::new(); + if cx.populated_crate_impls.borrow_mut().insert(did.krate) { for item in tcx.sess.cstore.crate_top_level_items(did.krate) { populate_impls(cx, tcx, item.def, &mut impls); } - cx.all_crate_impls.borrow_mut().insert(did.krate, impls); - fn populate_impls(cx: &DocContext, tcx: &TyCtxt, - def: cstore::DefLike, - impls: &mut Vec) { + fn populate_impls<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + def: cstore::DefLike, + impls: &mut Vec) { match def { cstore::DlImpl(did) => build_impl(cx, tcx, did, impls), cstore::DlDef(Def::Mod(did)) => { - // Don't recurse if this is a #[doc(hidden)] module - if load_attrs(cx, tcx, did).list("doc").has_word("hidden") { - return; - } - for item in tcx.sess.cstore.item_children(did) { populate_impls(cx, tcx, item.def, impls) } @@ -274,37 +270,24 @@ pub fn build_impls(cx: &DocContext, } } - let mut candidates = cx.all_crate_impls.borrow_mut(); - let candidates = candidates.get_mut(&did.krate).unwrap(); - for i in (0..candidates.len()).rev() { - let remove = match candidates[i].inner { - clean::ImplItem(ref i) => { - i.for_.def_id() == Some(did) || i.for_.primitive_type().is_some() - } - _ => continue, - }; - if remove { - impls.push(candidates.swap_remove(i)); - } - } - - return impls; + impls } -pub fn build_impl(cx: &DocContext, - tcx: &TyCtxt, - did: DefId, - ret: &mut Vec) { - if !cx.inlined.borrow_mut().as_mut().unwrap().insert(did) { +pub fn build_impl<'a, 'tcx>(cx: &DocContext, + tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId, + ret: &mut Vec) { + if !cx.renderinfo.borrow_mut().inlined.insert(did) { return } let attrs = load_attrs(cx, tcx, did); let associated_trait = tcx.impl_trait_ref(did); - if let Some(ref t) = associated_trait { - // If this is an impl for a #[doc(hidden)] trait, be sure to not inline - let trait_attrs = load_attrs(cx, tcx, t.def_id); - if trait_attrs.list("doc").has_word("hidden") { + + // Only inline impl if the implemented trait is + // reachable in rustdoc generated documentation + if let Some(traitref) = associated_trait { + if !cx.access_levels.borrow().is_doc_reachable(traitref.def_id) { return } } @@ -323,13 +306,24 @@ pub fn build_impl(cx: &DocContext, source: clean::Span::empty(), name: None, attrs: attrs, - visibility: Some(hir::Inherited), - stability: stability::lookup_stability(tcx, did).clean(cx), - deprecation: stability::lookup_deprecation(tcx, did).clean(cx), + visibility: Some(clean::Inherited), + stability: tcx.lookup_stability(did).clean(cx), + deprecation: tcx.lookup_deprecation(did).clean(cx), def_id: did, }); } + let ty = tcx.lookup_item_type(did); + let for_ = ty.ty.clean(cx); + + // Only inline impl if the implementing type is + // reachable in rustdoc generated documentation + if let Some(did) = for_.def_id() { + if !cx.access_levels.borrow().is_doc_reachable(did) { + return + } + } + let predicates = tcx.lookup_predicates(did); let trait_items = tcx.sess.cstore.impl_items(did) .iter() @@ -341,8 +335,8 @@ pub fn build_impl(cx: &DocContext, let did = assoc_const.def_id; let type_scheme = tcx.lookup_item_type(did); let default = if assoc_const.has_value { - Some(lookup_const_by_id(tcx, did, None) - .unwrap().0.span.to_src(cx)) + Some(pprust::expr_to_string( + lookup_const_by_id(tcx, did, None).unwrap().0)) } else { None }; @@ -355,8 +349,8 @@ pub fn build_impl(cx: &DocContext, source: clean::Span::empty(), attrs: vec![], visibility: None, - stability: stability::lookup_stability(tcx, did).clean(cx), - 
deprecation: stability::lookup_deprecation(tcx, did).clean(cx), + stability: tcx.lookup_stability(did).clean(cx), + deprecation: tcx.lookup_deprecation(did).clean(cx), def_id: did }) } @@ -367,7 +361,7 @@ pub fn build_impl(cx: &DocContext, let mut item = method.clean(cx); item.inner = match item.inner.clone() { clean::TyMethodItem(clean::TyMethod { - unsafety, decl, self_, generics, abi + unsafety, decl, generics, abi }) => { let constness = if tcx.sess.cstore.is_const_fn(did) { hir::Constness::Const @@ -379,7 +373,6 @@ pub fn build_impl(cx: &DocContext, unsafety: unsafety, constness: constness, decl: decl, - self_: self_, generics: generics, abi: abi }) @@ -404,15 +397,14 @@ pub fn build_impl(cx: &DocContext, source: clean::Span::empty(), attrs: vec![], visibility: None, - stability: stability::lookup_stability(tcx, did).clean(cx), - deprecation: stability::lookup_deprecation(tcx, did).clean(cx), + stability: tcx.lookup_stability(did).clean(cx), + deprecation: tcx.lookup_deprecation(did).clean(cx), def_id: did }) } } }).collect::>(); let polarity = tcx.trait_impl_polarity(did); - let ty = tcx.lookup_item_type(did); let trait_ = associated_trait.clean(cx).map(|bound| { match bound { clean::TraitBound(polyt, _) => polyt.trait_, @@ -436,7 +428,7 @@ pub fn build_impl(cx: &DocContext, derived: clean::detect_derived(&attrs), provided_trait_methods: provided, trait_: trait_, - for_: ty.ty.clean(cx), + for_: for_, generics: (&ty.generics, &predicates, subst::TypeSpace).clean(cx), items: trait_items, polarity: polarity.map(|p| { p.clean(cx) }), @@ -444,15 +436,15 @@ pub fn build_impl(cx: &DocContext, source: clean::Span::empty(), name: None, attrs: attrs, - visibility: Some(hir::Inherited), - stability: stability::lookup_stability(tcx, did).clean(cx), - deprecation: stability::lookup_deprecation(tcx, did).clean(cx), + visibility: Some(clean::Inherited), + stability: tcx.lookup_stability(did).clean(cx), + deprecation: tcx.lookup_deprecation(did).clean(cx), def_id: did, }); } -fn build_module(cx: &DocContext, tcx: &TyCtxt, - did: DefId) -> clean::Module { +fn build_module<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId) -> clean::Module { let mut items = Vec::new(); fill_in(cx, tcx, did, &mut items); return clean::Module { @@ -460,8 +452,8 @@ fn build_module(cx: &DocContext, tcx: &TyCtxt, is_crate: false, }; - fn fill_in(cx: &DocContext, tcx: &TyCtxt, did: DefId, - items: &mut Vec) { + fn fill_in<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId, items: &mut Vec) { // If we're reexporting a reexport it may actually reexport something in // two namespaces, so the target may be listed twice. Make sure we only // visit each node at most once. 
@@ -486,10 +478,8 @@ fn build_module(cx: &DocContext, tcx: &TyCtxt, } } -fn build_const(cx: &DocContext, tcx: &TyCtxt, - did: DefId) -> clean::Constant { - use rustc::hir::print as pprust; - +fn build_const<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId) -> clean::Constant { let (expr, ty) = lookup_const_by_id(tcx, did, None).unwrap_or_else(|| { panic!("expected lookup_const_by_id to succeed for {:?}", did); }); @@ -503,9 +493,9 @@ fn build_const(cx: &DocContext, tcx: &TyCtxt, } } -fn build_static(cx: &DocContext, tcx: &TyCtxt, - did: DefId, - mutable: bool) -> clean::Static { +fn build_static<'a, 'tcx>(cx: &DocContext, tcx: TyCtxt<'a, 'tcx, 'tcx>, + did: DefId, + mutable: bool) -> clean::Static { clean::Static { type_: tcx.lookup_item_type(did).ty.clean(cx), mutability: if mutable {clean::Mutable} else {clean::Immutable}, diff --git a/src/librustdoc/clean/mod.rs b/src/librustdoc/clean/mod.rs index d8d1472560..ca138168b2 100644 --- a/src/librustdoc/clean/mod.rs +++ b/src/librustdoc/clean/mod.rs @@ -22,6 +22,7 @@ pub use self::Attribute::*; pub use self::TyParamBound::*; pub use self::SelfTy::*; pub use self::FunctionRetTy::*; +pub use self::Visibility::*; use syntax; use syntax::abi::Abi; @@ -30,13 +31,15 @@ use syntax::attr; use syntax::attr::{AttributeMethods, AttrMetaMethods}; use syntax::codemap; use syntax::codemap::{DUMMY_SP, Pos, Spanned}; -use syntax::parse::token::{self, InternedString, special_idents}; +use syntax::parse::token::{self, InternedString, keywords}; use syntax::ptr::P; use rustc_trans::back::link; -use rustc::middle::cstore::{self, CrateStore}; +use rustc::middle::cstore; +use rustc::middle::privacy::AccessLevels; use rustc::hir::def::Def; -use rustc::hir::def_id::{DefId, DefIndex}; +use rustc::hir::def_id::{DefId, DefIndex, CRATE_DEF_INDEX}; +use rustc::hir::print as pprust; use rustc::ty::subst::{self, ParamSpace, VecPerParamSpace}; use rustc::ty; use rustc::middle::stability; @@ -46,24 +49,26 @@ use rustc::hir; use std::collections::{HashMap, HashSet}; use std::path::PathBuf; use std::rc::Rc; +use std::sync::Arc; use std::u32; use std::env::current_dir; +use std::mem; use core::DocContext; use doctree; use visit_ast; use html::item_type::ItemType; -mod inline; +pub mod inline; mod simplify; // extract the stability index for a node from tcx, if possible fn get_stability(cx: &DocContext, def_id: DefId) -> Option { - cx.tcx_opt().and_then(|tcx| stability::lookup_stability(tcx, def_id)).clean(cx) + cx.tcx_opt().and_then(|tcx| tcx.lookup_stability(def_id)).clean(cx) } fn get_deprecation(cx: &DocContext, def_id: DefId) -> Option { - cx.tcx_opt().and_then(|tcx| stability::lookup_deprecation(tcx, def_id)).clean(cx) + cx.tcx_opt().and_then(|tcx| tcx.lookup_deprecation(def_id)).clean(cx) } pub trait Clean { @@ -112,13 +117,16 @@ impl, U> Clean> for P<[T]> { } } -#[derive(Clone, RustcEncodable, RustcDecodable, Debug)] +#[derive(Clone, Debug)] pub struct Crate { pub name: String, pub src: PathBuf, pub module: Option, pub externs: Vec<(ast::CrateNum, ExternalCrate)>, pub primitives: Vec, + pub access_levels: Arc>, + // These are later on moved into `CACHEKEY`, leaving the map empty. + // Only here so that they can be filtered through the rustdoc passes. 
pub external_traits: HashMap, } @@ -127,14 +135,20 @@ struct CrateNum(ast::CrateNum); impl<'a, 'tcx> Clean for visit_ast::RustdocVisitor<'a, 'tcx> { fn clean(&self, cx: &DocContext) -> Crate { use rustc::session::config::Input; + use ::visit_lib::LibEmbargoVisitor; if let Some(t) = cx.tcx_opt() { cx.deref_trait_did.set(t.lang_items.deref_trait()); + cx.renderinfo.borrow_mut().deref_trait_did = cx.deref_trait_did.get(); } let mut externs = Vec::new(); for cnum in cx.sess().cstore.crates() { externs.push((cnum, CrateNum(cnum).clean(cx))); + if cx.tcx_opt().is_some() { + // Analyze doc-reachability for extern items + LibEmbargoVisitor::new(cx).visit_lib(cnum); + } } externs.sort_by(|&(a, _), &(b, _)| a.cmp(&b)); @@ -183,7 +197,7 @@ impl<'a, 'tcx> Clean for visit_ast::RustdocVisitor<'a, 'tcx> { source: Span::empty(), name: Some(prim.to_url_str().to_string()), attrs: child.attrs.clone(), - visibility: Some(hir::Public), + visibility: Some(Public), stability: None, deprecation: None, def_id: DefId::local(prim.to_def_index()), @@ -204,14 +218,17 @@ impl<'a, 'tcx> Clean for visit_ast::RustdocVisitor<'a, 'tcx> { Input::Str { ref name, .. } => PathBuf::from(name.clone()), }; + let mut access_levels = cx.access_levels.borrow_mut(); + let mut external_traits = cx.external_traits.borrow_mut(); + Crate { name: name.to_string(), src: src, module: Some(module), externs: externs, primitives: primitives, - external_traits: cx.external_traits.borrow_mut().take() - .unwrap_or(HashMap::new()), + access_levels: Arc::new(mem::replace(&mut access_levels, Default::default())), + external_traits: mem::replace(&mut external_traits, Default::default()), } } } @@ -540,8 +557,7 @@ impl Clean for hir::TyParam { impl<'tcx> Clean for ty::TypeParameterDef<'tcx> { fn clean(&self, cx: &DocContext) -> TyParam { - cx.external_typarams.borrow_mut().as_mut().unwrap() - .insert(self.def_id, self.name.clean(cx)); + cx.renderinfo.borrow_mut().external_typarams.insert(self.def_id, self.name.clean(cx)); TyParam { name: self.name.clean(cx), did: self.def_id, @@ -715,7 +731,7 @@ impl<'tcx> Clean for ty::TraitRef<'tcx> { // collect any late bound regions let mut late_bounds = vec![]; for &ty_s in self.substs.types.get_slice(ParamSpace::TypeSpace) { - if let ty::TyTuple(ref ts) = ty_s.sty { + if let ty::TyTuple(ts) = ty_s.sty { for &ty_s in ts { if let ty::TyRef(ref reg, _) = ty_s.sty { if let &ty::Region::ReLateBound(_, _) = *reg { @@ -779,7 +795,17 @@ impl Clean for hir::Lifetime { impl Clean for hir::LifetimeDef { fn clean(&self, _: &DocContext) -> Lifetime { - Lifetime(self.lifetime.name.to_string()) + if self.bounds.len() > 0 { + let mut s = format!("{}: {}", + self.lifetime.name.to_string(), + self.bounds[0].name.to_string()); + for bound in self.bounds.iter().skip(1) { + s.push_str(&format!(" + {}", bound.name.to_string())); + } + Lifetime(s) + } else { + Lifetime(self.lifetime.name.to_string()) + } } } @@ -849,6 +875,8 @@ impl<'a> Clean for ty::Predicate<'a> { Predicate::Projection(ref pred) => pred.clean(cx), Predicate::WellFormed(_) => panic!("not user writable"), Predicate::ObjectSafe(_) => panic!("not user writable"), + Predicate::ClosureKind(..) => panic!("not user writable"), + Predicate::Rfc1592(..) 
=> panic!("not user writable"), } } } @@ -1007,7 +1035,6 @@ impl<'a, 'tcx> Clean for (&'a ty::Generics<'tcx>, #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Method { pub generics: Generics, - pub self_: SelfTy, pub unsafety: hir::Unsafety, pub constness: hir::Constness, pub decl: FnDecl, @@ -1016,14 +1043,9 @@ pub struct Method { impl Clean for hir::MethodSig { fn clean(&self, cx: &DocContext) -> Method { - let all_inputs = &self.decl.inputs; - let inputs = match self.explicit_self.node { - hir::SelfStatic => &**all_inputs, - _ => &all_inputs[1..] - }; let decl = FnDecl { inputs: Arguments { - values: inputs.clean(cx), + values: self.decl.inputs.clean(cx), }, output: self.decl.output.clean(cx), variadic: false, @@ -1031,7 +1053,6 @@ impl Clean for hir::MethodSig { }; Method { generics: self.generics.clean(cx), - self_: self.explicit_self.node.clean(cx), unsafety: self.unsafety, constness: self.constness, decl: decl, @@ -1045,19 +1066,14 @@ pub struct TyMethod { pub unsafety: hir::Unsafety, pub decl: FnDecl, pub generics: Generics, - pub self_: SelfTy, pub abi: Abi, } impl Clean for hir::MethodSig { fn clean(&self, cx: &DocContext) -> TyMethod { - let inputs = match self.explicit_self.node { - hir::SelfStatic => &*self.decl.inputs, - _ => &self.decl.inputs[1..] - }; let decl = FnDecl { inputs: Arguments { - values: inputs.clean(cx), + values: self.decl.inputs.clean(cx), }, output: self.decl.output.clean(cx), variadic: false, @@ -1066,34 +1082,12 @@ impl Clean for hir::MethodSig { TyMethod { unsafety: self.unsafety.clone(), decl: decl, - self_: self.explicit_self.node.clean(cx), generics: self.generics.clean(cx), abi: self.abi } } } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] -pub enum SelfTy { - SelfStatic, - SelfValue, - SelfBorrowed(Option, Mutability), - SelfExplicit(Type), -} - -impl Clean for hir::ExplicitSelf_ { - fn clean(&self, cx: &DocContext) -> SelfTy { - match *self { - hir::SelfStatic => SelfStatic, - hir::SelfValue(_) => SelfValue, - hir::SelfRegion(ref lt, ref mt, _) => { - SelfBorrowed(lt.clean(cx), mt.clean(cx)) - } - hir::SelfExplicit(ref typ, _) => SelfExplicit(typ.clean(cx)), - } - } -} - #[derive(Clone, RustcEncodable, RustcDecodable, Debug)] pub struct Function { pub decl: FnDecl, @@ -1132,6 +1126,12 @@ pub struct FnDecl { pub attrs: Vec, } +impl FnDecl { + pub fn has_self(&self) -> bool { + return self.inputs.values.len() > 0 && self.inputs.values[0].name == "self"; + } +} + #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] pub struct Arguments { pub values: Vec, @@ -1167,9 +1167,6 @@ impl<'a, 'tcx> Clean for (DefId, &'a ty::PolyFnSig<'tcx>) { } else { cx.tcx().sess.cstore.method_arg_names(did).into_iter() }.peekable(); - if let Some("self") = names.peek().map(|s| &s[..]) { - let _ = names.next(); - } FnDecl { output: Return(sig.0.output.clean(cx)), attrs: Vec::new(), @@ -1194,6 +1191,29 @@ pub struct Argument { pub id: ast::NodeId, } +#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Debug)] +pub enum SelfTy { + SelfValue, + SelfBorrowed(Option, Mutability), + SelfExplicit(Type), +} + +impl Argument { + pub fn to_self(&self) -> Option { + if self.name == "self" { + match self.type_ { + Infer => Some(SelfValue), + BorrowedRef{ref lifetime, mutability, ref type_} if **type_ == Infer => { + Some(SelfBorrowed(lifetime.clone(), mutability)) + } + _ => Some(SelfExplicit(self.type_.clone())) + } + } else { + None + } + } +} + impl Clean for hir::Arg { fn clean(&self, cx: &DocContext) -> Argument { 
Argument { @@ -1269,8 +1289,7 @@ impl Clean for hir::TraitItem { let inner = match self.node { hir::ConstTraitItem(ref ty, ref default) => { AssociatedConstItem(ty.clean(cx), - default.as_ref().map(|expr| - expr.span.to_src(cx))) + default.as_ref().map(|e| pprust::expr_to_string(&e))) } hir::MethodTraitItem(ref sig, Some(_)) => { MethodItem(sig.clean(cx)) @@ -1300,7 +1319,7 @@ impl Clean for hir::ImplItem { let inner = match self.node { hir::ImplItemKind::Const(ref ty, ref expr) => { AssociatedConstItem(ty.clean(cx), - Some(expr.span.to_src(cx))) + Some(pprust::expr_to_string(expr))) } hir::ImplItemKind::Method(ref sig, _) => { MethodItem(sig.clean(cx)) @@ -1329,36 +1348,21 @@ impl Clean for hir::ImplItem { impl<'tcx> Clean for ty::Method<'tcx> { fn clean(&self, cx: &DocContext) -> Item { - let (self_, sig) = match self.explicit_self { - ty::ExplicitSelfCategory::Static => (hir::SelfStatic.clean(cx), - self.fty.sig.clone()), - s => { - let sig = ty::Binder(ty::FnSig { - inputs: self.fty.sig.0.inputs[1..].to_vec(), - ..self.fty.sig.0.clone() - }); - let s = match s { - ty::ExplicitSelfCategory::ByValue => SelfValue, - ty::ExplicitSelfCategory::ByReference(..) => { - match self.fty.sig.0.inputs[0].sty { - ty::TyRef(r, mt) => { - SelfBorrowed(r.clean(cx), mt.mutbl.clean(cx)) - } - _ => unreachable!(), - } - } - ty::ExplicitSelfCategory::ByBox => { - SelfExplicit(self.fty.sig.0.inputs[0].clean(cx)) - } - ty::ExplicitSelfCategory::Static => unreachable!(), - }; - (s, sig) - } - }; - let generics = (&self.generics, &self.predicates, subst::FnSpace).clean(cx); - let decl = (self.def_id, &sig).clean(cx); + let mut decl = (self.def_id, &self.fty.sig).clean(cx); + match self.explicit_self { + ty::ExplicitSelfCategory::ByValue => { + decl.inputs.values[0].type_ = Infer; + } + ty::ExplicitSelfCategory::ByReference(..) => { + match decl.inputs.values[0].type_ { + BorrowedRef{ref mut type_, ..} => **type_ = Infer, + _ => unreachable!(), + } + } + _ => {} + } let provided = match self.container { ty::ImplContainer(..) => false, ty::TraitContainer(did) => { @@ -1371,7 +1375,6 @@ impl<'tcx> Clean for ty::Method<'tcx> { MethodItem(Method { unsafety: self.fty.unsafety, generics: generics, - self_: self_, decl: decl, abi: self.fty.abi, @@ -1382,7 +1385,6 @@ impl<'tcx> Clean for ty::Method<'tcx> { TyMethodItem(TyMethod { unsafety: self.fty.unsafety, generics: generics, - self_: self_, decl: decl, abi: self.fty.abi, }) @@ -1390,7 +1392,7 @@ impl<'tcx> Clean for ty::Method<'tcx> { Item { name: Some(self.name.clean(cx)), - visibility: Some(hir::Inherited), + visibility: Some(Inherited), stability: get_stability(cx, self.def_id), deprecation: get_deprecation(cx, self.def_id), def_id: self.def_id, @@ -1516,6 +1518,13 @@ impl Type { _ => None, } } + + pub fn is_generic(&self) -> bool { + match *self { + ResolvedPath { is_generic, .. 
} => is_generic, + _ => false, + } + } } impl GetDefId for Type { @@ -1612,8 +1621,8 @@ impl Clean for hir::Ty { BorrowedRef {lifetime: l.clean(cx), mutability: m.mutbl.clean(cx), type_: box m.ty.clean(cx)}, TyVec(ref ty) => Vector(box ty.clean(cx)), - TyFixedLengthVec(ref ty, ref e) => FixedVector(box ty.clean(cx), - e.span.to_src(cx)), + TyFixedLengthVec(ref ty, ref e) => + FixedVector(box ty.clean(cx), pprust::expr_to_string(e)), TyTup(ref tys) => Tuple(tys.clean(cx)), TyPath(None, ref p) => { resolve_type(cx, p.clean(cx), self.id) @@ -1627,7 +1636,7 @@ impl Clean for hir::Ty { segments: segments.into(), }; Type::QPath { - name: p.segments.last().unwrap().identifier.name.clean(cx), + name: p.segments.last().unwrap().name.clean(cx), self_type: box qself.ty.clean(cx), trait_: box resolve_type(cx, trait_path.clean(cx), self.id) } @@ -1698,7 +1707,7 @@ impl<'tcx> Clean for ty::Ty<'tcx> { where_predicates: Vec::new() }, decl: (cx.map.local_def_id(0), &fty.sig).clean(cx), - abi: fty.abi.to_string(), + abi: fty.abi, }), ty::TyStruct(def, substs) | ty::TyEnum(def, substs) => { @@ -1776,17 +1785,21 @@ impl<'tcx> Clean for ty::FieldDefData<'tcx, 'static> { } } -pub type Visibility = hir::Visibility; +#[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug)] +pub enum Visibility { + Public, + Inherited, +} impl Clean> for hir::Visibility { fn clean(&self, _: &DocContext) -> Option { - Some(self.clone()) + Some(if *self == hir::Visibility::Public { Public } else { Inherited }) } } impl Clean> for ty::Visibility { fn clean(&self, _: &DocContext) -> Option { - Some(if *self == ty::Visibility::Public { hir::Public } else { hir::Inherited }) + Some(if *self == ty::Visibility::Public { Public } else { Inherited }) } } @@ -1918,7 +1931,7 @@ impl<'tcx> Clean for ty::VariantDefData<'tcx, 'static> { name: Some(self.name.clean(cx)), attrs: inline::load_attrs(cx, cx.tcx(), self.did), source: Span::empty(), - visibility: Some(hir::Inherited), + visibility: Some(Inherited), def_id: self.did, inner: VariantItem(Variant { kind: kind }), stability: get_stability(cx, self.did), @@ -2003,6 +2016,10 @@ impl Path { }] } } + + pub fn last_name(&self) -> String { + self.segments.last().unwrap().name.clone() + } } impl Clean for hir::Path { @@ -2057,7 +2074,7 @@ pub struct PathSegment { impl Clean for hir::PathSegment { fn clean(&self, cx: &DocContext) -> PathSegment { PathSegment { - name: self.identifier.name.clean(cx), + name: self.name.clean(cx), params: self.parameters.clean(cx) } } @@ -2066,7 +2083,7 @@ impl Clean for hir::PathSegment { fn path_to_string(p: &hir::Path) -> String { let mut s = String::new(); let mut first = true; - for i in p.segments.iter().map(|x| x.identifier.name.as_str()) { + for i in p.segments.iter().map(|x| x.name.as_str()) { if !first || p.global { s.push_str("::"); } else { @@ -2112,7 +2129,7 @@ pub struct BareFunctionDecl { pub unsafety: hir::Unsafety, pub generics: Generics, pub decl: FnDecl, - pub abi: String, + pub abi: Abi, } impl Clean for hir::BareFnTy { @@ -2125,7 +2142,7 @@ impl Clean for hir::BareFnTy { where_predicates: Vec::new() }, decl: self.decl.clean(cx), - abi: self.abi.to_string(), + abi: self.abi, } } } @@ -2154,7 +2171,7 @@ impl Clean for doctree::Static { inner: StaticItem(Static { type_: self.type_.clean(cx), mutability: self.mutability.clean(cx), - expr: self.expr.span.to_src(cx), + expr: pprust::expr_to_string(&self.expr), }), } } @@ -2178,7 +2195,7 @@ impl Clean for doctree::Constant { deprecation: self.depr.clean(cx), inner: ConstantItem(Constant { 
type_: self.type_.clean(cx), - expr: self.expr.span.to_src(cx), + expr: pprust::expr_to_string(&self.expr), }), } } @@ -2340,7 +2357,7 @@ impl Clean for doctree::DefaultImpl { attrs: self.attrs.clean(cx), source: self.whence.clean(cx), def_id: cx.map.local_def_id(self.id), - visibility: Some(hir::Public), + visibility: Some(Public), stability: None, deprecation: None, inner: DefaultImplItem(DefaultImpl { @@ -2357,7 +2374,7 @@ impl Clean for doctree::ExternCrate { name: None, attrs: self.attrs.clean(cx), source: self.whence.clean(cx), - def_id: cx.map.local_def_id(0), + def_id: DefId { krate: self.cnum, index: CRATE_DEF_INDEX }, visibility: self.vis.clean(cx), stability: None, deprecation: None, @@ -2635,7 +2652,7 @@ fn resolve_type(cx: &DocContext, hir::TyFloat(ast::FloatTy::F64) => return Primitive(F64), }, Def::SelfTy(..) if path.segments.len() == 1 => { - return Generic(special_idents::type_self.name.to_string()); + return Generic(keywords::SelfType.name().to_string()); } Def::SelfTy(..) | Def::TyParam(..) => true, _ => false, @@ -2657,7 +2674,7 @@ fn register_def(cx: &DocContext, def: Def) -> DefId { Def::Static(i, _) => (i, TypeStatic), Def::Variant(i, _) => (i, TypeEnum), Def::SelfTy(Some(def_id), _) => (def_id, TypeTrait), - Def::SelfTy(_, Some((impl_id, _))) => return cx.map.local_def_id(impl_id), + Def::SelfTy(_, Some(impl_id)) => return cx.map.local_def_id(impl_id), _ => return def.def_id() }; if did.is_local() { return did } @@ -2668,7 +2685,7 @@ fn register_def(cx: &DocContext, def: Def) -> DefId { inline::record_extern_fqn(cx, did, kind); if let TypeTrait = kind { let t = inline::build_external_trait(cx, tcx, did); - cx.external_traits.borrow_mut().as_mut().unwrap().insert(did, t); + cx.external_traits.borrow_mut().insert(did, t); } did } @@ -2699,7 +2716,7 @@ impl Clean for doctree::Macro { name: Some(name.clone()), attrs: self.attrs.clean(cx), source: self.whence.clean(cx), - visibility: hir::Public.clean(cx), + visibility: Some(Public), stability: self.stab.clean(cx), deprecation: self.depr.clean(cx), def_id: cx.map.local_def_id(self.id), @@ -2839,8 +2856,8 @@ impl<'tcx> Clean for ty::AssociatedType<'tcx> { inner: AssociatedTypeItem(bounds, self.ty.clean(cx)), visibility: self.vis.clean(cx), def_id: self.def_id, - stability: stability::lookup_stability(cx.tcx(), self.def_id).clean(cx), - deprecation: stability::lookup_deprecation(cx.tcx(), self.def_id).clean(cx), + stability: cx.tcx().lookup_stability(self.def_id).clean(cx), + deprecation: cx.tcx().lookup_deprecation(self.def_id).clean(cx), } } } diff --git a/src/librustdoc/clean/simplify.rs b/src/librustdoc/clean/simplify.rs index 4ba412cdc8..c0faa04323 100644 --- a/src/librustdoc/clean/simplify.rs +++ b/src/librustdoc/clean/simplify.rs @@ -27,7 +27,7 @@ //! bounds by special casing scenarios such as these. Fun! 
use std::mem; -use std::collections::HashMap; +use std::collections::BTreeMap; use rustc::hir::def_id::DefId; use rustc::ty::subst; @@ -39,7 +39,7 @@ use core::DocContext; pub fn where_clauses(cx: &DocContext, clauses: Vec) -> Vec { // First, partition the where clause into its separate components - let mut params = HashMap::new(); + let mut params = BTreeMap::new(); let mut lifetimes = Vec::new(); let mut equalities = Vec::new(); let mut tybounds = Vec::new(); @@ -62,7 +62,7 @@ pub fn where_clauses(cx: &DocContext, clauses: Vec) -> Vec { // Simplify the type parameter bounds on all the generics let mut params = params.into_iter().map(|(k, v)| { (k, ty_bounds(v)) - }).collect::>(); + }).collect::>(); // Look for equality predicates on associated types that can be merged into // general bound predicates diff --git a/src/librustdoc/core.rs b/src/librustdoc/core.rs index 6b7aa103e1..61985d3908 100644 --- a/src/librustdoc/core.rs +++ b/src/librustdoc/core.rs @@ -20,8 +20,8 @@ use rustc::hir::map as hir_map; use rustc::lint; use rustc_trans::back::link; use rustc_resolve as resolve; -use rustc::hir::lowering::{lower_crate, LoweringContext}; use rustc_metadata::cstore::CStore; +use rustc_metadata::creader::read_local_crates; use syntax::{ast, codemap, errors}; use syntax::errors::emitter::ColorConfig; @@ -35,29 +35,35 @@ use std::rc::Rc; use visit_ast::RustdocVisitor; use clean; use clean::Clean; +use html::render::RenderInfo; pub use rustc::session::config::Input; pub use rustc::session::search_paths::SearchPaths; /// Are we generating documentation (`Typed`) or tests (`NotTyped`)? pub enum MaybeTyped<'a, 'tcx: 'a> { - Typed(&'a TyCtxt<'tcx>), + Typed(TyCtxt<'a, 'tcx, 'tcx>), NotTyped(&'a session::Session) } -pub type ExternalPaths = RefCell, clean::TypeKind)>>>; +pub type Externs = HashMap>; +pub type ExternalPaths = HashMap, clean::TypeKind)>; pub struct DocContext<'a, 'tcx: 'a> { pub map: &'a hir_map::Map<'tcx>, pub maybe_typed: MaybeTyped<'a, 'tcx>, pub input: Input, - pub external_paths: ExternalPaths, - pub external_traits: RefCell>>, - pub external_typarams: RefCell>>, - pub inlined: RefCell>>, - pub all_crate_impls: RefCell>>, + pub populated_crate_impls: RefCell>, pub deref_trait_did: Cell>, + // Note that external items for which `doc(hidden)` applies to are shown as + // non-reachable while local items aren't. This is because we're reusing + // the access levels from crateanalysis. 
+ /// Later on moved into `clean::Crate` + pub access_levels: RefCell>, + /// Later on moved into `html::render::CACHE_KEY` + pub renderinfo: RefCell, + /// Later on moved through `clean::Crate` into `html::render::CACHE_KEY` + pub external_traits: RefCell>, } impl<'b, 'tcx> DocContext<'b, 'tcx> { @@ -68,33 +74,36 @@ impl<'b, 'tcx> DocContext<'b, 'tcx> { } } - pub fn tcx_opt<'a>(&'a self) -> Option<&'a TyCtxt<'tcx>> { + pub fn tcx_opt<'a>(&'a self) -> Option> { match self.maybe_typed { Typed(tcx) => Some(tcx), NotTyped(_) => None } } - pub fn tcx<'a>(&'a self) -> &'a TyCtxt<'tcx> { + pub fn tcx<'a>(&'a self) -> TyCtxt<'a, 'tcx, 'tcx> { let tcx_opt = self.tcx_opt(); tcx_opt.expect("tcx not present") } } -pub struct CrateAnalysis { - pub access_levels: AccessLevels, - pub external_paths: ExternalPaths, - pub external_typarams: RefCell>>, - pub inlined: RefCell>>, - pub deref_trait_did: Option, +pub trait DocAccessLevels { + fn is_doc_reachable(&self, DefId) -> bool; } -pub type Externs = HashMap>; +impl DocAccessLevels for AccessLevels { + fn is_doc_reachable(&self, did: DefId) -> bool { + self.is_public(did) + } +} -pub fn run_core(search_paths: SearchPaths, cfgs: Vec, externs: Externs, - input: Input, triple: Option) - -> (clean::Crate, CrateAnalysis) { +pub fn run_core(search_paths: SearchPaths, + cfgs: Vec, + externs: Externs, + input: Input, + triple: Option) -> (clean::Crate, RenderInfo) +{ // Parse, resolve, and typecheck the given crate. let cpath = match input { @@ -125,8 +134,10 @@ pub fn run_core(search_paths: SearchPaths, cfgs: Vec, externs: Externs, false, codemap.clone()); - let cstore = Rc::new(CStore::new(token::get_ident_interner())); - let sess = session::build_session_(sessopts, cpath, diagnostic_handler, + let dep_graph = DepGraph::new(false); + let _ignore = dep_graph.in_ignore(); + let cstore = Rc::new(CStore::new(&dep_graph, token::get_ident_interner())); + let sess = session::build_session_(sessopts, &dep_graph, cpath, diagnostic_handler, codemap, cstore.clone()); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); @@ -142,25 +153,30 @@ pub fn run_core(search_paths: SearchPaths, cfgs: Vec, externs: Externs, .expect("phase_2_configure_and_expand aborted in rustdoc!"); let krate = driver::assign_node_ids(&sess, krate); - // Lower ast -> hir. - let lcx = LoweringContext::new(&sess, Some(&krate)); - let mut hir_forest = hir_map::Forest::new(lower_crate(&lcx, &krate), DepGraph::new(false)); + + let mut defs = hir_map::collect_definitions(&krate); + read_local_crates(&sess, &cstore, &defs, &krate, &name, &dep_graph); + + // Lower ast -> hir and resolve. + let (analysis, resolutions, mut hir_forest) = { + driver::lower_and_resolve(&sess, &name, &mut defs, &krate, + &sess.dep_graph, resolve::MakeGlobMap::No) + }; + let arenas = ty::CtxtArenas::new(); - let hir_map = driver::make_map(&sess, &mut hir_forest); + let hir_map = hir_map::map_crate(&mut hir_forest, defs); - let krate_and_analysis = abort_on_err(driver::phase_3_run_analysis_passes(&sess, - &cstore, + abort_on_err(driver::phase_3_run_analysis_passes(&sess, hir_map, + analysis, + resolutions, &arenas, &name, - resolve::MakeGlobMap::No, |tcx, _, analysis, result| { - // Return if the driver hit an err (in `result`) if let Err(_) = result { - return None + sess.fatal("Compilation failed, aborting rustdoc"); } - let _ignore = tcx.dep_graph.in_ignore(); let ty::CrateAnalysis { access_levels, .. 
} = analysis; // Convert from a NodeId set to a DefId set since we don't always have easy access @@ -175,42 +191,20 @@ pub fn run_core(search_paths: SearchPaths, cfgs: Vec, externs: Externs, map: &tcx.map, maybe_typed: Typed(tcx), input: input, - external_traits: RefCell::new(Some(HashMap::new())), - external_typarams: RefCell::new(Some(HashMap::new())), - external_paths: RefCell::new(Some(HashMap::new())), - inlined: RefCell::new(Some(HashSet::new())), - all_crate_impls: RefCell::new(HashMap::new()), + populated_crate_impls: RefCell::new(HashSet::new()), deref_trait_did: Cell::new(None), + access_levels: RefCell::new(access_levels), + external_traits: RefCell::new(HashMap::new()), + renderinfo: RefCell::new(Default::default()), }; debug!("crate: {:?}", ctxt.map.krate()); - let mut analysis = CrateAnalysis { - access_levels: access_levels, - external_paths: RefCell::new(None), - external_typarams: RefCell::new(None), - inlined: RefCell::new(None), - deref_trait_did: None, - }; - let krate = { - let mut v = RustdocVisitor::new(&ctxt, Some(&analysis)); + let mut v = RustdocVisitor::new(&ctxt); v.visit(ctxt.map.krate()); v.clean(&ctxt) }; - let external_paths = ctxt.external_paths.borrow_mut().take(); - *analysis.external_paths.borrow_mut() = external_paths; - - let map = ctxt.external_typarams.borrow_mut().take(); - *analysis.external_typarams.borrow_mut() = map; - - let map = ctxt.inlined.borrow_mut().take(); - *analysis.inlined.borrow_mut() = map; - - analysis.deref_trait_did = ctxt.deref_trait_did.get(); - - Some((krate, analysis)) - }), &sess); - - krate_and_analysis.unwrap() + (krate, ctxt.renderinfo.into_inner()) + }), &sess) } diff --git a/src/librustdoc/doctree.rs b/src/librustdoc/doctree.rs index 2db4b779ee..408782a698 100644 --- a/src/librustdoc/doctree.rs +++ b/src/librustdoc/doctree.rs @@ -232,6 +232,7 @@ pub struct Macro { pub struct ExternCrate { pub name: Name, + pub cnum: ast::CrateNum, pub path: Option, pub vis: hir::Visibility, pub attrs: hir::HirVec, diff --git a/src/librustdoc/html/format.rs b/src/librustdoc/html/format.rs index ce20ad05ac..d4fdafea88 100644 --- a/src/librustdoc/html/format.rs +++ b/src/librustdoc/html/format.rs @@ -24,14 +24,16 @@ use syntax::abi::Abi; use rustc::hir; use clean; +use core::DocAccessLevels; use html::item_type::ItemType; +use html::escape::Escape; use html::render; use html::render::{cache, CURRENT_LOCATION_KEY}; /// Helper to render an optional visibility with a space after it (if the /// visibility is preset) #[derive(Copy, Clone)] -pub struct VisSpace<'a>(pub &'a Option); +pub struct VisSpace<'a>(pub &'a Option); /// Similarly to VisSpace, this structure is used to render a function style with a /// space after it. #[derive(Copy, Clone)] @@ -41,7 +43,7 @@ pub struct UnsafetySpace(pub hir::Unsafety); #[derive(Copy, Clone)] pub struct ConstnessSpace(pub hir::Constness); /// Wrapper struct for properly emitting a method declaration. 
-pub struct Method<'a>(pub &'a clean::SelfTy, pub &'a clean::FnDecl); +pub struct Method<'a>(pub &'a clean::FnDecl); /// Similar to VisSpace, but used for mutability #[derive(Copy, Clone)] pub struct MutableSpace(pub clean::Mutability); @@ -56,8 +58,13 @@ pub struct TyParamBounds<'a>(pub &'a [clean::TyParamBound]); pub struct CommaSep<'a, T: 'a>(pub &'a [T]); pub struct AbiSpace(pub Abi); +pub struct HRef<'a> { + pub did: DefId, + pub text: &'a str, +} + impl<'a> VisSpace<'a> { - pub fn get(self) -> &'a Option { + pub fn get(self) -> &'a Option { let VisSpace(v) = self; v } } @@ -290,11 +297,16 @@ impl fmt::Display for clean::Path { pub fn href(did: DefId) -> Option<(String, ItemType, Vec)> { let cache = cache(); + if !did.is_local() && !cache.access_levels.is_doc_reachable(did) { + return None + } + let loc = CURRENT_LOCATION_KEY.with(|l| l.borrow().clone()); let &(ref fqp, shortty) = match cache.paths.get(&did) { Some(p) => p, None => return None, }; + let mut url = if did.is_local() || cache.inlined.contains(&did) { repeat("../").take(loc.len()).collect::() } else { @@ -357,15 +369,7 @@ fn resolved_path(w: &mut fmt::Formatter, did: DefId, path: &clean::Path, } } } - - match href(did) { - Some((url, shortty, fqp)) => { - write!(w, "{}", - shortty, url, fqp.join("::"), last.name)?; - } - _ => write!(w, "{}", last.name)?, - } - write!(w, "{}", last.params)?; + write!(w, "{}{}", HRef::new(did, &last.name), last.params)?; Ok(()) } @@ -431,6 +435,24 @@ fn tybounds(w: &mut fmt::Formatter, } } +impl<'a> HRef<'a> { + pub fn new(did: DefId, text: &'a str) -> HRef<'a> { + HRef { did: did, text: text } + } +} + +impl<'a> fmt::Display for HRef<'a> { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match href(self.did) { + Some((url, shortty, fqp)) => { + write!(f, "{}", + shortty, url, fqp.join("::"), self.text) + } + _ => write!(f, "{}", self.text), + } + } +} + impl fmt::Display for clean::Type { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { @@ -447,11 +469,7 @@ impl fmt::Display for clean::Type { clean::BareFunction(ref decl) => { write!(f, "{}{}fn{}{}", UnsafetySpace(decl.unsafety), - match &*decl.abi { - "" => " extern ".to_string(), - "\"Rust\"" => "".to_string(), - s => format!(" extern {} ", s) - }, + AbiSpace(decl.abi), decl.generics, decl.decl) } @@ -479,7 +497,7 @@ impl fmt::Display for clean::Type { primitive_link(f, clean::PrimitiveType::Array, "[")?; write!(f, "{}", t)?; primitive_link(f, clean::PrimitiveType::Array, - &format!("; {}]", *s)) + &format!("; {}]", Escape(s))) } clean::Bottom => f.write_str("!"), clean::RawPointer(m, ref t) => { @@ -561,19 +579,39 @@ impl fmt::Display for clean::Type { } } +fn fmt_impl(i: &clean::Impl, f: &mut fmt::Formatter, link_trait: bool) -> fmt::Result { + write!(f, "impl{} ", i.generics)?; + if let Some(ref ty) = i.trait_ { + write!(f, "{}", + if i.polarity == Some(clean::ImplPolarity::Negative) { "!" } else { "" })?; + if link_trait { + write!(f, "{}", *ty)?; + } else { + match *ty { + clean::ResolvedPath{ typarams: None, ref path, is_generic: false, .. 
} => { + let last = path.segments.last().unwrap(); + write!(f, "{}{}", last.name, last.params)?; + } + _ => unreachable!(), + } + } + write!(f, " for ")?; + } + write!(f, "{}{}", i.for_, WhereClause(&i.generics))?; + Ok(()) +} + impl fmt::Display for clean::Impl { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - write!(f, "impl{} ", self.generics)?; - if let Some(ref ty) = self.trait_ { - write!(f, "{}{} for ", - if self.polarity == Some(clean::ImplPolarity::Negative) { "!" } else { "" }, - *ty)?; - } - write!(f, "{}{}", self.for_, WhereClause(&self.generics))?; - Ok(()) + fmt_impl(self, f, true) } } +// The difference from above is that trait is not hyperlinked. +pub fn fmt_impl_for_trait_page(i: &clean::Impl, f: &mut fmt::Formatter) -> fmt::Result { + fmt_impl(i, f, false) +} + impl fmt::Display for clean::Arguments { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { for (i, input) in self.values.iter().enumerate() { @@ -610,37 +648,39 @@ impl fmt::Display for clean::FnDecl { impl<'a> fmt::Display for Method<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - let Method(selfty, d) = *self; + let decl = self.0; let mut args = String::new(); - match *selfty { - clean::SelfStatic => {}, - clean::SelfValue => args.push_str("self"), - clean::SelfBorrowed(Some(ref lt), mtbl) => { - args.push_str(&format!("&{} {}self", *lt, MutableSpace(mtbl))); - } - clean::SelfBorrowed(None, mtbl) => { - args.push_str(&format!("&{}self", MutableSpace(mtbl))); - } - clean::SelfExplicit(ref typ) => { - args.push_str(&format!("self: {}", *typ)); - } - } - for (i, input) in d.inputs.values.iter().enumerate() { + for (i, input) in decl.inputs.values.iter().enumerate() { if i > 0 || !args.is_empty() { args.push_str(", "); } - if !input.name.is_empty() { - args.push_str(&format!("{}: ", input.name)); + if let Some(selfty) = input.to_self() { + match selfty { + clean::SelfValue => args.push_str("self"), + clean::SelfBorrowed(Some(ref lt), mtbl) => { + args.push_str(&format!("&{} {}self", *lt, MutableSpace(mtbl))); + } + clean::SelfBorrowed(None, mtbl) => { + args.push_str(&format!("&{}self", MutableSpace(mtbl))); + } + clean::SelfExplicit(ref typ) => { + args.push_str(&format!("self: {}", *typ)); + } + } + } else { + if !input.name.is_empty() { + args.push_str(&format!("{}: ", input.name)); + } + args.push_str(&format!("{}", input.type_)); } - args.push_str(&format!("{}", input.type_)); } - write!(f, "({args}){arrow}", args = args, arrow = d.output) + write!(f, "({args}){arrow}", args = args, arrow = decl.output) } } impl<'a> fmt::Display for VisSpace<'a> { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self.get() { - Some(hir::Public) => write!(f, "pub "), - Some(hir::Inherited) | None => Ok(()) + Some(clean::Public) => write!(f, "pub "), + Some(clean::Inherited) | None => Ok(()) } } } @@ -667,7 +707,7 @@ impl fmt::Display for clean::Import { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { match *self { clean::SimpleImport(ref name, ref src) => { - if *name == src.path.segments.last().unwrap().name { + if *name == src.path.last_name() { write!(f, "use {};", *src) } else { write!(f, "use {} as {};", *src, *name) @@ -753,7 +793,7 @@ impl fmt::Display for AbiSpace { match self.0 { Abi::Rust => Ok(()), Abi::C => write!(f, "extern "), - abi => write!(f, "extern {} ", abi), + abi => write!(f, "extern "{}" ", abi.name()), } } } diff --git a/src/librustdoc/html/highlight.rs b/src/librustdoc/html/highlight.rs index 7ca4703a2e..7ccf51a462 100644 --- 
a/src/librustdoc/html/highlight.rs +++ b/src/librustdoc/html/highlight.rs @@ -1,4 +1,4 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// Copyright 2014-2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -8,16 +8,26 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! Basic html highlighting functionality +//! Basic syntax highlighting functionality. //! //! This module uses libsyntax's lexer to provide token-based highlighting for //! the HTML documentation generated by rustdoc. +//! +//! If you just want to syntax highlighting for a Rust program, then you can use +//! the `render_inner_with_highlighting` or `render_with_highlighting` +//! functions. For more advanced use cases (if you want to supply your own css +//! classes or control how the HTML is generated, or even generate something +//! other then HTML), then you should implement the the `Writer` trait and use a +//! `Classifier`. use html::escape::Escape; +use std::fmt::Display; use std::io; use std::io::prelude::*; -use syntax::parse::lexer::{self, Reader}; + +use syntax::codemap::{CodeMap, Span}; +use syntax::parse::lexer::{self, Reader, TokenAndSpan}; use syntax::parse::token; use syntax::parse; @@ -29,9 +39,13 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str> let mut out = Vec::new(); write_header(class, id, &mut out).unwrap(); - write_source(&sess, - lexer::StringReader::new(&sess.span_diagnostic, fm), - &mut out).unwrap(); + + let mut classifier = Classifier::new(lexer::StringReader::new(&sess.span_diagnostic, fm), + sess.codemap()); + if let Err(_) = classifier.write_source(&mut out) { + return format!("
{}
", src); + } + write_footer(&mut out).unwrap(); String::from_utf8_lossy(&out[..]).into_owned() } @@ -39,79 +53,192 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str> /// Highlights `src`, returning the HTML output. Returns only the inner html to /// be inserted into an element. C.f., `render_with_highlighting` which includes /// an enclosing `
` block.
-pub fn render_inner_with_highlighting(src: &str) -> String {
+pub fn render_inner_with_highlighting(src: &str) -> io::Result<String> {
     let sess = parse::ParseSess::new();
     let fm = sess.codemap().new_filemap("".to_string(), src.to_string());
 
     let mut out = Vec::new();
-    write_source(&sess,
-                 lexer::StringReader::new(&sess.span_diagnostic, fm),
-                 &mut out).unwrap();
-    String::from_utf8_lossy(&out[..]).into_owned()
+    let mut classifier = Classifier::new(lexer::StringReader::new(&sess.span_diagnostic, fm),
+                                         sess.codemap());
+    classifier.write_source(&mut out)?;
+
+    Ok(String::from_utf8_lossy(&out).into_owned())
+}
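
For illustration only (a hypothetical caller, not part of this patch): since the function above now returns `io::Result<String>`, users are expected to fall back to escaped plain text when the lexer backs out of highlighting. A minimal sketch, assuming a custom wrapper element:

    // Hypothetical caller: embed the inner HTML in a custom wrapper element,
    // falling back to escaped source if highlighting fails.
    fn render_in_code_tag(src: &str) -> String {
        match render_inner_with_highlighting(src) {
            Ok(inner) => format!("<code class=\"rust\">{}</code>", inner),
            Err(_) => format!("<code class=\"rust\">{}</code>", Escape(src)),
        }
    }
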
+
+/// Processes a program (nested in the internal `lexer`), classifying strings of
+/// text by highlighting category (`Class`). Calls out to a `Writer` to write
+/// each span of text in sequence.
+pub struct Classifier<'a> {
+    lexer: lexer::StringReader<'a>,
+    codemap: &'a CodeMap,
+
+    // State of the classifier.
+    in_attribute: bool,
+    in_macro: bool,
+    in_macro_nonterminal: bool,
+}
+
+/// How a span of text is classified. Mostly corresponds to token kinds.
+#[derive(Clone, Copy, Debug, Eq, PartialEq)]
+pub enum Class {
+    None,
+    Comment,
+    DocComment,
+    Attribute,
+    KeyWord,
+    // Keywords that do pointer/reference stuff.
+    RefKeyWord,
+    Self_,
+    Op,
+    Macro,
+    MacroNonTerminal,
+    String,
+    Number,
+    Bool,
+    Ident,
+    Lifetime,
+    PreludeTy,
+    PreludeVal,
 }
 
-/// Exhausts the `lexer` writing the output into `out`.
+/// Trait that controls writing the output of syntax highlighting. Users should
+/// implement this trait to customise writing output.
 ///
-/// The general structure for this method is to iterate over each token,
-/// possibly giving it an HTML span with a class specifying what flavor of token
-/// it's used. All source code emission is done as slices from the source map,
-/// not from the tokens themselves, in order to stay true to the original
-/// source.
-fn write_source(sess: &parse::ParseSess,
-                mut lexer: lexer::StringReader,
-                out: &mut Write)
-                -> io::Result<()> {
-    let mut is_attribute = false;
-    let mut is_macro = false;
-    let mut is_macro_nonterminal = false;
-    loop {
-        let next = lexer.next_token();
+/// The classifier will call into the `Writer` implementation as it finds spans
+/// of text to highlight. Exactly how that text should be highlighted is up to
+/// the implementation.
+pub trait Writer {
+    /// Called when we start processing a span of text that should be highlighted.
+    /// The `Class` argument specifies how it should be highlighted.
+    fn enter_span(&mut self, Class) -> io::Result<()>;
 
-        let snip = |sp| sess.codemap().span_to_snippet(sp).unwrap();
+    /// Called at the end of a span of highlighted text.
+    fn exit_span(&mut self) -> io::Result<()>;
 
-        if next.tok == token::Eof { break }
+    /// Called for a span of text, usually, but not always, a single token. If
+    /// the string of text (`T`) does correspond to a token, then the token will
+    /// also be passed. If the text should be highlighted differently from the
+    /// surrounding text, then the `Class` argument will be a value other than
+    /// `None`.
+    /// The following sequences of callbacks are equivalent:
+    /// ```plain
+    ///     enter_span(Foo), string("text", None), exit_span()
+    ///     string("text", Foo)
+    /// ```
+    /// The latter can be thought of as a shorthand for the former, which is
+    /// more flexible.
+    fn string<T: Display>(&mut self, T, Class, Option<&TokenAndSpan>) -> io::Result<()>;
+}
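
As an illustration of this trait, a minimal alternative `Writer` (a hypothetical sketch, not part of rustdoc) could record the classes reported by the `Classifier` instead of emitting HTML:

    // Hypothetical Writer: collect the highlighting classes seen (e.g. to test
    // the Classifier) rather than writing HTML spans.
    struct ClassCollector {
        classes: Vec<Class>,
    }

    impl Writer for ClassCollector {
        fn enter_span(&mut self, klass: Class) -> io::Result<()> {
            self.classes.push(klass);
            Ok(())
        }

        fn exit_span(&mut self) -> io::Result<()> {
            Ok(())
        }

        fn string<T: Display>(&mut self, _text: T, klass: Class,
                              _tas: Option<&TokenAndSpan>) -> io::Result<()> {
            if klass != Class::None {
                self.classes.push(klass);
            }
            Ok(())
        }
    }
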
 
-        let klass = match next.tok {
-            token::Whitespace => {
-                write!(out, "{}", Escape(&snip(next.sp)))?;
-                continue
-            },
-            token::Comment => {
-                write!(out, "{}",
-                       Escape(&snip(next.sp)))?;
-                continue
-            },
+// Implement `Writer` for anything that can be written to; this just implements
+// the default rustdoc behaviour.
+impl<U: Write> Writer for U {
+    fn string<T: Display>(&mut self,
+                          text: T,
+                          klass: Class,
+                          _tas: Option<&TokenAndSpan>)
+                          -> io::Result<()> {
+        match klass {
+            Class::None => write!(self, "{}", text),
+            klass => write!(self, "<span class='{}'>{}</span>", klass.rustdoc_class(), text),
+        }
+    }
+
+    fn enter_span(&mut self, klass: Class) -> io::Result<()> {
+        write!(self, "", klass.rustdoc_class())
+    }
+
+    fn exit_span(&mut self) -> io::Result<()> {
+        write!(self, "")
+    }
+}
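
Because of this blanket impl, any `io::Write` sink works as a `Writer` out of the box; a hypothetical snippet (using the CSS class names that `Class::rustdoc_class` defines further down, where `KeyWord` maps to "kw"):

    // Any Write sink is a Writer: an in-memory buffer collects the HTML spans.
    let mut out: Vec<u8> = Vec::new();
    out.enter_span(Class::KeyWord).unwrap();
    out.string("fn", Class::None, None).unwrap();
    out.exit_span().unwrap();
    assert_eq!(String::from_utf8(out).unwrap(), "<span class='kw'>fn</span>");
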
+
+impl<'a> Classifier<'a> {
+    pub fn new(lexer: lexer::StringReader<'a>, codemap: &'a CodeMap) -> Classifier<'a> {
+        Classifier {
+            lexer: lexer,
+            codemap: codemap,
+            in_attribute: false,
+            in_macro: false,
+            in_macro_nonterminal: false,
+        }
+    }
+
+    /// Exhausts the `lexer` writing the output into `out`.
+    ///
+    /// The general structure for this method is to iterate over each token,
+    /// possibly giving it an HTML span with a class specifying what flavor of token
+    /// is used. All source code emission is done as slices from the source map,
+    /// not from the tokens themselves, in order to stay true to the original
+    /// source.
+    pub fn write_source<W: Writer>(&mut self,
+                                   out: &mut W)
+                                   -> io::Result<()> {
+        loop {
+            let next = match self.lexer.try_next_token() {
+                Ok(tas) => tas,
+                Err(_) => {
+                    self.lexer.emit_fatal_errors();
+                    self.lexer.span_diagnostic.struct_warn("Backing out of syntax highlighting")
+                                              .note("You probably did not intend to render this \
+                                                     as a rust code-block")
+                                              .emit();
+                    return Err(io::Error::new(io::ErrorKind::Other, ""));
+                }
+            };
+
+            if next.tok == token::Eof {
+                break;
+            }
+
+            self.write_token(out, next)?;
+        }
+
+        Ok(())
+    }
+
+    // Handles an individual token from the lexer.
+    fn write_token<W: Writer>(&mut self,
+                              out: &mut W,
+                              tas: TokenAndSpan)
+                              -> io::Result<()> {
+        let klass = match tas.tok {
             token::Shebang(s) => {
-                write!(out, "{}", Escape(&s.as_str()))?;
-                continue
+                out.string(Escape(&s.as_str()), Class::None, Some(&tas))?;
+                return Ok(());
             },
+
+            token::Whitespace => Class::None,
+            token::Comment => Class::Comment,
+            token::DocComment(..) => Class::DocComment,
+
             // If this '&' token is directly adjacent to another token, assume
             // that it's the address-of operator instead of the and-operator.
-            // This allows us to give all pointers their own class (`Box` and
-            // `@` are below).
-            token::BinOp(token::And) if lexer.peek().sp.lo == next.sp.hi => "kw-2",
-            token::At | token::Tilde => "kw-2",
+            token::BinOp(token::And) if self.lexer.peek().sp.lo == tas.sp.hi => Class::RefKeyWord,
 
-            // consider this as part of a macro invocation if there was a
-            // leading identifier
-            token::Not if is_macro => { is_macro = false; "macro" }
+            // Consider this as part of a macro invocation if there was a
+            // leading identifier.
+            token::Not if self.in_macro => {
+                self.in_macro = false;
+                Class::Macro
+            }
 
-            // operators
+            // Operators.
             token::Eq | token::Lt | token::Le | token::EqEq | token::Ne | token::Ge | token::Gt |
                 token::AndAnd | token::OrOr | token::Not | token::BinOp(..) | token::RArrow |
-                token::BinOpEq(..) | token::FatArrow => "op",
+                token::BinOpEq(..) | token::FatArrow => Class::Op,
 
-            // miscellaneous, no highlighting
+            // Miscellaneous, no highlighting.
             token::Dot | token::DotDot | token::DotDotDot | token::Comma | token::Semi |
                 token::Colon | token::ModSep | token::LArrow | token::OpenDelim(_) |
                 token::CloseDelim(token::Brace) | token::CloseDelim(token::Paren) |
-                token::Question => "",
+                token::Question => Class::None,
             token::Dollar => {
-                if lexer.peek().tok.is_ident() {
-                    is_macro_nonterminal = true;
-                    "macro-nonterminal"
+                if self.lexer.peek().tok.is_ident() {
+                    self.in_macro_nonterminal = true;
+                    Class::MacroNonTerminal
                 } else {
-                    ""
+                    Class::None
                 }
             }
 
@@ -120,78 +247,103 @@ fn write_source(sess: &parse::ParseSess,
             // seen, so skip out early. Down below we terminate the attribute
             // span when we see the ']'.
             token::Pound => {
-                is_attribute = true;
-                write!(out, r"#")?;
-                continue
+                self.in_attribute = true;
+                out.enter_span(Class::Attribute)?;
+                out.string("#", Class::None, None)?;
+                return Ok(());
             }
             token::CloseDelim(token::Bracket) => {
-                if is_attribute {
-                    is_attribute = false;
-                    write!(out, "]")?;
-                    continue
+                if self.in_attribute {
+                    self.in_attribute = false;
+                    out.string("]", Class::None, None)?;
+                    out.exit_span()?;
+                    return Ok(());
                 } else {
-                    ""
+                    Class::None
                 }
             }
 
             token::Literal(lit, _suf) => {
                 match lit {
-                    // text literals
+                    // Text literals.
                     token::Byte(..) | token::Char(..) |
                         token::ByteStr(..) | token::ByteStrRaw(..) |
-                        token::Str_(..) | token::StrRaw(..) => "string",
+                        token::Str_(..) | token::StrRaw(..) => Class::String,
 
-                    // number literals
-                    token::Integer(..) | token::Float(..) => "number",
+                    // Number literals.
+                    token::Integer(..) | token::Float(..) => Class::Number,
                 }
             }
 
-            // keywords are also included in the identifier set
-            token::Ident(ident, _is_mod_sep) => {
+            // Keywords are also included in the identifier set.
+            token::Ident(ident) => {
                 match &*ident.name.as_str() {
-                    "ref" | "mut" => "kw-2",
+                    "ref" | "mut" => Class::RefKeyWord,
 
-                    "self" => "self",
-                    "false" | "true" => "boolval",
+                    "self" |"Self" => Class::Self_,
+                    "false" | "true" => Class::Bool,
 
-                    "Option" | "Result" => "prelude-ty",
-                    "Some" | "None" | "Ok" | "Err" => "prelude-val",
+                    "Option" | "Result" => Class::PreludeTy,
+                    "Some" | "None" | "Ok" | "Err" => Class::PreludeVal,
 
-                    _ if next.tok.is_any_keyword() => "kw",
+                    _ if tas.tok.is_any_keyword() => Class::KeyWord,
                     _ => {
-                        if is_macro_nonterminal {
-                            is_macro_nonterminal = false;
-                            "macro-nonterminal"
-                        } else if lexer.peek().tok == token::Not {
-                            is_macro = true;
-                            "macro"
+                        if self.in_macro_nonterminal {
+                            self.in_macro_nonterminal = false;
+                            Class::MacroNonTerminal
+                        } else if self.lexer.peek().tok == token::Not {
+                            self.in_macro = true;
+                            Class::Macro
                         } else {
-                            "ident"
+                            Class::Ident
                         }
                     }
                 }
             }
 
-            // Special macro vars are like keywords
-            token::SpecialVarNt(_) => "kw-2",
+            // Special macro vars are like keywords.
+            token::SpecialVarNt(_) => Class::KeyWord,
+
+            token::Lifetime(..) => Class::Lifetime,
 
-            token::Lifetime(..) => "lifetime",
-            token::DocComment(..) => "doccomment",
             token::Underscore | token::Eof | token::Interpolated(..) |
-                token::MatchNt(..) | token::SubstNt(..) => "",
+            token::MatchNt(..) | token::SubstNt(..) | token::Tilde | token::At => Class::None,
         };
 
-        // as mentioned above, use the original source code instead of
-        // stringifying this token
-        let snip = sess.codemap().span_to_snippet(next.sp).unwrap();
-        if klass == "" {
-            write!(out, "{}", Escape(&snip))?;
-        } else {
-            write!(out, "{}", klass, Escape(&snip))?;
-        }
+        // Anything that didn't return above is the simple case where the
+        // class just spans a single token, so we can use the `string` method.
+        out.string(Escape(&self.snip(tas.sp)), klass, Some(&tas))
     }
 
-    Ok(())
+    // Helper function to get a snippet from the codemap.
+    fn snip(&self, sp: Span) -> String {
+        self.codemap.span_to_snippet(sp).unwrap()
+    }
+}
+
+impl Class {
+    /// Returns the css class expected by rustdoc for each `Class`.
+    pub fn rustdoc_class(self) -> &'static str {
+        match self {
+            Class::None => "",
+            Class::Comment => "comment",
+            Class::DocComment => "doccomment",
+            Class::Attribute => "attribute",
+            Class::KeyWord => "kw",
+            Class::RefKeyWord => "kw-2",
+            Class::Self_ => "self",
+            Class::Op => "op",
+            Class::Macro => "macro",
+            Class::MacroNonTerminal => "macro-nonterminal",
+            Class::String => "string",
+            Class::Number => "number",
+            Class::Bool => "boolvalue",
+            Class::Ident => "ident",
+            Class::Lifetime => "lifetime",
+            Class::PreludeTy => "prelude-ty",
+            Class::PreludeVal => "prelude-val",
+        }
+    }
 }
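
Tying the pieces together, a driving sketch (hypothetical helper, reusing the `ClassCollector` sketched after the `Writer` trait above; the `ParseSess`/`StringReader` setup mirrors `render_inner_with_highlighting`, and the filemap name is arbitrary):

    // Hypothetical helper: classify a snippet and return the classes that were
    // reported, without producing any HTML.
    fn classes_of(src: &str) -> io::Result<Vec<Class>> {
        let sess = parse::ParseSess::new();
        let fm = sess.codemap().new_filemap("<highlight>".to_string(), src.to_string());

        let mut collector = ClassCollector { classes: Vec::new() };
        let mut classifier = Classifier::new(lexer::StringReader::new(&sess.span_diagnostic, fm),
                                             sess.codemap());
        classifier.write_source(&mut collector)?;
        Ok(collector.classes)
    }
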
 
 fn write_header(class: Option<&str>,
diff --git a/src/librustdoc/html/layout.rs b/src/librustdoc/html/layout.rs
index 8b2e849749..265ed6be15 100644
--- a/src/librustdoc/html/layout.rs
+++ b/src/librustdoc/html/layout.rs
@@ -9,7 +9,6 @@
 // except according to those terms.
 
 use std::fmt;
-use std::io::prelude::*;
 use std::io;
 
 use externalfiles::ExternalHtml;
@@ -104,6 +103,8 @@ r##"
                     
Move down in search results
Go to active search result
+
+
+
Collapse/expand all sections
diff --git a/src/librustdoc/html/render.rs b/src/librustdoc/html/render.rs index c585008957..7357ff3aba 100644 --- a/src/librustdoc/html/render.rs +++ b/src/librustdoc/html/render.rs @@ -62,13 +62,14 @@ use rustc::middle::stability; use rustc::session::config::get_unstable_features_setting; use rustc::hir; -use clean::{self, SelfTy, Attributes, GetDefId}; +use clean::{self, Attributes, GetDefId}; use doctree; use fold::DocFolder; use html::escape::Escape; use html::format::{ConstnessSpace}; use html::format::{TyParamBounds, WhereClause, href, AbiSpace}; use html::format::{VisSpace, Method, UnsafetySpace, MutableSpace}; +use html::format::fmt_impl_for_trait_page; use html::item_type::ItemType; use html::markdown::{self, Markdown}; use html::{highlight, layout}; @@ -144,14 +145,19 @@ pub struct Implementor { /// Metadata about implementations for a type. #[derive(Clone)] pub struct Impl { - pub impl_: clean::Impl, - pub dox: Option, - pub stability: Option, + pub impl_item: clean::Item, } impl Impl { + fn inner_impl(&self) -> &clean::Impl { + match self.impl_item.inner { + clean::ImplItem(ref impl_) => impl_, + _ => panic!("non-impl item found in impl") + } + } + fn trait_did(&self) -> Option { - self.impl_.trait_.def_id() + self.inner_impl().trait_.def_id() } } @@ -246,14 +252,20 @@ pub struct Cache { /// Set of definitions which have been inlined from external crates. pub inlined: HashSet, + // Note that external items for which `doc(hidden)` applies to are shown as + // non-reachable while local items aren't. This is because we're reusing + // the access levels from crateanalysis. + pub access_levels: Arc>, + // Private fields only used when initially crawling a crate to build a cache stack: Vec, parent_stack: Vec, parent_is_trait_impl: bool, search_index: Vec, + seen_modules: HashSet, + seen_mod: bool, stripped_mod: bool, - access_levels: AccessLevels, deref_trait_did: Option, // In rare case where a structure is defined in one module but implemented @@ -264,6 +276,16 @@ pub struct Cache { orphan_methods: Vec<(DefId, clean::Item)>, } +/// Temporary storage for data obtained during `RustdocVisitor::clean()`. +/// Later on moved into `CACHE_KEY`. 
+#[derive(Default)] +pub struct RenderInfo { + pub inlined: HashSet, + pub external_paths: ::core::ExternalPaths, + pub external_typarams: HashMap, + pub deref_trait_did: Option, +} + /// Helper struct to render all source code to HTML pages struct SourceCollector<'a> { scx: &'a mut SharedContext, @@ -415,7 +437,8 @@ pub fn run(mut krate: clean::Crate, external_html: &ExternalHtml, dst: PathBuf, passes: HashSet, - css_file_extension: Option) -> Result<(), Error> { + css_file_extension: Option, + renderinfo: RenderInfo) -> Result<(), Error> { let src_root = match krate.src.parent() { Some(p) => p.to_path_buf(), None => PathBuf::new(), @@ -482,19 +505,20 @@ pub fn run(mut krate: clean::Crate, }; // Crawl the crate to build various caches used for the output - let analysis = ::ANALYSISKEY.with(|a| a.clone()); - let analysis = analysis.borrow(); - let access_levels = analysis.as_ref().map(|a| a.access_levels.clone()); - let access_levels = access_levels.unwrap_or(Default::default()); - let paths: HashMap, ItemType)> = - analysis.as_ref().map(|a| { - let paths = a.external_paths.borrow_mut().take().unwrap(); - paths.into_iter().map(|(k, (v, t))| (k, (v, ItemType::from_type_kind(t)))).collect() - }).unwrap_or(HashMap::new()); + let RenderInfo { + inlined, + external_paths, + external_typarams, + deref_trait_did, + } = renderinfo; + + let paths = external_paths.into_iter() + .map(|(k, (v, t))| (k, (v, ItemType::from_type_kind(t)))) + .collect::>(); + let mut cache = Cache { impls: HashMap::new(), - external_paths: paths.iter().map(|(&k, v)| (k, v.0.clone())) - .collect(), + external_paths: paths.iter().map(|(&k, v)| (k, v.0.clone())).collect(), paths: paths, implementors: HashMap::new(), stack: Vec::new(), @@ -503,17 +527,15 @@ pub fn run(mut krate: clean::Crate, parent_is_trait_impl: false, extern_locations: HashMap::new(), primitive_locations: HashMap::new(), + seen_modules: HashSet::new(), + seen_mod: false, stripped_mod: false, - access_levels: access_levels, + access_levels: krate.access_levels.clone(), orphan_methods: Vec::new(), traits: mem::replace(&mut krate.external_traits, HashMap::new()), - deref_trait_did: analysis.as_ref().and_then(|a| a.deref_trait_did), - typarams: analysis.as_ref().map(|a| { - a.external_typarams.borrow_mut().take().unwrap() - }).unwrap_or(HashMap::new()), - inlined: analysis.as_ref().map(|a| { - a.inlined.borrow_mut().take().unwrap() - }).unwrap_or(HashSet::new()), + deref_trait_did: deref_trait_did, + typarams: external_typarams, + inlined: inlined, }; // Cache where all our extern crates are located @@ -570,8 +592,6 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String { for &(did, ref item) in orphan_methods { match paths.get(&did) { Some(&(ref fqp, _)) => { - // Needed to determine `self` type. 
- let parent_basename = Some(fqp[fqp.len() - 1].clone()); search_index.push(IndexItem { ty: shortty(item), name: item.name.clone().unwrap(), @@ -579,7 +599,7 @@ fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String { desc: Escape(&shorter(item.doc_value())).to_string(), parent: Some(did), parent_idx: None, - search_type: get_index_search_type(&item, parent_basename), + search_type: get_index_search_type(&item), }); }, None => {} @@ -761,7 +781,7 @@ fn write_shared(cx: &Context, try_err!(write!(&mut f, "{}", *implementor), &mydst); } - try_err!(write!(&mut f, r"implementors['{}'] = [", krate.name), &mydst); + try_err!(write!(&mut f, r#"implementors["{}"] = ["#, krate.name), &mydst); for imp in imps { // If the trait and implementation are in the same crate, then // there's no need to emit information about it (there's inlining @@ -803,13 +823,16 @@ fn write(dst: PathBuf, contents: &[u8]) -> Result<(), Error> { Ok(try_err!(try_err!(File::create(&dst), &dst).write_all(contents), &dst)) } -/// Makes a directory on the filesystem, failing the thread if an error occurs and -/// skipping if the directory already exists. +/// Makes a directory on the filesystem, failing the thread if an error occurs +/// and skipping if the directory already exists. +/// +/// Note that this also handles races as rustdoc is likely to be run +/// concurrently against another invocation. fn mkdir(path: &Path) -> io::Result<()> { - if !path.exists() { - fs::create_dir(path) - } else { - Ok(()) + match fs::create_dir(path) { + Ok(()) => Ok(()), + Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => Ok(()), + Err(e) => Err(e) } } @@ -963,13 +986,20 @@ impl DocFolder for Cache { // we don't want it or its children in the search index. let orig_stripped_mod = match item.inner { clean::StrippedItem(box clean::ModuleItem(..)) => { - let prev = self.stripped_mod; - self.stripped_mod = true; - prev + mem::replace(&mut self.stripped_mod, true) } _ => self.stripped_mod, }; + // Inlining can cause us to visit the same item multiple times. + // (i.e. relevant for gathering impls and implementors) + let orig_seen_mod = if item.is_mod() { + let seen_this = self.seen_mod || !self.seen_modules.insert(item.def_id); + mem::replace(&mut self.seen_mod, seen_this) + } else { + self.seen_mod + }; + // Register any generics to their corresponding string. This is used // when pretty-printing types match item.inner { @@ -985,20 +1015,22 @@ impl DocFolder for Cache { _ => {} } - // Propagate a trait methods' documentation to all implementors of the - // trait - if let clean::TraitItem(ref t) = item.inner { - self.traits.insert(item.def_id, t.clone()); - } + if !self.seen_mod { + // Propagate a trait methods' documentation to all implementors of the + // trait + if let clean::TraitItem(ref t) = item.inner { + self.traits.insert(item.def_id, t.clone()); + } - // Collect all the implementors of traits. - if let clean::ImplItem(ref i) = item.inner { - if let Some(did) = i.trait_.def_id() { - self.implementors.entry(did).or_insert(vec![]).push(Implementor { - def_id: item.def_id, - stability: item.stability.clone(), - impl_: i.clone(), - }); + // Collect all the implementors of traits. 
+ if let clean::ImplItem(ref i) = item.inner { + if let Some(did) = i.trait_.def_id() { + self.implementors.entry(did).or_insert(vec![]).push(Implementor { + def_id: item.def_id, + stability: item.stability.clone(), + impl_: i.clone(), + }); + } } } @@ -1047,13 +1079,6 @@ impl DocFolder for Cache { match parent { (parent, Some(path)) if is_method || (!self.stripped_mod) => { - // Needed to determine `self` type. - let parent_basename = self.parent_stack.first().and_then(|parent| { - match self.paths.get(parent) { - Some(&(ref fqp, _)) => Some(fqp[fqp.len() - 1].clone()), - _ => None - } - }); debug_assert!(!item.is_stripped()); // A crate has a module at its root, containing all items, @@ -1067,7 +1092,7 @@ impl DocFolder for Cache { desc: Escape(&shorter(item.doc_value())).to_string(), parent: parent, parent_idx: None, - search_type: get_index_search_type(&item, parent_basename), + search_type: get_index_search_type(&item), }); } } @@ -1160,33 +1185,36 @@ impl DocFolder for Cache { // Once we've recursively found all the generics, then hoard off all the // implementations elsewhere let ret = self.fold_item_recur(item).and_then(|item| { - if let clean::Item { attrs, inner: clean::ImplItem(i), .. } = item { + if let clean::Item { inner: clean::ImplItem(_), .. } = item { // Figure out the id of this impl. This may map to a // primitive rather than always to a struct/enum. - let did = match i.for_ { - clean::ResolvedPath { did, .. } | - clean::BorrowedRef { - type_: box clean::ResolvedPath { did, .. }, .. - } => { - Some(did) - } - - ref t => { - t.primitive_type().and_then(|t| { - self.primitive_locations.get(&t).map(|n| { - let id = t.to_def_index(); - DefId { krate: *n, index: id } + // Note: matching twice to restrict the lifetime of the `i` borrow. + let did = if let clean::Item { inner: clean::ImplItem(ref i), .. } = item { + match i.for_ { + clean::ResolvedPath { did, .. } | + clean::BorrowedRef { + type_: box clean::ResolvedPath { did, .. }, .. + } => { + Some(did) + } + ref t => { + t.primitive_type().and_then(|t| { + self.primitive_locations.get(&t).map(|n| { + let id = t.to_def_index(); + DefId { krate: *n, index: id } + }) }) - }) + } } + } else { + unreachable!() }; - - if let Some(did) = did { - self.impls.entry(did).or_insert(vec![]).push(Impl { - impl_: i, - dox: attrs.value("doc").map(|s|s.to_owned()), - stability: item.stability.clone(), - }); + if !self.seen_mod { + if let Some(did) = did { + self.impls.entry(did).or_insert(vec![]).push(Impl { + impl_item: item, + }); + } } None } else { @@ -1196,6 +1224,7 @@ impl DocFolder for Cache { if pushed { self.stack.pop().unwrap(); } if parent_pushed { self.parent_stack.pop().unwrap(); } + self.seen_mod = orig_seen_mod; self.stripped_mod = orig_stripped_mod; self.parent_is_trait_impl = orig_parent_is_trait_impl; return ret; @@ -1408,7 +1437,8 @@ impl Context { match it.inner { clean::StrippedItem(..) => true, clean::ModuleItem(ref m) => { - it.doc_value().is_none() && m.items.is_empty() && it.visibility != Some(hir::Public) + it.doc_value().is_none() && m.items.is_empty() + && it.visibility != Some(clean::Public) }, _ => false, } @@ -1479,11 +1509,15 @@ impl<'a> Item<'a> { // located, then we return `None`. 
} else { let cache = cache(); - let path = &cache.external_paths[&self.item.def_id]; - let root = match cache.extern_locations[&self.item.def_id.krate] { - (_, Remote(ref s)) => s.to_string(), - (_, Local) => self.cx.root_path.clone(), - (_, Unknown) => return None, + let path = match cache.external_paths.get(&self.item.def_id) { + Some(path) => path, + None => return None, + }; + let root = match cache.extern_locations.get(&self.item.def_id.krate) { + Some(&(_, Remote(ref s))) => s.to_string(), + Some(&(_, Local)) => self.cx.root_path.clone(), + Some(&(_, Unknown)) => return None, + None => return None, }; Some(format!("{root}{path}/{file}?gotosrc={goto}", root = root, @@ -1494,7 +1528,6 @@ impl<'a> Item<'a> { } } - impl<'a> fmt::Display for Item<'a> { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { debug_assert!(!self.item.is_stripped()); @@ -1532,6 +1565,10 @@ impl<'a> fmt::Display for Item<'a> { write!(fmt, "")?; // in-band write!(fmt, "")?; + if let Some(version) = self.item.stable_since() { + write!(fmt, "{0}", + version)?; + } write!(fmt, r##" @@ -1612,8 +1649,8 @@ fn plain_summary_line(s: Option<&str>) -> String { } fn document(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item) -> fmt::Result { - if let Some(s) = short_stability(item, cx, true) { - write!(w, "
{}
", s)?; + for stability in short_stability(item, cx, true) { + write!(w, "
{}
", stability)?; } if let Some(s) = item.doc_value() { write!(w, "
{}
", Markdown(s))?; @@ -1621,6 +1658,19 @@ fn document(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item) -> fmt::Re Ok(()) } +fn document_short(w: &mut fmt::Formatter, item: &clean::Item, link: AssocItemLink) -> fmt::Result { + if let Some(s) = item.doc_value() { + let markdown = if s.contains('\n') { + format!("{} [Read more]({})", + &plain_summary_line(Some(s)), naive_assoc_href(item, link)) + } else { + format!("{}", &plain_summary_line(Some(s))) + }; + write!(w, "
{}
", Markdown(&markdown))?; + } + Ok(()) +} + fn item_module(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item, items: &[clean::Item]) -> fmt::Result { document(w, cx, item)?; @@ -1711,16 +1761,19 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, match myitem.inner { clean::ExternCrateItem(ref name, ref src) => { + use html::format::HRef; + match *src { Some(ref src) => { write!(w, "{}extern crate {} as {};", VisSpace(&myitem.visibility), - src, + HRef::new(myitem.def_id, src), name)? } None => { write!(w, "{}extern crate {};", - VisSpace(&myitem.visibility), name)? + VisSpace(&myitem.visibility), + HRef::new(myitem.def_id, name))? } } write!(w, "")?; @@ -1733,8 +1786,15 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, _ => { if myitem.name.is_none() { continue } - let stab_docs = if let Some(s) = short_stability(myitem, cx, false) { - format!("[{}]", s) + + let stabilities = short_stability(myitem, cx, false); + + let stab_docs = if !stabilities.is_empty() { + stabilities.iter() + .map(|s| format!("[{}]", s)) + .collect::>() + .as_slice() + .join(" ") } else { String::new() }; @@ -1761,21 +1821,26 @@ fn item_module(w: &mut fmt::Formatter, cx: &Context, write!(w, "") } -fn short_stability(item: &clean::Item, cx: &Context, show_reason: bool) -> Option { - item.stability.as_ref().and_then(|stab| { +fn short_stability(item: &clean::Item, cx: &Context, show_reason: bool) -> Vec { + let mut stability = vec![]; + + if let Some(stab) = item.stability.as_ref() { let reason = if show_reason && !stab.reason.is_empty() { format!(": {}", stab.reason) } else { String::new() }; - let text = if !stab.deprecated_since.is_empty() { + if !stab.deprecated_since.is_empty() { let since = if show_reason { format!(" since {}", Escape(&stab.deprecated_since)) } else { String::new() }; - format!("Deprecated{}{}", since, Markdown(&reason)) - } else if stab.level == stability::Unstable { + let text = format!("Deprecated{}{}", since, Markdown(&reason)); + stability.push(format!("{}", text)) + }; + + if stab.level == stability::Unstable { let unstable_extra = if show_reason { match (!stab.feature.is_empty(), &cx.shared.issue_tracker_base_url, stab.issue) { (true, &Some(ref tracker_url), Some(issue_no)) if issue_no > 0 => @@ -1791,29 +1856,26 @@ fn short_stability(item: &clean::Item, cx: &Context, show_reason: bool) -> Optio } else { String::new() }; - format!("Unstable{}{}", unstable_extra, Markdown(&reason)) + let text = format!("Unstable{}{}", unstable_extra, Markdown(&reason)); + stability.push(format!("{}", text)) + }; + } else if let Some(depr) = item.deprecation.as_ref() { + let note = if show_reason && !depr.note.is_empty() { + format!(": {}", depr.note) } else { - return None + String::new() + }; + let since = if show_reason && !depr.since.is_empty() { + format!(" since {}", Escape(&depr.since)) + } else { + String::new() }; - Some(format!("{}", - item.stability_class(), text)) - }).or_else(|| { - item.deprecation.as_ref().and_then(|depr| { - let note = if show_reason && !depr.note.is_empty() { - format!(": {}", depr.note) - } else { - String::new() - }; - let since = if show_reason && !depr.since.is_empty() { - format!(" since {}", Escape(&depr.since)) - } else { - String::new() - }; - let text = format!("Deprecated{}{}", since, Markdown(¬e)); - Some(format!("{}", text)) - }) - }) + let text = format!("Deprecated{}{}", since, Markdown(¬e)); + stability.push(format!("{}", text)) + } + + stability } struct Initializer<'a>(&'a str); @@ -1823,7 +1885,7 @@ impl<'a> fmt::Display for 
Initializer<'a> { let Initializer(s) = *self; if s.is_empty() { return Ok(()); } write!(f, " = ")?; - write!(f, "{}", s) + write!(f, "{}", Escape(s)) } } @@ -1867,7 +1929,6 @@ fn item_function(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, generics = f.generics, where_clause = WhereClause(&f.generics), decl = f.decl)?; - render_stability_since_raw(w, it.stable_since(), None)?; document(w, cx, it) } @@ -1902,10 +1963,11 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, if t.items.is_empty() { write!(w, "{{ }}")?; } else { + // FIXME: we should be using a derived_id for the Anchors here write!(w, "{{\n")?; for t in &types { write!(w, " ")?; - render_assoc_item(w, t, AssocItemLink::Anchor)?; + render_assoc_item(w, t, AssocItemLink::Anchor(None))?; write!(w, ";\n")?; } if !types.is_empty() && !consts.is_empty() { @@ -1913,7 +1975,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, } for t in &consts { write!(w, " ")?; - render_assoc_item(w, t, AssocItemLink::Anchor)?; + render_assoc_item(w, t, AssocItemLink::Anchor(None))?; write!(w, ";\n")?; } if !consts.is_empty() && !required.is_empty() { @@ -1921,7 +1983,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, } for m in &required { write!(w, " ")?; - render_assoc_item(w, m, AssocItemLink::Anchor)?; + render_assoc_item(w, m, AssocItemLink::Anchor(None))?; write!(w, ";\n")?; } if !required.is_empty() && !provided.is_empty() { @@ -1929,7 +1991,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, } for m in &provided { write!(w, " ")?; - render_assoc_item(w, m, AssocItemLink::Anchor)?; + render_assoc_item(w, m, AssocItemLink::Anchor(None))?; write!(w, " {{ ... }}\n")?; } write!(w, "}}")?; @@ -1946,7 +2008,7 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, write!(w, "

", id = id, stab = m.stability_class())?; - render_assoc_item(w, m, AssocItemLink::Anchor)?; + render_assoc_item(w, m, AssocItemLink::Anchor(Some(&id)))?; write!(w, "")?; render_stability_since(w, m, t)?; write!(w, "

")?; @@ -2009,7 +2071,9 @@ fn item_trait(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, match cache.implementors.get(&it.def_id) { Some(implementors) => { for i in implementors { - writeln!(w, "
  • {}
  • ", i.impl_)?; + write!(w, "
  • ")?; + fmt_impl_for_trait_page(&i.impl_, w)?; + writeln!(w, "
  • ")?; } } None => {} @@ -2041,7 +2105,8 @@ fn naive_assoc_href(it: &clean::Item, link: AssocItemLink) -> String { let anchor = format!("#{}.{}", ty, name); match link { - AssocItemLink::Anchor => anchor, + AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id), + AssocItemLink::Anchor(None) => anchor, AssocItemLink::GotoSource(did, _) => { href(did).map(|p| format!("{}{}", p.0, anchor)).unwrap_or(anchor) } @@ -2059,7 +2124,7 @@ fn assoc_const(w: &mut fmt::Formatter, write!(w, ": {}", ty)?; if let Some(default) = default { - write!(w, " = {}", default)?; + write!(w, " = {}", Escape(default))?; } Ok(()) } @@ -2085,7 +2150,7 @@ fn render_stability_since_raw<'a>(w: &mut fmt::Formatter, containing_ver: Option<&'a str>) -> fmt::Result { if let Some(v) = ver { if containing_ver != ver && v.len() > 0 { - write!(w, "{}", + write!(w, "
    {0}
    ", v)? } } @@ -2107,16 +2172,14 @@ fn render_assoc_item(w: &mut fmt::Formatter, constness: hir::Constness, abi: abi::Abi, g: &clean::Generics, - selfty: &clean::SelfTy, d: &clean::FnDecl, link: AssocItemLink) -> fmt::Result { - use syntax::abi::Abi; - let name = meth.name.as_ref().unwrap(); let anchor = format!("#{}.{}", shortty(meth), name); let href = match link { - AssocItemLink::Anchor => anchor, + AssocItemLink::Anchor(Some(ref id)) => format!("#{}", id), + AssocItemLink::Anchor(None) => anchor, AssocItemLink::GotoSource(did, provided_methods) => { // We're creating a link from an impl-item to the corresponding // trait-item and need to map the anchored type accordingly. @@ -2138,25 +2201,22 @@ fn render_assoc_item(w: &mut fmt::Formatter, {generics}{decl}{where_clause}", ConstnessSpace(vis_constness), UnsafetySpace(unsafety), - match abi { - Abi::Rust => String::new(), - a => format!("extern {} ", a.to_string()) - }, + AbiSpace(abi), href = href, name = name, generics = *g, - decl = Method(selfty, d), + decl = Method(d), where_clause = WhereClause(g)) } match item.inner { clean::StrippedItem(..) => Ok(()), clean::TyMethodItem(ref m) => { method(w, item, m.unsafety, hir::Constness::NotConst, - m.abi, &m.generics, &m.self_, &m.decl, link) + m.abi, &m.generics, &m.decl, link) } clean::MethodItem(ref m) => { method(w, item, m.unsafety, m.constness, - m.abi, &m.generics, &m.self_, &m.decl, + m.abi, &m.generics, &m.decl, link) } clean::AssociatedConstItem(ref ty, ref default) => { @@ -2181,7 +2241,6 @@ fn item_struct(w: &mut fmt::Formatter, cx: &Context, it: &clean::Item, "", true)?; write!(w, "
    ")?; - render_stability_since_raw(w, it.stable_since(), None)?; document(w, cx, it)?; let mut fields = s.fields.iter().filter(|f| { @@ -2377,10 +2436,19 @@ fn render_struct(w: &mut fmt::Formatter, it: &clean::Item, #[derive(Copy, Clone)] enum AssocItemLink<'a> { - Anchor, + Anchor(Option<&'a str>), GotoSource(DefId, &'a HashSet), } +impl<'a> AssocItemLink<'a> { + fn anchor(&self, id: &'a String) -> Self { + match *self { + AssocItemLink::Anchor(_) => { AssocItemLink::Anchor(Some(&id)) }, + ref other => *other, + } + } +} + enum AssocItemRender<'a> { All, DerefFor { trait_: &'a clean::Type, type_: &'a clean::Type }, @@ -2397,7 +2465,7 @@ fn render_assoc_items(w: &mut fmt::Formatter, None => return Ok(()), }; let (non_trait, traits): (Vec<_>, _) = v.iter().partition(|i| { - i.impl_.trait_.is_none() + i.inner_impl().trait_.is_none() }); if !non_trait.is_empty() { let render_header = match what { @@ -2412,7 +2480,7 @@ fn render_assoc_items(w: &mut fmt::Formatter, } }; for i in &non_trait { - render_impl(w, cx, i, AssocItemLink::Anchor, render_header, + render_impl(w, cx, i, AssocItemLink::Anchor(None), render_header, containing_item.stable_since())?; } } @@ -2421,7 +2489,7 @@ fn render_assoc_items(w: &mut fmt::Formatter, } if !traits.is_empty() { let deref_impl = traits.iter().find(|t| { - t.impl_.trait_.def_id() == c.deref_trait_did + t.inner_impl().trait_.def_id() == c.deref_trait_did }); if let Some(impl_) = deref_impl { render_deref_methods(w, cx, impl_, containing_item)?; @@ -2429,11 +2497,11 @@ fn render_assoc_items(w: &mut fmt::Formatter, write!(w, "

    Trait \ Implementations

    ")?; let (derived, manual): (Vec<_>, Vec<&Impl>) = traits.iter().partition(|i| { - i.impl_.derived + i.inner_impl().derived }); for i in &manual { let did = i.trait_did().unwrap(); - let assoc_link = AssocItemLink::GotoSource(did, &i.impl_.provided_trait_methods); + let assoc_link = AssocItemLink::GotoSource(did, &i.inner_impl().provided_trait_methods); render_impl(w, cx, i, assoc_link, true, containing_item.stable_since())?; } if !derived.is_empty() { @@ -2442,7 +2510,8 @@ fn render_assoc_items(w: &mut fmt::Formatter, ")?; for i in &derived { let did = i.trait_did().unwrap(); - let assoc_link = AssocItemLink::GotoSource(did, &i.impl_.provided_trait_methods); + let assoc_link = AssocItemLink::GotoSource(did, + &i.inner_impl().provided_trait_methods); render_impl(w, cx, i, assoc_link, true, containing_item.stable_since())?; } } @@ -2452,8 +2521,8 @@ fn render_assoc_items(w: &mut fmt::Formatter, fn render_deref_methods(w: &mut fmt::Formatter, cx: &Context, impl_: &Impl, container_item: &clean::Item) -> fmt::Result { - let deref_type = impl_.impl_.trait_.as_ref().unwrap(); - let target = impl_.impl_.items.iter().filter_map(|item| { + let deref_type = impl_.inner_impl().trait_.as_ref().unwrap(); + let target = impl_.inner_impl().items.iter().filter_map(|item| { match item.inner { clean::TypedefItem(ref t, true) => Some(&t.type_), _ => None, @@ -2479,24 +2548,35 @@ fn render_deref_methods(w: &mut fmt::Formatter, cx: &Context, impl_: &Impl, fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLink, render_header: bool, outer_version: Option<&str>) -> fmt::Result { if render_header { - write!(w, "

    {}", i.impl_)?; - let since = i.stability.as_ref().map(|s| &s.since[..]); - render_stability_since_raw(w, since, outer_version)?; - write!(w, "

    ")?; - if let Some(ref dox) = i.dox { + write!(w, "

    {}", i.inner_impl())?; + write!(w, "")?; + let since = i.impl_item.stability.as_ref().map(|s| &s.since[..]); + if let Some(l) = (Item { item: &i.impl_item, cx: cx }).href() { + write!(w, "
    ")?; + render_stability_since_raw(w, since, outer_version)?; + write!(w, "
    [src]", + i.impl_item.def_id.index.as_usize(), l, "goto source code")?; + } else { + render_stability_since_raw(w, since, outer_version)?; + } + write!(w, "")?; + write!(w, "

    \n")?; + if let Some(ref dox) = i.impl_item.attrs.value("doc") { write!(w, "
    {}
    ", Markdown(dox))?; } } fn doctraititem(w: &mut fmt::Formatter, cx: &Context, item: &clean::Item, link: AssocItemLink, render_static: bool, - outer_version: Option<&str>) -> fmt::Result { + is_default_item: bool, outer_version: Option<&str>, + trait_: Option<&clean::Trait>) -> fmt::Result { let shortty = shortty(item); let name = item.name.as_ref().unwrap(); let is_static = match item.inner { - clean::MethodItem(ref method) => method.self_ == SelfTy::SelfStatic, - clean::TyMethodItem(ref method) => method.self_ == SelfTy::SelfStatic, + clean::MethodItem(ref method) => !method.decl.has_self(), + clean::TyMethodItem(ref method) => !method.decl.has_self(), _ => false }; @@ -2506,51 +2586,70 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi if !is_static || render_static { let id = derive_id(format!("{}.{}", shortty, name)); write!(w, "

    ", id, shortty)?; - render_stability_since_raw(w, item.stable_since(), outer_version)?; write!(w, "")?; - render_assoc_item(w, item, link)?; - write!(w, "

    \n")?; + render_assoc_item(w, item, link.anchor(&id))?; + write!(w, "
    ")?; + render_stability_since_raw(w, item.stable_since(), outer_version)?; + write!(w, "\n")?; } } clean::TypedefItem(ref tydef, _) => { let id = derive_id(format!("{}.{}", ItemType::AssociatedType, name)); write!(w, "

    ", id, shortty)?; - assoc_type(w, item, &Vec::new(), Some(&tydef.type_), link)?; + assoc_type(w, item, &Vec::new(), Some(&tydef.type_), link.anchor(&id))?; write!(w, "

    \n")?; } clean::AssociatedConstItem(ref ty, ref default) => { let id = derive_id(format!("{}.{}", shortty, name)); write!(w, "

    ", id, shortty)?; - assoc_const(w, item, ty, default.as_ref(), link)?; + assoc_const(w, item, ty, default.as_ref(), link.anchor(&id))?; write!(w, "

    \n")?; } clean::ConstantItem(ref c) => { let id = derive_id(format!("{}.{}", shortty, name)); write!(w, "

    ", id, shortty)?; - assoc_const(w, item, &c.type_, Some(&c.expr), link)?; + assoc_const(w, item, &c.type_, Some(&c.expr), link.anchor(&id))?; write!(w, "

    \n")?; } clean::AssociatedTypeItem(ref bounds, ref default) => { let id = derive_id(format!("{}.{}", shortty, name)); write!(w, "

    ", id, shortty)?; - assoc_type(w, item, bounds, default.as_ref(), link)?; + assoc_type(w, item, bounds, default.as_ref(), link.anchor(&id))?; write!(w, "

    \n")?; } clean::StrippedItem(..) => return Ok(()), _ => panic!("can't make docs for trait item with name {:?}", item.name) } - match link { - AssocItemLink::Anchor if !is_static || render_static => { - document(w, cx, item) - }, - _ => Ok(()), + if !is_static || render_static { + if !is_default_item { + + if item.doc_value().is_some() { + document(w, cx, item)?; + } else { + // In case the item isn't documented, + // provide short documentation from the trait + if let Some(t) = trait_ { + if let Some(it) = t.items.iter() + .find(|i| i.name == item.name) { + document_short(w, it, link)?; + } + } + } + } else { + document_short(w, item, link)?; + } } + Ok(()) } + let traits = &cache().traits; + let trait_ = i.trait_did().and_then(|did| traits.get(&did)); + write!(w, "
    ")?; - for trait_item in &i.impl_.items { - doctraititem(w, cx, trait_item, link, render_header, outer_version)?; + for trait_item in &i.inner_impl().items { + doctraititem(w, cx, trait_item, link, render_header, + false, outer_version, trait_)?; } fn render_default_items(w: &mut fmt::Formatter, @@ -2567,18 +2666,16 @@ fn render_impl(w: &mut fmt::Formatter, cx: &Context, i: &Impl, link: AssocItemLi let did = i.trait_.as_ref().unwrap().def_id().unwrap(); let assoc_link = AssocItemLink::GotoSource(did, &i.provided_trait_methods); - doctraititem(w, cx, trait_item, assoc_link, render_static, - outer_version)?; + doctraititem(w, cx, trait_item, assoc_link, render_static, true, + outer_version, None)?; } Ok(()) } // If we've implemented a trait, then also emit documentation for all // default items which weren't overridden in the implementation block. - if let Some(did) = i.trait_did() { - if let Some(t) = cache().traits.get(&did) { - render_default_items(w, cx, t, &i.impl_, render_header, outer_version)?; - } + if let Some(t) = trait_ { + render_default_items(w, cx, t, &i.inner_impl(), render_header, outer_version)?; } write!(w, "
    ")?; Ok(()) @@ -2685,27 +2782,15 @@ fn make_item_keywords(it: &clean::Item) -> String { format!("{}, {}", BASIC_KEYWORDS, it.name.as_ref().unwrap()) } -fn get_index_search_type(item: &clean::Item, - parent: Option) -> Option { - let (decl, selfty) = match item.inner { - clean::FunctionItem(ref f) => (&f.decl, None), - clean::MethodItem(ref m) => (&m.decl, Some(&m.self_)), - clean::TyMethodItem(ref m) => (&m.decl, Some(&m.self_)), +fn get_index_search_type(item: &clean::Item) -> Option { + let decl = match item.inner { + clean::FunctionItem(ref f) => &f.decl, + clean::MethodItem(ref m) => &m.decl, + clean::TyMethodItem(ref m) => &m.decl, _ => return None }; - let mut inputs = Vec::new(); - - // Consider `self` an argument as well. - match parent.and_then(|p| selfty.map(|s| (p, s)) ) { - Some((_, &clean::SelfStatic)) | None => (), - Some((name, _)) => inputs.push(Type { name: Some(name.to_ascii_lowercase()) }), - } - - inputs.extend(&mut decl.inputs.values.iter().map(|arg| { - get_index_type(&arg.type_) - })); - + let inputs = decl.inputs.values.iter().map(|arg| get_index_type(&arg.type_)).collect(); let output = match decl.output { clean::FunctionRetTy::Return(ref return_type) => Some(get_index_type(return_type)), _ => None diff --git a/src/librustdoc/html/static/main.js b/src/librustdoc/html/static/main.js index 1d1e78926f..0ec5cab78b 100644 --- a/src/librustdoc/html/static/main.js +++ b/src/librustdoc/html/static/main.js @@ -124,6 +124,10 @@ focusSearchBar(); break; + case "+": + toggleAllDocs(); + break; + case "?": if (ev.shiftKey && $("#help").hasClass("hidden")) { ev.preventDefault(); @@ -931,7 +935,7 @@ return "\u2212"; // "\u2212" is '−' minus sign } - $("#toggle-all-docs").on("click", function() { + function toggleAllDocs() { var toggle = $("#toggle-all-docs"); if (toggle.hasClass("will-expand")) { toggle.removeClass("will-expand"); @@ -950,7 +954,9 @@ $(".toggle-wrapper").addClass("collapsed"); $(".collapse-toggle").children(".inner").text(labelForToggleButton(true)); } - }); + } + + $("#toggle-all-docs").on("click", toggleAllDocs); $(document).on("click", ".collapse-toggle", function() { var toggle = $(this); @@ -981,7 +987,7 @@ $(".method").each(function() { if ($(this).next().is(".docblock") || ($(this).next().is(".stability") && $(this).next().next().is(".docblock"))) { - $(this).children().first().after(toggle.clone()); + $(this).children().last().after(toggle.clone()); } }); diff --git a/src/librustdoc/html/static/rustdoc.css b/src/librustdoc/html/static/rustdoc.css index c07871a402..8e4245d4eb 100644 --- a/src/librustdoc/html/static/rustdoc.css +++ b/src/librustdoc/html/static/rustdoc.css @@ -97,6 +97,7 @@ h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):no h1.fqn { border-bottom: 1px dashed; margin-top: 0; + position: relative; } h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) { border-bottom: 1px solid; @@ -105,6 +106,7 @@ h3.impl, h3.method, h4.method, h3.type, h4.type { font-weight: 600; margin-top: 10px; margin-bottom: 10px; + position: relative; } h3.impl, h3.method, h3.type { margin-top: 15px; @@ -265,20 +267,39 @@ nav.sub { .content .out-of-band { font-size: 23px; - width: 40%; margin: 0px; padding: 0px; text-align: right; display: inline-block; + font-weight: normal; + position: absolute; + right: 0; +} + +h3.impl > .out-of-band { + font-size: 21px; +} + +h4 > code, h3 > code { + position: inherit; +} + +.in-band, code { + z-index: 5; } .content .in-band { - width: 60%; margin: 
0px; padding: 0px; display: inline-block; } +#main { position: relative; } +#main > .since { + top: inherit; + font-family: "Fira Sans", "Helvetica Neue", Helvetica, Arial, sans-serif; +} + .content table { border-spacing: 0 5px; border-collapse: separate; @@ -451,7 +472,7 @@ body.blur > :not(#help) { background: #e9e9e9; box-shadow: 0 0 6px rgba(0,0,0,.2); width: 550px; - height: 300px; + height: 330px; border: 1px solid #bfbfbf; } #help dt { @@ -498,11 +519,13 @@ em.stab p { opacity: 0.65; } -span.since { - float: right; +.since { font-weight: normal; font-size: initial; color: grey; + position: absolute; + right: 0; + top: 0; } .variants_table { @@ -597,7 +620,29 @@ a.test-arrow { color: #999; } +.ghost { + display: none; +} + +.ghost + .since { + position: initial; + display: table-cell; +} + +.since + .srclink { + display: table-cell; + padding-left: 10px; +} + +span.since { + position: initial; + font-size: 20px; + margin-right: 5px; +} +:target > code { + background: #FDFFD3; +} /* Media Queries */ diff --git a/src/librustdoc/html/static/styles/main.css b/src/librustdoc/html/static/styles/main.css index 5c073860f0..59b2ff7e3d 100644 --- a/src/librustdoc/html/static/styles/main.css +++ b/src/librustdoc/html/static/styles/main.css @@ -26,6 +26,9 @@ h1.fqn { h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) { border-bottom-color: #DDDDDD; } +.in-band, code { + background-color: white; +} .docblock code { background-color: #F5F5F5; diff --git a/src/librustdoc/lib.rs b/src/librustdoc/lib.rs index bc7c7c5e0c..86aad10e02 100644 --- a/src/librustdoc/lib.rs +++ b/src/librustdoc/lib.rs @@ -48,14 +48,11 @@ extern crate rustc_unicode; extern crate serialize as rustc_serialize; // used by deriving -use std::cell::RefCell; use std::collections::HashMap; use std::default::Default; use std::env; -use std::io::Read; use std::path::PathBuf; use std::process; -use std::rc::Rc; use std::sync::mpsc::channel; use externalfiles::ExternalHtml; @@ -83,6 +80,7 @@ pub mod markdown; pub mod passes; pub mod plugins; pub mod visit_ast; +pub mod visit_lib; pub mod test; mod flock; @@ -113,12 +111,9 @@ const DEFAULT_PASSES: &'static [&'static str] = &[ "unindent-comments", ]; -thread_local!(pub static ANALYSISKEY: Rc>> = { - Rc::new(RefCell::new(None)) -}); - struct Output { krate: clean::Crate, + renderinfo: html::render::RenderInfo, passes: Vec, } @@ -302,14 +297,15 @@ pub fn main_args(args: &[String]) -> isize { return 1; } }; - let Output { krate, passes, } = out; + let Output { krate, passes, renderinfo } = out; info!("going to format"); match matches.opt_str("w").as_ref().map(|s| &**s) { Some("html") | None => { html::render::run(krate, &external_html, output.unwrap_or(PathBuf::from("doc")), passes.into_iter().collect(), - css_file_extension) + css_file_extension, + renderinfo) .expect("failed to generate documentation") } Some(s) => { @@ -380,12 +376,8 @@ fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matche tx.send(core::run_core(paths, cfgs, externs, Input::File(cr), triple)).unwrap(); }); - let (mut krate, analysis) = rx.recv().unwrap(); + let (mut krate, renderinfo) = rx.recv().unwrap(); info!("finished with rustc"); - let mut analysis = Some(analysis); - ANALYSISKEY.with(|s| { - *s.borrow_mut() = analysis.take(); - }); if let Some(name) = matches.opt_str("crate-name") { krate.name = name @@ -443,5 +435,5 @@ fn rust_input(cratefile: &str, externs: core::Externs, matches: &getopts::Matche // Run everything! 
info!("Executing passes/plugins"); let krate = pm.run_plugins(krate); - Output { krate: krate, passes: passes } + Output { krate: krate, renderinfo: renderinfo, passes: passes } } diff --git a/src/librustdoc/passes.rs b/src/librustdoc/passes.rs index adc39b6998..1980d1f9cc 100644 --- a/src/librustdoc/passes.rs +++ b/src/librustdoc/passes.rs @@ -14,7 +14,6 @@ use rustc::util::nodemap::DefIdSet; use std::cmp; use std::string::String; use std::usize; -use rustc::hir; use clean::{self, Attributes, GetDefId}; use clean::Item; @@ -25,19 +24,17 @@ use fold::FoldItem::Strip; /// Strip items marked `#[doc(hidden)]` pub fn strip_hidden(krate: clean::Crate) -> plugins::PluginResult { - let mut stripped = DefIdSet(); + let mut retained = DefIdSet(); // strip all #[doc(hidden)] items let krate = { struct Stripper<'a> { - stripped: &'a mut DefIdSet + retained: &'a mut DefIdSet } impl<'a> fold::DocFolder for Stripper<'a> { fn fold_item(&mut self, i: Item) -> Option { if i.attrs.list("doc").has_word("hidden") { debug!("found one in strip_hidden; removing"); - self.stripped.insert(i.def_id); - // use a dedicated hidden item for given item type if any match i.inner { clean::StructFieldItem(..) | clean::ModuleItem(..) => { @@ -45,42 +42,19 @@ pub fn strip_hidden(krate: clean::Crate) -> plugins::PluginResult { } _ => return None, } + } else { + self.retained.insert(i.def_id); } self.fold_item_recur(i) } } - let mut stripper = Stripper{ stripped: &mut stripped }; + let mut stripper = Stripper{ retained: &mut retained }; stripper.fold_crate(krate) }; - // strip any traits implemented on stripped items - { - struct ImplStripper<'a> { - stripped: &'a mut DefIdSet - } - impl<'a> fold::DocFolder for ImplStripper<'a> { - fn fold_item(&mut self, i: Item) -> Option { - if let clean::ImplItem(clean::Impl{ - for_: clean::ResolvedPath{ did, .. }, - ref trait_, .. - }) = i.inner { - // Impls for stripped types don't need to exist - if self.stripped.contains(&did) { - return None; - } - // Impls of stripped traits also don't need to exist - if let Some(did) = trait_.def_id() { - if self.stripped.contains(&did) { - return None; - } - } - } - self.fold_item_recur(i) - } - } - let mut stripper = ImplStripper{ stripped: &mut stripped }; - stripper.fold_crate(krate) - } + // strip all impls referencing stripped items + let mut stripper = ImplStripper { retained: &retained }; + stripper.fold_crate(krate) } /// Strip private items from the point of view of a crate or externally from a @@ -88,10 +62,7 @@ pub fn strip_hidden(krate: clean::Crate) -> plugins::PluginResult { pub fn strip_private(mut krate: clean::Crate) -> plugins::PluginResult { // This stripper collects all *retained* nodes. let mut retained = DefIdSet(); - let analysis = super::ANALYSISKEY.with(|a| a.clone()); - let analysis = analysis.borrow(); - let analysis = analysis.as_ref().unwrap(); - let access_levels = analysis.access_levels.clone(); + let access_levels = krate.access_levels.clone(); // strip all private items { @@ -102,11 +73,9 @@ pub fn strip_private(mut krate: clean::Crate) -> plugins::PluginResult { krate = ImportStripper.fold_crate(stripper.fold_crate(krate)); } - // strip all private implementations of traits - { - let mut stripper = ImplStripper(&retained); - stripper.fold_crate(krate) - } + // strip all impls referencing private items + let mut stripper = ImplStripper { retained: &retained }; + stripper.fold_crate(krate) } struct Stripper<'a> { @@ -133,22 +102,22 @@ impl<'a> fold::DocFolder for Stripper<'a> { } clean::StructFieldItem(..) 
=> { - if i.visibility != Some(hir::Public) { + if i.visibility != Some(clean::Public) { return Strip(i).fold(); } } clean::ModuleItem(..) => { - if i.def_id.is_local() && i.visibility != Some(hir::Public) { + if i.def_id.is_local() && i.visibility != Some(clean::Public) { return Strip(self.fold_item_recur(i).unwrap()).fold() } } // trait impls for private items should be stripped clean::ImplItem(clean::Impl{ - for_: clean::ResolvedPath{ did, .. }, .. + for_: clean::ResolvedPath{ did, is_generic, .. }, .. }) => { - if did.is_local() && !self.access_levels.is_exported(did) { + if did.is_local() && !is_generic && !self.access_levels.is_exported(did) { return None; } } @@ -205,13 +174,23 @@ impl<'a> fold::DocFolder for Stripper<'a> { } } -// This stripper discards all private impls of traits -struct ImplStripper<'a>(&'a DefIdSet); +// This stripper discards all impls which reference stripped items +struct ImplStripper<'a> { + retained: &'a DefIdSet +} + impl<'a> fold::DocFolder for ImplStripper<'a> { fn fold_item(&mut self, i: Item) -> Option { if let clean::ImplItem(ref imp) = i.inner { + if let Some(did) = imp.for_.def_id() { + if did.is_local() && !imp.for_.is_generic() && + !self.retained.contains(&did) + { + return None; + } + } if let Some(did) = imp.trait_.def_id() { - if did.is_local() && !self.0.contains(&did) { + if did.is_local() && !self.retained.contains(&did) { return None; } } @@ -226,7 +205,7 @@ impl fold::DocFolder for ImportStripper { fn fold_item(&mut self, i: Item) -> Option { match i.inner { clean::ExternCrateItem(..) | - clean::ImportItem(..) if i.visibility != Some(hir::Public) => None, + clean::ImportItem(..) if i.visibility != Some(clean::Public) => None, _ => self.fold_item_recur(i) } } diff --git a/src/librustdoc/test.rs b/src/librustdoc/test.rs index 982f477fc4..2754f77444 100644 --- a/src/librustdoc/test.rs +++ b/src/librustdoc/test.rs @@ -9,7 +9,7 @@ // except according to those terms. 
use std::cell::{RefCell, Cell}; -use std::collections::HashMap; +use std::collections::{HashMap, HashSet}; use std::env; use std::ffi::OsString; use std::io::prelude::*; @@ -28,7 +28,7 @@ use rustc::hir::map as hir_map; use rustc::session::{self, config}; use rustc::session::config::{get_unstable_features_setting, OutputType}; use rustc::session::search_paths::{SearchPaths, PathKind}; -use rustc::hir::lowering::{lower_crate, LoweringContext}; +use rustc::hir::lowering::{lower_crate, DummyResolver}; use rustc_back::dynamic_lib::DynamicLibrary; use rustc_back::tempdir::TempDir; use rustc_driver::{driver, Compilation}; @@ -79,8 +79,11 @@ pub fn run(input: &str, false, codemap.clone()); - let cstore = Rc::new(CStore::new(token::get_ident_interner())); + let dep_graph = DepGraph::new(false); + let _ignore = dep_graph.in_ignore(); + let cstore = Rc::new(CStore::new(&dep_graph, token::get_ident_interner())); let sess = session::build_session_(sessopts, + &dep_graph, Some(input_path.clone()), diagnostic_handler, codemap, @@ -94,29 +97,30 @@ pub fn run(input: &str, "rustdoc-test", None) .expect("phase_2_configure_and_expand aborted in rustdoc!"); let krate = driver::assign_node_ids(&sess, krate); - let lcx = LoweringContext::new(&sess, Some(&krate)); - let krate = lower_crate(&lcx, &krate); + let dep_graph = DepGraph::new(false); + let defs = hir_map::collect_definitions(&krate); + + let mut dummy_resolver = DummyResolver; + let krate = lower_crate(&sess, &krate, &sess, &mut dummy_resolver); let opts = scrape_test_config(&krate); - let dep_graph = DepGraph::new(false); let _ignore = dep_graph.in_ignore(); - let mut forest = hir_map::Forest::new(krate, dep_graph.clone()); - let map = hir_map::map_crate(&mut forest); + let mut forest = hir_map::Forest::new(krate, &dep_graph); + let map = hir_map::map_crate(&mut forest, defs); let ctx = core::DocContext { map: &map, maybe_typed: core::NotTyped(&sess), input: input, - external_paths: RefCell::new(Some(HashMap::new())), - external_traits: RefCell::new(None), - external_typarams: RefCell::new(None), - inlined: RefCell::new(None), - all_crate_impls: RefCell::new(HashMap::new()), + external_traits: RefCell::new(HashMap::new()), + populated_crate_impls: RefCell::new(HashSet::new()), deref_trait_did: Cell::new(None), + access_levels: Default::default(), + renderinfo: Default::default(), }; - let mut v = RustdocVisitor::new(&ctx, None); + let mut v = RustdocVisitor::new(&ctx); v.visit(ctx.map.krate()); let mut krate = v.clean(&ctx); if let Some(name) = crate_name { @@ -237,8 +241,10 @@ fn runtest(test: &str, cratename: &str, cfgs: Vec, libs: SearchPaths, // Compile the code let diagnostic_handler = errors::Handler::with_emitter(true, false, box emitter); - let cstore = Rc::new(CStore::new(token::get_ident_interner())); + let dep_graph = DepGraph::new(false); + let cstore = Rc::new(CStore::new(&dep_graph, token::get_ident_interner())); let sess = session::build_session_(sessopts, + &dep_graph, None, diagnostic_handler, codemap, @@ -339,7 +345,7 @@ pub fn maketest(s: &str, cratename: Option<&str>, dont_insert_main: bool, prog.push_str(&everything_else); } else { prog.push_str("fn main() {\n "); - prog.push_str(&everything_else.replace("\n", "\n ")); + prog.push_str(&everything_else); prog = prog.trim().into(); prog.push_str("\n}"); } diff --git a/src/librustdoc/visit_ast.rs b/src/librustdoc/visit_ast.rs index 5c36c38abc..d5309d7433 100644 --- a/src/librustdoc/visit_ast.rs +++ b/src/librustdoc/visit_ast.rs @@ -21,12 +21,13 @@ use syntax::attr::AttrMetaMethods; 
use syntax::codemap::Span; use rustc::hir::map as hir_map; -use rustc::middle::stability; +use rustc::hir::def::Def; +use rustc::middle::privacy::AccessLevel; use rustc::hir; use core; -use clean::{Clean, Attributes}; +use clean::{self, Clean, Attributes}; use doctree::*; // looks to me like the first two of these are actually @@ -41,14 +42,12 @@ pub struct RustdocVisitor<'a, 'tcx: 'a> { pub module: Module, pub attrs: hir::HirVec, pub cx: &'a core::DocContext<'a, 'tcx>, - pub analysis: Option<&'a core::CrateAnalysis>, view_item_stack: HashSet, inlining_from_glob: bool, } impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { - pub fn new(cx: &'a core::DocContext<'a, 'tcx>, - analysis: Option<&'a core::CrateAnalysis>) -> RustdocVisitor<'a, 'tcx> { + pub fn new(cx: &'a core::DocContext<'a, 'tcx>) -> RustdocVisitor<'a, 'tcx> { // If the root is reexported, terminate all recursion. let mut stack = HashSet::new(); stack.insert(ast::CRATE_NODE_ID); @@ -56,7 +55,6 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { module: Module::new(None), attrs: hir::HirVec::new(), cx: cx, - analysis: analysis, view_item_stack: stack, inlining_from_glob: false, } @@ -65,7 +63,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { fn stability(&self, id: ast::NodeId) -> Option { self.cx.tcx_opt().and_then(|tcx| { self.cx.map.opt_local_def_id(id) - .and_then(|def_id| stability::lookup_stability(tcx, def_id)) + .and_then(|def_id| tcx.lookup_stability(def_id)) .cloned() }) } @@ -73,7 +71,7 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { fn deprecation(&self, id: ast::NodeId) -> Option { self.cx.tcx_opt().and_then(|tcx| { self.cx.map.opt_local_def_id(id) - .and_then(|def_id| stability::lookup_deprecation(tcx, def_id)) + .and_then(|def_id| tcx.lookup_deprecation(def_id)) }) } @@ -243,19 +241,40 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { Some(tcx) => tcx, None => return false }; - let def = tcx.def_map.borrow()[&id].def_id(); - let def_node_id = match tcx.map.as_local_node_id(def) { - Some(n) => n, None => return false - }; - let analysis = match self.analysis { - Some(analysis) => analysis, None => return false - }; + let def = tcx.def_map.borrow()[&id]; + let def_did = def.def_id(); let use_attrs = tcx.map.attrs(id).clean(self.cx); + let is_no_inline = use_attrs.list("doc").has_word("no_inline"); + + // For cross-crate impl inlining we need to know whether items are + // reachable in documentation - a previously nonreachable item can be + // made reachable by cross-crate inlining which we're checking here. 
+ // (this is done here because we need to know this upfront) + if !def.def_id().is_local() && !is_no_inline { + let attrs = clean::inline::load_attrs(self.cx, tcx, def_did); + let self_is_hidden = attrs.list("doc").has_word("hidden"); + match def.base_def { + Def::Trait(did) | + Def::Struct(did) | + Def::Enum(did) | + Def::TyAlias(did) if !self_is_hidden => { + self.cx.access_levels.borrow_mut().map.insert(did, AccessLevel::Public); + }, + Def::Mod(did) => if !self_is_hidden { + ::visit_lib::LibEmbargoVisitor::new(self.cx).visit_mod(did); + }, + _ => {}, + } + return false + } + + let def_node_id = match tcx.map.as_local_node_id(def_did) { + Some(n) => n, None => return false + }; - let is_private = !analysis.access_levels.is_public(def); + let is_private = !self.cx.access_levels.borrow().is_public(def_did); let is_hidden = inherits_doc_hidden(self.cx, def_node_id); - let is_no_inline = use_attrs.list("doc").has_word("no_inline"); // Only inline if requested or if the item would otherwise be stripped if (!please_inline && !is_private && !is_hidden) || is_no_inline { @@ -296,7 +315,10 @@ impl<'a, 'tcx> RustdocVisitor<'a, 'tcx> { let name = renamed.unwrap_or(item.name); match item.node { hir::ItemExternCrate(ref p) => { + let cstore = &self.cx.sess().cstore; om.extern_crates.push(ExternCrate { + cnum: cstore.extern_mod_stmt_cnum(item.id) + .unwrap_or(ast::CrateNum::max_value()), name: name, path: p.map(|x|x.to_string()), vis: item.vis.clone(), diff --git a/src/librustdoc/visit_lib.rs b/src/librustdoc/visit_lib.rs new file mode 100644 index 0000000000..f6d89f7c1d --- /dev/null +++ b/src/librustdoc/visit_lib.rs @@ -0,0 +1,109 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use rustc::middle::cstore::{CrateStore, ChildItem, DefLike}; +use rustc::middle::privacy::{AccessLevels, AccessLevel}; +use rustc::hir::def::Def; +use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId}; +use rustc::ty::Visibility; +use syntax::ast; + +use std::cell::RefMut; + +use clean::{Attributes, Clean}; + +// FIXME: this may not be exhaustive, but is sufficient for rustdocs current uses + +/// Similar to `librustc_privacy::EmbargoVisitor`, but also takes +/// specific rustdoc annotations into account (i.e. 
`doc(hidden)`) +pub struct LibEmbargoVisitor<'a, 'b: 'a, 'tcx: 'b> { + cx: &'a ::core::DocContext<'b, 'tcx>, + cstore: &'a CrateStore<'tcx>, + // Accessibility levels for reachable nodes + access_levels: RefMut<'a, AccessLevels>, + // Previous accessibility level, None means unreachable + prev_level: Option, +} + +impl<'a, 'b, 'tcx> LibEmbargoVisitor<'a, 'b, 'tcx> { + pub fn new(cx: &'a ::core::DocContext<'b, 'tcx>) -> LibEmbargoVisitor<'a, 'b, 'tcx> { + LibEmbargoVisitor { + cx: cx, + cstore: &*cx.sess().cstore, + access_levels: cx.access_levels.borrow_mut(), + prev_level: Some(AccessLevel::Public), + } + } + + pub fn visit_lib(&mut self, cnum: ast::CrateNum) { + let did = DefId { krate: cnum, index: CRATE_DEF_INDEX }; + self.update(did, Some(AccessLevel::Public)); + self.visit_mod(did); + } + + // Updates node level and returns the updated level + fn update(&mut self, did: DefId, level: Option) -> Option { + let attrs: Vec<_> = self.cx.tcx().get_attrs(did).iter() + .map(|a| a.clean(self.cx)) + .collect(); + let is_hidden = attrs.list("doc").has_word("hidden"); + + let old_level = self.access_levels.map.get(&did).cloned(); + // Accessibility levels can only grow + if level > old_level && !is_hidden { + self.access_levels.map.insert(did, level.unwrap()); + level + } else { + old_level + } + } + + pub fn visit_mod(&mut self, did: DefId) { + for item in self.cstore.item_children(did) { + if let DefLike::DlDef(def) = item.def { + match def { + Def::Mod(did) | + Def::ForeignMod(did) | + Def::Trait(did) | + Def::Struct(did) | + Def::Enum(did) | + Def::TyAlias(did) | + Def::Fn(did) | + Def::Method(did) | + Def::Static(did, _) | + Def::Const(did) => self.visit_item(did, item), + _ => {} + } + } + } + } + + fn visit_item(&mut self, did: DefId, item: ChildItem) { + let inherited_item_level = match item.def { + DefLike::DlImpl(..) | DefLike::DlField => unreachable!(), + DefLike::DlDef(def) => { + match def { + Def::ForeignMod(..) 
=> self.prev_level, + _ => if item.vis == Visibility::Public { self.prev_level } else { None } + } + } + }; + + let item_level = self.update(did, inherited_item_level); + + if let DefLike::DlDef(Def::Mod(did)) = item.def { + let orig_level = self.prev_level; + + self.prev_level = item_level; + self.visit_mod(did); + self.prev_level = orig_level; + } + } +} diff --git a/src/libserialize/json.rs b/src/libserialize/json.rs index a7d7235156..90b2c61160 100644 --- a/src/libserialize/json.rs +++ b/src/libserialize/json.rs @@ -3948,7 +3948,7 @@ mod tests { let mut mem_buf = string::String::new(); let mut encoder = Encoder::new(&mut mem_buf); let result = hm.encode(&mut encoder); - match result.err().unwrap() { + match result.unwrap_err() { EncoderError::BadHashmapKey => (), _ => panic!("expected bad hash map key") } diff --git a/src/libstd/Cargo.toml b/src/libstd/Cargo.toml index 29bd28b616..eded6e24f3 100644 --- a/src/libstd/Cargo.toml +++ b/src/libstd/Cargo.toml @@ -8,17 +8,19 @@ build = "build.rs" name = "std" path = "lib.rs" crate-type = ["dylib", "rlib"] -test = false [dependencies] alloc = { path = "../liballoc" } alloc_jemalloc = { path = "../liballoc_jemalloc", optional = true } alloc_system = { path = "../liballoc_system" } +panic_unwind = { path = "../libpanic_unwind" } +panic_abort = { path = "../libpanic_abort" } collections = { path = "../libcollections" } core = { path = "../libcore" } libc = { path = "../rustc/libc_shim" } rand = { path = "../librand" } rustc_unicode = { path = "../librustc_unicode" } +unwind = { path = "../libunwind" } [build-dependencies] build_helper = { path = "../build_helper" } diff --git a/src/libstd/build.rs b/src/libstd/build.rs index c32bca82bd..ff9dacbb67 100644 --- a/src/libstd/build.rs +++ b/src/libstd/build.rs @@ -8,11 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
+#![deny(warnings)] + extern crate gcc; extern crate build_helper; use std::env; -use std::fs; use std::path::PathBuf; use std::process::Command; @@ -20,6 +21,7 @@ use build_helper::run; fn main() { println!("cargo:rustc-cfg=cargobuild"); + println!("cargo:rerun-if-changed=build.rs"); let target = env::var("TARGET").unwrap(); let host = env::var("HOST").unwrap(); @@ -28,9 +30,7 @@ fn main() { } if target.contains("linux") { - if target.contains("musl") && (target.contains("x86_64") || target.contains("i686")) { - println!("cargo:rustc-link-lib=static=unwind"); - } else if target.contains("android") { + if target.contains("android") { println!("cargo:rustc-link-lib=dl"); println!("cargo:rustc-link-lib=log"); println!("cargo:rustc-link-lib=gcc"); @@ -38,27 +38,13 @@ fn main() { println!("cargo:rustc-link-lib=dl"); println!("cargo:rustc-link-lib=rt"); println!("cargo:rustc-link-lib=pthread"); - println!("cargo:rustc-link-lib=gcc_s"); } } else if target.contains("freebsd") { println!("cargo:rustc-link-lib=execinfo"); println!("cargo:rustc-link-lib=pthread"); - println!("cargo:rustc-link-lib=gcc_s"); } else if target.contains("dragonfly") || target.contains("bitrig") || target.contains("netbsd") || target.contains("openbsd") { println!("cargo:rustc-link-lib=pthread"); - - if target.contains("rumprun") { - println!("cargo:rustc-link-lib=unwind"); - } else if target.contains("netbsd") { - println!("cargo:rustc-link-lib=gcc_s"); - } else if target.contains("openbsd") { - println!("cargo:rustc-link-lib=gcc"); - } else if target.contains("bitrig") { - println!("cargo:rustc-link-lib=c++abi"); - } else if target.contains("dragonfly") { - println!("cargo:rustc-link-lib=gcc_pic"); - } } else if target.contains("apple-darwin") { println!("cargo:rustc-link-lib=System"); } else if target.contains("apple-ios") { @@ -67,9 +53,6 @@ fn main() { println!("cargo:rustc-link-lib=framework=Security"); println!("cargo:rustc-link-lib=framework=Foundation"); } else if target.contains("windows") { - if target.contains("windows-gnu") { - println!("cargo:rustc-link-lib=gcc_eh"); - } println!("cargo:rustc-link-lib=advapi32"); println!("cargo:rustc-link-lib=ws2_32"); println!("cargo:rustc-link-lib=userenv"); @@ -84,8 +67,16 @@ fn build_libbacktrace(host: &str, target: &str) { println!("cargo:rustc-link-lib=static=backtrace"); println!("cargo:rustc-link-search=native={}/.libs", build_dir.display()); - if fs::metadata(&build_dir.join(".libs/libbacktrace.a")).is_ok() { - return + let mut stack = src_dir.read_dir().unwrap() + .map(|e| e.unwrap()) + .collect::>(); + while let Some(entry) = stack.pop() { + let path = entry.path(); + if entry.file_type().unwrap().is_dir() { + stack.extend(path.read_dir().unwrap().map(|e| e.unwrap())); + } else { + println!("cargo:rerun-if-changed={}", path.display()); + } } let compiler = gcc::Config::new().get_compiler(); diff --git a/src/libstd/collections/hash/bench.rs b/src/libstd/collections/hash/bench.rs index 9fae9af2d5..a1275d23d5 100644 --- a/src/libstd/collections/hash/bench.rs +++ b/src/libstd/collections/hash/bench.rs @@ -11,7 +11,6 @@ #![cfg(test)] extern crate test; -use prelude::v1::*; use self::test::Bencher; diff --git a/src/libstd/collections/hash/map.rs b/src/libstd/collections/hash/map.rs index c20270e830..37045822d4 100644 --- a/src/libstd/collections/hash/map.rs +++ b/src/libstd/collections/hash/map.rs @@ -202,8 +202,10 @@ fn test_resize_policy() { /// The hashes are all keyed by the thread-local random number generator /// on creation by default. 
This means that the ordering of the keys is /// randomized, but makes the tables more resistant to -/// denial-of-service attacks (Hash DoS). This behavior can be -/// overridden with one of the constructors. +/// denial-of-service attacks (Hash DoS). No guarantees are made to the +/// quality of the random data. The implementation uses the best available +/// random data from your platform at the time of creation. This behavior +/// can be overridden with one of the constructors. /// /// It is required that the keys implement the `Eq` and `Hash` traits, although /// this can frequently be achieved by using `#[derive(PartialEq, Eq, Hash)]`. @@ -830,7 +832,7 @@ impl HashMap /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn keys<'a>(&'a self) -> Keys<'a, K, V> { + pub fn keys(&self) -> Keys { Keys { inner: self.iter() } } @@ -852,7 +854,7 @@ impl HashMap /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] - pub fn values<'a>(&'a self) -> Values<'a, K, V> { + pub fn values(&self) -> Values { Values { inner: self.iter() } } @@ -862,7 +864,6 @@ impl HashMap /// # Examples /// /// ``` - /// # #![feature(map_values_mut)] /// use std::collections::HashMap; /// /// let mut map = HashMap::new(); @@ -879,8 +880,8 @@ impl HashMap /// print!("{}", val); /// } /// ``` - #[unstable(feature = "map_values_mut", reason = "recently added", issue = "32551")] - pub fn values_mut<'a>(&'a mut self) -> ValuesMut<'a, K, V> { + #[stable(feature = "map_values_mut", since = "1.10.0")] + pub fn values_mut(&mut self) -> ValuesMut { ValuesMut { inner: self.iter_mut() } } @@ -1286,7 +1287,7 @@ pub struct Drain<'a, K: 'a, V: 'a> { } /// Mutable HashMap values iterator. -#[unstable(feature = "map_values_mut", reason = "recently added", issue = "32551")] +#[stable(feature = "map_values_mut", since = "1.10.0")] pub struct ValuesMut<'a, K: 'a, V: 'a> { inner: IterMut<'a, K, V> } @@ -1489,14 +1490,14 @@ impl<'a, K, V> ExactSizeIterator for Values<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } -#[unstable(feature = "map_values_mut", reason = "recently added", issue = "32551")] +#[stable(feature = "map_values_mut", since = "1.10.0")] impl<'a, K, V> Iterator for ValuesMut<'a, K, V> { type Item = &'a mut V; #[inline] fn next(&mut self) -> Option<(&'a mut V)> { self.inner.next().map(|(_, v)| v) } #[inline] fn size_hint(&self) -> (usize, Option) { self.inner.size_hint() } } -#[unstable(feature = "map_values_mut", reason = "recently added", issue = "32551")] +#[stable(feature = "map_values_mut", since = "1.10.0")] impl<'a, K, V> ExactSizeIterator for ValuesMut<'a, K, V> { #[inline] fn len(&self) -> usize { self.inner.len() } } @@ -1533,11 +1534,20 @@ impl<'a, K, V> Entry<'a, K, V> { Vacant(entry) => entry.insert(default()), } } + + /// Returns a reference to this entry's key. + #[stable(feature = "map_entry_keys", since = "1.10.0")] + pub fn key(&self) -> &K { + match *self { + Occupied(ref entry) => entry.key(), + Vacant(ref entry) => entry.key(), + } + } } impl<'a, K, V> OccupiedEntry<'a, K, V> { /// Gets a reference to the key in the entry. - #[unstable(feature = "map_entry_keys", issue = "32281")] + #[stable(feature = "map_entry_keys", since = "1.10.0")] pub fn key(&self) -> &K { self.elem.read().0 } @@ -1585,7 +1595,7 @@ impl<'a, K, V> OccupiedEntry<'a, K, V> { impl<'a, K: 'a, V: 'a> VacantEntry<'a, K, V> { /// Gets a reference to the key that would be used when inserting a value /// through the VacantEntry. 
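// Illustrative aside, not part of the upstream patch: the `Entry::key` and
// `values_mut` methods stabilized above can be exercised on stable 1.10
// roughly as follows. A minimal sketch using only the standard library;
// the function name is made up for illustration.
fn _entry_key_and_values_mut_sketch() {
    use std::collections::HashMap;

    let mut map: HashMap<&str, i32> = HashMap::new();
    map.insert("a", 1);

    // `Entry::key` exposes the looked-up key without consuming the entry.
    assert_eq!(*map.entry("a").key(), "a");

    // `values_mut` yields mutable references to every value in the map.
    for value in map.values_mut() {
        *value += 1;
    }
    assert_eq!(map["a"], 2);
}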
- #[unstable(feature = "map_entry_keys", issue = "32281")] + #[stable(feature = "map_entry_keys", since = "1.10.0")] pub fn key(&self) -> &K { &self.key } @@ -1656,8 +1666,33 @@ impl RandomState { #[allow(deprecated)] // rand #[stable(feature = "hashmap_build_hasher", since = "1.7.0")] pub fn new() -> RandomState { - let mut r = rand::thread_rng(); - RandomState { k0: r.gen(), k1: r.gen() } + // Historically this function did not cache keys from the OS and instead + // simply always called `rand::thread_rng().gen()` twice. In #31356 it + // was discovered, however, that because we re-seed the thread-local RNG + // from the OS periodically that this can cause excessive slowdown when + // many hash maps are created on a thread. To solve this performance + // trap we cache the first set of randomly generated keys per-thread. + // + // In doing this, however, we lose the property that all hash maps have + // nondeterministic iteration order as all of those created on the same + // thread would have the same hash keys. This property has been nice in + // the past as it allows for maximal flexibility in the implementation + // of `HashMap` itself. + // + // The constraint here (if there even is one) is just that maps created + // on the same thread have the same iteration order, and that *may* be + // relied upon even though it is not a documented guarantee at all of + // the `HashMap` type. In any case we've decided that this is reasonable + // for now, so caching keys thread-locally seems fine. + thread_local!(static KEYS: (u64, u64) = { + let r = rand::OsRng::new(); + let mut r = r.expect("failed to create an OS RNG"); + (r.gen(), r.gen()) + }); + + KEYS.with(|&(k0, k1)| { + RandomState { k0: k0, k1: k1 } + }) } } diff --git a/src/libstd/collections/hash/set.rs b/src/libstd/collections/hash/set.rs index b353a4c1ba..e4ef3fca55 100644 --- a/src/libstd/collections/hash/set.rs +++ b/src/libstd/collections/hash/set.rs @@ -535,9 +535,9 @@ impl HashSet /// Adds a value to the set. /// - /// If the set did not have a value present, `true` is returned. + /// If the set did not have this value present, `true` is returned. /// - /// If the set did have this key present, `false` is returned. + /// If the set did have this value present, `false` is returned. /// /// # Examples /// diff --git a/src/libstd/collections/mod.rs b/src/libstd/collections/mod.rs index 4de442fd3a..44613d7767 100644 --- a/src/libstd/collections/mod.rs +++ b/src/libstd/collections/mod.rs @@ -120,12 +120,10 @@ //! //! For Sets, all operations have the cost of the equivalent Map operation. //! -//! | | get | insert | remove | predecessor | -//! |----------|-----------|----------|----------|-------------| -//! | HashMap | O(1)~ | O(1)~* | O(1)~ | N/A | -//! | BTreeMap | O(log n) | O(log n) | O(log n) | O(log n) | -//! -//! Note that BTreeMap's precise performance depends on the value of B. +//! | | get | insert | remove | predecessor | append | +//! |----------|-----------|----------|----------|-------------|--------| +//! | HashMap | O(1)~ | O(1)~* | O(1)~ | N/A | N/A | +//! | BTreeMap | O(log n) | O(log n) | O(log n) | O(log n) | O(n+m) | //! //! # Correct and Efficient Usage of Collections //! diff --git a/src/libstd/env.rs b/src/libstd/env.rs index 9dc6a26cde..6956dc0d90 100644 --- a/src/libstd/env.rs +++ b/src/libstd/env.rs @@ -452,16 +452,16 @@ pub fn home_dir() -> Option { /// Returns the path of a temporary directory. 
/// -/// On Unix, returns the value of the 'TMPDIR' environment variable if it is -/// set, otherwise for non-Android it returns '/tmp'. If Android, since there -/// is no global temporary folder (it is usually allocated per-app), we return -/// '/data/local/tmp'. -/// -/// On Windows, returns the value of, in order, the 'TMP', 'TEMP', -/// 'USERPROFILE' environment variable if any are set and not the empty -/// string. Otherwise, tmpdir returns the path of the Windows directory. This -/// behavior is identical to that of [GetTempPath][msdn], which this function -/// uses internally. +/// On Unix, returns the value of the `TMPDIR` environment variable if it is +/// set, otherwise for non-Android it returns `/tmp`. If Android, since there +/// is no global temporary folder (it is usually allocated per-app), it returns +/// `/data/local/tmp`. +/// +/// On Windows, returns the value of, in order, the `TMP`, `TEMP`, +/// `USERPROFILE` environment variable if any are set and not the empty +/// string. Otherwise, `temp_dir` returns the path of the Windows directory. +/// This behavior is identical to that of [`GetTempPath`][msdn], which this +/// function uses internally. /// /// [msdn]: https://msdn.microsoft.com/en-us/library/windows/desktop/aa364992(v=vs.85).aspx /// diff --git a/src/libstd/error.rs b/src/libstd/error.rs index 35cd4a5ec5..d49d976494 100644 --- a/src/libstd/error.rs +++ b/src/libstd/error.rs @@ -159,6 +159,13 @@ impl Error for num::ParseIntError { } } +#[unstable(feature = "try_from", issue = "33417")] +impl Error for num::TryFromIntError { + fn description(&self) -> &str { + self.__description() + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl Error for num::ParseFloatError { fn description(&self) -> &str { diff --git a/src/libstd/ffi/c_str.rs b/src/libstd/ffi/c_str.rs index 52d7bb128d..2bc7585f5f 100644 --- a/src/libstd/ffi/c_str.rs +++ b/src/libstd/ffi/c_str.rs @@ -159,6 +159,12 @@ pub struct CStr { #[stable(feature = "rust1", since = "1.0.0")] pub struct NulError(usize, Vec); +/// An error returned from `CStr::from_bytes_with_nul` to indicate that a nul +/// byte was found too early in the slice provided or one wasn't found at all. +#[derive(Clone, PartialEq, Debug)] +#[stable(feature = "cstr_from_bytes", since = "1.10.0")] +pub struct FromBytesWithNulError { _a: () } + /// An error returned from `CString::into_string` to indicate that a UTF-8 error /// was encountered during the conversion. 
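// Illustrative aside, not part of the upstream patch: with this change
// `CStr::from_bytes_with_nul` reports failure through `Result` (with
// `FromBytesWithNulError`) instead of `Option`. A minimal sketch using only
// the standard library; the function name is made up for illustration.
fn _cstr_from_bytes_with_nul_sketch() {
    use std::ffi::CStr;

    // A slice terminated by exactly one trailing nul parses successfully.
    assert!(CStr::from_bytes_with_nul(b"hi\0").is_ok());

    // An interior nul or a missing terminator yields an error.
    assert!(CStr::from_bytes_with_nul(b"h\0i\0").is_err());
    assert!(CStr::from_bytes_with_nul(b"hi").is_err());
}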
#[derive(Clone, PartialEq, Debug)] @@ -326,6 +332,22 @@ impl fmt::Debug for CStr { } } +#[stable(feature = "cstr_default", since = "1.10.0")] +impl<'a> Default for &'a CStr { + fn default() -> &'a CStr { + static SLICE: &'static [c_char] = &[0]; + unsafe { CStr::from_ptr(SLICE.as_ptr()) } + } +} + +#[stable(feature = "cstr_default", since = "1.10.0")] +impl Default for CString { + fn default() -> CString { + let a: &CStr = Default::default(); + a.to_owned() + } +} + #[stable(feature = "cstr_borrow", since = "1.3.0")] impl Borrow for CString { fn borrow(&self) -> &CStr { self } @@ -445,20 +467,18 @@ impl CStr { /// # Examples /// /// ``` - /// # #![feature(cstr_from_bytes)] /// use std::ffi::CStr; /// - /// # fn main() { /// let cstr = CStr::from_bytes_with_nul(b"hello\0"); - /// assert!(cstr.is_some()); - /// # } + /// assert!(cstr.is_ok()); /// ``` - #[unstable(feature = "cstr_from_bytes", reason = "recently added", issue = "31190")] - pub fn from_bytes_with_nul(bytes: &[u8]) -> Option<&CStr> { + #[stable(feature = "cstr_from_bytes", since = "1.10.0")] + pub fn from_bytes_with_nul(bytes: &[u8]) + -> Result<&CStr, FromBytesWithNulError> { if bytes.is_empty() || memchr::memchr(0, &bytes) != Some(bytes.len() - 1) { - None + Err(FromBytesWithNulError { _a: () }) } else { - Some(unsafe { Self::from_bytes_with_nul_unchecked(bytes) }) + Ok(unsafe { Self::from_bytes_with_nul_unchecked(bytes) }) } } @@ -471,18 +491,15 @@ impl CStr { /// # Examples /// /// ``` - /// # #![feature(cstr_from_bytes)] /// use std::ffi::{CStr, CString}; /// - /// # fn main() { /// unsafe { /// let cstring = CString::new("hello").unwrap(); /// let cstr = CStr::from_bytes_with_nul_unchecked(cstring.to_bytes_with_nul()); /// assert_eq!(cstr, &*cstring); /// } - /// # } /// ``` - #[unstable(feature = "cstr_from_bytes", reason = "recently added", issue = "31190")] + #[stable(feature = "cstr_from_bytes", since = "1.10.0")] pub unsafe fn from_bytes_with_nul_unchecked(bytes: &[u8]) -> &CStr { mem::transmute(bytes) } @@ -726,12 +743,14 @@ mod tests { fn from_bytes_with_nul() { let data = b"123\0"; let cstr = CStr::from_bytes_with_nul(data); - assert_eq!(cstr.map(CStr::to_bytes), Some(&b"123"[..])); - assert_eq!(cstr.map(CStr::to_bytes_with_nul), Some(&b"123\0"[..])); + assert_eq!(cstr.map(CStr::to_bytes), Ok(&b"123"[..])); + let cstr = CStr::from_bytes_with_nul(data); + assert_eq!(cstr.map(CStr::to_bytes_with_nul), Ok(&b"123\0"[..])); unsafe { + let cstr = CStr::from_bytes_with_nul(data); let cstr_unchecked = CStr::from_bytes_with_nul_unchecked(data); - assert_eq!(cstr, Some(cstr_unchecked)); + assert_eq!(cstr, Ok(cstr_unchecked)); } } @@ -739,13 +758,13 @@ mod tests { fn from_bytes_with_nul_unterminated() { let data = b"123"; let cstr = CStr::from_bytes_with_nul(data); - assert!(cstr.is_none()); + assert!(cstr.is_err()); } #[test] fn from_bytes_with_nul_interior() { let data = b"1\023\0"; let cstr = CStr::from_bytes_with_nul(data); - assert!(cstr.is_none()); + assert!(cstr.is_err()); } } diff --git a/src/libstd/ffi/mod.rs b/src/libstd/ffi/mod.rs index bfd6ab5228..ca1ff18f1c 100644 --- a/src/libstd/ffi/mod.rs +++ b/src/libstd/ffi/mod.rs @@ -14,6 +14,8 @@ #[stable(feature = "rust1", since = "1.0.0")] pub use self::c_str::{CString, CStr, NulError, IntoStringError}; +#[stable(feature = "cstr_from_bytes", since = "1.10.0")] +pub use self::c_str::{FromBytesWithNulError}; #[stable(feature = "rust1", since = "1.0.0")] pub use self::os_str::{OsString, OsStr}; diff --git a/src/libstd/fs.rs b/src/libstd/fs.rs index f5a51e9742..125170bd47 
100644 --- a/src/libstd/fs.rs +++ b/src/libstd/fs.rs @@ -657,7 +657,7 @@ impl Metadata { /// /// This field may not be available on all platforms, and will return an /// `Err` on platforms where it is not available. - #[unstable(feature = "fs_time", issue = "31399")] + #[stable(feature = "fs_time", since = "1.10.0")] pub fn modified(&self) -> io::Result { self.0.modified().map(FromInner::from_inner) } @@ -675,7 +675,7 @@ impl Metadata { /// /// This field may not be available on all platforms, and will return an /// `Err` on platforms where it is not available. - #[unstable(feature = "fs_time", issue = "31399")] + #[stable(feature = "fs_time", since = "1.10.0")] pub fn accessed(&self) -> io::Result { self.0.accessed().map(FromInner::from_inner) } @@ -689,7 +689,7 @@ impl Metadata { /// /// This field may not be available on all platforms, and will return an /// `Err` on platforms where it is not available. - #[unstable(feature = "fs_time", issue = "31399")] + #[stable(feature = "fs_time", since = "1.10.0")] pub fn created(&self) -> io::Result { self.0.created().map(FromInner::from_inner) } @@ -965,7 +965,8 @@ pub fn symlink_metadata>(path: P) -> io::Result { fs_imp::lstat(path.as_ref()).map(Metadata) } -/// Rename a file or directory to a new name. +/// Rename a file or directory to a new name, replacing the original file if +/// `to` already exists. /// /// This will not work if the new name is on a different mount point. /// @@ -973,6 +974,12 @@ pub fn symlink_metadata>(path: P) -> io::Result { /// /// This function currently corresponds to the `rename` function on Unix /// and the `MoveFileEx` function with the `MOVEFILE_REPLACE_EXISTING` flag on Windows. +/// +/// Because of this, the behavior when both `from` and `to` exist differs. On +/// Unix, if `from` is a directory, `to` must also be an (empty) directory. If +/// `from` is not a directory, `to` must also be not a directory. In contrast, +/// on Windows, `from` can be anything, but `to` must *not* be a directory. +/// /// Note that, this [may change in the future][changes]. 
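A minimal sketch of the `fs::rename` replace-on-collision behaviour documented above; the file names are throwaway assumptions created in the current directory for illustration only.

```rust
use std::fs::{self, File};
use std::io::Write;

fn main() {
    // Create two scratch files; `rename` then replaces the existing `to.txt`,
    // subject to the platform caveats documented above (directories behave
    // differently on Unix and Windows).
    File::create("from.txt")
        .and_then(|mut f| f.write_all(b"hello"))
        .expect("setup failed");
    File::create("to.txt").expect("setup failed");

    fs::rename("from.txt", "to.txt").expect("rename failed");
    assert!(fs::metadata("to.txt").is_ok());
    assert!(fs::metadata("from.txt").is_err());
}
```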
/// [changes]: ../io/index.html#platform-specific-behavior /// @@ -1536,7 +1543,7 @@ mod tests { let result = File::open(filename); if cfg!(unix) { - error!(result, "o such file or directory"); + error!(result, "No such file or directory"); } if cfg!(windows) { error!(result, "The system cannot find the file specified"); @@ -1551,7 +1558,7 @@ mod tests { let result = fs::remove_file(filename); if cfg!(unix) { - error!(result, "o such file or directory"); + error!(result, "No such file or directory"); } if cfg!(windows) { error!(result, "The system cannot find the file specified"); @@ -1765,7 +1772,7 @@ mod tests { let tmpdir = tmpdir(); let dir = &tmpdir.join("mkdir_error_twice"); check!(fs::create_dir(dir)); - let e = fs::create_dir(dir).err().unwrap(); + let e = fs::create_dir(dir).unwrap_err(); assert_eq!(e.kind(), ErrorKind::AlreadyExists); } diff --git a/src/libstd/io/buffered.rs b/src/libstd/io/buffered.rs index 632ef3db80..a92ca95f4e 100644 --- a/src/libstd/io/buffered.rs +++ b/src/libstd/io/buffered.rs @@ -1127,7 +1127,7 @@ mod tests { let mut writer = BufWriter::new(PanicWriter); let _ = writer.write(b"hello world"); let _ = writer.flush(); - }).join().err().unwrap(); + }).join().unwrap_err(); assert_eq!(WRITES.load(Ordering::SeqCst), 1); } diff --git a/src/libstd/io/error.rs b/src/libstd/io/error.rs index 9a605fc7bb..e142c78569 100644 --- a/src/libstd/io/error.rs +++ b/src/libstd/io/error.rs @@ -350,7 +350,6 @@ mod test { use prelude::v1::*; use super::{Error, ErrorKind}; use error; - use error::Error as error_Error; use fmt; use sys::os::error_string; diff --git a/src/libstd/io/lazy.rs b/src/libstd/io/lazy.rs index 65667f24dd..1155160120 100644 --- a/src/libstd/io/lazy.rs +++ b/src/libstd/io/lazy.rs @@ -12,11 +12,12 @@ use prelude::v1::*; use cell::Cell; use ptr; -use sync::{StaticMutex, Arc}; +use sync::Arc; use sys_common; +use sys_common::mutex::Mutex; pub struct Lazy { - lock: StaticMutex, + lock: Mutex, ptr: Cell<*mut Arc>, init: fn() -> Arc, } @@ -26,23 +27,25 @@ unsafe impl Sync for Lazy {} impl Lazy { pub const fn new(init: fn() -> Arc) -> Lazy { Lazy { - lock: StaticMutex::new(), + lock: Mutex::new(), ptr: Cell::new(ptr::null_mut()), init: init } } pub fn get(&'static self) -> Option> { - let _g = self.lock.lock(); - let ptr = self.ptr.get(); unsafe { - if ptr.is_null() { + self.lock.lock(); + let ptr = self.ptr.get(); + let ret = if ptr.is_null() { Some(self.init()) } else if ptr as usize == 1 { None } else { Some((*ptr).clone()) - } + }; + self.lock.unlock(); + return ret } } @@ -52,10 +55,10 @@ impl Lazy { // the at exit handler). Otherwise we just return the freshly allocated // `Arc`. let registered = sys_common::at_exit(move || { - let g = self.lock.lock(); + self.lock.lock(); let ptr = self.ptr.get(); self.ptr.set(1 as *mut _); - drop(g); + self.lock.unlock(); drop(Box::from_raw(ptr)) }); let ret = (self.init)(); diff --git a/src/libstd/io/mod.rs b/src/libstd/io/mod.rs index 6dd7273c17..a058337a50 100644 --- a/src/libstd/io/mod.rs +++ b/src/libstd/io/mod.rs @@ -182,11 +182,10 @@ //! //! # fn foo() -> io::Result<()> { //! let f = try!(File::open("foo.txt")); -//! let mut reader = BufReader::new(f); +//! let reader = BufReader::new(f); //! //! for line in reader.lines() { -//! let line = try!(line); -//! println!("{}", line); +//! println!("{}", try!(line)); //! } //! //! # Ok(()) @@ -195,7 +194,7 @@ //! //! ## Functions //! -//! There are a number of [functions][functions] that offer access to various +//! 
There are a number of [functions][functions-list] that offer access to various //! features. For example, we can use three of these functions to copy everything //! from standard input to standard output: //! @@ -208,7 +207,7 @@ //! # } //! ``` //! -//! [functions]: #functions +//! [functions-list]: #functions-1 //! //! ## io::Result //! @@ -1505,6 +1504,11 @@ impl Read for Take { #[stable(feature = "rust1", since = "1.0.0")] impl BufRead for Take { fn fill_buf(&mut self) -> Result<&[u8]> { + // Don't call into inner reader at all at EOF because it may still block + if self.limit == 0 { + return Ok(&[]); + } + let buf = self.inner.fill_buf()?; let cap = cmp::min(buf.len() as u64, self.limit) as usize; Ok(&buf[..cap]) @@ -1860,9 +1864,16 @@ mod tests { Err(io::Error::new(io::ErrorKind::Other, "")) } } + impl BufRead for R { + fn fill_buf(&mut self) -> io::Result<&[u8]> { + Err(io::Error::new(io::ErrorKind::Other, "")) + } + fn consume(&mut self, _amt: usize) { } + } let mut buf = [0; 1]; assert_eq!(0, R.take(0).read(&mut buf).unwrap()); + assert_eq!(b"", R.take(0).fill_buf().unwrap()); } fn cmp_bufread(mut br1: Br1, mut br2: Br2, exp: &[u8]) { diff --git a/src/libstd/lib.rs b/src/libstd/lib.rs index e14a31453d..8f41bdf39e 100644 --- a/src/libstd/lib.rs +++ b/src/libstd/lib.rs @@ -245,6 +245,7 @@ #![feature(on_unimplemented)] #![feature(oom)] #![feature(optin_builtin_traits)] +#![feature(panic_unwind)] #![feature(placement_in_syntax)] #![feature(rand)] #![feature(raw)] @@ -270,6 +271,7 @@ #![feature(vec_push_all)] #![feature(zero_one)] #![feature(question_mark)] +#![feature(try_from)] // Issue# 30592: Systematically use alloc_system during stage0 since jemalloc // might be unavailable or disabled @@ -282,6 +284,13 @@ #![allow(unused_features)] // std may use features in a platform-specific way #![cfg_attr(not(stage0), deny(warnings))] +// FIXME(stage0): after a snapshot, move needs_panic_runtime up above and remove +// this `extern crate` declaration and feature(panic_unwind) +#![cfg_attr(not(stage0), needs_panic_runtime)] +#![cfg_attr(not(stage0), feature(needs_panic_runtime))] +#[cfg(stage0)] +extern crate panic_unwind as __please_just_link_me_dont_reference_me; + #[cfg(test)] extern crate test; // We want to reexport a few macros from core but libcore has already been @@ -300,6 +309,9 @@ extern crate alloc; extern crate rustc_unicode; extern crate libc; +// We always need an unwinder currently for backtraces +extern crate unwind; + #[cfg(stage0)] extern crate alloc_system; diff --git a/src/libstd/macros.rs b/src/libstd/macros.rs index 39adda1066..d69789ceda 100644 --- a/src/libstd/macros.rs +++ b/src/libstd/macros.rs @@ -17,9 +17,9 @@ /// The entry point for panic of Rust threads. /// /// This macro is used to inject panic into a Rust thread, causing the thread to -/// unwind and panic entirely. Each thread's panic can be reaped as the -/// `Box` type, and the single-argument form of the `panic!` macro will be -/// the value which is transmitted. +/// panic entirely. Each thread's panic can be reaped as the `Box` type, +/// and the single-argument form of the `panic!` macro will be the value which +/// is transmitted. /// /// The multi-argument form of this macro panics with a string and has the /// `format!` syntax for building a string. @@ -41,14 +41,14 @@ macro_rules! 
panic { panic!("explicit panic") }); ($msg:expr) => ({ - $crate::rt::begin_unwind($msg, { + $crate::rt::begin_panic($msg, { // static requires less code at runtime, more constant data static _FILE_LINE: (&'static str, u32) = (file!(), line!()); &_FILE_LINE }) }); ($fmt:expr, $($arg:tt)+) => ({ - $crate::rt::begin_unwind_fmt(format_args!($fmt, $($arg)+), { + $crate::rt::begin_panic_fmt(&format_args!($fmt, $($arg)+), { // The leading _'s are to avoid dead code warnings if this is // used inside a dead function. Just `#[allow(dead_code)]` is // insufficient, since the user may have diff --git a/src/libstd/net/ip.rs b/src/libstd/net/ip.rs index adceee6d73..45b85d600a 100644 --- a/src/libstd/net/ip.rs +++ b/src/libstd/net/ip.rs @@ -89,7 +89,7 @@ impl Ipv4Addr { /// Returns true if this is a loopback address (127.0.0.0/8). /// - /// This property is defined by RFC 6890 + /// This property is defined by RFC 6890. #[stable(since = "1.7.0", feature = "ip_17")] pub fn is_loopback(&self) -> bool { self.octets()[0] == 127 @@ -97,7 +97,7 @@ impl Ipv4Addr { /// Returns true if this is a private address. /// - /// The private address ranges are defined in RFC1918 and include: + /// The private address ranges are defined in RFC 1918 and include: /// /// - 10.0.0.0/8 /// - 172.16.0.0/12 @@ -114,7 +114,7 @@ impl Ipv4Addr { /// Returns true if the address is link-local (169.254.0.0/16). /// - /// This property is defined by RFC 6890 + /// This property is defined by RFC 6890. #[stable(since = "1.7.0", feature = "ip_17")] pub fn is_link_local(&self) -> bool { self.octets()[0] == 169 && self.octets()[1] == 254 @@ -140,7 +140,7 @@ impl Ipv4Addr { /// Returns true if this is a multicast address. /// /// Multicast addresses have a most significant octet between 224 and 239, - /// and is defined by RFC 5771 + /// and is defined by RFC 5771. #[stable(since = "1.7.0", feature = "ip_17")] pub fn is_multicast(&self) -> bool { self.octets()[0] >= 224 && self.octets()[0] <= 239 @@ -354,7 +354,7 @@ impl Ipv6Addr { /// Returns true if this is a unique local address (IPv6). /// - /// Unique local addresses are defined in RFC4193 and have the form fc00::/7. + /// Unique local addresses are defined in RFC 4193 and have the form fc00::/7. pub fn is_unique_local(&self) -> bool { (self.segments()[0] & 0xfe00) == 0xfc00 } @@ -371,7 +371,7 @@ impl Ipv6Addr { } /// Returns true if this is an address reserved for documentation - /// This is defined to be 2001:db8::/32 in RFC RFC 3849 + /// This is defined to be 2001:db8::/32 in RFC 3849. pub fn is_documentation(&self) -> bool { (self.segments()[0] == 0x2001) && (self.segments()[1] == 0xdb8) } diff --git a/src/libstd/net/tcp.rs b/src/libstd/net/tcp.rs index a7738e3170..5ab0d5a087 100644 --- a/src/libstd/net/tcp.rs +++ b/src/libstd/net/tcp.rs @@ -86,6 +86,8 @@ impl TcpStream { /// `addr` is an address of the remote host. Anything which implements /// `ToSocketAddrs` trait can be supplied for the address; see this trait /// documentation for concrete examples. + /// In case `ToSocketAddrs::to_socket_addrs()` returns more than one entry, + /// then the first valid and reachable address is used. 
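The `TcpStream::connect` clarification above concerns `ToSocketAddrs` inputs that resolve to several addresses. A small sketch of what that looks like at a call site; the host and port are illustrative assumptions, and the connection may well fail in a sandboxed environment.

```rust
use std::net::TcpStream;

fn main() {
    // "example.com:80" may resolve to several socket addresses; connect()
    // walks them in order and keeps the first one that succeeds.
    match TcpStream::connect("example.com:80") {
        Ok(stream) => println!("connected to {:?}", stream.peer_addr()),
        Err(e) => println!("all candidate addresses failed: {}", e),
    }
}
```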
#[stable(feature = "rust1", since = "1.0.0")] pub fn connect(addr: A) -> io::Result { super::each_addr(addr, net_imp::TcpStream::connect).map(TcpStream) diff --git a/src/libstd/net/udp.rs b/src/libstd/net/udp.rs index 4683c7061c..9d0279deb1 100644 --- a/src/libstd/net/udp.rs +++ b/src/libstd/net/udp.rs @@ -324,7 +324,7 @@ impl UdpSocket { self.0.recv(buf) } - /// Moves this TCP stream into or out of nonblocking mode. + /// Moves this UDP socket into or out of nonblocking mode. /// /// On Unix this corresponds to calling fcntl, and on Windows this /// corresponds to calling ioctlsocket. diff --git a/src/libstd/num/f32.rs b/src/libstd/num/f32.rs index 6fc26bb7ee..94aa3d6b51 100644 --- a/src/libstd/num/f32.rs +++ b/src/libstd/num/f32.rs @@ -646,7 +646,10 @@ impl f32 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn log2(self) -> f32 { - unsafe { intrinsics::log2f32(self) } + #[cfg(target_os = "android")] + return ::sys::android::log2f32(self); + #[cfg(not(target_os = "android"))] + return unsafe { intrinsics::log2f32(self) }; } /// Returns the base 10 logarithm of the number. @@ -1030,7 +1033,7 @@ impl f32 { /// let abs_difference_1 = (f.1 - x.cos()).abs(); /// /// assert!(abs_difference_0 <= f32::EPSILON); - /// assert!(abs_difference_0 <= f32::EPSILON); + /// assert!(abs_difference_1 <= f32::EPSILON); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] diff --git a/src/libstd/num/f64.rs b/src/libstd/num/f64.rs index 93e5969a27..2beffb64d3 100644 --- a/src/libstd/num/f64.rs +++ b/src/libstd/num/f64.rs @@ -546,7 +546,12 @@ impl f64 { #[stable(feature = "rust1", since = "1.0.0")] #[inline] pub fn log2(self) -> f64 { - self.log_wrapper(|n| { unsafe { intrinsics::log2f64(n) } }) + self.log_wrapper(|n| { + #[cfg(target_os = "android")] + return ::sys::android::log2f64(n); + #[cfg(not(target_os = "android"))] + return unsafe { intrinsics::log2f64(n) }; + }) } /// Returns the base 10 logarithm of the number. @@ -903,7 +908,7 @@ impl f64 { /// let abs_difference_1 = (f.1 - x.cos()).abs(); /// /// assert!(abs_difference_0 < 1e-10); - /// assert!(abs_difference_0 < 1e-10); + /// assert!(abs_difference_1 < 1e-10); /// ``` #[stable(feature = "rust1", since = "1.0.0")] #[inline] diff --git a/src/libstd/num/mod.rs b/src/libstd/num/mod.rs index 1886b4fdf5..d33df05acf 100644 --- a/src/libstd/num/mod.rs +++ b/src/libstd/num/mod.rs @@ -19,7 +19,7 @@ #[stable(feature = "rust1", since = "1.0.0")] pub use core::num::{Zero, One}; #[stable(feature = "rust1", since = "1.0.0")] -pub use core::num::{FpCategory, ParseIntError, ParseFloatError}; +pub use core::num::{FpCategory, ParseIntError, ParseFloatError, TryFromIntError}; #[stable(feature = "rust1", since = "1.0.0")] pub use core::num::Wrapping; diff --git a/src/libstd/panic.rs b/src/libstd/panic.rs index 16401c4527..70e6f90293 100644 --- a/src/libstd/panic.rs +++ b/src/libstd/panic.rs @@ -16,13 +16,13 @@ use any::Any; use boxed::Box; use cell::UnsafeCell; use ops::{Deref, DerefMut}; +use panicking; use ptr::{Unique, Shared}; use rc::Rc; use sync::{Arc, Mutex, RwLock}; -use sys_common::unwind; use thread::Result; -#[unstable(feature = "panic_handler", issue = "30449")] +#[stable(feature = "panic_hooks", since = "1.10.0")] pub use panicking::{take_hook, set_hook, PanicInfo, Location}; /// @@ -73,7 +73,7 @@ pub fn take_handler() -> Box { /// /// [rfc]: https://github.com/rust-lang/rfcs/blob/master/text/1236-stabilize-catch-panic.md /// -/// ## What is `RecoverSafe`? +/// ## What is `UnwindSafe`? 
/// /// Now that we've got an idea of what panic safety is in Rust, it's also /// important to understand what this trait represents. As mentioned above, one @@ -81,7 +81,7 @@ pub fn take_handler() -> Box { /// module as it allows catching a panic and then re-using the environment of /// the closure. /// -/// Simply put, a type `T` implements `RecoverSafe` if it cannot easily allow +/// Simply put, a type `T` implements `UnwindSafe` if it cannot easily allow /// witnessing a broken invariant through the use of `recover` (catching a /// panic). This trait is a marker trait, so it is automatically implemented for /// many types, and it is also structurally composed (e.g. a struct is recover @@ -108,7 +108,7 @@ pub fn take_handler() -> Box { /// /// Is not intended that most types or functions need to worry about this trait. /// It is only used as a bound on the `recover` function and as mentioned above, -/// the lack of `unsafe` means it is mostly an advisory. The `AssertRecoverSafe` +/// the lack of `unsafe` means it is mostly an advisory. The `AssertUnwindSafe` /// wrapper struct in this module can be used to force this trait to be /// implemented for any closed over variables passed to the `recover` function /// (more on this below). @@ -246,7 +246,7 @@ impl UnwindSafe for Rc {} #[stable(feature = "catch_unwind", since = "1.9.0")] impl UnwindSafe for Arc {} -// Pretty simple implementations for the `RefRecoverSafe` marker trait, +// Pretty simple implementations for the `RefUnwindSafe` marker trait, // basically just saying that this is a marker trait and `UnsafeCell` is the // only thing which doesn't implement it (which then transitively applies to // everything else). @@ -346,9 +346,9 @@ impl R> FnOnce<()> for AssertRecoverSafe { /// It is **not** recommended to use this function for a general try/catch /// mechanism. The `Result` type is more appropriate to use for functions that /// can fail on a regular basis. Additionally, this function is not guaranteed -/// to catch all panics, see the "Notes" sectino below. +/// to catch all panics, see the "Notes" section below. /// -/// The closure provided is required to adhere to the `UnwindSafe` to ensure +/// The closure provided is required to adhere to the `UnwindSafe` trait to ensure /// that all captured variables are safe to cross this boundary. The purpose of /// this bound is to encode the concept of [exception safety][rfc] in the type /// system. Most usage of this function should not need to worry about this @@ -383,12 +383,9 @@ impl R> FnOnce<()> for AssertRecoverSafe { /// ``` #[stable(feature = "catch_unwind", since = "1.9.0")] pub fn catch_unwind R + UnwindSafe, R>(f: F) -> Result { - let mut result = None; unsafe { - let result = &mut result; - unwind::try(move || *result = Some(f()))? + panicking::try(f) } - Ok(result.unwrap()) } /// Deprecated, renamed to `catch_unwind` @@ -398,7 +395,7 @@ pub fn recover R + UnwindSafe, R>(f: F) -> Result { catch_unwind(f) } -/// Triggers a panic without invoking the panic handler. +/// Triggers a panic without invoking the panic hook. /// /// This is designed to be used in conjunction with `catch_unwind` to, for /// example, carry a panic across a layer of C code. @@ -425,7 +422,7 @@ pub fn recover R + UnwindSafe, R>(f: F) -> Result { /// ``` #[stable(feature = "resume_unwind", since = "1.9.0")] pub fn resume_unwind(payload: Box) -> ! 
{ - unwind::rust_panic(payload) + panicking::rust_panic(payload) } /// Deprecated, use resume_unwind instead diff --git a/src/libstd/panicking.rs b/src/libstd/panicking.rs index fd6a15b0f6..d73e9542d2 100644 --- a/src/libstd/panicking.rs +++ b/src/libstd/panicking.rs @@ -8,14 +8,26 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +//! Implementation of various bits and pieces of the `panic!` macro and +//! associated runtime pieces. +//! +//! Specifically, this module contains the implementation of: +//! +//! * Panic hooks +//! * Executing a panic up to doing the actual implementation +//! * Shims around "try" + use prelude::v1::*; use io::prelude::*; use any::Any; use cell::Cell; use cell::RefCell; +use fmt; use intrinsics; -use sync::StaticRwLock; +use mem; +use raw; +use sys_common::rwlock::RWLock; use sync::atomic::{AtomicBool, Ordering}; use sys::stdio::Stderr; use sys_common::backtrace; @@ -23,30 +35,51 @@ use sys_common::thread_info; use sys_common::util; use thread; -thread_local! { pub static PANIC_COUNT: Cell = Cell::new(0) } - thread_local! { pub static LOCAL_STDERR: RefCell>> = { RefCell::new(None) } } +thread_local! { pub static PANIC_COUNT: Cell = Cell::new(0) } + +// Binary interface to the panic runtime that the standard library depends on. +// +// The standard library is tagged with `#![needs_panic_runtime]` (introduced in +// RFC 1513) to indicate that it requires some other crate tagged with +// `#![panic_runtime]` to exist somewhere. Each panic runtime is intended to +// implement these symbols (with the same signatures) so we can get matched up +// to them. +// +// One day this may look a little less ad-hoc with the compiler helping out to +// hook up these functions, but it is not this day! +#[allow(improper_ctypes)] +extern { + fn __rust_maybe_catch_panic(f: fn(*mut u8), + data: *mut u8, + data_ptr: *mut usize, + vtable_ptr: *mut usize) -> u32; + #[unwind] + fn __rust_start_panic(data: usize, vtable: usize) -> u32; +} + #[derive(Copy, Clone)] enum Hook { Default, Custom(*mut (Fn(&PanicInfo) + 'static + Sync + Send)), } -static HOOK_LOCK: StaticRwLock = StaticRwLock::new(); +static HOOK_LOCK: RWLock = RWLock::new(); static mut HOOK: Hook = Hook::Default; static FIRST_PANIC: AtomicBool = AtomicBool::new(true); /// Registers a custom panic hook, replacing any that was previously registered. /// -/// The panic hook is invoked when a thread panics, but before it begins -/// unwinding the stack. The default hook prints a message to standard error -/// and generates a backtrace if requested, but this behavior can be customized -/// with the `set_hook` and `take_hook` functions. +/// The panic hook is invoked when a thread panics, but before the panic runtime +/// is invoked. As such, the hook will run with both the aborting and unwinding +/// runtimes. The default hook prints a message to standard error and generates +/// a backtrace if requested, but this behavior can be customized with the +/// `set_hook` and `take_hook` functions. /// /// The hook is provided with a `PanicInfo` struct which contains information /// about the origin of the panic, including the payload passed to `panic!` and @@ -57,17 +90,17 @@ static FIRST_PANIC: AtomicBool = AtomicBool::new(true); /// # Panics /// /// Panics if called from a panicking thread. 
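With `set_hook`/`take_hook` stabilized in the hunk below, a minimal usage sketch (assuming a 1.10-era toolchain where these are stable and `catch_unwind` is available from 1.9):

```rust
use std::panic;

fn main() {
    // Install a custom hook; it runs before the panic runtime takes over,
    // whether that runtime unwinds or aborts.
    panic::set_hook(Box::new(|info| {
        if let Some(loc) = info.location() {
            println!("panic in {} at line {}", loc.file(), loc.line());
        }
    }));

    let _ = panic::catch_unwind(|| panic!("demo"));

    // Remove the custom hook again, restoring the default behaviour.
    let _ = panic::take_hook();
}
```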
-#[unstable(feature = "panic_handler", reason = "awaiting feedback", issue = "30449")] +#[stable(feature = "panic_hooks", since = "1.10.0")] pub fn set_hook(hook: Box) { if thread::panicking() { panic!("cannot modify the panic hook from a panicking thread"); } unsafe { - let lock = HOOK_LOCK.write(); + HOOK_LOCK.write(); let old_hook = HOOK; HOOK = Hook::Custom(Box::into_raw(hook)); - drop(lock); + HOOK_LOCK.write_unlock(); if let Hook::Custom(ptr) = old_hook { Box::from_raw(ptr); @@ -82,17 +115,17 @@ pub fn set_hook(hook: Box) { /// # Panics /// /// Panics if called from a panicking thread. -#[unstable(feature = "panic_handler", reason = "awaiting feedback", issue = "30449")] +#[stable(feature = "panic_hooks", since = "1.10.0")] pub fn take_hook() -> Box { if thread::panicking() { panic!("cannot modify the panic hook from a panicking thread"); } unsafe { - let lock = HOOK_LOCK.write(); + HOOK_LOCK.write(); let hook = HOOK; HOOK = Hook::Default; - drop(lock); + HOOK_LOCK.write_unlock(); match hook { Hook::Default => Box::new(default_hook), @@ -102,7 +135,7 @@ pub fn take_hook() -> Box { } /// A struct providing information about a panic. -#[unstable(feature = "panic_handler", reason = "awaiting feedback", issue = "30449")] +#[stable(feature = "panic_hooks", since = "1.10.0")] pub struct PanicInfo<'a> { payload: &'a (Any + Send), location: Location<'a>, @@ -112,7 +145,7 @@ impl<'a> PanicInfo<'a> { /// Returns the payload associated with the panic. /// /// This will commonly, but not always, be a `&'static str` or `String`. - #[unstable(feature = "panic_handler", reason = "awaiting feedback", issue = "30449")] + #[stable(feature = "panic_hooks", since = "1.10.0")] pub fn payload(&self) -> &(Any + Send) { self.payload } @@ -122,14 +155,14 @@ impl<'a> PanicInfo<'a> { /// /// This method will currently always return `Some`, but this may change /// in future versions. - #[unstable(feature = "panic_handler", reason = "awaiting feedback", issue = "30449")] + #[stable(feature = "panic_hooks", since = "1.10.0")] pub fn location(&self) -> Option<&Location> { Some(&self.location) } } /// A struct containing information about the location of a panic. -#[unstable(feature = "panic_handler", reason = "awaiting feedback", issue = "30449")] +#[stable(feature = "panic_hooks", since = "1.10.0")] pub struct Location<'a> { file: &'a str, line: u32, @@ -137,20 +170,20 @@ pub struct Location<'a> { impl<'a> Location<'a> { /// Returns the name of the source file from which the panic originated. - #[unstable(feature = "panic_handler", reason = "awaiting feedback", issue = "30449")] + #[stable(feature = "panic_hooks", since = "1.10.0")] pub fn file(&self) -> &str { self.file } /// Returns the line number from which the panic originated. - #[unstable(feature = "panic_handler", reason = "awaiting feedback", issue = "30449")] + #[stable(feature = "panic_hooks", since = "1.10.0")] pub fn line(&self) -> u32 { self.line } } fn default_hook(info: &PanicInfo) { - let panics = PANIC_COUNT.with(|s| s.get()); + let panics = PANIC_COUNT.with(|c| c.get()); // If this is a double panic, make sure that we print a backtrace // for this panic. Otherwise only print it if logging is enabled. @@ -195,41 +228,152 @@ fn default_hook(info: &PanicInfo) { } } -pub fn on_panic(obj: &(Any+Send), file: &'static str, line: u32) { - let panics = PANIC_COUNT.with(|s| { - let count = s.get() + 1; - s.set(count); - count +/// Invoke a closure, capturing the cause of an unwinding panic if one occurs. 
+pub unsafe fn try R>(f: F) -> Result> { + let mut slot = None; + let mut f = Some(f); + let ret = PANIC_COUNT.with(|s| { + let prev = s.get(); + s.set(0); + + let mut to_run = || { + slot = Some(f.take().unwrap()()); + }; + let fnptr = get_call(&mut to_run); + let dataptr = &mut to_run as *mut _ as *mut u8; + let mut any_data = 0; + let mut any_vtable = 0; + let fnptr = mem::transmute::(fnptr); + let r = __rust_maybe_catch_panic(fnptr, + dataptr, + &mut any_data, + &mut any_vtable); + s.set(prev); + + if r == 0 { + Ok(()) + } else { + Err(mem::transmute(raw::TraitObject { + data: any_data as *mut _, + vtable: any_vtable as *mut _, + })) + } + }); + + return ret.map(|()| { + slot.take().unwrap() }); - // If this is the third nested call, on_panic triggered the last panic, - // otherwise the double-panic check would have aborted the process. - // Even if it is likely that on_panic was unable to log the backtrace, - // abort immediately to avoid infinite recursion, so that attaching a - // debugger provides a useable stacktrace. - if panics >= 3 { + fn get_call(_: &mut F) -> fn(&mut F) { + call + } + + fn call(f: &mut F) { + f() + } +} + +/// Determines whether the current thread is unwinding because of panic. +pub fn panicking() -> bool { + PANIC_COUNT.with(|c| c.get() != 0) +} + +/// Entry point of panic from the libcore crate. +#[cfg(not(test))] +#[lang = "panic_fmt"] +#[unwind] +pub extern fn rust_begin_panic(msg: fmt::Arguments, + file: &'static str, + line: u32) -> ! { + begin_panic_fmt(&msg, &(file, line)) +} + +/// The entry point for panicking with a formatted message. +/// +/// This is designed to reduce the amount of code required at the call +/// site as much as possible (so that `panic!()` has as low an impact +/// on (e.g.) the inlining of other functions as possible), by moving +/// the actual formatting into this shared place. +#[unstable(feature = "libstd_sys_internals", + reason = "used by the panic! macro", + issue = "0")] +#[inline(never)] #[cold] +pub fn begin_panic_fmt(msg: &fmt::Arguments, + file_line: &(&'static str, u32)) -> ! { + use fmt::Write; + + // We do two allocations here, unfortunately. But (a) they're + // required with the current scheme, and (b) we don't handle + // panic + OOM properly anyway (see comment in begin_panic + // below). + + let mut s = String::new(); + let _ = s.write_fmt(*msg); + begin_panic(s, file_line) +} + +/// This is the entry point of panicking for panic!() and assert!(). +#[unstable(feature = "libstd_sys_internals", + reason = "used by the panic! macro", + issue = "0")] +#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible +pub fn begin_panic(msg: M, file_line: &(&'static str, u32)) -> ! { + // Note that this should be the only allocation performed in this code path. + // Currently this means that panic!() on OOM will invoke this code path, + // but then again we're not really ready for panic on OOM anyway. If + // we do start doing this, then we should propagate this allocation to + // be performed in the parent of this thread instead of the thread that's + // panicking. + + rust_panic_with_hook(Box::new(msg), file_line) +} + +/// Executes the primary logic for a panic, including checking for recursive +/// panics and panic hooks. +/// +/// This is the entry point or panics from libcore, formatted panics, and +/// `Box` panics. Here we'll verify that we're not panicking recursively, +/// run panic hooks, and then delegate to the actual implementation of panics. 
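The `panicking::try` added above is internal plumbing; the public surface it backs is `std::panic::catch_unwind`, rewired in the earlier panic.rs hunk. A minimal sketch of that public API:

```rust
use std::panic;

fn main() {
    // A non-panicking closure comes back as Ok with its return value.
    let ok = panic::catch_unwind(|| 1 + 1);
    assert_eq!(ok.ok(), Some(2));

    // A panicking closure comes back as Err carrying the boxed payload.
    let err = panic::catch_unwind(|| panic!("boom"));
    assert!(err.is_err());
}
```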
+#[inline(never)] +#[cold] +fn rust_panic_with_hook(msg: Box, + file_line: &(&'static str, u32)) -> ! { + let (file, line) = *file_line; + + let panics = PANIC_COUNT.with(|c| { + let prev = c.get(); + c.set(prev + 1); + prev + }); + + // If this is the third nested call (e.g. panics == 2, this is 0-indexed), + // the panic hook probably triggered the last panic, otherwise the + // double-panic check would have aborted the process. In this case abort the + // process real quickly as we don't want to try calling it again as it'll + // probably just panic again. + if panics > 1 { util::dumb_print(format_args!("thread panicked while processing \ panic. aborting.\n")); unsafe { intrinsics::abort() } } - let info = PanicInfo { - payload: obj, - location: Location { - file: file, - line: line, - }, - }; - unsafe { - let _lock = HOOK_LOCK.read(); + let info = PanicInfo { + payload: &*msg, + location: Location { + file: file, + line: line, + }, + }; + HOOK_LOCK.read(); match HOOK { Hook::Default => default_hook(&info), Hook::Custom(ptr) => (*ptr)(&info), } + HOOK_LOCK.read_unlock(); } - if panics >= 2 { + if panics > 0 { // If a thread panics while it's already unwinding then we // have limited options. Currently our preference is to // just abort. In the future we may consider resuming @@ -238,4 +382,17 @@ pub fn on_panic(obj: &(Any+Send), file: &'static str, line: u32) { aborting.\n")); unsafe { intrinsics::abort() } } + + rust_panic(msg) +} + +/// A private no-mangle function on which to slap yer breakpoints. +#[no_mangle] +#[allow(private_no_mangle_fns)] // yes we get it, but we like breakpoints +pub fn rust_panic(msg: Box) -> ! { + let code = unsafe { + let obj = mem::transmute::<_, raw::TraitObject>(msg); + __rust_start_panic(obj.data as usize, obj.vtable as usize) + }; + rtabort!("failed to initiate panic, error {}", code) } diff --git a/src/libstd/path.rs b/src/libstd/path.rs index 5309cc3c85..f413bed86a 100644 --- a/src/libstd/path.rs +++ b/src/libstd/path.rs @@ -466,7 +466,7 @@ enum State { Done = 3, } -/// A Windows path prefix, e.g. `C:` or `\server\share`. +/// A Windows path prefix, e.g. `C:` or `\\server\share`. /// /// Does not occur on Unix. #[stable(feature = "rust1", since = "1.0.0")] @@ -528,7 +528,7 @@ impl<'a> Hash for PrefixComponent<'a> { #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[stable(feature = "rust1", since = "1.0.0")] pub enum Component<'a> { - /// A Windows path prefix, e.g. `C:` or `\server\share`. + /// A Windows path prefix, e.g. `C:` or `\\server\share`. /// /// Does not occur on Unix. 
#[stable(feature = "rust1", since = "1.0.0")] diff --git a/src/libstd/process.rs b/src/libstd/process.rs index c1ef60852a..1b6f6c3e87 100644 --- a/src/libstd/process.rs +++ b/src/libstd/process.rs @@ -38,10 +38,10 @@ use sys_common::{AsInner, AsInnerMut, FromInner, IntoInner}; /// let mut child = Command::new("/bin/cat") /// .arg("file.txt") /// .spawn() -/// .unwrap_or_else(|e| { panic!("failed to execute child: {}", e) }); +/// .expect("failed to execute child"); /// /// let ecode = child.wait() -/// .unwrap_or_else(|e| { panic!("failed to wait on child: {}", e) }); +/// .expect("failed to wait on child"); /// /// assert!(ecode.success()); /// ``` @@ -195,7 +195,8 @@ impl FromInner for ChildStderr { /// .arg("-c") /// .arg("echo hello") /// .output() -/// .unwrap_or_else(|e| { panic!("failed to execute process: {}", e) }); +/// .expect("failed to execute proces"); +/// /// let hello = output.stdout; /// ``` #[stable(feature = "process", since = "1.0.0")] @@ -214,12 +215,38 @@ impl Command { /// /// Builder methods are provided to change these defaults and /// otherwise configure the process. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// Command::new("sh") + /// .spawn() + /// .expect("sh command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn new>(program: S) -> Command { Command { inner: imp::Command::new(program.as_ref()) } } /// Add an argument to pass to the program. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// Command::new("ls") + /// .arg("-l") + /// .arg("-a") + /// .spawn() + /// .expect("ls command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn arg>(&mut self, arg: S) -> &mut Command { self.inner.arg(arg.as_ref()); @@ -227,6 +254,19 @@ impl Command { } /// Add multiple arguments to pass to the program. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// Command::new("ls") + /// .args(&["-l", "-a"]) + /// .spawn() + /// .expect("ls command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn args>(&mut self, args: &[S]) -> &mut Command { for arg in args { @@ -239,6 +279,19 @@ impl Command { /// /// Note that environment variable names are case-insensitive (but case-preserving) on Windows, /// and case-sensitive on all other platforms. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// Command::new("ls") + /// .env("PATH", "/bin") + /// .spawn() + /// .expect("ls command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn env(&mut self, key: K, val: V) -> &mut Command where K: AsRef, V: AsRef @@ -248,6 +301,19 @@ impl Command { } /// Removes an environment variable mapping. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// Command::new("ls") + /// .env_remove("PATH") + /// .spawn() + /// .expect("ls command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn env_remove>(&mut self, key: K) -> &mut Command { self.inner.env_remove(key.as_ref()); @@ -255,6 +321,19 @@ impl Command { } /// Clears the entire environment map for the child process. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// Command::new("ls") + /// .env_clear() + /// .spawn() + /// .expect("ls command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn env_clear(&mut self) -> &mut Command { self.inner.env_clear(); @@ -262,6 +341,19 @@ impl Command { } /// Sets the working directory for the child process. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// Command::new("ls") + /// .current_dir("/bin") + /// .spawn() + /// .expect("ls command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn current_dir>(&mut self, dir: P) -> &mut Command { self.inner.cwd(dir.as_ref().as_ref()); @@ -269,6 +361,19 @@ impl Command { } /// Configuration for the child process's stdin handle (file descriptor 0). + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::{Command, Stdio}; + /// + /// Command::new("ls") + /// .stdin(Stdio::null()) + /// .spawn() + /// .expect("ls command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn stdin(&mut self, cfg: Stdio) -> &mut Command { self.inner.stdin(cfg.0); @@ -276,6 +381,19 @@ impl Command { } /// Configuration for the child process's stdout handle (file descriptor 1). + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::{Command, Stdio}; + /// + /// Command::new("ls") + /// .stdout(Stdio::null()) + /// .spawn() + /// .expect("ls command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn stdout(&mut self, cfg: Stdio) -> &mut Command { self.inner.stdout(cfg.0); @@ -283,6 +401,19 @@ impl Command { } /// Configuration for the child process's stderr handle (file descriptor 2). + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::{Command, Stdio}; + /// + /// Command::new("ls") + /// .stderr(Stdio::null()) + /// .spawn() + /// .expect("ls command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn stderr(&mut self, cfg: Stdio) -> &mut Command { self.inner.stderr(cfg.0); @@ -292,6 +423,18 @@ impl Command { /// Executes the command as a child process, returning a handle to it. /// /// By default, stdin, stdout and stderr are inherited from the parent. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// Command::new("ls") + /// .spawn() + /// .expect("ls command failed to start"); + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn spawn(&mut self) -> io::Result { self.inner.spawn(imp::Stdio::Inherit, true).map(Child::from_inner) @@ -305,15 +448,18 @@ impl Command { /// /// # Examples /// - /// ``` + /// ```should_panic /// use std::process::Command; - /// let output = Command::new("cat").arg("foo.txt").output().unwrap_or_else(|e| { - /// panic!("failed to execute process: {}", e) - /// }); + /// let output = Command::new("/bin/cat") + /// .arg("file.txt") + /// .output() + /// .expect("failed to execute process"); /// /// println!("status: {}", output.status); /// println!("stdout: {}", String::from_utf8_lossy(&output.stdout)); /// println!("stderr: {}", String::from_utf8_lossy(&output.stderr)); + /// + /// assert!(output.status.success()); /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn output(&mut self) -> io::Result { @@ -328,14 +474,17 @@ impl Command { /// /// # Examples /// - /// ``` + /// ```should_panic /// use std::process::Command; /// - /// let status = Command::new("ls").status().unwrap_or_else(|e| { - /// panic!("failed to execute process: {}", e) - /// }); + /// let status = Command::new("/bin/cat") + /// .arg("file.txt") + /// .status() + /// .expect("failed to execute process"); /// /// println!("process exited with: {}", status); + /// + /// assert!(status.success()); /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn status(&mut self) -> io::Result { @@ -456,6 +605,12 @@ impl AsInner for ExitStatus { fn as_inner(&self) -> &imp::ExitStatus { &self.0 } } +impl FromInner for ExitStatus { + fn from_inner(s: imp::ExitStatus) -> ExitStatus { + ExitStatus(s) + } +} + #[stable(feature = "process", since = "1.0.0")] impl fmt::Display for ExitStatus { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -466,12 +621,42 @@ impl fmt::Display for ExitStatus { impl Child { /// Forces the child to exit. This is equivalent to sending a /// SIGKILL on unix platforms. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// let mut command = Command::new("yes"); + /// if let Ok(mut child) = command.spawn() { + /// child.kill().expect("command wasn't running"); + /// } else { + /// println!("yes command didn't start"); + /// } + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn kill(&mut self) -> io::Result<()> { self.handle.kill() } /// Returns the OS-assigned process identifier associated with this child. + /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// let mut command = Command::new("ls"); + /// if let Ok(child) = command.spawn() { + /// println!("Child's id is {}", child.id()); + /// } else { + /// println!("ls command didn't start"); + /// } + /// ``` #[stable(feature = "process_id", since = "1.3.0")] pub fn id(&self) -> u32 { self.handle.id() @@ -485,6 +670,22 @@ impl Child { /// before waiting. This helps avoid deadlock: it ensures that the /// child does not block waiting for input from the parent, while /// the parent waits for the child to exit. 
+ /// + /// # Examples + /// + /// Basic usage: + /// + /// ```no_run + /// use std::process::Command; + /// + /// let mut command = Command::new("ls"); + /// if let Ok(mut child) = command.spawn() { + /// child.wait().expect("command wasn't running"); + /// println!("Child has finished its execution!"); + /// } else { + /// println!("ls command didn't start"); + /// } + /// ``` #[stable(feature = "process", since = "1.0.0")] pub fn wait(&mut self) -> io::Result { drop(self.stdin.take()); @@ -499,6 +700,29 @@ impl Child { /// before waiting. This helps avoid deadlock: it ensures that the /// child does not block waiting for input from the parent, while /// the parent waits for the child to exit. + /// + /// By default, stdin, stdout and stderr are inherited from the parent. + /// In order to capture the output into this `Result` it is + /// necessary to create new pipes between parent and child. Use + /// `stdout(Stdio::piped())` or `stderr(Stdio::piped())`, respectively. + /// + /// # Examples + /// + /// ```should_panic + /// use std::process::{Command, Stdio}; + /// + /// let mut child = Command::new("/bin/cat") + /// .arg("file.txt") + /// .stdout(Stdio::piped()) + /// .spawn() + /// .expect("failed to execute child"); + /// + /// let ecode = child.wait_with_output() + /// .expect("failed to wait on child"); + /// + /// assert!(ecode.status.success()); + /// ``` + /// #[stable(feature = "process", since = "1.0.0")] pub fn wait_with_output(mut self) -> io::Result { drop(self.stdin.take()); diff --git a/src/libstd/rt.rs b/src/libstd/rt.rs index 83091c72c0..6eee4ee9bb 100644 --- a/src/libstd/rt.rs +++ b/src/libstd/rt.rs @@ -25,12 +25,10 @@ // Reexport some of our utilities which are expected by other crates. -pub use sys_common::unwind::{begin_unwind, begin_unwind_fmt}; +pub use panicking::{begin_panic, begin_panic_fmt}; -// Rust runtime's startup objects depend on these symbols, so they must be public. -// Since sys_common isn't public, we have to re-export them here. -#[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))] -pub use sys_common::unwind::imp::eh_frame_registry::*; +#[cfg(stage0)] +pub use panicking::begin_panic as begin_unwind; #[cfg(not(test))] #[lang = "start"] diff --git a/src/libstd/sync/barrier.rs b/src/libstd/sync/barrier.rs index b543240c15..b1267acdee 100644 --- a/src/libstd/sync/barrier.rs +++ b/src/libstd/sync/barrier.rs @@ -71,7 +71,7 @@ impl Barrier { } } - /// Blocks the current thread until all threads has rendezvoused here. + /// Blocks the current thread until all threads have rendezvoused here. /// /// Barriers are re-usable after all threads have rendezvoused once, and can /// be used continuously. diff --git a/src/libstd/sync/condvar.rs b/src/libstd/sync/condvar.rs index 64468be396..bf4b119a0b 100644 --- a/src/libstd/sync/condvar.rs +++ b/src/libstd/sync/condvar.rs @@ -72,6 +72,7 @@ impl WaitTimeoutResult { /// } /// ``` #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub struct Condvar { inner: Box } /// Statically allocated condition variables. 
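The barrier.rs wording fix above ("until all threads have rendezvoused") describes the usual `Barrier` pattern; a short, self-contained sketch for context:

```rust
use std::sync::{Arc, Barrier};
use std::thread;

fn main() {
    let barrier = Arc::new(Barrier::new(3));
    let handles: Vec<_> = (0..3).map(|i| {
        let b = barrier.clone();
        thread::spawn(move || {
            // Each thread blocks here until all three have rendezvoused.
            b.wait();
            println!("thread {} released", i);
        })
    }).collect();
    for h in handles {
        h.join().unwrap();
    }
}
```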
@@ -91,6 +92,11 @@ pub struct Condvar { inner: Box } #[unstable(feature = "static_condvar", reason = "may be merged with Condvar in the future", issue = "27717")] +#[rustc_deprecated(since = "1.10.0", + reason = "the lazy-static crate suffices for static sync \ + primitives and eventually this type shouldn't \ + be necessary as `Condvar::new` in a static should \ + suffice")] pub struct StaticCondvar { inner: sys::Condvar, mutex: AtomicUsize, @@ -100,8 +106,15 @@ pub struct StaticCondvar { #[unstable(feature = "static_condvar", reason = "may be merged with Condvar in the future", issue = "27717")] +#[rustc_deprecated(since = "1.10.0", + reason = "the lazy-static crate suffices for static sync \ + primitives and eventually this type shouldn't \ + be necessary as `Condvar::new` in a static should \ + suffice")] +#[allow(deprecated)] pub const CONDVAR_INIT: StaticCondvar = StaticCondvar::new(); +#[allow(deprecated)] impl Condvar { /// Creates a new condition variable which is ready to be waited on and /// notified. @@ -220,13 +233,30 @@ impl Condvar { pub fn notify_all(&self) { unsafe { self.inner.inner.notify_all() } } } +#[stable(feature = "condvar_default", since = "1.9.0")] +impl Default for Condvar { + fn default() -> Condvar { + Condvar::new() + } +} + #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] impl Drop for Condvar { fn drop(&mut self) { unsafe { self.inner.inner.destroy() } } } +#[rustc_deprecated(since = "1.10.0", + reason = "the lazy-static crate suffices for static sync \ + primitives and eventually this type shouldn't \ + be necessary as `Condvar::new` in a static should \ + suffice")] +#[unstable(feature = "static_condvar", + reason = "may be merged with Condvar in the future", + issue = "27717")] +#[allow(deprecated)] impl StaticCondvar { /// Creates a new condition variable #[unstable(feature = "static_condvar", @@ -385,6 +415,7 @@ impl StaticCondvar { } #[cfg(test)] +#[allow(deprecated)] mod tests { use prelude::v1::*; diff --git a/src/libstd/sync/mod.rs b/src/libstd/sync/mod.rs index c20b422d40..56eb7340c8 100644 --- a/src/libstd/sync/mod.rs +++ b/src/libstd/sync/mod.rs @@ -25,18 +25,22 @@ pub use core::sync::atomic; #[stable(feature = "rust1", since = "1.0.0")] pub use self::barrier::{Barrier, BarrierWaitResult}; #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub use self::condvar::{Condvar, StaticCondvar, WaitTimeoutResult, CONDVAR_INIT}; #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub use self::mutex::MUTEX_INIT; #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub use self::mutex::{Mutex, MutexGuard, StaticMutex}; #[stable(feature = "rust1", since = "1.0.0")] -pub use self::once::{Once, ONCE_INIT}; +pub use self::once::{Once, OnceState, ONCE_INIT}; #[stable(feature = "rust1", since = "1.0.0")] pub use sys_common::poison::{PoisonError, TryLockError, TryLockResult, LockResult}; #[stable(feature = "rust1", since = "1.0.0")] pub use self::rwlock::{RwLockReadGuard, RwLockWriteGuard}; #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub use self::rwlock::{RwLock, StaticRwLock, RW_LOCK_INIT}; pub mod mpsc; diff --git a/src/libstd/sync/mpsc/mod.rs b/src/libstd/sync/mpsc/mod.rs index dbcc2bc95b..63b659d8db 100644 --- a/src/libstd/sync/mpsc/mod.rs +++ b/src/libstd/sync/mpsc/mod.rs @@ -535,7 +535,7 @@ impl Sender { /// /// // This send will fail because the receiver is gone /// drop(rx); - /// assert_eq!(tx.send(1).err().unwrap().0, 1); + /// 
assert_eq!(tx.send(1).unwrap_err().0, 1); /// ``` #[stable(feature = "rust1", since = "1.0.0")] pub fn send(&self, t: T) -> Result<(), SendError> { diff --git a/src/libstd/sync/mutex.rs b/src/libstd/sync/mutex.rs index e0946a5c12..15e69628c7 100644 --- a/src/libstd/sync/mutex.rs +++ b/src/libstd/sync/mutex.rs @@ -113,6 +113,7 @@ use sys_common::poison::{self, TryLockError, TryLockResult, LockResult}; /// *guard += 1; /// ``` #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub struct Mutex { // Note that this static mutex is in a *box*, not inlined into the struct // itself. Once a native mutex has been used once, its address can never @@ -156,6 +157,11 @@ unsafe impl Sync for Mutex { } #[unstable(feature = "static_mutex", reason = "may be merged with Mutex in the future", issue = "27717")] +#[rustc_deprecated(since = "1.10.0", + reason = "the lazy-static crate suffices for static sync \ + primitives and eventually this type shouldn't \ + be necessary as `Mutex::new` in a static should \ + suffice")] pub struct StaticMutex { lock: sys::Mutex, poison: poison::Flag, @@ -168,6 +174,7 @@ pub struct StaticMutex { /// `Deref` and `DerefMut` implementations #[must_use] #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub struct MutexGuard<'a, T: ?Sized + 'a> { // funny underscores due to how Deref/DerefMut currently work (they // disregard field privacy). @@ -184,8 +191,15 @@ impl<'a, T: ?Sized> !marker::Send for MutexGuard<'a, T> {} #[unstable(feature = "static_mutex", reason = "may be merged with Mutex in the future", issue = "27717")] +#[rustc_deprecated(since = "1.10.0", + reason = "the lazy-static crate suffices for static sync \ + primitives and eventually this type shouldn't \ + be necessary as `Mutex::new` in a static should \ + suffice")] +#[allow(deprecated)] pub const MUTEX_INIT: StaticMutex = StaticMutex::new(); +#[allow(deprecated)] impl Mutex { /// Creates a new mutex in an unlocked state ready for use. #[stable(feature = "rust1", since = "1.0.0")] @@ -197,6 +211,7 @@ impl Mutex { } } +#[allow(deprecated)] impl Mutex { /// Acquires a mutex, blocking the current thread until it is able to do so. /// @@ -205,10 +220,19 @@ impl Mutex { /// held. An RAII guard is returned to allow scoped unlock of the lock. When /// the guard goes out of scope, the mutex will be unlocked. /// + /// The exact behavior on locking a mutex in the thread which already holds + /// the lock is left unspecified. However, this function will not return on + /// the second call (it might panic or deadlock, for example). + /// /// # Errors /// /// If another user of this mutex panicked while holding the mutex, then /// this call will return an error once the mutex is acquired. + /// + /// # Panics + /// + /// This function might panic when called if the lock is already held by + /// the current thread. 
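The new `Mutex::lock` caveat above is about re-locking from the thread that already holds the guard. A minimal sketch of the safe pattern, keeping the guard scoped so the second `lock()` only happens after release:

```rust
use std::sync::Mutex;

fn main() {
    let counter = Mutex::new(0);
    {
        // The guard holds the lock; calling lock() again on this thread while
        // the guard is alive is exactly the unspecified case documented above.
        let mut guard = counter.lock().unwrap();
        *guard += 1;
    } // guard dropped here, lock released

    // Locking again after the guard is gone is fine.
    assert_eq!(*counter.lock().unwrap(), 1);
}
```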
#[stable(feature = "rust1", since = "1.0.0")] pub fn lock(&self) -> LockResult> { unsafe { @@ -298,6 +322,7 @@ impl Mutex { } #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] impl Drop for Mutex { #[unsafe_destructor_blind_to_params] fn drop(&mut self) { @@ -310,6 +335,13 @@ impl Drop for Mutex { } } +#[stable(feature = "mutex_default", since = "1.9.0")] +impl Default for Mutex { + fn default() -> Mutex { + Mutex::new(Default::default()) + } +} + #[stable(feature = "rust1", since = "1.0.0")] impl fmt::Debug for Mutex { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { @@ -330,6 +362,12 @@ static DUMMY: Dummy = Dummy(UnsafeCell::new(())); #[unstable(feature = "static_mutex", reason = "may be merged with Mutex in the future", issue = "27717")] +#[rustc_deprecated(since = "1.10.0", + reason = "the lazy-static crate suffices for static sync \ + primitives and eventually this type shouldn't \ + be necessary as `Mutex::new` in a static should \ + suffice")] +#[allow(deprecated)] impl StaticMutex { /// Creates a new mutex in an unlocked state ready for use. pub const fn new() -> StaticMutex { @@ -375,8 +413,8 @@ impl StaticMutex { } } +#[allow(deprecated)] impl<'mutex, T: ?Sized> MutexGuard<'mutex, T> { - unsafe fn new(lock: &'mutex StaticMutex, data: &'mutex UnsafeCell) -> LockResult> { poison::map_result(lock.poison.borrow(), |guard| { @@ -402,6 +440,7 @@ impl<'mutex, T: ?Sized> DerefMut for MutexGuard<'mutex, T> { } #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] impl<'a, T: ?Sized> Drop for MutexGuard<'a, T> { #[inline] fn drop(&mut self) { @@ -412,15 +451,18 @@ impl<'a, T: ?Sized> Drop for MutexGuard<'a, T> { } } +#[allow(deprecated)] pub fn guard_lock<'a, T: ?Sized>(guard: &MutexGuard<'a, T>) -> &'a sys::Mutex { &guard.__lock.lock } +#[allow(deprecated)] pub fn guard_poison<'a, T: ?Sized>(guard: &MutexGuard<'a, T>) -> &'a poison::Flag { &guard.__lock.poison } #[cfg(test)] +#[allow(deprecated)] mod tests { use prelude::v1::*; diff --git a/src/libstd/sync/once.rs b/src/libstd/sync/once.rs index e228d236a3..e9ea465cc9 100644 --- a/src/libstd/sync/once.rs +++ b/src/libstd/sync/once.rs @@ -101,7 +101,7 @@ unsafe impl Send for Once {} /// State yielded to the `call_once_force` method which can be used to query /// whether the `Once` was previously poisoned or not. -#[unstable(feature = "once_poison", issue = "31688")] +#[unstable(feature = "once_poison", issue = "33577")] pub struct OnceState { poisoned: bool, } @@ -218,8 +218,7 @@ impl Once { /// The closure `f` is yielded a structure which can be used to query the /// state of this `Once` (whether initialization has previously panicked or /// not). - /// poisoned or not. - #[unstable(feature = "once_poison", issue = "31688")] + #[unstable(feature = "once_poison", issue = "33577")] pub fn call_once_force(&'static self, f: F) where F: FnOnce(&OnceState) { // same as above, just with a different parameter to `call_inner`. if self.state.load(Ordering::SeqCst) == COMPLETE { @@ -361,7 +360,7 @@ impl OnceState { /// /// Once an initalization routine for a `Once` has panicked it will forever /// indicate to future forced initialization routines that it is poisoned. 
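The once.rs hunks below concern the unstable `call_once_force`/`OnceState` surface; the stable `call_once` pattern they extend looks like this (using the `ONCE_INIT` constant that sync/mod.rs continues to re-export):

```rust
use std::sync::{Once, ONCE_INIT};

static INIT: Once = ONCE_INIT;

fn main() {
    // The closure runs exactly once, no matter how many times this point is reached.
    for _ in 0..3 {
        INIT.call_once(|| println!("initializing"));
    }
}
```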
- #[unstable(feature = "once_poison", issue = "31688")] + #[unstable(feature = "once_poison", issue = "33577")] pub fn poisoned(&self) -> bool { self.poisoned } diff --git a/src/libstd/sync/rwlock.rs b/src/libstd/sync/rwlock.rs index a37c1c16a4..e1e764bd25 100644 --- a/src/libstd/sync/rwlock.rs +++ b/src/libstd/sync/rwlock.rs @@ -38,8 +38,8 @@ use sys_common::rwlock as sys; /// /// # Poisoning /// -/// RwLocks, like Mutexes, will become poisoned on panics. Note, however, that -/// an RwLock may only be poisoned if a panic occurs while it is locked +/// An `RwLock`, like `Mutex`, will become poisoned on a panic. Note, however, +/// that an `RwLock` may only be poisoned if a panic occurs while it is locked /// exclusively (write mode). If a panic occurs in any reader, then the lock /// will not be poisoned. /// @@ -66,6 +66,7 @@ use sys_common::rwlock as sys; /// } // write lock is dropped here /// ``` #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub struct RwLock { inner: Box, data: UnsafeCell, @@ -104,6 +105,11 @@ unsafe impl Sync for RwLock {} #[unstable(feature = "static_rwlock", reason = "may be merged with RwLock in the future", issue = "27717")] +#[rustc_deprecated(since = "1.10.0", + reason = "the lazy-static crate suffices for static sync \ + primitives and eventually this type shouldn't \ + be necessary as `RwLock::new` in a static should \ + suffice")] pub struct StaticRwLock { lock: sys::RWLock, poison: poison::Flag, @@ -113,12 +119,19 @@ pub struct StaticRwLock { #[unstable(feature = "static_rwlock", reason = "may be merged with RwLock in the future", issue = "27717")] +#[rustc_deprecated(since = "1.10.0", + reason = "the lazy-static crate suffices for static sync \ + primitives and eventually this type shouldn't \ + be necessary as `RwLock::new` in a static should \ + suffice")] +#[allow(deprecated)] pub const RW_LOCK_INIT: StaticRwLock = StaticRwLock::new(); /// RAII structure used to release the shared read access of a lock when /// dropped. #[must_use] #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub struct RwLockReadGuard<'a, T: ?Sized + 'a> { __lock: &'a StaticRwLock, __data: &'a T, @@ -131,6 +144,7 @@ impl<'a, T: ?Sized> !marker::Send for RwLockReadGuard<'a, T> {} /// dropped. #[must_use] #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] pub struct RwLockWriteGuard<'a, T: ?Sized + 'a> { __lock: &'a StaticRwLock, __data: &'a mut T, @@ -140,6 +154,7 @@ pub struct RwLockWriteGuard<'a, T: ?Sized + 'a> { #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T: ?Sized> !marker::Send for RwLockWriteGuard<'a, T> {} +#[allow(deprecated)] impl RwLock { /// Creates a new instance of an `RwLock` which is unlocked. /// @@ -156,6 +171,7 @@ impl RwLock { } } +#[allow(deprecated)] impl RwLock { /// Locks this rwlock with shared read access, blocking the current thread /// until it can be acquired. 
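For the reworked rwlock.rs poisoning documentation above, a minimal sketch of shared versus exclusive access; only a panic while the write guard is held would poison the lock.

```rust
use std::sync::RwLock;

fn main() {
    let lock = RwLock::new(5);
    {
        // Any number of readers may hold the lock at once.
        let r1 = lock.read().unwrap();
        let r2 = lock.read().unwrap();
        assert_eq!(*r1 + *r2, 10);
    }
    {
        // A writer takes exclusive access (write mode).
        let mut w = lock.write().unwrap();
        *w += 1;
    }
    assert_eq!(*lock.read().unwrap(), 6);
}
```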
@@ -325,6 +341,7 @@ impl RwLock { } #[stable(feature = "rust1", since = "1.0.0")] +#[allow(deprecated)] impl Drop for RwLock { #[unsafe_destructor_blind_to_params] fn drop(&mut self) { @@ -346,6 +363,13 @@ impl fmt::Debug for RwLock { } } +#[stable(feature = "rw_lock_default", since = "1.9.0")] +impl Default for RwLock { + fn default() -> RwLock { + RwLock::new(Default::default()) + } +} + struct Dummy(UnsafeCell<()>); unsafe impl Sync for Dummy {} static DUMMY: Dummy = Dummy(UnsafeCell::new(())); @@ -353,6 +377,12 @@ static DUMMY: Dummy = Dummy(UnsafeCell::new(())); #[unstable(feature = "static_rwlock", reason = "may be merged with RwLock in the future", issue = "27717")] +#[rustc_deprecated(since = "1.10.0", + reason = "the lazy-static crate suffices for static sync \ + primitives and eventually this type shouldn't \ + be necessary as `RwLock::new` in a static should \ + suffice")] +#[allow(deprecated)] impl StaticRwLock { /// Creates a new rwlock. pub const fn new() -> StaticRwLock { @@ -427,6 +457,7 @@ impl StaticRwLock { } } +#[allow(deprecated)] impl<'rwlock, T: ?Sized> RwLockReadGuard<'rwlock, T> { unsafe fn new(lock: &'rwlock StaticRwLock, data: &'rwlock UnsafeCell) -> LockResult> { @@ -475,6 +506,7 @@ impl<'rwlock, T: ?Sized> RwLockReadGuard<'rwlock, T> { } } +#[allow(deprecated)] impl<'rwlock, T: ?Sized> RwLockWriteGuard<'rwlock, T> { unsafe fn new(lock: &'rwlock StaticRwLock, data: &'rwlock UnsafeCell) -> LockResult> { @@ -555,10 +587,12 @@ impl<'rwlock, T: ?Sized> Deref for RwLockWriteGuard<'rwlock, T> { #[stable(feature = "rust1", since = "1.0.0")] impl<'rwlock, T: ?Sized> DerefMut for RwLockWriteGuard<'rwlock, T> { - fn deref_mut(&mut self) -> &mut T { self.__data + fn deref_mut(&mut self) -> &mut T { + self.__data } } +#[allow(deprecated)] #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T: ?Sized> Drop for RwLockReadGuard<'a, T> { fn drop(&mut self) { @@ -566,6 +600,7 @@ impl<'a, T: ?Sized> Drop for RwLockReadGuard<'a, T> { } } +#[allow(deprecated)] #[stable(feature = "rust1", since = "1.0.0")] impl<'a, T: ?Sized> Drop for RwLockWriteGuard<'a, T> { fn drop(&mut self) { @@ -575,6 +610,7 @@ impl<'a, T: ?Sized> Drop for RwLockWriteGuard<'a, T> { } #[cfg(test)] +#[allow(deprecated)] mod tests { #![allow(deprecated)] // rand diff --git a/src/libstd/sys/common/args.rs b/src/libstd/sys/common/args.rs index 5841754066..e877391fb8 100644 --- a/src/libstd/sys/common/args.rs +++ b/src/libstd/sys/common/args.rs @@ -48,32 +48,36 @@ mod imp { use mem; use ffi::CStr; - use sync::StaticMutex; + use sys_common::mutex::Mutex; static mut GLOBAL_ARGS_PTR: usize = 0; - static LOCK: StaticMutex = StaticMutex::new(); + static LOCK: Mutex = Mutex::new(); pub unsafe fn init(argc: isize, argv: *const *const u8) { let args = (0..argc).map(|i| { CStr::from_ptr(*argv.offset(i) as *const c_char).to_bytes().to_vec() }).collect(); - let _guard = LOCK.lock(); + LOCK.lock(); let ptr = get_global_ptr(); assert!((*ptr).is_none()); (*ptr) = Some(box args); + LOCK.unlock(); } pub unsafe fn cleanup() { - let _guard = LOCK.lock(); + LOCK.lock(); *get_global_ptr() = None; + LOCK.unlock(); } pub fn clone() -> Option>> { - let _guard = LOCK.lock(); unsafe { + LOCK.lock(); let ptr = get_global_ptr(); - (*ptr).as_ref().map(|s| (**s).clone()) + let ret = (*ptr).as_ref().map(|s| (**s).clone()); + LOCK.unlock(); + return ret } } diff --git a/src/libstd/sys/common/mod.rs b/src/libstd/sys/common/mod.rs index 56628a4c75..c9279883ae 100644 --- a/src/libstd/sys/common/mod.rs +++ b/src/libstd/sys/common/mod.rs @@ -30,9 
+30,7 @@ pub mod args; pub mod at_exit_imp; pub mod backtrace; pub mod condvar; -pub mod dwarf; pub mod io; -pub mod libunwind; pub mod mutex; pub mod net; pub mod poison; @@ -41,7 +39,6 @@ pub mod rwlock; pub mod thread; pub mod thread_info; pub mod thread_local; -pub mod unwind; pub mod util; pub mod wtf8; diff --git a/src/libstd/sys/common/unwind/mod.rs b/src/libstd/sys/common/unwind/mod.rs deleted file mode 100644 index 527c2e6303..0000000000 --- a/src/libstd/sys/common/unwind/mod.rs +++ /dev/null @@ -1,241 +0,0 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! Implementation of Rust stack unwinding -//! -//! For background on exception handling and stack unwinding please see -//! "Exception Handling in LLVM" (llvm.org/docs/ExceptionHandling.html) and -//! documents linked from it. -//! These are also good reads: -//! http://mentorembedded.github.io/cxx-abi/abi-eh.html -//! http://monoinfinito.wordpress.com/series/exception-handling-in-c/ -//! http://www.airs.com/blog/index.php?s=exception+frames -//! -//! ## A brief summary -//! -//! Exception handling happens in two phases: a search phase and a cleanup phase. -//! -//! In both phases the unwinder walks stack frames from top to bottom using -//! information from the stack frame unwind sections of the current process's -//! modules ("module" here refers to an OS module, i.e. an executable or a -//! dynamic library). -//! -//! For each stack frame, it invokes the associated "personality routine", whose -//! address is also stored in the unwind info section. -//! -//! In the search phase, the job of a personality routine is to examine exception -//! object being thrown, and to decide whether it should be caught at that stack -//! frame. Once the handler frame has been identified, cleanup phase begins. -//! -//! In the cleanup phase, the unwinder invokes each personality routine again. -//! This time it decides which (if any) cleanup code needs to be run for -//! the current stack frame. If so, the control is transferred to a special branch -//! in the function body, the "landing pad", which invokes destructors, frees memory, -//! etc. At the end of the landing pad, control is transferred back to the unwinder -//! and unwinding resumes. -//! -//! Once stack has been unwound down to the handler frame level, unwinding stops -//! and the last personality routine transfers control to the catch block. -//! -//! ## `eh_personality` and `eh_unwind_resume` -//! -//! These language items are used by the compiler when generating unwind info. -//! The first one is the personality routine described above. The second one -//! allows compilation target to customize the process of resuming unwind at the -//! end of the landing pads. `eh_unwind_resume` is used only if `custom_unwind_resume` -//! flag in the target options is set. -//! -//! ## Frame unwind info registration -//! -//! Each module's image contains a frame unwind info section (usually ".eh_frame"). -//! When a module is loaded/unloaded into the process, the unwinder must be informed -//! about the location of this section in memory. The methods of achieving that vary -//! by the platform. -//! On some (e.g. Linux), the unwinder can discover unwind info sections on its own -//! 
(by dynamically enumerating currently loaded modules via the dl_iterate_phdr() API -//! and finding their ".eh_frame" sections); -//! Others, like Windows, require modules to actively register their unwind info -//! sections via unwinder API (see `rust_eh_register_frames`/`rust_eh_unregister_frames`). - -#![allow(dead_code)] -#![allow(unused_imports)] - -use prelude::v1::*; - -use any::Any; -use boxed; -use cmp; -use panicking::{self,PANIC_COUNT}; -use fmt; -use intrinsics; -use mem; -use sync::atomic::{self, Ordering}; -use sys_common::mutex::Mutex; - -// The actual unwinding implementation is cfg'd here, and we've got two current -// implementations. One goes through SEH on Windows and the other goes through -// libgcc via the libunwind-like API. - -// *-pc-windows-msvc -#[cfg(target_env = "msvc")] -#[path = "seh.rs"] #[doc(hidden)] -pub mod imp; - -// x86_64-pc-windows-gnu -#[cfg(all(windows, target_arch = "x86_64", target_env = "gnu"))] -#[path = "seh64_gnu.rs"] #[doc(hidden)] -pub mod imp; - -// i686-pc-windows-gnu and all others -#[cfg(any(unix, all(windows, target_arch = "x86", target_env = "gnu")))] -#[path = "gcc.rs"] #[doc(hidden)] -pub mod imp; - -/// Invoke a closure, capturing the cause of panic if one occurs. -/// -/// This function will return `Ok(())` if the closure did not panic, and will -/// return `Err(cause)` if the closure panics. The `cause` returned is the -/// object with which panic was originally invoked. -/// -/// This function also is unsafe for a variety of reasons: -/// -/// * This is not safe to call in a nested fashion. The unwinding -/// interface for Rust is designed to have at most one try/catch block per -/// thread, not multiple. No runtime checking is currently performed to uphold -/// this invariant, so this function is not safe. A nested try/catch block -/// may result in corruption of the outer try/catch block's state, especially -/// if this is used within a thread itself. -/// -/// * It is not sound to trigger unwinding while already unwinding. Rust threads -/// have runtime checks in place to ensure this invariant, but it is not -/// guaranteed that a rust thread is in place when invoking this function. -/// Unwinding twice can lead to resource leaks where some destructors are not -/// run. -pub unsafe fn try(f: F) -> Result<(), Box> { - let mut f = Some(f); - return inner_try(try_fn::, &mut f as *mut _ as *mut u8); - - fn try_fn(opt_closure: *mut u8) { - let opt_closure = opt_closure as *mut Option; - unsafe { (*opt_closure).take().unwrap()(); } - } -} - -unsafe fn inner_try(f: fn(*mut u8), data: *mut u8) - -> Result<(), Box> { - PANIC_COUNT.with(|s| { - let prev = s.get(); - s.set(0); - - // The "payload" here is a platform-specific region of memory which is - // used to transmit information about the exception being thrown from - // the point-of-throw back to this location. - // - // A pointer to this data is passed to the `try` intrinsic itself, - // allowing this function, the `try` intrinsic, imp::payload(), and - // imp::cleanup() to all work in concert to transmit this information. - // - // More information about what this pointer actually is can be found in - // each implementation as well as browsing the compiler source itself. - let mut payload = imp::payload(); - let r = intrinsics::try(f, data, &mut payload as *mut _ as *mut _); - s.set(prev); - if r == 0 { - Ok(()) - } else { - Err(imp::cleanup(payload)) - } - }) -} - -/// Determines whether the current thread is unwinding because of panic. 
-pub fn panicking() -> bool { - PANIC_COUNT.with(|s| s.get() != 0) -} - -// An uninlined, unmangled function upon which to slap yer breakpoints -#[inline(never)] -#[no_mangle] -#[allow(private_no_mangle_fns)] -pub fn rust_panic(cause: Box) -> ! { - unsafe { - imp::panic(cause) - } -} - -#[cfg(not(test))] -/// Entry point of panic from the libcore crate. -#[lang = "panic_fmt"] -#[unwind] -pub extern fn rust_begin_unwind(msg: fmt::Arguments, - file: &'static str, line: u32) -> ! { - begin_unwind_fmt(msg, &(file, line)) -} - -/// The entry point for unwinding with a formatted message. -/// -/// This is designed to reduce the amount of code required at the call -/// site as much as possible (so that `panic!()` has as low an impact -/// on (e.g.) the inlining of other functions as possible), by moving -/// the actual formatting into this shared place. -#[unstable(feature = "libstd_sys_internals", - reason = "used by the panic! macro", - issue = "0")] -#[inline(never)] #[cold] -pub fn begin_unwind_fmt(msg: fmt::Arguments, file_line: &(&'static str, u32)) -> ! { - use fmt::Write; - - // We do two allocations here, unfortunately. But (a) they're - // required with the current scheme, and (b) we don't handle - // panic + OOM properly anyway (see comment in begin_unwind - // below). - - let mut s = String::new(); - let _ = s.write_fmt(msg); - begin_unwind_inner(Box::new(s), file_line) -} - -/// This is the entry point of unwinding for panic!() and assert!(). -#[unstable(feature = "libstd_sys_internals", - reason = "used by the panic! macro", - issue = "0")] -#[inline(never)] #[cold] // avoid code bloat at the call sites as much as possible -pub fn begin_unwind(msg: M, file_line: &(&'static str, u32)) -> ! { - // Note that this should be the only allocation performed in this code path. - // Currently this means that panic!() on OOM will invoke this code path, - // but then again we're not really ready for panic on OOM anyway. If - // we do start doing this, then we should propagate this allocation to - // be performed in the parent of this thread instead of the thread that's - // panicking. - - // see below for why we do the `Any` coercion here. - begin_unwind_inner(Box::new(msg), file_line) -} - -/// The core of the unwinding. -/// -/// This is non-generic to avoid instantiation bloat in other crates -/// (which makes compilation of small crates noticeably slower). (Note: -/// we need the `Any` object anyway, we're not just creating it to -/// avoid being generic.) -/// -/// Doing this split took the LLVM IR line counts of `fn main() { panic!() -/// }` from ~1900/3700 (-O/no opts) to 180/590. -#[inline(never)] #[cold] // this is the slow path, please never inline this -fn begin_unwind_inner(msg: Box, - file_line: &(&'static str, u32)) -> ! { - let (file, line) = *file_line; - - // First, invoke the default panic handler. - panicking::on_panic(&*msg, file, line); - - // Finally, perform the unwinding. - rust_panic(msg); -} diff --git a/src/libstd/sys/common/unwind/seh.rs b/src/libstd/sys/common/unwind/seh.rs deleted file mode 100644 index 94da42f009..0000000000 --- a/src/libstd/sys/common/unwind/seh.rs +++ /dev/null @@ -1,153 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! 
Windows SEH -//! -//! On Windows (currently only on MSVC), the default exception handling -//! mechanism is Structured Exception Handling (SEH). This is quite different -//! than Dwarf-based exception handling (e.g. what other unix platforms use) in -//! terms of compiler internals, so LLVM is required to have a good deal of -//! extra support for SEH. -//! -//! In a nutshell, what happens here is: -//! -//! 1. The `panic` function calls the standard Windows function `RaiseException` -//! with a Rust-specific code, triggering the unwinding process. -//! 2. All landing pads generated by the compiler use the personality function -//! `__C_specific_handler` on 64-bit and `__except_handler3` on 32-bit, -//! functions in the CRT, and the unwinding code in Windows will use this -//! personality function to execute all cleanup code on the stack. -//! 3. All compiler-generated calls to `invoke` have a landing pad set as a -//! `cleanuppad` LLVM instruction, which indicates the start of the cleanup -//! routine. The personality (in step 2, defined in the CRT) is responsible -//! for running the cleanup routines. -//! 4. Eventually the "catch" code in the `try` intrinsic (generated by the -//! compiler) is executed, which will ensure that the exception being caught -//! is indeed a Rust exception, indicating that control should come back to -//! Rust. This is done via a `catchswitch` plus a `catchpad` instruction in -//! LLVM IR terms, finally returning normal control to the program with a -//! `catchret` instruction. The `try` intrinsic uses a filter function to -//! detect what kind of exception is being thrown, and this detection is -//! implemented as the msvc_try_filter language item below. -//! -//! Some specific differences from the gcc-based exception handling are: -//! -//! * Rust has no custom personality function, it is instead *always* -//! __C_specific_handler or __except_handler3, so the filtering is done in a -//! C++-like manner instead of in the personality function itself. Note that -//! the precise codegen for this was lifted from an LLVM test case for SEH -//! (this is the `__rust_try_filter` function below). -//! * We've got some data to transmit across the unwinding boundary, -//! specifically a `Box`. Like with Dwarf exceptions -//! these two pointers are stored as a payload in the exception itself. On -//! MSVC, however, there's no need for an extra allocation because the call -//! stack is preserved while filter functions are being executed. This means -//! that the pointers are passed directly to `RaiseException` which are then -//! recovered in the filter function to be written to the stack frame of the -//! `try` intrinsic. -//! -//! [win64]: http://msdn.microsoft.com/en-us/library/1eyas8tf.aspx -//! [llvm]: http://llvm.org/docs/ExceptionHandling.html#background-on-windows-exceptions - -use sys::c; - -// A code which indicates panics that originate from Rust. Note that some of the -// upper bits are used by the system so we just set them to 0 and ignore them. -// 0x 0 R S T -const RUST_PANIC: c::DWORD = 0x00525354; - -pub use self::imp::*; - -mod imp { - use prelude::v1::*; - - use any::Any; - use mem; - use raw; - use super::RUST_PANIC; - use sys::c; - - pub unsafe fn panic(data: Box) -> ! { - // As mentioned above, the call stack here is preserved while the filter - // functions are running, so it's ok to pass stack-local arrays into - // `RaiseException`. 
- // - // The two pointers of the `data` trait object are written to the stack, - // passed to `RaiseException`, and they're later extracted by the filter - // function below in the "custom exception information" section of the - // `EXCEPTION_RECORD` type. - let ptrs = mem::transmute::<_, raw::TraitObject>(data); - let ptrs = [ptrs.data, ptrs.vtable]; - c::RaiseException(RUST_PANIC, 0, 2, ptrs.as_ptr() as *mut _); - rtabort!("could not unwind stack"); - } - - pub fn payload() -> [usize; 2] { - [0; 2] - } - - pub unsafe fn cleanup(payload: [usize; 2]) -> Box { - mem::transmute(raw::TraitObject { - data: payload[0] as *mut _, - vtable: payload[1] as *mut _, - }) - } - - // This is quite a special function, and it's not literally passed in as the - // filter function for the `catchpad` of the `try` intrinsic. The compiler - // actually generates its own filter function wrapper which will delegate to - // this for the actual execution logic for whether the exception should be - // caught. The reasons for this are: - // - // * Each architecture has a slightly different ABI for the filter function - // here. For example on x86 there are no arguments but on x86_64 there are - // two. - // * This function needs access to the stack frame of the `try` intrinsic - // which is using this filter as a catch pad. This is because the payload - // of this exception, `Box`, needs to be transmitted to that - // location. - // - // Both of these differences end up using a ton of weird llvm-specific - // intrinsics, so it's actually pretty difficult to express the entire - // filter function in Rust itself. As a compromise, the compiler takes care - // of all the weird LLVM-specific and platform-specific stuff, getting to - // the point where this function makes the actual decision about what to - // catch given two parameters. - // - // The first parameter is `*mut EXCEPTION_POINTERS` which is some contextual - // information about the exception being filtered, and the second pointer is - // `*mut *mut [usize; 2]` (the payload here). This value points directly - // into the stack frame of the `try` intrinsic itself, and we use it to copy - // information from the exception onto the stack. - #[lang = "msvc_try_filter"] - #[cfg(not(test))] - unsafe extern fn __rust_try_filter(eh_ptrs: *mut u8, - payload: *mut u8) -> i32 { - let eh_ptrs = eh_ptrs as *mut c::EXCEPTION_POINTERS; - let payload = payload as *mut *mut [usize; 2]; - let record = &*(*eh_ptrs).ExceptionRecord; - if record.ExceptionCode != RUST_PANIC { - return 0 - } - (**payload)[0] = record.ExceptionInformation[0] as usize; - (**payload)[1] = record.ExceptionInformation[1] as usize; - return 1 - } -} - -// This is required by the compiler to exist (e.g. it's a lang item), but -// it's never actually called by the compiler because __C_specific_handler -// or _except_handler3 is the personality function that is always used. -// Hence this is just an aborting stub. 
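// The module being removed above documented the internal `try` used to run
// a closure and capture the cause of any panic. From user code, the stable
// counterpart of that machinery is `std::panic::catch_unwind`; a minimal
// sketch (illustrative only, not part of this patch):
use std::panic;

fn main() {
    // Ok(()) when the closure runs to completion.
    let ok = panic::catch_unwind(|| {
        println!("no panic here");
    });
    assert!(ok.is_ok());

    // Err(cause) when the closure panics; the cause is the boxed payload.
    let err = panic::catch_unwind(|| {
        panic!("boom");
    });
    assert!(err.is_err());
}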
-#[lang = "eh_personality"] -#[cfg(not(test))] -fn rust_eh_personality() { - unsafe { ::intrinsics::abort() } -} diff --git a/src/libstd/sys/common/util.rs b/src/libstd/sys/common/util.rs index b7a6b7650d..1df511a881 100644 --- a/src/libstd/sys/common/util.rs +++ b/src/libstd/sys/common/util.rs @@ -10,7 +10,6 @@ use env; use fmt; -use intrinsics; use io::prelude::*; use sync::atomic::{self, Ordering}; use sys::stdio::Stderr; @@ -34,9 +33,32 @@ pub fn dumb_print(args: fmt::Arguments) { let _ = Stderr::new().map(|mut stderr| stderr.write_fmt(args)); } +// On Unix-like platforms, libc::abort will unregister signal handlers +// including the SIGABRT handler, preventing the abort from being blocked, and +// fclose streams, with the side effect of flushing them so libc bufferred +// output will be printed. Additionally the shell will generally print a more +// understandable error message like "Abort trap" rather than "Illegal +// instruction" that intrinsics::abort would cause, as intrinsics::abort is +// implemented as an illegal instruction. +#[cfg(unix)] +unsafe fn abort_internal() -> ! { + use libc; + libc::abort() +} + +// On Windows, we want to avoid using libc, and there isn't a direct +// equivalent of libc::abort. The __failfast intrinsic may be a reasonable +// substitute, but desireability of using it over the abort instrinsic is +// debateable; see https://github.com/rust-lang/rust/pull/31519 for details. +#[cfg(not(unix))] +unsafe fn abort_internal() -> ! { + use intrinsics; + intrinsics::abort() +} + pub fn abort(args: fmt::Arguments) -> ! { dumb_print(format_args!("fatal runtime error: {}\n", args)); - unsafe { intrinsics::abort(); } + unsafe { abort_internal(); } } #[allow(dead_code)] // stack overflow detection not enabled on all platforms diff --git a/src/libstd/sys/unix/android.rs b/src/libstd/sys/unix/android.rs new file mode 100644 index 0000000000..abbe3fc184 --- /dev/null +++ b/src/libstd/sys/unix/android.rs @@ -0,0 +1,119 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Android ABI-compatibility module +//! +//! The ABI of Android has changed quite a bit over time, and libstd attempts to +//! be both forwards and backwards compatible as much as possible. We want to +//! always work with the most recent version of Android, but we also want to +//! work with older versions of Android for whenever projects need to. +//! +//! Our current minimum supported Android version is `android-9`, e.g. Android +//! with API level 9. We then in theory want to work on that and all future +//! versions of Android! +//! +//! Some of the detection here is done at runtime via `dlopen` and +//! introspection. Other times no detection is performed at all and we just +//! provide a fallback implementation as some versions of Android we support +//! don't have the function. +//! +//! You'll find more details below about why each compatibility shim is needed. + +#![cfg(target_os = "android")] + +use libc::{c_int, sighandler_t}; + +use io; +use sys::cvt_r; + +// The `log2` and `log2f` functions apparently appeared in android-18, or at +// least you can see they're not present in the android-17 header [1] and they +// are present in android-18 [2]. 
+// +// [1]: https://chromium.googlesource.com/android_tools/+/20ee6d20/ndk/platforms +// /android-17/arch-arm/usr/include/math.h +// [2]: https://chromium.googlesource.com/android_tools/+/20ee6d20/ndk/platforms +// /android-18/arch-arm/usr/include/math.h +// +// Note that these shims are likely less precise than directly calling `log2`, +// but hopefully that should be enough for now... +// +// Note that mathematically, for any arbitrary `y`: +// +// log_2(x) = log_y(x) / log_y(2) +// = log_y(x) / (1 / log_2(y)) +// = log_y(x) * log_2(y) +// +// Hence because `ln` (log_e) is available on all Android we just choose `y = e` +// and get: +// +// log_2(x) = ln(x) * log_2(e) + +#[cfg(not(test))] +pub fn log2f32(f: f32) -> f32 { + f.ln() * ::f32::consts::LOG2_E +} + +#[cfg(not(test))] +pub fn log2f64(f: f64) -> f64 { + f.ln() * ::f64::consts::LOG2_E +} + +// Back in the day [1] the `signal` function was just an inline wrapper +// around `bsd_signal`, but starting in API level android-20 the `signal` +// symbols was introduced [2]. Finally, in android-21 the API `bsd_signal` was +// removed [3]. +// +// Basically this means that if we want to be binary compatible with multiple +// Android releases (oldest being 9 and newest being 21) then we need to check +// for both symbols and not actually link against either. +// +// [1]: https://chromium.googlesource.com/android_tools/+/20ee6d20/ndk/platforms +// /android-18/arch-arm/usr/include/signal.h +// [2]: https://chromium.googlesource.com/android_tools/+/fbd420/ndk_experimental +// /platforms/android-20/arch-arm +// /usr/include/signal.h +// [3]: https://chromium.googlesource.com/android_tools/+/20ee6d/ndk/platforms +// /android-21/arch-arm/usr/include/signal.h +pub unsafe fn signal(signum: c_int, handler: sighandler_t) -> sighandler_t { + weak!(fn signal(c_int, sighandler_t) -> sighandler_t); + weak!(fn bsd_signal(c_int, sighandler_t) -> sighandler_t); + + let f = signal.get().or_else(|| bsd_signal.get()); + let f = f.expect("neither `signal` nor `bsd_signal` symbols found"); + f(signum, handler) +} + +// The `ftruncate64` symbol apparently appeared in android-12, so we do some +// dynamic detection to see if we can figure out whether `ftruncate64` exists. +// +// If it doesn't we just fall back to `ftruncate`, generating an error for +// too-large values. +pub fn ftruncate64(fd: c_int, size: u64) -> io::Result<()> { + weak!(fn ftruncate64(c_int, i64) -> c_int); + + extern { + fn ftruncate(fd: c_int, off: i32) -> c_int; + } + + unsafe { + match ftruncate64.get() { + Some(f) => cvt_r(|| f(fd, size as i64)).map(|_| ()), + None => { + if size > i32::max_value() as u64 { + Err(io::Error::new(io::ErrorKind::InvalidInput, + "cannot truncate >2GB")) + } else { + cvt_r(|| ftruncate(fd, size as i32)).map(|_| ()) + } + } + } + } +} diff --git a/src/libstd/sys/unix/backtrace/tracing/backtrace_fn.rs b/src/libstd/sys/unix/backtrace/tracing/backtrace_fn.rs index de93d3d4e5..ca2e70b500 100644 --- a/src/libstd/sys/unix/backtrace/tracing/backtrace_fn.rs +++ b/src/libstd/sys/unix/backtrace/tracing/backtrace_fn.rs @@ -18,12 +18,11 @@ /// simple to use it should be used only on iOS devices as the only viable /// option. 
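// The new `android` module above derives its `log2` shims from the identity
// log_2(x) = ln(x) * log_2(e). A quick standalone check of that identity
// (illustrative only, not part of this patch):
use std::f64::consts::LOG2_E;

fn log2_via_ln(x: f64) -> f64 {
    // Same rewrite the shim relies on, since `ln` is available on every
    // Android level the library supports.
    x.ln() * LOG2_E
}

fn main() {
    // log_2(8) = 3 and log_2(1024) = 10, up to floating-point rounding.
    assert!((log2_via_ln(8.0) - 3.0).abs() < 1e-12);
    assert!((log2_via_ln(1024.0) - 10.0).abs() < 1e-12);
}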
-use io; use io::prelude::*; +use io; use libc; use mem; -use result::Result::Ok; -use sync::StaticMutex; +use sys::mutex::Mutex; use super::super::printing::print; @@ -37,18 +36,21 @@ pub fn write(w: &mut Write) -> io::Result<()> { // while it doesn't requires lock for work as everything is // local, it still displays much nicer backtraces when a // couple of threads panic simultaneously - static LOCK: StaticMutex = StaticMutex::new(); - let _g = LOCK.lock(); + static LOCK: Mutex = Mutex::new(); + unsafe { + LOCK.lock(); - writeln!(w, "stack backtrace:")?; - // 100 lines should be enough - const SIZE: usize = 100; - let mut buf: [*mut libc::c_void; SIZE] = unsafe { mem::zeroed() }; - let cnt = unsafe { backtrace(buf.as_mut_ptr(), SIZE as libc::c_int) as usize}; + writeln!(w, "stack backtrace:")?; + // 100 lines should be enough + const SIZE: usize = 100; + let mut buf: [*mut libc::c_void; SIZE] = mem::zeroed(); + let cnt = backtrace(buf.as_mut_ptr(), SIZE as libc::c_int) as usize; - // skipping the first one as it is write itself - for i in 1..cnt { - print(w, i as isize, buf[i], buf[i])? + // skipping the first one as it is write itself + for i in 1..cnt { + print(w, i as isize, buf[i], buf[i])? + } + LOCK.unlock(); } Ok(()) } diff --git a/src/libstd/sys/unix/backtrace/tracing/gcc_s.rs b/src/libstd/sys/unix/backtrace/tracing/gcc_s.rs index 8d88091716..c1b45620ab 100644 --- a/src/libstd/sys/unix/backtrace/tracing/gcc_s.rs +++ b/src/libstd/sys/unix/backtrace/tracing/gcc_s.rs @@ -12,9 +12,10 @@ use io; use io::prelude::*; use libc; use mem; -use sync::StaticMutex; +use sys_common::mutex::Mutex; use super::super::printing::print; +use unwind as uw; #[inline(never)] // if we know this is a function call, we can skip it when // tracing @@ -30,24 +31,28 @@ pub fn write(w: &mut Write) -> io::Result<()> { // is semi-reasonable in terms of printing anyway, and we know that all // I/O done here is blocking I/O, not green I/O, so we don't have to // worry about this being a native vs green mutex. 
- static LOCK: StaticMutex = StaticMutex::new(); - let _g = LOCK.lock(); - - writeln!(w, "stack backtrace:")?; - - let mut cx = Context { writer: w, last_error: None, idx: 0 }; - return match unsafe { - uw::_Unwind_Backtrace(trace_fn, - &mut cx as *mut Context as *mut libc::c_void) - } { - uw::_URC_NO_REASON => { - match cx.last_error { - Some(err) => Err(err), - None => Ok(()) + static LOCK: Mutex = Mutex::new(); + unsafe { + LOCK.lock(); + + writeln!(w, "stack backtrace:")?; + + let mut cx = Context { writer: w, last_error: None, idx: 0 }; + let ret = match { + uw::_Unwind_Backtrace(trace_fn, + &mut cx as *mut Context as *mut libc::c_void) + } { + uw::_URC_NO_REASON => { + match cx.last_error { + Some(err) => Err(err), + None => Ok(()) + } } - } - _ => Ok(()), - }; + _ => Ok(()), + }; + LOCK.unlock(); + return ret + } extern fn trace_fn(ctx: *mut uw::_Unwind_Context, arg: *mut libc::c_void) -> uw::_Unwind_Reason_Code { @@ -102,126 +107,3 @@ pub fn write(w: &mut Write) -> io::Result<()> { uw::_URC_NO_REASON } } - -/// Unwind library interface used for backtraces -/// -/// Note that dead code is allowed as here are just bindings -/// iOS doesn't use all of them it but adding more -/// platform-specific configs pollutes the code too much -#[allow(non_camel_case_types)] -#[allow(non_snake_case)] -mod uw { - pub use self::_Unwind_Reason_Code::*; - - use libc; - - #[repr(C)] - pub enum _Unwind_Reason_Code { - _URC_NO_REASON = 0, - _URC_FOREIGN_EXCEPTION_CAUGHT = 1, - _URC_FATAL_PHASE2_ERROR = 2, - _URC_FATAL_PHASE1_ERROR = 3, - _URC_NORMAL_STOP = 4, - _URC_END_OF_STACK = 5, - _URC_HANDLER_FOUND = 6, - _URC_INSTALL_CONTEXT = 7, - _URC_CONTINUE_UNWIND = 8, - _URC_FAILURE = 9, // used only by ARM EABI - } - - pub enum _Unwind_Context {} - - pub type _Unwind_Trace_Fn = - extern fn(ctx: *mut _Unwind_Context, - arg: *mut libc::c_void) -> _Unwind_Reason_Code; - - extern { - // No native _Unwind_Backtrace on iOS - #[cfg(not(all(target_os = "ios", target_arch = "arm")))] - pub fn _Unwind_Backtrace(trace: _Unwind_Trace_Fn, - trace_argument: *mut libc::c_void) - -> _Unwind_Reason_Code; - - // available since GCC 4.2.0, should be fine for our purpose - #[cfg(all(not(all(target_os = "android", target_arch = "arm")), - not(all(target_os = "linux", target_arch = "arm"))))] - pub fn _Unwind_GetIPInfo(ctx: *mut _Unwind_Context, - ip_before_insn: *mut libc::c_int) - -> libc::uintptr_t; - - #[cfg(all(not(target_os = "android"), - not(all(target_os = "linux", target_arch = "arm"))))] - pub fn _Unwind_FindEnclosingFunction(pc: *mut libc::c_void) - -> *mut libc::c_void; - } - - // On android, the function _Unwind_GetIP is a macro, and this is the - // expansion of the macro. This is all copy/pasted directly from the - // header file with the definition of _Unwind_GetIP. 
- #[cfg(any(all(target_os = "android", target_arch = "arm"), - all(target_os = "linux", target_arch = "arm")))] - pub unsafe fn _Unwind_GetIP(ctx: *mut _Unwind_Context) -> libc::uintptr_t { - #[repr(C)] - enum _Unwind_VRS_Result { - _UVRSR_OK = 0, - _UVRSR_NOT_IMPLEMENTED = 1, - _UVRSR_FAILED = 2, - } - #[repr(C)] - enum _Unwind_VRS_RegClass { - _UVRSC_CORE = 0, - _UVRSC_VFP = 1, - _UVRSC_FPA = 2, - _UVRSC_WMMXD = 3, - _UVRSC_WMMXC = 4, - } - #[repr(C)] - enum _Unwind_VRS_DataRepresentation { - _UVRSD_UINT32 = 0, - _UVRSD_VFPX = 1, - _UVRSD_FPAX = 2, - _UVRSD_UINT64 = 3, - _UVRSD_FLOAT = 4, - _UVRSD_DOUBLE = 5, - } - - type _Unwind_Word = libc::c_uint; - extern { - fn _Unwind_VRS_Get(ctx: *mut _Unwind_Context, - klass: _Unwind_VRS_RegClass, - word: _Unwind_Word, - repr: _Unwind_VRS_DataRepresentation, - data: *mut libc::c_void) - -> _Unwind_VRS_Result; - } - - let mut val: _Unwind_Word = 0; - let ptr = &mut val as *mut _Unwind_Word; - let _ = _Unwind_VRS_Get(ctx, _Unwind_VRS_RegClass::_UVRSC_CORE, 15, - _Unwind_VRS_DataRepresentation::_UVRSD_UINT32, - ptr as *mut libc::c_void); - (val & !1) as libc::uintptr_t - } - - // This function doesn't exist on Android or ARM/Linux, so make it same - // to _Unwind_GetIP - #[cfg(any(all(target_os = "android", target_arch = "arm"), - all(target_os = "linux", target_arch = "arm")))] - pub unsafe fn _Unwind_GetIPInfo(ctx: *mut _Unwind_Context, - ip_before_insn: *mut libc::c_int) - -> libc::uintptr_t - { - *ip_before_insn = 0; - _Unwind_GetIP(ctx) - } - - // This function also doesn't exist on Android or ARM/Linux, so make it - // a no-op - #[cfg(any(target_os = "android", - all(target_os = "linux", target_arch = "arm")))] - pub unsafe fn _Unwind_FindEnclosingFunction(pc: *mut libc::c_void) - -> *mut libc::c_void - { - pc - } -} diff --git a/src/libstd/sys/unix/ext/fs.rs b/src/libstd/sys/unix/ext/fs.rs index a152845886..bb90a97743 100644 --- a/src/libstd/sys/unix/ext/fs.rs +++ b/src/libstd/sys/unix/ext/fs.rs @@ -88,9 +88,7 @@ pub trait OpenOptionsExt { /// } /// let file = options.open("foo.txt"); /// ``` - #[unstable(feature = "expand_open_options", - reason = "recently added", - issue = "30014")] + #[stable(feature = "open_options_ext", since = "1.10.0")] fn custom_flags(&mut self, flags: i32) -> &mut Self; } diff --git a/src/libstd/sys/unix/ext/net.rs b/src/libstd/sys/unix/ext/net.rs index a74f7ea13b..b5287cce48 100644 --- a/src/libstd/sys/unix/ext/net.rs +++ b/src/libstd/sys/unix/ext/net.rs @@ -7,7 +7,8 @@ // , at your // option. This file may not be copied, modified, or distributed // except according to those terms. -#![unstable(feature = "unix_socket", reason = "newly added", issue = "32312")] + +#![stable(feature = "unix_socket", since = "1.10.0")] //! Unix-specific networking functionality @@ -75,6 +76,7 @@ enum AddressKind<'a> { /// An address associated with a Unix socket. #[derive(Clone)] +#[stable(feature = "unix_socket", since = "1.10.0")] pub struct SocketAddr { addr: libc::sockaddr_un, len: libc::socklen_t, @@ -109,6 +111,7 @@ impl SocketAddr { } /// Returns true iff the address is unnamed. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn is_unnamed(&self) -> bool { if let AddressKind::Unnamed = self.address() { true @@ -118,6 +121,7 @@ impl SocketAddr { } /// Returns the contents of this address if it is a `pathname` address. 
+ #[stable(feature = "unix_socket", since = "1.10.0")] pub fn as_pathname(&self) -> Option<&Path> { if let AddressKind::Pathname(path) = self.address() { Some(path) @@ -141,6 +145,7 @@ impl SocketAddr { } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl fmt::Debug for SocketAddr { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { match self.address() { @@ -168,8 +173,6 @@ impl<'a> fmt::Display for AsciiEscaped<'a> { /// # Examples /// /// ```rust,no_run -/// #![feature(unix_socket)] -/// /// use std::os::unix::net::UnixStream; /// use std::io::prelude::*; /// @@ -179,8 +182,10 @@ impl<'a> fmt::Display for AsciiEscaped<'a> { /// stream.read_to_string(&mut response).unwrap(); /// println!("{}", response); /// ``` +#[stable(feature = "unix_socket", since = "1.10.0")] pub struct UnixStream(Socket); +#[stable(feature = "unix_socket", since = "1.10.0")] impl fmt::Debug for UnixStream { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let mut builder = fmt.debug_struct("UnixStream"); @@ -197,6 +202,7 @@ impl fmt::Debug for UnixStream { impl UnixStream { /// Connects to the socket named by `path`. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn connect>(path: P) -> io::Result { fn inner(path: &Path) -> io::Result { unsafe { @@ -213,6 +219,7 @@ impl UnixStream { /// Creates an unnamed pair of connected sockets. /// /// Returns two `UnixStream`s which are connected to each other. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn pair() -> io::Result<(UnixStream, UnixStream)> { let (i1, i2) = Socket::new_pair(libc::AF_UNIX, libc::SOCK_STREAM)?; Ok((UnixStream(i1), UnixStream(i2))) @@ -224,16 +231,19 @@ impl UnixStream { /// object references. Both handles will read and write the same stream of /// data, and options set on one stream will be propogated to the other /// stream. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn try_clone(&self) -> io::Result { self.0.duplicate().map(UnixStream) } /// Returns the socket address of the local half of this connection. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn local_addr(&self) -> io::Result { SocketAddr::new(|addr, len| unsafe { libc::getsockname(*self.0.as_inner(), addr, len) }) } /// Returns the socket address of the remote half of this connection. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn peer_addr(&self) -> io::Result { SocketAddr::new(|addr, len| unsafe { libc::getpeername(*self.0.as_inner(), addr, len) }) } @@ -243,6 +253,7 @@ impl UnixStream { /// If the provided value is `None`, then `read` calls will block /// indefinitely. It is an error to pass the zero `Duration` to this /// method. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn set_read_timeout(&self, timeout: Option) -> io::Result<()> { self.0.set_timeout(timeout, libc::SO_RCVTIMEO) } @@ -252,26 +263,31 @@ impl UnixStream { /// If the provided value is `None`, then `write` calls will block /// indefinitely. It is an error to pass the zero `Duration` to this /// method. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn set_write_timeout(&self, timeout: Option) -> io::Result<()> { self.0.set_timeout(timeout, libc::SO_SNDTIMEO) } /// Returns the read timeout of this socket. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn read_timeout(&self) -> io::Result> { self.0.timeout(libc::SO_RCVTIMEO) } /// Returns the write timeout of this socket. 
+ #[stable(feature = "unix_socket", since = "1.10.0")] pub fn write_timeout(&self) -> io::Result> { self.0.timeout(libc::SO_SNDTIMEO) } /// Moves the socket into or out of nonblocking mode. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { self.0.set_nonblocking(nonblocking) } /// Returns the value of the `SO_ERROR` option. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn take_error(&self) -> io::Result> { self.0.take_error() } @@ -281,11 +297,13 @@ impl UnixStream { /// This function will cause all pending and future I/O calls on the /// specified portions to immediately return with an appropriate value /// (see the documentation of `Shutdown`). + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn shutdown(&self, how: Shutdown) -> io::Result<()> { self.0.shutdown(how) } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl io::Read for UnixStream { fn read(&mut self, buf: &mut [u8]) -> io::Result { io::Read::read(&mut &*self, buf) @@ -296,6 +314,7 @@ impl io::Read for UnixStream { } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl<'a> io::Read for &'a UnixStream { fn read(&mut self, buf: &mut [u8]) -> io::Result { self.0.read(buf) @@ -306,6 +325,7 @@ impl<'a> io::Read for &'a UnixStream { } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl io::Write for UnixStream { fn write(&mut self, buf: &[u8]) -> io::Result { io::Write::write(&mut &*self, buf) @@ -316,6 +336,7 @@ impl io::Write for UnixStream { } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl<'a> io::Write for &'a UnixStream { fn write(&mut self, buf: &[u8]) -> io::Result { self.0.write(buf) @@ -326,18 +347,21 @@ impl<'a> io::Write for &'a UnixStream { } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl AsRawFd for UnixStream { fn as_raw_fd(&self) -> RawFd { *self.0.as_inner() } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl FromRawFd for UnixStream { unsafe fn from_raw_fd(fd: RawFd) -> UnixStream { UnixStream(Socket::from_inner(fd)) } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl IntoRawFd for UnixStream { fn into_raw_fd(self) -> RawFd { self.0.into_inner() @@ -349,8 +373,6 @@ impl IntoRawFd for UnixStream { /// # Examples /// /// ```rust,no_run -/// #![feature(unix_socket)] -/// /// use std::thread; /// use std::os::unix::net::{UnixStream, UnixListener}; /// @@ -377,8 +399,10 @@ impl IntoRawFd for UnixStream { /// // close the listener socket /// drop(listener); /// ``` +#[stable(feature = "unix_socket", since = "1.10.0")] pub struct UnixListener(Socket); +#[stable(feature = "unix_socket", since = "1.10.0")] impl fmt::Debug for UnixListener { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let mut builder = fmt.debug_struct("UnixListener"); @@ -392,6 +416,7 @@ impl fmt::Debug for UnixListener { impl UnixListener { /// Creates a new `UnixListener` bound to the specified socket. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn bind>(path: P) -> io::Result { fn inner(path: &Path) -> io::Result { unsafe { @@ -412,6 +437,7 @@ impl UnixListener { /// This function will block the calling thread until a new Unix connection /// is established. When established, the corersponding `UnixStream` and /// the remote peer's address will be returned. 
+ #[stable(feature = "unix_socket", since = "1.10.0")] pub fn accept(&self) -> io::Result<(UnixStream, SocketAddr)> { let mut storage: libc::sockaddr_un = unsafe { mem::zeroed() }; let mut len = mem::size_of_val(&storage) as libc::socklen_t; @@ -425,21 +451,25 @@ impl UnixListener { /// The returned `UnixListener` is a reference to the same socket that this /// object references. Both handles can be used to accept incoming /// connections and options set on one listener will affect the other. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn try_clone(&self) -> io::Result { self.0.duplicate().map(UnixListener) } /// Returns the local socket address of this listener. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn local_addr(&self) -> io::Result { SocketAddr::new(|addr, len| unsafe { libc::getsockname(*self.0.as_inner(), addr, len) }) } /// Moves the socket into or out of nonblocking mode. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { self.0.set_nonblocking(nonblocking) } /// Returns the value of the `SO_ERROR` option. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn take_error(&self) -> io::Result> { self.0.take_error() } @@ -448,29 +478,34 @@ impl UnixListener { /// /// The iterator will never return `None` and will also not yield the /// peer's `SocketAddr` structure. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn incoming<'a>(&'a self) -> Incoming<'a> { Incoming { listener: self } } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl AsRawFd for UnixListener { fn as_raw_fd(&self) -> RawFd { *self.0.as_inner() } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl FromRawFd for UnixListener { unsafe fn from_raw_fd(fd: RawFd) -> UnixListener { UnixListener(Socket::from_inner(fd)) } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl IntoRawFd for UnixListener { fn into_raw_fd(self) -> RawFd { self.0.into_inner() } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl<'a> IntoIterator for &'a UnixListener { type Item = io::Result; type IntoIter = Incoming<'a>; @@ -484,10 +519,12 @@ impl<'a> IntoIterator for &'a UnixListener { /// /// It will never return `None`. #[derive(Debug)] +#[stable(feature = "unix_socket", since = "1.10.0")] pub struct Incoming<'a> { listener: &'a UnixListener, } +#[stable(feature = "unix_socket", since = "1.10.0")] impl<'a> Iterator for Incoming<'a> { type Item = io::Result; @@ -505,8 +542,6 @@ impl<'a> Iterator for Incoming<'a> { /// # Examples /// /// ```rust,no_run -/// #![feature(unix_socket)] -/// /// use std::os::unix::net::UnixDatagram; /// /// let socket = UnixDatagram::bind("/path/to/my/socket").unwrap(); @@ -515,8 +550,10 @@ impl<'a> Iterator for Incoming<'a> { /// let (count, address) = socket.recv_from(&mut buf).unwrap(); /// println!("socket {:?} sent {:?}", address, &buf[..count]); /// ``` +#[stable(feature = "unix_socket", since = "1.10.0")] pub struct UnixDatagram(Socket); +#[stable(feature = "unix_socket", since = "1.10.0")] impl fmt::Debug for UnixDatagram { fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { let mut builder = fmt.debug_struct("UnixDatagram"); @@ -533,6 +570,7 @@ impl fmt::Debug for UnixDatagram { impl UnixDatagram { /// Creates a Unix datagram socket bound to the given path. 
+ #[stable(feature = "unix_socket", since = "1.10.0")] pub fn bind>(path: P) -> io::Result { fn inner(path: &Path) -> io::Result { unsafe { @@ -548,6 +586,7 @@ impl UnixDatagram { } /// Creates a Unix Datagram socket which is not bound to any address. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn unbound() -> io::Result { let inner = Socket::new_raw(libc::AF_UNIX, libc::SOCK_DGRAM)?; Ok(UnixDatagram(inner)) @@ -556,6 +595,7 @@ impl UnixDatagram { /// Create an unnamed pair of connected sockets. /// /// Returns two `UnixDatagrams`s which are connected to each other. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn pair() -> io::Result<(UnixDatagram, UnixDatagram)> { let (i1, i2) = Socket::new_pair(libc::AF_UNIX, libc::SOCK_DGRAM)?; Ok((UnixDatagram(i1), UnixDatagram(i2))) @@ -565,6 +605,7 @@ impl UnixDatagram { /// /// The `send` method may be used to send data to the specified address. /// `recv` and `recv_from` will only receive data from that address. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn connect>(&self, path: P) -> io::Result<()> { fn inner(d: &UnixDatagram, path: &Path) -> io::Result<()> { unsafe { @@ -583,11 +624,13 @@ impl UnixDatagram { /// The returned `UnixListener` is a reference to the same socket that this /// object references. Both handles can be used to accept incoming /// connections and options set on one listener will affect the other. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn try_clone(&self) -> io::Result { self.0.duplicate().map(UnixDatagram) } /// Returns the address of this socket. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn local_addr(&self) -> io::Result { SocketAddr::new(|addr, len| unsafe { libc::getsockname(*self.0.as_inner(), addr, len) }) } @@ -595,6 +638,7 @@ impl UnixDatagram { /// Returns the address of this socket's peer. /// /// The `connect` method will connect the socket to a peer. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn peer_addr(&self) -> io::Result { SocketAddr::new(|addr, len| unsafe { libc::getpeername(*self.0.as_inner(), addr, len) }) } @@ -603,6 +647,7 @@ impl UnixDatagram { /// /// On success, returns the number of bytes read and the address from /// whence the data came. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn recv_from(&self, buf: &mut [u8]) -> io::Result<(usize, SocketAddr)> { let mut count = 0; let addr = SocketAddr::new(|addr, len| { @@ -629,6 +674,7 @@ impl UnixDatagram { /// Receives data from the socket. /// /// On success, returns the number of bytes read. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn recv(&self, buf: &mut [u8]) -> io::Result { self.0.read(buf) } @@ -636,6 +682,7 @@ impl UnixDatagram { /// Sends data on the socket to the specified address. /// /// On success, returns the number of bytes written. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn send_to>(&self, buf: &[u8], path: P) -> io::Result { fn inner(d: &UnixDatagram, buf: &[u8], path: &Path) -> io::Result { unsafe { @@ -659,6 +706,7 @@ impl UnixDatagram { /// will return an error if the socket has not already been connected. /// /// On success, returns the number of bytes written. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn send(&self, buf: &[u8]) -> io::Result { self.0.write(buf) } @@ -668,6 +716,7 @@ impl UnixDatagram { /// If the provided value is `None`, then `recv` and `recv_from` calls will /// block indefinitely. It is an error to pass the zero `Duration` to this /// method. 
+ #[stable(feature = "unix_socket", since = "1.10.0")] pub fn set_read_timeout(&self, timeout: Option) -> io::Result<()> { self.0.set_timeout(timeout, libc::SO_RCVTIMEO) } @@ -677,26 +726,31 @@ impl UnixDatagram { /// If the provided value is `None`, then `send` and `send_to` calls will /// block indefinitely. It is an error to pass the zero `Duration` to this /// method. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn set_write_timeout(&self, timeout: Option) -> io::Result<()> { self.0.set_timeout(timeout, libc::SO_SNDTIMEO) } /// Returns the read timeout of this socket. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn read_timeout(&self) -> io::Result> { self.0.timeout(libc::SO_RCVTIMEO) } /// Returns the write timeout of this socket. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn write_timeout(&self) -> io::Result> { self.0.timeout(libc::SO_SNDTIMEO) } /// Moves the socket into or out of nonblocking mode. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn set_nonblocking(&self, nonblocking: bool) -> io::Result<()> { self.0.set_nonblocking(nonblocking) } /// Returns the value of the `SO_ERROR` option. + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn take_error(&self) -> io::Result> { self.0.take_error() } @@ -706,23 +760,27 @@ impl UnixDatagram { /// This function will cause all pending and future I/O calls on the /// specified portions to immediately return with an appropriate value /// (see the documentation of `Shutdown`). + #[stable(feature = "unix_socket", since = "1.10.0")] pub fn shutdown(&self, how: Shutdown) -> io::Result<()> { self.0.shutdown(how) } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl AsRawFd for UnixDatagram { fn as_raw_fd(&self) -> RawFd { *self.0.as_inner() } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl FromRawFd for UnixDatagram { unsafe fn from_raw_fd(fd: RawFd) -> UnixDatagram { UnixDatagram(Socket::from_inner(fd)) } } +#[stable(feature = "unix_socket", since = "1.10.0")] impl IntoRawFd for UnixDatagram { fn into_raw_fd(self) -> RawFd { self.0.into_inner() diff --git a/src/libstd/sys/unix/ext/process.rs b/src/libstd/sys/unix/ext/process.rs index 7f31cf9f3b..b0fed2f469 100644 --- a/src/libstd/sys/unix/ext/process.rs +++ b/src/libstd/sys/unix/ext/process.rs @@ -132,6 +132,11 @@ impl CommandExt for process::Command { /// Unix-specific extensions to `std::process::ExitStatus` #[stable(feature = "rust1", since = "1.0.0")] pub trait ExitStatusExt { + /// Creates a new `ExitStatus` from the raw underlying `i32` return value of + /// a process. + #[unstable(feature = "exit_status_from", issue = "32713")] + fn from_raw(raw: i32) -> Self; + /// If the process was terminated by a signal, returns that signal. #[stable(feature = "rust1", since = "1.0.0")] fn signal(&self) -> Option; @@ -139,6 +144,10 @@ pub trait ExitStatusExt { #[stable(feature = "rust1", since = "1.0.0")] impl ExitStatusExt for process::ExitStatus { + fn from_raw(raw: i32) -> Self { + process::ExitStatus::from_inner(From::from(raw)) + } + fn signal(&self) -> Option { self.as_inner().signal() } diff --git a/src/libstd/sys/unix/fs.rs b/src/libstd/sys/unix/fs.rs index 810a34478c..7f23ae53fc 100644 --- a/src/libstd/sys/unix/fs.rs +++ b/src/libstd/sys/unix/fs.rs @@ -9,7 +9,6 @@ // except according to those terms. 
use prelude::v1::*; -use io::prelude::*; use os::unix::prelude::*; use ffi::{CString, CStr, OsString, OsStr}; @@ -28,7 +27,7 @@ use sys_common::{AsInner, FromInner}; #[cfg(any(target_os = "linux", target_os = "emscripten"))] use libc::{stat64, fstat64, lstat64, off64_t, ftruncate64, lseek64, dirent64, readdir64_r, open64}; #[cfg(target_os = "android")] -use libc::{stat as stat64, fstat as fstat64, lstat as lstat64, off64_t, ftruncate64, lseek64, +use libc::{stat as stat64, fstat as fstat64, lstat as lstat64, off64_t, lseek64, dirent as dirent64, open as open64}; #[cfg(not(any(target_os = "linux", target_os = "emscripten", @@ -101,31 +100,6 @@ impl FileAttr { } } -#[cfg(any(target_os = "ios", target_os = "macos"))] -// FIXME: update SystemTime to store a timespec and don't lose precision -impl FileAttr { - pub fn modified(&self) -> io::Result { - Ok(SystemTime::from(libc::timeval { - tv_sec: self.stat.st_mtime, - tv_usec: (self.stat.st_mtime_nsec / 1000) as libc::suseconds_t, - })) - } - - pub fn accessed(&self) -> io::Result { - Ok(SystemTime::from(libc::timeval { - tv_sec: self.stat.st_atime, - tv_usec: (self.stat.st_atime_nsec / 1000) as libc::suseconds_t, - })) - } - - pub fn created(&self) -> io::Result { - Ok(SystemTime::from(libc::timeval { - tv_sec: self.stat.st_birthtime, - tv_usec: (self.stat.st_birthtime_nsec / 1000) as libc::suseconds_t, - })) - } -} - #[cfg(target_os = "netbsd")] impl FileAttr { pub fn modified(&self) -> io::Result { @@ -150,7 +124,7 @@ impl FileAttr { } } -#[cfg(not(any(target_os = "ios", target_os = "macos", target_os = "netbsd")))] +#[cfg(not(target_os = "netbsd"))] impl FileAttr { pub fn modified(&self) -> io::Result { Ok(SystemTime::from(libc::timespec { @@ -168,7 +142,9 @@ impl FileAttr { #[cfg(any(target_os = "bitrig", target_os = "freebsd", - target_os = "openbsd"))] + target_os = "openbsd", + target_os = "macos", + target_os = "ios"))] pub fn created(&self) -> io::Result { Ok(SystemTime::from(libc::timespec { tv_sec: self.stat.st_birthtime as libc::time_t, @@ -178,7 +154,9 @@ impl FileAttr { #[cfg(not(any(target_os = "bitrig", target_os = "freebsd", - target_os = "openbsd")))] + target_os = "openbsd", + target_os = "macos", + target_os = "ios")))] pub fn created(&self) -> io::Result { Err(io::Error::new(io::ErrorKind::Other, "creation time is not available on this platform \ @@ -476,10 +454,13 @@ impl File { } pub fn truncate(&self, size: u64) -> io::Result<()> { - cvt_r(|| unsafe { + #[cfg(target_os = "android")] + return ::sys::android::ftruncate64(self.0.raw(), size); + + #[cfg(not(target_os = "android"))] + return cvt_r(|| unsafe { ftruncate64(self.0.raw(), size as off64_t) - })?; - Ok(()) + }).map(|_| ()); } pub fn read(&self, buf: &mut [u8]) -> io::Result { diff --git a/src/libstd/sys/unix/mod.rs b/src/libstd/sys/unix/mod.rs index f8b2d4dd23..12a877f747 100644 --- a/src/libstd/sys/unix/mod.rs +++ b/src/libstd/sys/unix/mod.rs @@ -31,6 +31,7 @@ use ops::Neg; #[macro_use] pub mod weak; +pub mod android; pub mod backtrace; pub mod condvar; pub mod ext; @@ -91,37 +92,8 @@ pub fn init() { unsafe fn reset_sigpipe() {} } -// Currently the minimum supported Android version of the standard library is -// API level 18 (android-18). Back in those days [1] the `signal` function was -// just an inline wrapper around `bsd_signal`, but starting in API level -// android-20 the `signal` symbols was introduced [2]. Finally, in android-21 -// the API `bsd_signal` was removed [3]. 
-// -// Basically this means that if we want to be binary compatible with multiple -// Android releases (oldest being 18 and newest being 21) then we need to check -// for both symbols and not actually link against either. -// -// Note that if we're not on android we just link against the `android` symbol -// itself. -// -// [1]: https://chromium.googlesource.com/android_tools/+/20ee6d20/ndk/platforms -// /android-18/arch-arm/usr/include/signal.h -// [2]: https://chromium.googlesource.com/android_tools/+/fbd420/ndk_experimental -// /platforms/android-20/arch-arm -// /usr/include/signal.h -// [3]: https://chromium.googlesource.com/android_tools/+/20ee6d/ndk/platforms -// /android-21/arch-arm/usr/include/signal.h #[cfg(target_os = "android")] -unsafe fn signal(signum: libc::c_int, - handler: libc::sighandler_t) -> libc::sighandler_t { - weak!(fn signal(libc::c_int, libc::sighandler_t) -> libc::sighandler_t); - weak!(fn bsd_signal(libc::c_int, libc::sighandler_t) -> libc::sighandler_t); - - let f = signal.get().or_else(|| bsd_signal.get()); - let f = f.expect("neither `signal` nor `bsd_signal` symbols found"); - f(signum, handler) -} - +pub use sys::android::signal; #[cfg(not(target_os = "android"))] pub use libc::signal; diff --git a/src/libstd/sys/unix/os.rs b/src/libstd/sys/unix/os.rs index 94ebbd70ae..21ce6b19ce 100644 --- a/src/libstd/sys/unix/os.rs +++ b/src/libstd/sys/unix/os.rs @@ -27,13 +27,13 @@ use path::{self, PathBuf}; use ptr; use slice; use str; -use sync::StaticMutex; +use sys_common::mutex::Mutex; use sys::cvt; use sys::fd; use vec; const TMPBUF_SZ: usize = 128; -static ENV_LOCK: StaticMutex = StaticMutex::new(); +static ENV_LOCK: Mutex = Mutex::new(); /// Returns the platform-specific value of errno #[cfg(not(target_os = "dragonfly"))] @@ -434,10 +434,11 @@ pub unsafe fn environ() -> *mut *const *const c_char { /// Returns a vector of (variable, value) byte-vector pairs for all the /// environment variables of the current process. 
pub fn env() -> Env { - let _g = ENV_LOCK.lock(); - return unsafe { + unsafe { + ENV_LOCK.lock(); let mut environ = *environ(); if environ == ptr::null() { + ENV_LOCK.unlock(); panic!("os::env() failure getting env string from OS: {}", io::Error::last_os_error()); } @@ -448,8 +449,13 @@ pub fn env() -> Env { } environ = environ.offset(1); } - Env { iter: result.into_iter(), _dont_send_or_sync_me: ptr::null_mut() } - }; + let ret = Env { + iter: result.into_iter(), + _dont_send_or_sync_me: ptr::null_mut(), + }; + ENV_LOCK.unlock(); + return ret + } fn parse(input: &[u8]) -> Option<(OsString, OsString)> { // Strategy (copied from glibc): Variable name and value are separated @@ -471,32 +477,40 @@ pub fn getenv(k: &OsStr) -> io::Result> { // environment variables with a nul byte can't be set, so their value is // always None as well let k = CString::new(k.as_bytes())?; - let _g = ENV_LOCK.lock(); - Ok(unsafe { + unsafe { + ENV_LOCK.lock(); let s = libc::getenv(k.as_ptr()) as *const _; - if s.is_null() { + let ret = if s.is_null() { None } else { Some(OsStringExt::from_vec(CStr::from_ptr(s).to_bytes().to_vec())) - } - }) + }; + ENV_LOCK.unlock(); + return Ok(ret) + } } pub fn setenv(k: &OsStr, v: &OsStr) -> io::Result<()> { let k = CString::new(k.as_bytes())?; let v = CString::new(v.as_bytes())?; - let _g = ENV_LOCK.lock(); - cvt(unsafe { - libc::setenv(k.as_ptr(), v.as_ptr(), 1) - }).map(|_| ()) + + unsafe { + ENV_LOCK.lock(); + let ret = cvt(libc::setenv(k.as_ptr(), v.as_ptr(), 1)).map(|_| ()); + ENV_LOCK.unlock(); + return ret + } } pub fn unsetenv(n: &OsStr) -> io::Result<()> { let nbuf = CString::new(n.as_bytes())?; - let _g = ENV_LOCK.lock(); - cvt(unsafe { - libc::unsetenv(nbuf.as_ptr()) - }).map(|_| ()) + + unsafe { + ENV_LOCK.lock(); + let ret = cvt(libc::unsetenv(nbuf.as_ptr())).map(|_| ()); + ENV_LOCK.unlock(); + return ret + } } pub fn page_size() -> usize { diff --git a/src/libstd/sys/unix/process.rs b/src/libstd/sys/unix/process.rs index 270c2096b2..d571916754 100644 --- a/src/libstd/sys/unix/process.rs +++ b/src/libstd/sys/unix/process.rs @@ -147,7 +147,7 @@ impl Command { let new_key = pair_to_key(key, val, &mut self.saw_nul); let (map, envp) = self.init_env_map(); - // If `key` is already present then we we just update `envp` in place + // If `key` is already present then we just update `envp` in place // (and store the owned value), but if it's not there we override the // trailing NULL pointer, add a new NULL pointer, and store where we // were located. 
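// The os.rs changes above route `getenv`, `setenv`, and `unsetenv` through
// the raw `ENV_LOCK` mutex; these are the primitives behind the public
// `std::env` calls. A minimal sketch of that public surface (illustrative
// only, not part of this patch):
use std::env;

fn main() {
    // set_var / var / remove_var bottom out in the patched wrappers, each
    // of which takes ENV_LOCK around the underlying libc call.
    env::set_var("DEMO_KEY", "demo value");
    assert_eq!(env::var("DEMO_KEY").ok(), Some("demo value".to_string()));

    env::remove_var("DEMO_KEY");
    assert!(env::var("DEMO_KEY").is_err());
}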
@@ -550,6 +550,12 @@ impl ExitStatus { } } +impl From for ExitStatus { + fn from(a: c_int) -> ExitStatus { + ExitStatus(a) + } +} + impl fmt::Display for ExitStatus { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { if let Some(code) = self.code() { diff --git a/src/libstd/sys/unix/rand.rs b/src/libstd/sys/unix/rand.rs index 92c3bf8829..25a7a3ce50 100644 --- a/src/libstd/sys/unix/rand.rs +++ b/src/libstd/sys/unix/rand.rs @@ -41,8 +41,10 @@ mod imp { #[cfg(target_arch = "aarch64")] const NR_GETRANDOM: libc::c_long = 278; + const GRND_NONBLOCK: libc::c_uint = 0x0001; + unsafe { - libc::syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), 0) + libc::syscall(NR_GETRANDOM, buf.as_mut_ptr(), buf.len(), GRND_NONBLOCK) } } @@ -63,6 +65,19 @@ mod imp { let err = errno() as libc::c_int; if err == libc::EINTR { continue; + } else if err == libc::EAGAIN { + // if getrandom() returns EAGAIN it would have blocked + // because the non-blocking pool (urandom) has not + // initialized in the kernel yet due to a lack of entropy + // the fallback we do here is to avoid blocking applications + // which could depend on this call without ever knowing + // they do and don't have a work around. The PRNG of + // /dev/urandom will still be used but not over a completely + // full entropy pool + let reader = File::open("/dev/urandom").expect("Unable to open /dev/urandom"); + let mut reader_rng = ReaderRng::new(reader); + reader_rng.fill_bytes(& mut v[read..]); + read += v.len() as usize; } else { panic!("unexpected getrandom error: {}", err); } diff --git a/src/libstd/sys/unix/stack_overflow.rs b/src/libstd/sys/unix/stack_overflow.rs index 1553aba35a..22d47ba0f6 100644 --- a/src/libstd/sys/unix/stack_overflow.rs +++ b/src/libstd/sys/unix/stack_overflow.rs @@ -64,7 +64,7 @@ mod imp { unsafe fn siginfo_si_addr(info: *mut libc::siginfo_t) -> usize { #[repr(C)] struct siginfo_t { - a: [libc::c_int; 3], // si_signo, si_code, si_errno, + a: [libc::c_int; 3], // si_signo, si_errno, si_code si_addr: *mut libc::c_void, } diff --git a/src/libstd/sys/unix/thread.rs b/src/libstd/sys/unix/thread.rs index 6d966a0f69..cb34d1a5fb 100644 --- a/src/libstd/sys/unix/thread.rs +++ b/src/libstd/sys/unix/thread.rs @@ -164,6 +164,7 @@ impl Drop for Thread { } #[cfg(all(not(all(target_os = "linux", not(target_env = "musl"))), + not(target_os = "freebsd"), not(target_os = "macos"), not(target_os = "bitrig"), not(all(target_os = "netbsd", not(target_vendor = "rumprun"))), @@ -177,6 +178,7 @@ pub mod guard { #[cfg(any(all(target_os = "linux", not(target_env = "musl")), + target_os = "freebsd", target_os = "macos", target_os = "bitrig", all(target_os = "netbsd", not(target_vendor = "rumprun")), @@ -199,12 +201,17 @@ pub mod guard { current().map(|s| s as *mut libc::c_void) } - #[cfg(any(target_os = "linux", target_os = "android", target_os = "netbsd"))] + #[cfg(any(target_os = "android", target_os = "freebsd", + target_os = "linux", target_os = "netbsd"))] unsafe fn get_stack_start() -> Option<*mut libc::c_void> { let mut ret = None; let mut attr: libc::pthread_attr_t = ::mem::zeroed(); assert_eq!(libc::pthread_attr_init(&mut attr), 0); - if libc::pthread_getattr_np(libc::pthread_self(), &mut attr) == 0 { + #[cfg(target_os = "freebsd")] + let e = libc::pthread_attr_get_np(libc::pthread_self(), &mut attr); + #[cfg(not(target_os = "freebsd"))] + let e = libc::pthread_getattr_np(libc::pthread_self(), &mut attr); + if e == 0 { let mut stackaddr = ::ptr::null_mut(); let mut stacksize = 0; assert_eq!(libc::pthread_attr_getstack(&attr, &mut 
stackaddr, @@ -248,7 +255,11 @@ pub mod guard { panic!("failed to allocate a guard page"); } - let offset = if cfg!(target_os = "linux") {2} else {1}; + let offset = if cfg!(any(target_os = "linux", target_os = "freebsd")) { + 2 + } else { + 1 + }; Some(stackaddr as usize + offset * psize) } @@ -282,12 +293,17 @@ pub mod guard { }) } - #[cfg(any(target_os = "linux", target_os = "android", target_os = "netbsd"))] + #[cfg(any(target_os = "android", target_os = "freebsd", + target_os = "linux", target_os = "netbsd"))] pub unsafe fn current() -> Option { let mut ret = None; let mut attr: libc::pthread_attr_t = ::mem::zeroed(); assert_eq!(libc::pthread_attr_init(&mut attr), 0); - if libc::pthread_getattr_np(libc::pthread_self(), &mut attr) == 0 { + #[cfg(target_os = "freebsd")] + let e = libc::pthread_attr_get_np(libc::pthread_self(), &mut attr); + #[cfg(not(target_os = "freebsd"))] + let e = libc::pthread_getattr_np(libc::pthread_self(), &mut attr); + if e == 0 { let mut guardsize = 0; assert_eq!(libc::pthread_attr_getguardsize(&attr, &mut guardsize), 0); if guardsize == 0 { @@ -298,7 +314,9 @@ pub mod guard { assert_eq!(libc::pthread_attr_getstack(&attr, &mut stackaddr, &mut size), 0); - ret = if cfg!(target_os = "netbsd") { + ret = if cfg!(target_os = "freebsd") { + Some(stackaddr as usize - guardsize as usize) + } else if cfg!(target_os = "netbsd") { Some(stackaddr as usize) } else { Some(stackaddr as usize + guardsize as usize) diff --git a/src/libstd/sys/unix/time.rs b/src/libstd/sys/unix/time.rs index cc7abe25e3..68eebba9e7 100644 --- a/src/libstd/sys/unix/time.rs +++ b/src/libstd/sys/unix/time.rs @@ -8,37 +8,129 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +use cmp::Ordering; +use time::Duration; +use libc; + pub use self::inner::{Instant, SystemTime, UNIX_EPOCH}; const NSEC_PER_SEC: u64 = 1_000_000_000; +#[derive(Copy, Clone)] +struct Timespec { + t: libc::timespec, +} + +impl Timespec { + fn sub_timespec(&self, other: &Timespec) -> Result { + if self >= other { + Ok(if self.t.tv_nsec >= other.t.tv_nsec { + Duration::new((self.t.tv_sec - other.t.tv_sec) as u64, + (self.t.tv_nsec - other.t.tv_nsec) as u32) + } else { + Duration::new((self.t.tv_sec - 1 - other.t.tv_sec) as u64, + self.t.tv_nsec as u32 + (NSEC_PER_SEC as u32) - + other.t.tv_nsec as u32) + }) + } else { + match other.sub_timespec(self) { + Ok(d) => Err(d), + Err(d) => Ok(d), + } + } + } + + fn add_duration(&self, other: &Duration) -> Timespec { + let secs = (self.t.tv_sec as i64).checked_add(other.as_secs() as i64); + let mut secs = secs.expect("overflow when adding duration to time"); + + // Nano calculations can't overflow because nanos are <1B which fit + // in a u32. + let mut nsec = other.subsec_nanos() + self.t.tv_nsec as u32; + if nsec >= NSEC_PER_SEC as u32 { + nsec -= NSEC_PER_SEC as u32; + secs = secs.checked_add(1).expect("overflow when adding \ + duration to time"); + } + Timespec { + t: libc::timespec { + tv_sec: secs as libc::time_t, + tv_nsec: nsec as libc::c_long, + }, + } + } + + fn sub_duration(&self, other: &Duration) -> Timespec { + let secs = (self.t.tv_sec as i64).checked_sub(other.as_secs() as i64); + let mut secs = secs.expect("overflow when subtracting duration \ + from time"); + + // Similar to above, nanos can't overflow. 
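The new shared `Timespec` above does second/nanosecond arithmetic by hand: additions carry one second whenever the nanosecond field reaches one billion, and subtractions borrow one second whenever it would go negative, preserving the invariant that `tv_nsec` stays in `0..1_000_000_000`. An illustrative, self-contained version of the same normalization (stand-in types, not the std ones):

```rust
// Sketch of the carry/borrow normalization used by Timespec::add_duration
// and Timespec::sub_duration above.
const NSEC_PER_SEC: i64 = 1_000_000_000;

#[derive(Debug, PartialEq)]
struct Ts { sec: i64, nsec: i64 } // invariant: 0 <= nsec < NSEC_PER_SEC

fn add(t: Ts, d_sec: i64, d_nsec: i64) -> Ts {
    let mut sec = t.sec + d_sec;
    let mut nsec = t.nsec + d_nsec;
    if nsec >= NSEC_PER_SEC {    // carry one second
        nsec -= NSEC_PER_SEC;
        sec += 1;
    }
    Ts { sec: sec, nsec: nsec }
}

fn sub(t: Ts, d_sec: i64, d_nsec: i64) -> Ts {
    let mut sec = t.sec - d_sec;
    let mut nsec = t.nsec - d_nsec;
    if nsec < 0 {                // borrow one second
        nsec += NSEC_PER_SEC;
        sec -= 1;
    }
    Ts { sec: sec, nsec: nsec }
}

fn main() {
    // 1.2s + 0.9s = 2.1s (carry), then 2.1s - 0.9s = 1.2s (borrow).
    let t = add(Ts { sec: 1, nsec: 200_000_000 }, 0, 900_000_000);
    assert_eq!(t, Ts { sec: 2, nsec: 100_000_000 });
    assert_eq!(sub(t, 0, 900_000_000), Ts { sec: 1, nsec: 200_000_000 });
}
```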
+ let mut nsec = self.t.tv_nsec as i32 - other.subsec_nanos() as i32; + if nsec < 0 { + nsec += NSEC_PER_SEC as i32; + secs = secs.checked_sub(1).expect("overflow when subtracting \ + duration from time"); + } + Timespec { + t: libc::timespec { + tv_sec: secs as libc::time_t, + tv_nsec: nsec as libc::c_long, + }, + } + } +} + +impl PartialEq for Timespec { + fn eq(&self, other: &Timespec) -> bool { + self.t.tv_sec == other.t.tv_sec && self.t.tv_nsec == other.t.tv_nsec + } +} + +impl Eq for Timespec {} + +impl PartialOrd for Timespec { + fn partial_cmp(&self, other: &Timespec) -> Option { + Some(self.cmp(other)) + } +} + +impl Ord for Timespec { + fn cmp(&self, other: &Timespec) -> Ordering { + let me = (self.t.tv_sec, self.t.tv_nsec); + let other = (other.t.tv_sec, other.t.tv_nsec); + me.cmp(&other) + } +} + #[cfg(any(target_os = "macos", target_os = "ios"))] mod inner { - use cmp::Ordering; use fmt; use libc; - use super::NSEC_PER_SEC; use sync::Once; use sys::cvt; use sys_common::mul_div_u64; use time::Duration; - const USEC_PER_SEC: u64 = NSEC_PER_SEC / 1000; + use super::NSEC_PER_SEC; + use super::Timespec; #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)] pub struct Instant { t: u64 } - #[derive(Copy, Clone)] + #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct SystemTime { - t: libc::timeval, + t: Timespec, } pub const UNIX_EPOCH: SystemTime = SystemTime { - t: libc::timeval { - tv_sec: 0, - tv_usec: 0, + t: Timespec { + t: libc::timespec { + tv_sec: 0, + tv_nsec: 0, + }, }, }; @@ -72,113 +164,50 @@ mod inner { impl SystemTime { pub fn now() -> SystemTime { - let mut s = SystemTime { - t: libc::timeval { - tv_sec: 0, - tv_usec: 0, - }, + let mut s = libc::timeval { + tv_sec: 0, + tv_usec: 0, }; cvt(unsafe { - libc::gettimeofday(&mut s.t, 0 as *mut _) + libc::gettimeofday(&mut s, 0 as *mut _) }).unwrap(); - return s + return SystemTime::from(s) } pub fn sub_time(&self, other: &SystemTime) -> Result { - if self >= other { - Ok(if self.t.tv_usec >= other.t.tv_usec { - Duration::new((self.t.tv_sec - other.t.tv_sec) as u64, - ((self.t.tv_usec - - other.t.tv_usec) as u32) * 1000) - } else { - Duration::new((self.t.tv_sec - 1 - other.t.tv_sec) as u64, - (self.t.tv_usec as u32 + (USEC_PER_SEC as u32) - - other.t.tv_usec as u32) * 1000) - }) - } else { - match other.sub_time(self) { - Ok(d) => Err(d), - Err(d) => Ok(d), - } - } + self.t.sub_timespec(&other.t) } pub fn add_duration(&self, other: &Duration) -> SystemTime { - let secs = (self.t.tv_sec as i64).checked_add(other.as_secs() as i64); - let mut secs = secs.expect("overflow when adding duration to time"); - - // Nano calculations can't overflow because nanos are <1B which fit - // in a u32. - let mut usec = (other.subsec_nanos() / 1000) + self.t.tv_usec as u32; - if usec >= USEC_PER_SEC as u32 { - usec -= USEC_PER_SEC as u32; - secs = secs.checked_add(1).expect("overflow when adding \ - duration to time"); - } - SystemTime { - t: libc::timeval { - tv_sec: secs as libc::time_t, - tv_usec: usec as libc::suseconds_t, - }, - } + SystemTime { t: self.t.add_duration(other) } } pub fn sub_duration(&self, other: &Duration) -> SystemTime { - let secs = (self.t.tv_sec as i64).checked_sub(other.as_secs() as i64); - let mut secs = secs.expect("overflow when subtracting duration \ - from time"); - - // Similar to above, nanos can't overflow. 
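Ordering for the new `Timespec` is delegated to lexicographic tuple comparison on `(tv_sec, tv_nsec)`, and `PartialOrd` simply forwards to `Ord` so the two implementations cannot disagree. A tiny sketch of that trick, assuming the nanosecond field is kept normalized below one second:

```rust
// Sketch of the tuple-comparison Ord impl used for Timespec above.
use std::cmp::Ordering;

#[derive(PartialEq, Eq)]
struct Stamp { sec: i64, nsec: i64 }

impl Ord for Stamp {
    fn cmp(&self, other: &Stamp) -> Ordering {
        (self.sec, self.nsec).cmp(&(other.sec, other.nsec))
    }
}

impl PartialOrd for Stamp {
    fn partial_cmp(&self, other: &Stamp) -> Option<Ordering> {
        Some(self.cmp(other)) // forward so PartialOrd and Ord agree
    }
}

fn main() {
    assert!(Stamp { sec: 1, nsec: 999_999_999 } < Stamp { sec: 2, nsec: 0 });
}
```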
- let mut usec = self.t.tv_usec as i32 - - (other.subsec_nanos() / 1000) as i32; - if usec < 0 { - usec += USEC_PER_SEC as i32; - secs = secs.checked_sub(1).expect("overflow when subtracting \ - duration from time"); - } - SystemTime { - t: libc::timeval { - tv_sec: secs as libc::time_t, - tv_usec: usec as libc::suseconds_t, - }, - } + SystemTime { t: self.t.sub_duration(other) } } } impl From for SystemTime { fn from(t: libc::timeval) -> SystemTime { - SystemTime { t: t } - } - } - - impl PartialEq for SystemTime { - fn eq(&self, other: &SystemTime) -> bool { - self.t.tv_sec == other.t.tv_sec && self.t.tv_usec == other.t.tv_usec - } - } - - impl Eq for SystemTime {} - - impl PartialOrd for SystemTime { - fn partial_cmp(&self, other: &SystemTime) -> Option { - Some(self.cmp(other)) + SystemTime::from(libc::timespec { + tv_sec: t.tv_sec, + tv_nsec: (t.tv_usec * 1000) as libc::c_long, + }) } } - impl Ord for SystemTime { - fn cmp(&self, other: &SystemTime) -> Ordering { - let me = (self.t.tv_sec, self.t.tv_usec); - let other = (other.t.tv_sec, other.t.tv_usec); - me.cmp(&other) + impl From for SystemTime { + fn from(t: libc::timespec) -> SystemTime { + SystemTime { t: Timespec { t: t } } } } impl fmt::Debug for SystemTime { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { f.debug_struct("SystemTime") - .field("tv_sec", &self.t.tv_sec) - .field("tv_usec", &self.t.tv_usec) + .field("tv_sec", &self.t.t.tv_sec) + .field("tv_nsec", &self.t.t.tv_nsec) .finish() } } @@ -209,17 +238,12 @@ mod inner { #[cfg(not(any(target_os = "macos", target_os = "ios")))] mod inner { - use cmp::Ordering; use fmt; use libc; - use super::NSEC_PER_SEC; use sys::cvt; use time::Duration; - #[derive(Copy, Clone)] - struct Timespec { - t: libc::timespec, - } + use super::Timespec; #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)] pub struct Instant { @@ -242,7 +266,7 @@ mod inner { impl Instant { pub fn now() -> Instant { - Instant { t: Timespec::now(libc::CLOCK_MONOTONIC) } + Instant { t: now(libc::CLOCK_MONOTONIC) } } pub fn sub_instant(&self, other: &Instant) -> Duration { @@ -271,7 +295,7 @@ mod inner { impl SystemTime { pub fn now() -> SystemTime { - SystemTime { t: Timespec::now(libc::CLOCK_REALTIME) } + SystemTime { t: now(libc::CLOCK_REALTIME) } } pub fn sub_time(&self, other: &SystemTime) @@ -308,98 +332,16 @@ mod inner { #[cfg(target_os = "dragonfly")] pub type clock_t = libc::c_ulong; - impl Timespec { - pub fn now(clock: clock_t) -> Timespec { - let mut t = Timespec { - t: libc::timespec { - tv_sec: 0, - tv_nsec: 0, - } - }; - cvt(unsafe { - libc::clock_gettime(clock, &mut t.t) - }).unwrap(); - t - } - - fn sub_timespec(&self, other: &Timespec) -> Result { - if self >= other { - Ok(if self.t.tv_nsec >= other.t.tv_nsec { - Duration::new((self.t.tv_sec - other.t.tv_sec) as u64, - (self.t.tv_nsec - other.t.tv_nsec) as u32) - } else { - Duration::new((self.t.tv_sec - 1 - other.t.tv_sec) as u64, - self.t.tv_nsec as u32 + (NSEC_PER_SEC as u32) - - other.t.tv_nsec as u32) - }) - } else { - match other.sub_timespec(self) { - Ok(d) => Err(d), - Err(d) => Ok(d), - } - } - } - - fn add_duration(&self, other: &Duration) -> Timespec { - let secs = (self.t.tv_sec as i64).checked_add(other.as_secs() as i64); - let mut secs = secs.expect("overflow when adding duration to time"); - - // Nano calculations can't overflow because nanos are <1B which fit - // in a u32. 
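On macOS/iOS the wall clock still comes from `gettimeofday`, which reports microseconds, so the new `From<libc::timeval> for SystemTime` impl above widens that into the shared nanosecond-based `Timespec` by multiplying `tv_usec` by 1000. A trivial sketch with plain integers standing in for the libc struct fields:

```rust
// Sketch of the microsecond-to-nanosecond widening done by the timeval
// conversion above; tv_usec is always below 1_000_000, so the product stays
// safely below the 1_000_000_000 nanosecond bound.
fn timeval_to_timespec(tv_sec: i64, tv_usec: i64) -> (i64, i64) {
    (tv_sec, tv_usec * 1000)
}

fn main() {
    assert_eq!(timeval_to_timespec(10, 250_000), (10, 250_000_000));
}
```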
- let mut nsec = other.subsec_nanos() + self.t.tv_nsec as u32; - if nsec >= NSEC_PER_SEC as u32 { - nsec -= NSEC_PER_SEC as u32; - secs = secs.checked_add(1).expect("overflow when adding \ - duration to time"); - } - Timespec { - t: libc::timespec { - tv_sec: secs as libc::time_t, - tv_nsec: nsec as libc::c_long, - }, - } - } - - fn sub_duration(&self, other: &Duration) -> Timespec { - let secs = (self.t.tv_sec as i64).checked_sub(other.as_secs() as i64); - let mut secs = secs.expect("overflow when subtracting duration \ - from time"); - - // Similar to above, nanos can't overflow. - let mut nsec = self.t.tv_nsec as i32 - other.subsec_nanos() as i32; - if nsec < 0 { - nsec += NSEC_PER_SEC as i32; - secs = secs.checked_sub(1).expect("overflow when subtracting \ - duration from time"); - } - Timespec { - t: libc::timespec { - tv_sec: secs as libc::time_t, - tv_nsec: nsec as libc::c_long, - }, + fn now(clock: clock_t) -> Timespec { + let mut t = Timespec { + t: libc::timespec { + tv_sec: 0, + tv_nsec: 0, } - } - } - - impl PartialEq for Timespec { - fn eq(&self, other: &Timespec) -> bool { - self.t.tv_sec == other.t.tv_sec && self.t.tv_nsec == other.t.tv_nsec - } - } - - impl Eq for Timespec {} - - impl PartialOrd for Timespec { - fn partial_cmp(&self, other: &Timespec) -> Option { - Some(self.cmp(other)) - } - } - - impl Ord for Timespec { - fn cmp(&self, other: &Timespec) -> Ordering { - let me = (self.t.tv_sec, self.t.tv_nsec); - let other = (other.t.tv_sec, other.t.tv_nsec); - me.cmp(&other) - } + }; + cvt(unsafe { + libc::clock_gettime(clock, &mut t.t) + }).unwrap(); + t } } diff --git a/src/libstd/sys/windows/c.rs b/src/libstd/sys/windows/c.rs index ab24b9e6fd..2acf6485eb 100644 --- a/src/libstd/sys/windows/c.rs +++ b/src/libstd/sys/windows/c.rs @@ -277,21 +277,6 @@ pub const CRYPT_VERIFYCONTEXT: DWORD = 0xF0000000; pub const EXCEPTION_CONTINUE_SEARCH: LONG = 0; pub const EXCEPTION_STACK_OVERFLOW: DWORD = 0xc00000fd; pub const EXCEPTION_MAXIMUM_PARAMETERS: usize = 15; -#[cfg(all(target_arch = "x86_64", target_env = "gnu"))] -pub const EXCEPTION_NONCONTINUABLE: DWORD = 0x1; // Noncontinuable exception -#[cfg(all(target_arch = "x86_64", target_env = "gnu"))] -pub const EXCEPTION_UNWINDING: DWORD = 0x2; // Unwind is in progress -#[cfg(all(target_arch = "x86_64", target_env = "gnu"))] -pub const EXCEPTION_EXIT_UNWIND: DWORD = 0x4; // Exit unwind is in progress -#[cfg(all(target_arch = "x86_64", target_env = "gnu"))] -pub const EXCEPTION_TARGET_UNWIND: DWORD = 0x20; // Target unwind in progress -#[cfg(all(target_arch = "x86_64", target_env = "gnu"))] -pub const EXCEPTION_COLLIDED_UNWIND: DWORD = 0x40; // Collided exception handler call -#[cfg(all(target_arch = "x86_64", target_env = "gnu"))] -pub const EXCEPTION_UNWIND: DWORD = EXCEPTION_UNWINDING | - EXCEPTION_EXIT_UNWIND | - EXCEPTION_TARGET_UNWIND | - EXCEPTION_COLLIDED_UNWIND; pub const PIPE_ACCESS_INBOUND: DWORD = 0x00000001; pub const FILE_FLAG_FIRST_PIPE_INSTANCE: DWORD = 0x00080000; @@ -813,31 +798,6 @@ pub struct in6_addr { pub s6_addr: [u8; 16], } -#[cfg(all(target_arch = "x86_64", target_env = "gnu"))] -pub enum UNWIND_HISTORY_TABLE {} - -#[repr(C)] -#[cfg(all(target_arch = "x86_64", target_env = "gnu"))] -pub struct RUNTIME_FUNCTION { - pub BeginAddress: DWORD, - pub EndAddress: DWORD, - pub UnwindData: DWORD, -} - -#[repr(C)] -#[cfg(all(target_arch = "x86_64", target_env = "gnu"))] -pub struct DISPATCHER_CONTEXT { - pub ControlPc: LPVOID, - pub ImageBase: LPVOID, - pub FunctionEntry: *const RUNTIME_FUNCTION, - pub 
EstablisherFrame: LPVOID, - pub TargetIp: LPVOID, - pub ContextRecord: *const CONTEXT, - pub LanguageHandler: LPVOID, - pub HandlerData: *const u8, - pub HistoryTable: *const UNWIND_HISTORY_TABLE, -} - #[repr(C)] #[derive(Copy, Clone)] #[allow(dead_code)] // we only use some variants @@ -1113,19 +1073,6 @@ extern "system" { pbBuffer: *mut BYTE) -> BOOL; pub fn CryptReleaseContext(hProv: HCRYPTPROV, dwFlags: DWORD) -> BOOL; - #[unwind] - #[cfg(any(target_arch = "x86_64", target_env = "msvc"))] - pub fn RaiseException(dwExceptionCode: DWORD, - dwExceptionFlags: DWORD, - nNumberOfArguments: DWORD, - lpArguments: *const ULONG_PTR); - #[cfg(all(target_arch = "x86_64", target_env = "gnu"))] - pub fn RtlUnwindEx(TargetFrame: LPVOID, - TargetIp: LPVOID, - ExceptionRecord: *const EXCEPTION_RECORD, - ReturnValue: LPVOID, - OriginalContext: *const CONTEXT, - HistoryTable: *const UNWIND_HISTORY_TABLE); pub fn GetSystemTimeAsFileTime(lpSystemTimeAsFileTime: LPFILETIME); pub fn CreateEventW(lpEventAttributes: LPSECURITY_ATTRIBUTES, diff --git a/src/libstd/sys/windows/ext/fs.rs b/src/libstd/sys/windows/ext/fs.rs index d378a6853f..4388a0bdff 100644 --- a/src/libstd/sys/windows/ext/fs.rs +++ b/src/libstd/sys/windows/ext/fs.rs @@ -19,9 +19,7 @@ use sys; use sys_common::{AsInnerMut, AsInner}; /// Windows-specific extensions to `OpenOptions` -#[unstable(feature = "open_options_ext", - reason = "may require more thought/methods", - issue = "27720")] +#[stable(feature = "open_options_ext", since = "1.10.0")] pub trait OpenOptionsExt { /// Overrides the `dwDesiredAccess` argument to the call to `CreateFile` /// with the specified value. @@ -34,7 +32,6 @@ pub trait OpenOptionsExt { /// # Examples /// /// ```no_run - /// #![feature(open_options_ext)] /// use std::fs::OpenOptions; /// use std::os::windows::fs::OpenOptionsExt; /// @@ -42,6 +39,7 @@ pub trait OpenOptionsExt { /// // to call `stat()` on the file /// let file = OpenOptions::new().access_mode(0).open("foo.txt"); /// ``` + #[stable(feature = "open_options_ext", since = "1.10.0")] fn access_mode(&mut self, access: u32) -> &mut Self; /// Overrides the `dwShareMode` argument to the call to `CreateFile` with @@ -55,7 +53,6 @@ pub trait OpenOptionsExt { /// # Examples /// /// ```no_run - /// #![feature(open_options_ext)] /// use std::fs::OpenOptions; /// use std::os::windows::fs::OpenOptionsExt; /// @@ -65,6 +62,7 @@ pub trait OpenOptionsExt { /// .share_mode(0) /// .open("foo.txt"); /// ``` + #[stable(feature = "open_options_ext", since = "1.10.0")] fn share_mode(&mut self, val: u32) -> &mut Self; /// Sets extra flags for the `dwFileFlags` argument to the call to @@ -88,9 +86,7 @@ pub trait OpenOptionsExt { /// } /// let file = options.open("foo.txt"); /// ``` - #[unstable(feature = "expand_open_options", - reason = "recently added", - issue = "30014")] + #[stable(feature = "open_options_ext", since = "1.10.0")] fn custom_flags(&mut self, flags: u32) -> &mut Self; /// Sets the `dwFileAttributes` argument to the call to `CreateFile2` to @@ -111,7 +107,6 @@ pub trait OpenOptionsExt { /// # Examples /// /// ```rust,ignore - /// #![feature(open_options_ext)] /// extern crate winapi; /// use std::fs::OpenOptions; /// use std::os::windows::fs::OpenOptionsExt; @@ -120,17 +115,17 @@ pub trait OpenOptionsExt { /// .attributes(winapi::FILE_ATTRIBUTE_HIDDEN) /// .open("foo.txt"); /// ``` + #[stable(feature = "open_options_ext", since = "1.10.0")] fn attributes(&mut self, val: u32) -> &mut Self; /// Sets the `dwSecurityQosFlags` argument to the call to `CreateFile2` to 
/// the specified value (or combines it with `custom_flags` and `attributes` /// to set the `dwFlagsAndAttributes` for `CreateFile`). + #[stable(feature = "open_options_ext", since = "1.10.0")] fn security_qos_flags(&mut self, flags: u32) -> &mut OpenOptions; } -#[unstable(feature = "open_options_ext", - reason = "may require more thought/methods", - issue = "27720")] +#[stable(feature = "open_options_ext", since = "1.10.0")] impl OpenOptionsExt for OpenOptions { fn access_mode(&mut self, access: u32) -> &mut OpenOptions { self.as_inner_mut().access_mode(access); self diff --git a/src/libstd/sys/windows/ext/process.rs b/src/libstd/sys/windows/ext/process.rs index f6ee59eec3..56c6a73d4f 100644 --- a/src/libstd/sys/windows/ext/process.rs +++ b/src/libstd/sys/windows/ext/process.rs @@ -81,3 +81,18 @@ impl IntoRawHandle for process::ChildStderr { self.into_inner().into_handle().into_raw() as *mut _ } } + +/// Windows-specific extensions to `std::process::ExitStatus` +#[unstable(feature = "exit_status_from", issue = "32713")] +pub trait ExitStatusExt { + /// Creates a new `ExitStatus` from the raw underlying `u32` return value of + /// a process. + fn from_raw(raw: u32) -> Self; +} + +#[stable(feature = "rust1", since = "1.0.0")] +impl ExitStatusExt for process::ExitStatus { + fn from_raw(raw: u32) -> Self { + process::ExitStatus::from_inner(From::from(raw)) + } +} diff --git a/src/libstd/sys/windows/fs.rs b/src/libstd/sys/windows/fs.rs index 529e42248f..3cd45afaf0 100644 --- a/src/libstd/sys/windows/fs.rs +++ b/src/libstd/sys/windows/fs.rs @@ -9,7 +9,6 @@ // except according to those terms. use prelude::v1::*; -use io::prelude::*; use os::windows::prelude::*; use ffi::OsString; diff --git a/src/libstd/sys/windows/process.rs b/src/libstd/sys/windows/process.rs index f495729758..3ca75cf364 100644 --- a/src/libstd/sys/windows/process.rs +++ b/src/libstd/sys/windows/process.rs @@ -24,7 +24,7 @@ use mem; use os::windows::ffi::OsStrExt; use path::Path; use ptr; -use sync::StaticMutex; +use sys::mutex::Mutex; use sys::c; use sys::fs::{OpenOptions, File}; use sys::handle::Handle; @@ -75,6 +75,10 @@ pub struct StdioPipes { pub stderr: Option, } +struct DropGuard<'a> { + lock: &'a Mutex, +} + impl Command { pub fn new(program: &OsStr) -> Command { Command { @@ -173,8 +177,8 @@ impl Command { // // For more information, msdn also has an article about this race: // http://support.microsoft.com/kb/315939 - static CREATE_PROCESS_LOCK: StaticMutex = StaticMutex::new(); - let _lock = CREATE_PROCESS_LOCK.lock(); + static CREATE_PROCESS_LOCK: Mutex = Mutex::new(); + let _guard = DropGuard::new(&CREATE_PROCESS_LOCK); let mut pipes = StdioPipes { stdin: None, @@ -224,6 +228,23 @@ impl fmt::Debug for Command { } } +impl<'a> DropGuard<'a> { + fn new(lock: &'a Mutex) -> DropGuard<'a> { + unsafe { + lock.lock(); + DropGuard { lock: lock } + } + } +} + +impl<'a> Drop for DropGuard<'a> { + fn drop(&mut self) { + unsafe { + self.lock.unlock(); + } + } +} + impl Stdio { fn to_handle(&self, stdio_id: c::DWORD, pipe: &mut Option) -> io::Result { @@ -337,6 +358,12 @@ impl ExitStatus { } } +impl From for ExitStatus { + fn from(u: c::DWORD) -> ExitStatus { + ExitStatus(u) + } +} + impl fmt::Display for ExitStatus { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "exit code: {}", self.0) diff --git a/src/libstd/thread/local.rs b/src/libstd/thread/local.rs index 69395001bb..6b54ec8afc 100644 --- a/src/libstd/thread/local.rs +++ b/src/libstd/thread/local.rs @@ -60,7 +60,7 @@ use mem; /// # Platform-specific 
behavior /// /// Note that a "best effort" is made to ensure that destructors for types -/// stored in thread local storage are run, but not all platforms can gurantee +/// stored in thread local storage are run, but not all platforms can guarantee /// that destructors will be run for all types in thread local storage. For /// example, there are a number of known caveats where destructors are not run: /// @@ -254,7 +254,7 @@ impl LocalKey { /// destruction has completed. Keys without destructors (e.g. with types /// that are `Copy`), may never enter the `Destroyed` state. /// - /// Keys in the `Uninitialized` can be accessed so long as the + /// Keys in the `Uninitialized` state can be accessed so long as the /// initialization does not panic. Keys in the `Valid` state are guaranteed /// to be able to be accessed. Keys in the `Destroyed` state will panic on /// any call to `with`. diff --git a/src/libstd/thread/mod.rs b/src/libstd/thread/mod.rs index b3549dc126..dc26370590 100644 --- a/src/libstd/thread/mod.rs +++ b/src/libstd/thread/mod.rs @@ -13,7 +13,8 @@ //! ## The threading model //! //! An executing Rust program consists of a collection of native OS threads, -//! each with their own stack and local state. +//! each with their own stack and local state. Threads can be named, and +//! provide some built-in support for low-level synchronization. //! //! Communication between threads can be done through //! [channels](../../std/sync/mpsc/index.html), Rust's message-passing @@ -37,20 +38,6 @@ //! convenient facilities for automatically waiting for the termination of a //! child thread (i.e., join). //! -//! ## The `Thread` type -//! -//! Threads are represented via the `Thread` type, which you can -//! get in one of two ways: -//! -//! * By spawning a new thread, e.g. using the `thread::spawn` function. -//! * By requesting the current thread, using the `thread::current` function. -//! -//! Threads can be named, and provide some built-in support for low-level -//! synchronization (described below). -//! -//! The `thread::current()` function is available even for threads not spawned -//! by the APIs of this module. -//! //! ## Spawning a thread //! //! A new thread can be spawned using the `thread::spawn` function: @@ -99,10 +86,24 @@ //! }); //! ``` //! +//! ## The `Thread` type +//! +//! Threads are represented via the `Thread` type, which you can get in one of +//! two ways: +//! +//! * By spawning a new thread, e.g. using the `thread::spawn` function, and +//! calling `thread()` on the `JoinHandle`. +//! * By requesting the current thread, using the `thread::current` function. +//! +//! The `thread::current()` function is available even for threads not spawned +//! by the APIs of this module. +//! //! ## Blocking support: park and unpark //! //! Every thread is equipped with some basic low-level blocking support, via the -//! `park` and `unpark` functions. +//! `thread::park()` function and `thread::Thread::unpark()` method. `park()` +//! blocks the current thread, which can then be resumed from another thread by +//! calling the `unpark()` method on the blocked thread's handle. //! //! Conceptually, each `Thread` handle has an associated token, which is //! 
initially not present: @@ -164,14 +165,15 @@ use prelude::v1::*; use any::Any; use cell::UnsafeCell; +use ffi::{CStr, CString}; use fmt; use io; +use panic; +use panicking; use str; -use ffi::{CStr, CString}; use sync::{Mutex, Condvar, Arc}; use sys::thread as imp; use sys_common::thread_info; -use sys_common::unwind; use sys_common::util; use sys_common::{AsInner, IntoInner}; use time::Duration; @@ -274,14 +276,8 @@ impl Builder { } unsafe { thread_info::set(imp::guard::current(), their_thread); - let mut output = None; - let try_result = { - let ptr = &mut output; - unwind::try(move || *ptr = Some(f())) - }; - *their_packet.get() = Some(try_result.map(|()| { - output.unwrap() - })); + let try_result = panic::catch_unwind(panic::AssertUnwindSafe(f)); + *their_packet.get() = Some(try_result); } }; @@ -338,7 +334,7 @@ pub fn yield_now() { #[inline] #[stable(feature = "rust1", since = "1.0.0")] pub fn panicking() -> bool { - unwind::panicking() + panicking::panicking() } /// Puts the current thread to sleep for the specified amount of time. diff --git a/src/libstd/time/duration.rs b/src/libstd/time/duration.rs index 8a50f07e6d..79bbe5e7da 100644 --- a/src/libstd/time/duration.rs +++ b/src/libstd/time/duration.rs @@ -51,10 +51,16 @@ impl Duration { /// /// If the nanoseconds is greater than 1 billion (the number of nanoseconds /// in a second), then it will carry over into the seconds provided. + /// + /// # Panics + /// + /// This constructor will panic if the carry from the nanoseconds overflows + /// the seconds counter. #[stable(feature = "duration", since = "1.3.0")] #[inline] pub fn new(secs: u64, nanos: u32) -> Duration { - let secs = secs + (nanos / NANOS_PER_SEC) as u64; + let secs = secs.checked_add((nanos / NANOS_PER_SEC) as u64) + .expect("overflow in Duration::new"); let nanos = nanos % NANOS_PER_SEC; Duration { secs: secs, nanos: nanos } } diff --git a/src/libstd/time/mod.rs b/src/libstd/time/mod.rs index bc50b0d3a7..80963a9b73 100644 --- a/src/libstd/time/mod.rs +++ b/src/libstd/time/mod.rs @@ -76,7 +76,7 @@ pub struct Instant(time::Instant); /// Distinct from the `Instant` type, this time measurement **is not /// monotonic**. This means that you can save a file to the file system, then /// save another file to the file system, **and the second file has a -/// `SystemTime` measurement earlier than the second**. In other words, an +/// `SystemTime` measurement earlier than the first**. In other words, an /// operation that happens after another operation in real time may have an /// earlier `SystemTime`! 
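The spawn path above now routes the user closure through `panic::catch_unwind(panic::AssertUnwindSafe(f))` and stores the resulting `Result` directly in the packet that `join()` later hands back, replacing the old `sys_common::unwind::try` plus output-slot dance. A minimal illustration of what `std::panic::catch_unwind` returns:

```rust
// Sketch: a panic inside the closure becomes an Err value, which is what
// JoinHandle::join ultimately reports to the caller.
use std::panic;

fn main() {
    // A closure that returns normally yields Ok(..).
    let ok = panic::catch_unwind(|| 1 + 1);
    assert_eq!(ok.ok(), Some(2));

    // A panicking closure yields Err(..); the default hook still prints the
    // panic message to stderr before the unwind is caught.
    let err = panic::catch_unwind(|| {
        panic!("boom");
    });
    assert!(err.is_err());

    // AssertUnwindSafe is the escape hatch used in the patch when the closure
    // captures state the compiler cannot prove unwind-safe (here: &mut count).
    let mut count = 0;
    let _ = panic::catch_unwind(panic::AssertUnwindSafe(|| count += 1));
    assert_eq!(count, 1);
}
```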
/// diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs index 16d4ed53b5..d9409d3bbd 100644 --- a/src/libsyntax/ast.rs +++ b/src/libsyntax/ast.rs @@ -16,13 +16,12 @@ pub use self::ViewPath_::*; pub use self::PathParameters::*; use attr::ThinAttributes; -use codemap::{Span, Spanned, DUMMY_SP, ExpnId}; +use codemap::{mk_sp, respan, Span, Spanned, DUMMY_SP, ExpnId}; use abi::Abi; use errors; use ext::base; use ext::tt::macro_parser; -use parse::token::InternedString; -use parse::token; +use parse::token::{self, keywords, InternedString}; use parse::lexer; use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration}; use print::pprust; @@ -61,6 +60,10 @@ impl Name { pub fn as_str(self) -> token::InternedString { token::InternedString::new_from_name(self) } + + pub fn unhygienize(self) -> Name { + token::intern(&self.as_str()) + } } impl fmt::Debug for Name { @@ -93,7 +96,7 @@ impl Ident { pub fn new(name: Name, ctxt: SyntaxContext) -> Ident { Ident {name: name, ctxt: ctxt} } - pub fn with_empty_ctxt(name: Name) -> Ident { + pub const fn with_empty_ctxt(name: Name) -> Ident { Ident {name: name, ctxt: EMPTY_CTXT} } } @@ -248,8 +251,8 @@ impl PathParameters { pub fn none() -> PathParameters { PathParameters::AngleBracketed(AngleBracketedParameterData { lifetimes: Vec::new(), - types: P::empty(), - bindings: P::empty(), + types: P::new(), + bindings: P::new(), }) } @@ -421,7 +424,7 @@ impl Default for Generics { fn default() -> Generics { Generics { lifetimes: Vec::new(), - ty_params: P::empty(), + ty_params: P::new(), where_clause: WhereClause { id: DUMMY_NODE_ID, predicates: Vec::new(), @@ -551,6 +554,44 @@ impl fmt::Debug for Pat { } } +impl Pat { + pub fn walk(&self, it: &mut F) -> bool + where F: FnMut(&Pat) -> bool + { + if !it(self) { + return false; + } + + match self.node { + PatKind::Ident(_, _, Some(ref p)) => p.walk(it), + PatKind::Struct(_, ref fields, _) => { + fields.iter().all(|field| field.node.pat.walk(it)) + } + PatKind::TupleStruct(_, Some(ref s)) | PatKind::Tup(ref s) => { + s.iter().all(|p| p.walk(it)) + } + PatKind::Box(ref s) | PatKind::Ref(ref s, _) => { + s.walk(it) + } + PatKind::Vec(ref before, ref slice, ref after) => { + before.iter().all(|p| p.walk(it)) && + slice.iter().all(|p| p.walk(it)) && + after.iter().all(|p| p.walk(it)) + } + PatKind::Wild | + PatKind::Lit(_) | + PatKind::Range(_, _) | + PatKind::Ident(_, _, _) | + PatKind::TupleStruct(..) | + PatKind::Path(..) | + PatKind::QPath(_, _) | + PatKind::Mac(_) => { + true + } + } + } +} + /// A single field in a struct pattern /// /// Patterns like the fields of Foo `{ x, ref y, ref mut z }` @@ -986,7 +1027,9 @@ pub enum ExprKind { /// A `match` block. Match(P, Vec), /// A closure (for example, `move |a, b, c| {a + b + c}`) - Closure(CaptureBy, P, P), + /// + /// The final span is the span of the argument block `|...|` + Closure(CaptureBy, P, P, Span), /// A block (`{ ... 
}`) Block(P), @@ -1206,8 +1249,7 @@ impl TokenTree { TokenTree::Delimited(sp, Rc::new(Delimited { delim: token::Bracket, open_span: sp, - tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"), - token::Plain)), + tts: vec![TokenTree::Token(sp, token::Ident(token::str_to_ident("doc"))), TokenTree::Token(sp, token::Eq), TokenTree::Token(sp, token::Literal( token::StrRaw(token::intern(&stripped), num_of_hashes), None))], @@ -1225,14 +1267,13 @@ impl TokenTree { } (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => { let v = [TokenTree::Token(sp, token::Dollar), - TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str()), - token::Plain))]; + TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))]; v[index].clone() } - (&TokenTree::Token(sp, token::MatchNt(name, kind, name_st, kind_st)), _) => { - let v = [TokenTree::Token(sp, token::SubstNt(name, name_st)), + (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => { + let v = [TokenTree::Token(sp, token::SubstNt(name)), TokenTree::Token(sp, token::Colon), - TokenTree::Token(sp, token::Ident(kind, kind_st))]; + TokenTree::Token(sp, token::Ident(kind))]; v[index].clone() } (&TokenTree::Sequence(_, ref seq), _) => { @@ -1636,7 +1677,25 @@ pub struct Arg { pub id: NodeId, } +/// Represents the kind of 'self' associated with a method. +/// String representation of `Ident` here is always "self", but hygiene contexts may differ. +#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] +pub enum SelfKind { + /// No self + Static, + /// `self`, `mut self` + Value(Ident), + /// `&'lt self`, `&'lt mut self` + Region(Option, Mutability, Ident), + /// `self: TYPE`, `mut self: TYPE` + Explicit(P, Ident), +} + +pub type ExplicitSelf = Spanned; + impl Arg { + #[unstable(feature = "rustc_private", issue = "27812")] + #[rustc_deprecated(since = "1.10.0", reason = "use `from_self` instead")] pub fn new_self(span: Span, mutability: Mutability, self_ident: Ident) -> Arg { let path = Spanned{span:span,node:self_ident}; Arg { @@ -1654,6 +1713,51 @@ impl Arg { id: DUMMY_NODE_ID } } + + pub fn to_self(&self) -> Option { + if let PatKind::Ident(_, ident, _) = self.pat.node { + if ident.node.name == keywords::SelfValue.name() { + return match self.ty.node { + TyKind::Infer => Some(respan(self.pat.span, SelfKind::Value(ident.node))), + TyKind::Rptr(lt, MutTy{ref ty, mutbl}) if ty.node == TyKind::Infer => { + Some(respan(self.pat.span, SelfKind::Region(lt, mutbl, ident.node))) + } + _ => Some(respan(mk_sp(self.pat.span.lo, self.ty.span.hi), + SelfKind::Explicit(self.ty.clone(), ident.node))), + } + } + } + None + } + + pub fn from_self(eself: ExplicitSelf, ident_sp: Span, mutbl: Mutability) -> Arg { + let pat = |ident, span| P(Pat { + id: DUMMY_NODE_ID, + node: PatKind::Ident(BindingMode::ByValue(mutbl), respan(ident_sp, ident), None), + span: span, + }); + let infer_ty = P(Ty { + id: DUMMY_NODE_ID, + node: TyKind::Infer, + span: DUMMY_SP, + }); + let arg = |ident, ty, span| Arg { + pat: pat(ident, span), + ty: ty, + id: DUMMY_NODE_ID, + }; + match eself.node { + SelfKind::Static => panic!("bug: `Arg::from_self` is called \ + with `SelfKind::Static` argument"), + SelfKind::Explicit(ty, ident) => arg(ident, ty, mk_sp(eself.span.lo, ident_sp.hi)), + SelfKind::Value(ident) => arg(ident, infer_ty, eself.span), + SelfKind::Region(lt, mutbl, ident) => arg(ident, P(Ty { + id: DUMMY_NODE_ID, + node: TyKind::Rptr(lt, MutTy { ty: infer_ty, mutbl: mutbl }), + span: DUMMY_SP, + }), eself.span), + } + } } /// 
Represents the header (not the body) of a function declaration @@ -1734,21 +1838,6 @@ impl FunctionRetTy { } } -/// Represents the kind of 'self' associated with a method -#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] -pub enum SelfKind { - /// No self - Static, - /// `self` - Value(Ident), - /// `&'lt self`, `&'lt mut self` - Region(Option, Mutability, Ident), - /// `self: TYPE` - Explicit(P, Ident), -} - -pub type ExplicitSelf = Spanned; - #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub struct Mod { /// A span from the first token past `{` to the last token until `}`. @@ -1887,7 +1976,7 @@ pub struct PolyTraitRef { #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] pub enum Visibility { Public, - Crate, + Crate(Span), Restricted { path: P, id: NodeId }, Inherited, } @@ -1992,10 +2081,7 @@ pub enum ItemKind { /// A struct definition, e.g. `struct Foo {x: A}` Struct(VariantData, Generics), /// Represents a Trait Declaration - Trait(Unsafety, - Generics, - TyParamBounds, - Vec), + Trait(Unsafety, Generics, TyParamBounds, Vec), // Default trait implementations /// diff --git a/src/libsyntax/attr.rs b/src/libsyntax/attr.rs index dd414c463c..8761ca3717 100644 --- a/src/libsyntax/attr.rs +++ b/src/libsyntax/attr.rs @@ -333,11 +333,11 @@ pub enum InlineAttr { pub fn find_inline_attr(diagnostic: Option<&Handler>, attrs: &[Attribute]) -> InlineAttr { attrs.iter().fold(InlineAttr::None, |ia,attr| { match attr.node.value.node { - MetaItemKind::Word(ref n) if *n == "inline" => { + MetaItemKind::Word(ref n) if n == "inline" => { mark_used(attr); InlineAttr::Hint } - MetaItemKind::List(ref n, ref items) if *n == "inline" => { + MetaItemKind::List(ref n, ref items) if n == "inline" => { mark_used(attr); if items.len() != 1 { diagnostic.map(|d|{ d.span_err(attr.span, "expected one argument"); }); @@ -711,7 +711,7 @@ pub fn require_unique_names(diagnostic: &Handler, metas: &[P]) { pub fn find_repr_attrs(diagnostic: &Handler, attr: &Attribute) -> Vec { let mut acc = Vec::new(); match attr.node.value.node { - ast::MetaItemKind::List(ref s, ref items) if *s == "repr" => { + ast::MetaItemKind::List(ref s, ref items) if s == "repr" => { mark_used(attr); for item in items { match item.node { diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs index f771ee95bd..ca8708fdc8 100644 --- a/src/libsyntax/codemap.rs +++ b/src/libsyntax/codemap.rs @@ -32,8 +32,6 @@ use serialize::{Encodable, Decodable, Encoder, Decoder}; use ast::Name; -use errors::emitter::MAX_HIGHLIGHT_LINES; - // _____________________________________________________________________________ // Pos, BytePos, CharPos // @@ -51,7 +49,7 @@ pub struct BytePos(pub u32); /// A character offset. Because of multibyte utf8 characters, a byte offset /// is not equivalent to a character offset. The CodeMap will convert BytePos /// values to CharPos values as necessary. -#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Debug)] +#[derive(Copy, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Debug)] pub struct CharPos(pub usize); // FIXME: Lots of boilerplate in these impls, but so far my attempts to fix @@ -132,13 +130,29 @@ pub struct Span { pub expn_id: ExpnId } -/// Spans are converted to MultiSpans just before error reporting, either automatically, -/// generated by line grouping, or manually constructed. -/// In the latter case care should be taken to ensure that spans are ordered, disjoint, -/// and point into the same FileMap. +/// A collection of spans. 
Spans have two orthogonal attributes: +/// +/// - they can be *primary spans*. In this case they are the locus of +/// the error, and would be rendered with `^^^`. +/// - they can have a *label*. In this case, the label is written next +/// to the mark in the snippet when we render. #[derive(Clone)] pub struct MultiSpan { - pub spans: Vec + primary_spans: Vec, + span_labels: Vec<(Span, String)>, +} + +#[derive(Clone, Debug)] +pub struct SpanLabel { + /// The span we are going to include in the final snippet. + pub span: Span, + + /// Is this a primary span? This is the "locus" of the message, + /// and is indicated with a `^^^^` underline, versus `----`. + pub is_primary: bool, + + /// What label should we attach to this span (if any)? + pub label: Option, } pub const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0), expn_id: NO_EXPANSION }; @@ -149,6 +163,12 @@ pub const COMMAND_LINE_SP: Span = Span { lo: BytePos(0), expn_id: COMMAND_LINE_EXPN }; impl Span { + /// Returns a new span representing just the end-point of this span + pub fn end_point(self) -> Span { + let lo = cmp::max(self.hi.0 - 1, self.lo.0); + Span { lo: BytePos(lo), hi: self.hi, expn_id: self.expn_id} + } + /// Returns `self` if `self` is not the dummy span, and `other` otherwise. pub fn substitute_dummy(self, other: Span) -> Span { if self.source_equal(&DUMMY_SP) { other } else { self } @@ -276,97 +296,74 @@ pub fn original_sp(cm: &CodeMap, sp: Span, enclosing_sp: Span) -> Span { impl MultiSpan { pub fn new() -> MultiSpan { - MultiSpan { spans: Vec::new() } + MultiSpan { + primary_spans: vec![], + span_labels: vec![] + } } - pub fn to_span_bounds(&self) -> Span { - assert!(!self.spans.is_empty()); - let Span { lo, expn_id, .. } = *self.spans.first().unwrap(); - let Span { hi, .. } = *self.spans.last().unwrap(); - Span { lo: lo, hi: hi, expn_id: expn_id } + pub fn from_span(primary_span: Span) -> MultiSpan { + MultiSpan { + primary_spans: vec![primary_span], + span_labels: vec![] + } } - /// Merges or inserts the given span into itself. - pub fn push_merge(&mut self, mut sp: Span) { - let mut idx_merged = None; - - for idx in 0.. { - let cur = match self.spans.get(idx) { - Some(s) => *s, - None => break, - }; - // Try to merge with a contained Span - if let Some(union) = cur.merge(sp) { - self.spans[idx] = union; - sp = union; - idx_merged = Some(idx); - break; - } - // Or insert into the first sorted position - if sp.hi <= cur.lo { - self.spans.insert(idx, sp); - idx_merged = Some(idx); - break; - } - } - if let Some(idx) = idx_merged { - // Merge with spans trailing the insertion/merging position - while (idx + 1) < self.spans.len() { - if let Some(union) = self.spans[idx + 1].merge(sp) { - self.spans[idx] = union; - self.spans.remove(idx + 1); - } else { - break; - } - } - } else { - self.spans.push(sp); + pub fn from_spans(vec: Vec) -> MultiSpan { + MultiSpan { + primary_spans: vec, + span_labels: vec![] } } - /// Inserts the given span into itself, for use with `end_highlight_lines`. 
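`MultiSpan` now separates *primary spans* from labelled spans, and the `span_labels()` accessor defined just below guarantees that every primary span appears in the output at least once, synthesizing an entry with no label when none was attached. A self-contained sketch of that contract, modelling `Span` as a simple `(lo, hi)` pair rather than the libsyntax type:

```rust
// Sketch of the primary-spans-plus-labels bookkeeping described above.
#[derive(Copy, Clone, PartialEq, Debug)]
struct Span { lo: u32, hi: u32 }

#[derive(Debug)]
struct SpanLabel { span: Span, is_primary: bool, label: Option<String> }

struct MultiSpan {
    primary_spans: Vec<Span>,
    span_labels: Vec<(Span, String)>,
}

impl MultiSpan {
    fn from_span(primary: Span) -> MultiSpan {
        MultiSpan { primary_spans: vec![primary], span_labels: vec![] }
    }

    fn push_span_label(&mut self, span: Span, label: String) {
        self.span_labels.push((span, label));
    }

    fn span_labels(&self) -> Vec<SpanLabel> {
        let mut out: Vec<SpanLabel> = self.span_labels.iter()
            .map(|&(span, ref label)| SpanLabel {
                span: span,
                is_primary: self.primary_spans.contains(&span),
                label: Some(label.clone()),
            })
            .collect();
        // Primary spans with no label still get an (unlabelled) entry.
        for &span in &self.primary_spans {
            if !out.iter().any(|sl| sl.span == span) {
                out.push(SpanLabel { span: span, is_primary: true, label: None });
            }
        }
        out
    }
}

fn main() {
    let mut msp = MultiSpan::from_span(Span { lo: 0, hi: 5 });
    msp.push_span_label(Span { lo: 10, hi: 12 }, "borrow ends here".to_string());
    for sl in msp.span_labels() {
        println!("{:?}", sl);
    }
}
```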
- pub fn push_trim(&mut self, mut sp: Span) { - let mut prev = mk_sp(BytePos(0), BytePos(0)); + pub fn push_span_label(&mut self, span: Span, label: String) { + self.span_labels.push((span, label)); + } - if let Some(first) = self.spans.get_mut(0) { - if first.lo > sp.lo { - // Prevent us here from spanning fewer lines - // because of trimming the start of the span - // (this should not be visible, because this method ought - // to not be used in conjunction with `highlight_lines`) - first.lo = sp.lo; - } + /// Selects the first primary span (if any) + pub fn primary_span(&self) -> Option { + self.primary_spans.first().cloned() + } + + /// Returns all primary spans. + pub fn primary_spans(&self) -> &[Span] { + &self.primary_spans + } + + /// Returns the strings to highlight. We always ensure that there + /// is an entry for each of the primary spans -- for each primary + /// span P, if there is at least one label with span P, we return + /// those labels (marked as primary). But otherwise we return + /// `SpanLabel` instances with empty labels. + pub fn span_labels(&self) -> Vec { + let is_primary = |span| self.primary_spans.contains(&span); + let mut span_labels = vec![]; + + for &(span, ref label) in &self.span_labels { + span_labels.push(SpanLabel { + span: span, + is_primary: is_primary(span), + label: Some(label.clone()) + }); } - for idx in 0.. { - if let Some(sp_trim) = sp.trim_start(prev) { - // Implies `sp.hi > prev.hi` - let cur = match self.spans.get(idx) { - Some(s) => *s, - None => { - sp = sp_trim; - break; - } - }; - // `cur` may overlap with `sp_trim` - if let Some(cur_trim) = cur.trim_start(sp_trim) { - // Implies `sp.hi < cur.hi` - self.spans.insert(idx, sp_trim); - self.spans[idx + 1] = cur_trim; - return; - } else if sp.hi == cur.hi { - return; - } - prev = cur; + for &span in &self.primary_spans { + if !span_labels.iter().any(|sl| sl.span == span) { + span_labels.push(SpanLabel { + span: span, + is_primary: true, + label: None + }); } } - self.spans.push(sp); + + span_labels } } impl From for MultiSpan { fn from(span: Span) -> MultiSpan { - MultiSpan { spans: vec![span] } + MultiSpan::from_span(span) } } @@ -801,7 +798,7 @@ impl CodeMap { /// Creates a new filemap and sets its line information. pub fn new_filemap_and_lines(&self, filename: &str, src: &str) -> Rc { let fm = self.new_filemap(filename.to_string(), src.to_owned()); - let mut byte_pos: u32 = 0; + let mut byte_pos: u32 = fm.start_pos.0; for line in src.lines() { // register the start of this line fm.next_line(BytePos(byte_pos)); @@ -929,6 +926,10 @@ impl CodeMap { } pub fn span_to_string(&self, sp: Span) -> String { + if sp == COMMAND_LINE_SP { + return "".to_string(); + } + if self.files.borrow().is_empty() && sp.source_equal(&DUMMY_SP) { return "no-location".to_string(); } @@ -1099,12 +1100,16 @@ impl CodeMap { } pub fn span_to_lines(&self, sp: Span) -> FileLinesResult { + debug!("span_to_lines(sp={:?})", sp); + if sp.lo > sp.hi { return Err(SpanLinesError::IllFormedSpan(sp)); } let lo = self.lookup_char_pos(sp.lo); + debug!("span_to_lines: lo={:?}", lo); let hi = self.lookup_char_pos(sp.hi); + debug!("span_to_lines: hi={:?}", hi); if lo.file.start_pos != hi.file.start_pos { return Err(SpanLinesError::DistinctSources(DistinctSources { @@ -1125,7 +1130,9 @@ impl CodeMap { // numbers in Loc are 1-based, so we subtract 1 to get 0-based // lines. for line_index in lo.line-1 .. 
hi.line-1 { - let line_len = lo.file.get_line(line_index).map(|s| s.len()).unwrap_or(0); + let line_len = lo.file.get_line(line_index) + .map(|s| s.chars().count()) + .unwrap_or(0); lines.push(LineInfo { line_index: line_index, start_col: start_col, end_col: CharPos::from_usize(line_len) }); @@ -1184,59 +1191,6 @@ impl CodeMap { } } - /// Groups and sorts spans by lines into `MultiSpan`s, where `push` adds them to their group, - /// specifying the unification behaviour for overlapping spans. - /// Spans overflowing a line are put into their own one-element-group. - pub fn custom_group_spans(&self, mut spans: Vec, push: F) -> Vec - where F: Fn(&mut MultiSpan, Span) - { - spans.sort_by(|a, b| a.lo.cmp(&b.lo)); - let mut groups = Vec::::new(); - let mut overflowing = vec![]; - let mut prev_expn = ExpnId(!2u32); - let mut prev_file = !0usize; - let mut prev_line = !0usize; - let mut err_size = 0; - - for sp in spans { - let line = self.lookup_char_pos(sp.lo).line; - let line_hi = self.lookup_char_pos(sp.hi).line; - if line != line_hi { - overflowing.push(sp.into()); - continue - } - let file = self.lookup_filemap_idx(sp.lo); - - if err_size < MAX_HIGHLIGHT_LINES && sp.expn_id == prev_expn && file == prev_file { - // `push` takes care of sorting, trimming, and merging - push(&mut groups.last_mut().unwrap(), sp); - if line != prev_line { - err_size += 1; - } - } else { - groups.push(sp.into()); - err_size = 1; - } - prev_expn = sp.expn_id; - prev_file = file; - prev_line = line; - } - groups.extend(overflowing); - groups - } - - /// Groups and sorts spans by lines into `MultiSpan`s, merging overlapping spans. - /// Spans overflowing a line are put into their own one-element-group. - pub fn group_spans(&self, spans: Vec) -> Vec { - self.custom_group_spans(spans, |msp, sp| msp.push_merge(sp)) - } - - /// Like `group_spans`, but trims overlapping spans instead of - /// merging them (for use with `end_highlight_lines`) - pub fn end_group_spans(&self, spans: Vec) -> Vec { - self.custom_group_spans(spans, |msp, sp| msp.push_trim(sp)) - } - pub fn get_filemap(&self, filename: &str) -> Rc { for fm in self.files.borrow().iter() { if filename == fm.name { @@ -1394,6 +1348,56 @@ impl CodeMap { pub fn count_lines(&self) -> usize { self.files.borrow().iter().fold(0, |a, f| a + f.count_lines()) } + + pub fn macro_backtrace(&self, span: Span) -> Vec { + let mut last_span = DUMMY_SP; + let mut span = span; + let mut result = vec![]; + loop { + let span_name_span = self.with_expn_info(span.expn_id, |expn_info| { + expn_info.map(|ei| { + let (pre, post) = match ei.callee.format { + MacroAttribute(..) => ("#[", "]"), + MacroBang(..) => ("", "!"), + }; + let macro_decl_name = format!("{}{}{}", + pre, + ei.callee.name(), + post); + let def_site_span = ei.callee.span; + (ei.call_site, macro_decl_name, def_site_span) + }) + }); + + match span_name_span { + None => break, + Some((call_site, macro_decl_name, def_site_span)) => { + // Don't print recursive invocations + if !call_site.source_equal(&last_span) { + result.push(MacroBacktrace { + call_site: call_site, + macro_decl_name: macro_decl_name, + def_site_span: def_site_span, + }); + } + last_span = span; + span = call_site; + } + } + } + result + } +} + +pub struct MacroBacktrace { + /// span where macro was applied to generate this code + pub call_site: Span, + + /// name of macro that was applied (e.g., "foo!" 
or "#[derive(Eq)]") + pub macro_decl_name: String, + + /// span where macro was defined (if known) + pub def_site_span: Option, } // _____________________________________________________________________________ @@ -1586,13 +1590,13 @@ mod tests { assert_eq!(file_lines.lines[0].line_index, 1); } - /// Given a string like " ^~~~~~~~~~~~ ", produces a span + /// Given a string like " ~~~~~~~~~~~~ ", produces a span /// coverting that range. The idea is that the string has the same /// length as the input, and we uncover the byte positions. Note /// that this can span lines and so on. fn span_from_selection(input: &str, selection: &str) -> Span { assert_eq!(input.len(), selection.len()); - let left_index = selection.find('^').unwrap() as u32; + let left_index = selection.find('~').unwrap() as u32; let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), expn_id: NO_EXPANSION } } @@ -1603,7 +1607,7 @@ mod tests { fn span_to_snippet_and_lines_spanning_multiple_lines() { let cm = CodeMap::new(); let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; - let selection = " \n ^~\n~~~\n~~~~~ \n \n"; + let selection = " \n ~~\n~~~\n~~~~~ \n \n"; cm.new_filemap_and_lines("blork.rs", inputtext); let span = span_from_selection(inputtext, selection); @@ -1753,73 +1757,4 @@ r"blork2.rs:2:1: 2:12 "; assert_eq!(sstr, res_str); } - - #[test] - fn t13() { - // Test that collecting multiple spans into line-groups works correctly - let cm = CodeMap::new(); - let inp = "_aaaaa__bbb\nvv\nw\nx\ny\nz\ncccccc__ddddee__"; - let sp1 = " ^~~~~ \n \n \n \n \n \n "; - let sp2 = " \n \n \n \n \n^\n "; - let sp3 = " ^~~\n~~\n \n \n \n \n "; - let sp4 = " \n \n \n \n \n \n^~~~~~ "; - let sp5 = " \n \n \n \n \n \n ^~~~ "; - let sp6 = " \n \n \n \n \n \n ^~~~ "; - let sp_trim = " \n \n \n \n \n \n ^~ "; - let sp_merge = " \n \n \n \n \n \n ^~~~~~ "; - let sp7 = " \n ^\n \n \n \n \n "; - let sp8 = " \n \n^\n \n \n \n "; - let sp9 = " \n \n \n^\n \n \n "; - let sp10 = " \n \n \n \n^\n \n "; - - let span = |sp, expected| { - let sp = span_from_selection(inp, sp); - assert_eq!(&cm.span_to_snippet(sp).unwrap(), expected); - sp - }; - - cm.new_filemap_and_lines("blork.rs", inp); - let sp1 = span(sp1, "aaaaa"); - let sp2 = span(sp2, "z"); - let sp3 = span(sp3, "bbb\nvv"); - let sp4 = span(sp4, "cccccc"); - let sp5 = span(sp5, "dddd"); - let sp6 = span(sp6, "ddee"); - let sp7 = span(sp7, "v"); - let sp8 = span(sp8, "w"); - let sp9 = span(sp9, "x"); - let sp10 = span(sp10, "y"); - let sp_trim = span(sp_trim, "ee"); - let sp_merge = span(sp_merge, "ddddee"); - - let spans = vec![sp5, sp2, sp4, sp9, sp10, sp7, sp3, sp8, sp1, sp6]; - - macro_rules! 
check_next { - ($groups: expr, $expected: expr) => ({ - let actual = $groups.next().map(|g|&g.spans[..]); - let expected = $expected; - println!("actual:\n{:?}\n", actual); - println!("expected:\n{:?}\n", expected); - assert_eq!(actual, expected.as_ref().map(|x|&x[..])); - }); - } - - let _groups = cm.group_spans(spans.clone()); - let it = &mut _groups.iter(); - - check_next!(it, Some([sp1, sp7, sp8, sp9, sp10, sp2])); - // New group because we're exceeding MAX_HIGHLIGHT_LINES - check_next!(it, Some([sp4, sp_merge])); - check_next!(it, Some([sp3])); - check_next!(it, None::<[Span; 0]>); - - let _groups = cm.end_group_spans(spans); - let it = &mut _groups.iter(); - - check_next!(it, Some([sp1, sp7, sp8, sp9, sp10, sp2])); - // New group because we're exceeding MAX_HIGHLIGHT_LINES - check_next!(it, Some([sp4, sp5, sp_trim])); - check_next!(it, Some([sp3])); - check_next!(it, None::<[Span; 0]>); - } } diff --git a/src/libsyntax/diagnostics/macros.rs b/src/libsyntax/diagnostics/macros.rs index 95a74d8755..25e0428248 100644 --- a/src/libsyntax/diagnostics/macros.rs +++ b/src/libsyntax/diagnostics/macros.rs @@ -101,9 +101,9 @@ macro_rules! span_help { } #[macro_export] -macro_rules! fileline_help { - ($err:expr, $span:expr, $($message:tt)*) => ({ - ($err).fileline_help($span, &format!($($message)*)); +macro_rules! help { + ($err:expr, $($message:tt)*) => ({ + ($err).help(&format!($($message)*)); }) } diff --git a/src/libsyntax/diagnostics/plugin.rs b/src/libsyntax/diagnostics/plugin.rs index 43b4a201af..26088b1242 100644 --- a/src/libsyntax/diagnostics/plugin.rs +++ b/src/libsyntax/diagnostics/plugin.rs @@ -54,7 +54,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt, token_tree: &[TokenTree]) -> Box { let code = match (token_tree.len(), token_tree.get(0)) { - (1, Some(&TokenTree::Token(_, token::Ident(code, _)))) => code, + (1, Some(&TokenTree::Token(_, token::Ident(code)))) => code, _ => unreachable!() }; @@ -92,10 +92,10 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt, token_tree.get(1), token_tree.get(2) ) { - (1, Some(&TokenTree::Token(_, token::Ident(ref code, _))), None, None) => { + (1, Some(&TokenTree::Token(_, token::Ident(ref code))), None, None) => { (code, None) }, - (3, Some(&TokenTree::Token(_, token::Ident(ref code, _))), + (3, Some(&TokenTree::Token(_, token::Ident(ref code))), Some(&TokenTree::Token(_, token::Comma)), Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => { (code, Some(description)) @@ -160,9 +160,9 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt, let (crate_name, name) = match (&token_tree[0], &token_tree[2]) { ( // Crate name. - &TokenTree::Token(_, token::Ident(ref crate_name, _)), + &TokenTree::Token(_, token::Ident(ref crate_name)), // DIAGNOSTICS ident. 
- &TokenTree::Token(_, token::Ident(ref name, _)) + &TokenTree::Token(_, token::Ident(ref name)) ) => (*&crate_name, name), _ => unreachable!() }; diff --git a/src/libsyntax/errors/emitter.rs b/src/libsyntax/errors/emitter.rs index 61fdc8453d..7c9985d7d2 100644 --- a/src/libsyntax/errors/emitter.rs +++ b/src/libsyntax/errors/emitter.rs @@ -10,12 +10,14 @@ use self::Destination::*; -use codemap::{self, COMMAND_LINE_SP, DUMMY_SP, Pos, Span, MultiSpan}; +use codemap::{self, COMMAND_LINE_SP, DUMMY_SP, Span, MultiSpan}; use diagnostics; +use errors::check_old_skool; use errors::{Level, RenderSpan, CodeSuggestion, DiagnosticBuilder}; use errors::RenderSpan::*; use errors::Level::*; +use errors::snippet::{RenderedLineKind, SnippetData, Style}; use std::{cmp, fmt}; use std::io::prelude::*; @@ -23,17 +25,73 @@ use std::io; use std::rc::Rc; use term; +/// Emitter trait for emitting errors. Do not implement this directly: +/// implement `CoreEmitter` instead. pub trait Emitter { - fn emit(&mut self, span: Option<&MultiSpan>, msg: &str, code: Option<&str>, lvl: Level); - fn custom_emit(&mut self, sp: &RenderSpan, msg: &str, lvl: Level); + /// Emit a standalone diagnostic message. + fn emit(&mut self, span: &MultiSpan, msg: &str, code: Option<&str>, lvl: Level); /// Emit a structured diagnostic. + fn emit_struct(&mut self, db: &DiagnosticBuilder); +} + +pub trait CoreEmitter { + fn emit_message(&mut self, + rsp: &RenderSpan, + msg: &str, + code: Option<&str>, + lvl: Level, + is_header: bool, + show_snippet: bool); +} + +impl Emitter for T { + fn emit(&mut self, + msp: &MultiSpan, + msg: &str, + code: Option<&str>, + lvl: Level) { + self.emit_message(&FullSpan(msp.clone()), + msg, + code, + lvl, + true, + true); + } + fn emit_struct(&mut self, db: &DiagnosticBuilder) { - self.emit(db.span.as_ref(), &db.message, db.code.as_ref().map(|s| &**s), db.level); + let old_school = check_old_skool(); + let db_span = FullSpan(db.span.clone()); + self.emit_message(&FullSpan(db.span.clone()), + &db.message, + db.code.as_ref().map(|s| &**s), + db.level, + true, + true); for child in &db.children { - match child.render_span { - Some(ref sp) => self.custom_emit(sp, &child.message, child.level), - None => self.emit(child.span.as_ref(), &child.message, None, child.level), + let render_span = child.render_span + .clone() + .unwrap_or_else( + || FullSpan(child.span.clone())); + + if !old_school { + self.emit_message(&render_span, + &child.message, + None, + child.level, + false, + true); + } else { + let (render_span, show_snippet) = match render_span.span().primary_span() { + None => (db_span.clone(), false), + _ => (render_span, true) + }; + self.emit_message(&render_span, + &child.message, + None, + child.level, + false, + show_snippet); } } } @@ -42,9 +100,6 @@ pub trait Emitter { /// maximum number of lines we will print for each error; arbitrary. pub const MAX_HIGHLIGHT_LINES: usize = 6; -/// maximum number of lines we will print for each span; arbitrary. 
-const MAX_SP_LINES: usize = 6; - #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub enum ColorConfig { Auto, @@ -68,21 +123,18 @@ pub struct BasicEmitter { dst: Destination, } -impl Emitter for BasicEmitter { - fn emit(&mut self, - msp: Option<&MultiSpan>, - msg: &str, - code: Option<&str>, - lvl: Level) { - assert!(msp.is_none(), "BasicEmitter can't handle spans"); +impl CoreEmitter for BasicEmitter { + fn emit_message(&mut self, + _rsp: &RenderSpan, + msg: &str, + code: Option<&str>, + lvl: Level, + _is_header: bool, + _show_snippet: bool) { + // we ignore the span as we have no access to a codemap at this point if let Err(e) = print_diagnostic(&mut self.dst, "", lvl, msg, code) { panic!("failed to print diagnostics: {:?}", e); } - - } - - fn custom_emit(&mut self, _: &RenderSpan, _: &str, _: Level) { - panic!("BasicEmitter can't handle custom_emit"); } } @@ -101,33 +153,26 @@ pub struct EmitterWriter { dst: Destination, registry: Option, cm: Rc, -} -impl Emitter for EmitterWriter { - fn emit(&mut self, - msp: Option<&MultiSpan>, - msg: &str, - code: Option<&str>, - lvl: Level) { - let error = match msp.map(|s|(s.to_span_bounds(), s)) { - Some((COMMAND_LINE_SP, msp)) => { - self.emit_(&FileLine(msp.clone()), msg, code, lvl) - }, - Some((DUMMY_SP, _)) | None => print_diagnostic(&mut self.dst, "", lvl, msg, code), - Some((_, msp)) => self.emit_(&FullSpan(msp.clone()), msg, code, lvl), - }; + /// Is this the first error emitted thus far? If not, we emit a + /// `\n` before the top-level errors. + first: bool, - if let Err(e) = error { - panic!("failed to print diagnostics: {:?}", e); - } - } + // For now, allow an old-school mode while we transition + old_school: bool, +} - fn custom_emit(&mut self, - rsp: &RenderSpan, - msg: &str, - lvl: Level) { - if let Err(e) = self.emit_(rsp, msg, None, lvl) { - panic!("failed to print diagnostics: {:?}", e); +impl CoreEmitter for EmitterWriter { + fn emit_message(&mut self, + rsp: &RenderSpan, + msg: &str, + code: Option<&str>, + lvl: Level, + is_header: bool, + show_snippet: bool) { + match self.emit_message_(rsp, msg, code, lvl, is_header, show_snippet) { + Ok(()) => { } + Err(e) => panic!("failed to emit error: {}", e) } } } @@ -151,11 +196,20 @@ impl EmitterWriter { registry: Option, code_map: Rc) -> EmitterWriter { + let old_school = check_old_skool(); if color_config.use_color() { let dst = Destination::from_stderr(); - EmitterWriter { dst: dst, registry: registry, cm: code_map } + EmitterWriter { dst: dst, + registry: registry, + cm: code_map, + first: true, + old_school: old_school } } else { - EmitterWriter { dst: Raw(Box::new(io::stderr())), registry: registry, cm: code_map } + EmitterWriter { dst: Raw(Box::new(io::stderr())), + registry: registry, + cm: code_map, + first: true, + old_school: old_school } } } @@ -163,53 +217,108 @@ impl EmitterWriter { registry: Option, code_map: Rc) -> EmitterWriter { - EmitterWriter { dst: Raw(dst), registry: registry, cm: code_map } - } + let old_school = check_old_skool(); + EmitterWriter { dst: Raw(dst), + registry: registry, + cm: code_map, + first: true, + old_school: old_school } + } + + fn emit_message_(&mut self, + rsp: &RenderSpan, + msg: &str, + code: Option<&str>, + lvl: Level, + is_header: bool, + show_snippet: bool) + -> io::Result<()> { + if is_header { + if self.first { + self.first = false; + } else { + if !self.old_school { + write!(self.dst, "\n")?; + } + } + } - fn emit_(&mut self, - rsp: &RenderSpan, - msg: &str, - code: Option<&str>, - lvl: Level) - -> io::Result<()> { - let msp = 
rsp.span(); - let bounds = msp.to_span_bounds(); - - let ss = if bounds == COMMAND_LINE_SP { - "".to_string() - } else if let EndSpan(_) = *rsp { - let span_end = Span { lo: bounds.hi, hi: bounds.hi, expn_id: bounds.expn_id}; - self.cm.span_to_string(span_end) - } else { - self.cm.span_to_string(bounds) - }; + match code { + Some(code) if self.registry.as_ref() + .and_then(|registry| registry.find_description(code)) + .is_some() => { + let code_with_explain = String::from("--explain ") + code; + if self.old_school { + let loc = match rsp.span().primary_span() { + Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(), + Some(ps) => self.cm.span_to_string(ps), + None => "".to_string() + }; + print_diagnostic(&mut self.dst, &loc, lvl, msg, Some(code))? + } + else { + print_diagnostic(&mut self.dst, "", lvl, msg, Some(&code_with_explain))? + } + } + _ => { + if self.old_school { + let loc = match rsp.span().primary_span() { + Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(), + Some(ps) => self.cm.span_to_string(ps), + None => "".to_string() + }; + print_diagnostic(&mut self.dst, &loc, lvl, msg, code)? + } + else { + print_diagnostic(&mut self.dst, "", lvl, msg, code)? + } + } + } - print_diagnostic(&mut self.dst, &ss[..], lvl, msg, code)?; + if !show_snippet { + return Ok(()); + } + // Watch out for various nasty special spans; don't try to + // print any filename or anything for those. + match rsp.span().primary_span() { + Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => { + return Ok(()); + } + _ => { } + } + + // Otherwise, print out the snippet etc as needed. match *rsp { - FullSpan(_) => { + FullSpan(ref msp) => { self.highlight_lines(msp, lvl)?; - self.print_macro_backtrace(bounds)?; - } - EndSpan(_) => { - self.end_highlight_lines(msp, lvl)?; - self.print_macro_backtrace(bounds)?; + if let Some(primary_span) = msp.primary_span() { + self.print_macro_backtrace(primary_span)?; + } } Suggestion(ref suggestion) => { self.highlight_suggestion(suggestion)?; - self.print_macro_backtrace(bounds)?; - } - FileLine(..) => { - // no source text in this case! + if let Some(primary_span) = rsp.span().primary_span() { + self.print_macro_backtrace(primary_span)?; + } } } - - if let Some(code) = code { - if let Some(_) = self.registry.as_ref() - .and_then(|registry| registry.find_description(code)) { - print_diagnostic(&mut self.dst, &ss[..], Help, - &format!("run `rustc --explain {}` to see a \ - detailed explanation", code), None)?; + if self.old_school { + match code { + Some(code) if self.registry.as_ref() + .and_then(|registry| registry.find_description(code)) + .is_some() => { + let loc = match rsp.span().primary_span() { + Some(COMMAND_LINE_SP) | Some(DUMMY_SP) => "".to_string(), + Some(ps) => self.cm.span_to_string(ps), + None => "".to_string() + }; + let msg = "run `rustc --explain ".to_string() + &code.to_string() + + "` to see a detailed explanation"; + print_diagnostic(&mut self.dst, &loc, Level::Help, &msg, + None)? 
+ } + _ => () } } Ok(()) @@ -217,7 +326,8 @@ impl EmitterWriter { fn highlight_suggestion(&mut self, suggestion: &CodeSuggestion) -> io::Result<()> { - let lines = self.cm.span_to_lines(suggestion.msp.to_span_bounds()).unwrap(); + let primary_span = suggestion.msp.primary_span().unwrap(); + let lines = self.cm.span_to_lines(primary_span).unwrap(); assert!(!lines.lines.is_empty()); let complete = suggestion.splice_lines(&self.cm); @@ -251,325 +361,53 @@ impl EmitterWriter { lvl: Level) -> io::Result<()> { - let lines = match self.cm.span_to_lines(msp.to_span_bounds()) { - Ok(lines) => lines, - Err(_) => { - write!(&mut self.dst, "(internal compiler error: unprintable span)\n")?; - return Ok(()); - } - }; - - let fm = &*lines.file; - if let None = fm.src { - return Ok(()); - } - - let display_line_infos = &lines.lines[..]; - assert!(display_line_infos.len() > 0); - - // Calculate the widest number to format evenly and fix #11715 - let digits = line_num_max_digits(display_line_infos.last().unwrap()); - let first_line_index = display_line_infos.first().unwrap().line_index; - - let skip = fm.name.chars().count() + digits + 2; - - let mut spans = msp.spans.iter().peekable(); - let mut lines = display_line_infos.iter(); - let mut prev_line_index = first_line_index.wrapping_sub(1); - - // Display at most MAX_HIGHLIGHT_LINES lines. - let mut remaining_err_lines = MAX_HIGHLIGHT_LINES; - - // To emit a overflowed spans code-lines *AFTER* the rendered spans - let mut overflowed_buf = String::new(); - let mut overflowed = false; - - // FIXME (#8706) - 'l: loop { - if remaining_err_lines <= 0 { - break; - } - let line = match lines.next() { - Some(l) => l, - None => break, - }; - - // Skip is the number of characters we need to skip because they are - // part of the 'filename:line ' part of the code line. 
- let mut s: String = ::std::iter::repeat(' ').take(skip).collect(); - let mut col = skip; - let mut lastc = ' '; - - let cur_line_str = fm.get_line(line.line_index).unwrap(); - let mut line_chars = cur_line_str.chars().enumerate().peekable(); - let mut line_spans = 0; - - // Assemble spans for this line - loop { - // Peek here to preserve the span if it doesn't belong to this line - let sp = match spans.peek() { - Some(sp) => **sp, - None => break, - }; - let lo = self.cm.lookup_char_pos(sp.lo); - let hi = self.cm.lookup_char_pos(sp.hi); - let line_num = line.line_index + 1; - - if !(lo.line <= line_num && hi.line >= line_num) { - // This line is not contained in the span - if overflowed { - // Never elide the final line of an overflowed span - prev_line_index = line.line_index - 1; - overflowed = false; - break; - } - - if line_spans == 0 { - continue 'l; - } else { - // This line is finished, now render the spans we've assembled - break; - } + let mut snippet_data = SnippetData::new(self.cm.clone(), + msp.primary_span()); + if self.old_school { + let mut output_vec = vec![]; + + for span_label in msp.span_labels() { + let mut snippet_data = SnippetData::new(self.cm.clone(), + Some(span_label.span)); + + snippet_data.push(span_label.span, + span_label.is_primary, + span_label.label); + if span_label.is_primary { + output_vec.insert(0, snippet_data); } - spans.next(); - line_spans += 1; - - if lo.line != hi.line { - // Assemble extra code lines to be emitted after this lines spans - // (substract `2` because the first and last line are rendered normally) - let max_lines = cmp::min(remaining_err_lines, MAX_SP_LINES) - 2; - prev_line_index = line.line_index; - let count = cmp::min((hi.line - lo.line - 1), max_lines); - for _ in 0..count { - let line = match lines.next() { - Some(l) => l, - None => break, - }; - let line_str = fm.get_line(line.line_index).unwrap(); - overflowed_buf.push_str(&format!("{}:{:>width$} {}\n", - fm.name, - line.line_index + 1, - line_str, - width=digits)); - remaining_err_lines -= 1; - prev_line_index += 1 - } - // Remember that the span overflowed to ensure - // that we emit its last line exactly once - // (other spans may, or may not, start on it) - overflowed = true; - break; - } - - for (pos, ch) in line_chars.by_ref() { - lastc = ch; - if pos >= lo.col.to_usize() { break; } - // Whenever a tab occurs on the code line, we insert one on - // the error-point-squiggly-line as well (instead of a space). - // That way the squiggly line will usually appear in the correct - // position. 
- match ch { - '\t' => { - col += 8 - col%8; - s.push('\t'); - }, - _ => { - col += 1; - s.push(' '); - }, - } + else { + output_vec.push(snippet_data); } + } - s.push('^'); - let col_ptr = col; - let count = match lastc { - // Most terminals have a tab stop every eight columns by default - '\t' => 8 - col%8, - _ => 1, - }; - col += count; - s.extend(::std::iter::repeat('~').take(count)); - - let hi = self.cm.lookup_char_pos(sp.hi); - if hi.col != lo.col { - let mut chars = line_chars.by_ref(); - loop { - // We peek here to preserve the value for the next span - let (pos, ch) = match chars.peek() { - Some(elem) => *elem, - None => break, - }; - if pos >= hi.col.to_usize() { break; } - let count = match ch { - '\t' => 8 - col%8, - _ => 1, - }; - col += count; - s.extend(::std::iter::repeat('~').take(count)); - - chars.next(); + for snippet_data in output_vec.iter() { + let rendered_lines = snippet_data.render_lines(); + for rendered_line in &rendered_lines { + for styled_string in &rendered_line.text { + self.dst.apply_style(lvl, &rendered_line.kind, styled_string.style)?; + write!(&mut self.dst, "{}", styled_string.text)?; + self.dst.reset_attrs()?; } + write!(&mut self.dst, "\n")?; } - if (col - col_ptr) > 0 { - // One extra squiggly is replaced by a "^" - s.pop(); - } - } - - // If we elided something put an ellipsis. - if prev_line_index != line.line_index.wrapping_sub(1) && !overflowed { - write!(&mut self.dst, "{0:1$}...\n", "", skip)?; - } - - // Print offending code-line - remaining_err_lines -= 1; - write!(&mut self.dst, "{}:{:>width$} {}\n", - fm.name, - line.line_index + 1, - cur_line_str, - width=digits)?; - - if s.len() > skip { - // Render the spans we assembled previously (if any). - println_maybe_styled!(&mut self.dst, term::Attr::ForegroundColor(lvl.color()), - "{}", s)?; - } - - if !overflowed_buf.is_empty() { - // Print code-lines trailing the rendered spans (when a span overflows) - write!(&mut self.dst, "{}", &overflowed_buf)?; - overflowed_buf.clear(); - } else { - prev_line_index = line.line_index; } } - - // If we elided something, put an ellipsis. - if lines.next().is_some() { - write!(&mut self.dst, "{0:1$}...\n", "", skip)?; - } - Ok(()) - } - - /// Here are the differences between this and the normal `highlight_lines`: - /// `end_highlight_lines` will always put arrow on the last byte of each - /// span (instead of the first byte). Also, when a span is too long (more - /// than 6 lines), `end_highlight_lines` will print the first line, then - /// dot dot dot, then last line, whereas `highlight_lines` prints the first - /// six lines. 
- #[allow(deprecated)] - fn end_highlight_lines(&mut self, - msp: &MultiSpan, - lvl: Level) - -> io::Result<()> { - let lines = match self.cm.span_to_lines(msp.to_span_bounds()) { - Ok(lines) => lines, - Err(_) => { - write!(&mut self.dst, "(internal compiler error: unprintable span)\n")?; - return Ok(()); + else { + for span_label in msp.span_labels() { + snippet_data.push(span_label.span, + span_label.is_primary, + span_label.label); } - }; - - let fm = &*lines.file; - if let None = fm.src { - return Ok(()); - } - - let lines = &lines.lines[..]; - - // Calculate the widest number to format evenly - let first_line = lines.first().unwrap(); - let last_line = lines.last().unwrap(); - let digits = line_num_max_digits(last_line); - - let skip = fm.name.chars().count() + digits + 2; - - let mut spans = msp.spans.iter().peekable(); - let mut lines = lines.iter(); - let mut prev_line_index = first_line.line_index.wrapping_sub(1); - - // Display at most MAX_HIGHLIGHT_LINES lines. - let mut remaining_err_lines = MAX_HIGHLIGHT_LINES; - - 'l: loop { - if remaining_err_lines <= 0 { - break; - } - let line = match lines.next() { - Some(line) => line, - None => break, - }; - - // Skip is the number of characters we need to skip because they are - // part of the 'filename:line ' part of the previous line. - let mut s: String = ::std::iter::repeat(' ').take(skip).collect(); - - let line_str = fm.get_line(line.line_index).unwrap(); - let mut line_chars = line_str.chars().enumerate(); - let mut line_spans = 0; - - loop { - // Peek here to preserve the span if it doesn't belong to this line - let sp = match spans.peek() { - Some(sp) => **sp, - None => break, - }; - let lo = self.cm.lookup_char_pos(sp.lo); - let hi = self.cm.lookup_char_pos(sp.hi); - let elide_sp = (hi.line - lo.line) >= MAX_SP_LINES; - - let line_num = line.line_index + 1; - if !(lo.line <= line_num && hi.line >= line_num) { - // This line is not contained in the span - if line_spans == 0 { - continue 'l; - } else { - // This line is finished, now render the spans we've assembled - break - } - } else if hi.line > line_num { - if elide_sp && lo.line < line_num { - // This line is inbetween the first and last line of the span, - // so we may want to elide it. - continue 'l; - } else { - break - } + let rendered_lines = snippet_data.render_lines(); + for rendered_line in &rendered_lines { + for styled_string in &rendered_line.text { + self.dst.apply_style(lvl, &rendered_line.kind, styled_string.style)?; + write!(&mut self.dst, "{}", styled_string.text)?; + self.dst.reset_attrs()?; } - line_spans += 1; - spans.next(); - - for (pos, ch) in line_chars.by_ref() { - // Span seems to use half-opened interval, so subtract 1 - if pos >= hi.col.to_usize() - 1 { break; } - // Whenever a tab occurs on the previous line, we insert one on - // the error-point-squiggly-line as well (instead of a space). - // That way the squiggly line will usually appear in the correct - // position. - match ch { - '\t' => s.push('\t'), - _ => s.push(' '), - } - } - s.push('^'); - } - - if prev_line_index != line.line_index.wrapping_sub(1) { - // If we elided something, put an ellipsis. 
- write!(&mut self.dst, "{0:1$}...\n", "", skip)?; - } - - // Print offending code-lines - write!(&mut self.dst, "{}:{:>width$} {}\n", fm.name, - line.line_index + 1, line_str, width=digits)?; - remaining_err_lines -= 1; - - if s.len() > skip { - // Render the spans we assembled previously (if any) - println_maybe_styled!(&mut self.dst, term::Attr::ForegroundColor(lvl.color()), - "{}", s)?; + write!(&mut self.dst, "\n")?; } - prev_line_index = line.line_index; } Ok(()) } @@ -577,46 +415,17 @@ impl EmitterWriter { fn print_macro_backtrace(&mut self, sp: Span) -> io::Result<()> { - let mut last_span = codemap::DUMMY_SP; - let mut span = sp; - - loop { - let span_name_span = self.cm.with_expn_info(span.expn_id, |expn_info| { - expn_info.map(|ei| { - let (pre, post) = match ei.callee.format { - codemap::MacroAttribute(..) => ("#[", "]"), - codemap::MacroBang(..) => ("", "!"), - }; - let macro_decl_name = format!("in this expansion of {}{}{}", - pre, - ei.callee.name(), - post); - let def_site_span = ei.callee.span; - (ei.call_site, macro_decl_name, def_site_span) - }) - }); - let (macro_decl_name, def_site_span) = match span_name_span { - None => break, - Some((sp, macro_decl_name, def_site_span)) => { - span = sp; - (macro_decl_name, def_site_span) - } - }; - - // Don't print recursive invocations - if !span.source_equal(&last_span) { - let mut diag_string = macro_decl_name; - if let Some(def_site_span) = def_site_span { - diag_string.push_str(&format!(" (defined in {})", - self.cm.span_to_filename(def_site_span))); - } - - let snippet = self.cm.span_to_string(span); - print_diagnostic(&mut self.dst, &snippet, Note, &diag_string, None)?; + for trace in self.cm.macro_backtrace(sp) { + let mut diag_string = + format!("in this expansion of {}", trace.macro_decl_name); + if let Some(def_site_span) = trace.def_site_span { + diag_string.push_str( + &format!(" (defined in {})", + self.cm.span_to_filename(def_site_span))); } - last_span = span; + let snippet = self.cm.span_to_string(trace.call_site); + print_diagnostic(&mut self.dst, &snippet, Note, &diag_string, None)?; } - Ok(()) } } @@ -638,17 +447,29 @@ fn print_diagnostic(dst: &mut Destination, code: Option<&str>) -> io::Result<()> { if !topic.is_empty() { - write!(dst, "{} ", topic)?; + let old_school = check_old_skool(); + if !old_school { + write!(dst, "{}: ", topic)?; + } + else { + write!(dst, "{} ", topic)?; + } + dst.reset_attrs()?; } - - print_maybe_styled!(dst, term::Attr::ForegroundColor(lvl.color()), - "{}: ", lvl.to_string())?; - print_maybe_styled!(dst, term::Attr::Bold, "{}", msg)?; + dst.start_attr(term::Attr::Bold)?; + dst.start_attr(term::Attr::ForegroundColor(lvl.color()))?; + write!(dst, "{}", lvl.to_string())?; + dst.reset_attrs()?; + write!(dst, ": ")?; + dst.start_attr(term::Attr::Bold)?; + write!(dst, "{}", msg)?; if let Some(code) = code { let style = term::Attr::ForegroundColor(term::color::BRIGHT_MAGENTA); print_maybe_styled!(dst, style, " [{}]", code.clone())?; } + + dst.reset_attrs()?; write!(dst, "\n")?; Ok(()) } @@ -689,6 +510,58 @@ impl Destination { } } + fn apply_style(&mut self, + lvl: Level, + _kind: &RenderedLineKind, + style: Style) + -> io::Result<()> { + match style { + Style::FileNameStyle | + Style::LineAndColumn => { + } + Style::LineNumber => { + self.start_attr(term::Attr::Bold)?; + self.start_attr(term::Attr::ForegroundColor(term::color::BRIGHT_BLUE))?; + } + Style::Quotation => { + } + Style::OldSkoolNote => { + self.start_attr(term::Attr::Bold)?; + 
self.start_attr(term::Attr::ForegroundColor(term::color::BRIGHT_GREEN))?; + } + Style::OldSkoolNoteText => { + self.start_attr(term::Attr::Bold)?; + } + Style::UnderlinePrimary | Style::LabelPrimary => { + self.start_attr(term::Attr::Bold)?; + self.start_attr(term::Attr::ForegroundColor(lvl.color()))?; + } + Style::UnderlineSecondary | Style::LabelSecondary => { + self.start_attr(term::Attr::Bold)?; + self.start_attr(term::Attr::ForegroundColor(term::color::BRIGHT_BLUE))?; + } + Style::NoStyle => { + } + } + Ok(()) + } + + fn start_attr(&mut self, attr: term::Attr) -> io::Result<()> { + match *self { + Terminal(ref mut t) => { t.attr(attr)?; } + Raw(_) => { } + } + Ok(()) + } + + fn reset_attrs(&mut self) -> io::Result<()> { + match *self { + Terminal(ref mut t) => { t.reset()?; } + Raw(_) => { } + } + Ok(()) + } + fn print_maybe_styled(&mut self, args: fmt::Arguments, color: term::Attr, @@ -770,7 +643,7 @@ mod test { /// that this can span lines and so on. fn span_from_selection(input: &str, selection: &str) -> Span { assert_eq!(input.len(), selection.len()); - let left_index = selection.find('^').unwrap() as u32; + let left_index = selection.find('~').unwrap() as u32; let right_index = selection.rfind('~').map(|x|x as u32).unwrap_or(left_index); Span { lo: BytePos(left_index), hi: BytePos(right_index + 1), expn_id: NO_EXPANSION } } @@ -796,7 +669,7 @@ mod test { dreizehn "; let file = cm.new_filemap_and_lines("dummy.txt", content); - let start = file.lines.borrow()[7]; + let start = file.lines.borrow()[10]; let end = file.lines.borrow()[11]; let sp = mk_sp(start, end); let lvl = Level::Error; @@ -806,12 +679,12 @@ mod test { let vec = data.lock().unwrap().clone(); let vec: &[u8] = &vec; let str = from_utf8(vec).unwrap(); - println!("{}", str); - assert_eq!(str, "dummy.txt: 8 line8\n\ - dummy.txt: 9 line9\n\ - dummy.txt:10 line10\n\ - dummy.txt:11 e-lä-vän\n\ - dummy.txt:12 tolv\n"); + println!("r#\"\n{}\"#", str); + assert_eq!(str, &r#" + --> dummy.txt:11:1 +11 |> e-lä-vän + |> ^ +"#[1..]); } #[test] @@ -819,7 +692,7 @@ mod test { // Test that a `MultiSpan` containing a single span splices a substition correctly let cm = CodeMap::new(); let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; - let selection = " \n ^~\n~~~\n~~~~~ \n \n"; + let selection = " \n ~~\n~~~\n~~~~~ \n \n"; cm.new_filemap_and_lines("blork.rs", inputtext); let sp = span_from_selection(inputtext, selection); let msp: MultiSpan = sp.into(); @@ -837,51 +710,25 @@ mod test { } #[test] - fn test_multiple_span_splice() { - // Test that a `MultiSpan` containing multiple spans splices substitions on - // several lines correctly + fn test_multi_span_splice() { + // Test that a `MultiSpan` containing multiple spans splices a substition correctly let cm = CodeMap::new(); - let inp = "aaaaabbbbBB\nZZ\nZZ\nCCCDDDDDdddddeee"; - let sp1 = " ^~~~~~\n \n \n "; - let sp2 = " \n \n \n^~~~~~ "; - let sp3 = " \n \n \n ^~~ "; - let sp4 = " \n \n \n ^~~~ "; - - let span_eq = |sp, eq| assert_eq!(&cm.span_to_snippet(sp).unwrap(), eq); - - cm.new_filemap_and_lines("blork.rs", inp); - let sp1 = span_from_selection(inp, sp1); - let sp2 = span_from_selection(inp, sp2); - let sp3 = span_from_selection(inp, sp3); - let sp4 = span_from_selection(inp, sp4); - span_eq(sp1, "bbbbBB"); - span_eq(sp2, "CCCDDD"); - span_eq(sp3, "ddd"); - span_eq(sp4, "ddee"); - - let substitutes: Vec = ["1", "2", "3", "4"].iter().map(|x|x.to_string()).collect(); - let expected = "aaaaa1\nZZ\nZZ\n2DD34e"; - - let test = |msp| { - let suggest = CodeSuggestion { - msp: 
msp, - substitutes: substitutes.clone(), - }; - let actual = suggest.splice_lines(&cm); - assert_eq!(actual, expected); + let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; + let selection1 = " \n \n \n \n ~ \n"; // intentionally out of order + let selection2 = " \n ~~\n~~~\n~~~~~ \n \n"; + cm.new_filemap_and_lines("blork.rs", inputtext); + let sp1 = span_from_selection(inputtext, selection1); + let sp2 = span_from_selection(inputtext, selection2); + let msp: MultiSpan = MultiSpan::from_spans(vec![sp1, sp2]); + + let expected = "bbbbZZZZZZddddd\neXYZe"; + let suggest = CodeSuggestion { + msp: msp, + substitutes: vec!["ZZZZZZ".to_owned(), + "XYZ".to_owned()] }; - test(MultiSpan { spans: vec![sp1, sp2, sp3, sp4] }); - - // Test ordering and merging by `MultiSpan::push` - let mut msp = MultiSpan::new(); - msp.push_merge(sp2); - msp.push_merge(sp1); - assert_eq!(&msp.spans, &[sp1, sp2]); - msp.push_merge(sp4); - assert_eq!(&msp.spans, &[sp1, sp2, sp4]); - msp.push_merge(sp3); - assert_eq!(&msp.spans, &[sp1, sp2, sp3, sp4]); - test(msp); + + assert_eq!(suggest.splice_lines(&cm), expected); } #[test] @@ -891,17 +738,17 @@ mod test { let mut diag = EmitterWriter::new(Box::new(Sink(data.clone())), None, cm.clone()); let inp = "_____aaaaaa____bbbbbb__cccccdd_"; - let sp1 = " ^~~~~~ "; - let sp2 = " ^~~~~~ "; - let sp3 = " ^~~~~ "; - let sp4 = " ^~~~ "; - let sp34 = " ^~~~~~~ "; - let sp4_end = " ^~ "; - - let expect_start = "dummy.txt:1 _____aaaaaa____bbbbbb__cccccdd_\n\ - \x20 ^~~~~~ ^~~~~~ ^~~~~~~\n"; - let expect_end = "dummy.txt:1 _____aaaaaa____bbbbbb__cccccdd_\n\ - \x20 ^ ^ ^ ^\n"; + let sp1 = " ~~~~~~ "; + let sp2 = " ~~~~~~ "; + let sp3 = " ~~~~~ "; + let sp4 = " ~~~~ "; + let sp34 = " ~~~~~~~ "; + + let expect_start = &r#" + --> dummy.txt:1:6 +1 |> _____aaaaaa____bbbbbb__cccccdd_ + |> ^^^^^^ ^^^^^^ ^^^^^^^ +"#[1..]; let span = |sp, expected| { let sp = span_from_selection(inp, sp); @@ -914,7 +761,6 @@ mod test { let sp3 = span(sp3, "ccccc"); let sp4 = span(sp4, "ccdd"); let sp34 = span(sp34, "cccccdd"); - let sp4_end = span(sp4_end, "dd"); let spans = vec![sp1, sp2, sp3, sp4]; @@ -923,26 +769,17 @@ mod test { highlight(); let vec = data.lock().unwrap().clone(); let actual = from_utf8(&vec[..]).unwrap(); + println!("actual=\n{}", actual); assert_eq!(actual, expected); }; - let msp = MultiSpan { spans: vec![sp1, sp2, sp34] }; - let msp_end = MultiSpan { spans: vec![sp1, sp2, sp3, sp4_end] }; + let msp = MultiSpan::from_spans(vec![sp1, sp2, sp34]); test(expect_start, &mut || { diag.highlight_lines(&msp, Level::Error).unwrap(); }); - test(expect_end, &mut || { - diag.end_highlight_lines(&msp_end, Level::Error).unwrap(); - }); test(expect_start, &mut || { - for msp in cm.group_spans(spans.clone()) { - diag.highlight_lines(&msp, Level::Error).unwrap(); - } - }); - test(expect_end, &mut || { - for msp in cm.end_group_spans(spans.clone()) { - diag.end_highlight_lines(&msp, Level::Error).unwrap(); - } + let msp = MultiSpan::from_spans(spans.clone()); + diag.highlight_lines(&msp, Level::Error).unwrap(); }); } @@ -979,75 +816,29 @@ mod test { let sp4 = span(10, 10, (2, 3)); let sp5 = span(10, 10, (4, 6)); - let expect0 = "dummy.txt: 5 ccccc\n\ - dummy.txt: 6 xxxxx\n\ - dummy.txt: 7 yyyyy\n\ - \x20 ...\n\ - dummy.txt: 9 ddd__eee_\n\ - \x20 ^~~ ^~~\n\ - \x20 ...\n\ - dummy.txt:11 __f_gg\n\ - \x20 ^ ^~\n"; - - let expect = "dummy.txt: 1 aaaaa\n\ - dummy.txt: 2 aaaaa\n\ - dummy.txt: 3 aaaaa\n\ - dummy.txt: 4 bbbbb\n\ - dummy.txt: 5 ccccc\n\ - dummy.txt: 6 xxxxx\n\ - \x20 ...\n"; - - let expect_g1 = 
"dummy.txt:1 aaaaa\n\ - dummy.txt:2 aaaaa\n\ - dummy.txt:3 aaaaa\n\ - dummy.txt:4 bbbbb\n\ - dummy.txt:5 ccccc\n\ - dummy.txt:6 xxxxx\n\ - \x20 ...\n"; - - let expect2 = "dummy.txt: 9 ddd__eee_\n\ - \x20 ^~~ ^~~\n\ - \x20 ...\n\ - dummy.txt:11 __f_gg\n\ - \x20 ^ ^~\n"; - - - let expect_end = "dummy.txt: 1 aaaaa\n\ - \x20 ...\n\ - dummy.txt: 7 yyyyy\n\ - \x20 ^\n\ - \x20 ...\n\ - dummy.txt: 9 ddd__eee_\n\ - \x20 ^ ^\n\ - \x20 ...\n\ - dummy.txt:11 __f_gg\n\ - \x20 ^ ^\n"; - - let expect0_end = "dummy.txt: 5 ccccc\n\ - dummy.txt: 6 xxxxx\n\ - dummy.txt: 7 yyyyy\n\ - \x20 ^\n\ - \x20 ...\n\ - dummy.txt: 9 ddd__eee_\n\ - \x20 ^ ^\n\ - \x20 ...\n\ - dummy.txt:11 __f_gg\n\ - \x20 ^ ^\n"; - - let expect_end_g1 = "dummy.txt:1 aaaaa\n\ - \x20 ...\n\ - dummy.txt:7 yyyyy\n\ - \x20 ^\n"; - - let expect2_end = "dummy.txt: 9 ddd__eee_\n\ - \x20 ^ ^\n\ - \x20 ...\n\ - dummy.txt:11 __f_gg\n\ - \x20 ^ ^\n"; - - let expect_groups = [expect2, expect_g1]; - let expect_end_groups = [expect2_end, expect_end_g1]; - let spans = vec![sp3, sp1, sp4, sp2, sp5]; + let expect0 = &r#" + --> dummy.txt:5:1 +5 |> ccccc + |> ^ +... +9 |> ddd__eee_ + |> ^^^ ^^^ +10 |> elided +11 |> __f_gg + |> ^ ^^ +"#[1..]; + + let expect = &r#" + --> dummy.txt:1:1 +1 |> aaaaa + |> ^ +... +9 |> ddd__eee_ + |> ^^^ ^^^ +10 |> elided +11 |> __f_gg + |> ^ ^^ +"#[1..]; macro_rules! test { ($expected: expr, $highlight: expr) => ({ @@ -1063,37 +854,14 @@ mod test { }); } - let msp0 = MultiSpan { spans: vec![sp0, sp2, sp3, sp4, sp5] }; - let msp = MultiSpan { spans: vec![sp1, sp2, sp3, sp4, sp5] }; - let msp2 = MultiSpan { spans: vec![sp2, sp3, sp4, sp5] }; + let msp0 = MultiSpan::from_spans(vec![sp0, sp2, sp3, sp4, sp5]); + let msp = MultiSpan::from_spans(vec![sp1, sp2, sp3, sp4, sp5]); test!(expect0, || { diag.highlight_lines(&msp0, Level::Error).unwrap(); }); - test!(expect0_end, || { - diag.end_highlight_lines(&msp0, Level::Error).unwrap(); - }); test!(expect, || { diag.highlight_lines(&msp, Level::Error).unwrap(); }); - test!(expect_end, || { - diag.end_highlight_lines(&msp, Level::Error).unwrap(); - }); - test!(expect2, || { - diag.highlight_lines(&msp2, Level::Error).unwrap(); - }); - test!(expect2_end, || { - diag.end_highlight_lines(&msp2, Level::Error).unwrap(); - }); - for (msp, expect) in cm.group_spans(spans.clone()).iter().zip(expect_groups.iter()) { - test!(expect, || { - diag.highlight_lines(&msp, Level::Error).unwrap(); - }); - } - for (msp, expect) in cm.group_spans(spans.clone()).iter().zip(expect_end_groups.iter()) { - test!(expect, || { - diag.end_highlight_lines(&msp, Level::Error).unwrap(); - }); - } } } diff --git a/src/libsyntax/errors/json.rs b/src/libsyntax/errors/json.rs index f369582bc5..93c6268cca 100644 --- a/src/libsyntax/errors/json.rs +++ b/src/libsyntax/errors/json.rs @@ -1,4 +1,4 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -20,13 +20,14 @@ // FIXME spec the JSON output properly. 
-use codemap::{self, Span, MultiSpan, CodeMap}; +use codemap::{self, MacroBacktrace, Span, SpanLabel, MultiSpan, CodeMap}; use diagnostics::registry::Registry; use errors::{Level, DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion}; use errors::emitter::Emitter; use std::rc::Rc; use std::io::{self, Write}; +use std::vec; use rustc_serialize::json::as_json; @@ -52,20 +53,13 @@ impl JsonEmitter { } impl Emitter for JsonEmitter { - fn emit(&mut self, span: Option<&MultiSpan>, msg: &str, code: Option<&str>, level: Level) { + fn emit(&mut self, span: &MultiSpan, msg: &str, code: Option<&str>, level: Level) { let data = Diagnostic::new(span, msg, code, level, self); if let Err(e) = writeln!(&mut self.dst, "{}", as_json(&data)) { panic!("failed to print diagnostics: {:?}", e); } } - fn custom_emit(&mut self, sp: &RenderSpan, msg: &str, level: Level) { - let data = Diagnostic::from_render_span(sp, msg, level, self); - if let Err(e) = writeln!(&mut self.dst, "{}", as_json(&data)) { - panic!("failed to print diagnostics: {:?}", e); - } - } - fn emit_struct(&mut self, db: &DiagnosticBuilder) { let data = Diagnostic::from_diagnostic_builder(db, self); if let Err(e) = writeln!(&mut self.dst, "{}", as_json(&data)) { @@ -84,8 +78,12 @@ struct Diagnostic<'a> { /// "error: internal compiler error", "error", "warning", "note", "help". level: &'static str, spans: Vec, - /// Assocaited diagnostic messages. + /// Associated diagnostic messages. children: Vec>, + /// The message as rustc would render it. Currently this is only + /// `Some` for "suggestions", but eventually it will include all + /// snippets. + rendered: Option, } #[derive(RustcEncodable)] @@ -99,18 +97,46 @@ struct DiagnosticSpan { /// 1-based, character offset. column_start: usize, column_end: usize, + /// Is this a "primary" span -- meaning the point, or one of the points, + /// where the error occurred? + is_primary: bool, /// Source text from the start of line_start to the end of line_end. text: Vec, + /// Label that should be placed at this location (if any) + label: Option, + /// If we are suggesting a replacement, this will contain text + /// that should be sliced in atop this span. You may prefer to + /// load the fully rendered version from the parent `Diagnostic`, + /// however. + suggested_replacement: Option, + /// Macro invocations that created the code at this span, if any. + expansion: Option>, } #[derive(RustcEncodable)] struct DiagnosticSpanLine { text: String, + /// 1-based, character offset in self.text. highlight_start: usize, + highlight_end: usize, } +#[derive(RustcEncodable)] +struct DiagnosticSpanMacroExpansion { + /// span where macro was applied to generate this code; note that + /// this may itself derive from a macro (if + /// `span.expansion.is_some()`) + span: DiagnosticSpan, + + /// name of macro that was applied (e.g., "foo!" or "#[derive(Eq)]") + macro_decl_name: String, + + /// span where macro was defined (if known) + def_site_span: Option, +} + #[derive(RustcEncodable)] struct DiagnosticCode { /// The code itself. 
@@ -120,7 +146,7 @@ struct DiagnosticCode { } impl<'a> Diagnostic<'a> { - fn new(msp: Option<&MultiSpan>, + fn new(msp: &MultiSpan, msg: &'a str, code: Option<&str>, level: Level, @@ -130,22 +156,9 @@ impl<'a> Diagnostic<'a> { message: msg, code: DiagnosticCode::map_opt_string(code.map(|c| c.to_owned()), je), level: level.to_str(), - spans: msp.map_or(vec![], |msp| DiagnosticSpan::from_multispan(msp, je)), - children: vec![], - } - } - - fn from_render_span(span: &RenderSpan, - msg: &'a str, - level: Level, - je: &JsonEmitter) - -> Diagnostic<'a> { - Diagnostic { - message: msg, - code: None, - level: level.to_str(), - spans: DiagnosticSpan::from_render_span(span, je), + spans: DiagnosticSpan::from_multispan(msp, je), children: vec![], + rendered: None, } } @@ -156,10 +169,11 @@ impl<'a> Diagnostic<'a> { message: &db.message, code: DiagnosticCode::map_opt_string(db.code.clone(), je), level: db.level.to_str(), - spans: db.span.as_ref().map_or(vec![], |sp| DiagnosticSpan::from_multispan(sp, je)), + spans: DiagnosticSpan::from_multispan(&db.span, je), children: db.children.iter().map(|c| { Diagnostic::from_sub_diagnostic(c, je) }).collect(), + rendered: None, } } @@ -170,81 +184,120 @@ impl<'a> Diagnostic<'a> { level: db.level.to_str(), spans: db.render_span.as_ref() .map(|sp| DiagnosticSpan::from_render_span(sp, je)) - .or_else(|| db.span.as_ref().map(|s| DiagnosticSpan::from_multispan(s, je))) - .unwrap_or(vec![]), + .unwrap_or_else(|| DiagnosticSpan::from_multispan(&db.span, je)), children: vec![], + rendered: db.render_span.as_ref() + .and_then(|rsp| je.render(rsp)), } } } impl DiagnosticSpan { - fn from_multispan(msp: &MultiSpan, je: &JsonEmitter) -> Vec { - msp.spans.iter().map(|span| { - let start = je.cm.lookup_char_pos(span.lo); - let end = je.cm.lookup_char_pos(span.hi); - DiagnosticSpan { - file_name: start.file.name.clone(), - byte_start: span.lo.0, - byte_end: span.hi.0, - line_start: start.line, - line_end: end.line, - column_start: start.col.0 + 1, - column_end: end.col.0 + 1, - text: DiagnosticSpanLine::from_span(span, je), - } - }).collect() + fn from_span_label(span: SpanLabel, + suggestion: Option<&String>, + je: &JsonEmitter) + -> DiagnosticSpan { + Self::from_span_etc(span.span, + span.is_primary, + span.label, + suggestion, + je) } - fn from_render_span(rsp: &RenderSpan, je: &JsonEmitter) -> Vec { - match *rsp { - RenderSpan::FullSpan(ref msp) | - // FIXME(#30701) handle Suggestion properly - RenderSpan::Suggestion(CodeSuggestion { ref msp, .. 
}) => { - DiagnosticSpan::from_multispan(msp, je) - } - RenderSpan::EndSpan(ref msp) => { - msp.spans.iter().map(|span| { - let end = je.cm.lookup_char_pos(span.hi); - DiagnosticSpan { - file_name: end.file.name.clone(), - byte_start: span.hi.0, - byte_end: span.hi.0, - line_start: end.line, - line_end: end.line, - column_start: end.col.0 + 1, - column_end: end.col.0 + 1, - text: DiagnosticSpanLine::from_span_end(span, je), - } - }).collect() - } - RenderSpan::FileLine(ref msp) => { - msp.spans.iter().map(|span| { - let start = je.cm.lookup_char_pos(span.lo); - let end = je.cm.lookup_char_pos(span.hi); - DiagnosticSpan { - file_name: start.file.name.clone(), - byte_start: span.lo.0, - byte_end: span.hi.0, - line_start: start.line, - line_end: end.line, - column_start: 0, - column_end: 0, - text: DiagnosticSpanLine::from_span(span, je), - } - }).collect() - } + fn from_span_etc(span: Span, + is_primary: bool, + label: Option, + suggestion: Option<&String>, + je: &JsonEmitter) + -> DiagnosticSpan { + // obtain the full backtrace from the `macro_backtrace` + // helper; in some ways, it'd be better to expand the + // backtrace ourselves, but the `macro_backtrace` helper makes + // some decision, such as dropping some frames, and I don't + // want to duplicate that logic here. + let backtrace = je.cm.macro_backtrace(span).into_iter(); + DiagnosticSpan::from_span_full(span, + is_primary, + label, + suggestion, + backtrace, + je) + } + + fn from_span_full(span: Span, + is_primary: bool, + label: Option, + suggestion: Option<&String>, + mut backtrace: vec::IntoIter, + je: &JsonEmitter) + -> DiagnosticSpan { + let start = je.cm.lookup_char_pos(span.lo); + let end = je.cm.lookup_char_pos(span.hi); + let backtrace_step = backtrace.next().map(|bt| { + let call_site = + Self::from_span_full(bt.call_site, + false, + None, + None, + backtrace, + je); + let def_site_span = bt.def_site_span.map(|sp| { + Self::from_span_full(sp, + false, + None, + None, + vec![].into_iter(), + je) + }); + Box::new(DiagnosticSpanMacroExpansion { + span: call_site, + macro_decl_name: bt.macro_decl_name, + def_site_span: def_site_span, + }) + }); + DiagnosticSpan { + file_name: start.file.name.clone(), + byte_start: span.lo.0, + byte_end: span.hi.0, + line_start: start.line, + line_end: end.line, + column_start: start.col.0 + 1, + column_end: end.col.0 + 1, + is_primary: is_primary, + text: DiagnosticSpanLine::from_span(span, je), + suggested_replacement: suggestion.cloned(), + expansion: backtrace_step, + label: label, } } -} -macro_rules! 
get_lines_for_span { - ($span: ident, $je: ident) => { - match $je.cm.span_to_lines(*$span) { - Ok(lines) => lines, - Err(_) => { - debug!("unprintable span"); - return Vec::new(); - } + fn from_multispan(msp: &MultiSpan, je: &JsonEmitter) -> Vec { + msp.span_labels() + .into_iter() + .map(|span_str| Self::from_span_label(span_str, None, je)) + .collect() + } + + fn from_suggestion(suggestion: &CodeSuggestion, je: &JsonEmitter) + -> Vec { + assert_eq!(suggestion.msp.span_labels().len(), suggestion.substitutes.len()); + suggestion.msp.span_labels() + .into_iter() + .zip(&suggestion.substitutes) + .map(|(span_label, suggestion)| { + DiagnosticSpan::from_span_label(span_label, + Some(suggestion), + je) + }) + .collect() + } + + fn from_render_span(rsp: &RenderSpan, je: &JsonEmitter) -> Vec { + match *rsp { + RenderSpan::FullSpan(ref msp) => + DiagnosticSpan::from_multispan(msp, je), + RenderSpan::Suggestion(ref suggestion) => + DiagnosticSpan::from_suggestion(suggestion, je), } } } @@ -265,45 +318,21 @@ impl DiagnosticSpanLine { /// Create a list of DiagnosticSpanLines from span - each line with any part /// of `span` gets a DiagnosticSpanLine, with the highlight indicating the /// `span` within the line. - fn from_span(span: &Span, je: &JsonEmitter) -> Vec { - let lines = get_lines_for_span!(span, je); - - let mut result = Vec::new(); - let fm = &*lines.file; - - for line in &lines.lines { - result.push(DiagnosticSpanLine::line_from_filemap(fm, - line.line_index, - line.start_col.0 + 1, - line.end_col.0 + 1)); - } - - result - } - - /// Create a list of DiagnosticSpanLines from span - the result covers all - /// of `span`, but the highlight is zero-length and at the end of `span`. - fn from_span_end(span: &Span, je: &JsonEmitter) -> Vec { - let lines = get_lines_for_span!(span, je); - - let mut result = Vec::new(); - let fm = &*lines.file; - - for (i, line) in lines.lines.iter().enumerate() { - // Invariant - CodeMap::span_to_lines will not return extra context - // lines - the last line returned is the last line of `span`. 
- let highlight = if i == lines.lines.len() - 1 { - (line.end_col.0 + 1, line.end_col.0 + 1) - } else { - (0, 0) - }; - result.push(DiagnosticSpanLine::line_from_filemap(fm, - line.line_index, - highlight.0, - highlight.1)); - } - - result + fn from_span(span: Span, je: &JsonEmitter) -> Vec { + je.cm.span_to_lines(span) + .map(|lines| { + let fm = &*lines.file; + lines.lines + .iter() + .map(|line| { + DiagnosticSpanLine::line_from_filemap(fm, + line.line_index, + line.start_col.0 + 1, + line.end_col.0 + 1) + }) + .collect() + }) + .unwrap_or(vec![]) } } @@ -322,3 +351,17 @@ impl DiagnosticCode { }) } } + +impl JsonEmitter { + fn render(&self, render_span: &RenderSpan) -> Option { + match *render_span { + RenderSpan::FullSpan(_) => { + None + } + RenderSpan::Suggestion(ref suggestion) => { + Some(suggestion.splice_lines(&self.cm)) + } + } + } +} + diff --git a/src/libsyntax/errors/mod.rs b/src/libsyntax/errors/mod.rs index c8c12d5a88..f06672fe11 100644 --- a/src/libsyntax/errors/mod.rs +++ b/src/libsyntax/errors/mod.rs @@ -13,18 +13,19 @@ pub use errors::emitter::ColorConfig; use self::Level::*; use self::RenderSpan::*; -use codemap::{self, CodeMap, MultiSpan}; +use codemap::{self, CodeMap, MultiSpan, NO_EXPANSION, Span}; use diagnostics; use errors::emitter::{Emitter, EmitterWriter}; use std::cell::{RefCell, Cell}; use std::{error, fmt}; -use std::io::prelude::*; use std::rc::Rc; +use std::thread::panicking; use term; pub mod emitter; pub mod json; +pub mod snippet; #[derive(Clone)] pub enum RenderSpan { @@ -33,22 +34,11 @@ pub enum RenderSpan { /// the source code covered by the span. FullSpan(MultiSpan), - /// Similar to a FullSpan, but the cited position is the end of - /// the span, instead of the start. Used, at least, for telling - /// compiletest/runtest to look at the last line of the span - /// (since `end_highlight_lines` displays an arrow to the end - /// of the span). - EndSpan(MultiSpan), - /// A suggestion renders with both with an initial line for the /// message, prefixed by file:linenum, followed by a summary /// of hypothetical source code, where each `String` is spliced /// into the lines in place of the code covered by each span. Suggestion(CodeSuggestion), - - /// A FileLine renders with just a line for the message prefixed - /// by file:linenum. - FileLine(MultiSpan), } #[derive(Clone)] @@ -61,9 +51,7 @@ impl RenderSpan { fn span(&self) -> &MultiSpan { match *self { FullSpan(ref msp) | - Suggestion(CodeSuggestion { ref msp, .. }) | - EndSpan(ref msp) | - FileLine(ref msp) => + Suggestion(CodeSuggestion { ref msp, .. }) => msp } } @@ -89,12 +77,24 @@ impl CodeSuggestion { } } } - let bounds = self.msp.to_span_bounds(); - let lines = cm.span_to_lines(bounds).unwrap(); - assert!(!lines.lines.is_empty()); - // This isn't strictly necessary, but would in all likelyhood be an error - assert_eq!(self.msp.spans.len(), self.substitutes.len()); + let mut primary_spans = self.msp.primary_spans().to_owned(); + + assert_eq!(primary_spans.len(), self.substitutes.len()); + if primary_spans.is_empty() { + return format!(""); + } + + // Assumption: all spans are in the same file, and all spans + // are disjoint. Sort in ascending order. + primary_spans.sort_by_key(|sp| sp.lo); + + // Find the bounding span. 
+ let lo = primary_spans.iter().map(|sp| sp.lo).min().unwrap(); + let hi = primary_spans.iter().map(|sp| sp.hi).min().unwrap(); + let bounding_span = Span { lo: lo, hi: hi, expn_id: NO_EXPANSION }; + let lines = cm.span_to_lines(bounding_span).unwrap(); + assert!(!lines.lines.is_empty()); // To build up the result, we do this for each span: // - push the line segment trailing the previous span @@ -106,13 +106,13 @@ impl CodeSuggestion { // // Finally push the trailing line segment of the last span let fm = &lines.file; - let mut prev_hi = cm.lookup_char_pos(bounds.lo); + let mut prev_hi = cm.lookup_char_pos(bounding_span.lo); prev_hi.col = CharPos::from_usize(0); let mut prev_line = fm.get_line(lines.lines[0].line_index); let mut buf = String::new(); - for (sp, substitute) in self.msp.spans.iter().zip(self.substitutes.iter()) { + for (sp, substitute) in primary_spans.iter().zip(self.substitutes.iter()) { let cur_lo = cm.lookup_char_pos(sp.lo); if prev_hi.line == cur_lo.line { push_trailing(&mut buf, prev_line, &prev_hi, Some(&cur_lo)); @@ -178,20 +178,22 @@ impl error::Error for ExplicitBug { /// Used for emitting structured error messages and other diagnostic information. #[must_use] +#[derive(Clone)] pub struct DiagnosticBuilder<'a> { - emitter: &'a RefCell>, + handler: &'a Handler, level: Level, message: String, code: Option, - span: Option, + span: MultiSpan, children: Vec, } /// For example a note attached to an error. +#[derive(Clone)] struct SubDiagnostic { level: Level, message: String, - span: Option, + span: MultiSpan, render_span: Option, } @@ -202,8 +204,9 @@ impl<'a> DiagnosticBuilder<'a> { return; } - self.emitter.borrow_mut().emit_struct(&self); + self.handler.emit.borrow_mut().emit_struct(&self); self.cancel(); + self.handler.panic_if_treat_err_as_bug(); // if self.is_fatal() { // panic!(FatalError); @@ -227,37 +230,61 @@ impl<'a> DiagnosticBuilder<'a> { self.level == Level::Fatal } - pub fn note(&mut self , msg: &str) -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Note, msg, None, None); + /// Add a span/label to be included in the resulting snippet. + /// This is pushed onto the `MultiSpan` that was created when the + /// diagnostic was first built. If you don't call this function at + /// all, and you just supplied a `Span` to create the diagnostic, + /// then the snippet will just include that `Span`, which is + /// called the primary span. 
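A minimal usage sketch for the labelled-span API documented above and declared just below (not part of the patch; `handler`, `sp`, and `other_sp` stand for an existing `Handler` and two `Span`s, and the label texts are made up):

    let mut err = handler.struct_span_err(sp, "mismatched types");
    err.span_label(sp, &"expected `u32` here");         // primary span, underlined with ^^^
    err.span_label(other_sp, &"found `&str` here");     // secondary span label
    err.note_expected_found(&"type", &"u32", &"&str");  // attached as plain notes for now
    err.emit();

Each `span_label` call pushes onto the diagnostic's `MultiSpan`, so a single error can annotate several locations in one snippet.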
+ pub fn span_label(&mut self, span: Span, label: &fmt::Display) + -> &mut DiagnosticBuilder<'a> { + self.span.push_span_label(span, format!("{}", label)); + self + } + + pub fn note_expected_found(&mut self, + label: &fmt::Display, + expected: &fmt::Display, + found: &fmt::Display) + -> &mut DiagnosticBuilder<'a> + { + // For now, just attach these as notes + self.note(&format!("expected {} `{}`", label, expected)); + self.note(&format!(" found {} `{}`", label, found)); + self + } + + pub fn note(&mut self, msg: &str) -> &mut DiagnosticBuilder<'a> { + self.sub(Level::Note, msg, MultiSpan::new(), None); self } pub fn span_note>(&mut self, sp: S, msg: &str) -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Note, msg, Some(sp.into()), None); + self.sub(Level::Note, msg, sp.into(), None); self } pub fn warn(&mut self, msg: &str) -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Warning, msg, None, None); + self.sub(Level::Warning, msg, MultiSpan::new(), None); self } pub fn span_warn>(&mut self, sp: S, msg: &str) -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Warning, msg, Some(sp.into()), None); + self.sub(Level::Warning, msg, sp.into(), None); self } pub fn help(&mut self , msg: &str) -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Help, msg, None, None); + self.sub(Level::Help, msg, MultiSpan::new(), None); self } pub fn span_help>(&mut self, sp: S, msg: &str) -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Help, msg, Some(sp.into()), None); + self.sub(Level::Help, msg, sp.into(), None); self } /// Prints out a message with a suggested edit of the code. @@ -268,43 +295,15 @@ impl<'a> DiagnosticBuilder<'a> { msg: &str, suggestion: String) -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Help, msg, None, Some(Suggestion(CodeSuggestion { + self.sub(Level::Help, msg, MultiSpan::new(), Some(Suggestion(CodeSuggestion { msp: sp.into(), substitutes: vec![suggestion], }))); self } - pub fn span_end_note>(&mut self, - sp: S, - msg: &str) - -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Note, msg, None, Some(EndSpan(sp.into()))); - self - } - pub fn fileline_warn>(&mut self, - sp: S, - msg: &str) - -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Warning, msg, None, Some(FileLine(sp.into()))); - self - } - pub fn fileline_note>(&mut self, - sp: S, - msg: &str) - -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Note, msg, None, Some(FileLine(sp.into()))); - self - } - pub fn fileline_help>(&mut self, - sp: S, - msg: &str) - -> &mut DiagnosticBuilder<'a> { - self.sub(Level::Help, msg, None, Some(FileLine(sp.into()))); - self - } - pub fn span>(&mut self, sp: S) -> &mut Self { - self.span = Some(sp.into()); + pub fn set_span>(&mut self, sp: S) -> &mut Self { + self.span = sp.into(); self } @@ -313,17 +312,25 @@ impl<'a> DiagnosticBuilder<'a> { self } + pub fn message(&self) -> &str { + &self.message + } + + pub fn level(&self) -> Level { + self.level + } + /// Convenience function for internal use, clients should use one of the /// struct_* methods on Handler. 
- fn new(emitter: &'a RefCell>, + fn new(handler: &'a Handler, level: Level, message: &str) -> DiagnosticBuilder<'a> { DiagnosticBuilder { - emitter: emitter, + handler: handler, level: level, message: message.to_owned(), code: None, - span: None, + span: MultiSpan::new(), children: vec![], } } @@ -333,7 +340,7 @@ impl<'a> DiagnosticBuilder<'a> { fn sub(&mut self, level: Level, message: &str, - span: Option, + span: MultiSpan, render_span: Option) { let sub = SubDiagnostic { level: level, @@ -355,8 +362,11 @@ impl<'a> fmt::Debug for DiagnosticBuilder<'a> { /// we emit a bug. impl<'a> Drop for DiagnosticBuilder<'a> { fn drop(&mut self) { - if !self.cancelled() { - self.emitter.borrow_mut().emit(None, "Error constructed but not emitted", None, Bug); + if !panicking() && !self.cancelled() { + self.handler.emit.borrow_mut().emit(&MultiSpan::new(), + "Error constructed but not emitted", + None, + Bug); panic!(); } } @@ -403,15 +413,15 @@ impl Handler { } pub fn struct_dummy<'a>(&'a self) -> DiagnosticBuilder<'a> { - DiagnosticBuilder::new(&self.emit, Level::Cancelled, "") + DiagnosticBuilder::new(self, Level::Cancelled, "") } pub fn struct_span_warn<'a, S: Into>(&'a self, sp: S, msg: &str) -> DiagnosticBuilder<'a> { - let mut result = DiagnosticBuilder::new(&self.emit, Level::Warning, msg); - result.span(sp); + let mut result = DiagnosticBuilder::new(self, Level::Warning, msg); + result.set_span(sp); if !self.can_emit_warnings { result.cancel(); } @@ -422,8 +432,8 @@ impl Handler { msg: &str, code: &str) -> DiagnosticBuilder<'a> { - let mut result = DiagnosticBuilder::new(&self.emit, Level::Warning, msg); - result.span(sp); + let mut result = DiagnosticBuilder::new(self, Level::Warning, msg); + result.set_span(sp); result.code(code.to_owned()); if !self.can_emit_warnings { result.cancel(); @@ -431,7 +441,7 @@ impl Handler { result } pub fn struct_warn<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> { - let mut result = DiagnosticBuilder::new(&self.emit, Level::Warning, msg); + let mut result = DiagnosticBuilder::new(self, Level::Warning, msg); if !self.can_emit_warnings { result.cancel(); } @@ -442,8 +452,8 @@ impl Handler { msg: &str) -> DiagnosticBuilder<'a> { self.bump_err_count(); - let mut result = DiagnosticBuilder::new(&self.emit, Level::Error, msg); - result.span(sp); + let mut result = DiagnosticBuilder::new(self, Level::Error, msg); + result.set_span(sp); result } pub fn struct_span_err_with_code<'a, S: Into>(&'a self, @@ -452,22 +462,22 @@ impl Handler { code: &str) -> DiagnosticBuilder<'a> { self.bump_err_count(); - let mut result = DiagnosticBuilder::new(&self.emit, Level::Error, msg); - result.span(sp); + let mut result = DiagnosticBuilder::new(self, Level::Error, msg); + result.set_span(sp); result.code(code.to_owned()); result } pub fn struct_err<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> { self.bump_err_count(); - DiagnosticBuilder::new(&self.emit, Level::Error, msg) + DiagnosticBuilder::new(self, Level::Error, msg) } pub fn struct_span_fatal<'a, S: Into>(&'a self, sp: S, msg: &str) -> DiagnosticBuilder<'a> { self.bump_err_count(); - let mut result = DiagnosticBuilder::new(&self.emit, Level::Fatal, msg); - result.span(sp); + let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg); + result.set_span(sp); result } pub fn struct_span_fatal_with_code<'a, S: Into>(&'a self, @@ -476,14 +486,14 @@ impl Handler { code: &str) -> DiagnosticBuilder<'a> { self.bump_err_count(); - let mut result = DiagnosticBuilder::new(&self.emit, Level::Fatal, msg); - result.span(sp); + 
let mut result = DiagnosticBuilder::new(self, Level::Fatal, msg); + result.set_span(sp); result.code(code.to_owned()); result } pub fn struct_fatal<'a>(&'a self, msg: &str) -> DiagnosticBuilder<'a> { self.bump_err_count(); - DiagnosticBuilder::new(&self.emit, Level::Fatal, msg) + DiagnosticBuilder::new(self, Level::Fatal, msg) } pub fn cancel(&mut self, err: &mut DiagnosticBuilder) { @@ -494,45 +504,44 @@ impl Handler { err.cancel(); } - pub fn span_fatal>(&self, sp: S, msg: &str) -> FatalError { + fn panic_if_treat_err_as_bug(&self) { if self.treat_err_as_bug { - self.span_bug(sp, msg); + panic!("encountered error with `-Z treat_err_as_bug"); } - self.emit(Some(&sp.into()), msg, Fatal); + } + + pub fn span_fatal>(&self, sp: S, msg: &str) + -> FatalError { + self.emit(&sp.into(), msg, Fatal); self.bump_err_count(); + self.panic_if_treat_err_as_bug(); return FatalError; } pub fn span_fatal_with_code>(&self, sp: S, msg: &str, code: &str) - -> FatalError { - if self.treat_err_as_bug { - self.span_bug(sp, msg); - } - self.emit_with_code(Some(&sp.into()), msg, code, Fatal); + -> FatalError { + self.emit_with_code(&sp.into(), msg, code, Fatal); self.bump_err_count(); + self.panic_if_treat_err_as_bug(); return FatalError; } pub fn span_err>(&self, sp: S, msg: &str) { - if self.treat_err_as_bug { - self.span_bug(sp, msg); - } - self.emit(Some(&sp.into()), msg, Error); + self.emit(&sp.into(), msg, Error); self.bump_err_count(); + self.panic_if_treat_err_as_bug(); } pub fn span_err_with_code>(&self, sp: S, msg: &str, code: &str) { - if self.treat_err_as_bug { - self.span_bug(sp, msg); - } - self.emit_with_code(Some(&sp.into()), msg, code, Error); + self.emit_with_code(&sp.into(), msg, code, Error); self.bump_err_count(); + self.panic_if_treat_err_as_bug(); } pub fn span_warn>(&self, sp: S, msg: &str) { - self.emit(Some(&sp.into()), msg, Warning); + self.emit(&sp.into(), msg, Warning); } pub fn span_warn_with_code>(&self, sp: S, msg: &str, code: &str) { - self.emit_with_code(Some(&sp.into()), msg, code, Warning); + self.emit_with_code(&sp.into(), msg, code, Warning); } pub fn span_bug>(&self, sp: S, msg: &str) -> ! { - self.emit(Some(&sp.into()), msg, Bug); + self.emit(&sp.into(), msg, Bug); panic!(ExplicitBug); } pub fn delay_span_bug>(&self, sp: S, msg: &str) { @@ -540,11 +549,11 @@ impl Handler { *delayed = Some((sp.into(), msg.to_string())); } pub fn span_bug_no_panic>(&self, sp: S, msg: &str) { - self.emit(Some(&sp.into()), msg, Bug); + self.emit(&sp.into(), msg, Bug); self.bump_err_count(); } pub fn span_note_without_error>(&self, sp: S, msg: &str) { - self.emit.borrow_mut().emit(Some(&sp.into()), msg, None, Note); + self.emit.borrow_mut().emit(&sp.into(), msg, None, Note); } pub fn span_unimpl>(&self, sp: S, msg: &str) -> ! 
{ self.span_bug(sp, &format!("unimplemented {}", msg)); @@ -553,7 +562,7 @@ impl Handler { if self.treat_err_as_bug { self.bug(msg); } - self.emit.borrow_mut().emit(None, msg, None, Fatal); + self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Fatal); self.bump_err_count(); FatalError } @@ -561,17 +570,17 @@ impl Handler { if self.treat_err_as_bug { self.bug(msg); } - self.emit.borrow_mut().emit(None, msg, None, Error); + self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Error); self.bump_err_count(); } pub fn warn(&self, msg: &str) { - self.emit.borrow_mut().emit(None, msg, None, Warning); + self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Warning); } pub fn note_without_error(&self, msg: &str) { - self.emit.borrow_mut().emit(None, msg, None, Note); + self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Note); } pub fn bug(&self, msg: &str) -> ! { - self.emit.borrow_mut().emit(None, msg, None, Bug); + self.emit.borrow_mut().emit(&MultiSpan::new(), msg, None, Bug); panic!(ExplicitBug); } pub fn unimpl(&self, msg: &str) -> ! { @@ -613,25 +622,20 @@ impl Handler { panic!(self.fatal(&s)); } pub fn emit(&self, - msp: Option<&MultiSpan>, + msp: &MultiSpan, msg: &str, lvl: Level) { if lvl == Warning && !self.can_emit_warnings { return } - self.emit.borrow_mut().emit(msp, msg, None, lvl); + self.emit.borrow_mut().emit(&msp, msg, None, lvl); if !self.continue_after_error.get() { self.abort_if_errors(); } } pub fn emit_with_code(&self, - msp: Option<&MultiSpan>, + msp: &MultiSpan, msg: &str, code: &str, lvl: Level) { if lvl == Warning && !self.can_emit_warnings { return } - self.emit.borrow_mut().emit(msp, msg, Some(code), lvl); - if !self.continue_after_error.get() { self.abort_if_errors(); } - } - pub fn custom_emit(&self, rsp: RenderSpan, msg: &str, lvl: Level) { - if lvl == Warning && !self.can_emit_warnings { return } - self.emit.borrow_mut().custom_emit(&rsp, msg, lvl); + self.emit.borrow_mut().emit(&msp, msg, Some(code), lvl); if !self.continue_after_error.get() { self.abort_if_errors(); } } } @@ -653,8 +657,6 @@ pub enum Level { impl fmt::Display for Level { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - use std::fmt::Display; - self.to_str().fmt(f) } } @@ -663,7 +665,7 @@ impl Level { fn color(self) -> term::color::Color { match self { Bug | Fatal | PhaseFatal | Error => term::color::BRIGHT_RED, - Warning => term::color::BRIGHT_YELLOW, + Warning => term::color::YELLOW, Note => term::color::BRIGHT_GREEN, Help => term::color::BRIGHT_CYAN, Cancelled => unreachable!(), @@ -690,3 +692,20 @@ pub fn expect(diag: &Handler, opt: Option, msg: M) -> T where None => diag.bug(&msg()), } } + +/// True if we should use the old-skool error format style. This is +/// the default setting until the new errors are deemed stable enough +/// for general use. +/// +/// FIXME(#33240) +#[cfg(not(test))] +pub fn check_old_skool() -> bool { + use std::env; + env::var("RUST_NEW_ERROR_FORMAT").is_err() +} + +/// For unit tests, use the new format. +#[cfg(test)] +pub fn check_old_skool() -> bool { + false +} diff --git a/src/libsyntax/errors/snippet/mod.rs b/src/libsyntax/errors/snippet/mod.rs new file mode 100644 index 0000000000..188e676e7d --- /dev/null +++ b/src/libsyntax/errors/snippet/mod.rs @@ -0,0 +1,874 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +// Code for annotating snippets. + +use codemap::{CharPos, CodeMap, FileMap, LineInfo, Span}; +use errors::check_old_skool; +use std::cmp; +use std::rc::Rc; +use std::mem; + +mod test; + +#[derive(Clone)] +pub struct SnippetData { + codemap: Rc, + files: Vec, +} + +#[derive(Clone)] +pub struct FileInfo { + file: Rc, + + /// The "primary file", if any, gets a `-->` marker instead of + /// `>>>`, and has a line-number/column printed and not just a + /// filename. It appears first in the listing. It is known to + /// contain at least one primary span, though primary spans (which + /// are designated with `^^^`) may also occur in other files. + primary_span: Option, + + lines: Vec, +} + +#[derive(Clone, Debug)] +struct Line { + line_index: usize, + annotations: Vec, +} + +#[derive(Clone, Debug, PartialOrd, Ord, PartialEq, Eq)] +struct Annotation { + /// Start column, 0-based indexing -- counting *characters*, not + /// utf-8 bytes. Note that it is important that this field goes + /// first, so that when we sort, we sort orderings by start + /// column. + start_col: usize, + + /// End column within the line (exclusive) + end_col: usize, + + /// Is this annotation derived from primary span + is_primary: bool, + + /// Is this a large span minimized down to a smaller span + is_minimized: bool, + + /// Optional label to display adjacent to the annotation. + label: Option, +} + +#[derive(Debug)] +pub struct RenderedLine { + pub text: Vec, + pub kind: RenderedLineKind, +} + +#[derive(Debug)] +pub struct StyledString { + pub text: String, + pub style: Style, +} + +#[derive(Debug)] +pub struct StyledBuffer { + text: Vec>, + styles: Vec> +} + +#[derive(Copy, Clone, Debug, PartialEq)] +pub enum Style { + FileNameStyle, + LineAndColumn, + LineNumber, + Quotation, + UnderlinePrimary, + UnderlineSecondary, + LabelPrimary, + LabelSecondary, + OldSkoolNoteText, + OldSkoolNote, + NoStyle, +} + +#[derive(Debug, Clone)] +pub enum RenderedLineKind { + PrimaryFileName, + OtherFileName, + SourceText { + file: Rc, + line_index: usize, + }, + Annotations, + Elision, +} + +impl SnippetData { + pub fn new(codemap: Rc, + primary_span: Option) // (*) + -> Self { + // (*) The primary span indicates the file that must appear + // first, and which will have a line number etc in its + // name. Outside of tests, this is always `Some`, but for many + // tests it's not relevant to test this portion of the logic, + // and it's tedious to pick a primary span (read: tedious to + // port older tests that predate the existence of a primary + // span). + + debug!("SnippetData::new(primary_span={:?})", primary_span); + + let mut data = SnippetData { + codemap: codemap.clone(), + files: vec![] + }; + if let Some(primary_span) = primary_span { + let lo = codemap.lookup_char_pos(primary_span.lo); + data.files.push( + FileInfo { + file: lo.file, + primary_span: Some(primary_span), + lines: vec![], + }); + } + data + } + + pub fn push(&mut self, span: Span, is_primary: bool, label: Option) { + debug!("SnippetData::push(span={:?}, is_primary={}, label={:?})", + span, is_primary, label); + + let file_lines = match self.codemap.span_to_lines(span) { + Ok(file_lines) => file_lines, + Err(_) => { + // ignore unprintable spans completely. 
+ return; + } + }; + + self.file(&file_lines.file) + .push_lines(&file_lines.lines, is_primary, label); + } + + fn file(&mut self, file_map: &Rc) -> &mut FileInfo { + let index = self.files.iter().position(|f| f.file.name == file_map.name); + if let Some(index) = index { + return &mut self.files[index]; + } + + self.files.push( + FileInfo { + file: file_map.clone(), + lines: vec![], + primary_span: None, + }); + self.files.last_mut().unwrap() + } + + pub fn render_lines(&self) -> Vec { + debug!("SnippetData::render_lines()"); + + let mut rendered_lines: Vec<_> = + self.files.iter() + .flat_map(|f| f.render_file_lines(&self.codemap)) + .collect(); + prepend_prefixes(&mut rendered_lines); + trim_lines(&mut rendered_lines); + rendered_lines + } +} + +pub trait StringSource { + fn make_string(self) -> String; +} + +impl StringSource for String { + fn make_string(self) -> String { + self + } +} + +impl StringSource for Vec { + fn make_string(self) -> String { + self.into_iter().collect() + } +} + +impl From<(S, Style, RenderedLineKind)> for RenderedLine + where S: StringSource +{ + fn from((text, style, kind): (S, Style, RenderedLineKind)) -> Self { + RenderedLine { + text: vec![StyledString { + text: text.make_string(), + style: style, + }], + kind: kind, + } + } +} + +impl From<(S1, Style, S2, Style, RenderedLineKind)> for RenderedLine + where S1: StringSource, S2: StringSource +{ + fn from(tuple: (S1, Style, S2, Style, RenderedLineKind)) -> Self { + let (text1, style1, text2, style2, kind) = tuple; + RenderedLine { + text: vec![ + StyledString { + text: text1.make_string(), + style: style1, + }, + StyledString { + text: text2.make_string(), + style: style2, + } + ], + kind: kind, + } + } +} + +impl RenderedLine { + fn trim_last(&mut self) { + if let Some(last_text) = self.text.last_mut() { + let len = last_text.text.trim_right().len(); + last_text.text.truncate(len); + } + } +} + +impl RenderedLineKind { + fn prefix(&self) -> StyledString { + match *self { + RenderedLineKind::SourceText { file: _, line_index } => + StyledString { + text: format!("{}", line_index + 1), + style: Style::LineNumber, + }, + RenderedLineKind::Elision => + StyledString { + text: String::from("..."), + style: Style::LineNumber, + }, + RenderedLineKind::PrimaryFileName | + RenderedLineKind::OtherFileName | + RenderedLineKind::Annotations => + StyledString { + text: String::from(""), + style: Style::LineNumber, + }, + } + } +} + +impl StyledBuffer { + fn new() -> StyledBuffer { + StyledBuffer { text: vec![], styles: vec![] } + } + + fn render(&self, source_kind: RenderedLineKind) -> Vec { + let mut output: Vec = vec![]; + let mut styled_vec: Vec = vec![]; + + for (row, row_style) in self.text.iter().zip(&self.styles) { + let mut current_style = Style::NoStyle; + let mut current_text = String::new(); + + for (&c, &s) in row.iter().zip(row_style) { + if s != current_style { + if !current_text.is_empty() { + styled_vec.push(StyledString { text: current_text, style: current_style }); + } + current_style = s; + current_text = String::new(); + } + current_text.push(c); + } + if !current_text.is_empty() { + styled_vec.push(StyledString { text: current_text, style: current_style }); + } + + if output.is_empty() { + //We know our first output line is source and the rest are highlights and labels + output.push(RenderedLine { text: styled_vec, kind: source_kind.clone() }); + } else { + output.push(RenderedLine { text: styled_vec, kind: RenderedLineKind::Annotations }); + } + styled_vec = vec![]; + } + + output + } + + fn 
putc(&mut self, line: usize, col: usize, chr: char, style: Style) { + while line >= self.text.len() { + self.text.push(vec![]); + self.styles.push(vec![]); + } + + if col < self.text[line].len() { + self.text[line][col] = chr; + self.styles[line][col] = style; + } else { + let mut i = self.text[line].len(); + while i < col { + let s = match self.text[0].get(i) { + Some(&'\t') => '\t', + _ => ' ' + }; + self.text[line].push(s); + self.styles[line].push(Style::NoStyle); + i += 1; + } + self.text[line].push(chr); + self.styles[line].push(style); + } + } + + fn puts(&mut self, line: usize, col: usize, string: &str, style: Style) { + let mut n = col; + for c in string.chars() { + self.putc(line, n, c, style); + n += 1; + } + } + + fn set_style(&mut self, line: usize, col: usize, style: Style) { + if self.styles.len() > line && self.styles[line].len() > col { + self.styles[line][col] = style; + } + } + + fn append(&mut self, line: usize, string: &str, style: Style) { + if line >= self.text.len() { + self.puts(line, 0, string, style); + } else { + let col = self.text[line].len(); + self.puts(line, col, string, style); + } + } +} + +impl FileInfo { + fn push_lines(&mut self, + lines: &[LineInfo], + is_primary: bool, + label: Option) { + assert!(lines.len() > 0); + + // If a span covers multiple lines, we reduce it to a single + // point at the start of the span. This means that instead + // of producing output like this: + // + // ``` + // --> foo.rs:2:1 + // 2 |> fn conflicting_items<'grammar>(state: &LR0State<'grammar>) + // |> ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + // 3 |> -> Set> + // |> ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + // (and so on) + // ``` + // + // we produce: + // + // ``` + // --> foo.rs:2:1 + // 2 |> fn conflicting_items<'grammar>(state: &LR0State<'grammar>) + // ^ + // ``` + // + // Basically, although this loses information, multi-line spans just + // never look good. + + let (line, start_col, mut end_col, is_minimized) = if lines.len() == 1 { + (lines[0].line_index, lines[0].start_col, lines[0].end_col, false) + } else { + (lines[0].line_index, lines[0].start_col, CharPos(lines[0].start_col.0 + 1), true) + }; + + // Watch out for "empty spans". If we get a span like 6..6, we + // want to just display a `^` at 6, so convert that to + // 6..7. This is degenerate input, but it's best to degrade + // gracefully -- and the parser likes to suply a span like + // that for EOF, in particular. + if start_col == end_col { + end_col.0 += 1; + } + + let index = self.ensure_source_line(line); + self.lines[index].push_annotation(start_col, + end_col, + is_primary, + is_minimized, + label); + } + + /// Ensure that we have a `Line` struct corresponding to + /// `line_index` in the file. If we already have some other lines, + /// then this will add the intervening lines to ensure that we + /// have a complete snippet. (Note that when we finally display, + /// some of those lines may be elided.) + fn ensure_source_line(&mut self, line_index: usize) -> usize { + if self.lines.is_empty() { + self.lines.push(Line::new(line_index)); + return 0; + } + + // Find the range of lines we have thus far. + let first_line_index = self.lines.first().unwrap().line_index; + let last_line_index = self.lines.last().unwrap().line_index; + assert!(first_line_index <= last_line_index); + + // If the new line is lower than all the lines we have thus + // far, then insert the new line and any intervening lines at + // the front. 
In a silly attempt at micro-optimization, we + // don't just call `insert` repeatedly, but instead make a new + // (empty) vector, pushing the new lines onto it, and then + // appending the old vector. + if line_index < first_line_index { + let lines = mem::replace(&mut self.lines, vec![]); + self.lines.extend( + (line_index .. first_line_index) + .map(|line| Line::new(line)) + .chain(lines)); + return 0; + } + + // If the new line comes after the ones we have so far, insert + // lines for it. + if line_index > last_line_index { + self.lines.extend( + (last_line_index+1 .. line_index+1) + .map(|line| Line::new(line))); + return self.lines.len() - 1; + } + + // Otherwise it should already exist. + return line_index - first_line_index; + } + + fn render_file_lines(&self, codemap: &Rc) -> Vec { + let old_school = check_old_skool(); + + // As a first step, we elide any instance of more than one + // continuous unannotated line. + + let mut lines_iter = self.lines.iter(); + let mut output = vec![]; + + // First insert the name of the file. + if !old_school { + match self.primary_span { + Some(span) => { + let lo = codemap.lookup_char_pos(span.lo); + output.push(RenderedLine { + text: vec![StyledString { + text: lo.file.name.clone(), + style: Style::FileNameStyle, + }, StyledString { + text: format!(":{}:{}", lo.line, lo.col.0 + 1), + style: Style::LineAndColumn, + }], + kind: RenderedLineKind::PrimaryFileName, + }); + } + None => { + output.push(RenderedLine { + text: vec![StyledString { + text: self.file.name.clone(), + style: Style::FileNameStyle, + }], + kind: RenderedLineKind::OtherFileName, + }); + } + } + } + + let mut next_line = lines_iter.next(); + while next_line.is_some() { + // Consume lines with annotations. + while let Some(line) = next_line { + if line.annotations.is_empty() { break; } + + let mut rendered_lines = self.render_line(line); + assert!(!rendered_lines.is_empty()); + if old_school { + match self.primary_span { + Some(span) => { + let lo = codemap.lookup_char_pos(span.lo); + let hi = codemap.lookup_char_pos(span.hi); + //Before each secondary line in old skool-mode, print the label + //as an old-style note + if !line.annotations[0].is_primary { + if let Some(ann) = line.annotations[0].label.clone() { + output.push(RenderedLine { + text: vec![StyledString { + text: lo.file.name.clone(), + style: Style::FileNameStyle, + }, StyledString { + text: format!(":{}:{}: {}:{} ", lo.line, lo.col.0 + 1, + hi.line, hi.col.0+1), + style: Style::LineAndColumn, + }, StyledString { + text: format!("note: "), + style: Style::OldSkoolNote, + }, StyledString { + text: format!("{}", ann), + style: Style::OldSkoolNoteText, + }], + kind: RenderedLineKind::Annotations, + }); + } + } + rendered_lines[0].text.insert(0, StyledString { + text: format!(":{} ", lo.line), + style: Style::LineAndColumn, + }); + rendered_lines[0].text.insert(0, StyledString { + text: lo.file.name.clone(), + style: Style::FileNameStyle, + }); + let gap_amount = + rendered_lines[0].text[0].text.len() + + rendered_lines[0].text[1].text.len(); + assert!(rendered_lines.len() >= 2, + "no annotations resulted from: {:?}", + line); + for i in 1..rendered_lines.len() { + rendered_lines[i].text.insert(0, StyledString { + text: vec![" "; gap_amount].join(""), + style: Style::NoStyle + }); + } + } + _ =>() + } + } + output.append(&mut rendered_lines); + next_line = lines_iter.next(); + } + + // Emit lines without annotations, but only if they are + // followed by a line with an annotation. 
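// A minimal stand-alone sketch of the elision rule described just above: a
// lone unannotated line between annotated ones is kept, while a longer
// unannotated run collapses into a single `...` marker. Plain strings and a
// boolean stand in for the real Line/RenderedLine machinery here:
fn elide_runs(lines: &[(&str, bool /* has annotations */)]) -> Vec<String> {
    let mut out = Vec::new();
    let mut run: Vec<String> = Vec::new();
    for &(text, annotated) in lines {
        if annotated {
            if run.len() > 1 {
                out.push("...".to_string());   // collapse the whole unannotated run
                run.clear();
            } else {
                out.append(&mut run);          // keep a lone unannotated line
            }
            out.push(text.to_string());
        } else {
            run.push(text.to_string());
        }
    }
    out // any trailing unannotated run is simply dropped in this simplified model
}

fn main() {
    let rendered = elide_runs(&[
        ("let mut vec2 = vec;", true),
        ("vec2.push(3);", false),
        ("vec2.push(4);", false),
        ("vec.push(7);", true),
    ]);
    assert_eq!(rendered, ["let mut vec2 = vec;", "...", "vec.push(7);"]);
    println!("{:?}", rendered);
}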
+ let unannotated_line = next_line; + let mut unannotated_lines = 0; + while let Some(line) = next_line { + if !line.annotations.is_empty() { break; } + unannotated_lines += 1; + next_line = lines_iter.next(); + } + if unannotated_lines > 1 { + output.push(RenderedLine::from((String::new(), + Style::NoStyle, + RenderedLineKind::Elision))); + } else if let Some(line) = unannotated_line { + output.append(&mut self.render_line(line)); + } + } + + output + } + + fn render_line(&self, line: &Line) -> Vec { + let old_school = check_old_skool(); + let source_string = self.file.get_line(line.line_index) + .unwrap_or(""); + let source_kind = RenderedLineKind::SourceText { + file: self.file.clone(), + line_index: line.line_index, + }; + + let mut styled_buffer = StyledBuffer::new(); + + // First create the source line we will highlight. + styled_buffer.append(0, &source_string, Style::Quotation); + + if line.annotations.is_empty() { + return styled_buffer.render(source_kind); + } + + // We want to display like this: + // + // vec.push(vec.pop().unwrap()); + // --- ^^^ _ previous borrow ends here + // | | + // | error occurs here + // previous borrow of `vec` occurs here + // + // But there are some weird edge cases to be aware of: + // + // vec.push(vec.pop().unwrap()); + // -------- - previous borrow ends here + // || + // |this makes no sense + // previous borrow of `vec` occurs here + // + // For this reason, we group the lines into "highlight lines" + // and "annotations lines", where the highlight lines have the `~`. + + //let mut highlight_line = Self::whitespace(&source_string); + + // Sort the annotations by (start, end col) + let mut annotations = line.annotations.clone(); + annotations.sort(); + + // Next, create the highlight line. + for annotation in &annotations { + if old_school { + for p in annotation.start_col .. annotation.end_col { + if p == annotation.start_col { + styled_buffer.putc(1, p, '^', + if annotation.is_primary { + Style::UnderlinePrimary + } else { + Style::OldSkoolNote + }); + } + else { + styled_buffer.putc(1, p, '~', + if annotation.is_primary { + Style::UnderlinePrimary + } else { + Style::OldSkoolNote + }); + } + } + } + else { + for p in annotation.start_col .. annotation.end_col { + if annotation.is_primary { + styled_buffer.putc(1, p, '^', Style::UnderlinePrimary); + if !annotation.is_minimized { + styled_buffer.set_style(0, p, Style::UnderlinePrimary); + } + } else { + styled_buffer.putc(1, p, '-', Style::UnderlineSecondary); + if !annotation.is_minimized { + styled_buffer.set_style(0, p, Style::UnderlineSecondary); + } + } + } + } + } + + // Now we are going to write labels in. To start, we'll exclude + // the annotations with no labels. + let (labeled_annotations, unlabeled_annotations): (Vec<_>, _) = + annotations.into_iter() + .partition(|a| a.label.is_some()); + + // If there are no annotations that need text, we're done. + if labeled_annotations.is_empty() { + return styled_buffer.render(source_kind); + } + if old_school { + return styled_buffer.render(source_kind); + } + + // Now add the text labels. We try, when possible, to stick the rightmost + // annotation at the end of the highlight line: + // + // vec.push(vec.pop().unwrap()); + // --- --- - previous borrow ends here + // + // But sometimes that's not possible because one of the other + // annotations overlaps it. 
For example, from the test + // `span_overlap_label`, we have the following annotations + // (written on distinct lines for clarity): + // + // fn foo(x: u32) { + // -------------- + // - + // + // In this case, we can't stick the rightmost-most label on + // the highlight line, or we would get: + // + // fn foo(x: u32) { + // -------- x_span + // | + // fn_span + // + // which is totally weird. Instead we want: + // + // fn foo(x: u32) { + // -------------- + // | | + // | x_span + // fn_span + // + // which is...less weird, at least. In fact, in general, if + // the rightmost span overlaps with any other span, we should + // use the "hang below" version, so we can at least make it + // clear where the span *starts*. + let mut labeled_annotations = &labeled_annotations[..]; + match labeled_annotations.split_last().unwrap() { + (last, previous) => { + if previous.iter() + .chain(&unlabeled_annotations) + .all(|a| !overlaps(a, last)) + { + // append the label afterwards; we keep it in a separate + // string + let highlight_label: String = format!(" {}", last.label.as_ref().unwrap()); + if last.is_primary { + styled_buffer.append(1, &highlight_label, Style::LabelPrimary); + } else { + styled_buffer.append(1, &highlight_label, Style::LabelSecondary); + } + labeled_annotations = previous; + } + } + } + + // If that's the last annotation, we're done + if labeled_annotations.is_empty() { + return styled_buffer.render(source_kind); + } + + for (index, annotation) in labeled_annotations.iter().enumerate() { + // Leave: + // - 1 extra line + // - One line for each thing that comes after + let comes_after = labeled_annotations.len() - index - 1; + let blank_lines = 3 + comes_after; + + // For each blank line, draw a `|` at our column. The + // text ought to be long enough for this. + for index in 2..blank_lines { + if annotation.is_primary { + styled_buffer.putc(index, annotation.start_col, '|', Style::UnderlinePrimary); + } else { + styled_buffer.putc(index, annotation.start_col, '|', Style::UnderlineSecondary); + } + } + + if annotation.is_primary { + styled_buffer.puts(blank_lines, annotation.start_col, + annotation.label.as_ref().unwrap(), Style::LabelPrimary); + } else { + styled_buffer.puts(blank_lines, annotation.start_col, + annotation.label.as_ref().unwrap(), Style::LabelSecondary); + } + } + + styled_buffer.render(source_kind) + } +} + +fn prepend_prefixes(rendered_lines: &mut [RenderedLine]) { + let old_school = check_old_skool(); + if old_school { + return; + } + + let prefixes: Vec<_> = + rendered_lines.iter() + .map(|rl| rl.kind.prefix()) + .collect(); + + // find the max amount of spacing we need; add 1 to + // p.text.len() to leave space between the prefix and the + // source text + let padding_len = + prefixes.iter() + .map(|p| if p.text.len() == 0 { 0 } else { p.text.len() + 1 }) + .max() + .unwrap_or(0); + + // Ensure we insert at least one character of padding, so that the + // `-->` arrows can fit etc. + let padding_len = cmp::max(padding_len, 1); + + for (mut prefix, line) in prefixes.into_iter().zip(rendered_lines) { + let extra_spaces = (prefix.text.len() .. 
padding_len).map(|_| ' '); + prefix.text.extend(extra_spaces); + match line.kind { + RenderedLineKind::Elision => { + line.text.insert(0, prefix); + } + RenderedLineKind::PrimaryFileName => { + // --> filename + // 22 |> + // ^ + // padding_len + let dashes = (0..padding_len - 1).map(|_| ' ') + .chain(Some('-')) + .chain(Some('-')) + .chain(Some('>')) + .chain(Some(' ')); + line.text.insert(0, StyledString {text: dashes.collect(), + style: Style::LineNumber}) + } + RenderedLineKind::OtherFileName => { + // ::: filename + // 22 |> + // ^ + // padding_len + let dashes = (0..padding_len - 1).map(|_| ' ') + .chain(Some(':')) + .chain(Some(':')) + .chain(Some(':')) + .chain(Some(' ')); + line.text.insert(0, StyledString {text: dashes.collect(), + style: Style::LineNumber}) + } + _ => { + line.text.insert(0, prefix); + line.text.insert(1, StyledString {text: String::from("|> "), + style: Style::LineNumber}) + } + } + } +} + +fn trim_lines(rendered_lines: &mut [RenderedLine]) { + for line in rendered_lines { + while !line.text.is_empty() { + line.trim_last(); + if line.text.last().unwrap().text.is_empty() { + line.text.pop(); + } else { + break; + } + } + } +} + +impl Line { + fn new(line_index: usize) -> Line { + Line { + line_index: line_index, + annotations: vec![] + } + } + + fn push_annotation(&mut self, + start: CharPos, + end: CharPos, + is_primary: bool, + is_minimized: bool, + label: Option) { + self.annotations.push(Annotation { + start_col: start.0, + end_col: end.0, + is_primary: is_primary, + is_minimized: is_minimized, + label: label, + }); + } +} + +fn overlaps(a1: &Annotation, + a2: &Annotation) + -> bool +{ + (a2.start_col .. a2.end_col).contains(a1.start_col) || + (a1.start_col .. a1.end_col).contains(a2.start_col) +} diff --git a/src/libsyntax/errors/snippet/test.rs b/src/libsyntax/errors/snippet/test.rs new file mode 100644 index 0000000000..62ce3fa9dd --- /dev/null +++ b/src/libsyntax/errors/snippet/test.rs @@ -0,0 +1,583 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Code for testing annotated snippets. + +#![cfg(test)] + +use codemap::{BytePos, CodeMap, FileMap, NO_EXPANSION, Span}; +use std::rc::Rc; +use super::{RenderedLine, SnippetData}; + +/// Returns the span corresponding to the `n`th occurrence of +/// `substring` in `source_text`. 
+trait CodeMapExtension { + fn span_substr(&self, + file: &Rc, + source_text: &str, + substring: &str, + n: usize) + -> Span; +} + +impl CodeMapExtension for CodeMap { + fn span_substr(&self, + file: &Rc, + source_text: &str, + substring: &str, + n: usize) + -> Span + { + println!("span_substr(file={:?}/{:?}, substring={:?}, n={})", + file.name, file.start_pos, substring, n); + let mut i = 0; + let mut hi = 0; + loop { + let offset = source_text[hi..].find(substring).unwrap_or_else(|| { + panic!("source_text `{}` does not have {} occurrences of `{}`, only {}", + source_text, n, substring, i); + }); + let lo = hi + offset; + hi = lo + substring.len(); + if i == n { + let span = Span { + lo: BytePos(lo as u32 + file.start_pos.0), + hi: BytePos(hi as u32 + file.start_pos.0), + expn_id: NO_EXPANSION, + }; + assert_eq!(&self.span_to_snippet(span).unwrap()[..], + substring); + return span; + } + i += 1; + } + } +} + +fn splice(start: Span, end: Span) -> Span { + Span { + lo: start.lo, + hi: end.hi, + expn_id: NO_EXPANSION, + } +} + +fn make_string(lines: &[RenderedLine]) -> String { + lines.iter() + .flat_map(|rl| { + rl.text.iter() + .map(|s| &s.text[..]) + .chain(Some("\n")) + }) + .collect() +} + +#[test] +fn tab() { + let file_text = " +fn foo() { +\tbar; +} +"; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + let span_bar = cm.span_substr(&foo, file_text, "bar", 0); + + let mut snippet = SnippetData::new(cm, Some(span_bar)); + snippet.push(span_bar, true, None); + + let lines = snippet.render_lines(); + let text = make_string(&lines); + assert_eq!(&text[..], &" + --> foo.rs:3:2 +3 |> \tbar; + |> \t^^^ +"[1..]); +} + +#[test] +fn one_line() { + let file_text = r#" +fn foo() { + vec.push(vec.pop().unwrap()); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + let span_vec0 = cm.span_substr(&foo, file_text, "vec", 0); + let span_vec1 = cm.span_substr(&foo, file_text, "vec", 1); + let span_semi = cm.span_substr(&foo, file_text, ";", 0); + + let mut snippet = SnippetData::new(cm, None); + snippet.push(span_vec0, false, Some(format!("previous borrow of `vec` occurs here"))); + snippet.push(span_vec1, false, Some(format!("error occurs here"))); + snippet.push(span_semi, false, Some(format!("previous borrow ends here"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + + let text: String = make_string(&lines); + + println!("text=\n{}", text); + assert_eq!(&text[..], &r#" + ::: foo.rs +3 |> vec.push(vec.pop().unwrap()); + |> --- --- - previous borrow ends here + |> | | + |> | error occurs here + |> previous borrow of `vec` occurs here +"#[1..]); +} + +#[test] +fn two_files() { + let file_text_foo = r#" +fn foo() { + vec.push(vec.pop().unwrap()); +} +"#; + + let file_text_bar = r#" +fn bar() { + // these blank links here + // serve to ensure that the line numbers + // from bar.rs + // require more digits + + + + + + + + + + + vec.push(); + + // this line will get elided + + vec.pop().unwrap()); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo_map = cm.new_filemap_and_lines("foo.rs", file_text_foo); + let span_foo_vec0 = cm.span_substr(&foo_map, file_text_foo, "vec", 0); + let span_foo_vec1 = cm.span_substr(&foo_map, file_text_foo, "vec", 1); + let span_foo_semi = cm.span_substr(&foo_map, file_text_foo, ";", 0); + + let bar_map = cm.new_filemap_and_lines("bar.rs", file_text_bar); + let span_bar_vec0 = cm.span_substr(&bar_map, file_text_bar, "vec", 0); + let 
span_bar_vec1 = cm.span_substr(&bar_map, file_text_bar, "vec", 1); + let span_bar_semi = cm.span_substr(&bar_map, file_text_bar, ";", 0); + + let mut snippet = SnippetData::new(cm, Some(span_foo_vec1)); + snippet.push(span_foo_vec0, false, Some(format!("a"))); + snippet.push(span_foo_vec1, true, Some(format!("b"))); + snippet.push(span_foo_semi, false, Some(format!("c"))); + snippet.push(span_bar_vec0, false, Some(format!("d"))); + snippet.push(span_bar_vec1, false, Some(format!("e"))); + snippet.push(span_bar_semi, false, Some(format!("f"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + + let text: String = make_string(&lines); + + println!("text=\n{}", text); + + // Note that the `|>` remain aligned across both files: + assert_eq!(&text[..], &r#" + --> foo.rs:3:14 +3 |> vec.push(vec.pop().unwrap()); + |> --- ^^^ - c + |> | | + |> | b + |> a + ::: bar.rs +17 |> vec.push(); + |> --- - f + |> | + |> d +... +21 |> vec.pop().unwrap()); + |> --- e +"#[1..]); +} + +#[test] +fn multi_line() { + let file_text = r#" +fn foo() { + let name = find_id(&data, 22).unwrap(); + + // Add one more item we forgot to the vector. Silly us. + data.push(Data { name: format!("Hera"), id: 66 }); + + // Print everything out. + println!("Name: {:?}", name); + println!("Data: {:?}", data); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + let span_data0 = cm.span_substr(&foo, file_text, "data", 0); + let span_data1 = cm.span_substr(&foo, file_text, "data", 1); + let span_rbrace = cm.span_substr(&foo, file_text, "}", 3); + + let mut snippet = SnippetData::new(cm, None); + snippet.push(span_data0, false, Some(format!("immutable borrow begins here"))); + snippet.push(span_data1, false, Some(format!("mutable borrow occurs here"))); + snippet.push(span_rbrace, false, Some(format!("immutable borrow ends here"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + + let text: String = make_string(&lines); + + println!("text=\n{}", text); + assert_eq!(&text[..], &r#" + ::: foo.rs +3 |> let name = find_id(&data, 22).unwrap(); + |> ---- immutable borrow begins here +... +6 |> data.push(Data { name: format!("Hera"), id: 66 }); + |> ---- mutable borrow occurs here +... 
+11 |> } + |> - immutable borrow ends here +"#[1..]); +} + +#[test] +fn overlapping() { + let file_text = r#" +fn foo() { + vec.push(vec.pop().unwrap()); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + let span0 = cm.span_substr(&foo, file_text, "vec.push", 0); + let span1 = cm.span_substr(&foo, file_text, "vec", 0); + let span2 = cm.span_substr(&foo, file_text, "ec.push", 0); + let span3 = cm.span_substr(&foo, file_text, "unwrap", 0); + + let mut snippet = SnippetData::new(cm, None); + snippet.push(span0, false, Some(format!("A"))); + snippet.push(span1, false, Some(format!("B"))); + snippet.push(span2, false, Some(format!("C"))); + snippet.push(span3, false, Some(format!("D"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + let text: String = make_string(&lines); + + println!("text=r#\"\n{}\".trim_left()", text); + assert_eq!(&text[..], &r#" + ::: foo.rs +3 |> vec.push(vec.pop().unwrap()); + |> -------- ------ D + |> || + |> |C + |> A + |> B +"#[1..]); +} + +#[test] +fn one_line_out_of_order() { + let file_text = r#" +fn foo() { + vec.push(vec.pop().unwrap()); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + let span_vec0 = cm.span_substr(&foo, file_text, "vec", 0); + let span_vec1 = cm.span_substr(&foo, file_text, "vec", 1); + let span_semi = cm.span_substr(&foo, file_text, ";", 0); + + // intentionally don't push the snippets left to right + let mut snippet = SnippetData::new(cm, None); + snippet.push(span_vec1, false, Some(format!("error occurs here"))); + snippet.push(span_vec0, false, Some(format!("previous borrow of `vec` occurs here"))); + snippet.push(span_semi, false, Some(format!("previous borrow ends here"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + let text: String = make_string(&lines); + + println!("text=r#\"\n{}\".trim_left()", text); + assert_eq!(&text[..], &r#" + ::: foo.rs +3 |> vec.push(vec.pop().unwrap()); + |> --- --- - previous borrow ends here + |> | | + |> | error occurs here + |> previous borrow of `vec` occurs here +"#[1..]); +} + +#[test] +fn elide_unnecessary_lines() { + let file_text = r#" +fn foo() { + let mut vec = vec![0, 1, 2]; + let mut vec2 = vec; + vec2.push(3); + vec2.push(4); + vec2.push(5); + vec2.push(6); + vec.push(7); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + let span_vec0 = cm.span_substr(&foo, file_text, "vec", 3); + let span_vec1 = cm.span_substr(&foo, file_text, "vec", 8); + + let mut snippet = SnippetData::new(cm, None); + snippet.push(span_vec0, false, Some(format!("`vec` moved here because it \ + has type `collections::vec::Vec`"))); + snippet.push(span_vec1, false, Some(format!("use of moved value: `vec`"))); + + let lines = snippet.render_lines(); + println!("{:#?}", lines); + let text: String = make_string(&lines); + println!("text=r#\"\n{}\".trim_left()", text); + assert_eq!(&text[..], &r#" + ::: foo.rs +4 |> let mut vec2 = vec; + |> --- `vec` moved here because it has type `collections::vec::Vec` +... 
+9 |> vec.push(7); + |> --- use of moved value: `vec` +"#[1..]); +} + +#[test] +fn spans_without_labels() { + let file_text = r#" +fn foo() { + let mut vec = vec![0, 1, 2]; + let mut vec2 = vec; + vec2.push(3); + vec2.push(4); + vec2.push(5); + vec2.push(6); + vec.push(7); +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + + let mut snippet = SnippetData::new(cm.clone(), None); + for i in 0..4 { + let span_veci = cm.span_substr(&foo, file_text, "vec", i); + snippet.push(span_veci, false, None); + } + + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("text=&r#\"\n{}\n\"#[1..]", text); + assert_eq!(text, &r#" + ::: foo.rs +3 |> let mut vec = vec![0, 1, 2]; + |> --- --- +4 |> let mut vec2 = vec; + |> --- --- +"#[1..]); +} + +#[test] +fn span_long_selection() { + let file_text = r#" +impl SomeTrait for () { + fn foo(x: u32) { + // impl 1 + // impl 2 + // impl 3 + } +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + + let mut snippet = SnippetData::new(cm.clone(), None); + let fn_span = cm.span_substr(&foo, file_text, "fn", 0); + let rbrace_span = cm.span_substr(&foo, file_text, "}", 0); + snippet.push(splice(fn_span, rbrace_span), false, None); + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("r#\"\n{}\"", text); + assert_eq!(text, &r#" + ::: foo.rs +3 |> fn foo(x: u32) { + |> - +"#[1..]); +} + +#[test] +fn span_overlap_label() { + // Test that we don't put `x_span` to the right of its highlight, + // since there is another highlight that overlaps it. + + let file_text = r#" + fn foo(x: u32) { + } +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + + let mut snippet = SnippetData::new(cm.clone(), None); + let fn_span = cm.span_substr(&foo, file_text, "fn foo(x: u32)", 0); + let x_span = cm.span_substr(&foo, file_text, "x", 0); + snippet.push(fn_span, false, Some(format!("fn_span"))); + snippet.push(x_span, false, Some(format!("x_span"))); + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("r#\"\n{}\"", text); + assert_eq!(text, &r#" + ::: foo.rs +2 |> fn foo(x: u32) { + |> -------------- + |> | | + |> | x_span + |> fn_span +"#[1..]); +} + +#[test] +fn span_overlap_label2() { + // Test that we don't put `x_span` to the right of its highlight, + // since there is another highlight that overlaps it. In this + // case, the overlap is only at the beginning, but it's still + // better to show the beginning more clearly. + + let file_text = r#" + fn foo(x: u32) { + } +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + + let mut snippet = SnippetData::new(cm.clone(), None); + let fn_span = cm.span_substr(&foo, file_text, "fn foo(x", 0); + let x_span = cm.span_substr(&foo, file_text, "x: u32)", 0); + snippet.push(fn_span, false, Some(format!("fn_span"))); + snippet.push(x_span, false, Some(format!("x_span"))); + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("r#\"\n{}\"", text); + assert_eq!(text, &r#" + ::: foo.rs +2 |> fn foo(x: u32) { + |> -------------- + |> | | + |> | x_span + |> fn_span +"#[1..]); +} + +#[test] +fn span_overlap_label3() { + // Test that we don't put `x_span` to the right of its highlight, + // since there is another highlight that overlaps it. 
In this + // case, the overlap is only at the beginning, but it's still + // better to show the beginning more clearly. + + let file_text = r#" + fn foo() { + let closure = || { + inner + }; + } +} +"#; + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + + let mut snippet = SnippetData::new(cm.clone(), None); + + let closure_span = { + let closure_start_span = cm.span_substr(&foo, file_text, "||", 0); + let closure_end_span = cm.span_substr(&foo, file_text, "}", 0); + splice(closure_start_span, closure_end_span) + }; + + let inner_span = cm.span_substr(&foo, file_text, "inner", 0); + + snippet.push(closure_span, false, Some(format!("foo"))); + snippet.push(inner_span, false, Some(format!("bar"))); + + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("r#\"\n{}\"", text); + assert_eq!(text, &r#" + ::: foo.rs +3 |> let closure = || { + |> - foo +4 |> inner + |> ----- bar +"#[1..]); +} + +#[test] +fn span_empty() { + // In one of the unit tests, we found that the parser sometimes + // gives empty spans, and in particular it supplied an EOF span + // like this one, which points at the very end. We want to + // fallback gracefully in this case. + + let file_text = r#" +fn main() { + struct Foo; + + impl !Sync for Foo {} + + unsafe impl Send for &'static Foo { + // error: cross-crate traits with a default impl, like `core::marker::Send`, + // can only be implemented for a struct/enum type, not + // `&'static Foo` +}"#; + + + let cm = Rc::new(CodeMap::new()); + let foo = cm.new_filemap_and_lines("foo.rs", file_text); + + let mut rbrace_span = cm.span_substr(&foo, file_text, "}", 1); + rbrace_span.lo = rbrace_span.hi; + + let mut snippet = SnippetData::new(cm.clone(), Some(rbrace_span)); + snippet.push(rbrace_span, false, None); + let lines = snippet.render_lines(); + let text: String = make_string(&lines); + println!("r#\"\n{}\"", text); + assert_eq!(text, &r#" + --> foo.rs:11:2 +11 |> } + |> - +"#[1..]); +} diff --git a/src/libsyntax/ext/base.rs b/src/libsyntax/ext/base.rs index c0306b8494..303187aeba 100644 --- a/src/libsyntax/ext/base.rs +++ b/src/libsyntax/ext/base.rs @@ -764,15 +764,14 @@ impl<'a> ExtCtxt<'a> { pub fn suggest_macro_name(&mut self, name: &str, - span: Span, err: &mut DiagnosticBuilder<'a>) { let names = &self.syntax_env.names; if let Some(suggestion) = find_best_match_for_name(names.iter(), name, None) { if suggestion != name { - err.fileline_help(span, &format!("did you mean `{}!`?", suggestion)); + err.help(&format!("did you mean `{}!`?", suggestion)); } else { - err.fileline_help(span, &format!("have you added the `#[macro_use]` on the \ - module/import?")); + err.help(&format!("have you added the `#[macro_use]` on the \ + module/import?")); } } } diff --git a/src/libsyntax/ext/build.rs b/src/libsyntax/ext/build.rs index a4e5b68277..7958162986 100644 --- a/src/libsyntax/ext/build.rs +++ b/src/libsyntax/ext/build.rs @@ -13,9 +13,7 @@ use ast::{self, Ident, Generics, Expr, BlockCheckMode, UnOp, PatKind}; use attr; use codemap::{Span, respan, Spanned, DUMMY_SP, Pos}; use ext::base::ExtCtxt; -use parse::token::special_idents; -use parse::token::InternedString; -use parse::token; +use parse::token::{self, keywords, InternedString}; use ptr::P; // Transitional reexports so qquote can find the paths it is looking for @@ -194,10 +192,14 @@ pub trait AstBuilder { cond: P, then: P, els: Option>) -> P; fn expr_loop(&self, span: Span, block: P) -> P; - fn lambda_fn_decl(&self, span: Span, - fn_decl: 
P, blk: P) -> P; + fn lambda_fn_decl(&self, + span: Span, + fn_decl: P, + blk: P, + fn_decl_span: Span) + -> P; - fn lambda(&self, span: Span, ids: Vec , blk: P) -> P; + fn lambda(&self, span: Span, ids: Vec, blk: P) -> P; fn lambda0(&self, span: Span, blk: P) -> P; fn lambda1(&self, span: Span, blk: P, ident: ast::Ident) -> P; @@ -602,7 +604,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr_path(self.path_ident(span, id)) } fn expr_self(&self, span: Span) -> P { - self.expr_ident(span, special_idents::self_) + self.expr_ident(span, keywords::SelfValue.ident()) } fn expr_binary(&self, sp: Span, op: ast::BinOpKind, @@ -753,7 +755,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { let expr_file_line_ptr = self.expr_addr_of(span, expr_file_line_tuple); self.expr_call_global( span, - self.std_path(&["rt", "begin_unwind"]), + self.std_path(&["rt", "begin_panic"]), vec!( self.expr_str(span, msg), expr_file_line_ptr)) @@ -894,17 +896,34 @@ impl<'a> AstBuilder for ExtCtxt<'a> { self.expr(span, ast::ExprKind::Loop(block, None)) } - fn lambda_fn_decl(&self, span: Span, - fn_decl: P, blk: P) -> P { - self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref, fn_decl, blk)) + fn lambda_fn_decl(&self, + span: Span, + fn_decl: P, + blk: P, + fn_decl_span: Span) // span of the `|...|` part + -> P { + self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref, + fn_decl, + blk, + fn_decl_span)) } - fn lambda(&self, span: Span, ids: Vec, blk: P) -> P { + + fn lambda(&self, + span: Span, + ids: Vec, + blk: P) + -> P { let fn_decl = self.fn_decl( ids.iter().map(|id| self.arg(span, *id, self.ty_infer(span))).collect(), self.ty_infer(span)); - self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref, fn_decl, blk)) + // FIXME -- We are using `span` as the span of the `|...|` + // part of the lambda, but it probably (maybe?) corresponds to + // the entire lambda body. Probably we should extend the API + // here, but that's not entirely clear. 
+ self.expr(span, ast::ExprKind::Closure(ast::CaptureBy::Ref, fn_decl, blk, span)) } + fn lambda0(&self, span: Span, blk: P) -> P { self.lambda(span, Vec::new(), blk) } @@ -1132,7 +1151,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> { vis: ast::Visibility, vp: P) -> P { P(ast::Item { id: ast::DUMMY_NODE_ID, - ident: special_idents::invalid, + ident: keywords::Invalid.ident(), attrs: vec![], node: ast::ItemKind::Use(vp), vis: vis, diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs index 8550617560..f243706eec 100644 --- a/src/libsyntax/ext/expand.rs +++ b/src/libsyntax/ext/expand.rs @@ -25,7 +25,7 @@ use fold; use fold::*; use util::move_map::MoveMap; use parse; -use parse::token::{fresh_mark, fresh_name, intern}; +use parse::token::{fresh_mark, fresh_name, intern, keywords}; use ptr::P; use util::small_vector::SmallVector; use visit; @@ -35,6 +35,16 @@ use std_inject; use std::collections::HashSet; use std::env; +// this function is called to detect use of feature-gated or invalid attributes +// on macro invoations since they will not be detected after macro expansion +fn check_attributes(attrs: &[ast::Attribute], fld: &MacroExpander) { + for attr in attrs.iter() { + feature_gate::check_attribute(&attr, &fld.cx.parse_sess.span_diagnostic, + &fld.cx.parse_sess.codemap(), + &fld.cx.ecfg.features.unwrap()); + } +} + pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { let expr_span = e.span; return e.and_then(|ast::Expr {id, node, span, attrs}| match node { @@ -42,6 +52,9 @@ pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { // expr_mac should really be expr_ext or something; it's the // entry-point for all syntax extensions. ast::ExprKind::Mac(mac) => { + if let Some(ref attrs) = attrs { + check_attributes(attrs, fld); + } // Assert that we drop any macro attributes on the floor here drop(attrs); @@ -57,23 +70,19 @@ pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { // Keep going, outside-in. 
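// A minimal stand-alone sketch of the pre-expansion attribute check that
// `check_attributes` performs: walk the attributes attached to a macro
// invocation and reject gated ones before expansion drops them. The
// `Attribute` type and the gate lookup below are simplified stand-ins, not
// the real feature_gate API:
#[derive(Debug)]
struct Attribute { name: String }

fn is_gated(attr: &Attribute) -> bool {
    // stand-in for the real feature-gate table lookup
    attr.name == "allow_internal_unstable"
}

fn check_attributes(attrs: &[Attribute]) -> Result<(), String> {
    for attr in attrs {
        if is_gated(attr) {
            return Err(format!("attribute `{}` is feature gated", attr.name));
        }
    }
    Ok(())
}

fn main() {
    let attrs = vec![Attribute { name: "allow_internal_unstable".into() }];
    // a macro invocation carrying this attribute is rejected before expansion
    assert!(check_attributes(&attrs).is_err());
}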
let fully_expanded = fld.fold_expr(expanded_expr); - let span = fld.new_span(span); fld.cx.bt_pop(); - fully_expanded.map(|e| ast::Expr { - id: ast::DUMMY_NODE_ID, - node: e.node, - span: span, - attrs: e.attrs, - }) + fully_expanded } ast::ExprKind::InPlace(placer, value_expr) => { // Ensure feature-gate is enabled - feature_gate::check_for_placement_in( - fld.cx.ecfg.features, - &fld.cx.parse_sess.span_diagnostic, - expr_span); + if !fld.cx.ecfg.features.unwrap().placement_in_syntax { + feature_gate::emit_feature_err( + &fld.cx.parse_sess.span_diagnostic, "placement_in_syntax", expr_span, + feature_gate::GateIssue::Language, feature_gate::EXPLAIN_PLACEMENT_IN + ); + } let placer = fld.fold_expr(placer); let value_expr = fld.fold_expr(value_expr); @@ -149,14 +158,17 @@ pub fn expand_expr(e: P, fld: &mut MacroExpander) -> P { fld.cx.expr(span, il).with_attrs(fold_thin_attrs(attrs, fld)) } - ast::ExprKind::Closure(capture_clause, fn_decl, block) => { + ast::ExprKind::Closure(capture_clause, fn_decl, block, fn_decl_span) => { let (rewritten_fn_decl, rewritten_block) = expand_and_rename_fn_decl_and_block(fn_decl, block, fld); let new_node = ast::ExprKind::Closure(capture_clause, - rewritten_fn_decl, - rewritten_block); - P(ast::Expr{id:id, node: new_node, span: fld.new_span(span), - attrs: fold_thin_attrs(attrs, fld)}) + rewritten_fn_decl, + rewritten_block, + fld.new_span(fn_decl_span)); + P(ast::Expr{ id:id, + node: new_node, + span: fld.new_span(span), + attrs: fold_thin_attrs(attrs, fld) }) } _ => { @@ -204,7 +216,7 @@ fn expand_mac_invoc(mac: ast::Mac, pth.span, &format!("macro undefined: '{}!'", &extname)); - fld.cx.suggest_macro_name(&extname.as_str(), pth.span, &mut err); + fld.cx.suggest_macro_name(&extname.as_str(), &mut err); err.emit(); // let compilation continue @@ -309,9 +321,7 @@ macro_rules! with_exts_frame { // When we enter a module, record it, for the sake of `module!` pub fn expand_item(it: P, fld: &mut MacroExpander) -> SmallVector> { - let it = expand_item_multi_modifier(Annotatable::Item(it), fld); - - expand_annotatable(it, fld) + expand_annotatable(Annotatable::Item(it), fld) .into_iter().map(|i| i.expect_item()).collect() } @@ -339,8 +349,8 @@ fn contains_macro_use(fld: &mut MacroExpander, attrs: &[ast::Attribute]) -> bool "macro_escape is a deprecated synonym for macro_use"); is_use = true; if let ast::AttrStyle::Inner = attr.node.style { - err.fileline_help(attr.span, "consider an outer attribute, \ - #[macro_use] mod ...").emit(); + err.help("consider an outer attribute, \ + #[macro_use] mod ...").emit(); } else { err.emit(); } @@ -367,6 +377,8 @@ pub fn expand_item_mac(it: P, _ => fld.cx.span_bug(it.span, "invalid item macro invocation") }); + check_attributes(&attrs, fld); + let fm = fresh_mark(); let items = { let expanded = match fld.cx.syntax_env.find(extname) { @@ -380,7 +392,7 @@ pub fn expand_item_mac(it: P, Some(rc) => match *rc { NormalTT(ref expander, tt_span, allow_internal_unstable) => { - if ident.name != parse::token::special_idents::invalid.name { + if ident.name != keywords::Invalid.name() { fld.cx .span_err(path_span, &format!("macro {}! expects no ident argument, given '{}'", @@ -401,7 +413,7 @@ pub fn expand_item_mac(it: P, expander.expand(fld.cx, span, &marked_before[..]) } IdentTT(ref expander, tt_span, allow_internal_unstable) => { - if ident.name == parse::token::special_idents::invalid.name { + if ident.name == keywords::Invalid.name() { fld.cx.span_err(path_span, &format!("macro {}! 
expects an ident argument", extname)); @@ -420,7 +432,7 @@ pub fn expand_item_mac(it: P, expander.expand(fld.cx, span, ident, marked_tts) } MacroRulesTT => { - if ident.name == parse::token::special_idents::invalid.name { + if ident.name == keywords::Invalid.name() { fld.cx.span_err(path_span, "macro_rules! expects an ident argument"); return SmallVector::zero(); } @@ -441,18 +453,6 @@ pub fn expand_item_mac(it: P, let allow_internal_unstable = attr::contains_name(&attrs, "allow_internal_unstable"); - // ensure any #[allow_internal_unstable]s are - // detected (including nested macro definitions - // etc.) - if allow_internal_unstable && !fld.cx.ecfg.enable_allow_internal_unstable() { - feature_gate::emit_feature_err( - &fld.cx.parse_sess.span_diagnostic, - "allow_internal_unstable", - span, - feature_gate::GateIssue::Language, - feature_gate::EXPLAIN_ALLOW_INTERNAL_UNSTABLE) - } - let export = attr::contains_name(&attrs, "macro_export"); let def = ast::MacroDef { ident: ident, @@ -504,11 +504,22 @@ pub fn expand_item_mac(it: P, /// Expand a stmt fn expand_stmt(stmt: Stmt, fld: &mut MacroExpander) -> SmallVector { + // perform all pending renames + let stmt = { + let pending_renames = &mut fld.cx.syntax_env.info().pending_renames; + let mut rename_fld = IdentRenamer{renames:pending_renames}; + rename_fld.fold_stmt(stmt).expect_one("rename_fold didn't return one value") + }; + let (mac, style, attrs) = match stmt.node { StmtKind::Mac(mac, style, attrs) => (mac, style, attrs), _ => return expand_non_macro_stmt(stmt, fld) }; + if let Some(ref attrs) = attrs { + check_attributes(attrs, fld); + } + // Assert that we drop any macro attributes on the floor here drop(attrs); @@ -717,14 +728,8 @@ pub fn expand_block(blk: P, fld: &mut MacroExpander) -> P { pub fn expand_block_elts(b: P, fld: &mut MacroExpander) -> P { b.map(|Block {id, stmts, expr, rules, span}| { let new_stmts = stmts.into_iter().flat_map(|x| { - // perform all pending renames - let renamed_stmt = { - let pending_renames = &mut fld.cx.syntax_env.info().pending_renames; - let mut rename_fld = IdentRenamer{renames:pending_renames}; - rename_fld.fold_stmt(x).expect_one("rename_fold didn't return one value") - }; - // expand macros in the statement - fld.fold_stmt(renamed_stmt).into_iter() + // perform pending renames and expand macros in the statement + fld.fold_stmt(x).into_iter() }).collect(); let new_expr = expr.map(|x| { let expr = { @@ -892,7 +897,7 @@ fn expand_annotatable(a: Annotatable, } ast::ItemKind::Mod(_) | ast::ItemKind::ForeignMod(_) => { let valid_ident = - it.ident.name != parse::token::special_idents::invalid.name; + it.ident.name != keywords::Invalid.name(); if valid_ident { fld.cx.mod_push(it.ident); @@ -1062,7 +1067,7 @@ fn expand_impl_item(ii: ast::ImplItem, fld: &mut MacroExpander) attrs: ii.attrs, vis: ii.vis, defaultness: ii.defaultness, - node: match ii.node { + node: match ii.node { ast::ImplItemKind::Method(sig, body) => { let (sig, body) = expand_and_rename_method(sig, body, fld); ast::ImplItemKind::Method(sig, body) @@ -1071,13 +1076,11 @@ fn expand_impl_item(ii: ast::ImplItem, fld: &mut MacroExpander) }, span: fld.new_span(ii.span) }), - ast::ImplItemKind::Macro(_) => { - let (span, mac) = match ii.node { - ast::ImplItemKind::Macro(mac) => (ii.span, mac), - _ => unreachable!() - }; + ast::ImplItemKind::Macro(mac) => { + check_attributes(&ii.attrs, fld); + let maybe_new_items = - expand_mac_invoc(mac, span, + expand_mac_invoc(mac, ii.span, |r| r.make_impl_items(), |meths, mark| meths.move_map(|m| 
mark_impl_item(m, mark)), fld); @@ -1344,14 +1347,14 @@ impl<'feat> ExpansionConfig<'feat> { } feature_tests! { - fn enable_quotes = allow_quote, - fn enable_asm = allow_asm, - fn enable_log_syntax = allow_log_syntax, - fn enable_concat_idents = allow_concat_idents, - fn enable_trace_macros = allow_trace_macros, + fn enable_quotes = quote, + fn enable_asm = asm, + fn enable_log_syntax = log_syntax, + fn enable_concat_idents = concat_idents, + fn enable_trace_macros = trace_macros, fn enable_allow_internal_unstable = allow_internal_unstable, - fn enable_custom_derive = allow_custom_derive, - fn enable_pushpop_unsafe = allow_pushpop_unsafe, + fn enable_custom_derive = custom_derive, + fn enable_pushpop_unsafe = pushpop_unsafe, } } @@ -1485,7 +1488,7 @@ mod tests { use ext::mtwt; use fold::Folder; use parse; - use parse::token; + use parse::token::{self, keywords}; use util::parser_testing::{string_to_parser}; use util::parser_testing::{string_to_pat, string_to_crate, strs_to_idents}; use visit; @@ -1806,7 +1809,7 @@ mod tests { // run one of the renaming tests fn run_renaming_test(t: &RenamingTest, test_idx: usize) { - let invalid_name = token::special_idents::invalid.name; + let invalid_name = keywords::Invalid.name(); let (teststr, bound_connections, bound_ident_check) = match *t { (ref str,ref conns, bic) => (str.to_string(), conns.clone(), bic) }; diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs index 77aeaf8459..ee9a197ce5 100644 --- a/src/libsyntax/ext/quote.rs +++ b/src/libsyntax/ext/quote.rs @@ -13,7 +13,7 @@ use codemap::Span; use ext::base::ExtCtxt; use ext::base; use ext::build::AstBuilder; -use parse::parser::{Parser, PathParsingMode}; +use parse::parser::{Parser, PathStyle}; use parse::token::*; use parse::token; use ptr::P; @@ -72,7 +72,7 @@ pub mod rt { impl ToTokens for ast::Ident { fn to_tokens(&self, _cx: &ExtCtxt) -> Vec { - vec![TokenTree::Token(DUMMY_SP, token::Ident(*self, token::Plain))] + vec![TokenTree::Token(DUMMY_SP, token::Ident(*self))] } } @@ -401,7 +401,7 @@ pub fn parse_meta_item_panic(parser: &mut Parser) -> P { panictry!(parser.parse_meta_item()) } -pub fn parse_path_panic(parser: &mut Parser, mode: PathParsingMode) -> ast::Path { +pub fn parse_path_panic(parser: &mut Parser, mode: PathStyle) -> ast::Path { panictry!(parser.parse_path(mode)) } @@ -500,7 +500,7 @@ pub fn expand_quote_path(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box { - let mode = mk_parser_path(cx, sp, "LifetimeAndTypesWithoutColons"); + let mode = mk_parser_path(cx, sp, &["PathStyle", "Type"]); let expanded = expand_parse_call(cx, sp, "parse_path_panic", vec!(mode), tts); base::MacEager::expr(expanded) } @@ -557,8 +557,9 @@ fn mk_token_path(cx: &ExtCtxt, sp: Span, name: &str) -> P { cx.expr_path(cx.path_global(sp, idents)) } -fn mk_parser_path(cx: &ExtCtxt, sp: Span, name: &str) -> P { - let idents = vec!(id_ext("syntax"), id_ext("parse"), id_ext("parser"), id_ext(name)); +fn mk_parser_path(cx: &ExtCtxt, sp: Span, names: &[&str]) -> P { + let mut idents = vec![id_ext("syntax"), id_ext("parse"), id_ext("parser")]; + idents.extend(names.iter().cloned().map(id_ext)); cx.expr_path(cx.path_global(sp, idents)) } @@ -646,14 +647,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { cx.expr_usize(sp, n)) } - token::Ident(ident, style) => { + token::Ident(ident) => { return cx.expr_call(sp, mk_token_path(cx, sp, "Ident"), - vec![mk_ident(cx, sp, ident), - match style { - ModName => mk_token_path(cx, sp, "ModName"), - Plain => mk_token_path(cx, sp, 
"Plain"), - }]); + vec![mk_ident(cx, sp, ident)]); } token::Lifetime(ident) => { @@ -668,19 +665,10 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))); } - token::MatchNt(name, kind, namep, kindp) => { + token::MatchNt(name, kind) => { return cx.expr_call(sp, mk_token_path(cx, sp, "MatchNt"), - vec!(mk_ident(cx, sp, name), - mk_ident(cx, sp, kind), - match namep { - ModName => mk_token_path(cx, sp, "ModName"), - Plain => mk_token_path(cx, sp, "Plain"), - }, - match kindp { - ModName => mk_token_path(cx, sp, "ModName"), - Plain => mk_token_path(cx, sp, "Plain"), - })); + vec![mk_ident(cx, sp, name), mk_ident(cx, sp, kind)]); } token::Interpolated(_) => panic!("quote! with interpolated token"), @@ -722,7 +710,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P { fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec { match *tt { - TokenTree::Token(sp, SubstNt(ident, _)) => { + TokenTree::Token(sp, SubstNt(ident)) => { // tt.extend($ident.to_tokens(ext_cx)) let e_to_toks = diff --git a/src/libsyntax/ext/tt/macro_parser.rs b/src/libsyntax/ext/tt/macro_parser.rs index 4e4c644776..89ecf02ee4 100644 --- a/src/libsyntax/ext/tt/macro_parser.rs +++ b/src/libsyntax/ext/tt/macro_parser.rs @@ -85,7 +85,7 @@ use codemap; use errors::FatalError; use parse::lexer::*; //resolve bug? use parse::ParseSess; -use parse::parser::{LifetimeAndTypesWithoutColons, Parser}; +use parse::parser::{PathStyle, Parser}; use parse::token::{DocComment, MatchNt, SubstNt}; use parse::token::{Token, Nonterminal}; use parse::token; @@ -216,7 +216,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc]) n_rec(p_s, next_m, res, ret_val, idx)?; } } - TokenTree::Token(sp, MatchNt(bind_name, _, _, _)) => { + TokenTree::Token(sp, MatchNt(bind_name, _)) => { match ret_val.entry(bind_name.name) { Vacant(spot) => { spot.insert(res[*idx].clone()); @@ -263,7 +263,7 @@ pub type PositionalParseResult = ParseResult>>; /// unhygienic comparison) pub fn token_name_eq(t1 : &Token, t2 : &Token) -> bool { match (t1,t2) { - (&token::Ident(id1,_),&token::Ident(id2,_)) + (&token::Ident(id1),&token::Ident(id2)) | (&token::Lifetime(id1),&token::Lifetime(id2)) => id1.name == id2.name, _ => *t1 == *t2 @@ -451,7 +451,7 @@ pub fn parse(sess: &ParseSess, if (!bb_eis.is_empty() && !next_eis.is_empty()) || bb_eis.len() > 1 { let nts = bb_eis.iter().map(|ei| match ei.top_elts.get_tt(ei.idx) { - TokenTree::Token(_, MatchNt(bind, name, _, _)) => { + TokenTree::Token(_, MatchNt(bind, name)) => { format!("{} ('{}')", name, bind) } _ => panic!() @@ -479,7 +479,7 @@ pub fn parse(sess: &ParseSess, let mut ei = bb_eis.pop().unwrap(); match ei.top_elts.get_tt(ei.idx) { - TokenTree::Token(span, MatchNt(_, ident, _, _)) => { + TokenTree::Token(span, MatchNt(_, ident)) => { let match_cur = ei.match_cur; (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal( parse_nt(&mut rust_parser, span, &ident.name.as_str())))); @@ -534,9 +534,9 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal { "ty" => token::NtTy(panictry!(p.parse_ty())), // this could be handled like a token, since it is one "ident" => match p.token { - token::Ident(sn,b) => { + token::Ident(sn) => { p.bump(); - token::NtIdent(Box::new(Spanned::{node: sn, span: p.span}),b) + token::NtIdent(Box::new(Spanned::{node: sn, span: p.span})) } _ => { let token_str = pprust::token_to_string(&p.token); @@ -546,7 +546,7 @@ pub fn parse_nt<'a>(p: &mut Parser<'a>, 
sp: Span, name: &str) -> Nonterminal { } }, "path" => { - token::NtPath(Box::new(panictry!(p.parse_path(LifetimeAndTypesWithoutColons)))) + token::NtPath(Box::new(panictry!(p.parse_path(PathStyle::Type)))) }, "meta" => token::NtMeta(panictry!(p.parse_meta_item())), _ => { diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index 77bae4cb3f..41d3991aee 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -17,7 +17,7 @@ use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal}; use ext::tt::macro_parser::parse; use parse::lexer::new_tt_reader; use parse::parser::{Parser, Restrictions}; -use parse::token::{self, special_idents, gensym_ident, NtTT, Token}; +use parse::token::{self, gensym_ident, NtTT, Token}; use parse::token::Token::*; use print; use ptr::P; @@ -244,8 +244,8 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt, // $( $lhs:tt => $rhs:tt );+ // ...quasiquoting this would be nice. // These spans won't matter, anyways - let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain); - let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain); + let match_lhs_tok = MatchNt(lhs_nm, token::str_to_ident("tt")); + let match_rhs_tok = MatchNt(rhs_nm, token::str_to_ident("tt")); let argument_gram = vec!( TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition { @@ -415,7 +415,7 @@ fn check_matcher_old<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token, on_fai let mut tokens = matcher.peekable(); while let Some(token) = tokens.next() { last = match *token { - TokenTree::Token(sp, MatchNt(ref name, ref frag_spec, _, _)) => { + TokenTree::Token(sp, MatchNt(ref name, ref frag_spec)) => { // ii. If T is a simple NT, look ahead to the next token T' in // M. If T' is in the set FOLLOW(NT), continue. Else; reject. if can_be_followed_by_any(&frag_spec.name.as_str()) { @@ -881,7 +881,7 @@ fn check_matcher_core(cx: &mut ExtCtxt, // Now `last` holds the complete set of NT tokens that could // end the sequence before SUFFIX. Check that every one works with `suffix`. 'each_last: for &(_sp, ref t) in &last.tokens { - if let MatchNt(ref name, ref frag_spec, _, _) = *t { + if let MatchNt(ref name, ref frag_spec) = *t { for &(sp, ref next_token) in &suffix_first.tokens { match is_in_follow(cx, next_token, &frag_spec.name.as_str()) { Err(msg) => { @@ -917,9 +917,8 @@ fn check_matcher_core(cx: &mut ExtCtxt, last } - fn token_can_be_followed_by_any(tok: &Token) -> bool { - if let &MatchNt(_, ref frag_spec, _, _) = tok { + if let &MatchNt(_, ref frag_spec) = tok { frag_can_be_followed_by_any(&frag_spec.name.as_str()) } else { // (Non NT's can always be followed by anthing in matchers.) 
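The hunks above drop the old `IdentStyle` payloads: `token::Ident` shrinks to a single ident and `MatchNt` to a name/fragment pair, so the macro matcher compares idents by name alone. A minimal standalone sketch of that comparison, using stand-in `Name`/`Token` types rather than the real libsyntax ones:

    // Stand-in types; the real libsyntax Token has many more variants.
    #[derive(PartialEq, Clone, Copy, Debug)]
    struct Name(u32);

    #[derive(PartialEq, Clone, Copy, Debug)]
    enum Token {
        Ident(Name),         // was Ident(Name, IdentStyle) before this patch
        MatchNt(Name, Name), // was MatchNt(Name, Name, IdentStyle, IdentStyle)
        Comma,
    }

    // Rough analogue of `token_name_eq`: idents are equal if their names match;
    // everything else falls back to structural equality.
    fn token_name_eq(t1: &Token, t2: &Token) -> bool {
        match (t1, t2) {
            (&Token::Ident(a), &Token::Ident(b)) => a == b,
            _ => t1 == t2,
        }
    }

    fn main() {
        assert!(token_name_eq(&Token::Ident(Name(7)), &Token::Ident(Name(7))));
        assert!(!token_name_eq(&Token::Comma, &Token::MatchNt(Name(1), Name(2))));
    }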
@@ -1005,8 +1004,8 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result { "pat" => { match *tok { FatArrow | Comma | Eq | BinOp(token::Or) => Ok(true), - Ident(i, _) if (i.name.as_str() == "if" || - i.name.as_str() == "in") => Ok(true), + Ident(i) if (i.name.as_str() == "if" || + i.name.as_str() == "in") => Ok(true), _ => Ok(false) } }, @@ -1014,8 +1013,8 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result { match *tok { OpenDelim(token::DelimToken::Brace) | OpenDelim(token::DelimToken::Bracket) | Comma | FatArrow | Colon | Eq | Gt | Semi | BinOp(token::Or) => Ok(true), - Ident(i, _) if (i.name.as_str() == "as" || - i.name.as_str() == "where") => Ok(true), + MatchNt(_, ref frag) if frag.name.as_str() == "block" => Ok(true), + Ident(i) if i.name.as_str() == "as" || i.name.as_str() == "where" => Ok(true), _ => Ok(false) } }, @@ -1035,7 +1034,7 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result { fn has_legal_fragment_specifier(tok: &Token) -> Result<(), String> { debug!("has_legal_fragment_specifier({:?})", tok); - if let &MatchNt(_, ref frag_spec, _, _) = tok { + if let &MatchNt(_, ref frag_spec) = tok { let s = &frag_spec.name.as_str(); if !is_legal_fragment_specifier(s) { return Err(s.to_string()); diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs index ae99fe8173..6b3b5ce9de 100644 --- a/src/libsyntax/ext/tt/transcribe.rs +++ b/src/libsyntax/ext/tt/transcribe.rs @@ -12,7 +12,7 @@ use self::LockstepIterSize::*; use ast; use ast::{TokenTree, Ident, Name}; use codemap::{Span, DUMMY_SP}; -use errors::Handler; +use errors::{Handler, DiagnosticBuilder}; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use parse::token::{DocComment, MatchNt, SubstNt}; use parse::token::{Token, NtIdent, SpecialMacroVar}; @@ -50,6 +50,7 @@ pub struct TtReader<'a> { pub cur_span: Span, /// Transform doc comments. Only useful in macro invocations pub desugar_doc_comments: bool, + pub fatal_errs: Vec>, } /// This can do Macro-By-Example transcription. 
On the other hand, if @@ -99,6 +100,7 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler, /* dummy values, never read: */ cur_tok: token::Eof, cur_span: DUMMY_SP, + fatal_errs: Vec::new(), }; tt_next_token(&mut r); /* get cur_tok and cur_span set up */ r @@ -161,7 +163,7 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize { size + lockstep_iter_size(tt, r) }) }, - TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) => + TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) => match lookup_cur_matched(r, name) { Some(matched) => match *matched { MatchedNonterminal(_) => LisUnconstrained, @@ -186,7 +188,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { None => (), Some(sp) => { r.cur_span = sp; - r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain); + r.cur_tok = token::Ident(r.imported_from.unwrap()); return ret_val; }, } @@ -278,12 +280,12 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { } } // FIXME #2887: think about span stuff here - TokenTree::Token(sp, SubstNt(ident, namep)) => { + TokenTree::Token(sp, SubstNt(ident)) => { r.stack.last_mut().unwrap().idx += 1; match lookup_cur_matched(r, ident) { None => { r.cur_span = sp; - r.cur_tok = SubstNt(ident, namep); + r.cur_tok = SubstNt(ident); return ret_val; // this can't be 0 length, just like TokenTree::Delimited } @@ -292,9 +294,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan { // sidestep the interpolation tricks for ident because // (a) idents can be in lots of places, so it'd be a pain // (b) we actually can, since it's a token. - MatchedNonterminal(NtIdent(ref sn, b)) => { + MatchedNonterminal(NtIdent(ref sn)) => { r.cur_span = sn.span; - r.cur_tok = token::Ident(sn.node, b); + r.cur_tok = token::Ident(sn.node); return ret_val; } MatchedNonterminal(ref other_whole_nt) => { diff --git a/src/libsyntax/feature_gate.rs b/src/libsyntax/feature_gate.rs index fc18ef407a..acef98f2af 100644 --- a/src/libsyntax/feature_gate.rs +++ b/src/libsyntax/feature_gate.rs @@ -22,7 +22,6 @@ //! gate usage is added, *do not remove it again* even once the feature //! becomes stable. -use self::Status::*; use self::AttributeType::*; use self::AttributeGate::*; @@ -40,6 +39,59 @@ use parse::token::InternedString; use std::ascii::AsciiExt; use std::cmp; +macro_rules! setter { + ($field: ident) => {{ + fn f(features: &mut Features) -> &mut bool { + &mut features.$field + } + f as fn(&mut Features) -> &mut bool + }} +} + +macro_rules! declare_features { + ($((active, $feature: ident, $ver: expr, $issue: expr)),+) => { + /// Represents active features that are currently being implemented or + /// currently being considered for addition/removal. + const ACTIVE_FEATURES: &'static [(&'static str, &'static str, + Option, fn(&mut Features) -> &mut bool)] = &[ + $((stringify!($feature), $ver, $issue, setter!($feature))),+ + ]; + + /// A set of features to be used by later passes. + pub struct Features { + /// spans of #![feature] attrs for stable language features. 
for error reporting + pub declared_stable_lang_features: Vec, + /// #![feature] attrs for non-language (library) features + pub declared_lib_features: Vec<(InternedString, Span)>, + $(pub $feature: bool),+ + } + + impl Features { + pub fn new() -> Features { + Features { + declared_stable_lang_features: Vec::new(), + declared_lib_features: Vec::new(), + $($feature: false),+ + } + } + } + }; + + ($((removed, $feature: ident, $ver: expr, $issue: expr)),+) => { + /// Represents features which has since been removed (it was once Active) + const REMOVED_FEATURES: &'static [(&'static str, &'static str, Option)] = &[ + $((stringify!($feature), $ver, $issue)),+ + ]; + }; + + ($((accepted, $feature: ident, $ver: expr, $issue: expr)),+) => { + /// Those language feature has since been Accepted (it was once Active) + const ACCEPTED_FEATURES: &'static [(&'static str, &'static str, Option)] = &[ + $((stringify!($feature), $ver, $issue)),+ + ]; + } +} + // If you change this list without updating src/doc/reference.md, @cmr will be sad // Don't ever remove anything from this list; set them to 'Removed'. // The version numbers here correspond to the version in which the current status @@ -47,118 +99,88 @@ use std::cmp; // stable (active). // NB: The featureck.py script parses this information directly out of the source // so take care when modifying it. -const KNOWN_FEATURES: &'static [(&'static str, &'static str, Option, Status)] = &[ - ("globs", "1.0.0", None, Accepted), - ("macro_rules", "1.0.0", None, Accepted), - ("struct_variant", "1.0.0", None, Accepted), - ("asm", "1.0.0", Some(29722), Active), - ("managed_boxes", "1.0.0", None, Removed), - ("non_ascii_idents", "1.0.0", Some(28979), Active), - ("thread_local", "1.0.0", Some(29594), Active), - ("link_args", "1.0.0", Some(29596), Active), - ("plugin_registrar", "1.0.0", Some(29597), Active), - ("log_syntax", "1.0.0", Some(29598), Active), - ("trace_macros", "1.0.0", Some(29598), Active), - ("concat_idents", "1.0.0", Some(29599), Active), + +declare_features! 
( + (active, asm, "1.0.0", Some(29722)), + (active, concat_idents, "1.0.0", Some(29599)), + (active, link_args, "1.0.0", Some(29596)), + (active, log_syntax, "1.0.0", Some(29598)), + (active, non_ascii_idents, "1.0.0", Some(28979)), + (active, plugin_registrar, "1.0.0", Some(29597)), + (active, thread_local, "1.0.0", Some(29594)), + (active, trace_macros, "1.0.0", Some(29598)), // rustc internal, for now: - ("intrinsics", "1.0.0", None, Active), - ("lang_items", "1.0.0", None, Active), + (active, intrinsics, "1.0.0", None), + (active, lang_items, "1.0.0", None), - ("simd", "1.0.0", Some(27731), Active), - ("default_type_params", "1.0.0", None, Accepted), - ("quote", "1.0.0", Some(29601), Active), - ("link_llvm_intrinsics", "1.0.0", Some(29602), Active), - ("linkage", "1.0.0", Some(29603), Active), - ("struct_inherit", "1.0.0", None, Removed), + (active, link_llvm_intrinsics, "1.0.0", Some(29602)), + (active, linkage, "1.0.0", Some(29603)), + (active, quote, "1.0.0", Some(29601)), + (active, simd, "1.0.0", Some(27731)), - ("quad_precision_float", "1.0.0", None, Removed), // rustc internal - ("rustc_diagnostic_macros", "1.0.0", None, Active), - ("unboxed_closures", "1.0.0", Some(29625), Active), - ("reflect", "1.0.0", Some(27749), Active), - ("import_shadowing", "1.0.0", None, Removed), - ("advanced_slice_patterns", "1.0.0", Some(23121), Active), - ("tuple_indexing", "1.0.0", None, Accepted), - ("associated_types", "1.0.0", None, Accepted), - ("visible_private_types", "1.0.0", None, Removed), - ("slicing_syntax", "1.0.0", None, Accepted), - ("box_syntax", "1.0.0", Some(27779), Active), - ("placement_in_syntax", "1.0.0", Some(27779), Active), + (active, rustc_diagnostic_macros, "1.0.0", None), + (active, advanced_slice_patterns, "1.0.0", Some(23121)), + (active, box_syntax, "1.0.0", Some(27779)), + (active, placement_in_syntax, "1.0.0", Some(27779)), + (active, reflect, "1.0.0", Some(27749)), + (active, unboxed_closures, "1.0.0", Some(29625)), // rustc internal. - ("pushpop_unsafe", "1.2.0", None, Active), - - ("on_unimplemented", "1.0.0", Some(29628), Active), - ("simd_ffi", "1.0.0", Some(27731), Active), - ("allocator", "1.0.0", Some(27389), Active), - ("needs_allocator", "1.4.0", Some(27389), Active), - ("linked_from", "1.3.0", Some(29629), Active), - - ("if_let", "1.0.0", None, Accepted), - ("while_let", "1.0.0", None, Accepted), - - ("plugin", "1.0.0", Some(29597), Active), - ("start", "1.0.0", Some(29633), Active), - ("main", "1.0.0", Some(29634), Active), - - ("fundamental", "1.0.0", Some(29635), Active), - - // A temporary feature gate used to enable parser extensions needed - // to bootstrap fix for #5723. - ("issue_5723_bootstrap", "1.0.0", None, Accepted), - - ("structural_match", "1.8.0", Some(31434), Active), - - // A way to temporarily opt out of opt in copy. This will *never* be accepted. 
- ("opt_out_copy", "1.0.0", None, Removed), + (active, pushpop_unsafe, "1.2.0", None), + + (active, allocator, "1.0.0", Some(27389)), + (active, fundamental, "1.0.0", Some(29635)), + (active, linked_from, "1.3.0", Some(29629)), + (active, main, "1.0.0", Some(29634)), + (active, needs_allocator, "1.4.0", Some(27389)), + (active, on_unimplemented, "1.0.0", Some(29628)), + (active, plugin, "1.0.0", Some(29597)), + (active, simd_ffi, "1.0.0", Some(27731)), + (active, start, "1.0.0", Some(29633)), + (active, structural_match, "1.8.0", Some(31434)), + (active, panic_runtime, "1.10.0", Some(32837)), + (active, needs_panic_runtime, "1.10.0", Some(32837)), // OIBIT specific features - ("optin_builtin_traits", "1.0.0", Some(13231), Active), + (active, optin_builtin_traits, "1.0.0", Some(13231)), // macro reexport needs more discussion and stabilization - ("macro_reexport", "1.0.0", Some(29638), Active), - - // These are used to test this portion of the compiler, they don't actually - // mean anything - ("test_accepted_feature", "1.0.0", None, Accepted), - ("test_removed_feature", "1.0.0", None, Removed), + (active, macro_reexport, "1.0.0", Some(29638)), // Allows use of #[staged_api] // rustc internal - ("staged_api", "1.0.0", None, Active), + (active, staged_api, "1.0.0", None), // Allows using items which are missing stability attributes // rustc internal - ("unmarked_api", "1.0.0", None, Active), - - // Allows using #![no_std] - ("no_std", "1.0.0", None, Accepted), + (active, unmarked_api, "1.0.0", None), // Allows using #![no_core] - ("no_core", "1.3.0", Some(29639), Active), + (active, no_core, "1.3.0", Some(29639)), // Allows using `box` in patterns; RFC 469 - ("box_patterns", "1.0.0", Some(29641), Active), + (active, box_patterns, "1.0.0", Some(29641)), // Allows using the unsafe_no_drop_flag attribute (unlikely to // switch to Accepted; see RFC 320) - ("unsafe_no_drop_flag", "1.0.0", None, Active), + (active, unsafe_no_drop_flag, "1.0.0", None), // Allows using the unsafe_destructor_blind_to_params attribute; // RFC 1238 - ("dropck_parametricity", "1.3.0", Some(28498), Active), + (active, dropck_parametricity, "1.3.0", Some(28498)), // Allows the use of custom attributes; RFC 572 - ("custom_attribute", "1.0.0", Some(29642), Active), + (active, custom_attribute, "1.0.0", Some(29642)), // Allows the use of #[derive(Anything)] as sugar for // #[derive_Anything]. - ("custom_derive", "1.0.0", Some(29644), Active), + (active, custom_derive, "1.0.0", Some(29644)), // Allows the use of rustc_* attributes; RFC 572 - ("rustc_attrs", "1.0.0", Some(29642), Active), + (active, rustc_attrs, "1.0.0", Some(29642)), // Allows the use of #[allow_internal_unstable]. This is an // attribute on macro_rules! and can't use the attribute handling @@ -166,109 +188,177 @@ const KNOWN_FEATURES: &'static [(&'static str, &'static str, Option, Status // macros disappear). // // rustc internal - ("allow_internal_unstable", "1.0.0", None, Active), + (active, allow_internal_unstable, "1.0.0", None), // #23121. Array patterns have some hazards yet. - ("slice_patterns", "1.0.0", Some(23121), Active), - - // Allows use of unary negate on unsigned integers, e.g. -e for e: u8 - ("negate_unsigned", "1.0.0", Some(29645), Removed), + (active, slice_patterns, "1.0.0", Some(23121)), // Allows the definition of associated constants in `trait` or `impl` // blocks. - ("associated_consts", "1.0.0", Some(29646), Active), + (active, associated_consts, "1.0.0", Some(29646)), // Allows the definition of `const fn` functions. 
- ("const_fn", "1.2.0", Some(24111), Active), + (active, const_fn, "1.2.0", Some(24111)), // Allows indexing into constant arrays. - ("const_indexing", "1.4.0", Some(29947), Active), + (active, const_indexing, "1.4.0", Some(29947)), // Allows using #[prelude_import] on glob `use` items. // // rustc internal - ("prelude_import", "1.2.0", None, Active), + (active, prelude_import, "1.2.0", None), // Allows the definition recursive static items. - ("static_recursion", "1.3.0", Some(29719), Active), + (active, static_recursion, "1.3.0", Some(29719)), // Allows default type parameters to influence type inference. - ("default_type_parameter_fallback", "1.3.0", Some(27336), Active), + (active, default_type_parameter_fallback, "1.3.0", Some(27336)), // Allows associated type defaults - ("associated_type_defaults", "1.2.0", Some(29661), Active), + (active, associated_type_defaults, "1.2.0", Some(29661)), // Allows macros to appear in the type position. - ("type_macros", "1.3.0", Some(27245), Active), + (active, type_macros, "1.3.0", Some(27245)), // allow `repr(simd)`, and importing the various simd intrinsics - ("repr_simd", "1.4.0", Some(27731), Active), + (active, repr_simd, "1.4.0", Some(27731)), // Allows cfg(target_feature = "..."). - ("cfg_target_feature", "1.4.0", Some(29717), Active), + (active, cfg_target_feature, "1.4.0", Some(29717)), // allow `extern "platform-intrinsic" { ... }` - ("platform_intrinsics", "1.4.0", Some(27731), Active), + (active, platform_intrinsics, "1.4.0", Some(27731)), // allow `#[unwind]` // rust runtime internal - ("unwind_attributes", "1.4.0", None, Active), + (active, unwind_attributes, "1.4.0", None), // allow the use of `#[naked]` on functions. - ("naked_functions", "1.9.0", Some(32408), Active), - - // allow empty structs and enum variants with braces - ("braced_empty_structs", "1.8.0", Some(29720), Accepted), - - // allow overloading augmented assignment operations like `a += b` - ("augmented_assignments", "1.8.0", Some(28235), Accepted), + (active, naked_functions, "1.9.0", Some(32408)), // allow `#[no_debug]` - ("no_debug", "1.5.0", Some(29721), Active), + (active, no_debug, "1.5.0", Some(29721)), // allow `#[omit_gdb_pretty_printer_section]` // rustc internal. - ("omit_gdb_pretty_printer_section", "1.5.0", None, Active), + (active, omit_gdb_pretty_printer_section, "1.5.0", None), // Allows cfg(target_vendor = "..."). 
- ("cfg_target_vendor", "1.5.0", Some(29718), Active), + (active, cfg_target_vendor, "1.5.0", Some(29718)), // Allow attributes on expressions and non-item statements - ("stmt_expr_attributes", "1.6.0", Some(15701), Active), - - // Allows `#[deprecated]` attribute - ("deprecated", "1.9.0", Some(29935), Accepted), + (active, stmt_expr_attributes, "1.6.0", Some(15701)), // allow using type ascription in expressions - ("type_ascription", "1.6.0", Some(23416), Active), + (active, type_ascription, "1.6.0", Some(23416)), // Allows cfg(target_thread_local) - ("cfg_target_thread_local", "1.7.0", Some(29594), Active), + (active, cfg_target_thread_local, "1.7.0", Some(29594)), // rustc internal - ("abi_vectorcall", "1.7.0", None, Active), + (active, abi_vectorcall, "1.7.0", None), // a...b and ...b - ("inclusive_range_syntax", "1.7.0", Some(28237), Active), + (active, inclusive_range_syntax, "1.7.0", Some(28237)), // `expr?` - ("question_mark", "1.9.0", Some(31436), Active), + (active, question_mark, "1.9.0", Some(31436)), // impl specialization (RFC 1210) - ("specialization", "1.7.0", Some(31844), Active), -]; + (active, specialization, "1.7.0", Some(31844)), + + // pub(restricted) visibilities (RFC 1422) + (active, pub_restricted, "1.9.0", Some(32409)), + + // Allow Drop types in statics/const functions (RFC 1440) + (active, drop_types_in_const, "1.9.0", Some(33156)), + + // Allows cfg(target_has_atomic = "..."). + (active, cfg_target_has_atomic, "1.9.0", Some(32976)) +); + +declare_features! ( + (removed, import_shadowing, "1.0.0", None), + (removed, managed_boxes, "1.0.0", None), + // Allows use of unary negate on unsigned integers, e.g. -e for e: u8 + (removed, negate_unsigned, "1.0.0", Some(29645)), + // A way to temporarily opt out of opt in copy. This will *never* be accepted. + (removed, opt_out_copy, "1.0.0", None), + (removed, quad_precision_float, "1.0.0", None), + (removed, struct_inherit, "1.0.0", None), + (removed, test_removed_feature, "1.0.0", None), + (removed, visible_private_types, "1.0.0", None) +); + +declare_features! ( + (accepted, associated_types, "1.0.0", None), + // allow overloading augmented assignment operations like `a += b` + (accepted, augmented_assignments, "1.8.0", Some(28235)), + // allow empty structs and enum variants with braces + (accepted, braced_empty_structs, "1.8.0", Some(29720)), + (accepted, default_type_params, "1.0.0", None), + (accepted, globs, "1.0.0", None), + (accepted, if_let, "1.0.0", None), + // A temporary feature gate used to enable parser extensions needed + // to bootstrap fix for #5723. + (accepted, issue_5723_bootstrap, "1.0.0", None), + (accepted, macro_rules, "1.0.0", None), + // Allows using #![no_std] + (accepted, no_std, "1.0.0", None), + (accepted, slicing_syntax, "1.0.0", None), + (accepted, struct_variant, "1.0.0", None), + // These are used to test this portion of the compiler, they don't actually + // mean anything + (accepted, test_accepted_feature, "1.0.0", None), + (accepted, tuple_indexing, "1.0.0", None), + (accepted, while_let, "1.0.0", None), + // Allows `#[deprecated]` attribute + (accepted, deprecated, "1.9.0", Some(29935)) +); + // (changing above list without updating src/doc/reference.md makes @cmr sad) -enum Status { - /// Represents an active feature that is currently being implemented or - /// currently being considered for addition/removal. 
- Active, +#[derive(PartialEq, Copy, Clone, Debug)] +pub enum AttributeType { + /// Normal, builtin attribute that is consumed + /// by the compiler before the unused_attribute check + Normal, + + /// Builtin attribute that may not be consumed by the compiler + /// before the unused_attribute check. These attributes + /// will be ignored by the unused_attribute lint + Whitelisted, + + /// Builtin attribute that is only allowed at the crate level + CrateLevel, +} - /// Represents a feature which has since been removed (it was once Active) - Removed, +pub enum AttributeGate { + /// Is gated by a given feature gate, reason + /// and function to check if enabled + Gated(&'static str, &'static str, fn(&Features) -> bool), - /// This language feature has since been Accepted (it was once Active) - Accepted, + /// Ungated attribute, can be used on all release channels + Ungated, +} + +// fn() is not Debug +impl ::std::fmt::Debug for AttributeGate { + fn fmt(&self, fmt: &mut ::std::fmt::Formatter) -> ::std::fmt::Result { + match *self { + Gated(ref name, ref expl, _) => write!(fmt, "Gated({}, {})", name, expl), + Ungated => write!(fmt, "Ungated") + } + } +} + +macro_rules! cfg_fn { + ($field: ident) => {{ + fn f(features: &Features) -> bool { + features.$field + } + f as fn(&Features) -> bool + }} } // Attributes that have a special meaning to rustc or rustdoc @@ -309,88 +399,121 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat // RFC #1445. ("structural_match", Whitelisted, Gated("structural_match", "the semantics of constant patterns is \ - not yet settled")), + not yet settled", + cfg_fn!(structural_match))), // Not used any more, but we can't feature gate it ("no_stack_check", Normal, Ungated), ("plugin", CrateLevel, Gated("plugin", "compiler plugins are experimental \ - and possibly buggy")), + and possibly buggy", + cfg_fn!(plugin))), + ("no_std", CrateLevel, Ungated), ("no_core", CrateLevel, Gated("no_core", - "no_core is experimental")), + "no_core is experimental", + cfg_fn!(no_core))), ("lang", Normal, Gated("lang_items", - "language items are subject to change")), + "language items are subject to change", + cfg_fn!(lang_items))), ("linkage", Whitelisted, Gated("linkage", "the `linkage` attribute is experimental \ - and not portable across platforms")), + and not portable across platforms", + cfg_fn!(linkage))), ("thread_local", Whitelisted, Gated("thread_local", "`#[thread_local]` is an experimental feature, and does \ not currently handle destructors. 
There is no \ corresponding `#[task_local]` mapping to the task \ - model")), + model", + cfg_fn!(thread_local))), ("rustc_on_unimplemented", Normal, Gated("on_unimplemented", "the `#[rustc_on_unimplemented]` attribute \ - is an experimental feature")), + is an experimental feature", + cfg_fn!(on_unimplemented))), ("allocator", Whitelisted, Gated("allocator", - "the `#[allocator]` attribute is an experimental feature")), + "the `#[allocator]` attribute is an experimental feature", + cfg_fn!(allocator))), ("needs_allocator", Normal, Gated("needs_allocator", "the `#[needs_allocator]` \ attribute is an experimental \ - feature")), + feature", + cfg_fn!(needs_allocator))), + ("panic_runtime", Whitelisted, Gated("panic_runtime", + "the `#[panic_runtime]` attribute is \ + an experimental feature", + cfg_fn!(panic_runtime))), + ("needs_panic_runtime", Whitelisted, Gated("needs_panic_runtime", + "the `#[needs_panic_runtime]` \ + attribute is an experimental \ + feature", + cfg_fn!(needs_panic_runtime))), ("rustc_variance", Normal, Gated("rustc_attrs", "the `#[rustc_variance]` attribute \ is just used for rustc unit tests \ - and will never be stable")), + and will never be stable", + cfg_fn!(rustc_attrs))), ("rustc_error", Whitelisted, Gated("rustc_attrs", "the `#[rustc_error]` attribute \ is just used for rustc unit tests \ - and will never be stable")), + and will never be stable", + cfg_fn!(rustc_attrs))), ("rustc_if_this_changed", Whitelisted, Gated("rustc_attrs", - "the `#[rustc_if_this_changed]` attribute \ - is just used for rustc unit tests \ - and will never be stable")), + "the `#[rustc_if_this_changed]` attribute \ + is just used for rustc unit tests \ + and will never be stable", + cfg_fn!(rustc_attrs))), ("rustc_then_this_would_need", Whitelisted, Gated("rustc_attrs", - "the `#[rustc_if_this_changed]` attribute \ - is just used for rustc unit tests \ - and will never be stable")), + "the `#[rustc_if_this_changed]` attribute \ + is just used for rustc unit tests \ + and will never be stable", + cfg_fn!(rustc_attrs))), ("rustc_dirty", Whitelisted, Gated("rustc_attrs", "the `#[rustc_dirty]` attribute \ is just used for rustc unit tests \ - and will never be stable")), + and will never be stable", + cfg_fn!(rustc_attrs))), ("rustc_clean", Whitelisted, Gated("rustc_attrs", "the `#[rustc_clean]` attribute \ is just used for rustc unit tests \ - and will never be stable")), + and will never be stable", + cfg_fn!(rustc_attrs))), ("rustc_symbol_name", Whitelisted, Gated("rustc_attrs", - "internal rustc attributes will never be stable")), + "internal rustc attributes will never be stable", + cfg_fn!(rustc_attrs))), ("rustc_item_path", Whitelisted, Gated("rustc_attrs", - "internal rustc attributes will never be stable")), + "internal rustc attributes will never be stable", + cfg_fn!(rustc_attrs))), ("rustc_move_fragments", Normal, Gated("rustc_attrs", "the `#[rustc_move_fragments]` attribute \ is just used for rustc unit tests \ - and will never be stable")), + and will never be stable", + cfg_fn!(rustc_attrs))), ("rustc_mir", Whitelisted, Gated("rustc_attrs", "the `#[rustc_mir]` attribute \ is just used for rustc unit tests \ - and will never be stable")), + and will never be stable", + cfg_fn!(rustc_attrs))), ("rustc_no_mir", Whitelisted, Gated("rustc_attrs", "the `#[rustc_no_mir]` attribute \ is just used to make tests pass \ - and will never be stable")), + and will never be stable", + cfg_fn!(rustc_attrs))), ("allow_internal_unstable", Normal, Gated("allow_internal_unstable", - 
EXPLAIN_ALLOW_INTERNAL_UNSTABLE)), + EXPLAIN_ALLOW_INTERNAL_UNSTABLE, + cfg_fn!(allow_internal_unstable))), ("fundamental", Whitelisted, Gated("fundamental", "the `#[fundamental]` attribute \ - is an experimental feature")), + is an experimental feature", + cfg_fn!(fundamental))), ("linked_from", Normal, Gated("linked_from", "the `#[linked_from]` attribute \ - is an experimental feature")), + is an experimental feature", + cfg_fn!(linked_from))), // FIXME: #14408 whitelist docs since rustdoc looks at them ("doc", Whitelisted, Ungated), @@ -400,7 +523,8 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat ("cold", Whitelisted, Ungated), ("naked", Whitelisted, Gated("naked_functions", "the `#[naked]` attribute \ - is an experimental feature")), + is an experimental feature", + cfg_fn!(naked_functions))), ("export_name", Whitelisted, Ungated), ("inline", Whitelisted, Ungated), ("link", Whitelisted, Ungated), @@ -410,24 +534,30 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat ("no_mangle", Whitelisted, Ungated), ("no_debug", Whitelisted, Gated("no_debug", "the `#[no_debug]` attribute \ - is an experimental feature")), + is an experimental feature", + cfg_fn!(no_debug))), ("omit_gdb_pretty_printer_section", Whitelisted, Gated("omit_gdb_pretty_printer_section", "the `#[omit_gdb_pretty_printer_section]` \ attribute is just used for the Rust test \ - suite")), + suite", + cfg_fn!(omit_gdb_pretty_printer_section))), ("unsafe_no_drop_flag", Whitelisted, Gated("unsafe_no_drop_flag", "unsafe_no_drop_flag has unstable semantics \ - and may be removed in the future")), + and may be removed in the future", + cfg_fn!(unsafe_no_drop_flag))), ("unsafe_destructor_blind_to_params", Normal, Gated("dropck_parametricity", "unsafe_destructor_blind_to_params has unstable semantics \ - and may be removed in the future")), - ("unwind", Whitelisted, Gated("unwind_attributes", "#[unwind] is experimental")), + and may be removed in the future", + cfg_fn!(dropck_parametricity))), + ("unwind", Whitelisted, Gated("unwind_attributes", "#[unwind] is experimental", + cfg_fn!(unwind_attributes))), // used in resolve ("prelude_import", Whitelisted, Gated("prelude_import", - "`#[prelude_import]` is for use by rustc only")), + "`#[prelude_import]` is for use by rustc only", + cfg_fn!(prelude_import))), // FIXME: #14407 these are only looked at on-demand so we can't // guarantee they'll have already been checked @@ -438,9 +568,11 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat ("deprecated", Normal, Ungated), ("rustc_paren_sugar", Normal, Gated("unboxed_closures", - "unboxed_closures are still evolving")), + "unboxed_closures are still evolving", + cfg_fn!(unboxed_closures))), ("rustc_reflect_like", Whitelisted, Gated("reflect", - "defining reflective traits is still evolving")), + "defining reflective traits is still evolving", + cfg_fn!(reflect))), // Crate level attributes ("crate_name", CrateLevel, Ungated), @@ -453,21 +585,13 @@ pub const KNOWN_ATTRIBUTES: &'static [(&'static str, AttributeType, AttributeGat ("recursion_limit", CrateLevel, Ungated), ]; -macro_rules! 
cfg_fn { - (|$x: ident| $e: expr) => {{ - fn f($x: &Features) -> bool { - $e - } - f as fn(&Features) -> bool - }} -} // cfg(...)'s that are feature gated const GATED_CFGS: &'static [(&'static str, &'static str, fn(&Features) -> bool)] = &[ // (name in cfg, feature, function to check if the feature is enabled) - ("target_feature", "cfg_target_feature", cfg_fn!(|x| x.cfg_target_feature)), - ("target_vendor", "cfg_target_vendor", cfg_fn!(|x| x.cfg_target_vendor)), - ("target_thread_local", "cfg_target_thread_local", - cfg_fn!(|x| x.cfg_target_thread_local)), + ("target_feature", "cfg_target_feature", cfg_fn!(cfg_target_feature)), + ("target_vendor", "cfg_target_vendor", cfg_fn!(cfg_target_vendor)), + ("target_thread_local", "cfg_target_thread_local", cfg_fn!(cfg_target_thread_local)), + ("target_has_atomic", "cfg_target_has_atomic", cfg_fn!(cfg_target_has_atomic)), ]; #[derive(Debug, Eq, PartialEq)] @@ -548,170 +672,38 @@ impl GatedCfg { } } - -#[derive(PartialEq, Copy, Clone, Debug)] -pub enum AttributeType { - /// Normal, builtin attribute that is consumed - /// by the compiler before the unused_attribute check - Normal, - - /// Builtin attribute that may not be consumed by the compiler - /// before the unused_attribute check. These attributes - /// will be ignored by the unused_attribute lint - Whitelisted, - - /// Builtin attribute that is only allowed at the crate level - CrateLevel, -} - -#[derive(PartialEq, Copy, Clone, Debug)] -pub enum AttributeGate { - /// Is gated by a given feature gate and reason - Gated(&'static str, &'static str), - - /// Ungated attribute, can be used on all release channels - Ungated, -} - -/// A set of features to be used by later passes. -pub struct Features { - pub unboxed_closures: bool, - pub rustc_diagnostic_macros: bool, - pub allow_quote: bool, - pub allow_asm: bool, - pub allow_log_syntax: bool, - pub allow_concat_idents: bool, - pub allow_trace_macros: bool, - pub allow_internal_unstable: bool, - pub allow_custom_derive: bool, - pub allow_placement_in: bool, - pub allow_box: bool, - pub allow_pushpop_unsafe: bool, - pub allow_inclusive_range: bool, - pub simd_ffi: bool, - pub unmarked_api: bool, - /// spans of #![feature] attrs for stable language features. 
for error reporting - pub declared_stable_lang_features: Vec, - /// #![feature] attrs for non-language (library) features - pub declared_lib_features: Vec<(InternedString, Span)>, - pub const_fn: bool, - pub const_indexing: bool, - pub static_recursion: bool, - pub default_type_parameter_fallback: bool, - pub rustc_attrs: bool, - pub type_macros: bool, - pub cfg_target_feature: bool, - pub cfg_target_vendor: bool, - pub cfg_target_thread_local: bool, - pub staged_api: bool, - pub stmt_expr_attributes: bool, - pub deprecated: bool, - pub question_mark: bool, - pub specialization: bool, -} - -impl Features { - pub fn new() -> Features { - Features { - unboxed_closures: false, - rustc_diagnostic_macros: false, - allow_quote: false, - allow_asm: false, - allow_log_syntax: false, - allow_concat_idents: false, - allow_trace_macros: false, - allow_internal_unstable: false, - allow_custom_derive: false, - allow_placement_in: false, - allow_box: false, - allow_pushpop_unsafe: false, - allow_inclusive_range: false, - simd_ffi: false, - unmarked_api: false, - declared_stable_lang_features: Vec::new(), - declared_lib_features: Vec::new(), - const_fn: false, - const_indexing: false, - static_recursion: false, - default_type_parameter_fallback: false, - rustc_attrs: false, - type_macros: false, - cfg_target_feature: false, - cfg_target_vendor: false, - cfg_target_thread_local: false, - staged_api: false, - stmt_expr_attributes: false, - deprecated: false, - question_mark: false, - specialization: false, - } - } -} - -const EXPLAIN_BOX_SYNTAX: &'static str = - "box expression syntax is experimental; you can call `Box::new` instead."; - -const EXPLAIN_PLACEMENT_IN: &'static str = - "placement-in expression syntax is experimental and subject to change."; - -const EXPLAIN_PUSHPOP_UNSAFE: &'static str = - "push/pop_unsafe macros are experimental and subject to change."; - -const EXPLAIN_STMT_ATTR_SYNTAX: &'static str = - "attributes on non-item statements and expressions are experimental."; - -pub fn check_for_box_syntax(f: Option<&Features>, diag: &Handler, span: Span) { - if let Some(&Features { allow_box: true, .. }) = f { - return; - } - emit_feature_err(diag, "box_syntax", span, GateIssue::Language, EXPLAIN_BOX_SYNTAX); -} - -pub fn check_for_placement_in(f: Option<&Features>, diag: &Handler, span: Span) { - if let Some(&Features { allow_placement_in: true, .. }) = f { - return; - } - emit_feature_err(diag, "placement_in_syntax", span, GateIssue::Language, EXPLAIN_PLACEMENT_IN); -} - -pub fn check_for_pushpop_syntax(f: Option<&Features>, diag: &Handler, span: Span) { - if let Some(&Features { allow_pushpop_unsafe: true, .. }) = f { - return; - } - emit_feature_err(diag, "pushpop_unsafe", span, GateIssue::Language, EXPLAIN_PUSHPOP_UNSAFE); -} - struct Context<'a> { - features: Vec<&'static str>, + features: &'a Features, span_handler: &'a Handler, cm: &'a CodeMap, plugin_attributes: &'a [(String, AttributeType)], } -impl<'a> Context<'a> { - fn enable_feature(&mut self, feature: &'static str) { - debug!("enabling feature: {}", feature); - self.features.push(feature); - } - - fn gate_feature(&self, feature: &str, span: Span, explain: &str) { - let has_feature = self.has_feature(feature); - debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", feature, span, has_feature); - if !has_feature && !self.cm.span_allows_unstable(span) { - emit_feature_err(self.span_handler, feature, span, GateIssue::Language, explain); +macro_rules! 
gate_feature_fn { + ($cx: expr, $has_feature: expr, $span: expr, $name: expr, $explain: expr) => {{ + let (cx, has_feature, span, name, explain) = ($cx, $has_feature, $span, $name, $explain); + let has_feature: bool = has_feature(&$cx.features); + debug!("gate_feature(feature = {:?}, span = {:?}); has? {}", name, span, has_feature); + if !has_feature && !cx.cm.span_allows_unstable(span) { + emit_feature_err(cx.span_handler, name, span, GateIssue::Language, explain); } + }} +} + +macro_rules! gate_feature { + ($cx: expr, $feature: ident, $span: expr, $explain: expr) => { + gate_feature_fn!($cx, |x:&Features| x.$feature, $span, stringify!($feature), $explain) } - fn has_feature(&self, feature: &str) -> bool { - self.features.iter().any(|&n| n == feature) - } +} +impl<'a> Context<'a> { fn check_attribute(&self, attr: &ast::Attribute, is_macro: bool) { debug!("check_attribute(attr = {:?})", attr); let name = &*attr.name(); - for &(n, ty, gateage) in KNOWN_ATTRIBUTES { + for &(n, ty, ref gateage) in KNOWN_ATTRIBUTES { if n == name { - if let Gated(gate, desc) = gateage { - self.gate_feature(gate, attr.span, desc); + if let &Gated(ref name, ref desc, ref has_feature) = gateage { + gate_feature_fn!(self, has_feature, attr.span, name, desc); } debug!("check_attribute: {:?} is known, {:?}, {:?}", name, ty, gateage); return; @@ -727,41 +719,50 @@ impl<'a> Context<'a> { } } if name.starts_with("rustc_") { - self.gate_feature("rustc_attrs", attr.span, - "unless otherwise specified, attributes \ - with the prefix `rustc_` \ - are reserved for internal compiler diagnostics"); + gate_feature!(self, rustc_attrs, attr.span, + "unless otherwise specified, attributes \ + with the prefix `rustc_` \ + are reserved for internal compiler diagnostics"); } else if name.starts_with("derive_") { - self.gate_feature("custom_derive", attr.span, - "attributes of the form `#[derive_*]` are reserved \ - for the compiler"); + gate_feature!(self, custom_derive, attr.span, EXPLAIN_DERIVE_UNDERSCORE); } else { // Only run the custom attribute lint during regular // feature gate checking. 
Macro gating runs // before the plugin attributes are registered // so we skip this then if !is_macro { - self.gate_feature("custom_attribute", attr.span, - &format!("The attribute `{}` is currently \ - unknown to the compiler and \ - may have meaning \ - added to it in the future", - name)); + gate_feature!(self, custom_attribute, attr.span, + &format!("The attribute `{}` is currently \ + unknown to the compiler and \ + may have meaning \ + added to it in the future", + name)); } } } } +pub fn check_attribute(attr: &ast::Attribute, handler: &Handler, + cm: &CodeMap, features: &Features) { + let cx = Context { + features: features, span_handler: handler, + cm: cm, plugin_attributes: &[] + }; + cx.check_attribute(attr, true); +} + fn find_lang_feature_issue(feature: &str) -> Option { - let info = KNOWN_FEATURES.iter() - .find(|t| t.0 == feature) - .unwrap(); - let issue = info.2; - if let Active = info.3 { + if let Some(info) = ACTIVE_FEATURES.iter().find(|t| t.0 == feature) { + let issue = info.2; // FIXME (#28244): enforce that active features have issue numbers // assert!(issue.is_some()) + issue + } else { + // search in Accepted or Removed features + ACCEPTED_FEATURES.iter().chain(REMOVED_FEATURES.iter()) + .find(|t| t.0 == feature) + .unwrap().2 } - issue } pub enum GateIssue { @@ -787,12 +788,18 @@ pub fn emit_feature_err(diag: &Handler, feature: &str, span: Span, issue: GateIs err.emit(); return; } - err.fileline_help(span, &format!("add #![feature({})] to the \ - crate attributes to enable", - feature)); + err.help(&format!("add #![feature({})] to the \ + crate attributes to enable", + feature)); err.emit(); } +const EXPLAIN_BOX_SYNTAX: &'static str = + "box expression syntax is experimental; you can call `Box::new` instead."; + +const EXPLAIN_STMT_ATTR_SYNTAX: &'static str = + "attributes on non-item statements and expressions are experimental."; + pub const EXPLAIN_ASM: &'static str = "inline assembly is not stable enough for use and is subject to change"; @@ -810,75 +817,23 @@ pub const EXPLAIN_ALLOW_INTERNAL_UNSTABLE: &'static str = pub const EXPLAIN_CUSTOM_DERIVE: &'static str = "`#[derive]` for custom traits is not stable enough for use and is subject to change"; -struct MacroVisitor<'a> { - context: &'a Context<'a> -} - -impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> { - fn visit_mac(&mut self, mac: &ast::Mac) { - let path = &mac.node.path; - let name = path.segments.last().unwrap().identifier.name.as_str(); - - // Issue 22234: If you add a new case here, make sure to also - // add code to catch the macro during or after expansion. - // - // We still keep this MacroVisitor (rather than *solely* - // relying on catching cases during or after expansion) to - // catch uses of these macros within conditionally-compiled - // code, e.g. `#[cfg]`-guarded functions. 
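With the string-keyed `has_feature` lookups gone from `Context`, the `gate_feature_fn!`/`gate_feature!` macros above turn a field name into an inline `fn(&Features) -> bool` check plus a `stringify!`-ed name before calling `emit_feature_err`. A standalone sketch of that pattern, with a trivial `emit_feature_err` stand-in instead of the real diagnostic handler and span machinery:

    struct Features { box_syntax: bool, question_mark: bool }

    // Stand-in for the real `emit_feature_err(diag, feature, span, issue, explain)`.
    fn emit_feature_err(feature: &str, explain: &str) {
        println!("error: {}", explain);
        println!("help: add #![feature({})] to the crate attributes to enable", feature);
    }

    macro_rules! gate_feature {
        ($features:expr, $feature:ident, $explain:expr) => {{
            // The field access is wrapped in a checker, mirroring `|x: &Features| x.$feature`.
            let has_feature = (|x: &Features| x.$feature)($features);
            if !has_feature {
                emit_feature_err(stringify!($feature), $explain);
            }
        }}
    }

    fn main() {
        let features = Features { box_syntax: false, question_mark: true };
        gate_feature!(&features, box_syntax,
                      "box expression syntax is experimental; you can call `Box::new` instead.");
        gate_feature!(&features, question_mark, "the `?` operator is not stable");
    }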
- - if name == "asm" { - self.context.gate_feature("asm", path.span, EXPLAIN_ASM); - } - - else if name == "log_syntax" { - self.context.gate_feature("log_syntax", path.span, EXPLAIN_LOG_SYNTAX); - } - - else if name == "trace_macros" { - self.context.gate_feature("trace_macros", path.span, EXPLAIN_TRACE_MACROS); - } - - else if name == "concat_idents" { - self.context.gate_feature("concat_idents", path.span, EXPLAIN_CONCAT_IDENTS); - } - } - - fn visit_attribute(&mut self, attr: &'v ast::Attribute) { - self.context.check_attribute(attr, true); - } +pub const EXPLAIN_DERIVE_UNDERSCORE: &'static str = + "attributes of the form `#[derive_*]` are reserved for the compiler"; - fn visit_expr(&mut self, e: &ast::Expr) { - // Issue 22181: overloaded-`box` and placement-`in` are - // implemented via a desugaring expansion, so their feature - // gates go into MacroVisitor since that works pre-expansion. - // - // Issue 22234: we also check during expansion as well. - // But we keep these checks as a pre-expansion check to catch - // uses in e.g. conditionalized code. - - if let ast::ExprKind::Box(_) = e.node { - self.context.gate_feature("box_syntax", e.span, EXPLAIN_BOX_SYNTAX); - } - - if let ast::ExprKind::InPlace(..) = e.node { - self.context.gate_feature("placement_in_syntax", e.span, EXPLAIN_PLACEMENT_IN); - } - - visit::walk_expr(self, e); - } -} +pub const EXPLAIN_PLACEMENT_IN: &'static str = + "placement-in expression syntax is experimental and subject to change."; struct PostExpansionVisitor<'a> { context: &'a Context<'a>, } -impl<'a> PostExpansionVisitor<'a> { - fn gate_feature(&self, feature: &str, span: Span, explain: &str) { - if !self.context.cm.span_allows_unstable(span) { - self.context.gate_feature(feature, span, explain) +macro_rules! gate_feature_post { + ($cx: expr, $feature: ident, $span: expr, $explain: expr) => {{ + let (cx, span) = ($cx, $span); + if !cx.context.cm.span_allows_unstable(span) { + gate_feature!(cx.context, $feature, span, $explain) } - } + }} } impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { @@ -890,8 +845,8 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { fn visit_name(&mut self, sp: Span, name: ast::Name) { if !name.as_str().is_ascii() { - self.gate_feature("non_ascii_idents", sp, - "non-ascii idents are not fully supported."); + gate_feature_post!(&self, non_ascii_idents, sp, + "non-ascii idents are not fully supported."); } } @@ -899,60 +854,59 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { match i.node { ast::ItemKind::ExternCrate(_) => { if attr::contains_name(&i.attrs[..], "macro_reexport") { - self.gate_feature("macro_reexport", i.span, - "macros reexports are experimental \ - and possibly buggy"); + gate_feature_post!(&self, macro_reexport, i.span, + "macros reexports are experimental \ + and possibly buggy"); } } ast::ItemKind::ForeignMod(ref foreign_module) => { if attr::contains_name(&i.attrs[..], "link_args") { - self.gate_feature("link_args", i.span, + gate_feature_post!(&self, link_args, i.span, "the `link_args` attribute is not portable \ across platforms, it is recommended to \ use `#[link(name = \"foo\")]` instead") } - let maybe_feature = match foreign_module.abi { - Abi::RustIntrinsic => Some(("intrinsics", "intrinsics are subject to change")), + match foreign_module.abi { + Abi::RustIntrinsic => + gate_feature_post!(&self, intrinsics, i.span, + "intrinsics are subject to change"), Abi::PlatformIntrinsic => { - Some(("platform_intrinsics", - "platform intrinsics are experimental and possibly buggy")) + 
gate_feature_post!(&self, platform_intrinsics, i.span, + "platform intrinsics are experimental \ + and possibly buggy") }, Abi::Vectorcall => { - Some(("abi_vectorcall", - "vectorcall is experimental and subject to change" - )) + gate_feature_post!(&self, abi_vectorcall, i.span, + "vectorcall is experimental and subject to change") } - _ => None - }; - if let Some((feature, msg)) = maybe_feature { - self.gate_feature(feature, i.span, msg) + _ => () } } ast::ItemKind::Fn(..) => { if attr::contains_name(&i.attrs[..], "plugin_registrar") { - self.gate_feature("plugin_registrar", i.span, - "compiler plugins are experimental and possibly buggy"); + gate_feature_post!(&self, plugin_registrar, i.span, + "compiler plugins are experimental and possibly buggy"); } if attr::contains_name(&i.attrs[..], "start") { - self.gate_feature("start", i.span, + gate_feature_post!(&self, start, i.span, "a #[start] function is an experimental \ feature whose signature may change \ over time"); } if attr::contains_name(&i.attrs[..], "main") { - self.gate_feature("main", i.span, - "declaration of a nonstandard #[main] \ - function may change over time, for now \ - a top-level `fn main()` is required"); + gate_feature_post!(&self, main, i.span, + "declaration of a nonstandard #[main] \ + function may change over time, for now \ + a top-level `fn main()` is required"); } } ast::ItemKind::Struct(..) => { if attr::contains_name(&i.attrs[..], "simd") { - self.gate_feature("simd", i.span, - "SIMD types are experimental and possibly buggy"); + gate_feature_post!(&self, simd, i.span, + "SIMD types are experimental and possibly buggy"); self.context.span_handler.span_warn(i.span, "the `#[simd]` attribute is deprecated, \ use `#[repr(simd)]` instead"); @@ -961,8 +915,9 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { if attr.name() == "repr" { for item in attr.meta_item_list().unwrap_or(&[]) { if item.name() == "simd" { - self.gate_feature("repr_simd", i.span, - "SIMD types are experimental and possibly buggy"); + gate_feature_post!(&self, repr_simd, i.span, + "SIMD types are experimental \ + and possibly buggy"); } } @@ -971,19 +926,19 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } ast::ItemKind::DefaultImpl(..) 
=> { - self.gate_feature("optin_builtin_traits", - i.span, - "default trait implementations are experimental \ - and possibly buggy"); + gate_feature_post!(&self, optin_builtin_traits, + i.span, + "default trait implementations are experimental \ + and possibly buggy"); } ast::ItemKind::Impl(_, polarity, _, _, _, _) => { match polarity { ast::ImplPolarity::Negative => { - self.gate_feature("optin_builtin_traits", - i.span, - "negative trait bounds are not yet fully implemented; \ - use marker types for now"); + gate_feature_post!(&self, optin_builtin_traits, + i.span, + "negative trait bounds are not yet fully implemented; \ + use marker types for now"); }, _ => {} } @@ -996,7 +951,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { } fn visit_variant_data(&mut self, s: &'v ast::VariantData, _: ast::Ident, - _: &'v ast::Generics, _: ast::NodeId, span: Span) { + _: &'v ast::Generics, _: ast::NodeId, span: Span) { if s.fields().is_empty() { if s.is_tuple() { self.context.span_handler.struct_span_err(span, "empty tuple structs and enum \ @@ -1018,7 +973,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { _ => false }; if links_to_llvm { - self.gate_feature("link_llvm_intrinsics", i.span, + gate_feature_post!(&self, link_llvm_intrinsics, i.span, "linking to LLVM intrinsics is experimental"); } @@ -1028,22 +983,19 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { fn visit_expr(&mut self, e: &ast::Expr) { match e.node { ast::ExprKind::Box(_) => { - self.gate_feature("box_syntax", - e.span, - "box expression syntax is experimental; \ - you can call `Box::new` instead."); + gate_feature_post!(&self, box_syntax, e.span, EXPLAIN_BOX_SYNTAX); } ast::ExprKind::Type(..) => { - self.gate_feature("type_ascription", e.span, + gate_feature_post!(&self, type_ascription, e.span, "type ascription is experimental"); } ast::ExprKind::Range(_, _, ast::RangeLimits::Closed) => { - self.gate_feature("inclusive_range_syntax", + gate_feature_post!(&self, inclusive_range_syntax, e.span, "inclusive range syntax is experimental"); } ast::ExprKind::Try(..) => { - self.gate_feature("question_mark", e.span, "the `?` operator is not stable"); + gate_feature_post!(&self, question_mark, e.span, "the `?` operator is not stable"); } _ => {} } @@ -1053,19 +1005,19 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { fn visit_pat(&mut self, pattern: &ast::Pat) { match pattern.node { PatKind::Vec(_, Some(_), ref last) if !last.is_empty() => { - self.gate_feature("advanced_slice_patterns", + gate_feature_post!(&self, advanced_slice_patterns, pattern.span, "multiple-element slice matches anywhere \ but at the end of a slice (e.g. \ `[0, ..xs, 0]`) are experimental") } PatKind::Vec(..) => { - self.gate_feature("slice_patterns", + gate_feature_post!(&self, slice_patterns, pattern.span, "slice pattern syntax is experimental"); } PatKind::Box(..) 
=> { - self.gate_feature("box_patterns", + gate_feature_post!(&self, box_patterns, pattern.span, "box pattern syntax is experimental"); } @@ -1083,7 +1035,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { // check for const fn declarations match fn_kind { FnKind::ItemFn(_, _, _, ast::Constness::Const, _, _) => { - self.gate_feature("const_fn", span, "const fn is unstable"); + gate_feature_post!(&self, const_fn, span, "const fn is unstable"); } _ => { // stability of const fn methods are covered in @@ -1095,18 +1047,18 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { match fn_kind { FnKind::ItemFn(_, _, _, _, abi, _) if abi == Abi::RustIntrinsic => { - self.gate_feature("intrinsics", + gate_feature_post!(&self, intrinsics, span, "intrinsics are subject to change") } FnKind::ItemFn(_, _, _, _, abi, _) | FnKind::Method(_, &ast::MethodSig { abi, .. }, _) => match abi { Abi::RustCall => { - self.gate_feature("unboxed_closures", span, + gate_feature_post!(&self, unboxed_closures, span, "rust-call ABI is subject to change"); }, Abi::Vectorcall => { - self.gate_feature("abi_vectorcall", span, + gate_feature_post!(&self, abi_vectorcall, span, "vectorcall is experimental and subject to change"); }, _ => {} @@ -1119,17 +1071,17 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { fn visit_trait_item(&mut self, ti: &'v ast::TraitItem) { match ti.node { ast::TraitItemKind::Const(..) => { - self.gate_feature("associated_consts", + gate_feature_post!(&self, associated_consts, ti.span, "associated constants are experimental") } ast::TraitItemKind::Method(ref sig, _) => { if sig.constness == ast::Constness::Const { - self.gate_feature("const_fn", ti.span, "const fn is unstable"); + gate_feature_post!(&self, const_fn, ti.span, "const fn is unstable"); } } ast::TraitItemKind::Type(_, Some(_)) => { - self.gate_feature("associated_type_defaults", ti.span, + gate_feature_post!(&self, associated_type_defaults, ti.span, "associated type defaults are unstable"); } _ => {} @@ -1139,44 +1091,51 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> { fn visit_impl_item(&mut self, ii: &'v ast::ImplItem) { if ii.defaultness == ast::Defaultness::Default { - self.gate_feature("specialization", + gate_feature_post!(&self, specialization, ii.span, "specialization is unstable"); } match ii.node { ast::ImplItemKind::Const(..) => { - self.gate_feature("associated_consts", + gate_feature_post!(&self, associated_consts, ii.span, "associated constants are experimental") } ast::ImplItemKind::Method(ref sig, _) => { if sig.constness == ast::Constness::Const { - self.gate_feature("const_fn", ii.span, "const fn is unstable"); + gate_feature_post!(&self, const_fn, ii.span, "const fn is unstable"); } } _ => {} } visit::walk_impl_item(self, ii); } -} -fn check_crate_inner(cm: &CodeMap, span_handler: &Handler, - krate: &ast::Crate, - plugin_attributes: &[(String, AttributeType)], - check: F) - -> Features - where F: FnOnce(&mut Context, &ast::Crate) -{ - let mut cx = Context { - features: Vec::new(), - span_handler: span_handler, - cm: cm, - plugin_attributes: plugin_attributes, - }; + fn visit_vis(&mut self, vis: &'v ast::Visibility) { + let span = match *vis { + ast::Visibility::Crate(span) => span, + ast::Visibility::Restricted { ref path, .. 
} => { + // Check for type parameters + let found_param = path.segments.iter().any(|segment| { + !segment.parameters.types().is_empty() || + !segment.parameters.lifetimes().is_empty() || + !segment.parameters.bindings().is_empty() + }); + if found_param { + self.context.span_handler.span_err(path.span, "type or lifetime parameters \ + in visibility path"); + } + path.span + } + _ => return, + }; + gate_feature_post!(&self, pub_restricted, span, "`pub(restricted)` syntax is experimental"); + } +} - let mut accepted_features = Vec::new(); - let mut unknown_features = Vec::new(); +pub fn get_features(span_handler: &Handler, krate: &ast::Crate) -> Features { + let mut features = Features::new(); for attr in &krate.attrs { if !attr.check_name("feature") { @@ -1199,81 +1158,43 @@ fn check_crate_inner(cm: &CodeMap, span_handler: &Handler, continue } }; - match KNOWN_FEATURES.iter() - .find(|& &(n, _, _, _)| name == n) { - Some(&(name, _, _, Active)) => { - cx.enable_feature(name); - } - Some(&(_, _, _, Removed)) => { - span_handler.span_err(mi.span, "feature has been removed"); - } - Some(&(_, _, _, Accepted)) => { - accepted_features.push(mi.span); - } - None => { - unknown_features.push((name, mi.span)); - } + if let Some(&(_, _, _, setter)) = ACTIVE_FEATURES.iter() + .find(|& &(n, _, _, _)| name == n) { + *(setter(&mut features)) = true; + } + else if let Some(&(_, _, _)) = REMOVED_FEATURES.iter() + .find(|& &(n, _, _)| name == n) { + span_handler.span_err(mi.span, "feature has been removed"); + } + else if let Some(&(_, _, _)) = ACCEPTED_FEATURES.iter() + .find(|& &(n, _, _)| name == n) { + features.declared_stable_lang_features.push(mi.span); + } else { + features.declared_lib_features.push((name, mi.span)); } } } } } - check(&mut cx, krate); - - // FIXME (pnkfelix): Before adding the 99th entry below, change it - // to a single-pass (instead of N calls to `.has_feature`). 
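`get_features` above replaces `check_crate_inner`'s hand-written field-by-field construction: each name found in a `#![feature(...)]` attribute is tried against the active, removed and accepted tables in that order, and anything unknown is recorded as a library feature. A standalone sketch of just that classification order, with a `HashSet` standing in for the macro-generated bool fields and plain name lists standing in for the tables:

    use std::collections::HashSet;

    // Simplified stand-ins for the declare_features!-generated tables.
    const ACTIVE: &'static [&'static str] = &["question_mark", "specialization"];
    const REMOVED: &'static [&'static str] = &["managed_boxes"];
    const ACCEPTED: &'static [&'static str] = &["if_let", "while_let"];

    #[derive(Default)]
    struct Features {
        enabled: HashSet<&'static str>, // stands in for the per-feature bool fields
        declared_stable_lang_features: Vec<&'static str>,
        declared_lib_features: Vec<&'static str>,
    }

    fn declare(features: &mut Features, name: &'static str) {
        if ACTIVE.contains(&name) {
            features.enabled.insert(name); // analogous to `*setter(&mut features) = true`
        } else if REMOVED.contains(&name) {
            println!("error: feature `{}` has been removed", name);
        } else if ACCEPTED.contains(&name) {
            features.declared_stable_lang_features.push(name);
        } else {
            features.declared_lib_features.push(name); // assumed to be a library feature
        }
    }

    fn main() {
        let mut features = Features::default();
        for &name in &["question_mark", "if_let", "managed_boxes", "alloc"] {
            declare(&mut features, name);
        }
        assert!(features.enabled.contains("question_mark"));
        assert_eq!(features.declared_lib_features, vec!["alloc"]);
    }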
- - Features { - unboxed_closures: cx.has_feature("unboxed_closures"), - rustc_diagnostic_macros: cx.has_feature("rustc_diagnostic_macros"), - allow_quote: cx.has_feature("quote"), - allow_asm: cx.has_feature("asm"), - allow_log_syntax: cx.has_feature("log_syntax"), - allow_concat_idents: cx.has_feature("concat_idents"), - allow_trace_macros: cx.has_feature("trace_macros"), - allow_internal_unstable: cx.has_feature("allow_internal_unstable"), - allow_custom_derive: cx.has_feature("custom_derive"), - allow_placement_in: cx.has_feature("placement_in_syntax"), - allow_box: cx.has_feature("box_syntax"), - allow_pushpop_unsafe: cx.has_feature("pushpop_unsafe"), - allow_inclusive_range: cx.has_feature("inclusive_range_syntax"), - simd_ffi: cx.has_feature("simd_ffi"), - unmarked_api: cx.has_feature("unmarked_api"), - declared_stable_lang_features: accepted_features, - declared_lib_features: unknown_features, - const_fn: cx.has_feature("const_fn"), - const_indexing: cx.has_feature("const_indexing"), - static_recursion: cx.has_feature("static_recursion"), - default_type_parameter_fallback: cx.has_feature("default_type_parameter_fallback"), - rustc_attrs: cx.has_feature("rustc_attrs"), - type_macros: cx.has_feature("type_macros"), - cfg_target_feature: cx.has_feature("cfg_target_feature"), - cfg_target_vendor: cx.has_feature("cfg_target_vendor"), - cfg_target_thread_local: cx.has_feature("cfg_target_thread_local"), - staged_api: cx.has_feature("staged_api"), - stmt_expr_attributes: cx.has_feature("stmt_expr_attributes"), - deprecated: cx.has_feature("deprecated"), - question_mark: cx.has_feature("question_mark"), - specialization: cx.has_feature("specialization"), - } -} - -pub fn check_crate_macros(cm: &CodeMap, span_handler: &Handler, krate: &ast::Crate) --> Features { - check_crate_inner(cm, span_handler, krate, &[] as &'static [_], - |ctx, krate| visit::walk_crate(&mut MacroVisitor { context: ctx }, krate)) + features } pub fn check_crate(cm: &CodeMap, span_handler: &Handler, krate: &ast::Crate, plugin_attributes: &[(String, AttributeType)], - unstable: UnstableFeatures) -> Features -{ + unstable: UnstableFeatures) -> Features { maybe_stage_features(span_handler, krate, unstable); - - check_crate_inner(cm, span_handler, krate, plugin_attributes, - |ctx, krate| visit::walk_crate(&mut PostExpansionVisitor { context: ctx }, - krate)) + let features = get_features(span_handler, krate); + { + let ctx = Context { + features: &features, + span_handler: span_handler, + cm: cm, + plugin_attributes: plugin_attributes, + }; + visit::walk_crate(&mut PostExpansionVisitor { context: &ctx }, krate); + } + features } #[derive(Clone, Copy)] diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs index 5d378763be..2c325080c0 100644 --- a/src/libsyntax/fold.rs +++ b/src/libsyntax/fold.rs @@ -22,7 +22,7 @@ use ast::*; use ast; use attr::{ThinAttributes, ThinAttributesExt}; use codemap::{respan, Span, Spanned}; -use parse::token; +use parse::token::{self, keywords}; use ptr::P; use util::small_vector::SmallVector; use util::move_map::MoveMap; @@ -610,17 +610,11 @@ pub fn noop_fold_tts(tts: &[TokenTree], fld: &mut T) -> Vec(t: token::Token, fld: &mut T) -> token::Token { match t { - token::Ident(id, followed_by_colons) => { - token::Ident(fld.fold_ident(id), followed_by_colons) - } + token::Ident(id) => token::Ident(fld.fold_ident(id)), token::Lifetime(id) => token::Lifetime(fld.fold_ident(id)), token::Interpolated(nt) => token::Interpolated(fld.fold_interpolated(nt)), - token::SubstNt(ident, namep) => { - 
token::SubstNt(fld.fold_ident(ident), namep) - } - token::MatchNt(name, kind, namep, kindp) => { - token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind), namep, kindp) - } + token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)), + token::MatchNt(name, kind) => token::MatchNt(fld.fold_ident(name), fld.fold_ident(kind)), _ => t } } @@ -664,9 +658,8 @@ pub fn noop_fold_interpolated(nt: token::Nonterminal, fld: &mut T) token::NtPat(pat) => token::NtPat(fld.fold_pat(pat)), token::NtExpr(expr) => token::NtExpr(fld.fold_expr(expr)), token::NtTy(ty) => token::NtTy(fld.fold_ty(ty)), - token::NtIdent(id, is_mod_name) => - token::NtIdent(Box::new(Spanned::{node: fld.fold_ident(id.node), .. *id}), - is_mod_name), + token::NtIdent(id) => + token::NtIdent(Box::new(Spanned::{node: fld.fold_ident(id.node), ..*id})), token::NtMeta(meta_item) => token::NtMeta(fld.fold_meta_item(meta_item)), token::NtPath(path) => token::NtPath(Box::new(fld.fold_path(*path))), token::NtTT(tt) => token::NtTT(P(fld.fold_tt(&tt))), @@ -850,7 +843,7 @@ pub fn noop_fold_struct_field(f: StructField, fld: &mut T) -> StructF span: fld.new_span(f.span), id: fld.new_id(f.id), ident: f.ident.map(|ident| fld.fold_ident(ident)), - vis: f.vis, + vis: fld.fold_vis(f.vis), ty: fld.fold_ty(f.ty), attrs: fold_attrs(f.attrs, fld), } @@ -1022,7 +1015,7 @@ pub fn noop_fold_crate(Crate {module, attrs, config, mut exported_mac let config = folder.fold_meta_items(config); let mut items = folder.fold_item(P(ast::Item { - ident: token::special_idents::invalid, + ident: keywords::Invalid.ident(), attrs: attrs, id: ast::DUMMY_NODE_ID, vis: ast::Visibility::Public, @@ -1241,10 +1234,11 @@ pub fn noop_fold_expr(Expr {id, node, span, attrs}: Expr, folder: &mu ExprKind::Match(folder.fold_expr(expr), arms.move_map(|x| folder.fold_arm(x))) } - ExprKind::Closure(capture_clause, decl, body) => { + ExprKind::Closure(capture_clause, decl, body, span) => { ExprKind::Closure(capture_clause, - folder.fold_fn_decl(decl), - folder.fold_block(body)) + folder.fold_fn_decl(decl), + folder.fold_block(body), + folder.new_span(span)) } ExprKind::Block(blk) => ExprKind::Block(folder.fold_block(blk)), ExprKind::Assign(el, er) => { diff --git a/src/libsyntax/lib.rs b/src/libsyntax/lib.rs index ca7e5729c0..420a41e03b 100644 --- a/src/libsyntax/lib.rs +++ b/src/libsyntax/lib.rs @@ -25,6 +25,7 @@ #![cfg_attr(not(stage0), deny(warnings))] #![feature(associated_consts)] +#![feature(const_fn)] #![feature(filling_drop)] #![feature(libc)] #![feature(rustc_private)] @@ -32,6 +33,7 @@ #![feature(str_escape)] #![feature(unicode)] #![feature(question_mark)] +#![feature(range_contains)] extern crate serialize; extern crate term; @@ -96,7 +98,6 @@ pub mod config; pub mod entry; pub mod feature_gate; pub mod fold; -pub mod owned_slice; pub mod parse; pub mod ptr; pub mod show_span; diff --git a/src/libsyntax/parse/attr.rs b/src/libsyntax/parse/attr.rs index b8e320e36e..db643eb0df 100644 --- a/src/libsyntax/parse/attr.rs +++ b/src/libsyntax/parse/attr.rs @@ -35,7 +35,10 @@ impl<'a> Parser<'a> { self.span.hi ); if attr.node.style != ast::AttrStyle::Outer { - return Err(self.fatal("expected outer comment")); + let mut err = self.fatal("expected outer doc comment"); + err.note("inner doc comments like this (starting with \ + `//!` or `/*!`) can only appear before items"); + return Err(err); } attrs.push(attr); self.bump(); @@ -69,9 +72,8 @@ impl<'a> Parser<'a> { self.diagnostic() .struct_span_err(span, "an inner attribute is not permitted in this context") - .fileline_help(span, - 
"place inner attribute at the top of the module or \ - block") + .help("place inner attribute at the top of the module or \ + block") .emit() } ast::AttrStyle::Inner diff --git a/src/libsyntax/parse/lexer/mod.rs b/src/libsyntax/parse/lexer/mod.rs index a5cb5c7117..da62e5286d 100644 --- a/src/libsyntax/parse/lexer/mod.rs +++ b/src/libsyntax/parse/lexer/mod.rs @@ -13,8 +13,7 @@ use codemap::{BytePos, CharPos, CodeMap, Pos, Span}; use codemap; use errors::{FatalError, Handler, DiagnosticBuilder}; use ext::tt::transcribe::tt_next_token; -use parse::token::str_to_ident; -use parse::token; +use parse::token::{self, keywords, str_to_ident}; use str::char_at; use rustc_unicode::property::Pattern_White_Space; @@ -30,24 +29,42 @@ mod unicode_chars; pub trait Reader { fn is_eof(&self) -> bool; - fn next_token(&mut self) -> TokenAndSpan; + fn try_next_token(&mut self) -> Result; + fn next_token(&mut self) -> TokenAndSpan where Self: Sized { + let res = self.try_next_token(); + self.unwrap_or_abort(res) + } /// Report a fatal error with the current span. fn fatal(&self, &str) -> FatalError; /// Report a non-fatal error with the current span. fn err(&self, &str); + fn emit_fatal_errors(&mut self); + fn unwrap_or_abort(&mut self, res: Result) -> TokenAndSpan { + match res { + Ok(tok) => tok, + Err(_) => { + self.emit_fatal_errors(); + panic!(FatalError); + } + } + } fn peek(&self) -> TokenAndSpan; /// Get a token the parser cares about. - fn real_token(&mut self) -> TokenAndSpan { - let mut t = self.next_token(); + fn try_real_token(&mut self) -> Result { + let mut t = self.try_next_token()?; loop { match t.tok { token::Whitespace | token::Comment | token::Shebang(_) => { - t = self.next_token(); + t = self.try_next_token()?; } _ => break, } } - t + Ok(t) + } + fn real_token(&mut self) -> TokenAndSpan { + let res = self.try_real_token(); + self.unwrap_or_abort(res) } } @@ -71,7 +88,7 @@ pub struct StringReader<'a> { // cached: pub peek_tok: token::Token, pub peek_span: Span, - + pub fatal_errs: Vec>, // cache a direct reference to the source text, so that we don't have to // retrieve it via `self.filemap.src.as_ref().unwrap()` all the time. source_text: Rc, @@ -82,13 +99,14 @@ impl<'a> Reader for StringReader<'a> { self.curr.is_none() } /// Return the next token. EFFECT: advances the string_reader. - fn next_token(&mut self) -> TokenAndSpan { + fn try_next_token(&mut self) -> Result { + assert!(self.fatal_errs.is_empty()); let ret_val = TokenAndSpan { tok: replace(&mut self.peek_tok, token::Underscore), sp: self.peek_span, }; - self.advance_token(); - ret_val + self.advance_token()?; + Ok(ret_val) } fn fatal(&self, m: &str) -> FatalError { self.fatal_span(self.peek_span, m) @@ -96,6 +114,12 @@ impl<'a> Reader for StringReader<'a> { fn err(&self, m: &str) { self.err_span(self.peek_span, m) } + fn emit_fatal_errors(&mut self) { + for err in &mut self.fatal_errs { + err.emit(); + } + self.fatal_errs.clear(); + } fn peek(&self) -> TokenAndSpan { // FIXME(pcwalton): Bad copy! 
TokenAndSpan { @@ -109,10 +133,11 @@ impl<'a> Reader for TtReader<'a> { fn is_eof(&self) -> bool { self.cur_tok == token::Eof } - fn next_token(&mut self) -> TokenAndSpan { + fn try_next_token(&mut self) -> Result { + assert!(self.fatal_errs.is_empty()); let r = tt_next_token(self); debug!("TtReader: r={:?}", r); - r + Ok(r) } fn fatal(&self, m: &str) -> FatalError { self.sp_diag.span_fatal(self.cur_span, m) @@ -120,6 +145,12 @@ impl<'a> Reader for TtReader<'a> { fn err(&self, m: &str) { self.sp_diag.span_err(self.cur_span, m); } + fn emit_fatal_errors(&mut self) { + for err in &mut self.fatal_errs { + err.emit(); + } + self.fatal_errs.clear(); + } fn peek(&self) -> TokenAndSpan { TokenAndSpan { tok: self.cur_tok.clone(), @@ -152,6 +183,7 @@ impl<'a> StringReader<'a> { peek_tok: token::Eof, peek_span: codemap::DUMMY_SP, source_text: source_text, + fatal_errs: Vec::new(), }; sr.bump(); sr @@ -161,7 +193,10 @@ impl<'a> StringReader<'a> { filemap: Rc) -> StringReader<'b> { let mut sr = StringReader::new_raw(span_diagnostic, filemap); - sr.advance_token(); + if let Err(_) = sr.advance_token() { + sr.emit_fatal_errors(); + panic!(FatalError); + } sr } @@ -250,7 +285,7 @@ impl<'a> StringReader<'a> { /// Advance peek_tok and peek_span to refer to the next token, and /// possibly update the interner. - fn advance_token(&mut self) { + fn advance_token(&mut self) -> Result<(), ()> { match self.scan_whitespace_or_comment() { Some(comment) => { self.peek_span = comment.sp; @@ -262,11 +297,12 @@ impl<'a> StringReader<'a> { self.peek_span = codemap::mk_sp(self.filemap.end_pos, self.filemap.end_pos); } else { let start_bytepos = self.last_pos; - self.peek_tok = self.next_token_inner(); + self.peek_tok = self.next_token_inner()?; self.peek_span = codemap::mk_sp(start_bytepos, self.last_pos); }; } } + Ok(()) } fn byte_offset(&self, pos: BytePos) -> BytePos { @@ -895,11 +931,10 @@ impl<'a> StringReader<'a> { _ => { if ascii_only && first_source_char > '\x7F' { let last_pos = self.last_pos; - self.err_span_char(start, - last_pos, - "byte constant must be ASCII. Use a \\xHH escape for a \ - non-ASCII byte", - first_source_char); + self.err_span_(start, + last_pos, + "byte constant must be ASCII. Use a \\xHH escape for a \ + non-ASCII byte"); return false; } } @@ -1014,7 +1049,7 @@ impl<'a> StringReader<'a> { /// Return the next token from the string, advances the input past that /// token, and updates the interner - fn next_token_inner(&mut self) -> token::Token { + fn next_token_inner(&mut self) -> Result { let c = self.curr; if ident_start(c) && match (c.unwrap(), self.nextch(), self.nextnextch()) { @@ -1034,36 +1069,32 @@ impl<'a> StringReader<'a> { self.bump(); } - return self.with_str_from(start, |string| { + return Ok(self.with_str_from(start, |string| { if string == "_" { token::Underscore } else { // FIXME: perform NFKC normalization here. (Issue #2253) - if self.curr_is(':') && self.nextch_is(':') { - token::Ident(str_to_ident(string), token::ModName) - } else { - token::Ident(str_to_ident(string), token::Plain) - } + token::Ident(str_to_ident(string)) } - }); + })); } if is_dec_digit(c) { let num = self.scan_number(c.unwrap()); let suffix = self.scan_optional_raw_name(); debug!("next_token_inner: scanned number {:?}, {:?}", num, suffix); - return token::Literal(num, suffix); + return Ok(token::Literal(num, suffix)); } match c.expect("next_token_inner called at EOF") { // One-byte tokens. 
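As an aside, a reduced sketch of the fallible-lexing shape the `Reader` trait takes on above: token scanning returns a `Result`, fatal diagnostics are queued rather than emitted immediately, and the infallible wrappers drain the queue before aborting. The `Token` type and the demo reader are stand-ins, not the real compiler types.

    #[derive(Debug)]
    struct Token(String);

    trait Reader {
        fn try_next_token(&mut self) -> Result<Token, ()>;
        fn emit_fatal_errors(&mut self);

        fn next_token(&mut self) -> Token where Self: Sized {
            match self.try_next_token() {
                Ok(tok) => tok,
                Err(()) => {
                    self.emit_fatal_errors();
                    panic!("fatal lexing error");
                }
            }
        }
    }

    struct DemoReader {
        tokens: Vec<Token>,
        queued_errors: Vec<String>,
    }

    impl Reader for DemoReader {
        fn try_next_token(&mut self) -> Result<Token, ()> {
            match self.tokens.pop() {
                Some(tok) => Ok(tok),
                None => {
                    // Queue the diagnostic instead of emitting and panicking here.
                    self.queued_errors.push("unknown start of token".to_string());
                    Err(())
                }
            }
        }
        fn emit_fatal_errors(&mut self) {
            for e in self.queued_errors.drain(..) {
                eprintln!("error: {}", e);
            }
        }
    }

    fn main() {
        let mut reader = DemoReader {
            tokens: vec![Token("fn".to_string())],
            queued_errors: Vec::new(),
        };
        println!("{:?}", reader.next_token());      // Ok path
        assert!(reader.try_next_token().is_err());  // error queued, not yet emitted
        reader.emit_fatal_errors();                 // drained here
    }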
';' => { self.bump(); - return token::Semi; + return Ok(token::Semi); } ',' => { self.bump(); - return token::Comma; + return Ok(token::Comma); } '.' => { self.bump(); @@ -1071,67 +1102,67 @@ impl<'a> StringReader<'a> { self.bump(); if self.curr_is('.') { self.bump(); - token::DotDotDot + Ok(token::DotDotDot) } else { - token::DotDot + Ok(token::DotDot) } } else { - token::Dot + Ok(token::Dot) }; } '(' => { self.bump(); - return token::OpenDelim(token::Paren); + return Ok(token::OpenDelim(token::Paren)); } ')' => { self.bump(); - return token::CloseDelim(token::Paren); + return Ok(token::CloseDelim(token::Paren)); } '{' => { self.bump(); - return token::OpenDelim(token::Brace); + return Ok(token::OpenDelim(token::Brace)); } '}' => { self.bump(); - return token::CloseDelim(token::Brace); + return Ok(token::CloseDelim(token::Brace)); } '[' => { self.bump(); - return token::OpenDelim(token::Bracket); + return Ok(token::OpenDelim(token::Bracket)); } ']' => { self.bump(); - return token::CloseDelim(token::Bracket); + return Ok(token::CloseDelim(token::Bracket)); } '@' => { self.bump(); - return token::At; + return Ok(token::At); } '#' => { self.bump(); - return token::Pound; + return Ok(token::Pound); } '~' => { self.bump(); - return token::Tilde; + return Ok(token::Tilde); } '?' => { self.bump(); - return token::Question; + return Ok(token::Question); } ':' => { self.bump(); if self.curr_is(':') { self.bump(); - return token::ModSep; + return Ok(token::ModSep); } else { - return token::Colon; + return Ok(token::Colon); } } '$' => { self.bump(); - return token::Dollar; + return Ok(token::Dollar); } // Multi-byte tokens. @@ -1139,21 +1170,21 @@ impl<'a> StringReader<'a> { self.bump(); if self.curr_is('=') { self.bump(); - return token::EqEq; + return Ok(token::EqEq); } else if self.curr_is('>') { self.bump(); - return token::FatArrow; + return Ok(token::FatArrow); } else { - return token::Eq; + return Ok(token::Eq); } } '!' 
=> { self.bump(); if self.curr_is('=') { self.bump(); - return token::Ne; + return Ok(token::Ne); } else { - return token::Not; + return Ok(token::Not); } } '<' => { @@ -1161,21 +1192,21 @@ impl<'a> StringReader<'a> { match self.curr.unwrap_or('\x00') { '=' => { self.bump(); - return token::Le; + return Ok(token::Le); } '<' => { - return self.binop(token::Shl); + return Ok(self.binop(token::Shl)); } '-' => { self.bump(); match self.curr.unwrap_or('\x00') { _ => { - return token::LArrow; + return Ok(token::LArrow); } } } _ => { - return token::Lt; + return Ok(token::Lt); } } } @@ -1184,13 +1215,13 @@ impl<'a> StringReader<'a> { match self.curr.unwrap_or('\x00') { '=' => { self.bump(); - return token::Ge; + return Ok(token::Ge); } '>' => { - return self.binop(token::Shr); + return Ok(self.binop(token::Shr)); } _ => { - return token::Gt; + return Ok(token::Gt); } } } @@ -1231,20 +1262,14 @@ impl<'a> StringReader<'a> { let keyword_checking_ident = self.with_str_from(start, |lifetime_name| { str_to_ident(lifetime_name) }); - let keyword_checking_token = &token::Ident(keyword_checking_ident, - token::Plain); + let keyword_checking_token = &token::Ident(keyword_checking_ident); let last_bpos = self.last_pos; - if keyword_checking_token.is_keyword(token::keywords::SelfValue) { - self.err_span_(start, - last_bpos, - "invalid lifetime name: 'self is no longer a special \ - lifetime"); - } else if keyword_checking_token.is_any_keyword() && - !keyword_checking_token.is_keyword(token::keywords::Static) { - self.err_span_(start, last_bpos, "invalid lifetime name"); + if keyword_checking_token.is_any_keyword() && + !keyword_checking_token.is_keyword(keywords::Static) { + self.err_span_(start, last_bpos, "lifetimes cannot use keyword names"); } - return token::Lifetime(ident); + return Ok(token::Lifetime(ident)); } let valid = self.scan_char_or_byte(start, @@ -1266,7 +1291,7 @@ impl<'a> StringReader<'a> { }; self.bump(); // advance curr past token let suffix = self.scan_optional_raw_name(); - return token::Literal(token::Char(id), suffix); + return Ok(token::Literal(token::Char(id), suffix)); } 'b' => { self.bump(); @@ -1277,7 +1302,7 @@ impl<'a> StringReader<'a> { _ => unreachable!(), // Should have been a token::Ident above. 
}; let suffix = self.scan_optional_raw_name(); - return token::Literal(lit, suffix); + return Ok(token::Literal(lit, suffix)); } '"' => { let start_bpos = self.last_pos; @@ -1308,7 +1333,7 @@ impl<'a> StringReader<'a> { }; self.bump(); let suffix = self.scan_optional_raw_name(); - return token::Literal(token::Str_(id), suffix); + return Ok(token::Literal(token::Str_(id), suffix)); } 'r' => { let start_bpos = self.last_pos; @@ -1379,24 +1404,24 @@ impl<'a> StringReader<'a> { token::intern("??") }; let suffix = self.scan_optional_raw_name(); - return token::Literal(token::StrRaw(id, hash_count), suffix); + return Ok(token::Literal(token::StrRaw(id, hash_count), suffix)); } '-' => { if self.nextch_is('>') { self.bump(); self.bump(); - return token::RArrow; + return Ok(token::RArrow); } else { - return self.binop(token::Minus); + return Ok(self.binop(token::Minus)); } } '&' => { if self.nextch_is('&') { self.bump(); self.bump(); - return token::AndAnd; + return Ok(token::AndAnd); } else { - return self.binop(token::And); + return Ok(self.binop(token::And)); } } '|' => { @@ -1404,27 +1429,27 @@ impl<'a> StringReader<'a> { Some('|') => { self.bump(); self.bump(); - return token::OrOr; + return Ok(token::OrOr); } _ => { - return self.binop(token::Or); + return Ok(self.binop(token::Or)); } } } '+' => { - return self.binop(token::Plus); + return Ok(self.binop(token::Plus)); } '*' => { - return self.binop(token::Star); + return Ok(self.binop(token::Star)); } '/' => { - return self.binop(token::Slash); + return Ok(self.binop(token::Slash)); } '^' => { - return self.binop(token::Caret); + return Ok(self.binop(token::Caret)); } '%' => { - return self.binop(token::Percent); + return Ok(self.binop(token::Percent)); } c => { let last_bpos = self.last_pos; @@ -1434,8 +1459,8 @@ impl<'a> StringReader<'a> { "unknown start of token", c); unicode_chars::check_for_substitution(&self, c, &mut err); - err.emit(); - panic!(FatalError); + self.fatal_errs.push(err); + Err(()) } } } @@ -1687,7 +1712,7 @@ mod tests { assert_eq!(string_reader.next_token().tok, token::Whitespace); let tok1 = string_reader.next_token(); let tok2 = TokenAndSpan { - tok: token::Ident(id, token::Plain), + tok: token::Ident(id), sp: Span { lo: BytePos(21), hi: BytePos(23), @@ -1701,7 +1726,7 @@ mod tests { // read another token: let tok3 = string_reader.next_token(); let tok4 = TokenAndSpan { - tok: token::Ident(str_to_ident("main"), token::Plain), + tok: token::Ident(str_to_ident("main")), sp: Span { lo: BytePos(24), hi: BytePos(28), @@ -1722,8 +1747,8 @@ mod tests { } // make the identifier by looking up the string in the interner - fn mk_ident(id: &str, style: token::IdentStyle) -> token::Token { - token::Ident(str_to_ident(id), style) + fn mk_ident(id: &str) -> token::Token { + token::Ident(str_to_ident(id)) } #[test] @@ -1731,9 +1756,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); check_tokenization(setup(&cm, &sh, "a b".to_string()), - vec![mk_ident("a", token::Plain), - token::Whitespace, - mk_ident("b", token::Plain)]); + vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); } #[test] @@ -1741,9 +1764,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); check_tokenization(setup(&cm, &sh, "a::b".to_string()), - vec![mk_ident("a", token::ModName), - token::ModSep, - mk_ident("b", token::Plain)]); + vec![mk_ident("a"), token::ModSep, mk_ident("b")]); } #[test] @@ -1751,10 +1772,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); 
check_tokenization(setup(&cm, &sh, "a ::b".to_string()), - vec![mk_ident("a", token::Plain), - token::Whitespace, - token::ModSep, - mk_ident("b", token::Plain)]); + vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); } #[test] @@ -1762,10 +1780,7 @@ mod tests { let cm = Rc::new(CodeMap::new()); let sh = mk_sh(cm.clone()); check_tokenization(setup(&cm, &sh, "a:: b".to_string()), - vec![mk_ident("a", token::ModName), - token::ModSep, - token::Whitespace, - mk_ident("b", token::Plain)]); + vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); } #[test] diff --git a/src/libsyntax/parse/lexer/unicode_chars.rs b/src/libsyntax/parse/lexer/unicode_chars.rs index 1d32dd4973..d337c78bee 100644 --- a/src/libsyntax/parse/lexer/unicode_chars.rs +++ b/src/libsyntax/parse/lexer/unicode_chars.rs @@ -16,6 +16,22 @@ use errors::DiagnosticBuilder; use super::StringReader; const UNICODE_ARRAY: &'static [(char, &'static str, char)] = &[ + (' ', "No-Break Space", ' '), + (' ', "Ogham Space Mark", ' '), + (' ', "En Quad", ' '), + (' ', "Em Quad", ' '), + (' ', "En Space", ' '), + (' ', "Em Space", ' '), + (' ', "Three-Per-Em Space", ' '), + (' ', "Four-Per-Em Space", ' '), + (' ', "Six-Per-Em Space", ' '), + (' ', "Figure Space", ' '), + (' ', "Punctuation Space", ' '), + (' ', "Thin Space", ' '), + (' ', "Hair Space", ' '), + (' ', "Narrow No-Break Space", ' '), + (' ', "Medium Mathematical Space", ' '), + (' ', "Ideographic Space", ' '), ('ߺ', "Nko Lajanyalan", '_'), ('﹍', "Dashed Low Line", '_'), ('﹎', "Centreline Low Line", '_'), @@ -24,14 +40,18 @@ const UNICODE_ARRAY: &'static [(char, &'static str, char)] = &[ ('‑', "Non-Breaking Hyphen", '-'), ('‒', "Figure Dash", '-'), ('–', "En Dash", '-'), + ('—', "Em Dash", '-'), ('﹘', "Small Em Dash", '-'), ('⁃', "Hyphen Bullet", '-'), ('˗', "Modifier Letter Minus Sign", '-'), ('−', "Minus Sign", '-'), + ('ー', "Katakana-Hiragana Prolonged Sound Mark", '-'), ('Ù«', "Arabic Decimal Separator", ','), ('‚', "Single Low-9 Quotation Mark", ','), ('ꓹ', "Lisu Letter Tone Na Po", ','), + (',', "Fullwidth Comma", ','), (';', "Greek Question Mark", ';'), + (';', "Fullwidth Semicolon", ';'), ('ः', "Devanagari Sign Visarga", ':'), ('ઃ', "Gujarati Sign Visarga", ':'), (':', "Fullwidth Colon", ':'), @@ -53,6 +73,7 @@ const UNICODE_ARRAY: &'static [(char, &'static str, char)] = &[ ('ʔ', "Latin Letter Glottal Stop", '?'), ('ॽ', "Devanagari Letter Glottal Stop", '?'), ('Ꭾ', "Cherokee Letter He", '?'), + ('?', "Fullwidth Question Mark", '?'), ('𝅭', "Musical Symbol Combining Augmentation Dot", '.'), ('․', "One Dot Leader", '.'), ('۔', "Arabic Full Stop", '.'), @@ -60,9 +81,12 @@ const UNICODE_ARRAY: &'static [(char, &'static str, char)] = &[ ('܂', "Syriac Sublinear Full Stop", '.'), ('꘎', "Vai Full Stop", '.'), ('𐩐', "Kharoshthi Punctuation Dot", '.'), + ('·', "Middle Dot", '.'), ('Ù ', "Arabic-Indic Digit Zero", '.'), ('Û°', "Extended Arabic-Indic Digit Zero", '.'), ('ꓸ', "Lisu Letter Tone Mya Ti", '.'), + ('。', "Ideographic Full Stop", '.'), + ('・', "Katakana Middle Dot", '.'), ('՝', "Armenian Comma", '\''), (''', "Fullwidth Apostrophe", '\''), ('‘', "Left Single Quotation Mark", '\''), @@ -108,16 +132,30 @@ const UNICODE_ARRAY: &'static [(char, &'static str, char)] = &[ ('ײ', "Hebrew Ligature Yiddish Double Yod", '"'), ('❞', "Heavy Double Comma Quotation Mark Ornament", '"'), ('❝', "Heavy Double Turned Comma Quotation Mark Ornament", '"'), - ('ï¼»', "Fullwidth Left Square Bracket", '('), ('❨', "Medium Left Parenthesis Ornament", '('), - ('❲', 
"Light Left Tortoise Shell Bracket Ornament", '('), - ('〔', "Left Tortoise Shell Bracket", '('), ('ï´¾', "Ornate Left Parenthesis", '('), - ('ï¼½', "Fullwidth Right Square Bracket", ')'), + ('(', "Fullwidth Left Parenthesis", '('), ('❩', "Medium Right Parenthesis Ornament", ')'), - ('❳', "Light Right Tortoise Shell Bracket Ornament", ')'), - ('〕', "Right Tortoise Shell Bracket", ')'), ('ï´¿', "Ornate Right Parenthesis", ')'), + (')', "Fullwidth Right Parenthesis", ')'), + ('ï¼»', "Fullwidth Left Square Bracket", '['), + ('❲', "Light Left Tortoise Shell Bracket Ornament", '['), + ('「', "Left Corner Bracket", '['), + ('『', "Left White Corner Bracket", '['), + ('【', "Left Black Lenticular Bracket", '['), + ('〔', "Left Tortoise Shell Bracket", '['), + ('〖', "Left White Lenticular Bracket", '['), + ('〘', "Left White Tortoise Shell Bracket", '['), + ('〚', "Left White Square Bracket", '['), + ('ï¼½', "Fullwidth Right Square Bracket", ']'), + ('❳', "Light Right Tortoise Shell Bracket Ornament", ']'), + ('」', "Right Corner Bracket", ']'), + ('』', "Right White Corner Bracket", ']'), + ('】', "Right Black Lenticular Bracket", ']'), + ('〕', "Right Tortoise Shell Bracket", ']'), + ('〗', "Right White Lenticular Bracket", ']'), + ('〙', "Right White Tortoise Shell Bracket", ']'), + ('〛', "Right White Square Bracket", ']'), ('❴', "Medium Left Curly Bracket Ornament", '{'), ('❵', "Medium Right Curly Bracket Ornament", '}'), ('⁎', "Low Asterisk", '*'), @@ -140,6 +178,8 @@ const UNICODE_ARRAY: &'static [(char, &'static str, char)] = &[ ('⟍', "Mathematical Falling Diagonal", '\\'), ('⧵', "Reverse Solidus Operator", '\\'), ('⧹', "Big Reverse Solidus", '\\'), + ('、', "Ideographic Comma", '\\'), + ('ヽ', "Katakana Iteration Mark", '\\'), ('㇔', "Cjk Stroke D", '\\'), ('丶', "Cjk Unified Ideograph-4E36", '\\'), ('⼂', "Kangxi Radical Dot", '\\'), @@ -148,15 +188,20 @@ const UNICODE_ARRAY: &'static [(char, &'static str, char)] = &[ ('‹', "Single Left-Pointing Angle Quotation Mark", '<'), ('❮', "Heavy Left-Pointing Angle Quotation Mark Ornament", '<'), ('˂', "Modifier Letter Left Arrowhead", '<'), + ('〈', "Left Angle Bracket", '<'), + ('《', "Left Double Angle Bracket", '<'), ('꓿', "Lisu Punctuation Full Stop", '='), ('›', "Single Right-Pointing Angle Quotation Mark", '>'), ('❯', "Heavy Right-Pointing Angle Quotation Mark Ornament", '>'), ('˃', "Modifier Letter Right Arrowhead", '>'), + ('〉', "Right Angle Bracket", '>'), + ('》', "Right Double Angle Bracket", '>'), ('Ⲻ', "Coptic Capital Letter Dialect-P Ni", '-'), ('Ɂ', "Latin Capital Letter Glottal Stop", '?'), ('Ⳇ', "Coptic Capital Letter Old Coptic Esh", '/'), ]; const ASCII_ARRAY: &'static [(char, &'static str)] = &[ + (' ', "Space"), ('_', "Underscore"), ('-', "Minus/Hyphen"), (',', "Comma"), @@ -169,6 +214,8 @@ const ASCII_ARRAY: &'static [(char, &'static str)] = &[ ('"', "Quotation Mark"), ('(', "Left Parenthesis"), (')', "Right Parenthesis"), + ('[', "Left Square Bracket"), + (']', "Right Square Bracket"), ('{', "Left Curly Brace"), ('}', "Right Curly Brace"), ('*', "Asterisk"), diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs index ea5d6739e6..2a9bcfd658 100644 --- a/src/libsyntax/parse/mod.rs +++ b/src/libsyntax/parse/mod.rs @@ -19,7 +19,6 @@ use ptr::P; use str::char_at; use std::cell::RefCell; -use std::io::Read; use std::iter; use std::path::{Path, PathBuf}; use std::rc::Rc; @@ -446,11 +445,11 @@ fn filtered_float_lit(data: token::InternedString, suffix: Option<&str>, if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) { // if it 
looks like a width, lets try to be helpful. sd.struct_span_err(sp, &format!("invalid width `{}` for float literal", &suf[1..])) - .fileline_help(sp, "valid widths are 32 and 64") + .help("valid widths are 32 and 64") .emit(); } else { sd.struct_span_err(sp, &format!("invalid suffix `{}` for float literal", suf)) - .fileline_help(sp, "valid suffixes are `f32` and `f64`") + .help("valid suffixes are `f32` and `f64`") .emit(); } @@ -622,12 +621,12 @@ pub fn integer_lit(s: &str, if looks_like_width_suffix(&['i', 'u'], suf) { sd.struct_span_err(sp, &format!("invalid width `{}` for integer literal", &suf[1..])) - .fileline_help(sp, "valid widths are 8, 16, 32 and 64") + .help("valid widths are 8, 16, 32 and 64") .emit(); } else { sd.struct_span_err(sp, &format!("invalid suffix `{}` for numeric literal", suf)) - .fileline_help(sp, "the suffix must be one of the integral types \ - (`u32`, `isize`, etc)") + .help("the suffix must be one of the integral types \ + (`u32`, `isize`, etc)") .emit(); } @@ -735,9 +734,9 @@ mod tests { match (tts.len(), tts.get(0), tts.get(1), tts.get(2), tts.get(3)) { ( 4, - Some(&TokenTree::Token(_, token::Ident(name_macro_rules, token::Plain))), + Some(&TokenTree::Token(_, token::Ident(name_macro_rules))), Some(&TokenTree::Token(_, token::Not)), - Some(&TokenTree::Token(_, token::Ident(name_zip, token::Plain))), + Some(&TokenTree::Token(_, token::Ident(name_zip))), Some(&TokenTree::Delimited(_, ref macro_delimed)), ) if name_macro_rules.name.as_str() == "macro_rules" @@ -756,7 +755,7 @@ mod tests { ( 2, Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))), + Some(&TokenTree::Token(_, token::Ident(ident))), ) if first_delimed.delim == token::Paren && ident.name.as_str() == "a" => {}, @@ -767,7 +766,7 @@ mod tests { ( 2, Some(&TokenTree::Token(_, token::Dollar)), - Some(&TokenTree::Token(_, token::Ident(ident, token::Plain))), + Some(&TokenTree::Token(_, token::Ident(ident))), ) if second_delimed.delim == token::Paren && ident.name.as_str() == "a" => {}, @@ -786,26 +785,17 @@ mod tests { let tts = string_to_tts("fn a (b : i32) { b; }".to_string()); let expected = vec![ - TokenTree::Token(sp(0, 2), - token::Ident(str_to_ident("fn"), - token::IdentStyle::Plain)), - TokenTree::Token(sp(3, 4), - token::Ident(str_to_ident("a"), - token::IdentStyle::Plain)), + TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))), + TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))), TokenTree::Delimited( sp(5, 14), Rc::new(ast::Delimited { delim: token::DelimToken::Paren, open_span: sp(5, 6), tts: vec![ - TokenTree::Token(sp(6, 7), - token::Ident(str_to_ident("b"), - token::IdentStyle::Plain)), - TokenTree::Token(sp(8, 9), - token::Colon), - TokenTree::Token(sp(10, 13), - token::Ident(str_to_ident("i32"), - token::IdentStyle::Plain)), + TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))), + TokenTree::Token(sp(8, 9), token::Colon), + TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))), ], close_span: sp(13, 14), })), @@ -815,11 +805,8 @@ mod tests { delim: token::DelimToken::Brace, open_span: sp(15, 16), tts: vec![ - TokenTree::Token(sp(17, 18), - token::Ident(str_to_ident("b"), - token::IdentStyle::Plain)), - TokenTree::Token(sp(18, 19), - token::Semi) + TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))), + TokenTree::Token(sp(18, 19), token::Semi), ], close_span: sp(20, 21), })) @@ -938,7 +925,7 @@ mod tests { Abi::Rust, ast::Generics{ // no idea on either of these: lifetimes: Vec::new(), - 
ty_params: P::empty(), + ty_params: P::new(), where_clause: ast::WhereClause { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index b8c926f8de..fc62cee92f 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -pub use self::PathParsingMode::*; - use abi::{self, Abi}; use ast::BareFnTy; use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier}; @@ -51,7 +49,7 @@ use parse::common::SeqSep; use parse::lexer::{Reader, TokenAndSpan}; use parse::obsolete::{ParserObsoleteMethods, ObsoleteSyntax}; use parse::token::{self, intern, MatchNt, SubstNt, SpecialVarNt, InternedString}; -use parse::token::{keywords, special_idents, SpecialMacroVar}; +use parse::token::{keywords, SpecialMacroVar}; use parse::{new_sub_parser_from_file, ParseSess}; use util::parser::{AssocOp, Fixity}; use print::pprust; @@ -59,7 +57,6 @@ use ptr::P; use parse::PResult; use std::collections::HashSet; -use std::io::prelude::*; use std::mem; use std::path::{Path, PathBuf}; use std::rc::Rc; @@ -75,18 +72,19 @@ bitflags! { type ItemInfo = (Ident, ItemKind, Option >); -/// How to parse a path. There are four different kinds of paths, all of which +/// How to parse a path. There are three different kinds of paths, all of which /// are parsed somewhat differently. #[derive(Copy, Clone, PartialEq)] -pub enum PathParsingMode { - /// A path with no type parameters; e.g. `foo::bar::Baz` - NoTypesAllowed, +pub enum PathStyle { + /// A path with no type parameters, e.g. `foo::bar::Baz`, used in imports or visibilities. + Mod, /// A path with a lifetime and type parameters, with no double colons - /// before the type parameters; e.g. `foo::bar<'a>::Baz` - LifetimeAndTypesWithoutColons, + /// before the type parameters; e.g. `foo::bar<'a>::Baz`, used in types. + /// Paths using this style can be passed into macros expecting `path` nonterminals. + Type, /// A path with a lifetime and type parameters with double colons before - /// the type parameters; e.g. `foo::bar::<'a>::Baz::` - LifetimeAndTypesWithColons, + /// the type parameters; e.g. `foo::bar::<'a>::Baz::`, used in expressions or patterns. + Expr, } /// How to parse a bound, whether to allow bound modifiers such as `?`. @@ -290,13 +288,13 @@ impl TokenType { match *self { TokenType::Token(ref t) => format!("`{}`", Parser::token_to_string(t)), TokenType::Operator => "an operator".to_string(), - TokenType::Keyword(kw) => format!("`{}`", kw.to_name()), + TokenType::Keyword(kw) => format!("`{}`", kw.name()), } } } -fn is_plain_ident_or_underscore(t: &token::Token) -> bool { - t.is_plain_ident() || *t == token::Underscore +fn is_ident_or_underscore(t: &token::Token) -> bool { + t.is_ident() || *t == token::Underscore } /// Information about the path to a module. 
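For reference, the three path shapes that `PathStyle` distinguishes, shown as source forms; the enum mirrors the definition above, while the helper function is purely illustrative.

    #[derive(Copy, Clone, PartialEq, Debug)]
    enum PathStyle { Mod, Type, Expr }

    fn example(style: PathStyle) -> &'static str {
        match style {
            // Imports and visibilities: no type parameters at all.
            PathStyle::Mod => "foo::bar::Baz",
            // Types: parameters without a leading `::`.
            PathStyle::Type => "foo::bar<'a>::Baz",
            // Expressions and patterns: turbofish `::<...>` before the parameters.
            PathStyle::Expr => "foo::bar::<'a>::Baz::<T>",
        }
    }

    fn main() {
        for style in &[PathStyle::Mod, PathStyle::Type, PathStyle::Expr] {
            println!("{:?}: {}", style, example(*style));
        }
    }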
@@ -396,6 +394,17 @@ impl<'a> Parser<'a> { Parser::token_to_string(&self.token) } + pub fn this_token_descr(&self) -> String { + let s = self.this_token_to_string(); + if self.token.is_strict_keyword() { + format!("keyword `{}`", s) + } else if self.token.is_reserved_keyword() { + format!("reserved keyword `{}`", s) + } else { + format!("`{}`", s) + } + } + pub fn unexpected_last(&self, t: &token::Token) -> PResult<'a, T> { let token_str = Parser::token_to_string(t); let last_span = self.last_span; @@ -563,7 +572,7 @@ impl<'a> Parser<'a> { self.check_strict_keywords(); self.check_reserved_keywords(); match self.token { - token::Ident(i, _) => { + token::Ident(i) => { self.bump(); Ok(i) } @@ -574,33 +583,16 @@ impl<'a> Parser<'a> { let mut err = self.fatal(&format!("expected identifier, found `{}`", self.this_token_to_string())); if self.token == token::Underscore { - err.fileline_note(self.span, "`_` is a wildcard pattern, not an identifier"); + err.note("`_` is a wildcard pattern, not an identifier"); } Err(err) } } } - pub fn parse_ident_or_self_type(&mut self) -> PResult<'a, ast::Ident> { - if self.is_self_type_ident() { - self.expect_self_type_ident() - } else { - self.parse_ident() - } - } - - pub fn parse_path_list_item(&mut self) -> PResult<'a, ast::PathListItem> { - let lo = self.span.lo; - let node = if self.eat_keyword(keywords::SelfValue) { - let rename = self.parse_rename()?; - ast::PathListItemKind::Mod { id: ast::DUMMY_NODE_ID, rename: rename } - } else { - let ident = self.parse_ident()?; - let rename = self.parse_rename()?; - ast::PathListItemKind::Ident { name: ident, rename: rename, id: ast::DUMMY_NODE_ID } - }; - let hi = self.last_span.hi; - Ok(spanned(lo, hi, node)) + fn parse_ident_into_path(&mut self) -> PResult<'a, ast::Path> { + let ident = self.parse_ident()?; + Ok(ast::Path::from_ident(self.last_span, ident)) } /// Check if the next token is `tok`, and return `true` if so. @@ -647,9 +639,8 @@ impl<'a> Parser<'a> { } pub fn check_contextual_keyword(&mut self, ident: Ident) -> bool { - let tok = token::Ident(ident, token::Plain); - self.expected_tokens.push(TokenType::Token(tok)); - if let token::Ident(ref cur_ident, _) = self.token { + self.expected_tokens.push(TokenType::Token(token::Ident(ident))); + if let token::Ident(ref cur_ident) = self.token { cur_ident.name == ident.name } else { false @@ -1091,7 +1082,7 @@ impl<'a> Parser<'a> { } pub fn span_fatal_help(&self, sp: Span, m: &str, help: &str) -> DiagnosticBuilder<'a> { let mut err = self.sess.span_diagnostic.struct_span_fatal(sp, m); - err.fileline_help(sp, help); + err.help(help); err } pub fn bug(&self, m: &str) -> ! { @@ -1169,7 +1160,7 @@ impl<'a> Parser<'a> { let other_bounds = if self.eat(&token::BinOp(token::Plus)) { self.parse_ty_param_bounds(BoundParsingMode::Bare)? 
} else { - P::empty() + P::new() }; let all_bounds = Some(TraitTyParamBound(poly_trait_ref, TraitBoundModifier::None)).into_iter() @@ -1180,7 +1171,7 @@ impl<'a> Parser<'a> { } pub fn parse_ty_path(&mut self) -> PResult<'a, TyKind> { - Ok(TyKind::Path(None, self.parse_path(LifetimeAndTypesWithoutColons)?)) + Ok(TyKind::Path(None, self.parse_path(PathStyle::Type)?)) } /// parse a TyKind::BareFn type: @@ -1483,13 +1474,11 @@ impl<'a> Parser<'a> { } else if self.eat_lt() { let (qself, path) = - self.parse_qualified_path(NoTypesAllowed)?; + self.parse_qualified_path(PathStyle::Type)?; TyKind::Path(Some(qself), path) - } else if self.check(&token::ModSep) || - self.token.is_ident() || - self.token.is_path() { - let path = self.parse_path(LifetimeAndTypesWithoutColons)?; + } else if self.token.is_path_start() { + let path = self.parse_path(PathStyle::Type)?; if self.check(&token::Not) { // MACRO INVOCATION self.bump(); @@ -1507,9 +1496,8 @@ impl<'a> Parser<'a> { // TYPE TO BE INFERRED TyKind::Infer } else { - let this_token_str = self.this_token_to_string(); - let msg = format!("expected type, found `{}`", this_token_str); - return Err(self.fatal(&msg[..])); + let msg = format!("expected type, found {}", self.this_token_descr()); + return Err(self.fatal(&msg)); }; let sp = mk_sp(lo, self.last_span.hi); @@ -1532,9 +1520,8 @@ impl<'a> Parser<'a> { } else { let span = self.last_span; self.span_err(span, - "bare raw pointers are no longer allowed, you should \ - likely use `*mut T`, but otherwise `*T` is now \ - known as `*const T`"); + "expected mut or const in raw pointer type (use \ + `*mut T` or `*const T` as appropriate)"); Mutability::Immutable }; let t = self.parse_ty()?; @@ -1552,10 +1539,10 @@ impl<'a> Parser<'a> { debug!("parser is_named_argument offset:{}", offset); if offset == 0 { - is_plain_ident_or_underscore(&self.token) + is_ident_or_underscore(&self.token) && self.look_ahead(1, |t| *t == token::Colon) } else { - self.look_ahead(offset, |t| is_plain_ident_or_underscore(t)) + self.look_ahead(offset, |t| is_ident_or_underscore(t)) && self.look_ahead(offset + 1, |t| *t == token::Colon) } } @@ -1575,7 +1562,7 @@ impl<'a> Parser<'a> { } else { debug!("parse_arg_general ident_to_pat"); let sp = self.last_span; - let spanned = Spanned { span: sp, node: special_idents::invalid }; + let spanned = Spanned { span: sp, node: keywords::Invalid.ident() }; P(Pat { id: ast::DUMMY_NODE_ID, node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), @@ -1627,12 +1614,12 @@ impl<'a> Parser<'a> { } /// Matches token_lit = LIT_INTEGER | ... 
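Hedged examples of the literal token forms referred to in the comment above; the bindings below use present-day suffix rules and are illustrative only.

    fn main() {
        let _int = 42i64;            // integer literal with a width suffix
        let _float = 2.5f32;         // float literal; valid widths are 32 and 64
        let _byte = b'A';            // byte literal, must be ASCII or an escape
        let _raw = r#"raw "str""#;   // raw string literal
        println!("literal examples compiled");
    }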
- pub fn lit_from_token(&self, tok: &token::Token) -> PResult<'a, LitKind> { - match *tok { + pub fn parse_lit_token(&mut self) -> PResult<'a, LitKind> { + let out = match self.token { token::Interpolated(token::NtExpr(ref v)) => { match v.node { - ExprKind::Lit(ref lit) => { Ok(lit.node.clone()) } - _ => { return self.unexpected_last(tok); } + ExprKind::Lit(ref lit) => { lit.node.clone() } + _ => { return self.unexpected_last(&self.token); } } } token::Literal(lit, suf) => { @@ -1647,13 +1634,13 @@ impl<'a> Parser<'a> { (false, parse::integer_lit(&s.as_str(), suf.as_ref().map(|s| s.as_str()), &self.sess.span_diagnostic, - self.last_span)) + self.span)) } token::Float(s) => { (false, parse::float_lit(&s.as_str(), suf.as_ref().map(|s| s.as_str()), &self.sess.span_diagnostic, - self.last_span)) + self.span)) } token::Str_(s) => { @@ -1675,14 +1662,17 @@ impl<'a> Parser<'a> { }; if suffix_illegal { - let sp = self.last_span; + let sp = self.span; self.expect_no_suffix(sp, &format!("{} literal", lit.short_name()), suf) } - Ok(out) + out } - _ => { return self.unexpected_last(tok); } - } + _ => { return self.unexpected_last(&self.token); } + }; + + self.bump(); + Ok(out) } /// Matches lit = true | false | token_lit @@ -1693,8 +1683,7 @@ impl<'a> Parser<'a> { } else if self.eat_keyword(keywords::False) { LitKind::Bool(false) } else { - let token = self.bump_and_get(); - let lit = self.lit_from_token(&token)?; + let lit = self.parse_lit_token()?; lit }; Ok(codemap::Spanned { node: lit, span: mk_sp(lo, self.last_span.hi) }) @@ -1718,6 +1707,16 @@ impl<'a> Parser<'a> { } } + pub fn parse_path_segment_ident(&mut self) -> PResult<'a, ast::Ident> { + match self.token { + token::Ident(sid) if self.token.is_path_segment_keyword() => { + self.bump(); + Ok(sid) + } + _ => self.parse_ident(), + } + } + /// Parses qualified path. /// /// Assumes that the leading `<` has been parsed already. @@ -1733,12 +1732,12 @@ impl<'a> Parser<'a> { /// /// `::a` /// `::F::a::` - pub fn parse_qualified_path(&mut self, mode: PathParsingMode) + pub fn parse_qualified_path(&mut self, mode: PathStyle) -> PResult<'a, (QSelf, ast::Path)> { let span = self.last_span; let self_type = self.parse_ty_sum()?; let mut path = if self.eat_keyword(keywords::As) { - self.parse_path(LifetimeAndTypesWithoutColons)? + self.parse_path(PathStyle::Type)? } else { ast::Path { span: span, @@ -1756,13 +1755,13 @@ impl<'a> Parser<'a> { self.expect(&token::ModSep)?; let segments = match mode { - LifetimeAndTypesWithoutColons => { + PathStyle::Type => { self.parse_path_segments_without_colons()? } - LifetimeAndTypesWithColons => { + PathStyle::Expr => { self.parse_path_segments_with_colons()? } - NoTypesAllowed => { + PathStyle::Mod => { self.parse_path_segments_without_types()? } }; @@ -1777,7 +1776,7 @@ impl<'a> Parser<'a> { /// mode. The `mode` parameter determines whether lifetimes, types, and/or /// bounds are permitted and whether `::` must precede type parameter /// groups. - pub fn parse_path(&mut self, mode: PathParsingMode) -> PResult<'a, ast::Path> { + pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> { // Check for a whole path... let found = match self.token { token::Interpolated(token::NtPath(_)) => Some(self.bump_and_get()), @@ -1794,13 +1793,13 @@ impl<'a> Parser<'a> { // identifier followed by an optional lifetime and a set of types. // A bound set is a set of type parameter bounds. let segments = match mode { - LifetimeAndTypesWithoutColons => { + PathStyle::Type => { self.parse_path_segments_without_colons()? 
} - LifetimeAndTypesWithColons => { + PathStyle::Expr => { self.parse_path_segments_with_colons()? } - NoTypesAllowed => { + PathStyle::Mod => { self.parse_path_segments_without_types()? } }; @@ -1824,7 +1823,7 @@ impl<'a> Parser<'a> { let mut segments = Vec::new(); loop { // First, parse an identifier. - let identifier = self.parse_ident_or_self_type()?; + let identifier = self.parse_path_segment_ident()?; // Parse types, optionally. let parameters = if self.eat_lt() { @@ -1877,7 +1876,7 @@ impl<'a> Parser<'a> { let mut segments = Vec::new(); loop { // First, parse an identifier. - let identifier = self.parse_ident_or_self_type()?; + let identifier = self.parse_path_segment_ident()?; // If we do not see a `::`, stop. if !self.eat(&token::ModSep) { @@ -1916,14 +1915,14 @@ impl<'a> Parser<'a> { } } - /// Examples: /// - `a::b::c` - pub fn parse_path_segments_without_types(&mut self) -> PResult<'a, Vec> { + pub fn parse_path_segments_without_types(&mut self) + -> PResult<'a, Vec> { let mut segments = Vec::new(); loop { // First, parse an identifier. - let identifier = self.parse_ident_or_self_type()?; + let identifier = self.parse_path_segment_ident()?; // Assemble and push the result. segments.push(ast::PathSegment { @@ -1931,9 +1930,11 @@ impl<'a> Parser<'a> { parameters: ast::PathParameters::none() }); - // If we do not see a `::`, stop. - if !self.eat(&token::ModSep) { + // If we do not see a `::` or see `::{`/`::*`, stop. + if !self.check(&token::ModSep) || self.is_import_coupler() { return Ok(segments); + } else { + self.bump(); } } } @@ -2035,7 +2036,7 @@ impl<'a> Parser<'a> { } } - /// Parse mutability declaration (mut/const/imm) + /// Parse mutability (`mut` or nothing). pub fn parse_mutability(&mut self) -> PResult<'a, Mutability> { if self.eat_keyword(keywords::Mut) { Ok(Mutability::Mutable) @@ -2220,15 +2221,6 @@ impl<'a> Parser<'a> { let lo = self.span.lo; return self.parse_lambda_expr(lo, CaptureBy::Ref, attrs); }, - token::Ident(id @ ast::Ident { - name: token::SELF_KEYWORD_NAME, - ctxt: _ - }, token::Plain) => { - self.bump(); - let path = ast::Path::from_ident(mk_sp(lo, hi), id); - ex = ExprKind::Path(None, path); - hi = self.last_span.hi; - } token::OpenDelim(token::Bracket) => { self.bump(); @@ -2271,7 +2263,7 @@ impl<'a> Parser<'a> { _ => { if self.eat_lt() { let (qself, path) = - self.parse_qualified_path(LifetimeAndTypesWithColons)?; + self.parse_qualified_path(PathStyle::Expr)?; hi = path.span.hi; return Ok(self.mk_expr(lo, hi, ExprKind::Path(Some(qself), path), attrs)); } @@ -2358,12 +2350,8 @@ impl<'a> Parser<'a> { let mut db = self.fatal("expected expression, found statement (`let`)"); db.note("variable declaration using `let` is a statement"); return Err(db); - } else if self.check(&token::ModSep) || - self.token.is_ident() && - !self.check_keyword(keywords::True) && - !self.check_keyword(keywords::False) { - let pth = - self.parse_path(LifetimeAndTypesWithColons)?; + } else if self.token.is_path_start() { + let pth = self.parse_path(PathStyle::Expr)?; // `!`, as an operator, is prefix, so we know this isn't that if self.check(&token::Not) { @@ -2443,10 +2431,18 @@ impl<'a> Parser<'a> { hi = pth.span.hi; ex = ExprKind::Path(None, pth); } else { - // other literal expression - let lit = self.parse_lit()?; - hi = lit.span.hi; - ex = ExprKind::Lit(P(lit)); + match self.parse_lit() { + Ok(lit) => { + hi = lit.span.hi; + ex = ExprKind::Lit(P(lit)); + } + Err(mut err) => { + err.cancel(); + let msg = format!("expected expression, found {}", + self.this_token_descr()); + 
return Err(self.fatal(&msg)); + } + } } } } @@ -2578,14 +2574,14 @@ impl<'a> Parser<'a> { loop { // expr? while self.eat(&token::Question) { - let hi = self.span.hi; + let hi = self.last_span.hi; e = self.mk_expr(lo, hi, ExprKind::Try(e), None); } // expr.f if self.eat(&token::Dot) { match self.token { - token::Ident(i, _) => { + token::Ident(i) => { let dot_pos = self.last_span.hi; hi = self.span.hi; self.bump(); @@ -2626,10 +2622,9 @@ impl<'a> Parser<'a> { Some(f) => f, None => continue, }; - err.fileline_help(last_span, - &format!("try parenthesizing the first index; e.g., `(foo.{}){}`", - float.trunc() as usize, - format!(".{}", fstr.splitn(2, ".").last().unwrap()))); + err.help(&format!("try parenthesizing the first index; e.g., `(foo.{}){}`", + float.trunc() as usize, + format!(".{}", fstr.splitn(2, ".").last().unwrap()))); } return Err(err); @@ -2640,7 +2635,7 @@ impl<'a> Parser<'a> { self.span_err(self.span, &format!("unexpected token: `{}`", actual)); let dot_pos = self.last_span.hi; - e = self.parse_dot_suffix(special_idents::invalid, + e = self.parse_dot_suffix(keywords::Invalid.ident(), mk_sp(dot_pos, dot_pos), e, lo)?; } @@ -2682,7 +2677,7 @@ impl<'a> Parser<'a> { // Parse unquoted tokens after a `$` in a token tree fn parse_unquoted(&mut self) -> PResult<'a, TokenTree> { let mut sp = self.span; - let (name, namep) = match self.token { + let name = match self.token { token::Dollar => { self.bump(); @@ -2702,40 +2697,36 @@ impl<'a> Parser<'a> { op: repeat, num_captures: name_num }))); - } else if self.token.is_keyword_allow_following_colon(keywords::Crate) { + } else if self.token.is_keyword(keywords::Crate) { self.bump(); return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar))); } else { sp = mk_sp(sp.lo, self.span.hi); - let namep = match self.token { token::Ident(_, p) => p, _ => token::Plain }; - let name = self.parse_ident()?; - (name, namep) + self.parse_ident()? } } - token::SubstNt(name, namep) => { + token::SubstNt(name) => { self.bump(); - (name, namep) + name } _ => unreachable!() }; // continue by trying to parse the `:ident` after `$name` - if self.token == token::Colon && self.look_ahead(1, |t| t.is_ident() && - !t.is_strict_keyword() && - !t.is_reserved_keyword()) { + if self.token == token::Colon && + self.look_ahead(1, |t| t.is_ident() && !t.is_any_keyword()) { self.bump(); sp = mk_sp(sp.lo, self.span.hi); - let kindp = match self.token { token::Ident(_, p) => p, _ => token::Plain }; let nt_kind = self.parse_ident()?; - Ok(TokenTree::Token(sp, MatchNt(name, nt_kind, namep, kindp))) + Ok(TokenTree::Token(sp, MatchNt(name, nt_kind))) } else { - Ok(TokenTree::Token(sp, SubstNt(name, namep))) + Ok(TokenTree::Token(sp, SubstNt(name))) } } pub fn check_unknown_macro_variable(&mut self) { if self.quote_depth == 0 { match self.token { - token::SubstNt(name, _) => + token::SubstNt(name) => self.fatal(&format!("unknown macro variable `{}`", name)).emit(), _ => {} } @@ -3037,15 +3028,8 @@ impl<'a> Parser<'a> { // We have 2 alternatives here: `x..y`/`x...y` and `x..`/`x...` The other // two variants are handled with `parse_prefix_range_expr` call above. let rhs = if self.is_at_start_of_range_notation_rhs() { - let rhs = self.parse_assoc_expr_with(op.precedence() + 1, - LhsExpr::NotYetParsed); - match rhs { - Ok(e) => Some(e), - Err(mut e) => { - e.cancel(); - None - } - } + Some(self.parse_assoc_expr_with(op.precedence() + 1, + LhsExpr::NotYetParsed)?) 
} else { None }; @@ -3142,7 +3126,7 @@ impl<'a> Parser<'a> { let mut err = self.diagnostic().struct_span_err(op_span, "chained comparison operators require parentheses"); if op.node == BinOpKind::Lt && *outer_op == AssocOp::Greater { - err.fileline_help(op_span, + err.help( "use `::<...>` instead of `<...>` if you meant to specify type arguments"); } err.emit(); @@ -3233,13 +3217,15 @@ impl<'a> Parser<'a> { Ok(self.mk_expr(lo, hi, ExprKind::IfLet(pat, expr, thn, els), attrs)) } - // `|args| expr` - pub fn parse_lambda_expr(&mut self, lo: BytePos, + // `move |args| expr` + pub fn parse_lambda_expr(&mut self, + lo: BytePos, capture_clause: CaptureBy, attrs: ThinAttributes) -> PResult<'a, P> { let decl = self.parse_fn_block_decl()?; + let decl_hi = self.last_span.hi; let body = match decl.output { FunctionRetTy::Default(_) => { // If no explicit return type is given, parse any @@ -3263,7 +3249,8 @@ impl<'a> Parser<'a> { Ok(self.mk_expr( lo, body.span.hi, - ExprKind::Closure(capture_clause, decl, body), attrs)) + ExprKind::Closure(capture_clause, decl, body, mk_sp(lo, decl_hi)), + attrs)) } // `else` token already eaten @@ -3595,16 +3582,16 @@ impl<'a> Parser<'a> { } fn parse_pat_range_end(&mut self) -> PResult<'a, P> { - if self.is_path_start() { + if self.token.is_path_start() { let lo = self.span.lo; let (qself, path) = if self.eat_lt() { // Parse a qualified path let (qself, path) = - self.parse_qualified_path(NoTypesAllowed)?; + self.parse_qualified_path(PathStyle::Expr)?; (Some(qself), path) } else { // Parse an unqualified path - (None, self.parse_path(LifetimeAndTypesWithColons)?) + (None, self.parse_path(PathStyle::Expr)?) }; let hi = self.last_span.hi; Ok(self.mk_expr(lo, hi, ExprKind::Path(qself, path), None)) @@ -3613,12 +3600,6 @@ impl<'a> Parser<'a> { } } - fn is_path_start(&self) -> bool { - (self.token == token::Lt || self.token == token::ModSep - || self.token.is_ident() || self.token.is_path()) - && !self.token.is_keyword(keywords::True) && !self.token.is_keyword(keywords::False) - } - /// Parse a pattern. pub fn parse_pat(&mut self) -> PResult<'a, P> { maybe_whole!(self, NtPat); @@ -3669,19 +3650,16 @@ impl<'a> Parser<'a> { // Parse box pat let subpat = self.parse_pat()?; pat = PatKind::Box(subpat); - } else if self.is_path_start() { + } else if self.token.is_path_start() { // Parse pattern starting with a path - if self.token.is_plain_ident() && self.look_ahead(1, |t| *t != token::DotDotDot && + if self.token.is_ident() && self.look_ahead(1, |t| *t != token::DotDotDot && *t != token::OpenDelim(token::Brace) && *t != token::OpenDelim(token::Paren) && - // Contrary to its definition, a plain ident can be followed by :: in macros *t != token::ModSep) { // Plain idents have some extra abilities here compared to general paths if self.look_ahead(1, |t| *t == token::Not) { // Parse macro invocation - let ident = self.parse_ident()?; - let ident_span = self.last_span; - let path = ast::Path::from_ident(ident_span, ident); + let path = self.parse_ident_into_path()?; self.bump(); let delim = self.expect_open_delim()?; let tts = self.parse_seq_to_end( @@ -3701,11 +3679,11 @@ impl<'a> Parser<'a> { let (qself, path) = if self.eat_lt() { // Parse a qualified path let (qself, path) = - self.parse_qualified_path(NoTypesAllowed)?; + self.parse_qualified_path(PathStyle::Expr)?; (Some(qself), path) } else { // Parse an unqualified path - (None, self.parse_path(LifetimeAndTypesWithColons)?) + (None, self.parse_path(PathStyle::Expr)?) 
}; match self.token { token::DotDotDot => { @@ -3762,12 +3740,20 @@ impl<'a> Parser<'a> { } } else { // Try to parse everything else as literal with optional minus - let begin = self.parse_pat_literal_maybe_minus()?; - if self.eat(&token::DotDotDot) { - let end = self.parse_pat_range_end()?; - pat = PatKind::Range(begin, end); - } else { - pat = PatKind::Lit(begin); + match self.parse_pat_literal_maybe_minus() { + Ok(begin) => { + if self.eat(&token::DotDotDot) { + let end = self.parse_pat_range_end()?; + pat = PatKind::Range(begin, end); + } else { + pat = PatKind::Lit(begin); + } + } + Err(mut err) => { + err.cancel(); + let msg = format!("expected pattern, found {}", self.this_token_descr()); + return Err(self.fatal(&msg)); + } } } } @@ -3964,11 +3950,11 @@ impl<'a> Parser<'a> { // Potential trouble: if we allow macros with paths instead of // idents, we'd need to look ahead past the whole path here... - let pth = self.parse_path(NoTypesAllowed)?; + let pth = self.parse_ident_into_path()?; self.bump(); let id = match self.token { - token::OpenDelim(_) => token::special_idents::invalid, // no special identifier + token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier _ => self.parse_ident()?, }; @@ -3980,7 +3966,7 @@ impl<'a> Parser<'a> { _ => { // we only expect an ident if we didn't parse one // above. - let ident_str = if id.name == token::special_idents::invalid.name { + let ident_str = if id.name == keywords::Invalid.name() { "identifier, " } else { "" @@ -4006,7 +3992,7 @@ impl<'a> Parser<'a> { MacStmtStyle::NoBraces }; - if id.name == token::special_idents::invalid.name { + if id.name == keywords::Invalid.name() { let mac = P(spanned(lo, hi, Mac_ { path: pth, tts: tts, ctxt: EMPTY_CTXT })); let stmt = StmtKind::Mac(mac, style, attrs.into_thin_attrs()); spanned(lo, hi, stmt) @@ -4248,7 +4234,7 @@ impl<'a> Parser<'a> { -> PResult<'a, TyParamBounds> { if !self.eat(&token::Colon) { - Ok(P::empty()) + Ok(P::new()) } else { self.parse_ty_param_bounds(mode) } @@ -4426,11 +4412,7 @@ impl<'a> Parser<'a> { p.forbid_lifetime()?; let lo = p.span.lo; let ident = p.parse_ident()?; - let found_eq = p.eat(&token::Eq); - if !found_eq { - let span = p.span; - p.span_warn(span, "whoops, no =?"); - } + p.expect(&token::Eq)?; let ty = p.parse_ty()?; let hi = ty.span.hi; let span = mk_sp(lo, hi); @@ -4634,210 +4616,142 @@ impl<'a> Parser<'a> { })) } - fn is_self_ident(&mut self) -> bool { - match self.token { - token::Ident(id, token::Plain) => id.name == special_idents::self_.name, - _ => false - } - } - - fn expect_self_ident(&mut self) -> PResult<'a, ast::Ident> { - match self.token { - token::Ident(id, token::Plain) if id.name == special_idents::self_.name => { - self.bump(); - Ok(id) - }, - _ => { - let token_str = self.this_token_to_string(); - return Err(self.fatal(&format!("expected `self`, found `{}`", - token_str))) - } - } - } - - fn is_self_type_ident(&mut self) -> bool { - match self.token { - token::Ident(id, token::Plain) => id.name == special_idents::type_self.name, - _ => false - } - } - - fn expect_self_type_ident(&mut self) -> PResult<'a, ast::Ident> { - match self.token { - token::Ident(id, token::Plain) if id.name == special_idents::type_self.name => { - self.bump(); - Ok(id) - }, - _ => { - let token_str = self.this_token_to_string(); - Err(self.fatal(&format!("expected `Self`, found `{}`", - token_str))) - } - } - } - - /// Parse the argument list and result type of a function - /// that may have a self type. 
+ /// Parse the parameter list and result type of a function that may have a `self` parameter. fn parse_fn_decl_with_self(&mut self, - parse_arg_fn: F) -> PResult<'a, (ExplicitSelf, P)> where - F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>, + parse_arg_fn: F) + -> PResult<'a, (ExplicitSelf, P)> + where F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>, { - fn maybe_parse_borrowed_explicit_self<'b>(this: &mut Parser<'b>) - -> PResult<'b, ast::SelfKind> { - // The following things are possible to see here: - // - // fn(&mut self) - // fn(&mut self) - // fn(&'lt self) - // fn(&'lt mut self) - // - // We already know that the current token is `&`. - - if this.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) { - this.bump(); - Ok(SelfKind::Region(None, Mutability::Immutable, this.expect_self_ident()?)) - } else if this.look_ahead(1, |t| t.is_mutability()) && - this.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) { - this.bump(); - let mutability = this.parse_mutability()?; - Ok(SelfKind::Region(None, mutability, this.expect_self_ident()?)) - } else if this.look_ahead(1, |t| t.is_lifetime()) && - this.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) { - this.bump(); - let lifetime = this.parse_lifetime()?; - let ident = this.expect_self_ident()?; - Ok(SelfKind::Region(Some(lifetime), Mutability::Immutable, ident)) - } else if this.look_ahead(1, |t| t.is_lifetime()) && - this.look_ahead(2, |t| t.is_mutability()) && - this.look_ahead(3, |t| t.is_keyword(keywords::SelfValue)) { - this.bump(); - let lifetime = this.parse_lifetime()?; - let mutability = this.parse_mutability()?; - Ok(SelfKind::Region(Some(lifetime), mutability, this.expect_self_ident()?)) - } else { - Ok(SelfKind::Static) - } - } + let expect_ident = |this: &mut Self| match this.token { + token::Ident(ident) => { this.bump(); ident } // Preserve hygienic context. + _ => unreachable!() + }; self.expect(&token::OpenDelim(token::Paren))?; - // A bit of complexity and lookahead is needed here in order to be - // backwards compatible. - let lo = self.span.lo; - let mut self_ident_lo = self.span.lo; - let mut self_ident_hi = self.span.hi; - - let mut mutbl_self = Mutability::Immutable; - let explicit_self = match self.token { + // Parse optional self parameter of a method. + // Only a limited set of initial token sequences is considered self parameters, anything + // else is parsed as a normal function parameter list, so some lookahead is required. 
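To make the limited lookahead above concrete, these are the method headers the new logic treats as starting with a self parameter; the signatures are invented examples, not code from this patch.

    fn main() {
        let accepted = [
            "fn f(self)",
            "fn f(mut self)",
            "fn f(self: Box<Self>)",
            "fn f(&self)",
            "fn f(&mut self)",
            "fn f(&'a self)",
            "fn f(&'a mut self)",
        ];
        for header in &accepted {
            println!("parsed with an explicit self: {}", header);
        }
        // `fn f(*const self)` and `fn f(*mut self)` are rejected with
        // "cannot pass `self` by raw pointer"; anything else falls through
        // to ordinary parameter parsing.
    }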
+ let eself_lo = self.span.lo; + let mut eself_mutbl = Mutability::Immutable; + let (eself, eself_ident_sp) = match self.token { token::BinOp(token::And) => { - let eself = maybe_parse_borrowed_explicit_self(self)?; - self_ident_lo = self.last_span.lo; - self_ident_hi = self.last_span.hi; - eself + // &self + // &mut self + // &'lt self + // &'lt mut self + // ¬_self + if self.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) { + self.bump(); + (SelfKind::Region(None, Mutability::Immutable, expect_ident(self)), + self.last_span) + } else if self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) && + self.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) { + self.bump(); + self.bump(); + (SelfKind::Region(None, Mutability::Mutable, expect_ident(self)), + self.last_span) + } else if self.look_ahead(1, |t| t.is_lifetime()) && + self.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) { + self.bump(); + let lt = self.parse_lifetime()?; + (SelfKind::Region(Some(lt), Mutability::Immutable, expect_ident(self)), + self.last_span) + } else if self.look_ahead(1, |t| t.is_lifetime()) && + self.look_ahead(2, |t| t.is_keyword(keywords::Mut)) && + self.look_ahead(3, |t| t.is_keyword(keywords::SelfValue)) { + self.bump(); + let lt = self.parse_lifetime()?; + self.bump(); + (SelfKind::Region(Some(lt), Mutability::Mutable, expect_ident(self)), + self.last_span) + } else { + (SelfKind::Static, codemap::DUMMY_SP) + } } token::BinOp(token::Star) => { - // Possibly "*self" or "*mut self" -- not supported. Try to avoid - // emitting cryptic "unexpected token" errors. - self.bump(); - let _mutability = if self.token.is_mutability() { - self.parse_mutability()? - } else { - Mutability::Immutable - }; - if self.is_self_ident() { - let span = self.span; - self.span_err(span, "cannot pass self by raw pointer"); + // *self + // *const self + // *mut self + // *not_self + // Emit special error for `self` cases. + if self.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) { + self.bump(); + self.span_err(self.span, "cannot pass `self` by raw pointer"); + (SelfKind::Value(expect_ident(self)), self.last_span) + } else if self.look_ahead(1, |t| t.is_mutability()) && + self.look_ahead(2, |t| t.is_keyword(keywords::SelfValue)) { self.bump(); + self.bump(); + self.span_err(self.span, "cannot pass `self` by raw pointer"); + (SelfKind::Value(expect_ident(self)), self.last_span) + } else { + (SelfKind::Static, codemap::DUMMY_SP) } - // error case, making bogus self ident: - SelfKind::Value(special_idents::self_) } token::Ident(..) => { - if self.is_self_ident() { - let self_ident = self.expect_self_ident()?; - - // Determine whether this is the fully explicit form, `self: - // TYPE`. + if self.token.is_keyword(keywords::SelfValue) { + // self + // self: TYPE + let eself_ident = expect_ident(self); + let eself_ident_sp = self.last_span; if self.eat(&token::Colon) { - SelfKind::Explicit(self.parse_ty_sum()?, self_ident) + (SelfKind::Explicit(self.parse_ty_sum()?, eself_ident), eself_ident_sp) } else { - SelfKind::Value(self_ident) + (SelfKind::Value(eself_ident), eself_ident_sp) } - } else if self.token.is_mutability() && + } else if self.token.is_keyword(keywords::Mut) && self.look_ahead(1, |t| t.is_keyword(keywords::SelfValue)) { - mutbl_self = self.parse_mutability()?; - let self_ident = self.expect_self_ident()?; - - // Determine whether this is the fully explicit form, - // `self: TYPE`. 
+ // mut self + // mut self: TYPE + eself_mutbl = Mutability::Mutable; + self.bump(); + let eself_ident = expect_ident(self); + let eself_ident_sp = self.last_span; if self.eat(&token::Colon) { - SelfKind::Explicit(self.parse_ty_sum()?, self_ident) + (SelfKind::Explicit(self.parse_ty_sum()?, eself_ident), eself_ident_sp) } else { - SelfKind::Value(self_ident) + (SelfKind::Value(eself_ident), eself_ident_sp) } } else { - SelfKind::Static + (SelfKind::Static, codemap::DUMMY_SP) } } - _ => SelfKind::Static, + _ => (SelfKind::Static, codemap::DUMMY_SP) }; + let mut eself = codemap::respan(mk_sp(eself_lo, self.last_span.hi), eself); - let explicit_self_sp = mk_sp(self_ident_lo, self_ident_hi); - - // shared fall-through for the three cases below. borrowing prevents simply - // writing this as a closure - macro_rules! parse_remaining_arguments { - ($self_id:ident) => - { - // If we parsed a self type, expect a comma before the argument list. - match self.token { - token::Comma => { + // Parse the rest of the function parameter list. + let sep = SeqSep::trailing_allowed(token::Comma); + let fn_inputs = match eself.node { + SelfKind::Static => { + eself.span = codemap::DUMMY_SP; + self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn) + } + SelfKind::Value(..) | SelfKind::Region(..) | SelfKind::Explicit(..) => { + if self.check(&token::CloseDelim(token::Paren)) { + vec![Arg::from_self(eself.clone(), eself_ident_sp, eself_mutbl)] + } else if self.check(&token::Comma) { self.bump(); - let sep = SeqSep::trailing_allowed(token::Comma); - let mut fn_inputs = self.parse_seq_to_before_end( - &token::CloseDelim(token::Paren), - sep, - parse_arg_fn + let mut fn_inputs = vec![Arg::from_self(eself.clone(), eself_ident_sp, + eself_mutbl)]; + fn_inputs.append(&mut self.parse_seq_to_before_end( + &token::CloseDelim(token::Paren), sep, parse_arg_fn) ); - fn_inputs.insert(0, Arg::new_self(explicit_self_sp, mutbl_self, $self_id)); fn_inputs + } else { + return self.unexpected(); } - token::CloseDelim(token::Paren) => { - vec!(Arg::new_self(explicit_self_sp, mutbl_self, $self_id)) - } - _ => { - let token_str = self.this_token_to_string(); - return Err(self.fatal(&format!("expected `,` or `)`, found `{}`", - token_str))) - } - } - } - } - - let fn_inputs = match explicit_self { - SelfKind::Static => { - let sep = SeqSep::trailing_allowed(token::Comma); - self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn) } - SelfKind::Value(id) => parse_remaining_arguments!(id), - SelfKind::Region(_,_,id) => parse_remaining_arguments!(id), - SelfKind::Explicit(_,id) => parse_remaining_arguments!(id), }; - + // Parse closing paren and return type. 
self.expect(&token::CloseDelim(token::Paren))?; - - let hi = self.span.hi; - - let ret_ty = self.parse_ret_ty()?; - - let fn_decl = P(FnDecl { + Ok((eself, P(FnDecl { inputs: fn_inputs, - output: ret_ty, + output: self.parse_ret_ty()?, variadic: false - }); - - Ok((spanned(lo, hi, explicit_self), fn_decl)) + }))) } // parse the |arg, arg| header on a lambda @@ -4938,7 +4852,7 @@ impl<'a> Parser<'a> { let mut attrs = self.parse_outer_attributes()?; let lo = self.span.lo; - let vis = self.parse_visibility()?; + let vis = self.parse_visibility(true)?; let defaultness = self.parse_defaultness()?; let (name, node) = if self.eat_keyword(keywords::Type) { let name = self.parse_ident()?; @@ -4977,19 +4891,19 @@ impl<'a> Parser<'a> { Visibility::Inherited => (), _ => { let is_macro_rules: bool = match self.token { - token::Ident(sid, _) => sid.name == intern("macro_rules"), + token::Ident(sid) => sid.name == intern("macro_rules"), _ => false, }; if is_macro_rules { self.diagnostic().struct_span_err(span, "can't qualify macro_rules \ invocation with `pub`") - .fileline_help(span, "did you mean #[macro_export]?") + .help("did you mean #[macro_export]?") .emit(); } else { self.diagnostic().struct_span_err(span, "can't qualify macro \ invocation with `pub`") - .fileline_help(span, "try adjusting the macro to put `pub` \ - inside the invocation") + .help("try adjusting the macro to put `pub` \ + inside the invocation") .emit(); } } @@ -5010,7 +4924,7 @@ impl<'a> Parser<'a> { self.complain_if_pub_macro(&vis, last_span); let lo = self.span.lo; - let pth = self.parse_path(NoTypesAllowed)?; + let pth = self.parse_ident_into_path()?; self.expect(&token::Not)?; // eat a matched-delimiter token tree: @@ -5025,7 +4939,7 @@ impl<'a> Parser<'a> { if delim != token::Brace { self.expect(&token::Semi)? } - Ok((token::special_idents::invalid, vec![], ast::ImplItemKind::Macro(m))) + Ok((keywords::Invalid.ident(), vec![], ast::ImplItemKind::Macro(m))) } else { let (constness, unsafety, abi) = self.parse_fn_front_matter()?; let ident = self.parse_ident()?; @@ -5120,7 +5034,7 @@ impl<'a> Parser<'a> { self.expect(&token::OpenDelim(token::Brace))?; self.expect(&token::CloseDelim(token::Brace))?; - Ok((special_idents::invalid, + Ok((keywords::Invalid.ident(), ItemKind::DefaultImpl(unsafety, opt_trait.unwrap()), None)) } else { if opt_trait.is_some() { @@ -5136,7 +5050,7 @@ impl<'a> Parser<'a> { impl_items.push(self.parse_impl_item()?); } - Ok((special_idents::invalid, + Ok((keywords::Invalid.ident(), ItemKind::Impl(unsafety, polarity, generics, opt_trait, ty, impl_items), Some(attrs))) } @@ -5145,7 +5059,7 @@ impl<'a> Parser<'a> { /// Parse a::B fn parse_trait_ref(&mut self) -> PResult<'a, TraitRef> { Ok(ast::TraitRef { - path: self.parse_path(LifetimeAndTypesWithoutColons)?, + path: self.parse_path(PathStyle::Type)?, ref_id: ast::DUMMY_NODE_ID, }) } @@ -5250,8 +5164,25 @@ impl<'a> Parser<'a> { |p| { let attrs = p.parse_outer_attributes()?; let lo = p.span.lo; - let vis = p.parse_visibility()?; - let ty = p.parse_ty_sum()?; + let mut vis = p.parse_visibility(false)?; + let ty_is_interpolated = + p.token.is_interpolated() || p.look_ahead(1, |t| t.is_interpolated()); + let mut ty = p.parse_ty_sum()?; + + // Handle `pub(path) type`, in which `vis` will be `pub` and `ty` will be `(path)`. 
+ if vis == Visibility::Public && !ty_is_interpolated && + p.token != token::Comma && p.token != token::CloseDelim(token::Paren) { + ty = if let TyKind::Paren(ref path_ty) = ty.node { + if let TyKind::Path(None, ref path) = path_ty.node { + vis = Visibility::Restricted { path: P(path.clone()), id: path_ty.id }; + Some(p.parse_ty_sum()?) + } else { + None + } + } else { + None + }.unwrap_or(ty); + } Ok(StructField { span: mk_sp(lo, p.span.hi), vis: vis, @@ -5290,25 +5221,44 @@ impl<'a> Parser<'a> { /// Parse an element of a struct definition fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> { - let attrs = self.parse_outer_attributes()?; - - if self.eat_keyword(keywords::Pub) { - return self.parse_single_struct_field(Visibility::Public, attrs); - } - - return self.parse_single_struct_field(Visibility::Inherited, attrs); + let vis = self.parse_visibility(true)?; + self.parse_single_struct_field(vis, attrs) } - /// Parse visibility: PUB or nothing - fn parse_visibility(&mut self) -> PResult<'a, Visibility> { - if self.eat_keyword(keywords::Pub) { Ok(Visibility::Public) } - else { Ok(Visibility::Inherited) } + // If `allow_path` is false, just parse the `pub` in `pub(path)` (but still parse `pub(crate)`) + fn parse_visibility(&mut self, allow_path: bool) -> PResult<'a, Visibility> { + let pub_crate = |this: &mut Self| { + let span = this.last_span; + this.expect(&token::CloseDelim(token::Paren))?; + Ok(Visibility::Crate(span)) + }; + + if !self.eat_keyword(keywords::Pub) { + Ok(Visibility::Inherited) + } else if !allow_path { + // Look ahead to avoid eating the `(` in `pub(path)` while still parsing `pub(crate)` + if self.token == token::OpenDelim(token::Paren) && + self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) { + self.bump(); self.bump(); + pub_crate(self) + } else { + Ok(Visibility::Public) + } + } else if !self.eat(&token::OpenDelim(token::Paren)) { + Ok(Visibility::Public) + } else if self.eat_keyword(keywords::Crate) { + pub_crate(self) + } else { + let path = self.parse_path(PathStyle::Mod)?; + self.expect(&token::CloseDelim(token::Paren))?; + Ok(Visibility::Restricted { path: P(path), id: ast::DUMMY_NODE_ID }) + } } /// Parse defaultness: DEFAULT or nothing fn parse_defaultness(&mut self) -> PResult<'a, Defaultness> { - if self.eat_contextual_keyword(special_idents::DEFAULT) { + if self.eat_contextual_keyword(keywords::Default.ident()) { Ok(Defaultness::Default) } else { Ok(Defaultness::Final) @@ -5636,7 +5586,7 @@ impl<'a> Parser<'a> { }; Ok(self.mk_item(lo, last_span.hi, - special_idents::invalid, + keywords::Invalid.ident(), ItemKind::ForeignMod(m), visibility, attrs)) @@ -5765,7 +5715,7 @@ impl<'a> Parser<'a> { let lo = self.span.lo; - let visibility = self.parse_visibility()?; + let visibility = self.parse_visibility(true)?; if self.eat_keyword(keywords::Use) { // USE ITEM @@ -5775,7 +5725,7 @@ impl<'a> Parser<'a> { let last_span = self.last_span; let item = self.mk_item(lo, last_span.hi, - token::special_idents::invalid, + keywords::Invalid.ident(), item_, visibility, attrs); @@ -5853,7 +5803,7 @@ impl<'a> Parser<'a> { if self.eat_keyword(keywords::Mut) { let last_span = self.last_span; self.diagnostic().struct_span_err(last_span, "const globals cannot be mutable") - .fileline_help(last_span, "did you mean to declare a static?") + .help("did you mean to declare a static?") .emit(); } let (ident, item_, extra_attrs) = self.parse_item_const(None)?; @@ -6015,13 +5965,13 @@ impl<'a> Parser<'a> { fn parse_foreign_item(&mut self) -> PResult<'a, Option> { let 
attrs = self.parse_outer_attributes()?; let lo = self.span.lo; - let visibility = self.parse_visibility()?; + let visibility = self.parse_visibility(true)?; if self.check_keyword(keywords::Static) { // FOREIGN STATIC ITEM return Ok(Some(self.parse_item_foreign_static(visibility, lo, attrs)?)); } - if self.check_keyword(keywords::Fn) || self.check_keyword(keywords::Unsafe) { + if self.check_keyword(keywords::Fn) { // FOREIGN FUNCTION ITEM return Ok(Some(self.parse_item_foreign_fn(visibility, lo, attrs)?)); } @@ -6046,7 +5996,7 @@ impl<'a> Parser<'a> { ) -> PResult<'a, Option>> { if macros_allowed && !self.token.is_any_keyword() && self.look_ahead(1, |t| *t == token::Not) - && (self.look_ahead(2, |t| t.is_plain_ident()) + && (self.look_ahead(2, |t| t.is_ident()) || self.look_ahead(2, |t| *t == token::OpenDelim(token::Paren)) || self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))) { // MACRO INVOCATION ITEM @@ -6057,16 +6007,16 @@ impl<'a> Parser<'a> { let mac_lo = self.span.lo; // item macro. - let pth = self.parse_path(NoTypesAllowed)?; + let pth = self.parse_ident_into_path()?; self.expect(&token::Not)?; // a 'special' identifier (like what `macro_rules!` uses) // is optional. We should eventually unify invoc syntax // and remove this. - let id = if self.token.is_plain_ident() { + let id = if self.token.is_ident() { self.parse_ident()? } else { - token::special_idents::invalid // no special identifier + keywords::Invalid.ident() // no special identifier }; // eat a matched-delimiter token tree: let delim = self.expect_open_delim()?; @@ -6120,106 +6070,67 @@ impl<'a> Parser<'a> { self.parse_item_(attrs, true, false) } + fn parse_path_list_items(&mut self) -> PResult<'a, Vec> { + self.parse_unspanned_seq(&token::OpenDelim(token::Brace), + &token::CloseDelim(token::Brace), + SeqSep::trailing_allowed(token::Comma), |this| { + let lo = this.span.lo; + let node = if this.eat_keyword(keywords::SelfValue) { + let rename = this.parse_rename()?; + ast::PathListItemKind::Mod { id: ast::DUMMY_NODE_ID, rename: rename } + } else { + let ident = this.parse_ident()?; + let rename = this.parse_rename()?; + ast::PathListItemKind::Ident { name: ident, rename: rename, id: ast::DUMMY_NODE_ID } + }; + let hi = this.last_span.hi; + Ok(spanned(lo, hi, node)) + }) + } - /// Matches view_path : MOD? non_global_path as IDENT - /// | MOD? non_global_path MOD_SEP LBRACE RBRACE - /// | MOD? non_global_path MOD_SEP LBRACE ident_seq RBRACE - /// | MOD? non_global_path MOD_SEP STAR - /// | MOD? non_global_path + /// `::{` or `::*` + fn is_import_coupler(&mut self) -> bool { + self.check(&token::ModSep) && + self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace) || + *t == token::BinOp(token::Star)) + } + + /// Matches ViewPath: + /// MOD_SEP? non_global_path + /// MOD_SEP? non_global_path as IDENT + /// MOD_SEP? non_global_path MOD_SEP STAR + /// MOD_SEP? non_global_path MOD_SEP LBRACE item_seq RBRACE + /// MOD_SEP? LBRACE item_seq RBRACE fn parse_view_path(&mut self) -> PResult<'a, P> { let lo = self.span.lo; - - // Allow a leading :: because the paths are absolute either way. - // This occurs with "use $crate::..." in macros. 
- let is_global = self.eat(&token::ModSep); - - if self.check(&token::OpenDelim(token::Brace)) { - // use {foo,bar} - let idents = self.parse_unspanned_seq( - &token::OpenDelim(token::Brace), - &token::CloseDelim(token::Brace), - SeqSep::trailing_allowed(token::Comma), - |p| p.parse_path_list_item())?; - let path = ast::Path { + if self.check(&token::OpenDelim(token::Brace)) || self.is_import_coupler() { + // `{foo, bar}` or `::{foo, bar}` + let prefix = ast::Path { + global: self.eat(&token::ModSep), + segments: Vec::new(), span: mk_sp(lo, self.span.hi), - global: is_global, - segments: Vec::new() }; - return Ok(P(spanned(lo, self.span.hi, ViewPathList(path, idents)))); - } - - let first_ident = self.parse_ident()?; - let mut path = vec!(first_ident); - if let token::ModSep = self.token { - // foo::bar or foo::{a,b,c} or foo::* - while self.check(&token::ModSep) { + let items = self.parse_path_list_items()?; + Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items)))) + } else { + let prefix = self.parse_path(PathStyle::Mod)?; + if self.is_import_coupler() { + // `foo::bar::{a, b}` or `foo::bar::*` self.bump(); - - match self.token { - token::Ident(..) => { - let ident = self.parse_ident()?; - path.push(ident); - } - - // foo::bar::{a,b,c} - token::OpenDelim(token::Brace) => { - let idents = self.parse_unspanned_seq( - &token::OpenDelim(token::Brace), - &token::CloseDelim(token::Brace), - SeqSep::trailing_allowed(token::Comma), - |p| p.parse_path_list_item() - )?; - let path = ast::Path { - span: mk_sp(lo, self.span.hi), - global: is_global, - segments: path.into_iter().map(|identifier| { - ast::PathSegment { - identifier: identifier, - parameters: ast::PathParameters::none(), - } - }).collect() - }; - return Ok(P(spanned(lo, self.span.hi, ViewPathList(path, idents)))); - } - - // foo::bar::* - token::BinOp(token::Star) => { + if self.check(&token::BinOp(token::Star)) { self.bump(); - let path = ast::Path { - span: mk_sp(lo, self.span.hi), - global: is_global, - segments: path.into_iter().map(|identifier| { - ast::PathSegment { - identifier: identifier, - parameters: ast::PathParameters::none(), - } - }).collect() - }; - return Ok(P(spanned(lo, self.span.hi, ViewPathGlob(path)))); - } - - // fall-through for case foo::bar::; - token::Semi => { - self.span_err(self.span, "expected identifier or `{` or `*`, found `;`"); - } - - _ => break + Ok(P(spanned(lo, self.span.hi, ViewPathGlob(prefix)))) + } else { + let items = self.parse_path_list_items()?; + Ok(P(spanned(lo, self.span.hi, ViewPathList(prefix, items)))) } + } else { + // `foo::bar` or `foo::bar as baz` + let rename = self.parse_rename()?. 
+ unwrap_or(prefix.segments.last().unwrap().identifier); + Ok(P(spanned(lo, self.last_span.hi, ViewPathSimple(rename, prefix)))) } } - let mut rename_to = path[path.len() - 1]; - let path = ast::Path { - span: mk_sp(lo, self.last_span.hi), - global: is_global, - segments: path.into_iter().map(|identifier| { - ast::PathSegment { - identifier: identifier, - parameters: ast::PathParameters::none(), - } - }).collect() - }; - rename_to = self.parse_rename()?.unwrap_or(rename_to); - Ok(P(spanned(lo, self.last_span.hi, ViewPathSimple(rename_to, path)))) } fn parse_rename(&mut self) -> PResult<'a, Option> { diff --git a/src/libsyntax/parse/token.rs b/src/libsyntax/parse/token.rs index 16417ac004..47de32ed7d 100644 --- a/src/libsyntax/parse/token.rs +++ b/src/libsyntax/parse/token.rs @@ -11,7 +11,6 @@ pub use self::BinOpToken::*; pub use self::Nonterminal::*; pub use self::DelimToken::*; -pub use self::IdentStyle::*; pub use self::Lit::*; pub use self::Token::*; @@ -26,7 +25,6 @@ use std::fmt; use std::ops::Deref; use std::rc::Rc; -#[allow(non_camel_case_types)] #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] pub enum BinOpToken { Plus, @@ -52,13 +50,6 @@ pub enum DelimToken { Brace, } -#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] -pub enum IdentStyle { - /// `::` follows the identifier with no whitespace in-between. - ModName, - Plain, -} - #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)] pub enum SpecialMacroVar { /// `$crate` will be filled in with the name of the crate a macro was @@ -99,7 +90,6 @@ impl Lit { } } -#[allow(non_camel_case_types)] #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug)] pub enum Token { /* Expression-operator symbols. */ @@ -141,7 +131,7 @@ pub enum Token { Literal(Lit, Option), /* Name components */ - Ident(ast::Ident, IdentStyle), + Ident(ast::Ident), Underscore, Lifetime(ast::Ident), @@ -151,11 +141,11 @@ pub enum Token { /// Doc comment DocComment(ast::Name), // In left-hand-sides of MBE macros: - /// Parse a nonterminal (name to bind, name of NT, styles of their idents) - MatchNt(ast::Ident, ast::Ident, IdentStyle, IdentStyle), + /// Parse a nonterminal (name to bind, name of NT) + MatchNt(ast::Ident, ast::Ident), // In right-hand-sides of MBE macros: /// A syntactic variable that will be filled in by macro expansion. - SubstNt(ast::Ident, IdentStyle), + SubstNt(ast::Ident), /// A macro variable with special meaning. SpecialVarNt(SpecialMacroVar), @@ -185,7 +175,7 @@ impl Token { pub fn can_begin_expr(&self) -> bool { match *self { OpenDelim(_) => true, - Ident(_, _) => true, + Ident(..) => true, Underscore => true, Tilde => true, Literal(_, _) => true, @@ -218,7 +208,7 @@ impl Token { /// Returns `true` if the token is an identifier. pub fn is_ident(&self) -> bool { match *self { - Ident(_, _) => true, + Ident(..) => true, _ => false, } } @@ -239,16 +229,6 @@ impl Token { } } - /// Returns `true` if the token is a path that is not followed by a `::` - /// token. - #[allow(non_upper_case_globals)] - pub fn is_plain_ident(&self) -> bool { - match *self { - Ident(_, Plain) => true, - _ => false, - } - } - /// Returns `true` if the token is a lifetime. 
pub fn is_lifetime(&self) -> bool { match *self { @@ -263,6 +243,11 @@ impl Token { self.is_keyword(keywords::Const) } + pub fn is_path_start(&self) -> bool { + self == &ModSep || self == &Lt || self.is_path() || + self.is_path_segment_keyword() || self.is_ident() && !self.is_any_keyword() + } + /// Maps a token to its corresponding binary operator. pub fn to_binop(&self) -> Option { match *self { @@ -289,77 +274,41 @@ impl Token { } /// Returns `true` if the token is a given keyword, `kw`. - #[allow(non_upper_case_globals)] pub fn is_keyword(&self, kw: keywords::Keyword) -> bool { match *self { - Ident(sid, Plain) => kw.to_name() == sid.name, - _ => false, + Ident(id) => id.name == kw.name(), + _ => false, } } - pub fn is_keyword_allow_following_colon(&self, kw: keywords::Keyword) -> bool { + pub fn is_path_segment_keyword(&self) -> bool { match *self { - Ident(sid, _) => { kw.to_name() == sid.name } - _ => { false } + Ident(id) => id.name == keywords::Super.name() || + id.name == keywords::SelfValue.name() || + id.name == keywords::SelfType.name(), + _ => false, } } - /// Returns `true` if the token is either a special identifier, or a strict - /// or reserved keyword. - #[allow(non_upper_case_globals)] + /// Returns `true` if the token is either a strict or reserved keyword. pub fn is_any_keyword(&self) -> bool { - match *self { - Ident(sid, Plain) => { - let n = sid.name; - - n == SELF_KEYWORD_NAME - || n == STATIC_KEYWORD_NAME - || n == SUPER_KEYWORD_NAME - || n == SELF_TYPE_KEYWORD_NAME - || STRICT_KEYWORD_START <= n - && n <= RESERVED_KEYWORD_FINAL - }, - _ => false - } + self.is_strict_keyword() || self.is_reserved_keyword() } - /// Returns `true` if the token may not appear as an identifier. - #[allow(non_upper_case_globals)] + /// Returns `true` if the token is a strict keyword. pub fn is_strict_keyword(&self) -> bool { match *self { - Ident(sid, Plain) => { - let n = sid.name; - - n == SELF_KEYWORD_NAME - || n == STATIC_KEYWORD_NAME - || n == SUPER_KEYWORD_NAME - || n == SELF_TYPE_KEYWORD_NAME - || STRICT_KEYWORD_START <= n - && n <= STRICT_KEYWORD_FINAL - }, - Ident(sid, ModName) => { - let n = sid.name; - - n != SELF_KEYWORD_NAME - && n != SUPER_KEYWORD_NAME - && STRICT_KEYWORD_START <= n - && n <= STRICT_KEYWORD_FINAL - } + Ident(id) => id.name >= keywords::As.name() && + id.name <= keywords::While.name(), _ => false, } } - /// Returns `true` if the token is a keyword that has been reserved for - /// possible future use. - #[allow(non_upper_case_globals)] + /// Returns `true` if the token is a keyword reserved for possible future use. pub fn is_reserved_keyword(&self) -> bool { match *self { - Ident(sid, Plain) => { - let n = sid.name; - - RESERVED_KEYWORD_START <= n - && n <= RESERVED_KEYWORD_FINAL - }, + Ident(id) => id.name >= keywords::Abstract.name() && + id.name <= keywords::Yield.name(), _ => false, } } @@ -369,7 +318,7 @@ impl Token { /// See `styntax::ext::mtwt`. pub fn mtwt_eq(&self, other : &Token) -> bool { match (self, other) { - (&Ident(id1,_), &Ident(id2,_)) | (&Lifetime(id1), &Lifetime(id2)) => + (&Ident(id1), &Ident(id2)) | (&Lifetime(id1), &Lifetime(id2)) => mtwt::resolve(id1) == mtwt::resolve(id2), _ => *self == *other } @@ -385,7 +334,7 @@ pub enum Nonterminal { NtPat(P), NtExpr(P), NtTy(P), - NtIdent(Box, IdentStyle), + NtIdent(Box), /// Stuff inside brackets for attributes NtMeta(P), NtPath(Box), @@ -422,191 +371,104 @@ impl fmt::Debug for Nonterminal { } } - -// Get the first "argument" -macro_rules! 
first { - ( $first:expr, $( $remainder:expr, )* ) => ( $first ) -} - -// Get the last "argument" (has to be done recursively to avoid phoney local ambiguity error) -macro_rules! last { - ( $first:expr, $( $remainder:expr, )+ ) => ( last!( $( $remainder, )+ ) ); - ( $first:expr, ) => ( $first ) -} - // In this macro, there is the requirement that the name (the number) must be monotonically // increasing by one in the special identifiers, starting at 0; the same holds for the keywords, -// except starting from the next number instead of zero, and with the additional exception that -// special identifiers are *also* allowed (they are deduplicated in the important place, the -// interner), an exception which is demonstrated by "static" and "self". -macro_rules! declare_special_idents_and_keywords {( - // So now, in these rules, why is each definition parenthesised? - // Answer: otherwise we get a spurious local ambiguity bug on the "}" - pub mod special_idents { - $( ($si_name:expr, $si_static:ident, $si_str:expr); )* - } - - pub mod keywords { - 'strict: - $( ($sk_name:expr, $sk_variant:ident, $sk_str:expr); )* - 'reserved: - $( ($rk_name:expr, $rk_variant:ident, $rk_str:expr); )* - } +// except starting from the next number instead of zero. +macro_rules! declare_keywords {( + $( ($index: expr, $konst: ident, $string: expr) )* ) => { - const STRICT_KEYWORD_START: ast::Name = first!($( ast::Name($sk_name), )*); - const STRICT_KEYWORD_FINAL: ast::Name = last!($( ast::Name($sk_name), )*); - const RESERVED_KEYWORD_START: ast::Name = first!($( ast::Name($rk_name), )*); - const RESERVED_KEYWORD_FINAL: ast::Name = last!($( ast::Name($rk_name), )*); - - pub mod special_idents { - use ast; - $( - #[allow(non_upper_case_globals)] - pub const $si_static: ast::Ident = ast::Ident { - name: ast::Name($si_name), - ctxt: ast::EMPTY_CTXT, - }; - )* - } - - pub mod special_names { - use ast; - $( - #[allow(non_upper_case_globals)] - pub const $si_static: ast::Name = ast::Name($si_name); - )* - } - - /// All the valid words that have meaning in the Rust language. - /// - /// Rust keywords are either 'strict' or 'reserved'. Strict keywords may not - /// appear as identifiers at all. Reserved keywords are not used anywhere in - /// the language and may not appear as identifiers. 
pub mod keywords { - pub use self::Keyword::*; use ast; - - #[derive(Copy, Clone, PartialEq, Eq)] - pub enum Keyword { - $( $sk_variant, )* - $( $rk_variant, )* + #[derive(Clone, Copy, PartialEq, Eq)] + pub struct Keyword { + ident: ast::Ident, } - impl Keyword { - pub fn to_name(&self) -> ast::Name { - match *self { - $( $sk_variant => ast::Name($sk_name), )* - $( $rk_variant => ast::Name($rk_name), )* - } - } + #[inline] pub fn ident(self) -> ast::Ident { self.ident } + #[inline] pub fn name(self) -> ast::Name { self.ident.name } } + $( + #[allow(non_upper_case_globals)] + pub const $konst: Keyword = Keyword { + ident: ast::Ident::with_empty_ctxt(ast::Name($index)) + }; + )* } fn mk_fresh_ident_interner() -> IdentInterner { - let mut init_vec = Vec::new(); - $(init_vec.push($si_str);)* - $(init_vec.push($sk_str);)* - $(init_vec.push($rk_str);)* - interner::StrInterner::prefill(&init_vec[..]) + interner::StrInterner::prefill(&[$($string,)*]) } }} -// If the special idents get renumbered, remember to modify these two as appropriate -pub const SELF_KEYWORD_NAME: ast::Name = ast::Name(SELF_KEYWORD_NAME_NUM); -const STATIC_KEYWORD_NAME: ast::Name = ast::Name(STATIC_KEYWORD_NAME_NUM); -pub const SUPER_KEYWORD_NAME: ast::Name = ast::Name(SUPER_KEYWORD_NAME_NUM); -const SELF_TYPE_KEYWORD_NAME: ast::Name = ast::Name(SELF_TYPE_KEYWORD_NAME_NUM); - -pub const SELF_KEYWORD_NAME_NUM: u32 = 1; -const STATIC_KEYWORD_NAME_NUM: u32 = 2; -const SUPER_KEYWORD_NAME_NUM: u32 = 3; -const SELF_TYPE_KEYWORD_NAME_NUM: u32 = 10; - // NB: leaving holes in the ident table is bad! a different ident will get // interned with the id from the hole, but it will be between the min and max // of the reserved words, and thus tagged as "reserved". - -declare_special_idents_and_keywords! 
{ - pub mod special_idents { - // These ones are statics - (0, invalid, ""); - (super::SELF_KEYWORD_NAME_NUM, self_, "self"); - (super::STATIC_KEYWORD_NAME_NUM, statik, "static"); - (super::SUPER_KEYWORD_NAME_NUM, super_, "super"); - (4, static_lifetime, "'static"); - - // for matcher NTs - (5, tt, "tt"); - (6, matchers, "matchers"); - - // outside of libsyntax - (7, clownshoe_abi, "__rust_abi"); - (8, opaque, ""); - (9, __unused1, "<__unused1>"); - (super::SELF_TYPE_KEYWORD_NAME_NUM, type_self, "Self"); - (11, prelude_import, "prelude_import"); - (12, DEFAULT, "default"); - } - - pub mod keywords { - // These ones are variants of the Keyword enum - - 'strict: - (13, As, "as"); - (14, Break, "break"); - (15, Crate, "crate"); - (16, Else, "else"); - (17, Enum, "enum"); - (18, Extern, "extern"); - (19, False, "false"); - (20, Fn, "fn"); - (21, For, "for"); - (22, If, "if"); - (23, Impl, "impl"); - (24, In, "in"); - (25, Let, "let"); - (26, Loop, "loop"); - (27, Match, "match"); - (28, Mod, "mod"); - (29, Move, "move"); - (30, Mut, "mut"); - (31, Pub, "pub"); - (32, Ref, "ref"); - (33, Return, "return"); - // Static and Self are also special idents (prefill de-dupes) - (super::STATIC_KEYWORD_NAME_NUM, Static, "static"); - (super::SELF_KEYWORD_NAME_NUM, SelfValue, "self"); - (super::SELF_TYPE_KEYWORD_NAME_NUM, SelfType, "Self"); - (34, Struct, "struct"); - (super::SUPER_KEYWORD_NAME_NUM, Super, "super"); - (35, True, "true"); - (36, Trait, "trait"); - (37, Type, "type"); - (38, Unsafe, "unsafe"); - (39, Use, "use"); - (40, While, "while"); - (41, Continue, "continue"); - (42, Box, "box"); - (43, Const, "const"); - (44, Where, "where"); - 'reserved: - (45, Virtual, "virtual"); - (46, Proc, "proc"); - (47, Alignof, "alignof"); - (48, Become, "become"); - (49, Offsetof, "offsetof"); - (50, Priv, "priv"); - (51, Pure, "pure"); - (52, Sizeof, "sizeof"); - (53, Typeof, "typeof"); - (54, Unsized, "unsized"); - (55, Yield, "yield"); - (56, Do, "do"); - (57, Abstract, "abstract"); - (58, Final, "final"); - (59, Override, "override"); - (60, Macro, "macro"); - } +// After modifying this list adjust `is_strict_keyword`/`is_reserved_keyword`, +// this should be rarely necessary though if the keywords are kept in alphabetic order. +declare_keywords! { + // Invalid identifier + (0, Invalid, "") + + // Strict keywords used in the language. + (1, As, "as") + (2, Box, "box") + (3, Break, "break") + (4, Const, "const") + (5, Continue, "continue") + (6, Crate, "crate") + (7, Else, "else") + (8, Enum, "enum") + (9, Extern, "extern") + (10, False, "false") + (11, Fn, "fn") + (12, For, "for") + (13, If, "if") + (14, Impl, "impl") + (15, In, "in") + (16, Let, "let") + (17, Loop, "loop") + (18, Match, "match") + (19, Mod, "mod") + (20, Move, "move") + (21, Mut, "mut") + (22, Pub, "pub") + (23, Ref, "ref") + (24, Return, "return") + (25, SelfValue, "self") + (26, SelfType, "Self") + (27, Static, "static") + (28, Struct, "struct") + (29, Super, "super") + (30, Trait, "trait") + (31, True, "true") + (32, Type, "type") + (33, Unsafe, "unsafe") + (34, Use, "use") + (35, Where, "where") + (36, While, "while") + + // Keywords reserved for future use. 
+ (37, Abstract, "abstract") + (38, Alignof, "alignof") + (39, Become, "become") + (40, Do, "do") + (41, Final, "final") + (42, Macro, "macro") + (43, Offsetof, "offsetof") + (44, Override, "override") + (45, Priv, "priv") + (46, Proc, "proc") + (47, Pure, "pure") + (48, Sizeof, "sizeof") + (49, Typeof, "typeof") + (50, Unsized, "unsized") + (51, Virtual, "virtual") + (52, Yield, "yield") + + // Weak keywords, have special meaning only in specific contexts. + (53, Default, "default") + (54, StaticLifetime, "'static") + (55, Union, "union") } // looks like we can get rid of this completely... @@ -704,6 +566,28 @@ impl<'a> PartialEq<InternedString> for &'a str { } } +impl PartialEq<str> for InternedString { + #[inline(always)] + fn eq(&self, other: &str) -> bool { + PartialEq::eq(&self.string[..], other) + } + #[inline(always)] + fn ne(&self, other: &str) -> bool { + PartialEq::ne(&self.string[..], other) + } +} + +impl PartialEq<InternedString> for str { + #[inline(always)] + fn eq(&self, other: &InternedString) -> bool { + PartialEq::eq(self, &other.string[..]) + } + #[inline(always)] + fn ne(&self, other: &InternedString) -> bool { + PartialEq::ne(self, &other.string[..]) + } +} + impl Decodable for InternedString { fn decode<D: Decoder>(d: &mut D) -> Result<InternedString, D::Error> { Ok(intern(d.read_str()?.as_ref()).as_str()) @@ -779,6 +663,6 @@ mod tests { assert!(Gt.mtwt_eq(&Gt)); let a = str_to_ident("bac"); let a1 = mark_ident(a,92); - assert!(Ident(a, ModName).mtwt_eq(&Ident(a1, Plain))); + assert!(Ident(a).mtwt_eq(&Ident(a1))); } } diff --git a/src/libsyntax/print/pp.rs b/src/libsyntax/print/pp.rs index c381a3a843..4a92ad8ddb 100644 --- a/src/libsyntax/print/pp.rs +++ b/src/libsyntax/print/pp.rs @@ -61,8 +61,9 @@ //! line (which it can't) and so naturally place the content on its own line to //! avoid combining it with other lines and making matters even worse.
+use std::collections::VecDeque; +use std::fmt; use std::io; -use std::string; #[derive(Clone, Copy, PartialEq)] pub enum Breaks { @@ -112,35 +113,30 @@ impl Token { } } -pub fn tok_str(token: &Token) -> String { - match *token { - Token::String(ref s, len) => format!("STR({},{})", s, len), - Token::Break(_) => "BREAK".to_string(), - Token::Begin(_) => "BEGIN".to_string(), - Token::End => "END".to_string(), - Token::Eof => "EOF".to_string() +impl fmt::Display for Token { + fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { + match *self { + Token::String(ref s, len) => write!(f, "STR({},{})", s, len), + Token::Break(_) => f.write_str("BREAK"), + Token::Begin(_) => f.write_str("BEGIN"), + Token::End => f.write_str("END"), + Token::Eof => f.write_str("EOF"), + } } } -pub fn buf_str(toks: &[Token], - szs: &[isize], - left: usize, - right: usize, - lim: usize) - -> String { +fn buf_str(toks: &[Token], szs: &[isize], left: usize, right: usize, lim: usize) -> String { let n = toks.len(); assert_eq!(n, szs.len()); let mut i = left; let mut l = lim; - let mut s = string::String::from("["); + let mut s = String::from("["); while i != right && l != 0 { l -= 1; if i != left { s.push_str(", "); } - s.push_str(&format!("{}={}", - szs[i], - tok_str(&toks[i]))); + s.push_str(&format!("{}={}", szs[i], &toks[i])); i += 1; i %= n; } @@ -169,7 +165,7 @@ pub fn mk_printer<'a>(out: Box, linewidth: usize) -> Printer<'a> { debug!("mk_printer {}", linewidth); let token = vec![Token::Eof; n]; let size = vec![0; n]; - let scan_stack = vec![0; n]; + let scan_stack = VecDeque::with_capacity(n); Printer { out: out, buf_len: n, @@ -182,9 +178,6 @@ pub fn mk_printer<'a>(out: Box, linewidth: usize) -> Printer<'a> { left_total: 0, right_total: 0, scan_stack: scan_stack, - scan_stack_empty: true, - top: 0, - bottom: 0, print_stack: Vec::new(), pending_indentation: 0 } @@ -246,9 +239,8 @@ pub fn mk_printer<'a>(out: Box, linewidth: usize) -> Printer<'a> { /// approximation for purposes of line breaking). /// /// The "input side" of the printer is managed as an abstract process called -/// SCAN, which uses 'scan_stack', 'scan_stack_empty', 'top' and 'bottom', to -/// manage calculating 'size'. SCAN is, in other words, the process of -/// calculating 'size' entries. +/// SCAN, which uses 'scan_stack', to manage calculating 'size'. SCAN is, in +/// other words, the process of calculating 'size' entries. /// /// The "output side" of the printer is managed by an abstract process called /// PRINT, which uses 'print_stack', 'margin' and 'space' to figure out what to @@ -291,13 +283,7 @@ pub struct Printer<'a> { /// Begin (if there is any) on top of it. Stuff is flushed off the /// bottom as it becomes irrelevant due to the primary ring-buffer /// advancing. 
- scan_stack: Vec<usize> , - /// Top==bottom disambiguator - scan_stack_empty: bool, - /// Index of top of scan_stack - top: usize, - /// Index of bottom of scan_stack - bottom: usize, + scan_stack: VecDeque<usize> , /// Stack of blocks-in-progress being flushed by print print_stack: Vec<PrintStackElem> , /// Buffered indentation to avoid writing trailing whitespace @@ -316,7 +302,7 @@ impl<'a> Printer<'a> { debug!("pp Vec<{},{}>", self.left, self.right); match token { Token::Eof => { - if !self.scan_stack_empty { + if !self.scan_stack.is_empty() { self.check_stack(0); self.advance_left()?; } @@ -324,7 +310,7 @@ impl<'a> Printer<'a> { Ok(()) } Token::Begin(b) => { - if self.scan_stack_empty { + if self.scan_stack.is_empty() { self.left_total = 1; self.right_total = 1; self.left = 0; @@ -339,7 +325,7 @@ impl<'a> Printer<'a> { Ok(()) } Token::End => { - if self.scan_stack_empty { + if self.scan_stack.is_empty() { debug!("pp End/print Vec<{},{}>", self.left, self.right); self.print(token, 0) } else { @@ -353,7 +339,7 @@ impl<'a> Printer<'a> { } } Token::Break(b) => { - if self.scan_stack_empty { + if self.scan_stack.is_empty() { self.left_total = 1; self.right_total = 1; self.left = 0; @@ -370,7 +356,7 @@ impl<'a> Printer<'a> { Ok(()) } Token::String(s, len) => { - if self.scan_stack_empty { + if self.scan_stack.is_empty() { debug!("pp String('{}')/print Vec<{},{}>", s, self.left, self.right); self.print(Token::String(s, len), len) @@ -392,12 +378,10 @@ impl<'a> Printer<'a> { if self.right_total - self.left_total > self.space { debug!("scan window is {}, longer than space on line ({})", self.right_total - self.left_total, self.space); - if !self.scan_stack_empty { - if self.left == self.scan_stack[self.bottom] { - debug!("setting {} to infinity and popping", self.left); - let scanned = self.scan_pop_bottom(); - self.size[scanned] = SIZE_INFINITY; - } + if Some(&self.left) == self.scan_stack.back() { + debug!("setting {} to infinity and popping", self.left); + let scanned = self.scan_pop_bottom(); + self.size[scanned] = SIZE_INFINITY; + } self.advance_left()?; if self.left != self.right { @@ -408,43 +392,21 @@ impl<'a> Printer<'a> { } pub fn scan_push(&mut self, x: usize) { debug!("scan_push {}", x); - if self.scan_stack_empty { - self.scan_stack_empty = false; - } else { - self.top += 1; - self.top %= self.buf_len; - assert!((self.top != self.bottom)); - } - self.scan_stack[self.top] = x; + self.scan_stack.push_front(x); } pub fn scan_pop(&mut self) -> usize { - assert!((!self.scan_stack_empty)); - let x = self.scan_stack[self.top]; - if self.top == self.bottom { - self.scan_stack_empty = true; - } else { - self.top += self.buf_len - 1; self.top %= self.buf_len; - } - return x; + self.scan_stack.pop_front().unwrap() } pub fn scan_top(&mut self) -> usize { - assert!((!self.scan_stack_empty)); - return self.scan_stack[self.top]; + *self.scan_stack.front().unwrap() } pub fn scan_pop_bottom(&mut self) -> usize { - assert!((!self.scan_stack_empty)); - let x = self.scan_stack[self.bottom]; - if self.top == self.bottom { - self.scan_stack_empty = true; - } else { - self.bottom += 1; self.bottom %= self.buf_len; - } - return x; + self.scan_stack.pop_back().unwrap() } pub fn advance_right(&mut self) { self.right += 1; self.right %= self.buf_len; - assert!((self.right != self.left)); + assert!(self.right != self.left); } pub fn advance_left(&mut self) -> io::Result<()> { debug!("advance_left Vec<{},{}>, sizeof({})={}", self.left, self.right, @@ -481,7 +443,7 @@ impl<'a> Printer<'a> { Ok(()) } pub fn check_stack(&mut self, k: isize) {
- if !self.scan_stack_empty { + if !self.scan_stack.is_empty() { let x = self.scan_top(); match self.token[x] { Token::Begin(_) => { @@ -512,19 +474,16 @@ impl<'a> Printer<'a> { let ret = write!(self.out, "\n"); self.pending_indentation = 0; self.indent(amount); - return ret; + ret } pub fn indent(&mut self, amount: isize) { debug!("INDENT {}", amount); self.pending_indentation += amount; } pub fn get_top(&mut self) -> PrintStackElem { - let print_stack = &mut self.print_stack; - let n = print_stack.len(); - if n != 0 { - (*print_stack)[n - 1] - } else { - PrintStackElem { + match self.print_stack.last() { + Some(el) => *el, + None => PrintStackElem { offset: 0, pbreak: PrintStackBreak::Broken(Breaks::Inconsistent) } @@ -538,7 +497,7 @@ impl<'a> Printer<'a> { write!(self.out, "{}", s) } pub fn print(&mut self, token: Token, l: isize) -> io::Result<()> { - debug!("print {} {} (remaining line space={})", tok_str(&token), l, + debug!("print {} {} (remaining line space={})", token, l, self.space); debug!("{}", buf_str(&self.token, &self.size, @@ -566,7 +525,7 @@ impl<'a> Printer<'a> { Token::End => { debug!("print End -> pop End"); let print_stack = &mut self.print_stack; - assert!((!print_stack.is_empty())); + assert!(!print_stack.is_empty()); print_stack.pop().unwrap(); Ok(()) } @@ -603,12 +562,12 @@ impl<'a> Printer<'a> { } } } - Token::String(s, len) => { + Token::String(ref s, len) => { debug!("print String({})", s); assert_eq!(l, len); // assert!(l <= space); self.space -= len; - self.print_str(&s[..]) + self.print_str(s) } Token::Eof => { // Eof should never get here. @@ -652,15 +611,15 @@ pub fn eof(p: &mut Printer) -> io::Result<()> { } pub fn word(p: &mut Printer, wrd: &str) -> io::Result<()> { - p.pretty_print(Token::String(/* bad */ wrd.to_string(), wrd.len() as isize)) + p.pretty_print(Token::String(wrd.to_string(), wrd.len() as isize)) } pub fn huge_word(p: &mut Printer, wrd: &str) -> io::Result<()> { - p.pretty_print(Token::String(/* bad */ wrd.to_string(), SIZE_INFINITY)) + p.pretty_print(Token::String(wrd.to_string(), SIZE_INFINITY)) } pub fn zero_word(p: &mut Printer, wrd: &str) -> io::Result<()> { - p.pretty_print(Token::String(/* bad */ wrd.to_string(), 0)) + p.pretty_print(Token::String(wrd.to_string(), 0)) } pub fn spaces(p: &mut Printer, n: usize) -> io::Result<()> { diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs index fcd83b4104..ebb4927d69 100644 --- a/src/libsyntax/print/pprust.rs +++ b/src/libsyntax/print/pprust.rs @@ -20,7 +20,7 @@ use attr; use attr::{AttrMetaMethods, AttributeMethods}; use codemap::{self, CodeMap, BytePos}; use errors; -use parse::token::{self, BinOpToken, Token, InternedString}; +use parse::token::{self, keywords, BinOpToken, Token, InternedString}; use parse::lexer::comments; use parse; use print::pp::{self, break_offset, word, space, zerobreak, hardbreak}; @@ -270,14 +270,14 @@ pub fn token_to_string(tok: &Token) -> String { } /* Name components */ - token::Ident(s, _) => s.to_string(), + token::Ident(s) => s.to_string(), token::Lifetime(s) => s.to_string(), token::Underscore => "_".to_string(), /* Other */ token::DocComment(s) => s.to_string(), - token::SubstNt(s, _) => format!("${}", s), - token::MatchNt(s, t, _, _) => format!("${}:{}", s, t), + token::SubstNt(s) => format!("${}", s), + token::MatchNt(s, t) => format!("${}:{}", s, t), token::Eof => "".to_string(), token::Whitespace => " ".to_string(), token::Comment => "/* */".to_string(), @@ -294,7 +294,7 @@ pub fn token_to_string(tok: &Token) -> String { 
token::NtBlock(ref e) => block_to_string(&e), token::NtStmt(ref e) => stmt_to_string(&e), token::NtPat(ref e) => pat_to_string(&e), - token::NtIdent(ref e, _) => ident_to_string(e.node), + token::NtIdent(ref e) => ident_to_string(e.node), token::NtTT(ref e) => tt_to_string(&e), token::NtArm(ref e) => arm_to_string(&e), token::NtImplItem(ref e) => impl_item_to_string(&e), @@ -435,7 +435,7 @@ pub fn mac_to_string(arg: &ast::Mac) -> String { pub fn visibility_qualified(vis: &ast::Visibility, s: &str) -> String { match *vis { ast::Visibility::Public => format!("pub {}", s), - ast::Visibility::Crate => format!("pub(crate) {}", s), + ast::Visibility::Crate(_) => format!("pub(crate) {}", s), ast::Visibility::Restricted { ref path, .. } => format!("pub({}) {}", path, s), ast::Visibility::Inherited => s.to_string() } @@ -995,7 +995,7 @@ impl<'a> State<'a> { ast::TyKind::BareFn(ref f) => { let generics = ast::Generics { lifetimes: f.lifetimes.clone(), - ty_params: P::empty(), + ty_params: P::new(), where_clause: ast::WhereClause { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), @@ -1386,7 +1386,7 @@ impl<'a> State<'a> { pub fn print_visibility(&mut self, vis: &ast::Visibility) -> io::Result<()> { match *vis { ast::Visibility::Public => self.word_nbsp("pub"), - ast::Visibility::Crate => self.word_nbsp("pub(crate)"), + ast::Visibility::Crate(_) => self.word_nbsp("pub(crate)"), ast::Visibility::Restricted { ref path, .. } => self.word_nbsp(&format!("pub({})", path)), ast::Visibility::Inherited => Ok(()) @@ -1488,20 +1488,11 @@ impl<'a> State<'a> { pub fn print_tts(&mut self, tts: &[ast::TokenTree]) -> io::Result<()> { self.ibox(0)?; - let mut suppress_space = false; for (i, tt) in tts.iter().enumerate() { - if i != 0 && !suppress_space { + if i != 0 { space(&mut self.s)?; } self.print_tt(tt)?; - // There should be no space between the module name and the following `::` in paths, - // otherwise imported macros get re-parsed from crate metadata incorrectly (#20701) - suppress_space = match *tt { - TokenTree::Token(_, token::Ident(_, token::ModName)) | - TokenTree::Token(_, token::MatchNt(_, _, _, token::ModName)) | - TokenTree::Token(_, token::SubstNt(_, token::ModName)) => true, - _ => false - } } self.end() } @@ -1531,7 +1522,7 @@ impl<'a> State<'a> { m.abi, Some(ident), &m.generics, - Some(&m.explicit_self.node), + None, vis) } @@ -2086,7 +2077,7 @@ impl<'a> State<'a> { } self.bclose_(expr.span, INDENT_UNIT)?; } - ast::ExprKind::Closure(capture_clause, ref decl, ref body) => { + ast::ExprKind::Closure(capture_clause, ref decl, ref body, _) => { self.print_capture_clause(capture_clause)?; self.print_fn_block_args(&decl)?; @@ -2665,36 +2656,9 @@ impl<'a> State<'a> { } pub fn print_fn_args(&mut self, decl: &ast::FnDecl, - opt_explicit_self: Option<&ast::SelfKind>, + _: Option<&ast::SelfKind>, is_closure: bool) -> io::Result<()> { - // It is unfortunate to duplicate the commasep logic, but we want the - // self type and the args all in the same box. - self.rbox(0, Inconsistent)?; - let mut first = true; - if let Some(explicit_self) = opt_explicit_self { - let m = match *explicit_self { - ast::SelfKind::Static => ast::Mutability::Immutable, - _ => match decl.inputs[0].pat.node { - PatKind::Ident(ast::BindingMode::ByValue(m), _, _) => m, - _ => ast::Mutability::Immutable - } - }; - first = !self.print_explicit_self(explicit_self, m)?; - } - - // HACK(eddyb) ignore the separately printed self argument. - let args = if first { - &decl.inputs[..] - } else { - &decl.inputs[1..] 
- }; - - for arg in args { - if first { first = false; } else { self.word_space(",")?; } - self.print_arg(arg, is_closure)?; - } - - self.end() + self.commasep(Inconsistent, &decl.inputs, |s, arg| s.print_arg(arg, is_closure)) } pub fn print_fn_args_and_ret(&mut self, decl: &ast::FnDecl, @@ -2965,19 +2929,24 @@ impl<'a> State<'a> { match input.ty.node { ast::TyKind::Infer if is_closure => self.print_pat(&input.pat)?, _ => { - match input.pat.node { - PatKind::Ident(_, ref path1, _) if - path1.node.name == - parse::token::special_idents::invalid.name => { - // Do nothing. + let (mutbl, invalid) = match input.pat.node { + PatKind::Ident(ast::BindingMode::ByValue(mutbl), ident, _) | + PatKind::Ident(ast::BindingMode::ByRef(mutbl), ident, _) => { + (mutbl, ident.node.name == keywords::Invalid.name()) } - _ => { + _ => (ast::Mutability::Immutable, false) + }; + + if let Some(eself) = input.to_self() { + self.print_explicit_self(&eself.node, mutbl)?; + } else { + if !invalid { self.print_pat(&input.pat)?; word(&mut self.s, ":")?; space(&mut self.s)?; } + self.print_type(&input.ty)?; } - self.print_type(&input.ty)?; } } self.end() @@ -3021,7 +2990,7 @@ impl<'a> State<'a> { } let generics = ast::Generics { lifetimes: Vec::new(), - ty_params: P::empty(), + ty_params: P::new(), where_clause: ast::WhereClause { id: ast::DUMMY_NODE_ID, predicates: Vec::new(), diff --git a/src/libsyntax/ptr.rs b/src/libsyntax/ptr.rs index fda9741d35..9d04cb75da 100644 --- a/src/libsyntax/ptr.rs +++ b/src/libsyntax/ptr.rs @@ -83,10 +83,10 @@ impl P { } } -impl Deref for P { +impl Deref for P { type Target = T; - fn deref<'a>(&'a self) -> &'a T { + fn deref(&self) -> &T { &self.ptr } } @@ -97,11 +97,12 @@ impl Clone for P { } } -impl Debug for P { +impl Debug for P { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - Debug::fmt(&**self, f) + Debug::fmt(&self.ptr, f) } } + impl Display for P { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { Display::fmt(&**self, f) @@ -126,19 +127,8 @@ impl Encodable for P { } } - -impl fmt::Debug for P<[T]> { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - self.ptr.fmt(fmt) - } -} - impl P<[T]> { pub fn new() -> P<[T]> { - P::empty() - } - - pub fn empty() -> P<[T]> { P { ptr: Default::default() } } @@ -151,31 +141,11 @@ impl P<[T]> { pub fn into_vec(self) -> Vec { self.ptr.into_vec() } - - pub fn as_slice<'a>(&'a self) -> &'a [T] { - &self.ptr - } - - pub fn move_iter(self) -> vec::IntoIter { - self.into_vec().into_iter() - } - - pub fn map U>(&self, f: F) -> P<[U]> { - self.iter().map(f).collect() - } -} - -impl Deref for P<[T]> { - type Target = [T]; - - fn deref(&self) -> &[T] { - self.as_slice() - } } impl Default for P<[T]> { fn default() -> P<[T]> { - P::empty() + P::new() } } diff --git a/src/libsyntax/std_inject.rs b/src/libsyntax/std_inject.rs index 9049b21d8b..84a7b14484 100644 --- a/src/libsyntax/std_inject.rs +++ b/src/libsyntax/std_inject.rs @@ -14,7 +14,7 @@ use codemap::{DUMMY_SP, Span, ExpnInfo, NameAndSpan, MacroAttribute}; use codemap; use fold::Folder; use fold; -use parse::token::{intern, InternedString, special_idents}; +use parse::token::{intern, InternedString, keywords}; use parse::{token, ParseSess}; use ptr::P; use util::small_vector::SmallVector; @@ -148,7 +148,7 @@ impl fold::Folder for PreludeInjector { let vp = P(codemap::dummy_spanned(ast::ViewPathGlob(prelude_path))); mod_.items.insert(0, P(ast::Item { id: ast::DUMMY_NODE_ID, - ident: special_idents::invalid, + ident: keywords::Invalid.ident(), node: ast::ItemKind::Use(vp), 
attrs: vec![ast::Attribute { span: self.span, @@ -157,7 +157,9 @@ impl fold::Folder for PreludeInjector { style: ast::AttrStyle::Outer, value: P(ast::MetaItem { span: self.span, - node: ast::MetaItemKind::Word(special_idents::prelude_import.name.as_str()), + node: ast::MetaItemKind::Word( + token::intern_and_get_ident("prelude_import") + ), }), is_sugared_doc: false, }, diff --git a/src/libsyntax/test.rs b/src/libsyntax/test.rs index 703b161154..8eeb61e0de 100644 --- a/src/libsyntax/test.rs +++ b/src/libsyntax/test.rs @@ -31,7 +31,7 @@ use ext::expand::ExpansionConfig; use fold::Folder; use util::move_map::MoveMap; use fold; -use parse::token::{intern, InternedString}; +use parse::token::{intern, keywords, InternedString}; use parse::{token, ParseSess}; use print::pprust; use ast; @@ -116,7 +116,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { fn fold_item(&mut self, i: P) -> SmallVector> { let ident = i.ident; - if ident.name != token::special_idents::invalid.name { + if ident.name != keywords::Invalid.name() { self.cx.path.push(ident); } debug!("current path: {}", path_name_i(&self.cx.path)); @@ -160,7 +160,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> { ast::ItemKind::Mod(..) => fold::noop_fold_item(i, self), _ => SmallVector::one(i), }; - if ident.name != token::special_idents::invalid.name { + if ident.name != keywords::Invalid.name() { self.cx.path.pop(); } res @@ -453,7 +453,7 @@ fn mk_std(cx: &TestCtxt) -> P { (ast::ItemKind::Use( P(nospan(ast::ViewPathSimple(id_test, path_node(vec!(id_test)))))), - ast::Visibility::Public, token::special_idents::invalid) + ast::Visibility::Public, keywords::Invalid.ident()) } else { (ast::ItemKind::ExternCrate(None), ast::Visibility::Inherited, id_test) }; @@ -545,7 +545,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P, Option>) { P(ast::Item { id: ast::DUMMY_NODE_ID, - ident: token::special_idents::invalid, + ident: keywords::Invalid.ident(), attrs: vec![], node: ast::ItemKind::Use(P(use_path)), vis: ast::Visibility::Inherited, @@ -590,7 +590,7 @@ fn mk_tests(cx: &TestCtxt) -> P { let struct_type = ecx.ty_path(ecx.path(sp, vec![ecx.ident_of("self"), ecx.ident_of("test"), ecx.ident_of("TestDescAndFn")])); - let static_lt = ecx.lifetime(sp, token::special_idents::static_lifetime.name); + let static_lt = ecx.lifetime(sp, keywords::StaticLifetime.name()); // &'static [self::test::TestDescAndFn] let static_type = ecx.ty_rptr(sp, ecx.ty(sp, ast::TyKind::Vec(struct_type)), diff --git a/src/libsyntax/util/interner.rs b/src/libsyntax/util/interner.rs index 6e9c161293..8e20358027 100644 --- a/src/libsyntax/util/interner.rs +++ b/src/libsyntax/util/interner.rs @@ -115,14 +115,12 @@ impl Ord for RcStr { impl fmt::Debug for RcStr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - use std::fmt::Debug; self[..].fmt(f) } } impl fmt::Display for RcStr { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { - use std::fmt::Display; self[..].fmt(f) } } diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs index 839bbf4805..f50a480e5e 100644 --- a/src/libsyntax/visit.rs +++ b/src/libsyntax/visit.rs @@ -231,6 +231,7 @@ pub fn walk_trait_ref<'v,V>(visitor: &mut V, } pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) { + visitor.visit_vis(&item.vis); visitor.visit_ident(item.span, item.ident); match item.node { ItemKind::ExternCrate(opt_name) => { @@ -470,6 +471,7 @@ pub fn walk_pat<'v, V: Visitor<'v>>(visitor: &mut V, pattern: &'v Pat) { pub fn walk_foreign_item<'v, V: Visitor<'v>>(visitor: &mut V, foreign_item: &'v 
ForeignItem) { + visitor.visit_vis(&foreign_item.vis); visitor.visit_ident(foreign_item.span, foreign_item.ident); match foreign_item.node { @@ -592,6 +594,7 @@ pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v Trai } pub fn walk_impl_item<'v, V: Visitor<'v>>(visitor: &mut V, impl_item: &'v ImplItem) { + visitor.visit_vis(&impl_item.vis); visitor.visit_ident(impl_item.span, impl_item.ident); walk_list!(visitor, visit_attribute, &impl_item.attrs); match impl_item.node { @@ -619,6 +622,7 @@ pub fn walk_struct_def<'v, V: Visitor<'v>>(visitor: &mut V, pub fn walk_struct_field<'v, V: Visitor<'v>>(visitor: &mut V, struct_field: &'v StructField) { + visitor.visit_vis(&struct_field.vis); walk_opt_ident(visitor, struct_field.span, struct_field.ident); visitor.visit_ty(&struct_field.ty); walk_list!(visitor, visit_attribute, &struct_field.attrs); @@ -739,7 +743,7 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) { visitor.visit_expr(subexpression); walk_list!(visitor, visit_arm, arms); } - ExprKind::Closure(_, ref function_declaration, ref body) => { + ExprKind::Closure(_, ref function_declaration, ref body, _decl_span) => { visitor.visit_fn(FnKind::Closure, function_declaration, body, @@ -812,8 +816,7 @@ pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) { } pub fn walk_vis<'v, V: Visitor<'v>>(visitor: &mut V, vis: &'v Visibility) { - match *vis { - Visibility::Restricted { ref path, id } => visitor.visit_path(path, id), - _ => {} + if let Visibility::Restricted { ref path, id } = *vis { + visitor.visit_path(path, id); } } diff --git a/src/libsyntax_ext/concat_idents.rs b/src/libsyntax_ext/concat_idents.rs index 85453f6dfc..dce808756c 100644 --- a/src/libsyntax_ext/concat_idents.rs +++ b/src/libsyntax_ext/concat_idents.rs @@ -40,7 +40,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) } } else { match *e { - TokenTree::Token(_, token::Ident(ident, _)) => { + TokenTree::Token(_, token::Ident(ident)) => { res_str.push_str(&ident.name.as_str()) }, _ => { diff --git a/src/libsyntax_ext/deriving/clone.rs b/src/libsyntax_ext/deriving/clone.rs index 0085182d1a..30fe0f2db8 100644 --- a/src/libsyntax_ext/deriving/clone.rs +++ b/src/libsyntax_ext/deriving/clone.rs @@ -11,26 +11,71 @@ use deriving::generic::*; use deriving::generic::ty::*; -use syntax::ast::{MetaItem, Expr, VariantData}; +use syntax::ast::{Expr, ItemKind, Generics, MetaItem, VariantData}; +use syntax::attr; use syntax::codemap::Span; use syntax::ext::base::{ExtCtxt, Annotatable}; use syntax::ext::build::AstBuilder; use syntax::parse::token::InternedString; use syntax::ptr::P; +#[derive(PartialEq)] +enum Mode { Deep, Shallow } + pub fn expand_deriving_clone(cx: &mut ExtCtxt, span: Span, mitem: &MetaItem, item: &Annotatable, push: &mut FnMut(Annotatable)) { + // check if we can use a short form + // + // the short form is `fn clone(&self) -> Self { *self }` + // + // we can use the short form if: + // - the item is Copy (unfortunately, all we can check is whether it's also deriving Copy) + // - there are no generic parameters (after specialization this limitation can be removed) + // if we used the short form with generics, we'd have to bound the generics with + // Clone + Copy, and then there'd be no Clone impl at all if the user fills in something + // that is Clone but not Copy. and until specialization we can't write both impls. 
+ let bounds; + let unify_fieldless_variants; + let substructure; + match *item { + Annotatable::Item(ref annitem) => { + match annitem.node { + ItemKind::Struct(_, Generics { ref ty_params, .. }) | + ItemKind::Enum(_, Generics { ref ty_params, .. }) + if ty_params.is_empty() + && attr::contains_name(&annitem.attrs, "derive_Copy") => { + + bounds = vec![Literal(path_std!(cx, core::marker::Copy))]; + unify_fieldless_variants = true; + substructure = combine_substructure(Box::new(|c, s, sub| { + cs_clone("Clone", c, s, sub, Mode::Shallow) + })); + } + + _ => { + bounds = vec![]; + unify_fieldless_variants = false; + substructure = combine_substructure(Box::new(|c, s, sub| { + cs_clone("Clone", c, s, sub, Mode::Deep) + })); + } + } + } + + _ => cx.span_bug(span, "#[derive(Clone)] on trait item or impl item") + } + let inline = cx.meta_word(span, InternedString::new("inline")); let attrs = vec!(cx.attribute(span, inline)); let trait_def = TraitDef { span: span, attributes: Vec::new(), path: path_std!(cx, core::clone::Clone), - additional_bounds: Vec::new(), + additional_bounds: bounds, generics: LifetimeBounds::empty(), is_unsafe: false, methods: vec!( @@ -42,9 +87,8 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt, ret_ty: Self_, attributes: attrs, is_unsafe: false, - combine_substructure: combine_substructure(Box::new(|c, s, sub| { - cs_clone("Clone", c, s, sub) - })), + unify_fieldless_variants: unify_fieldless_variants, + combine_substructure: substructure, } ), associated_types: Vec::new(), @@ -56,14 +100,24 @@ pub fn expand_deriving_clone(cx: &mut ExtCtxt, fn cs_clone( name: &str, cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> P { + substr: &Substructure, + mode: Mode) -> P { let ctor_path; let all_fields; - let fn_path = cx.std_path(&["clone", "Clone", "clone"]); + let fn_path = match mode { + Mode::Shallow => cx.std_path(&["clone", "assert_receiver_is_clone"]), + Mode::Deep => cx.std_path(&["clone", "Clone", "clone"]), + }; let subcall = |field: &FieldInfo| { let args = vec![cx.expr_addr_of(field.span, field.self_.clone())]; - cx.expr_call_global(field.span, fn_path.clone(), args) + let span = if mode == Mode::Shallow { + // set the expn ID so we can call the unstable method + Span { expn_id: cx.backtrace(), .. trait_span } + } else { + field.span + }; + cx.expr_call_global(span, fn_path.clone(), args) }; let vdata; @@ -89,29 +143,41 @@ fn cs_clone( } } - match *vdata { - VariantData::Struct(..) => { - let fields = all_fields.iter().map(|field| { - let ident = match field.name { - Some(i) => i, - None => { - cx.span_bug(trait_span, - &format!("unnamed field in normal struct in \ - `derive({})`", name)) - } - }; - cx.field_imm(field.span, ident, subcall(field)) - }).collect::>(); - - cx.expr_struct(trait_span, ctor_path, fields) + match mode { + Mode::Shallow => { + cx.expr_block(cx.block(trait_span, + all_fields.iter() + .map(subcall) + .map(|e| cx.stmt_expr(e)) + .collect(), + Some(cx.expr_deref(trait_span, cx.expr_self(trait_span))))) } - VariantData::Tuple(..) => { - let subcalls = all_fields.iter().map(subcall).collect(); - let path = cx.expr_path(ctor_path); - cx.expr_call(trait_span, path, subcalls) - } - VariantData::Unit(..) => { - cx.expr_path(ctor_path) + Mode::Deep => { + match *vdata { + VariantData::Struct(..) 
=> { + let fields = all_fields.iter().map(|field| { + let ident = match field.name { + Some(i) => i, + None => { + cx.span_bug(trait_span, + &format!("unnamed field in normal struct in \ + `derive({})`", name)) + } + }; + cx.field_imm(field.span, ident, subcall(field)) + }).collect::>(); + + cx.expr_struct(trait_span, ctor_path, fields) + } + VariantData::Tuple(..) => { + let subcalls = all_fields.iter().map(subcall).collect(); + let path = cx.expr_path(ctor_path); + cx.expr_call(trait_span, path, subcalls) + } + VariantData::Unit(..) => { + cx.expr_path(ctor_path) + } + } } } } diff --git a/src/libsyntax_ext/deriving/cmp/eq.rs b/src/libsyntax_ext/deriving/cmp/eq.rs index 1b855c56a4..8bd12c3933 100644 --- a/src/libsyntax_ext/deriving/cmp/eq.rs +++ b/src/libsyntax_ext/deriving/cmp/eq.rs @@ -62,6 +62,7 @@ pub fn expand_deriving_eq(cx: &mut ExtCtxt, ret_ty: nil_ty(), attributes: attrs, is_unsafe: false, + unify_fieldless_variants: true, combine_substructure: combine_substructure(Box::new(|a, b, c| { cs_total_eq_assert(a, b, c) })) diff --git a/src/libsyntax_ext/deriving/cmp/ord.rs b/src/libsyntax_ext/deriving/cmp/ord.rs index 74706c4708..6133adb8fc 100644 --- a/src/libsyntax_ext/deriving/cmp/ord.rs +++ b/src/libsyntax_ext/deriving/cmp/ord.rs @@ -42,6 +42,7 @@ pub fn expand_deriving_ord(cx: &mut ExtCtxt, ret_ty: Literal(path_std!(cx, core::cmp::Ordering)), attributes: attrs, is_unsafe: false, + unify_fieldless_variants: true, combine_substructure: combine_substructure(Box::new(|a, b, c| { cs_cmp(a, b, c) })), diff --git a/src/libsyntax_ext/deriving/cmp/partial_eq.rs b/src/libsyntax_ext/deriving/cmp/partial_eq.rs index 6406ee59a5..e5890d7213 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_eq.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_eq.rs @@ -73,6 +73,7 @@ pub fn expand_deriving_partial_eq(cx: &mut ExtCtxt, ret_ty: Literal(path_local!(bool)), attributes: attrs, is_unsafe: false, + unify_fieldless_variants: true, combine_substructure: combine_substructure(Box::new(|a, b, c| { $f(a, b, c) })) diff --git a/src/libsyntax_ext/deriving/cmp/partial_ord.rs b/src/libsyntax_ext/deriving/cmp/partial_ord.rs index e49c77285a..cfc6dbe5cd 100644 --- a/src/libsyntax_ext/deriving/cmp/partial_ord.rs +++ b/src/libsyntax_ext/deriving/cmp/partial_ord.rs @@ -38,6 +38,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, ret_ty: Literal(path_local!(bool)), attributes: attrs, is_unsafe: false, + unify_fieldless_variants: true, combine_substructure: combine_substructure(Box::new(|cx, span, substr| { cs_op($op, $equal, cx, span, substr) })) @@ -62,6 +63,7 @@ pub fn expand_deriving_partial_ord(cx: &mut ExtCtxt, ret_ty: ret_ty, attributes: attrs, is_unsafe: false, + unify_fieldless_variants: true, combine_substructure: combine_substructure(Box::new(|cx, span, substr| { cs_partial_cmp(cx, span, substr) })) diff --git a/src/libsyntax_ext/deriving/debug.rs b/src/libsyntax_ext/deriving/debug.rs index 323c6c388f..d86eae820a 100644 --- a/src/libsyntax_ext/deriving/debug.rs +++ b/src/libsyntax_ext/deriving/debug.rs @@ -45,6 +45,7 @@ pub fn expand_deriving_debug(cx: &mut ExtCtxt, ret_ty: Literal(path_std!(cx, core::fmt::Result)), attributes: Vec::new(), is_unsafe: false, + unify_fieldless_variants: false, combine_substructure: combine_substructure(Box::new(|a, b, c| { show_substructure(a, b, c) })) diff --git a/src/libsyntax_ext/deriving/decodable.rs b/src/libsyntax_ext/deriving/decodable.rs index 49f14c937e..04888d046a 100644 --- a/src/libsyntax_ext/deriving/decodable.rs +++ 
b/src/libsyntax_ext/deriving/decodable.rs @@ -74,7 +74,7 @@ fn expand_deriving_decodable_imp(cx: &mut ExtCtxt, }, explicit_self: None, args: vec!(Ptr(Box::new(Literal(Path::new_local(typaram))), - Borrowed(None, Mutability::Mutable))), + Borrowed(None, Mutability::Mutable))), ret_ty: Literal(Path::new_( pathvec_std!(cx, core::result::Result), None, @@ -85,6 +85,7 @@ fn expand_deriving_decodable_imp(cx: &mut ExtCtxt, )), attributes: Vec::new(), is_unsafe: false, + unify_fieldless_variants: false, combine_substructure: combine_substructure(Box::new(|a, b, c| { decodable_substructure(a, b, c, krate) })), diff --git a/src/libsyntax_ext/deriving/default.rs b/src/libsyntax_ext/deriving/default.rs index bee63a98c2..a6a4830fab 100644 --- a/src/libsyntax_ext/deriving/default.rs +++ b/src/libsyntax_ext/deriving/default.rs @@ -42,6 +42,7 @@ pub fn expand_deriving_default(cx: &mut ExtCtxt, ret_ty: Self_, attributes: attrs, is_unsafe: false, + unify_fieldless_variants: false, combine_substructure: combine_substructure(Box::new(|a, b, c| { default_substructure(a, b, c) })) diff --git a/src/libsyntax_ext/deriving/encodable.rs b/src/libsyntax_ext/deriving/encodable.rs index a05bd7869b..6667230582 100644 --- a/src/libsyntax_ext/deriving/encodable.rs +++ b/src/libsyntax_ext/deriving/encodable.rs @@ -150,7 +150,7 @@ fn expand_deriving_encodable_imp(cx: &mut ExtCtxt, }, explicit_self: borrowed_explicit_self(), args: vec!(Ptr(Box::new(Literal(Path::new_local(typaram))), - Borrowed(None, Mutability::Mutable))), + Borrowed(None, Mutability::Mutable))), ret_ty: Literal(Path::new_( pathvec_std!(cx, core::result::Result), None, @@ -161,8 +161,9 @@ fn expand_deriving_encodable_imp(cx: &mut ExtCtxt, )), attributes: Vec::new(), is_unsafe: false, + unify_fieldless_variants: false, combine_substructure: combine_substructure(Box::new(|a, b, c| { - encodable_substructure(a, b, c) + encodable_substructure(a, b, c, krate) })), } ), @@ -173,12 +174,14 @@ fn expand_deriving_encodable_imp(cx: &mut ExtCtxt, } fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, - substr: &Substructure) -> P { + substr: &Substructure, krate: &'static str) -> P { let encoder = substr.nonself_args[0].clone(); // throw an underscore in front to suppress unused variable warnings let blkarg = cx.ident_of("_e"); let blkencoder = cx.expr_ident(trait_span, blkarg); - let encode = cx.ident_of("encode"); + let fn_path = cx.expr_path(cx.path_global(trait_span, vec![cx.ident_of(krate), + cx.ident_of("Encodable"), + cx.ident_of("encode")])); return match *substr.fields { Struct(_, ref fields) => { @@ -196,8 +199,8 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, token::intern_and_get_ident(&format!("_field{}", i)) } }; - let enc = cx.expr_method_call(span, self_.clone(), - encode, vec!(blkencoder.clone())); + let self_ref = cx.expr_addr_of(span, self_.clone()); + let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]); let lambda = cx.lambda_expr_1(span, enc, blkarg); let call = cx.expr_method_call(span, blkencoder.clone(), emit_struct_field, @@ -245,8 +248,9 @@ fn encodable_substructure(cx: &mut ExtCtxt, trait_span: Span, if !fields.is_empty() { let last = fields.len() - 1; for (i, &FieldInfo { ref self_, span, .. 
}) in fields.iter().enumerate() { - let enc = cx.expr_method_call(span, self_.clone(), - encode, vec!(blkencoder.clone())); + let self_ref = cx.expr_addr_of(span, self_.clone()); + let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, + blkencoder.clone()]); let lambda = cx.lambda_expr_1(span, enc, blkarg); let call = cx.expr_method_call(span, blkencoder.clone(), emit_variant_arg, diff --git a/src/libsyntax_ext/deriving/generic/mod.rs b/src/libsyntax_ext/deriving/generic/mod.rs index b8ba1a58f2..20fb4bf32c 100644 --- a/src/libsyntax_ext/deriving/generic/mod.rs +++ b/src/libsyntax_ext/deriving/generic/mod.rs @@ -201,8 +201,7 @@ use syntax::codemap::{self, DUMMY_SP}; use syntax::codemap::Span; use syntax::errors::Handler; use syntax::util::move_map::MoveMap; -use syntax::parse::token::{intern, InternedString}; -use syntax::parse::token::special_idents; +use syntax::parse::token::{intern, keywords, InternedString}; use syntax::ptr::P; use self::ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty}; @@ -258,6 +257,9 @@ pub struct MethodDef<'a> { // Is it an `unsafe fn`? pub is_unsafe: bool, + /// Can we combine fieldless variants for enums into a single match arm? + pub unify_fieldless_variants: bool, + pub combine_substructure: RefCell>, } @@ -526,7 +528,7 @@ impl<'a> TraitDef<'a> { span: self.span, bound_lifetimes: wb.bound_lifetimes.clone(), bounded_ty: wb.bounded_ty.clone(), - bounds: P::from_vec(wb.bounds.iter().cloned().collect()) + bounds: wb.bounds.iter().cloned().collect(), }) } ast::WherePredicate::RegionPredicate(ref rb) => { @@ -596,9 +598,9 @@ impl<'a> TraitDef<'a> { let trait_ref = cx.trait_ref(trait_path); // Create the type parameters on the `self` path. - let self_ty_params = generics.ty_params.map(|ty_param| { + let self_ty_params = generics.ty_params.iter().map(|ty_param| { cx.ty_ident(self.span, ty_param.ident) - }); + }).collect(); let self_lifetimes: Vec = generics.lifetimes @@ -609,7 +611,7 @@ impl<'a> TraitDef<'a> { // Create the type of `self`. let self_type = cx.ty_path( cx.path_all(self.span, false, vec!( type_ident ), self_lifetimes, - self_ty_params.into_vec(), Vec::new())); + self_ty_params, Vec::new())); let attr = cx.attribute( self.span, @@ -635,7 +637,7 @@ impl<'a> TraitDef<'a> { cx.item( self.span, - special_idents::invalid, + keywords::Invalid.ident(), a, ast::ItemKind::Impl(unsafety, ast::ImplPolarity::Positive, @@ -858,15 +860,15 @@ impl<'a> MethodDef<'a> { explicit_self: ast::ExplicitSelf, arg_types: Vec<(Ident, P)> , body: P) -> ast::ImplItem { + // create the generics that aren't for Self let fn_generics = self.generics.to_generics(cx, trait_.span, type_ident, generics); let self_arg = match explicit_self.node { ast::SelfKind::Static => None, // creating fresh self id - _ => Some(ast::Arg::new_self(trait_.span, - ast::Mutability::Immutable, - special_idents::self_)) + _ => Some(ast::Arg::from_self(explicit_self.clone(), trait_.span, + ast::Mutability::Immutable)), }; let args = { let args = arg_types.into_iter().map(|(name, ty)| { @@ -991,6 +993,7 @@ impl<'a> MethodDef<'a> { body = cx.expr_match(trait_.span, arg_expr.clone(), vec!( cx.arm(trait_.span, vec!(pat.clone()), body) )) } + body } @@ -1130,12 +1133,15 @@ impl<'a> MethodDef<'a> { let catch_all_substructure = EnumNonMatchingCollapsed( self_arg_idents, &variants[..], &vi_idents[..]); + let first_fieldless = variants.iter().find(|v| v.node.data.fields().is_empty()); + // These arms are of the form: // (Variant1, Variant1, ...) => Body1 // (Variant2, Variant2, ...) => Body2 // ... 
// where each tuple has length = self_args.len() let mut match_arms: Vec = variants.iter().enumerate() + .filter(|&(_, v)| !(self.unify_fieldless_variants && v.node.data.fields().is_empty())) .map(|(index, variant)| { let mk_self_pat = |cx: &mut ExtCtxt, self_arg_name: &str| { let (p, idents) = trait_.create_enum_variant_pattern( @@ -1218,6 +1224,28 @@ impl<'a> MethodDef<'a> { cx.arm(sp, vec![single_pat], arm_expr) }).collect(); + + let default = match first_fieldless { + Some(v) if self.unify_fieldless_variants => { + // We need a default case that handles the fieldless variants. + // The index and actual variant aren't meaningful in this case, + // so just use whatever + Some(self.call_substructure_method( + cx, trait_, type_ident, &self_args[..], nonself_args, + &EnumMatching(0, v, Vec::new()))) + } + _ if variants.len() > 1 && self_args.len() > 1 => { + // Since we know that all the arguments will match if we reach + // the match expression we add the unreachable intrinsics as the + // result of the catch all which should help llvm in optimizing it + Some(deriving::call_intrinsic(cx, sp, "unreachable", vec![])) + } + _ => None + }; + if let Some(arm) = default { + match_arms.push(cx.arm(sp, vec![cx.pat_wild(sp)], arm)); + } + // We will usually need the catch-all after matching the // tuples `(VariantK, VariantK, ...)` for each VariantK of the // enum. But: @@ -1291,13 +1319,6 @@ impl<'a> MethodDef<'a> { cx, trait_, type_ident, &self_args[..], nonself_args, &catch_all_substructure); - //Since we know that all the arguments will match if we reach the match expression we - //add the unreachable intrinsics as the result of the catch all which should help llvm - //in optimizing it - match_arms.push(cx.arm(sp, - vec![cx.pat_wild(sp)], - deriving::call_intrinsic(cx, sp, "unreachable", vec![]))); - // Final wrinkle: the self_args are expressions that deref // down to desired l-values, but we cannot actually deref // them when they are fed as r-values into a tuple diff --git a/src/libsyntax_ext/deriving/generic/ty.rs b/src/libsyntax_ext/deriving/generic/ty.rs index a924cc0695..e31d45d91a 100644 --- a/src/libsyntax_ext/deriving/generic/ty.rs +++ b/src/libsyntax_ext/deriving/generic/ty.rs @@ -19,7 +19,7 @@ use syntax::ast::{Expr,Generics,Ident}; use syntax::ext::base::ExtCtxt; use syntax::ext::build::AstBuilder; use syntax::codemap::{Span,respan}; -use syntax::parse::token::special_idents; +use syntax::parse::token::keywords; use syntax::ptr::P; /// The types of pointers @@ -169,15 +169,14 @@ impl<'a> Ty<'a> { -> ast::Path { match *self { Self_ => { - let self_params = self_generics.ty_params.map(|ty_param| { + let self_params = self_generics.ty_params.iter().map(|ty_param| { cx.ty_ident(span, ty_param.ident) - }); + }).collect(); let lifetimes = self_generics.lifetimes.iter() .map(|d| d.lifetime) .collect(); - cx.path_all(span, false, vec!(self_ty), lifetimes, - self_params.into_vec(), Vec::new()) + cx.path_all(span, false, vec![self_ty], lifetimes, self_params, Vec::new()) } Literal(ref p) => { p.to_path(cx, span, self_ty, self_generics) @@ -264,7 +263,7 @@ pub fn get_explicit_self(cx: &ExtCtxt, span: Span, self_ptr: &Option) let self_path = cx.expr_self(span); match *self_ptr { None => { - (self_path, respan(span, ast::SelfKind::Value(special_idents::self_))) + (self_path, respan(span, ast::SelfKind::Value(keywords::SelfValue.ident()))) } Some(ref ptr) => { let self_ty = respan( @@ -272,7 +271,7 @@ pub fn get_explicit_self(cx: &ExtCtxt, span: Span, self_ptr: &Option) match *ptr { Borrowed(ref 
lt, mutbl) => { let lt = lt.map(|s| cx.lifetime(span, cx.ident_of(s).name)); - ast::SelfKind::Region(lt, mutbl, special_idents::self_) + ast::SelfKind::Region(lt, mutbl, keywords::SelfValue.ident()) } Raw(_) => cx.span_bug(span, "attempted to use *self in deriving definition") }); diff --git a/src/libsyntax_ext/deriving/hash.rs b/src/libsyntax_ext/deriving/hash.rs index c37ae116d3..fd449372cb 100644 --- a/src/libsyntax_ext/deriving/hash.rs +++ b/src/libsyntax_ext/deriving/hash.rs @@ -51,6 +51,7 @@ pub fn expand_deriving_hash(cx: &mut ExtCtxt, ret_ty: nil_ty(), attributes: vec![], is_unsafe: false, + unify_fieldless_variants: true, combine_substructure: combine_substructure(Box::new(|a, b, c| { hash_substructure(a, b, c) })) diff --git a/src/libsyntax_ext/deriving/mod.rs b/src/libsyntax_ext/deriving/mod.rs index 92a141fb4e..91c272c59c 100644 --- a/src/libsyntax_ext/deriving/mod.rs +++ b/src/libsyntax_ext/deriving/mod.rs @@ -96,6 +96,36 @@ fn expand_derive(cx: &mut ExtCtxt, let mut found_partial_eq = false; let mut found_eq = false; + // This span is **very** sensitive and crucial to + // getting the stability behavior we want. What we are + // doing is marking the generated `#[derive_*]` with the + // span of the `#[deriving(...)]` attribute (the + // entire attribute, not just the `PartialEq` or `Eq` + // part), but with the current backtrace. The current + // backtrace will contain a topmost entry that IS this + // `#[deriving(...)]` attribute and with the + // "allow-unstable" flag set to true. + // + // Note that we do NOT use the span of the `Eq` + // text itself. You might think this is + // equivalent, because the `Eq` appears within the + // `#[deriving(Eq)]` attribute, and hence we would + // inherit the "allows unstable" from the + // backtrace. But in fact this is not always the + // case. The actual source text that led to + // deriving can be `#[$attr]`, for example, where + // `$attr == deriving(Eq)`. In that case, the + // "#[derive_*]" would be considered to + // originate not from the deriving call but from + // text outside the deriving call, and hence would + // be forbidden from using unstable + // content. + // + // See tests src/run-pass/rfc1445 for + // examples. --nmatsakis + let span = Span { expn_id: cx.backtrace(), .. span }; + assert!(cx.parse_sess.codemap().span_allows_unstable(span)); + for titem in traits.iter().rev() { let tname = match titem.node { MetaItemKind::Word(ref tname) => tname, @@ -121,42 +151,13 @@ fn expand_derive(cx: &mut ExtCtxt, } // #[derive(Foo, Bar)] expands to #[derive_Foo] #[derive_Bar] - item.attrs.push(cx.attribute(titem.span, cx.meta_word(titem.span, + item.attrs.push(cx.attribute(span, cx.meta_word(titem.span, intern_and_get_ident(&format!("derive_{}", tname))))); } // RFC #1445. `#[derive(PartialEq, Eq)]` adds a (trusted) // `#[structural_match]` attribute. if found_partial_eq && found_eq { - // This span is **very** sensitive and crucial to - // getting the stability behavior we want. What we are - // doing is marking `#[structural_match]` with the - // span of the `#[deriving(...)]` attribute (the - // entire attribute, not just the `PartialEq` or `Eq` - // part), but with the current backtrace. The current - // backtrace will contain a topmost entry that IS this - // `#[deriving(...)]` attribute and with the - // "allow-unstable" flag set to true. - // - // Note that we do NOT use the span of the `Eq` - // text itself. 
You might think this is - // equivalent, because the `Eq` appears within the - // `#[deriving(Eq)]` attribute, and hence we would - // inherit the "allows unstable" from the - // backtrace. But in fact this is not always the - // case. The actual source text that led to - // deriving can be `#[$attr]`, for example, where - // `$attr == deriving(Eq)`. In that case, the - // "#[structural_match]" would be considered to - // originate not from the deriving call but from - // text outside the deriving call, and hence would - // be forbidden from using unstable - // content. - // - // See tests src/run-pass/rfc1445 for - // examples. --nmatsakis - let span = Span { expn_id: cx.backtrace(), .. span }; - assert!(cx.parse_sess.codemap().span_allows_unstable(span)); debug!("inserting structural_match with span {:?}", span); let structural_match = intern_and_get_ident("structural_match"); item.attrs.push(cx.attribute(span, @@ -188,6 +189,39 @@ macro_rules! derive_traits { mitem: &MetaItem, annotatable: &Annotatable, push: &mut FnMut(Annotatable)) { + if !ecx.parse_sess.codemap().span_allows_unstable(sp) + && !ecx.ecfg.features.unwrap().custom_derive { + // FIXME: + // https://github.com/rust-lang/rust/pull/32671#issuecomment-206245303 + // This is just to avoid breakage with syntex. + // Remove that to spawn an error instead. + let cm = ecx.parse_sess.codemap(); + let parent = cm.with_expn_info(ecx.backtrace(), + |info| info.unwrap().call_site.expn_id); + cm.with_expn_info(parent, |info| { + if info.is_some() { + let mut w = ecx.parse_sess.span_diagnostic.struct_span_warn( + sp, feature_gate::EXPLAIN_DERIVE_UNDERSCORE, + ); + if option_env!("CFG_DISABLE_UNSTABLE_FEATURES").is_none() { + w.help( + &format!("add #![feature(custom_derive)] to \ + the crate attributes to enable") + ); + } + w.emit(); + } else { + feature_gate::emit_feature_err( + &ecx.parse_sess.span_diagnostic, + "custom_derive", sp, feature_gate::GateIssue::Language, + feature_gate::EXPLAIN_DERIVE_UNDERSCORE + ); + + return; + } + }) + } + warn_if_deprecated(ecx, sp, $name); $func(ecx, sp, mitem, annotatable, push); } diff --git a/src/libsyntax_ext/format.rs b/src/libsyntax_ext/format.rs index fd68ba7342..abfa655806 100644 --- a/src/libsyntax_ext/format.rs +++ b/src/libsyntax_ext/format.rs @@ -19,8 +19,7 @@ use syntax::ext::base::*; use syntax::ext::base; use syntax::ext::build::AstBuilder; use syntax::fold::Folder; -use syntax::parse::token::special_idents; -use syntax::parse::token; +use syntax::parse::token::{self, keywords}; use syntax::ptr::P; use std::collections::HashMap; @@ -68,8 +67,7 @@ struct Context<'a, 'b:'a> { name_positions: HashMap, - /// Updated as arguments are consumed or methods are entered - nest_level: usize, + /// Updated as arguments are consumed next_arg: usize, } @@ -106,7 +104,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) { named = true; let ident = match p.token { - token::Ident(i, _) => { + token::Ident(i) => { p.bump(); i } @@ -165,9 +163,7 @@ impl<'a, 'b> Context<'a, 'b> { let pos = match arg.position { parse::ArgumentNext => { let i = self.next_arg; - if self.check_positional_ok() { - self.next_arg += 1; - } + self.next_arg += 1; Exact(i) } parse::ArgumentIs(i) => Exact(i), @@ -190,25 +186,13 @@ impl<'a, 'b> Context<'a, 'b> { self.verify_arg_type(Named(s.to_string()), Unsigned); } parse::CountIsNextParam => { - if self.check_positional_ok() { - let next_arg = self.next_arg; - 
self.verify_arg_type(Exact(next_arg), Unsigned); - self.next_arg += 1; - } + let next_arg = self.next_arg; + self.verify_arg_type(Exact(next_arg), Unsigned); + self.next_arg += 1; } } } - fn check_positional_ok(&mut self) -> bool { - if self.nest_level != 0 { - self.ecx.span_err(self.fmtsp, "cannot use implicit positional \ - arguments nested inside methods"); - false - } else { - true - } - } - fn describe_num_args(&self) -> String { match self.args.len() { 0 => "no arguments given".to_string(), @@ -449,7 +433,7 @@ impl<'a, 'b> Context<'a, 'b> { let sp = piece_ty.span; let ty = ecx.ty_rptr(sp, ecx.ty(sp, ast::TyKind::Vec(piece_ty)), - Some(ecx.lifetime(sp, special_idents::static_lifetime.name)), + Some(ecx.lifetime(sp, keywords::StaticLifetime.name())), ast::Mutability::Immutable); let slice = ecx.expr_vec_slice(sp, pieces); // static instead of const to speed up codegen by not requiring this to be inlined @@ -475,7 +459,7 @@ impl<'a, 'b> Context<'a, 'b> { // First, build up the static array which will become our precompiled // format "string" - let static_lifetime = self.ecx.lifetime(self.fmtsp, special_idents::static_lifetime.name); + let static_lifetime = self.ecx.lifetime(self.fmtsp, keywords::StaticLifetime.name()); let piece_ty = self.ecx.ty_rptr( self.fmtsp, self.ecx.ty_ident(self.fmtsp, self.ecx.ident_of("str")), @@ -656,7 +640,6 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span, name_positions: HashMap::new(), name_types: HashMap::new(), name_ordering: name_ordering, - nest_level: 0, next_arg: 0, literal: String::new(), pieces: Vec::new(), diff --git a/src/libterm/terminfo/parm.rs b/src/libterm/terminfo/parm.rs index 60b5dffac5..fbc6bfb6c8 100644 --- a/src/libterm/terminfo/parm.rs +++ b/src/libterm/terminfo/parm.rs @@ -594,7 +594,7 @@ mod test { assert!(res.is_ok(), "Op {} failed with 1 stack entry: {}", cap, - res.err().unwrap()); + res.unwrap_err()); } let caps = ["%+", "%-", "%*", "%/", "%m", "%&", "%|", "%A", "%O"]; for &cap in caps.iter() { @@ -610,7 +610,7 @@ mod test { assert!(res.is_ok(), "Binop {} failed with 2 stack entries: {}", cap, - res.err().unwrap()); + res.unwrap_err()); } } @@ -625,15 +625,15 @@ mod test { for &(op, bs) in v.iter() { let s = format!("%{{1}}%{{2}}%{}%d", op); let res = expand(s.as_bytes(), &[], &mut Variables::new()); - assert!(res.is_ok(), res.err().unwrap()); + assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), vec![b'0' + bs[0]]); let s = format!("%{{1}}%{{1}}%{}%d", op); let res = expand(s.as_bytes(), &[], &mut Variables::new()); - assert!(res.is_ok(), res.err().unwrap()); + assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), vec![b'0' + bs[1]]); let s = format!("%{{2}}%{{1}}%{}%d", op); let res = expand(s.as_bytes(), &[], &mut Variables::new()); - assert!(res.is_ok(), res.err().unwrap()); + assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), vec![b'0' + bs[2]]); } } @@ -643,13 +643,13 @@ mod test { let mut vars = Variables::new(); let s = b"\\E[%?%p1%{8}%<%t3%p1%d%e%p1%{16}%<%t9%p1%{8}%-%d%e38;5;%p1%d%;m"; let res = expand(s, &[Number(1)], &mut vars); - assert!(res.is_ok(), res.err().unwrap()); + assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), "\\E[31m".bytes().collect::>()); let res = expand(s, &[Number(8)], &mut vars); - assert!(res.is_ok(), res.err().unwrap()); + assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), "\\E[90m".bytes().collect::>()); let res = expand(s, &[Number(42)], &mut vars); - assert!(res.is_ok(), res.err().unwrap()); + 
assert!(res.is_ok(), res.unwrap_err()); assert_eq!(res.unwrap(), "\\E[38;5;42m".bytes().collect::>()); } diff --git a/src/libtest/lib.rs b/src/libtest/lib.rs index e78fd0dea2..88be3ade83 100644 --- a/src/libtest/lib.rs +++ b/src/libtest/lib.rs @@ -41,10 +41,12 @@ #![feature(set_stdio)] #![feature(staged_api)] #![feature(question_mark)] +#![feature(panic_unwind)] extern crate getopts; extern crate term; extern crate libc; +extern crate panic_unwind; pub use self::TestFn::*; pub use self::ColorConfig::*; @@ -54,9 +56,7 @@ use self::TestEvent::*; use self::NamePadding::*; use self::OutputLocation::*; -use stats::Stats; use std::boxed::FnBox; -use term::Terminal; use std::any::Any; use std::cmp; diff --git a/src/libunwind/Cargo.toml b/src/libunwind/Cargo.toml new file mode 100644 index 0000000000..b537c6b1b7 --- /dev/null +++ b/src/libunwind/Cargo.toml @@ -0,0 +1,14 @@ +[package] +authors = ["The Rust Project Developers"] +name = "unwind" +version = "0.0.0" +build = "build.rs" + +[lib] +name = "unwind" +path = "lib.rs" +test = false + +[dependencies] +core = { path = "../libcore" } +libc = { path = "../rustc/libc_shim" } diff --git a/src/libunwind/build.rs b/src/libunwind/build.rs new file mode 100644 index 0000000000..ebe6fd5479 --- /dev/null +++ b/src/libunwind/build.rs @@ -0,0 +1,39 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::env; + +fn main() { + println!("cargo:rustc-cfg=cargobuild"); + + let target = env::var("TARGET").unwrap(); + + if target.contains("linux") { + if target.contains("musl") && (target.contains("x86_64") || target.contains("i686")) { + println!("cargo:rustc-link-lib=static=unwind"); + } else if !target.contains("android") { + println!("cargo:rustc-link-lib=gcc_s"); + } + } else if target.contains("freebsd") { + println!("cargo:rustc-link-lib=gcc_s"); + } else if target.contains("rumprun") { + println!("cargo:rustc-link-lib=unwind"); + } else if target.contains("netbsd") { + println!("cargo:rustc-link-lib=gcc_s"); + } else if target.contains("openbsd") { + println!("cargo:rustc-link-lib=gcc"); + } else if target.contains("bitrig") { + println!("cargo:rustc-link-lib=c++abi"); + } else if target.contains("dragonfly") { + println!("cargo:rustc-link-lib=gcc_pic"); + } else if target.contains("windows-gnu") { + println!("cargo:rustc-link-lib=gcc_eh"); + } +} diff --git a/src/libunwind/lib.rs b/src/libunwind/lib.rs new file mode 100644 index 0000000000..3ff61fd93d --- /dev/null +++ b/src/libunwind/lib.rs @@ -0,0 +1,30 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
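Illustrative note, not part of the patch: the new libunwind build script above drives linking for the cargo-based build. It advertises a `cargobuild` cfg and emits a platform-specific `cargo:rustc-link-lib` directive, while the in-crate `link(name = ...)` attributes are kept only for the non-cargo build, gated on `cfg(not(cargobuild))` as seen further down in libunwind.rs. A minimal sketch of that pattern, with a simplified target check and a placeholder library choice (the real script also special-cases musl, the BSDs, and others):

    // build.rs (sketch): mark this as a cargo-driven build and choose the
    // unwinder library from the target triple.
    use std::env;

    fn main() {
        println!("cargo:rustc-cfg=cargobuild");
        let target = env::var("TARGET").unwrap();
        if target.contains("windows-gnu") {
            println!("cargo:rustc-link-lib=gcc_eh");
        } else if target.contains("linux") && !target.contains("android") {
            println!("cargo:rustc-link-lib=gcc_s");
        }
    }

    // lib.rs (sketch): fall back to the attribute only when built outside
    // cargo, i.e. when the `cargobuild` cfg was never emitted.
    #[cfg(not(cargobuild))]
    #[link(name = "gcc_s")]
    extern "C" {}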
+ +#![no_std] +#![crate_name = "unwind"] +#![crate_type = "rlib"] +#![unstable(feature = "panic_unwind", issue = "32837")] +#![cfg_attr(not(stage0), deny(warnings))] + +#![feature(cfg_target_vendor)] +#![feature(staged_api)] +#![feature(unwind_attributes)] + +#![cfg_attr(not(target_env = "msvc"), feature(libc))] + +#[cfg(not(target_env = "msvc"))] +extern crate libc; + +#[cfg(not(target_env = "msvc"))] +mod libunwind; +#[cfg(not(target_env = "msvc"))] +pub use libunwind::*; + diff --git a/src/libstd/sys/common/libunwind.rs b/src/libunwind/libunwind.rs similarity index 57% rename from src/libstd/sys/common/libunwind.rs rename to src/libunwind/libunwind.rs index c1e9782852..ce74e5de3d 100644 --- a/src/libstd/sys/common/libunwind.rs +++ b/src/libunwind/libunwind.rs @@ -1,4 +1,4 @@ -// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -8,12 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -//! Unwind library interface +#![allow(bad_style)] -#![allow(non_upper_case_globals)] -#![allow(non_camel_case_types)] -#![allow(non_snake_case)] -#![allow(dead_code)] // these are just bindings +use libc; #[cfg(any(not(target_arch = "arm"), target_os = "ios"))] pub use self::_Unwind_Action::*; @@ -21,11 +18,9 @@ pub use self::_Unwind_Action::*; pub use self::_Unwind_State::*; pub use self::_Unwind_Reason_Code::*; -use libc; - #[cfg(any(not(target_arch = "arm"), target_os = "ios"))] #[repr(C)] -#[derive(Copy, Clone)] +#[derive(Clone, Copy)] pub enum _Unwind_Action { _UA_SEARCH_PHASE = 1, _UA_CLEANUP_PHASE = 2, @@ -36,7 +31,7 @@ pub enum _Unwind_Action { #[cfg(target_arch = "arm")] #[repr(C)] -#[derive(Copy, Clone)] +#[derive(Clone, Copy)] pub enum _Unwind_State { _US_VIRTUAL_UNWIND_FRAME = 0, _US_UNWIND_FRAME_STARTING = 1, @@ -47,7 +42,6 @@ pub enum _Unwind_State { } #[repr(C)] -#[derive(Copy, Clone)] pub enum _Unwind_Reason_Code { _URC_NO_REASON = 0, _URC_FOREIGN_EXCEPTION_CAUGHT = 1, @@ -65,6 +59,10 @@ pub type _Unwind_Exception_Class = u64; pub type _Unwind_Word = libc::uintptr_t; +pub type _Unwind_Trace_Fn = + extern fn(ctx: *mut _Unwind_Context, + arg: *mut libc::c_void) -> _Unwind_Reason_Code; + #[cfg(target_arch = "x86")] pub const unwinder_private_data_size: usize = 5; @@ -126,9 +124,12 @@ pub type _Unwind_Exception_Cleanup_Fn = link(name = "gcc_pic"))] #[cfg_attr(target_os = "bitrig", link(name = "c++abi"))] -#[cfg_attr(all(target_os = "windows", target_env="gnu"), +#[cfg_attr(all(target_os = "windows", target_env = "gnu"), link(name = "gcc_eh"))] -extern "C" { +#[cfg(not(cargobuild))] +extern {} + +extern { // iOS on armv7 uses SjLj exceptions and requires to link // against corresponding routine (..._SjLj_...) 
#[cfg(not(all(target_os = "ios", target_arch = "arm")))] @@ -145,14 +146,102 @@ extern "C" { #[unwind] pub fn _Unwind_Resume(exception: *mut _Unwind_Exception) -> !; + + // No native _Unwind_Backtrace on iOS + #[cfg(not(all(target_os = "ios", target_arch = "arm")))] + pub fn _Unwind_Backtrace(trace: _Unwind_Trace_Fn, + trace_argument: *mut libc::c_void) + -> _Unwind_Reason_Code; + + // available since GCC 4.2.0, should be fine for our purpose + #[cfg(all(not(all(target_os = "android", target_arch = "arm")), + not(all(target_os = "linux", target_arch = "arm"))))] + pub fn _Unwind_GetIPInfo(ctx: *mut _Unwind_Context, + ip_before_insn: *mut libc::c_int) + -> libc::uintptr_t; + + #[cfg(all(not(target_os = "android"), + not(all(target_os = "linux", target_arch = "arm"))))] + pub fn _Unwind_FindEnclosingFunction(pc: *mut libc::c_void) + -> *mut libc::c_void; } // ... and now we just providing access to SjLj counterspart // through a standard name to hide those details from others // (see also comment above regarding _Unwind_RaiseException) #[cfg(all(target_os = "ios", target_arch = "arm"))] -#[inline(always)] +#[inline] pub unsafe fn _Unwind_RaiseException(exc: *mut _Unwind_Exception) -> _Unwind_Reason_Code { _Unwind_SjLj_RaiseException(exc) } + +// On android, the function _Unwind_GetIP is a macro, and this is the +// expansion of the macro. This is all copy/pasted directly from the +// header file with the definition of _Unwind_GetIP. +#[cfg(any(all(target_os = "android", target_arch = "arm"), + all(target_os = "linux", target_arch = "arm")))] +pub unsafe fn _Unwind_GetIP(ctx: *mut _Unwind_Context) -> libc::uintptr_t { + #[repr(C)] + enum _Unwind_VRS_Result { + _UVRSR_OK = 0, + _UVRSR_NOT_IMPLEMENTED = 1, + _UVRSR_FAILED = 2, + } + #[repr(C)] + enum _Unwind_VRS_RegClass { + _UVRSC_CORE = 0, + _UVRSC_VFP = 1, + _UVRSC_FPA = 2, + _UVRSC_WMMXD = 3, + _UVRSC_WMMXC = 4, + } + #[repr(C)] + enum _Unwind_VRS_DataRepresentation { + _UVRSD_UINT32 = 0, + _UVRSD_VFPX = 1, + _UVRSD_FPAX = 2, + _UVRSD_UINT64 = 3, + _UVRSD_FLOAT = 4, + _UVRSD_DOUBLE = 5, + } + + type _Unwind_Word = libc::c_uint; + extern { + fn _Unwind_VRS_Get(ctx: *mut _Unwind_Context, + klass: _Unwind_VRS_RegClass, + word: _Unwind_Word, + repr: _Unwind_VRS_DataRepresentation, + data: *mut libc::c_void) + -> _Unwind_VRS_Result; + } + + let mut val: _Unwind_Word = 0; + let ptr = &mut val as *mut _Unwind_Word; + let _ = _Unwind_VRS_Get(ctx, _Unwind_VRS_RegClass::_UVRSC_CORE, 15, + _Unwind_VRS_DataRepresentation::_UVRSD_UINT32, + ptr as *mut libc::c_void); + (val & !1) as libc::uintptr_t +} + +// This function doesn't exist on Android or ARM/Linux, so make it same +// to _Unwind_GetIP +#[cfg(any(all(target_os = "android", target_arch = "arm"), + all(target_os = "linux", target_arch = "arm")))] +pub unsafe fn _Unwind_GetIPInfo(ctx: *mut _Unwind_Context, + ip_before_insn: *mut libc::c_int) + -> libc::uintptr_t +{ + *ip_before_insn = 0; + _Unwind_GetIP(ctx) +} + +// This function also doesn't exist on Android or ARM/Linux, so make it +// a no-op +#[cfg(any(target_os = "android", + all(target_os = "linux", target_arch = "arm")))] +pub unsafe fn _Unwind_FindEnclosingFunction(pc: *mut libc::c_void) + -> *mut libc::c_void +{ + pc +} diff --git a/src/rustc/Cargo.lock b/src/rustc/Cargo.lock index a3420d7521..7a63742fba 100644 --- a/src/rustc/Cargo.lock +++ b/src/rustc/Cargo.lock @@ -7,15 +7,6 @@ dependencies = [ "rustdoc 0.0.0", ] -[[package]] -name = "advapi32-sys" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" 
-dependencies = [ - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "arena" version = "0.0.0" @@ -29,7 +20,7 @@ name = "flate" version = "0.0.0" dependencies = [ "build_helper 0.1.0", - "gcc 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] @@ -38,12 +29,8 @@ version = "0.0.0" [[package]] name = "gcc" -version = "0.3.17" +version = "0.3.28" source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", -] [[package]] name = "graphviz" @@ -75,6 +62,7 @@ dependencies = [ "rustc_bitflags 0.0.0", "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", + "rustc_llvm 0.0.0", "serialize 0.0.0", "syntax 0.0.0", ] @@ -191,7 +179,7 @@ name = "rustc_llvm" version = "0.0.0" dependencies = [ "build_helper 0.1.0", - "gcc 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "rustc_bitflags 0.0.0", ] @@ -219,6 +207,7 @@ dependencies = [ "log 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", + "rustc_bitflags 0.0.0", "rustc_const_eval 0.0.0", "rustc_const_math 0.0.0", "rustc_data_structures 0.0.0", @@ -232,6 +221,7 @@ dependencies = [ "log 0.0.0", "rustc 0.0.0", "rustc_const_eval 0.0.0", + "rustc_const_math 0.0.0", "syntax 0.0.0", ] @@ -268,7 +258,6 @@ dependencies = [ "arena 0.0.0", "log 0.0.0", "rustc 0.0.0", - "rustc_bitflags 0.0.0", "syntax 0.0.0", ] @@ -278,6 +267,7 @@ version = "0.0.0" dependencies = [ "log 0.0.0", "rustc 0.0.0", + "serialize 0.0.0", "syntax 0.0.0", ] @@ -323,7 +313,7 @@ version = "0.0.0" dependencies = [ "arena 0.0.0", "build_helper 0.1.0", - "gcc 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.28 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.0.0", "rustc 0.0.0", "rustc_back 0.0.0", @@ -362,13 +352,3 @@ dependencies = [ "syntax 0.0.0", ] -[[package]] -name = "winapi" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi-build" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" - diff --git a/src/rustc/Cargo.toml b/src/rustc/Cargo.toml index 7431c35efb..24499cb8f0 100644 --- a/src/rustc/Cargo.toml +++ b/src/rustc/Cargo.toml @@ -13,6 +13,8 @@ path = "rustdoc.rs" [profile.release] opt-level = 2 +[profile.bench] +opt-level = 2 # These options are controlled from our rustc wrapper script, so turn them off # here and have them controlled elsewhere. diff --git a/src/rustc/libc_shim/Cargo.toml b/src/rustc/libc_shim/Cargo.toml index a7860b50e0..8fc713e0f1 100644 --- a/src/rustc/libc_shim/Cargo.toml +++ b/src/rustc/libc_shim/Cargo.toml @@ -15,6 +15,7 @@ build = "build.rs" [lib] name = "libc" path = "../../liblibc/src/lib.rs" +test = false [dependencies] core = { path = "../../libcore" } diff --git a/src/rustc/libc_shim/build.rs b/src/rustc/libc_shim/build.rs index bc428d6908..546f60482e 100644 --- a/src/rustc/libc_shim/build.rs +++ b/src/rustc/libc_shim/build.rs @@ -8,8 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
+#![deny(warnings)] + // See comments in Cargo.toml for why this exists fn main() { println!("cargo:rustc-cfg=stdbuild"); + println!("cargo:rerun-if-changed=build.rs"); } diff --git a/src/rustc/std_shim/Cargo.lock b/src/rustc/std_shim/Cargo.lock index 530c04da8a..bad46966ff 100644 --- a/src/rustc/std_shim/Cargo.lock +++ b/src/rustc/std_shim/Cargo.lock @@ -5,15 +5,6 @@ dependencies = [ "std 0.0.0", ] -[[package]] -name = "advapi32-sys" -version = "0.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -dependencies = [ - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi-build 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)", -] - [[package]] name = "alloc" version = "0.0.0" @@ -27,7 +18,7 @@ version = "0.0.0" dependencies = [ "build_helper 0.1.0", "core 0.0.0", - "gcc 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.26 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.0.0", ] @@ -58,18 +49,32 @@ version = "0.0.0" [[package]] name = "gcc" -version = "0.3.17" +version = "0.3.26" source = "registry+https://github.com/rust-lang/crates.io-index" + +[[package]] +name = "libc" +version = "0.0.0" dependencies = [ - "advapi32-sys 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", - "winapi 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "core 0.0.0", ] [[package]] -name = "libc" +name = "panic_abort" version = "0.0.0" dependencies = [ "core 0.0.0", + "libc 0.0.0", +] + +[[package]] +name = "panic_unwind" +version = "0.0.0" +dependencies = [ + "alloc 0.0.0", + "core 0.0.0", + "libc 0.0.0", + "unwind 0.0.0", ] [[package]] @@ -96,19 +101,20 @@ dependencies = [ "build_helper 0.1.0", "collections 0.0.0", "core 0.0.0", - "gcc 0.3.17 (registry+https://github.com/rust-lang/crates.io-index)", + "gcc 0.3.26 (registry+https://github.com/rust-lang/crates.io-index)", "libc 0.0.0", + "panic_abort 0.0.0", + "panic_unwind 0.0.0", "rand 0.0.0", "rustc_unicode 0.0.0", + "unwind 0.0.0", ] [[package]] -name = "winapi" -version = "0.2.2" -source = "registry+https://github.com/rust-lang/crates.io-index" - -[[package]] -name = "winapi-build" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" +name = "unwind" +version = "0.0.0" +dependencies = [ + "core 0.0.0", + "libc 0.0.0", +] diff --git a/src/rustc/std_shim/Cargo.toml b/src/rustc/std_shim/Cargo.toml index 1ce3937157..5602ef866b 100644 --- a/src/rustc/std_shim/Cargo.toml +++ b/src/rustc/std_shim/Cargo.toml @@ -30,6 +30,8 @@ path = "lib.rs" [profile.release] opt-level = 2 +[profile.bench] +opt-level = 2 # These options are controlled from our rustc wrapper script, so turn them off # here and have them controlled elsewhere. diff --git a/src/rustc/test_shim/Cargo.toml b/src/rustc/test_shim/Cargo.toml index bf57665048..87f2ccd51e 100644 --- a/src/rustc/test_shim/Cargo.toml +++ b/src/rustc/test_shim/Cargo.toml @@ -14,6 +14,8 @@ path = "lib.rs" [profile.release] opt-level = 2 +[profile.bench] +opt-level = 2 # These options are controlled from our rustc wrapper script, so turn them off # here and have them controlled elsewhere. diff --git a/src/rustc/test_shim/lib.rs b/src/rustc/test_shim/lib.rs index a626c9440d..d614d967e3 100644 --- a/src/rustc/test_shim/lib.rs +++ b/src/rustc/test_shim/lib.rs @@ -9,3 +9,7 @@ // except according to those terms. 
// See comments in Cargo.toml for why this exists + +#![feature(test)] + +extern crate test; diff --git a/src/rustllvm/PassWrapper.cpp b/src/rustllvm/PassWrapper.cpp index d6985719ac..b3d4e35d7b 100644 --- a/src/rustllvm/PassWrapper.cpp +++ b/src/rustllvm/PassWrapper.cpp @@ -97,6 +97,75 @@ LLVMRustAddPass(LLVMPassManagerRef PM, Pass *pass) { pm->add(pass); } +#ifdef LLVM_COMPONENT_X86 +#define SUBTARGET_X86 SUBTARGET(X86) +#else +#define SUBTARGET_X86 +#endif + +#ifdef LLVM_COMPONENT_ARM +#define SUBTARGET_ARM SUBTARGET(ARM) +#else +#define SUBTARGET_ARM +#endif + +#ifdef LLVM_COMPONENT_AARCH64 +#define SUBTARGET_AARCH64 SUBTARGET(AArch64) +#else +#define SUBTARGET_AARCH64 +#endif + +#ifdef LLVM_COMPONENT_MIPS +#define SUBTARGET_MIPS SUBTARGET(Mips) +#else +#define SUBTARGET_MIPS +#endif + +#ifdef LLVM_COMPONENT_POWERPC +#define SUBTARGET_PPC SUBTARGET(PPC) +#else +#define SUBTARGET_PPC +#endif + +#define GEN_SUBTARGETS \ + SUBTARGET_X86 \ + SUBTARGET_ARM \ + SUBTARGET_AARCH64 \ + SUBTARGET_MIPS \ + SUBTARGET_PPC + +#define SUBTARGET(x) namespace llvm { \ + extern const SubtargetFeatureKV x##FeatureKV[]; \ + extern const SubtargetFeatureKV x##SubTypeKV[]; \ + } + +GEN_SUBTARGETS +#undef SUBTARGET + +extern "C" bool +LLVMRustHasFeature(LLVMTargetMachineRef TM, + const char *feature) { + TargetMachine *Target = unwrap(TM); + const MCSubtargetInfo *MCInfo = Target->getMCSubtargetInfo(); + const FeatureBitset &Bits = MCInfo->getFeatureBits(); + const llvm::SubtargetFeatureKV *FeatureEntry; + +#define SUBTARGET(x) \ + if (MCInfo->isCPUStringValid(x##SubTypeKV[0].Key)) { \ + FeatureEntry = x##FeatureKV; \ + } else + + GEN_SUBTARGETS { + return false; + } +#undef SUBTARGET + + while (strcmp(feature, FeatureEntry->Key) != 0) + FeatureEntry++; + + return (Bits & FeatureEntry->Value) == FeatureEntry->Value; +} + extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(const char *triple, const char *cpu, diff --git a/src/rustllvm/llvm-auto-clean-trigger b/src/rustllvm/llvm-auto-clean-trigger index d6e8852cfe..4017c3856c 100644 --- a/src/rustllvm/llvm-auto-clean-trigger +++ b/src/rustllvm/llvm-auto-clean-trigger @@ -1,4 +1,4 @@ # If this file is modified, then llvm will be forcibly cleaned and then rebuilt. # The actual contents of this file do not matter, but to trigger a change on the # build bots then the contents should be changed so git updates the mtime. 
-2016-03-18 +2016-04-28 diff --git a/src/snapshots.txt b/src/snapshots.txt deleted file mode 100644 index 61dfd4f8f8..0000000000 --- a/src/snapshots.txt +++ /dev/null @@ -1,2325 +0,0 @@ -S 2016-03-18 235d774 - linux-i386 0e0e4448b80d0a12b75485795244bb3857a0a7ef - linux-x86_64 1273b6b6aed421c9e40c59f366d0df6092ec0397 - macos-i386 9f9c0b4a2db09acbce54b792fb8839a735585565 - macos-x86_64 52570f6fd915b0210a9be98cfc933148e16a75f8 - winnt-i386 7703869608cc4192b8f1943e51b19ba1a03c0110 - winnt-x86_64 8512b5ecc0c53a2cd3552e4f5688577de95cd978 - openbsd-x86_64 c5b6feda38138a12cd5c05574b585dadebbb5e87 - freebsd-x86_64 390b9a9f60f3d0d6a52c04d939a0355f572d03b3 - -S 2016-02-17 4d3eebf - dragonfly-x86_64 765bb5820ad406e966ec0ac51c8070b656459b02 - linux-i386 5f194aa7628c0703f0fd48adc4ec7f3cc64b98c7 - linux-x86_64 d29b7607d13d64078b6324aec82926fb493f59ba - macos-i386 4c8e42dd649e247f3576bf9dfa273327b4907f9c - macos-x86_64 411a41363f922d1d93fa62ff2fedf5c35e9cccb2 - winnt-i386 0c336d794a65f8e285c121866c7d59aa2dd0d1e1 - winnt-x86_64 27e75b1bf99770b3564bcebd7f3230be01135a92 - openbsd-x86_64 ac957c6b84de2bd67f01df085d9ea515f96e22f3 - freebsd-i386 4e2af0b34eb335e173aebff543be693724a956c2 - freebsd-x86_64 f38991fbb81c1cd8d0bbda396f98f13a55b42804 - -S 2015-12-18 3391630 - bitrig-x86_64 6476e1562df02389b55553b4c88b1f4fd121cd40 - freebsd-i386 7e624c50494402e1feb14c743d659fbd71b448f5 - freebsd-x86_64 91724d4e655807a2a2e940ac50992ebeaac16ea9 - dragonfly-x86_64 e74d79488e88ac2de3bd03afd5959d2ae6e2b628 - linux-i386 a09c4a4036151d0cb28e265101669731600e01f2 - linux-x86_64 97e2a5eb8904962df8596e95d6e5d9b574d73bf4 - macos-i386 ca52d2d3ba6497ed007705ee3401cf7efc136ca1 - macos-x86_64 3c44ffa18f89567c2b81f8d695e711c86d81ffc7 - openbsd-x86_64 6c8aab2c8a169274942f9a15e460069a3ff64be9 - winnt-i386 f9056ebd3db9611d31c2dc6dc5f96c7208d5d227 - winnt-x86_64 a85a40e535d828016181d3aa40afe34c3e36ab8c - -S 2015-08-11 1af31d4 - bitrig-x86_64 739e0635cd5a1b3635f1457aae3ef6390ea9a7a8 - freebsd-i386 3cd4a44fb97b3135be3d1b760bea604a381e85dc - freebsd-x86_64 de1f36592bac0eeb90c049a8421246652c511b9e - dragonfly-x86_64 08e7dd9d77434b377c0905cc5f8c705b2daf3a0e - linux-i386 e2553bf399cd134a08ef3511a0a6ab0d7a667216 - linux-x86_64 7df8ba9dec63ec77b857066109d4b6250f3d222f - macos-i386 29750870c82a0347f8b8b735a4e2e0da26f5098d - macos-x86_64 c9f2c588238b4c6998190c3abeb33fd6164099a2 - openbsd-x86_64 9cae790c4ca19b1b29a048605ce249fe1c20a498 - winnt-i386 dc01ac690efdcc40eb6fd8154205a1f21893eb6c - winnt-x86_64 1464a2e54768498222277f904ccd559e27608950 - -S 2015-07-26 a5c12f4 - bitrig-x86_64 8734eb41ffbe6ddc1120aa2910db4162ec9cf270 - freebsd-i386 2fee22adec101e2f952a5548fd1437ce1bd8d26f - freebsd-x86_64 bc50b0f8d7f6d62f4f5ffa136f5387f5bf6524fd - linux-i386 3459275cdf3896f678e225843fa56f0d9fdbabe8 - linux-x86_64 e451e3bd6e5fcef71e41ae6f3da9fb1cf0e13a0c - macos-i386 428944a7984c0988e77909d82ca2ef77d96a1fbd - macos-x86_64 b0515bb7d2892b9a58282fc865fee11a885406d6 - winnt-i386 22286e815372c3e03729853af48a2f6d538ed086 - winnt-x86_64 f13aa3c02a15f8e794b9e180487bdf04378f8f04 - -S 2015-07-17 d4432b3 - bitrig-x86_64 af77768e0eb0f4c7ec5a8e36047a08053b54b230 - freebsd-i386 b049325e5b2efe5f4884f3dafda448c1dac49b4f - freebsd-x86_64 a59e397188dbfe67456a6301df5ca13c7e238ab9 - linux-i386 93f6216a35d3bed3cedf244c9aff4cd716336bd9 - linux-x86_64 d8f4967fc71a153c925faecf95a7feadf7e463a4 - macos-i386 29852c4d4b5a851f16d627856a279cae5bf9bd01 - macos-x86_64 1a20259899321062a0325edb1d22990f05d18708 - winnt-i386 df50210f41db9a6f2968be5773b8e3bae32bb823 - winnt-x86_64 
d7774b724988485652781a804bdf8e05d28ead48 - -S 2015-05-24 ba0e1cd - bitrig-x86_64 2a710e16e3e3ef3760df1f724d66b3af34c1ef3f - freebsd-x86_64 370db40613f5c08563ed7e38357826dd42d4e0f8 - linux-i386 a6f22e481eabf098cc65bda97bf7e434a1fcc20b - linux-x86_64 5fd8698fdfe953e6c4d86cf4fa1d5f3a0053248c - macos-i386 9a273324a6b63a40f67a553029c0a9fb692ffd1f - macos-x86_64 e5b12cb7c179fc98fa905a3c84803645d946a6ae - winnt-i386 18d8d76c5380ee2247dd534bfb2c4ed1b3d83461 - winnt-x86_64 ef27ce42af4941be24a2f6097d969ffc845a31ee - -S 2015-04-27 857ef6e - bitrig-x86_64 d28e2a5f8b478e69720703e751774f5e728a8edd - freebsd-x86_64 18925db56f6298cc190d1f41615ab5871de1dda0 - linux-i386 0bc8cffdce611fb71fd7d3d8e7cdbfaf748a4f16 - linux-x86_64 94089740e48167c5975c92c139ae9c286764012f - macos-i386 54cc35e76497e6e94fddf38d6e40e9d168491ddb - macos-x86_64 43a1c1fba0d1dfee4c2ca310d506f8f5f51b3f6f - winnt-i386 3db3adf2eaf37075043ec4ee41a5ea9b88810c44 - winnt-x86_64 82b6eaea67741517ce6d7901ad2a9fd223c3aaf1 - -S 2015-03-27 5520801 - bitrig-x86_64 55a69b0ae5481ccda54c2fcfc54025a0945c4f57 - freebsd-x86_64 0910bbad35e213f679d0433884fd51398eb3bc8d - linux-i386 1ef82402ed16f5a6d2f87a9a62eaa83170e249ec - linux-x86_64 ef2154372e97a3cb687897d027fd51c8f2c5f349 - macos-i386 0310b1a970f2da7e61770fd14dbbbdca3b518234 - macos-x86_64 5f35d9c920b8083a7420ef8cf5b00d5ef3085dfa - winnt-i386 808b7961f85872f04ec15ad0d3e9e23ae9bc0c3b - winnt-x86_64 903a99a58f57a9bd9848cc68a2445dda881f1ee8 - -S 2015-03-25 a923278 - bitrig-x86_64 e56c400a04bca7b52ab54e0780484bb68fa449c2 - freebsd-x86_64 cd02c86a9218da73b2a45aff293787010d33bf3e - linux-i386 da50141558eed6dabab97b79b2c6a7de4f2d2c5e - linux-x86_64 bca03458d28d07506bad4b80e5770b2117286244 - macos-i386 522d59b23dd885a45e2c5b33e80e76240bb2d9af - macos-x86_64 82df09d51d73d119a2f4e4d8041879615cb22081 - winnt-i386 5056e8def5ab4f4283b8f3aab160cc10231bb28d - winnt-x86_64 3f6b35ac12625b4b4b42dfd5eee5f6cbf122794e - -S 2015-03-17 c64d671 - bitrig-x86_64 41de2c7a69a1ac648d3fa3b65e96a29bdc122163 - freebsd-x86_64 14ced24e1339a4dd8baa9db69995daa52a948d54 - linux-i386 200450ad3cc56bc715ca105b9acae35204bf7351 - linux-x86_64 a54f50fee722ba6bc7281dec3e4d5121af7c15e3 - macos-i386 e33fd692f3808a0265e7b02fbc40e403080cdd4f - macos-x86_64 9a89ed364ae71aeb9d659ad223eae5f5986fc03f - winnt-i386 8911a28581e490d413b56467a6184545633ca04a - winnt-x86_64 38ce4556b19472c23ccce28685e3a2ebefb9bfbc - -S 2015-03-07 270a677 - bitrig-x86_64 4b2f11a96b1b5b3782d74bda707aca33bc179880 - freebsd-x86_64 3c147d8e4cfdcb02c2569f5aca689a1d8920d17b - linux-i386 50a47ef247610fb089d2c4f24e4b641eb0ba4afb - linux-x86_64 ccb20709b3c984f960ddde996451be8ce2268d7c - macos-i386 ad263bdeadcf9bf1889426e0c1391a7cf277364e - macos-x86_64 01c8275828042264206b7acd8e86dc719a2f27aa - winnt-i386 cb73ac7a9bf408e8b5cdb92d595082a537a90794 - winnt-x86_64 b9b47e80101f726ae4f5919373ea20b92d827f3c - -S 2015-02-25 880fb89 - bitrig-x86_64 8cdc4ca0a80103100f46cbf8caa9fe497df048c5 - freebsd-x86_64 f4cbe4227739de986444211f8ee8d74745ab8f7f - linux-i386 3278ebbce8cb269acc0614dac5ddac07eab6a99c - linux-x86_64 72287d0d88de3e5a53bae78ac0d958e1a7637d73 - macos-i386 33b366b5287427a340a0aa6ed886d5ff4edf6a76 - macos-x86_64 914bf9baa32081a9d5633f1d06f4d382cd71504e - winnt-i386 d58b415b9d8629cb6c4952f1f6611a526a38323f - winnt-x86_64 2cb1dcc563d2ac6deada054de15748f5dd599c7e - -S 2015-02-19 522d09d - freebsd-x86_64 7ea14ef85a25bca70a310a2cd660b356cf61abc7 - linux-i386 26e3caa1ce1c482b9941a6bdc64b3e65d036c200 - linux-x86_64 44f514aabb4e4049e4db9a4e1fdeb16f6cee60f2 - macos-i386 
[Removed snapshot registry: "S <date> <commit>" headers, each followed by per-platform snapshot SHA1 hashes for freebsd-x86_64, linux-i386, linux-x86_64, macos-i386, macos-x86_64, winnt-i386 and, in later entries, winnt-x86_64. The entries in this span run from 2015-02-17 (f1bb6c2) back to 2011-12-19 (edf6e1e). First complete entry, restored to one removed line per hash:

-S 2015-02-17 f1bb6c2
- freebsd-x86_64 59f3a2c6350c170804fb65838e1b504eeab89105
- linux-i386 191ed5ec4f17e32d36abeade55a1c6085e51245c
- linux-x86_64 acec86045632f4f3f085c072ba696f889906dffe
- macos-i386 9d9e622584bfa318f32bcb5b9ce6a365febff595
- macos-x86_64 e96c1e9860b186507cc75c186d1b96d44df12292
- winnt-i386 3f43e0e71311636f9143ad6f2ee7a514e9fa3f8e
- winnt-x86_64 26ef3d9098ea346e5ff8945d5b224bb10c24341d

The remaining removed entries in this span follow the same layout, one snapshot per date with a short commit id and a SHA1 per platform.]
be1da455be1aad6305185d120dd3594413ae66de - linux-x86_64 ef255409d0cc6079d0a13d5bb7d436832dd294bc - macos-x86_64 520f5af26e71a74d492d4d4d943974dd5404f798 - -S 2011-12-14 5b35c9c - winnt-i386 1ffa2c58ff2f35a058e3da755febb173917b95ff - linux-i386 16cceed41cdbf33b1442349b83aafacf62f9a827 - macos-i386 ab19794713acc90e6918f020b185c43ca30fa4ea - linux-x86_64 a13e0e971e058b430d0015c564f6529c9318e8fc - macos-x86_64 19cdccfe635ed69bf5d5d65a9ae8267a8ed2aa95 - -S 2011-12-12 1c1bc2f - winnt-i386 550dc539e8ab6837c52b3a35e6119061490190b1 - linux-i386 66c5e97e3072d8c88a52d986d1fadbd43de615a5 - macos-i386 c3db65606d7bbea2ae0482b20e177f7463dc58ef - linux-x86_64 c5bbd08e423844ee938c8f6824ba2b3b4dc851b0 - macos-x86_64 29accda0ace162169408357371482ddd044b6dd3 - -S 2011-12-07 3ccdd2a - linux-i386 7c639851d8a4ae43f781ad4592b6d3b6e0f4837b - macos-i386 8fdf95481943aff1953d93eed31528c221d33373 - winnt-i386 628f0421557d0bd34db38acb0f5ce61d236b723d - linux-x86_64 25c9701d38d39e620489c7368ec6b7277cac5164 - macos-x86_64 8e672750ca0dac91782b4fadd36545f0b88e9ccf - -S 2011-12-06 1652b92 - winnt-i386 345aeebce0010d86ad03a094928614117d150849 - linux-i386 c5d3ad9c54e8d2a3899ca66de4092f7fa2b3ab79 - macos-i386 7d2384d0432c9c6334216590499f1d2bec98aa23 - linux-x86_64 28e9e07d4570c16832c2440da8b6517938f2420e - macos-x86_64 a0177101efc1ec22717cad45d4b74769d2d6b16d - -S 2011-11-30 0ed5c84 - linux-i386 9a4482c86bc4afd2ee35af8200585cbfc06e2df9 - macos-i386 0792f1b6d7c16ac8982cfc4d9ed1cff3395472df - winnt-i386 291cf01a3b8fa068e99f40d40047b16bc6d498cc - linux-x86_64 3d18ba4932c11f7ebd917661838eb1c8581c51ac - macos-x86_64 1479b9aaa90aa6b43df44877443618e50c59a816 - -S 2011-11-25 9a188b2 - linux-i386 4f1c315b232a6031b1f1d23732a85c49b43290c2 - macos-i386 8a1419d8801278b01edf658de905391d6322a2b3 - winnt-i386 4583bf9b81a29f238afd3cac3bec14a09c35e983 - -S 2011-11-24 05b2cb8 - linux-i386 696dc13ec6036be2eca4250b02f3305d3908c86a - macos-i386 5a10c8dd45a210ab9296452a4abc90aec786ca08 - winnt-i386 de78b1f875487d7048dad28938a4ba04804479da - -S 2011-11-22 148a6c1 - linux-i386 a05bcbd556a08b82b13b62ae67b30721bd21d1c2 - macos-i386 7cd6a80c4576ca31d762278840356ab68a8998ad - winnt-i386 5a7ea88aacead777f9f35a94eccb0a22d5502f48 - -S 2011-11-18 196b2b9 - linux-i386 0c02c31345b4f8243805b225a8a10ed1afecf097 - macos-i386 e3713a44d502d245a86966f0639f50d3552114d8 - winnt-i386 9ba9cbc602ad2811f8e11000234a5952985273ba - -S 2011-11-18 9cf48d3 - linux-i386 49d9af29949865483e03d7fa996be22723e94b3a - macos-i386 b5a38e68e9f0e9ecaee757311355e3fdbb629126 - winnt-i386 e6b2c8058653a82ec965efbd9bb2854e2a95aae1 - -S 2011-11-17 dd275cd - linux-i386 16fc193c2b86b6565035e04d18c0165b3a1d095b - macos-i386 fe4ff8fef4a7e13131cb6c8686664cd954e8d5f4 - winnt-i386 a84ba65ca31138e7bbbd2e7f6b080a6684200901 - -S 2011-11-17 a92699f - linux-i386 160fb4e75ebedc4b265169b6cbb161a5aab58c6a - macos-i386 ef9f703a15d99d69d1db24c674d2ed9693c4cef6 - winnt-i386 207595af91428788e023e311154b1665e1643608 - -S 2011-11-16 045a437 - linux-i386 41cbc23f21b7978b8a029b1ddabadf67638c412a - macos-i386 938a3ec1f3d5e4634617e9b0ac8a451fb939b099 - winnt-i386 b3f3987440fa44f87fa41c914a4b2e806032d441 - -S 2011-11-16 fba0df7 - linux-i386 8a8f56fa0012d99c6defc81dee49a668143d1dbe - macos-i386 193f3b68af09199839ba64bd497cfea6bdd7294f - winnt-i386 d65f1ac41f247d3a1ef069baff55ae7d9ae23937 - -S 2011-11-09 d26a96d - linux-i386 015a7756403f41031382025be4006602959baad6 - macos-i386 6ba52c813b2d73445de5a4bda021c7388c933292 - winnt-i386 55139711077fb41b99927fe19fc7feb68e8ee23b - -S 2011-11-09 4a4d31c - linux-i386 
84062ae5c7742c4863abc21743fc771ebc476463 - macos-i386 94b3559330aa74b87a9871405408d43c03e78a88 - winnt-i386 6a4debbccafe4acc860e530e9af6e0744037d7e2 - -S 2011-11-01 35affdf - linux-i386 c6f35bdfc28633ced3170bd9d1a70f3ca5d4991a - macos-i386 7b221e369a1bd6a9c1b1b3137e837cdb402a1626 - winnt-i386 95dfffde8e306f49e31a59d1ccc5af008891a911 - -S 2011-10-31 bd33951 - linux-i386 ba99fdd5ca1b67ced60c97eb2f167d5932a91deb - macos-i386 d1256e5fd4a3bbd7e269371cae53099597394b64 - winnt-i386 301fc3ef74e5a0f7e9d16d20970a42ee97dea46b - -S 2011-10-28 3397fa4 - linux-i386 4031e37753bde12a8016ade37e887b7a5d2a7944 - macos-i386 4059a95efb3886e20be40c0303d957fa6bce3904 - winnt-i386 4fc1064613c4876083c67dedd7dcd57872c7dce3 - -S 2011-10-25 2884c72 - linux-i386 1022e76be6a1b22e987676c08893700f5fab84d2 - macos-i386 7f87ac89f14f1e66d7a43a2cb53d4d11fbb47a8e - winnt-i386 d5f311eb0a8020d932a979d04619c2b80b319a2b - -S 2011-10-23 ff669cf - linux-i386 ea21824432c4cb3b6b1a03c761580062eb5706c8 - macos-i386 c3fe2a7f4cd381674356d6dae9fe47ab85e8ffaf - winnt-i386 d98b7a3ec09eec3b0189e2e47f92dad8bdf0d3b7 - -S 2011-10-21 020726c - linux-i386 50c629628e7b1b6e65105255d2347d4ec14103b5 - macos-i386 69880d865d66d3a430015dd85ba3e89d61fa0b55 - winnt-i386 3c41b7e38fd516af253bc4b03756ba18f45bc8e3 - -S 2011-10-21 f134261 - linux-i386 0e5caf40ee5e7dc8cee284dc87d08e1c4424197e - macos-i386 afc8fd483584b46389ca27682081faa4a48f1e33 - winnt-i386 f43c7897822b9e9893833229b143fa018d29e47f - -S 2011-10-20 c10eb22 - linux-i386 aa2312a5b1e62ecaf0ee53b86ef6db6025b08404 - macos-i386 0fc006e00bc602ca927d9e7ec79ff358a82aad59 - winnt-i386 a7423c7fa9fad54f5659f02bafa1d622634ec9d3 - -S 2011-10-20 ef63f09 - linux-i386 6fd86f268892e8d023d057406278e11cff2baf2a - macos-i386 b20875c084e3c1c79d7cca477cdd418027b6f26f - winnt-i386 50d3a7046e886971120e9bd622bd8f92bb315f73 - -S 2011-10-18 ac276f7 - linux-i386 158ff44edf495bb8d5d585be66e3a7bb37010068 - macos-i386 31f2aaf90cec5de51e13d9ba9696a4193065e28e - winnt-i386 fc4a87b5b52f746e95c09716e456904400b54c5f - -S 2011-10-12 40fa88f - linux-i386 76a69cd5560a674fc2899a7c2201faadad926e92 - macos-i386 a9921da04e24781f1fafe353211ced1d909303b0 - winnt-i386 df2f0977f3a87ed1d0c63b8773e744af44af4d9f - -S 2011-10-11 cfa2346 - linux-i386 ef3dc204e39386721acc6fc075347d4b328bc1d3 - macos-i386 6a414ee6df5d310f8a6bce02d0915d9f42a1dc7d - winnt-i386 01f94fc53594fdd0feb5e1aca1d7086f0d417682 - -S 2011-10-09 941d5e7 - linux-i386 b51615a008e07c954b319f4cd4a63a6e6baf801e - macos-i386 f0339cd9c1ce4d45f197dfaf7cfa91491d16b636 - winnt-i386 26c1e36b42da6a2e2cdcefcb23cecc39117b3770 - -S 2011-10-06 e4068f6 - linux-i386 51b2b89cabe88850e09ffd2a928b47e40964637f - macos-i386 4f5ae86cac304efddc8ee29de1491e19672d9715 - winnt-i386 d17a5ed3e0233564c534323099b462d048bda354 - -S 2011-10-06 6a42705 - linux-i386 664ea77500a7efee3368e0da69a6a92b53198c8d - macos-i386 f967c9eaf9617c2ca9b489bbb01f9d0f751f62ee - winnt-i386 53a30bd8e03b61ecb132883ef1e01ec1c4356786 - -S 2011-10-05 8d8b48a - linux-i386 ac787eb564adfab43ff65eadbaf56945d117a349 - macos-i386 bc7747c95e20bf5cfac801cca0c2d93bfc134a56 - winnt-i386 2d162f07d96eb005c22a2725b11b24b8470c795e - -S 2011-09-30 821dd6c - linux-i386 cbeb6c98137ec14512a26427820ad1405bfb41a7 - macos-i386 98600346722a3f27adad2ec1f6112aa41403c7f8 - winnt-i386 cfa9409039683a2de2d3337abdb53437f59b267b - -S 2011-09-29 80829af - linux-i386 0bf17c8bfdf7e3400cd546577d4f57b7be821441 - macos-i386 9b40e0c830396469811e9e6e0b430d0d51876a27 - winnt-i386 735b189037ab45d88f937cbe391c4e30aaf1182b - -S 2011-09-28 a3a2737 - linux-i386 
d255b2ec7205209b7d7fe71be84def6306491387 - macos-i386 1ff3b71ccfc7e404c813b68a7a257406f292b819 - winnt-i386 d19a9ffffbdfe53a9513b45973f8f7e2286ca1d1 - -S 2011-09-15 b843cf2 - linux-i386 2678a89ee598e010dfda6eb1e03a1bfeb7c38056 - macos-i386 89140ea0fd535985d8c70bd64f93f54d3a06c04c - winnt-i386 0fb5e32b0bf023880d186c8ee6c591be8aee6203 - -S 2011-09-12 3667137 - linux-i386 014e4cf4c91ffb301a91e7c1429daff4b1423b64 - macos-i386 926c41945f4b38c9586e825a13228a524193593a - winnt-i386 ce465c55fb24bd488960c596d7802b01ab601d80 - -S 2011-09-12 8b7909a - linux-i386 08fbadbe671ce8069f3e9aec1654102f3681d666 - macos-i386 c867e8341cb697b4d52c771d0f6361e62d98cbe6 - winnt-i386 f77fc11b5f6d5504321500ca682d66af9b54233a - -S 2011-09-02 1b67d21 - linux-i386 121ceb51d20ae5d10aded3f16a722f203af9e8b2 - macos-i386 49208265ecac4692699e55d7275dda97ab4d6872 - winnt-i386 edccc79edd5027e7bb8e794b1d15ff6709b3bd31 - -S 2011-09-01 6972f07 - linux-i386 fbc5cd318fe1914663729bb07dd9ce125d95ec4e - macos-i386 e494a708408bc54e31ce009e78fb690a2f0b4d42 - winnt-i386 b0248e68346a1724c673a2fa5bc5a73eda2d821f - -S 2011-09-01 91ea257 - linux-i386 dcea4ce8001eaba3e3b2c404a147fbad47defe96 - macos-i386 0807e3a7c2c88dbf459a2a78601403090d38c01d - winnt-i386 03d0fd04f6b080d9f601bb1a3711c98f9eab2490 - -S 2011-08-30 be2ad97 - linux-i386 5045aacf211215dac5d34c7a83c381cab1c5ff09 - macos-i386 0610bb65df5135f7a1c9da6bd41f5cc2c6d65a7c - winnt-i386 02ed92a9951c2b7f4cbbe1633c90da980af5eb98 - -S 2011-08-29 c9c5ee2 - linux-i386 76885aac9e4823e5cbb81ec77e37b6455919cdfa - macos-i386 d9b1de77ad70a60140f9fd052347efc818b76d8f - winnt-i386 bed0aeb9cdf7831a7c1d9337a2b351699f0d25cd - -S 2011-08-27 5f57a50 - linux-i386 b1deeaf0fc9e609d0bc760538f237f818d12a4de - macos-i386 533e6f7c7d0a7abaf6cf865d5ac9da400f89a073 - winnt-i386 f331533e2322c777d3b1cffa027e86fc71ab67dc - -S 2011-08-26 844e2d7 - linux-i386 a896a6fe1bfbf38fac66db71edfa3750871edd55 - macos-i386 d8d5c6144870389d9c233684576ac7b816e82655 - winnt-i386 570d7515403f9bd5afa41e98f027ceedba88d588 - -S 2011-08-25 e241f29 - linux-i386 af777f99bf51da80f24c53092773546868b27d02 - macos-i386 5f460da5988e469ced04670dc4bcfb9b95128717 - winnt-i386 7d4a4ff621b5e7a4a1408d053137dcf80316d7ee - -S 2011-08-24 5b5689d - linux-i386 a5434acd4f6ac028aa6744dadec19182d8ce5ee1 - macos-i386 d72b5ba4b72a4487ac7d0c22e38e8e85243c04f2 - winnt-i386 523474eff702d8a46b9d818a645086ca988cdbab - -S 2011-08-19 390dd38 - linux-i386 634f15a8fba176101a594ffdd089d9bbb35e6fd9 - macos-i386 79743ce4bdd02d0480c3f377989747fd69001f53 - winnt-i386 b86404f4e0235165f315c8e7b0d54cd4804ba5ad - -S 2011-08-16 014c692 - linux-i386 2888c6feec5a5f7ad4364f96034a5864ab1e57ec - macos-i386 f8d52502757c45cff55560377c945a629149b12d - winnt-i386 ffb7aa98263cb622b72485b2b9fa683fddacf2b8 - -S 2011-08-15 42034f5 - linux-i386 6539fc262a4059fbf931e8c9aa39e2e6a2c49dfd - macos-i386 805c98be1d074c4ed4cf5fe2dfb5ce3cd30a0c7b - winnt-i386 85fd36620fae5d19215dbd90a5f26c957a50bf70 - -S 2011-08-14 83128f4 - linux-i386 8074d0b9d2a2f5084e4920a72262af28d0521b66 - macos-i386 8e52161f00f24de345d727a1671bb20597165687 - winnt-i386 0a4b419ed326b9da80a93d6d23db24a40bb37c8b - -S 2011-08-09 e5533a5 - linux-i386 6fedd9943689fa3457e77e62d0f8b100fecfe69c - macos-i386 f87e7dc9d27856ee39eea9a2c44b6b4d25113ac5 - winnt-i386 c732be8b6ba6bd0092028fa6587824a5a112008e - -S 2011-08-04 6bb6922 - linux-i386 059ba100737c567c531b69bd7bc446aa890b0123 - macos-i386 c09d8f1f93c45cb1d94d04678f5c821bf0d1ade8 - winnt-i386 d15cef6d382941d72a1a94e26eeba5ce8ad40786 - -S 2011-08-02 091a2c2 - linux-i386 
51ef5f0a945e26609b2d0a682e26c72d98fcfe3b - macos-i386 f1513bed98edaf2336b92ecfce21856efd1b8a71 - winnt-i386 e51ea4ca6d2da9781e029b779b6a18596200c5f6 - -S 2011-07-29 5adf87a - linux-i386 d5b2a779d408bce4582af12e78c23b7b3feb7d38 - macos-i386 12fd2d9201fa4f890f96eb663c87a5152e01ea0f - winnt-i386 972c6cd424e14cb0453ac0c16d0a4f8f02bc7d77 - -S 2011-07-28 0ea5a8a - linux-i386 ac5f6977c64066a6a8a15aa011a236a3f0ec9deb - macos-i386 bb88c3657affc05008cee0b8f6cd5843d6856516 - winnt-i386 745558c1ddbc93b297529698f8dedaeec4d89e49 - -S 2011-07-27 7647cf7 - linux-i386 a535219fbeb69a058f128f465f25718ba82adc0e - macos-i386 ec8643d70a708ef6fa2e99ceac811052193927b6 - winnt-i386 bc9e49cc88a6663db5a70f84c74807baca436fe0 - -T 2011-07-27 f45524d - linux-i386 878cd9899277f467cde5eb80a62be01573677d5f - macos-i386 3a3e764c78568fe1b8e820a5bcab5db76b1bf580 - winnt-i386 8803e40ff74e5ee9a4fbf93e17d8252f4fbbf358 - -S 2011-07-21 c62a9fe - linux-i386 3c8f658093adea1cb0e5b84150d3d342571013b4 - macos-i386 1833b475fd1adec3df02ab3dd2fb7c6f280a33a5 - winnt-i386 f6b6b42d00f25d82603b9466229d3229bfce3317 - -S 2011-07-20 8a7f2e0 - linux-i386 c4881df0302a4a1e126a7ea847e1d7b4de49b943 - macos-i386 d5d486af50fade251fd04a24062385d67cfff30a - winnt-i386 0c75c31bc950381cfbaa6a59bc71b3646e484328 - -S 2011-07-19 0ff76eb - linux-i386 56c91388d013b2934738f25d122e5587fa0dcb99 - macos-i386 55baa51c849fa56aff3d30d62f9b0d29d60f0f02 - winnt-i386 09e24c46bc9630ae16ee30ed642d8dd1440e8a56 - -S 2011-07-14 7376e75 - linux-i386 d2ec6a1aa586926f8f1a5f400b5edb5f9803c7b2 - macos-i386 858c1acfc9676778b8dd6f82dd979d26cad34799 - winnt-i386 5ad5c8a467d48f1f988d909ef3e4135834f82bb9 - -S 2011-07-11 a84310a - linux-i386 18e6caa490241d63ede86d289e6553bfa1c58953 - macos-i386 e775077d87e508020d8e0282f944d66c89cae630 - winnt-i386 5f428071e8dbacbf641e5ef006f3e9db51fd3ee8 - -S 2011-07-11 f666c97 - linux-i386 434346526e3feecf80ceb58f132fb4ea6cbfec0a - macos-i386 f643f88b1414bf43c1821a9d3eea0bdc8df2f68d - winnt-i386 67e09bf1fb2ae0894cfc99fabee1ed716f218486 - -S 2011-07-09 f42c947 - linux-i386 9cb82d02baaccc6efc1b5dff5f3d41aa0c8546e8 - macos-i386 5fd5b3fff6bdaaffc5a55d208370d3764076b54e - winnt-i386 5efb0fbf18317c1055eaa7d135601015d0c1380b - -S 2011-07-08 0864a22 - linux-i386 84fdcb1989428167465af69436653aa719bddaac - macos-i386 eacb54e79356d891d8652c7a619d70eefc172010 - winnt-i386 c695cbd3bf882b8d973fa6fadaca4ed79360f5bd - -S 2011-07-07 64595a5 - linux-i386 bd5837c265e2f07034f01c016298aefa5697e3a5 - macos-i386 6292b0c12e24f51b0270a6d07d3ed18ffe5c3143 - winnt-i386 07e72b6b74542f235a62a486984e4d2ea2970dfd - -S 2011-07-07 6ee1ffe - linux-i386 5080aaffc36748f41fa8bf8030ef5a51976e673f - macos-i386 34db4f38c40ef5e7d08eb3e994def595297589bb - winnt-i386 ed541e6e657731d10df04048b357fda5db63ad08 - -S 2011-07-06 8e585e7 - linux-i386 df3dd532da83012aaae2a74eb55a43536292cbae - macos-i386 f53c000d1fc9963fe334ef3e3560b70d5860e845 - winnt-i386 65afdf81c6a243b144e2070ea1c9025f4c3bdd9b - -S 2011-07-05 6853e04 - linux-i386 14c749c26e4b9c1fb04d7ea7931f4a549ad80243 - macos-i386 84f23c442bbe2a6d964a97f73e9ee6927ef07177 - winnt-i386 7792697ba4482d0eeefea17ef2d3420cbf8f5606 - -S 2011-06-29 8655349 - linux-i386 23313e17a4535fb484f4c26f0d1e38622a43e78e - macos-i386 bd5c9c5dcf71fd9767ae47009c45714dd32982ba - winnt-i386 6c769bf178a9b655fc25df8b432141cbdb11b5ee - -S 2011-06-17 175fd8e - linux-i386 ba92b05deee82f2d8f954cf014de1faec1167e36 - macos-i386 6bacf4769deda26b614b4ba36bcb46f65649cce0 - winnt-i386 057c6ae72ec2059806e33bc3578c71534779eac5 - -S 2011-06-16 15f71b3 - linux-i386 
c81ac692140484ee8944ab50ca69347abdca30b4 - macos-i386 ac27541e4438956cb29bd70c8d353c487a315634 - winnt-i386 1661fef49d0fcf4c1a4944b803fa395ea6b371b7 - -S 2011-06-15 02018fe - linux-i386 74661121d9ea02881abd77e488f448038268c4df - macos-i386 4942eeb8897d75cbdb8cf15bef3b9bc941bc9f34 - winnt-i386 b8cf72007bf751062de0802984e7e5a68b66ae61 - -S 2011-06-15 92bd350 - linux-i386 145b68488a780623cc31c5e043c6770007273f80 - macos-i386 d3149431f12603927bb211f53f8572fc188eb7d2 - winnt-i386 8ee37c22195ce180961db1fad013a0743cf97cdd - -S 2011-06-15 01ea27b - linux-i386 10a8fe7ed1ea30508a06f2e883e2177e14af7b1c - macos-i386 d431f6e57c331c12fd1ec0ffcb2f567ef5e22c35 - winnt-i386 096e4dd5ddfef37f9e3c3cb55037041b2198cc9a - -S 2011-06-14 c3015b8 - linux-i386 b091d33519a1bbf6cf12ee1954a1607940ae3915 - macos-i386 7d47cece095a7b30f5a3e2467417652d3b2353e5 - winnt-i386 27d57fd96d2c328d83fcbf7322035019377ab40f - -S 2011-06-09 efcf857 - linux-i386 af8e6dba00bdf290768251d55bdda9cba6752e4f - macos-i386 0c45af8b96effa957906d1d6a2e05bffd627aa51 - winnt-i386 1ac18b27fae252e9c9142534e42c3c9a979bc4e8 - -S 2011-06-08 f54f279 - linux-i386 4a13693dc548c764e3b662a90d52e57359e3bc91 - macos-i386 e0bd0fd650a194c361881cf62c4af33c78276939 - winnt-i386 5c3c2a634ad2fed5b3987fb0e9de7fd75c510bd4 - -S 2011-06-07 a4ca75e - linux-i386 3de95184c59f5b0554df3da995090aaba26a190f - macos-i386 1537f2e01c746dc6ca2a9bfb4b2e81256bb01e96 - winnt-i386 538765c5c4c31dfe8d6e998ef7503beb5cfa1525 - -S 2011-06-03 f29d046 - linux-i386 76e4ee5b9148aae30ebfaabb12791bfee4fa8727 - macos-i386 e146fa18f4b0f22fe4fd7fd104242837569d7702 - winnt-i386 d1d4560c449d675c22601a1377b3c12389272e5d - -S 2011-06-01 a6ca9c2 - linux-i386 64569e47e1693a6e100d62fd31258c1d6ba3ea17 - macos-i386 e36a8d3d8365bc353ed2477d5c52656c1e8541df - winnt-i386 7d8e9e663feb424620db06c081800912838c3036 - -S 2011-05-31 84a56ed - linux-i386 f3a29ca94df09d7af553565de7d687bb4abfb385 - macos-i386 116b63ce05870ea716209e6d88524f6c72e0420f - winnt-i386 5f143adc2b391b82f2a09dcda7d64613367a7460 - -S 2011-05-31 e66f9e4 - linux-i386 237738606b2d9f514d7044ec268d4d7cd04e044e - macos-i386 f988b5516da9e21a94608751fc69ff762a08de6f - winnt-i386 c3299a18e84ce56ccb622541b3bc7309bdc2d132 - -S 2011-05-31 c7e3f88 - linux-i386 2c2898da08b7c8817c6b4f7fad79052b88616dde - macos-i386 b9a8f61d0230cac706a4b08079a9f8b4f94f27f0 - winnt-i386 7f62fac7e45c07945e40005846c656feb1f04cb4 - -S 2011-05-26 9491dc3 - linux-i386 03d665acee86be1f9bc5c6ed2a0d0664150ac3ed - macos-i386 926982c4949f387cc5dbd5ba27b0efd4ed546b49 - winnt-i386 54edb2ca559e69c0f9f0135a8faa56aad6db4485 - -S 2011-05-21 085fb1a - linux-i386 cc547cae447aeecc3c95f3c24a1ee0367c0a6624 - macos-i386 93e669a70f994d6f13f4a7d8f9dcdca3a28ca0ff - winnt-i386 015b4f6d2a6693c1920b8b46472ddb65fbb089d0 - -S 2011-05-20 c8d488b - linux-i386 4da3ba31a31848d9b4d8cf006bcef48a7795bfae - macos-i386 e63cad9da47d45db1d2c8b7470b23421e892cdb9 - winnt-i386 0d33eab302f75096042980152343b86c2220669e - -S 2011-05-16 4c5e315 - linux-i386 2dc16b91e6a381ed2023ee3ca427e669444ef775 - macos-i386 037a7bda6829daf85e7195db4fdce00797360a15 - winnt-i386 e1bf4bba221377568371394f89ea1a766349d594 - -T 2011-05-16 ae030c5 - linux-i386 83a6f52df4029b61ebd628795b0a400265c98179 - macos-i386 1167e8b782165be738cbd08eeab104ede0d61df6 - winnt-i386 456bc38c2bc7ebb27fd008e3ccd05f16f6a31fe6 - -S 2011-05-12 b1d3364 - linux-i386 7671ac0de19d9ea981616b3c58c1d48f1b43820a - macos-i386 bc7ee4d146ef6e0236afbd7cc4a9241582fd2952 - winnt-i386 5d3279a2dd0e3179b0e982432d63d79f87cac221 - -S 2011-05-11 14f1fe0 - linux-i386 
1a2c56832b202d75186b0c0e99907e54c4b45c66 - macos-i386 fdbf4e5da258cc369ec3ba243131eb06f7af861a - winnt-i386 5c9a47b3c52beafedb6191c9410bbd96cbe48ea9 - -S 2011-05-09 b00f307 - linux-i386 e522d7376d515dec2908335db291b568a6eb09af - macos-i386 410585116bc21e5e0946f7e565445f1becdfe47c - winnt-i386 07d2daba676dc4b9928728abbea0c24085623a0a - -S 2011-05-09 11d22de - linux-i386 6e68c09fc78eed5c51e95ab7213e28eb0b765cea - macos-i386 ac01c6277c1f52d52aa749607a21e1e450ffcb80 - winnt-i386 c16c8b10c21eb50d2975ed6e3266da8c51906a65 - -S 2011-05-06 ab43725 - linux-i386 ce9fd9f1787ce7bba167bd411d331de7510b5be2 - macos-i386 e80a9ee1c3e362d2bfe45b7b602ed798584dc4d6 - winnt-i386 69fe7fc75cb2310e71e2c6340b8fe94bce0e21b6 - -S 2011-05-05 157f61f - linux-i386 26dcf09d52a432a1647a697018277648302fc538 - macos-i386 aab5814246aea3a6dee755fa0b3a921d17eb6916 - winnt-i386 bd2ba0b814c1c0ff7916c46eef714e5300744c84 - -S 2011-05-04 add7d34 - linux-i386 a85df9efb67bf83c5d71218a90d2a4935de2bec3 - macos-i386 68e40fddb4aa9ac86a0fa93e7c36d461698e204e - winnt-i386 fbc7bf335af774cc7f904ceba725f18acbb5fcd9 - -S 2011-05-04 4642d7a - linux-i386 631d4c375a8cc74de77c2f9aa79d3c404f8c353b - macos-i386 61b5a0eebb4eea8242d5cbc4a9967882b6a99cb4 - winnt-i386 c82b74eacaaa8b77db08ceeb3caa33ab40fafb85 - -S 2011-05-03 54bfe8e - linux-i386 1e831f0904acf20c24e5ee944be61b8665a45c31 - macos-i386 eb20970d6ca784a9a7768a7eb1ce7d8f4af96fc9 - winnt-i386 2799f8ab1fdc45a706f530e1552cbfcd07116edb - -S 2011-04-29 7b95b5c - linux-i386 f0e166816ce34adc9f7202bd3cfbd80623505f28 - macos-i386 abf2ee279da63676ca17c9dc9e54d04d8f752b00 - winnt-i386 7d27adcc5e0c111e3221751962a7df0bcb9a9288 diff --git a/src/stage0.txt b/src/stage0.txt new file mode 100644 index 0000000000..3c84cab2a1 --- /dev/null +++ b/src/stage0.txt @@ -0,0 +1,17 @@ +# This file describes the stage0 compiler that's used to then bootstrap the Rust +# compiler itself. For the rustbuild build system, this also describes the +# relevant Cargo revision that we're using. +# +# Currently Rust always bootstrap from the previous stable release, and in our +# train model this means that the master branch bootstraps from beta, beta +# bootstraps from current stable, and stable bootstraps from the previous stable +# release. 
+# +# If you're looking at this file on the master branch, you'll likely see that +# rustc bootstraps from `beta-$date`, whereas if you're looking at a source +# tarball for a stable release you'll likely see `1.x.0-$date` where `1.x.0` was +# released on `$date` + +rustc: 1.9.0-2016-05-24 +rustc_key: d16b8f0e +cargo: nightly-2016-04-10 diff --git a/src/test/auxiliary/cgu_export_trait_method.rs b/src/test/codegen-units/item-collection/auxiliary/cgu_export_trait_method.rs similarity index 100% rename from src/test/auxiliary/cgu_export_trait_method.rs rename to src/test/codegen-units/item-collection/auxiliary/cgu_export_trait_method.rs diff --git a/src/test/auxiliary/cgu_extern_closures.rs b/src/test/codegen-units/item-collection/auxiliary/cgu_extern_closures.rs similarity index 100% rename from src/test/auxiliary/cgu_extern_closures.rs rename to src/test/codegen-units/item-collection/auxiliary/cgu_extern_closures.rs diff --git a/src/test/auxiliary/cgu_generic_function.rs b/src/test/codegen-units/item-collection/auxiliary/cgu_generic_function.rs similarity index 96% rename from src/test/auxiliary/cgu_generic_function.rs rename to src/test/codegen-units/item-collection/auxiliary/cgu_generic_function.rs index 83bb65bc2b..04c68748ec 100644 --- a/src/test/auxiliary/cgu_generic_function.rs +++ b/src/test/codegen-units/item-collection/auxiliary/cgu_generic_function.rs @@ -12,12 +12,13 @@ struct Struct(u32); +#[inline(never)] pub fn foo(x: T) -> (T, u32, i8) { let (x, Struct(y)) = bar(x); (x, y, 2) } - +#[inline(never)] fn bar(x: T) -> (T, Struct) { let _ = not_exported_and_not_generic(0); (x, Struct(1)) diff --git a/src/test/codegen-units/cross-crate-closures.rs b/src/test/codegen-units/item-collection/cross-crate-closures.rs similarity index 98% rename from src/test/codegen-units/cross-crate-closures.rs rename to src/test/codegen-units/item-collection/cross-crate-closures.rs index 30f3ef12d0..546bb235a5 100644 --- a/src/test/codegen-units/cross-crate-closures.rs +++ b/src/test/codegen-units/item-collection/cross-crate-closures.rs @@ -27,7 +27,7 @@ fn main() { //~ TRANS_ITEM fn cgu_extern_closures::inlined_fn_generic[0]::{{closure}}[0] let _ = cgu_extern_closures::inlined_fn_generic(3, 4, 5i32); - // Nothing should be generated for this call, we just link to the instance instance + // Nothing should be generated for this call, we just link to the instance // in the extern crate. let _ = cgu_extern_closures::non_inlined_fn(6, 7); } diff --git a/src/test/codegen-units/cross-crate-generic-functions.rs b/src/test/codegen-units/item-collection/cross-crate-generic-functions.rs similarity index 100% rename from src/test/codegen-units/cross-crate-generic-functions.rs rename to src/test/codegen-units/item-collection/cross-crate-generic-functions.rs diff --git a/src/test/codegen-units/cross-crate-trait-method.rs b/src/test/codegen-units/item-collection/cross-crate-trait-method.rs similarity index 100% rename from src/test/codegen-units/cross-crate-trait-method.rs rename to src/test/codegen-units/item-collection/cross-crate-trait-method.rs diff --git a/src/test/codegen-units/item-collection/drop_in_place_intrinsic.rs b/src/test/codegen-units/item-collection/drop_in_place_intrinsic.rs new file mode 100644 index 0000000000..db940b6804 --- /dev/null +++ b/src/test/codegen-units/item-collection/drop_in_place_intrinsic.rs @@ -0,0 +1,41 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-tidy-linelength +// compile-flags:-Zprint-trans-items=eager + +//~ TRANS_ITEM drop-glue drop_in_place_intrinsic::StructWithDtor[0] +//~ TRANS_ITEM drop-glue-contents drop_in_place_intrinsic::StructWithDtor[0] +struct StructWithDtor(u32); + +impl Drop for StructWithDtor { + //~ TRANS_ITEM fn drop_in_place_intrinsic::{{impl}}[0]::drop[0] + fn drop(&mut self) {} +} + +//~ TRANS_ITEM fn drop_in_place_intrinsic::main[0] +fn main() { + + //~ TRANS_ITEM drop-glue [drop_in_place_intrinsic::StructWithDtor[0]; 2] + let x = [StructWithDtor(0), StructWithDtor(1)]; + + drop_slice_in_place(&x); +} + +//~ TRANS_ITEM fn drop_in_place_intrinsic::drop_slice_in_place[0] +fn drop_slice_in_place(x: &[StructWithDtor]) { + unsafe { + // This is the interesting thing in this test case: Normally we would + // not have drop-glue for the unsized [StructWithDtor]. This has to be + // generated though when the drop_in_place() intrinsic is used. + //~ TRANS_ITEM drop-glue [drop_in_place_intrinsic::StructWithDtor[0]] + ::std::ptr::drop_in_place(x as *const _ as *mut [StructWithDtor]); + } +} diff --git a/src/test/codegen-units/function-as-argument.rs b/src/test/codegen-units/item-collection/function-as-argument.rs similarity index 100% rename from src/test/codegen-units/function-as-argument.rs rename to src/test/codegen-units/item-collection/function-as-argument.rs diff --git a/src/test/codegen-units/generic-drop-glue.rs b/src/test/codegen-units/item-collection/generic-drop-glue.rs similarity index 84% rename from src/test/codegen-units/generic-drop-glue.rs rename to src/test/codegen-units/item-collection/generic-drop-glue.rs index 476c84044e..6da8154540 100644 --- a/src/test/codegen-units/generic-drop-glue.rs +++ b/src/test/codegen-units/item-collection/generic-drop-glue.rs @@ -46,19 +46,22 @@ struct NonGenericNoDrop(i32); struct NonGenericWithDrop(i32); //~ TRANS_ITEM drop-glue generic_drop_glue::NonGenericWithDrop[0] +//~ TRANS_ITEM drop-glue-contents generic_drop_glue::NonGenericWithDrop[0] impl Drop for NonGenericWithDrop { + //~ TRANS_ITEM fn generic_drop_glue::{{impl}}[2]::drop[0] fn drop(&mut self) {} -//~ TRANS_ITEM fn generic_drop_glue::{{impl}}[2]::drop[0] } //~ TRANS_ITEM fn generic_drop_glue::main[0] fn main() { //~ TRANS_ITEM drop-glue generic_drop_glue::StructWithDrop[0] + //~ TRANS_ITEM drop-glue-contents generic_drop_glue::StructWithDrop[0] //~ TRANS_ITEM fn generic_drop_glue::{{impl}}[0]::drop[0] let _ = StructWithDrop { x: 0i8, y: 'a' }.x; //~ TRANS_ITEM drop-glue generic_drop_glue::StructWithDrop[0]<&str, generic_drop_glue::NonGenericNoDrop[0]> + //~ TRANS_ITEM drop-glue-contents generic_drop_glue::StructWithDrop[0]<&str, generic_drop_glue::NonGenericNoDrop[0]> //~ TRANS_ITEM fn generic_drop_glue::{{impl}}[0]::drop[0]<&str, generic_drop_glue::NonGenericNoDrop[0]> let _ = StructWithDrop { x: "&str", y: NonGenericNoDrop(0) }.y; @@ -71,6 +74,7 @@ fn main() { let _ = StructNoDrop { x: NonGenericWithDrop(0), y: 0f64 }.y; //~ TRANS_ITEM drop-glue generic_drop_glue::EnumWithDrop[0] + //~ TRANS_ITEM drop-glue-contents generic_drop_glue::EnumWithDrop[0] //~ TRANS_ITEM fn generic_drop_glue::{{impl}}[1]::drop[0] let _ = match EnumWithDrop::A::(0) { EnumWithDrop::A(x) => x, @@ -78,6 +82,7 @@ fn main() { }; //~ TRANS_ITEM drop-glue generic_drop_glue::EnumWithDrop[0] + //~ TRANS_ITEM drop-glue-contents 
generic_drop_glue::EnumWithDrop[0] //~ TRANS_ITEM fn generic_drop_glue::{{impl}}[1]::drop[0] let _ = match EnumWithDrop::B::(1.0) { EnumWithDrop::A(x) => x, diff --git a/src/test/codegen-units/generic-functions.rs b/src/test/codegen-units/item-collection/generic-functions.rs similarity index 100% rename from src/test/codegen-units/generic-functions.rs rename to src/test/codegen-units/item-collection/generic-functions.rs diff --git a/src/test/codegen-units/generic-impl.rs b/src/test/codegen-units/item-collection/generic-impl.rs similarity index 100% rename from src/test/codegen-units/generic-impl.rs rename to src/test/codegen-units/item-collection/generic-impl.rs diff --git a/src/test/codegen-units/impl-in-non-instantiated-generic.rs b/src/test/codegen-units/item-collection/impl-in-non-instantiated-generic.rs similarity index 97% rename from src/test/codegen-units/impl-in-non-instantiated-generic.rs rename to src/test/codegen-units/item-collection/impl-in-non-instantiated-generic.rs index a3bfa67e1a..c43c254f33 100644 --- a/src/test/codegen-units/impl-in-non-instantiated-generic.rs +++ b/src/test/codegen-units/item-collection/impl-in-non-instantiated-generic.rs @@ -32,5 +32,3 @@ pub fn generic_function(x: T) -> (T, i32) { fn main() { 0i64.foo(); } - -//~ TRANS_ITEM drop-glue i8 diff --git a/src/test/codegen-units/instantiation-through-vtable.rs b/src/test/codegen-units/item-collection/instantiation-through-vtable.rs similarity index 100% rename from src/test/codegen-units/instantiation-through-vtable.rs rename to src/test/codegen-units/item-collection/instantiation-through-vtable.rs diff --git a/src/test/codegen-units/items-within-generic-items.rs b/src/test/codegen-units/item-collection/items-within-generic-items.rs similarity index 100% rename from src/test/codegen-units/items-within-generic-items.rs rename to src/test/codegen-units/item-collection/items-within-generic-items.rs diff --git a/src/test/codegen-units/non-generic-closures.rs b/src/test/codegen-units/item-collection/non-generic-closures.rs similarity index 98% rename from src/test/codegen-units/non-generic-closures.rs rename to src/test/codegen-units/item-collection/non-generic-closures.rs index bf8804e12c..ba77266d07 100644 --- a/src/test/codegen-units/non-generic-closures.rs +++ b/src/test/codegen-units/item-collection/non-generic-closures.rs @@ -59,5 +59,3 @@ fn main() { fn run_closure(f: &Fn(i32)) { f(3); } - -//~ TRANS_ITEM drop-glue i8 diff --git a/src/test/codegen-units/non-generic-drop-glue.rs b/src/test/codegen-units/item-collection/non-generic-drop-glue.rs similarity index 90% rename from src/test/codegen-units/non-generic-drop-glue.rs rename to src/test/codegen-units/item-collection/non-generic-drop-glue.rs index bd8b0c605a..91be81a0b8 100644 --- a/src/test/codegen-units/non-generic-drop-glue.rs +++ b/src/test/codegen-units/item-collection/non-generic-drop-glue.rs @@ -14,6 +14,7 @@ #![deny(dead_code)] //~ TRANS_ITEM drop-glue non_generic_drop_glue::StructWithDrop[0] +//~ TRANS_ITEM drop-glue-contents non_generic_drop_glue::StructWithDrop[0] struct StructWithDrop { x: i32 } @@ -28,6 +29,7 @@ struct StructNoDrop { } //~ TRANS_ITEM drop-glue non_generic_drop_glue::EnumWithDrop[0] +//~ TRANS_ITEM drop-glue-contents non_generic_drop_glue::EnumWithDrop[0] enum EnumWithDrop { A(i32) } diff --git a/src/test/codegen-units/non-generic-functions.rs b/src/test/codegen-units/item-collection/non-generic-functions.rs similarity index 100% rename from src/test/codegen-units/non-generic-functions.rs rename to 
src/test/codegen-units/item-collection/non-generic-functions.rs diff --git a/src/test/codegen-units/overloaded-operators.rs b/src/test/codegen-units/item-collection/overloaded-operators.rs similarity index 100% rename from src/test/codegen-units/overloaded-operators.rs rename to src/test/codegen-units/item-collection/overloaded-operators.rs diff --git a/src/test/codegen-units/item-collection/static-init.rs b/src/test/codegen-units/item-collection/static-init.rs new file mode 100644 index 0000000000..41c0f46f80 --- /dev/null +++ b/src/test/codegen-units/item-collection/static-init.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags:-Zprint-trans-items=eager + +pub static FN : fn() = foo::; + +pub fn foo() { } + +//~ TRANS_ITEM fn static_init::foo[0] +//~ TRANS_ITEM static static_init::FN[0] + +fn main() { } + +//~ TRANS_ITEM fn static_init::main[0] +//~ TRANS_ITEM drop-glue i8 diff --git a/src/test/codegen-units/statics-and-consts.rs b/src/test/codegen-units/item-collection/statics-and-consts.rs similarity index 100% rename from src/test/codegen-units/statics-and-consts.rs rename to src/test/codegen-units/item-collection/statics-and-consts.rs diff --git a/src/test/codegen-units/trait-implementations.rs b/src/test/codegen-units/item-collection/trait-implementations.rs similarity index 100% rename from src/test/codegen-units/trait-implementations.rs rename to src/test/codegen-units/item-collection/trait-implementations.rs diff --git a/src/test/codegen-units/trait-method-as-argument.rs b/src/test/codegen-units/item-collection/trait-method-as-argument.rs similarity index 100% rename from src/test/codegen-units/trait-method-as-argument.rs rename to src/test/codegen-units/item-collection/trait-method-as-argument.rs diff --git a/src/test/codegen-units/trait-method-default-impl.rs b/src/test/codegen-units/item-collection/trait-method-default-impl.rs similarity index 100% rename from src/test/codegen-units/trait-method-default-impl.rs rename to src/test/codegen-units/item-collection/trait-method-default-impl.rs diff --git a/src/test/codegen-units/transitive-drop-glue.rs b/src/test/codegen-units/item-collection/transitive-drop-glue.rs similarity index 89% rename from src/test/codegen-units/transitive-drop-glue.rs rename to src/test/codegen-units/item-collection/transitive-drop-glue.rs index 21bb29199a..81a7059fe2 100644 --- a/src/test/codegen-units/transitive-drop-glue.rs +++ b/src/test/codegen-units/item-collection/transitive-drop-glue.rs @@ -18,6 +18,7 @@ struct Root(Intermediate); //~ TRANS_ITEM drop-glue transitive_drop_glue::Intermediate[0] struct Intermediate(Leaf); //~ TRANS_ITEM drop-glue transitive_drop_glue::Leaf[0] +//~ TRANS_ITEM drop-glue-contents transitive_drop_glue::Leaf[0] struct Leaf; impl Drop for Leaf { @@ -25,11 +26,8 @@ impl Drop for Leaf { fn drop(&mut self) {} } -//~ TRANS_ITEM drop-glue transitive_drop_glue::Root[0] struct RootGen(IntermediateGen); -//~ TRANS_ITEM drop-glue transitive_drop_glue::Root[0] struct IntermediateGen(LeafGen); -//~ TRANS_ITEM drop-glue transitive_drop_glue::Root[0] struct LeafGen(T); impl Drop for LeafGen { @@ -44,12 +42,14 @@ fn main() { //~ TRANS_ITEM drop-glue transitive_drop_glue::RootGen[0] //~ TRANS_ITEM 
drop-glue transitive_drop_glue::IntermediateGen[0] //~ TRANS_ITEM drop-glue transitive_drop_glue::LeafGen[0] + //~ TRANS_ITEM drop-glue-contents transitive_drop_glue::LeafGen[0] //~ TRANS_ITEM fn transitive_drop_glue::{{impl}}[1]::drop[0] let _ = RootGen(IntermediateGen(LeafGen(0u32))); //~ TRANS_ITEM drop-glue transitive_drop_glue::RootGen[0] //~ TRANS_ITEM drop-glue transitive_drop_glue::IntermediateGen[0] //~ TRANS_ITEM drop-glue transitive_drop_glue::LeafGen[0] + //~ TRANS_ITEM drop-glue-contents transitive_drop_glue::LeafGen[0] //~ TRANS_ITEM fn transitive_drop_glue::{{impl}}[1]::drop[0] let _ = RootGen(IntermediateGen(LeafGen(0i16))); } diff --git a/src/test/codegen-units/tuple-drop-glue.rs b/src/test/codegen-units/item-collection/tuple-drop-glue.rs similarity index 94% rename from src/test/codegen-units/tuple-drop-glue.rs rename to src/test/codegen-units/item-collection/tuple-drop-glue.rs index 1bc235de88..ef4bc1dca5 100644 --- a/src/test/codegen-units/tuple-drop-glue.rs +++ b/src/test/codegen-units/item-collection/tuple-drop-glue.rs @@ -14,6 +14,7 @@ #![deny(dead_code)] //~ TRANS_ITEM drop-glue tuple_drop_glue::Dropped[0] +//~ TRANS_ITEM drop-glue-contents tuple_drop_glue::Dropped[0] struct Dropped; impl Drop for Dropped { diff --git a/src/test/codegen-units/unsizing.rs b/src/test/codegen-units/item-collection/unsizing.rs similarity index 100% rename from src/test/codegen-units/unsizing.rs rename to src/test/codegen-units/item-collection/unsizing.rs diff --git a/src/test/codegen-units/unused-traits-and-generics.rs b/src/test/codegen-units/item-collection/unused-traits-and-generics.rs similarity index 100% rename from src/test/codegen-units/unused-traits-and-generics.rs rename to src/test/codegen-units/item-collection/unused-traits-and-generics.rs diff --git a/src/test/codegen-units/partitioning/auxiliary/cgu_explicit_inlining.rs b/src/test/codegen-units/partitioning/auxiliary/cgu_explicit_inlining.rs new file mode 100644 index 0000000000..e4ba9fae41 --- /dev/null +++ b/src/test/codegen-units/partitioning/auxiliary/cgu_explicit_inlining.rs @@ -0,0 +1,20 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "lib"] + +#[inline] +pub fn inlined() {} + +#[inline(always)] +pub fn always_inlined() {} + +#[inline(never)] +pub fn never_inlined() {} diff --git a/src/test/codegen-units/partitioning/auxiliary/cgu_extern_drop_glue.rs b/src/test/codegen-units/partitioning/auxiliary/cgu_extern_drop_glue.rs new file mode 100644 index 0000000000..049bdb4657 --- /dev/null +++ b/src/test/codegen-units/partitioning/auxiliary/cgu_extern_drop_glue.rs @@ -0,0 +1,17 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![crate_type = "lib"] + +pub struct Struct(pub u32); + +impl Drop for Struct { + fn drop(&mut self) {} +} diff --git a/src/test/codegen-units/partitioning/auxiliary/cgu_generic_function.rs b/src/test/codegen-units/partitioning/auxiliary/cgu_generic_function.rs new file mode 100644 index 0000000000..04c68748ec --- /dev/null +++ b/src/test/codegen-units/partitioning/auxiliary/cgu_generic_function.rs @@ -0,0 +1,37 @@ +// Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "lib"] + +struct Struct(u32); + +#[inline(never)] +pub fn foo(x: T) -> (T, u32, i8) { + let (x, Struct(y)) = bar(x); + (x, y, 2) +} + +#[inline(never)] +fn bar(x: T) -> (T, Struct) { + let _ = not_exported_and_not_generic(0); + (x, Struct(1)) +} + +// These should not contribute to the codegen items of other crates. +#[inline(never)] +pub fn exported_but_not_generic(x: i32) -> i64 { + x as i64 +} + +#[inline(never)] +fn not_exported_and_not_generic(x: u32) -> u64 { + x as u64 +} + diff --git a/src/test/codegen-units/partitioning/extern-drop-glue.rs b/src/test/codegen-units/partitioning/extern-drop-glue.rs new file mode 100644 index 0000000000..5262d31ae0 --- /dev/null +++ b/src/test/codegen-units/partitioning/extern-drop-glue.rs @@ -0,0 +1,46 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-tidy-linelength + +// We specify -Z incremental here because we want to test the partitioning for +// incremental compilation +// compile-flags:-Zprint-trans-items=lazy -Zincremental=tmp/partitioning-tests/extern-drop-glue + +#![allow(dead_code)] +#![crate_type="lib"] + +// aux-build:cgu_extern_drop_glue.rs +extern crate cgu_extern_drop_glue; + +//~ TRANS_ITEM drop-glue cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[OnceODR] extern_drop_glue-mod1[OnceODR] +//~ TRANS_ITEM drop-glue-contents cgu_extern_drop_glue::Struct[0] @@ extern_drop_glue[OnceODR] extern_drop_glue-mod1[OnceODR] + +struct LocalStruct(cgu_extern_drop_glue::Struct); + +//~ TRANS_ITEM fn extern_drop_glue::user[0] @@ extern_drop_glue[WeakODR] +fn user() +{ + //~ TRANS_ITEM drop-glue extern_drop_glue::LocalStruct[0] @@ extern_drop_glue[OnceODR] + let _ = LocalStruct(cgu_extern_drop_glue::Struct(0)); +} + +mod mod1 { + use cgu_extern_drop_glue; + + struct LocalStruct(cgu_extern_drop_glue::Struct); + + //~ TRANS_ITEM fn extern_drop_glue::mod1[0]::user[0] @@ extern_drop_glue-mod1[WeakODR] + fn user() + { + //~ TRANS_ITEM drop-glue extern_drop_glue::mod1[0]::LocalStruct[0] @@ extern_drop_glue-mod1[OnceODR] + let _ = LocalStruct(cgu_extern_drop_glue::Struct(0)); + } +} diff --git a/src/test/codegen-units/partitioning/extern-generic.rs b/src/test/codegen-units/partitioning/extern-generic.rs new file mode 100644 index 0000000000..6beed231df --- /dev/null +++ b/src/test/codegen-units/partitioning/extern-generic.rs @@ -0,0 +1,64 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-tidy-linelength +// We specify -Z incremental here because we want to test the partitioning for +// incremental compilation +// compile-flags:-Zprint-trans-items=eager -Zincremental=tmp/partitioning-tests/extern-generic + +#![allow(dead_code)] +#![crate_type="lib"] + +// aux-build:cgu_generic_function.rs +extern crate cgu_generic_function; + +//~ TRANS_ITEM fn extern_generic::user[0] @@ extern_generic[WeakODR] +fn user() { + let _ = cgu_generic_function::foo("abc"); +} + +mod mod1 { + use cgu_generic_function; + + //~ TRANS_ITEM fn extern_generic::mod1[0]::user[0] @@ extern_generic-mod1[WeakODR] + fn user() { + let _ = cgu_generic_function::foo("abc"); + } + + mod mod1 { + use cgu_generic_function; + + //~ TRANS_ITEM fn extern_generic::mod1[0]::mod1[0]::user[0] @@ extern_generic-mod1-mod1[WeakODR] + fn user() { + let _ = cgu_generic_function::foo("abc"); + } + } +} + +mod mod2 { + use cgu_generic_function; + + //~ TRANS_ITEM fn extern_generic::mod2[0]::user[0] @@ extern_generic-mod2[WeakODR] + fn user() { + let _ = cgu_generic_function::foo("abc"); + } +} + +mod mod3 { + //~ TRANS_ITEM fn extern_generic::mod3[0]::non_user[0] @@ extern_generic-mod3[WeakODR] + fn non_user() {} +} + +// Make sure the two generic functions from the extern crate get instantiated +// privately in every module they are used in. +//~ TRANS_ITEM fn cgu_generic_function::foo[0]<&str> @@ extern_generic[OnceODR] extern_generic-mod1[OnceODR] extern_generic-mod2[OnceODR] extern_generic-mod1-mod1[OnceODR] +//~ TRANS_ITEM fn cgu_generic_function::bar[0]<&str> @@ extern_generic[OnceODR] extern_generic-mod1[OnceODR] extern_generic-mod2[OnceODR] extern_generic-mod1-mod1[OnceODR] + +//~ TRANS_ITEM drop-glue i8 diff --git a/src/test/codegen-units/partitioning/inlining-from-extern-crate.rs b/src/test/codegen-units/partitioning/inlining-from-extern-crate.rs new file mode 100644 index 0000000000..967824f31d --- /dev/null +++ b/src/test/codegen-units/partitioning/inlining-from-extern-crate.rs @@ -0,0 +1,61 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-tidy-linelength +// We specify -Z incremental here because we want to test the partitioning for +// incremental compilation +// compile-flags:-Zprint-trans-items=lazy -Zincremental=tmp/partitioning-tests/inlining-from-extern-crate + +#![crate_type="lib"] + +// aux-build:cgu_explicit_inlining.rs +extern crate cgu_explicit_inlining; + +// This test makes sure that items inlined from external crates are privately +// instantiated in every codegen unit they are used in.
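The comment just above states the behaviour this test pins down: an `#[inline]` or `#[inline(always)]` function defined in another crate is not merely referenced, it is re-instantiated privately in every codegen unit that calls it, while an `#[inline(never)]` function is only linked to. As a rough stand-alone illustration of that setup (the module and function names are illustrative, a local `helpers` module stands in for the auxiliary crate, and this sketch is not part of the patch):

```rust
// `helpers` stands in for an auxiliary crate such as cgu_explicit_inlining.
mod helpers {
    #[inline]
    pub fn inlined() {}

    #[inline(always)]
    pub fn always_inlined() {}

    #[inline(never)]
    pub fn never_inlined() {}
}

mod user1 {
    use super::helpers;

    // When `helpers` is a separate crate, this module's codegen unit would
    // receive its own private copy of `inlined`, but only an external
    // reference to `never_inlined`.
    pub fn call() {
        helpers::inlined();
        helpers::never_inlined();
    }
}

mod user2 {
    use super::helpers;

    // Likewise, a private copy of `always_inlined` would land here.
    pub fn call() {
        helpers::always_inlined();
        helpers::never_inlined();
    }
}

fn main() {
    user1::call();
    user2::call();
}
```

In the test itself, the `//~ TRANS_ITEM ... @@ unit[linkage]` annotations that follow assert exactly which codegen units receive such a copy when the file is compiled with `-Zprint-trans-items=lazy`.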
+ +//~ TRANS_ITEM fn cgu_explicit_inlining::inlined[0] @@ inlining_from_extern_crate[OnceODR] inlining_from_extern_crate-mod1[OnceODR] +//~ TRANS_ITEM fn cgu_explicit_inlining::always_inlined[0] @@ inlining_from_extern_crate[OnceODR] inlining_from_extern_crate-mod2[OnceODR] + +//~ TRANS_ITEM fn inlining_from_extern_crate::user[0] @@ inlining_from_extern_crate[WeakODR] +pub fn user() +{ + cgu_explicit_inlining::inlined(); + cgu_explicit_inlining::always_inlined(); + + // does not generate a translation item in this crate + cgu_explicit_inlining::never_inlined(); +} + +mod mod1 { + use cgu_explicit_inlining; + + //~ TRANS_ITEM fn inlining_from_extern_crate::mod1[0]::user[0] @@ inlining_from_extern_crate-mod1[WeakODR] + pub fn user() + { + cgu_explicit_inlining::inlined(); + + // does not generate a translation item in this crate + cgu_explicit_inlining::never_inlined(); + } +} + +mod mod2 { + use cgu_explicit_inlining; + + //~ TRANS_ITEM fn inlining_from_extern_crate::mod2[0]::user[0] @@ inlining_from_extern_crate-mod2[WeakODR] + pub fn user() + { + cgu_explicit_inlining::always_inlined(); + + // does not generate a translation item in this crate + cgu_explicit_inlining::never_inlined(); + } +} diff --git a/src/test/codegen-units/partitioning/local-drop-glue.rs b/src/test/codegen-units/partitioning/local-drop-glue.rs new file mode 100644 index 0000000000..04ebef645e --- /dev/null +++ b/src/test/codegen-units/partitioning/local-drop-glue.rs @@ -0,0 +1,64 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// ignore-tidy-linelength +// We specify -Z incremental here because we want to test the partitioning for +// incremental compilation +// compile-flags:-Zprint-trans-items=lazy -Zincremental=tmp/partitioning-tests/local-drop-glue + +#![allow(dead_code)] +#![crate_type="lib"] + +//~ TRANS_ITEM drop-glue local_drop_glue::Struct[0] @@ local_drop_glue[OnceODR] local_drop_glue-mod1[OnceODR] +//~ TRANS_ITEM drop-glue-contents local_drop_glue::Struct[0] @@ local_drop_glue[OnceODR] local_drop_glue-mod1[OnceODR] +struct Struct { + _a: u32 +} + +impl Drop for Struct { + //~ TRANS_ITEM fn local_drop_glue::{{impl}}[0]::drop[0] @@ local_drop_glue[WeakODR] + fn drop(&mut self) {} +} + +//~ TRANS_ITEM drop-glue local_drop_glue::Outer[0] @@ local_drop_glue[OnceODR] +struct Outer { + _a: Struct +} + +//~ TRANS_ITEM fn local_drop_glue::user[0] @@ local_drop_glue[WeakODR] +fn user() +{ + let _ = Outer { + _a: Struct { + _a: 0 + } + }; +} + +mod mod1 +{ + use super::Struct; + + //~ TRANS_ITEM drop-glue local_drop_glue::mod1[0]::Struct2[0] @@ local_drop_glue-mod1[OnceODR] + struct Struct2 { + _a: Struct, + //~ TRANS_ITEM drop-glue (u32, local_drop_glue::Struct[0]) @@ local_drop_glue-mod1[OnceODR] + _b: (u32, Struct), + } + + //~ TRANS_ITEM fn local_drop_glue::mod1[0]::user[0] @@ local_drop_glue-mod1[WeakODR] + fn user() + { + let _ = Struct2 { + _a: Struct { _a: 0 }, + _b: (0, Struct { _a: 0 }), + }; + } +} diff --git a/src/test/codegen-units/partitioning/local-generic.rs b/src/test/codegen-units/partitioning/local-generic.rs new file mode 100644 index 0000000000..e38e676b95 --- /dev/null +++ b/src/test/codegen-units/partitioning/local-generic.rs @@ -0,0 +1,60 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-tidy-linelength +// We specify -Z incremental here because we want to test the partitioning for +// incremental compilation +// compile-flags:-Zprint-trans-items=eager -Zincremental=tmp/partitioning-tests/local-generic + +#![allow(dead_code)] +#![crate_type="lib"] + +// Used in different modules/codegen units but always instantiated in the same +// codegen unit. 
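The comment just above introduces `local_generic::generic`, the crate-local counterpart of the previous case: however many modules call it, all of its monomorphizations are expected to land in the single shared ("volatile") codegen unit. A rough stand-alone sketch of that usage pattern (the names are illustrative and the sketch is not part of the patch):

```rust
// One crate-local generic function used from several modules with different
// type arguments. The partitioning test expects every instantiation to be
// emitted into a single shared codegen unit rather than one per caller.
pub fn generic<T>(x: T) -> T { x }

mod mod1 {
    use super::generic;

    pub fn user() -> u64 {
        generic(0u64)
    }
}

mod mod2 {
    use super::generic;

    pub fn user() -> &'static str {
        generic("abc")
    }
}

fn main() {
    let _ = generic(0u32);
    let _ = generic('c');
    let _ = mod1::user();
    let _ = mod2::user();
}
```

The annotations that follow spell this out for the test: each instantiation is expected in the shared `local_generic.volatile` unit with `WeakODR` linkage.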
+ +//~ TRANS_ITEM fn local_generic::generic[0] @@ local_generic.volatile[WeakODR] +//~ TRANS_ITEM fn local_generic::generic[0] @@ local_generic.volatile[WeakODR] +//~ TRANS_ITEM fn local_generic::generic[0] @@ local_generic.volatile[WeakODR] +//~ TRANS_ITEM fn local_generic::generic[0]<&str> @@ local_generic.volatile[WeakODR] +pub fn generic(x: T) -> T { x } + +//~ TRANS_ITEM fn local_generic::user[0] @@ local_generic[WeakODR] +fn user() { + let _ = generic(0u32); +} + +mod mod1 { + pub use super::generic; + + //~ TRANS_ITEM fn local_generic::mod1[0]::user[0] @@ local_generic-mod1[WeakODR] + fn user() { + let _ = generic(0u64); + } + + mod mod1 { + use super::generic; + + //~ TRANS_ITEM fn local_generic::mod1[0]::mod1[0]::user[0] @@ local_generic-mod1-mod1[WeakODR] + fn user() { + let _ = generic('c'); + } + } +} + +mod mod2 { + use super::generic; + + //~ TRANS_ITEM fn local_generic::mod2[0]::user[0] @@ local_generic-mod2[WeakODR] + fn user() { + let _ = generic("abc"); + } +} + +//~ TRANS_ITEM drop-glue i8 diff --git a/src/test/codegen-units/partitioning/local-inlining.rs b/src/test/codegen-units/partitioning/local-inlining.rs new file mode 100644 index 0000000000..880cc0a4fb --- /dev/null +++ b/src/test/codegen-units/partitioning/local-inlining.rs @@ -0,0 +1,54 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-tidy-linelength +// We specify -Z incremental here because we want to test the partitioning for +// incremental compilation +// compile-flags:-Zprint-trans-items=lazy -Zincremental=tmp/partitioning-tests/local-inlining + +#![allow(dead_code)] +#![crate_type="lib"] + +mod inline { + + // Important: This function should show up in all codegen units where it is inlined + //~ TRANS_ITEM fn local_inlining::inline[0]::inlined_function[0] @@ local_inlining-inline[WeakODR] local_inlining-user1[Available] local_inlining-user2[Available] + #[inline(always)] + pub fn inlined_function() + { + + } +} + +mod user1 { + use super::inline; + + //~ TRANS_ITEM fn local_inlining::user1[0]::foo[0] @@ local_inlining-user1[WeakODR] + fn foo() { + inline::inlined_function(); + } +} + +mod user2 { + use super::inline; + + //~ TRANS_ITEM fn local_inlining::user2[0]::bar[0] @@ local_inlining-user2[WeakODR] + fn bar() { + inline::inlined_function(); + } +} + +mod non_user { + + //~ TRANS_ITEM fn local_inlining::non_user[0]::baz[0] @@ local_inlining-non_user[WeakODR] + fn baz() { + + } +} diff --git a/src/test/codegen-units/partitioning/local-transitive-inlining.rs b/src/test/codegen-units/partitioning/local-transitive-inlining.rs new file mode 100644 index 0000000000..f3efa2587d --- /dev/null +++ b/src/test/codegen-units/partitioning/local-transitive-inlining.rs @@ -0,0 +1,54 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
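The file that begins here, local-transitive-inlining.rs, takes the local-inlining case one step further: an `#[inline(always)]` function that itself calls another `#[inline(always)]` function drags the whole chain into the codegen unit of the final caller. A rough stand-alone sketch of that chain (illustrative only, not part of the patch):

```rust
// `bar` is the only non-inline function; because `foo` and `inlined_function`
// are #[inline(always)], the codegen unit containing `bar` ends up with its
// own available copies of both of them.
mod inline {
    #[inline(always)]
    pub fn inlined_function() {}
}

mod direct_user {
    use super::inline;

    #[inline(always)]
    pub fn foo() {
        inline::inlined_function();
    }
}

mod indirect_user {
    use super::direct_user;

    pub fn bar() {
        direct_user::foo();
    }
}

fn main() {
    indirect_user::bar();
}
```

In the test below this shows up as `[Available]` entries for both inlined functions in the `-indirect_user` codegen unit.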
+ +// ignore-tidy-linelength +// We specify -Z incremental here because we want to test the partitioning for +// incremental compilation +// compile-flags:-Zprint-trans-items=lazy -Zincremental=tmp/partitioning-tests/local-transitive-inlining + +#![allow(dead_code)] +#![crate_type="lib"] + +mod inline { + + //~ TRANS_ITEM fn local_transitive_inlining::inline[0]::inlined_function[0] @@ local_transitive_inlining-inline[WeakODR] local_transitive_inlining-direct_user[Available] local_transitive_inlining-indirect_user[Available] + #[inline(always)] + pub fn inlined_function() + { + + } +} + +mod direct_user { + use super::inline; + + //~ TRANS_ITEM fn local_transitive_inlining::direct_user[0]::foo[0] @@ local_transitive_inlining-direct_user[WeakODR] local_transitive_inlining-indirect_user[Available] + #[inline(always)] + pub fn foo() { + inline::inlined_function(); + } +} + +mod indirect_user { + use super::direct_user; + + //~ TRANS_ITEM fn local_transitive_inlining::indirect_user[0]::bar[0] @@ local_transitive_inlining-indirect_user[WeakODR] + fn bar() { + direct_user::foo(); + } +} + +mod non_user { + + //~ TRANS_ITEM fn local_transitive_inlining::non_user[0]::baz[0] @@ local_transitive_inlining-non_user[WeakODR] + fn baz() { + + } +} diff --git a/src/test/codegen-units/partitioning/methods-are-with-self-type.rs b/src/test/codegen-units/partitioning/methods-are-with-self-type.rs new file mode 100644 index 0000000000..99dda0e38b --- /dev/null +++ b/src/test/codegen-units/partitioning/methods-are-with-self-type.rs @@ -0,0 +1,80 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-tidy-linelength +// We specify -Z incremental here because we want to test the partitioning for +// incremental compilation +// compile-flags:-Zprint-trans-items=lazy -Zincremental=tmp/partitioning-tests/methods-are-with-self-type + +#![allow(dead_code)] + +struct SomeType; + +struct SomeGenericType(T1, T2); + +mod mod1 { + use super::{SomeType, SomeGenericType}; + + // Even though the impl is in `mod1`, the methods should end up in the + // parent module, since that is where their self-type is. + impl SomeType { + //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::method[0] @@ methods_are_with_self_type[WeakODR] + fn method(&self) {} + + //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[0]::associated_fn[0] @@ methods_are_with_self_type[WeakODR] + fn associated_fn() {} + } + + impl SomeGenericType { + pub fn method(&self) {} + pub fn associated_fn(_: T1, _: T2) {} + } +} + +trait Trait { + fn foo(&self); + fn default(&self) {} +} + +// We provide an implementation of `Trait` for all types. The corresponding +// monomorphizations should end up in whichever module the concrete `T` is. 
+impl<T> Trait for T
+{
+    fn foo(&self) {}
+}
+
+mod type1 {
+    pub struct Struct;
+}
+
+mod type2 {
+    pub struct Struct;
+}
+
+//~ TRANS_ITEM fn methods_are_with_self_type::main[0]
+fn main()
+{
+    //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[1]::method[0] @@ methods_are_with_self_type.volatile[WeakODR]
+    SomeGenericType(0u32, 0u64).method();
+    //~ TRANS_ITEM fn methods_are_with_self_type::mod1[0]::{{impl}}[1]::associated_fn[0] @@ methods_are_with_self_type.volatile[WeakODR]
+    SomeGenericType::associated_fn('c', "&str");
+
+    //~ TRANS_ITEM fn methods_are_with_self_type::{{impl}}[0]::foo[0] @@ methods_are_with_self_type-type1.volatile[WeakODR]
+    type1::Struct.foo();
+    //~ TRANS_ITEM fn methods_are_with_self_type::{{impl}}[0]::foo[0] @@ methods_are_with_self_type-type2.volatile[WeakODR]
+    type2::Struct.foo();
+
+    //~ TRANS_ITEM fn methods_are_with_self_type::Trait[0]::default[0] @@ methods_are_with_self_type-type1.volatile[WeakODR]
+    type1::Struct.default();
+    //~ TRANS_ITEM fn methods_are_with_self_type::Trait[0]::default[0] @@ methods_are_with_self_type-type2.volatile[WeakODR]
+    type2::Struct.default();
+}
+
+//~ TRANS_ITEM drop-glue i8
diff --git a/src/test/codegen-units/partitioning/regular-modules.rs b/src/test/codegen-units/partitioning/regular-modules.rs
new file mode 100644
index 0000000000..c3af86f820
--- /dev/null
+++ b/src/test/codegen-units/partitioning/regular-modules.rs
@@ -0,0 +1,84 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+ +// ignore-tidy-linelength +// We specify -Z incremental here because we want to test the partitioning for +// incremental compilation +// compile-flags:-Zprint-trans-items=eager -Zincremental=tmp/partitioning-tests/regular-modules + +#![allow(dead_code)] +#![crate_type="lib"] + +//~ TRANS_ITEM fn regular_modules::foo[0] @@ regular_modules[WeakODR] +fn foo() {} + +//~ TRANS_ITEM fn regular_modules::bar[0] @@ regular_modules[WeakODR] +fn bar() {} + +//~ TRANS_ITEM static regular_modules::BAZ[0] @@ regular_modules[External] +static BAZ: u64 = 0; + +mod mod1 { + + //~ TRANS_ITEM fn regular_modules::mod1[0]::foo[0] @@ regular_modules-mod1[WeakODR] + fn foo() {} + //~ TRANS_ITEM fn regular_modules::mod1[0]::bar[0] @@ regular_modules-mod1[WeakODR] + fn bar() {} + //~ TRANS_ITEM static regular_modules::mod1[0]::BAZ[0] @@ regular_modules-mod1[External] + static BAZ: u64 = 0; + + mod mod1 { + //~ TRANS_ITEM fn regular_modules::mod1[0]::mod1[0]::foo[0] @@ regular_modules-mod1-mod1[WeakODR] + fn foo() {} + //~ TRANS_ITEM fn regular_modules::mod1[0]::mod1[0]::bar[0] @@ regular_modules-mod1-mod1[WeakODR] + fn bar() {} + //~ TRANS_ITEM static regular_modules::mod1[0]::mod1[0]::BAZ[0] @@ regular_modules-mod1-mod1[External] + static BAZ: u64 = 0; + } + + mod mod2 { + //~ TRANS_ITEM fn regular_modules::mod1[0]::mod2[0]::foo[0] @@ regular_modules-mod1-mod2[WeakODR] + fn foo() {} + //~ TRANS_ITEM fn regular_modules::mod1[0]::mod2[0]::bar[0] @@ regular_modules-mod1-mod2[WeakODR] + fn bar() {} + //~ TRANS_ITEM static regular_modules::mod1[0]::mod2[0]::BAZ[0] @@ regular_modules-mod1-mod2[External] + static BAZ: u64 = 0; + } +} + +mod mod2 { + + //~ TRANS_ITEM fn regular_modules::mod2[0]::foo[0] @@ regular_modules-mod2[WeakODR] + fn foo() {} + //~ TRANS_ITEM fn regular_modules::mod2[0]::bar[0] @@ regular_modules-mod2[WeakODR] + fn bar() {} + //~ TRANS_ITEM static regular_modules::mod2[0]::BAZ[0] @@ regular_modules-mod2[External] + static BAZ: u64 = 0; + + mod mod1 { + //~ TRANS_ITEM fn regular_modules::mod2[0]::mod1[0]::foo[0] @@ regular_modules-mod2-mod1[WeakODR] + fn foo() {} + //~ TRANS_ITEM fn regular_modules::mod2[0]::mod1[0]::bar[0] @@ regular_modules-mod2-mod1[WeakODR] + fn bar() {} + //~ TRANS_ITEM static regular_modules::mod2[0]::mod1[0]::BAZ[0] @@ regular_modules-mod2-mod1[External] + static BAZ: u64 = 0; + } + + mod mod2 { + //~ TRANS_ITEM fn regular_modules::mod2[0]::mod2[0]::foo[0] @@ regular_modules-mod2-mod2[WeakODR] + fn foo() {} + //~ TRANS_ITEM fn regular_modules::mod2[0]::mod2[0]::bar[0] @@ regular_modules-mod2-mod2[WeakODR] + fn bar() {} + //~ TRANS_ITEM static regular_modules::mod2[0]::mod2[0]::BAZ[0] @@ regular_modules-mod2-mod2[External] + static BAZ: u64 = 0; + } +} + +//~ TRANS_ITEM drop-glue i8 diff --git a/src/test/codegen-units/partitioning/statics.rs b/src/test/codegen-units/partitioning/statics.rs new file mode 100644 index 0000000000..9e878b95a3 --- /dev/null +++ b/src/test/codegen-units/partitioning/statics.rs @@ -0,0 +1,50 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
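// A minimal illustrative sketch of how the partitioning tests above are read;
// the crate name `example` and the item names here are assumed, not taken
// from the patch. Compiling a lib crate like this with
// `-Zprint-trans-items=eager` makes rustc report one line per translation
// item, and each `//~ TRANS_ITEM` comment asserts that such a line appears,
// naming the item, its codegen unit after `@@`, and the linkage in brackets.
#![crate_type = "lib"]

//~ TRANS_ITEM fn example::helper[0] @@ example[WeakODR]
fn helper() {}

//~ TRANS_ITEM fn example::caller[0] @@ example[WeakODR]
pub fn caller() { helper(); }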
+ +// ignore-tidy-linelength +// We specify -Z incremental here because we want to test the partitioning for +// incremental compilation +// compile-flags:-Zprint-trans-items=lazy -Zincremental=tmp/partitioning-tests/statics + +#![crate_type="lib"] + +//~ TRANS_ITEM static statics::FOO[0] @@ statics[External] +static FOO: u32 = 0; + +//~ TRANS_ITEM static statics::BAR[0] @@ statics[External] +static BAR: u32 = 0; + +//~ TRANS_ITEM fn statics::function[0] @@ statics[WeakODR] +fn function() { + //~ TRANS_ITEM static statics::function[0]::FOO[0] @@ statics[External] + static FOO: u32 = 0; + + //~ TRANS_ITEM static statics::function[0]::BAR[0] @@ statics[External] + static BAR: u32 = 0; +} + +mod mod1 { + //~ TRANS_ITEM static statics::mod1[0]::FOO[0] @@ statics-mod1[External] + static FOO: u32 = 0; + + //~ TRANS_ITEM static statics::mod1[0]::BAR[0] @@ statics-mod1[External] + static BAR: u32 = 0; + + //~ TRANS_ITEM fn statics::mod1[0]::function[0] @@ statics-mod1[WeakODR] + fn function() { + //~ TRANS_ITEM static statics::mod1[0]::function[0]::FOO[0] @@ statics-mod1[External] + static FOO: u32 = 0; + + //~ TRANS_ITEM static statics::mod1[0]::function[0]::BAR[0] @@ statics-mod1[External] + static BAR: u32 = 0; + } +} + +//~ TRANS_ITEM drop-glue i8 diff --git a/src/test/codegen/lto-removes-invokes.rs b/src/test/codegen/lto-removes-invokes.rs new file mode 100644 index 0000000000..b2f4348952 --- /dev/null +++ b/src/test/codegen/lto-removes-invokes.rs @@ -0,0 +1,31 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -C lto -C panic=abort -O +// no-prefer-dynamic + +fn main() { + foo(); +} + +#[no_mangle] +#[inline(never)] +fn foo() { + let _a = Box::new(3); + bar(); +// CHECK-LABEL: foo +// CHECK: call {{.*}} void @bar +} + +#[inline(never)] +#[no_mangle] +fn bar() { + println!("hello!"); +} diff --git a/src/test/codegen/stores.rs b/src/test/codegen/stores.rs index f849a6c9b1..89bb5d93c7 100644 --- a/src/test/codegen/stores.rs +++ b/src/test/codegen/stores.rs @@ -26,8 +26,12 @@ pub struct Bytes { #[no_mangle] #[rustc_no_mir] // FIXME #27840 MIR has different codegen. pub fn small_array_alignment(x: &mut [i8; 4], y: [i8; 4]) { -// CHECK: [[VAR:%[0-9]+]] = bitcast [4 x i8]* %y to i32* -// CHECK: store i32 %{{.*}}, i32* [[VAR]], align 1 +// CHECK: %y = alloca [4 x i8] +// CHECK: [[TMP:%.+]] = alloca i32 +// CHECK: store i32 %1, i32* [[TMP]] +// CHECK: [[Y8:%[0-9]+]] = bitcast [4 x i8]* %y to i8* +// CHECK: [[TMP8:%[0-9]+]] = bitcast i32* [[TMP]] to i8* +// CHECK: call void @llvm.memcpy.{{.*}}(i8* [[Y8]], i8* [[TMP8]], i{{[0-9]+}} 4, i32 1, i1 false) *x = y; } @@ -37,7 +41,11 @@ pub fn small_array_alignment(x: &mut [i8; 4], y: [i8; 4]) { #[no_mangle] #[rustc_no_mir] // FIXME #27840 MIR has different codegen. 
pub fn small_struct_alignment(x: &mut Bytes, y: Bytes) { -// CHECK: [[VAR:%[0-9]+]] = bitcast %Bytes* %y to i32* -// CHECK: store i32 %{{.*}}, i32* [[VAR]], align 1 +// CHECK: %y = alloca %Bytes +// CHECK: [[TMP:%.+]] = alloca i32 +// CHECK: store i32 %1, i32* [[TMP]] +// CHECK: [[Y8:%[0-9]+]] = bitcast %Bytes* %y to i8* +// CHECK: [[TMP8:%[0-9]+]] = bitcast i32* [[TMP]] to i8* +// CHECK: call void @llvm.memcpy.{{.*}}(i8* [[Y8]], i8* [[TMP8]], i{{[0-9]+}} 4, i32 1, i1 false) *x = y; } diff --git a/src/test/auxiliary/attr_plugin_test.rs b/src/test/compile-fail-fulldeps/auxiliary/attr_plugin_test.rs similarity index 100% rename from src/test/auxiliary/attr_plugin_test.rs rename to src/test/compile-fail-fulldeps/auxiliary/attr_plugin_test.rs diff --git a/src/test/auxiliary/lint_for_crate.rs b/src/test/compile-fail-fulldeps/auxiliary/lint_for_crate.rs similarity index 100% rename from src/test/auxiliary/lint_for_crate.rs rename to src/test/compile-fail-fulldeps/auxiliary/lint_for_crate.rs diff --git a/src/test/auxiliary/lint_group_plugin_test.rs b/src/test/compile-fail-fulldeps/auxiliary/lint_group_plugin_test.rs similarity index 100% rename from src/test/auxiliary/lint_group_plugin_test.rs rename to src/test/compile-fail-fulldeps/auxiliary/lint_group_plugin_test.rs diff --git a/src/test/auxiliary/lint_plugin_test.rs b/src/test/compile-fail-fulldeps/auxiliary/lint_plugin_test.rs similarity index 100% rename from src/test/auxiliary/lint_plugin_test.rs rename to src/test/compile-fail-fulldeps/auxiliary/lint_plugin_test.rs diff --git a/src/test/auxiliary/macro_crate_MacroRulesTT.rs b/src/test/compile-fail-fulldeps/auxiliary/macro_crate_MacroRulesTT.rs similarity index 100% rename from src/test/auxiliary/macro_crate_MacroRulesTT.rs rename to src/test/compile-fail-fulldeps/auxiliary/macro_crate_MacroRulesTT.rs diff --git a/src/test/auxiliary/macro_crate_test.rs b/src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs similarity index 100% rename from src/test/auxiliary/macro_crate_test.rs rename to src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs diff --git a/src/test/auxiliary/macro_reexport_1.rs b/src/test/compile-fail-fulldeps/auxiliary/macro_reexport_1.rs similarity index 100% rename from src/test/auxiliary/macro_reexport_1.rs rename to src/test/compile-fail-fulldeps/auxiliary/macro_reexport_1.rs diff --git a/src/test/auxiliary/rlib_crate_test.rs b/src/test/compile-fail-fulldeps/auxiliary/rlib_crate_test.rs similarity index 100% rename from src/test/auxiliary/rlib_crate_test.rs rename to src/test/compile-fail-fulldeps/auxiliary/rlib_crate_test.rs diff --git a/src/test/auxiliary/use_from_trait_xc.rs b/src/test/compile-fail-fulldeps/auxiliary/use_from_trait_xc.rs similarity index 100% rename from src/test/auxiliary/use_from_trait_xc.rs rename to src/test/compile-fail-fulldeps/auxiliary/use_from_trait_xc.rs diff --git a/src/test/compile-fail/E0001.rs b/src/test/compile-fail/E0001.rs new file mode 100644 index 0000000000..906642d855 --- /dev/null +++ b/src/test/compile-fail/E0001.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + let foo = Some(1); + match foo { + Some(bar) => {/* ... */} + None => {/* ... */} + _ => {/* ... 
*/} //~ ERROR E0001 + } +} diff --git a/src/test/compile-fail/E0002.rs b/src/test/compile-fail/E0002.rs new file mode 100644 index 0000000000..0e94c9595d --- /dev/null +++ b/src/test/compile-fail/E0002.rs @@ -0,0 +1,15 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + let x = Some(1); + + match x { } //~ ERROR E0002 +} diff --git a/src/test/compile-fail/E0004.rs b/src/test/compile-fail/E0004.rs new file mode 100644 index 0000000000..79e53c7a29 --- /dev/null +++ b/src/test/compile-fail/E0004.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +enum Terminator { + HastaLaVistaBaby, + TalkToMyHand, +} + +fn main() { + let x = Terminator::HastaLaVistaBaby; + + match x { //~ ERROR E0004 + Terminator::TalkToMyHand => {} + } +} \ No newline at end of file diff --git a/src/test/compile-fail/E0005.rs b/src/test/compile-fail/E0005.rs new file mode 100644 index 0000000000..0405bba81b --- /dev/null +++ b/src/test/compile-fail/E0005.rs @@ -0,0 +1,14 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + let x = Some(1); + let Some(y) = x; //~ ERROR E0005 +} diff --git a/src/test/compile-fail/E0007.rs b/src/test/compile-fail/E0007.rs new file mode 100644 index 0000000000..bfc0f1afe3 --- /dev/null +++ b/src/test/compile-fail/E0007.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + let x = Some("s".to_string()); + match x { + op_string @ Some(s) => {}, //~ ERROR E0007 + //~| ERROR E0303 + None => {}, + } +} diff --git a/src/test/compile-fail/E0008.rs b/src/test/compile-fail/E0008.rs new file mode 100644 index 0000000000..97dd0f368b --- /dev/null +++ b/src/test/compile-fail/E0008.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
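// A minimal sketch, assuming the same Option-shaped value as the E0005 test
// above, of the accepted counterpart: a refutable pattern has to go through
// `if let` (or a full `match`) rather than a plain `let` binding.
fn main() {
    let x = Some(1);
    if let Some(y) = x {
        let _ = y; // the binding only exists when the pattern matched
    }
}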
+ +fn main() { + match Some("hi".to_string()) { + Some(s) if s.len() == 0 => {}, //~ ERROR E0008 + _ => {}, + } +} diff --git a/src/test/compile-fail/E0009.rs b/src/test/compile-fail/E0009.rs new file mode 100644 index 0000000000..51f71ea10c --- /dev/null +++ b/src/test/compile-fail/E0009.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + struct X { x: (), } + let x = Some((X { x: () }, X { x: () })); + match x { + Some((y, ref z)) => {}, //~ ERROR E0009 + None => panic!() + } +} diff --git a/src/test/compile-fail/E0010.rs b/src/test/compile-fail/E0010.rs new file mode 100644 index 0000000000..9ae9e79546 --- /dev/null +++ b/src/test/compile-fail/E0010.rs @@ -0,0 +1,15 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(box_syntax)] + +const CON : Box = box 0; //~ ERROR E0010 + +fn main() {} diff --git a/src/test/compile-fail/E0017.rs b/src/test/compile-fail/E0017.rs new file mode 100644 index 0000000000..13f2c23d8c --- /dev/null +++ b/src/test/compile-fail/E0017.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +static X: i32 = 1; +const C: i32 = 2; + +const CR: &'static mut i32 = &mut C; //~ ERROR E0017 + //~| ERROR E0017 +static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017 + //~| ERROR E0017 + //~| ERROR E0388 +static CONST_REF: &'static mut i32 = &mut C; //~ ERROR E0017 + //~| ERROR E0017 + +fn main() {} diff --git a/src/test/compile-fail/E0023.rs b/src/test/compile-fail/E0023.rs new file mode 100644 index 0000000000..05f126baf9 --- /dev/null +++ b/src/test/compile-fail/E0023.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +enum Fruit { + Apple(String, String), + Pear(u32), +} + +fn main() { + let x = Fruit::Apple(String::new(), String::new()); + match x { + Fruit::Apple(a) => {}, //~ ERROR E0023 + Fruit::Apple(a, b, c) => {}, //~ ERROR E0023 + } +} diff --git a/src/test/compile-fail/E0024.rs b/src/test/compile-fail/E0024.rs new file mode 100644 index 0000000000..18f4dcf19d --- /dev/null +++ b/src/test/compile-fail/E0024.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +enum Number { + Zero, + One(u32) +} + +fn main() { + let x = Number::Zero; + match x { + Number::Zero(inside) => {}, //~ ERROR E0024 + Number::One(inside) => {}, + } +} diff --git a/src/test/compile-fail/E0025.rs b/src/test/compile-fail/E0025.rs new file mode 100644 index 0000000000..3f5922cdc0 --- /dev/null +++ b/src/test/compile-fail/E0025.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Foo { + a: u8, + b: u8, +} + +fn main() { + let x = Foo { a:1, b:2 }; + let Foo { a: x, a: y, b: 0 } = x; //~ ERROR E0025 +} diff --git a/src/test/compile-fail/E0026.rs b/src/test/compile-fail/E0026.rs new file mode 100644 index 0000000000..359c2a822a --- /dev/null +++ b/src/test/compile-fail/E0026.rs @@ -0,0 +1,21 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Thing { + x: u32, + y: u32 +} + +fn main() { + let thing = Thing { x: 0, y: 0 }; + match thing { + Thing { x, y, z } => {} //~ ERROR E0026 + } +} diff --git a/src/test/compile-fail/E0027.rs b/src/test/compile-fail/E0027.rs new file mode 100644 index 0000000000..b2f20442b7 --- /dev/null +++ b/src/test/compile-fail/E0027.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Dog { + name: String, + age: u32, +} + +fn main() { + let d = Dog { name: "Rusty".to_string(), age: 8 }; + + match d { + Dog { age: x } => {} //~ ERROR E0027 + } +} diff --git a/src/test/compile-fail/E0029.rs b/src/test/compile-fail/E0029.rs new file mode 100644 index 0000000000..9cbdec9952 --- /dev/null +++ b/src/test/compile-fail/E0029.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + let s = "hoho"; + + match s { + "hello" ... "world" => {} //~ ERROR E0029 + _ => {} + } +} diff --git a/src/test/compile-fail/E0030.rs b/src/test/compile-fail/E0030.rs new file mode 100644 index 0000000000..7f26f6cdb8 --- /dev/null +++ b/src/test/compile-fail/E0030.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + + +fn main() { + match 5u32 { + 1000 ... 5 => {} //~ ERROR E0030 + } +} diff --git a/src/test/compile-fail/E0033.rs b/src/test/compile-fail/E0033.rs new file mode 100644 index 0000000000..946600013f --- /dev/null +++ b/src/test/compile-fail/E0033.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +trait SomeTrait { + fn foo(); +} + +fn main() { + let trait_obj: &SomeTrait = SomeTrait; //~ ERROR E0425 + //~^ ERROR E0038 + let &invalid = trait_obj; //~ ERROR E0033 +} diff --git a/src/test/compile-fail/E0034.rs b/src/test/compile-fail/E0034.rs new file mode 100644 index 0000000000..669bece0f7 --- /dev/null +++ b/src/test/compile-fail/E0034.rs @@ -0,0 +1,26 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Test; + +trait Trait1 { + fn foo(); +} + +trait Trait2 { + fn foo(); +} + +impl Trait1 for Test { fn foo() {} } +impl Trait2 for Test { fn foo() {} } + +fn main() { + Test::foo() //~ ERROR E0034 +} diff --git a/src/test/compile-fail/E0035.rs b/src/test/compile-fail/E0035.rs new file mode 100644 index 0000000000..43f46e3578 --- /dev/null +++ b/src/test/compile-fail/E0035.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Test; + +impl Test { + fn method(&self) {} +} + +fn main() { + let x = Test; + x.method::(); //~ ERROR E0035 +} diff --git a/src/test/compile-fail/E0036.rs b/src/test/compile-fail/E0036.rs new file mode 100644 index 0000000000..35fd6e8942 --- /dev/null +++ b/src/test/compile-fail/E0036.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Test; + +impl Test { + fn method(&self, v: &[T]) -> usize { + v.len() + } +} + +fn main() { + let x = Test; + let v = &[0]; + x.method::(v); //~ ERROR E0036 +} diff --git a/src/test/compile-fail/E0038.rs b/src/test/compile-fail/E0038.rs new file mode 100644 index 0000000000..26d2f33976 --- /dev/null +++ b/src/test/compile-fail/E0038.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +trait Trait { + fn foo(&self) -> Self; +} + +fn call_foo(x: Box) { //~ ERROR E0038 + let y = x.foo(); +} + +fn main() { +} diff --git a/src/test/compile-fail/E0040.rs b/src/test/compile-fail/E0040.rs new file mode 100644 index 0000000000..f998778a50 --- /dev/null +++ b/src/test/compile-fail/E0040.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct Foo { + x: i32, +} + +impl Drop for Foo { + fn drop(&mut self) { + println!("kaboom"); + } +} + +fn main() { + let mut x = Foo { x: -7 }; + x.drop(); //~ ERROR E0040 +} diff --git a/src/test/compile-fail/E0044.rs b/src/test/compile-fail/E0044.rs new file mode 100644 index 0000000000..48fe230003 --- /dev/null +++ b/src/test/compile-fail/E0044.rs @@ -0,0 +1,14 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +extern { fn some_func(x: T); } //~ ERROR E0044 + +fn main() { +} diff --git a/src/test/compile-fail/E0045.rs b/src/test/compile-fail/E0045.rs new file mode 100644 index 0000000000..edec911d3c --- /dev/null +++ b/src/test/compile-fail/E0045.rs @@ -0,0 +1,14 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +extern "rust-call" { fn foo(x: u8, ...); } //~ ERROR E0045 + +fn main() { +} diff --git a/src/test/compile-fail/E0046.rs b/src/test/compile-fail/E0046.rs new file mode 100644 index 0000000000..63bd0a5ca2 --- /dev/null +++ b/src/test/compile-fail/E0046.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +trait Foo { + fn foo(); +} + +struct Bar; + +impl Foo for Bar {} //~ ERROR E0046 + +fn main() { +} diff --git a/src/test/compile-fail/E0049.rs b/src/test/compile-fail/E0049.rs new file mode 100644 index 0000000000..5867e11e9a --- /dev/null +++ b/src/test/compile-fail/E0049.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
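// A minimal sketch mirroring the E0046 test above, with the same assumed
// names, of the accepted form: the impl block must provide every item the
// trait declares.
trait Foo {
    fn foo();
}

struct Bar;

impl Foo for Bar {
    fn foo() {} // supplying the missing trait item avoids E0046
}

fn main() {}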
+ +trait Foo { + fn foo(x: T) -> Self; +} + +struct Bar; + +impl Foo for Bar { + fn foo(x: bool) -> Self { Bar } //~ ERROR E0049 +} + +fn main() { +} diff --git a/src/test/compile-fail/E0050.rs b/src/test/compile-fail/E0050.rs new file mode 100644 index 0000000000..2f7dc96361 --- /dev/null +++ b/src/test/compile-fail/E0050.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +trait Foo { + fn foo(&self, x: u8) -> bool; +} + +struct Bar; + +impl Foo for Bar { + fn foo(&self) -> bool { true } //~ ERROR E0050 +} + +fn main() { +} diff --git a/src/test/compile-fail/E0053.rs b/src/test/compile-fail/E0053.rs new file mode 100644 index 0000000000..4effda3c49 --- /dev/null +++ b/src/test/compile-fail/E0053.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +trait Foo { + fn foo(x: u16); + fn bar(&self); +} + +struct Bar; + +impl Foo for Bar { + fn foo(x: i16) { } //~ ERROR E0053 + fn bar(&mut self) { } //~ ERROR E0053 +} + +fn main() { +} diff --git a/src/test/compile-fail/E0054.rs b/src/test/compile-fail/E0054.rs new file mode 100644 index 0000000000..158cd6ff9b --- /dev/null +++ b/src/test/compile-fail/E0054.rs @@ -0,0 +1,14 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + let x = 5; + let x_is_nonzero = x as bool; //~ ERROR E0054 +} diff --git a/src/test/compile-fail/E0055.rs b/src/test/compile-fail/E0055.rs new file mode 100644 index 0000000000..cb78f4b3bb --- /dev/null +++ b/src/test/compile-fail/E0055.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![recursion_limit="2"] +struct Foo; + +impl Foo { + fn foo(&self) {} +} + +fn main() { + let foo = Foo; + let ref_foo = &&Foo; + ref_foo.foo(); //~ ERROR E0055 + //~^ ERROR E0275 +} diff --git a/src/test/compile-fail/E0057.rs b/src/test/compile-fail/E0057.rs new file mode 100644 index 0000000000..1fb5498b09 --- /dev/null +++ b/src/test/compile-fail/E0057.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
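// A minimal sketch, assuming the same intent as the E0054 test above: an
// integer cannot be cast to `bool`, so the check is written as a comparison.
fn main() {
    let x = 5;
    let x_is_nonzero = x != 0; // comparison instead of `as bool`
    let _ = x_is_nonzero;
}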
+ +fn main() { + let f = |x| x * 3; + let a = f(); //~ ERROR E0057 + let b = f(4); + let c = f(2, 3); //~ ERROR E0057 +} diff --git a/src/test/compile-fail/E0059.rs b/src/test/compile-fail/E0059.rs new file mode 100644 index 0000000000..4ae9b2f91d --- /dev/null +++ b/src/test/compile-fail/E0059.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(unboxed_closures)] + +fn foo>(f: F) -> F::Output { f(3) } //~ ERROR E0059 + +fn main() { +} diff --git a/src/test/compile-fail/E0060.rs b/src/test/compile-fail/E0060.rs new file mode 100644 index 0000000000..b4a2898749 --- /dev/null +++ b/src/test/compile-fail/E0060.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +extern "C" { + fn printf(_: *const u8, ...) -> u32; +} + +fn main() { + unsafe { printf(); } //~ ERROR E0060 +} diff --git a/src/test/compile-fail/E0061.rs b/src/test/compile-fail/E0061.rs new file mode 100644 index 0000000000..4a8eac2a9e --- /dev/null +++ b/src/test/compile-fail/E0061.rs @@ -0,0 +1,15 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn f(a: u16, b: &str) {} + +fn main() { + f(0); //~ ERROR E0061 +} diff --git a/src/test/compile-fail/array-not-vector.rs b/src/test/compile-fail/array-not-vector.rs index 6c9b8f81b2..1bbccae53a 100644 --- a/src/test/compile-fail/array-not-vector.rs +++ b/src/test/compile-fail/array-not-vector.rs @@ -11,16 +11,14 @@ fn main() { let _x: i32 = [1, 2, 3]; //~^ ERROR mismatched types - //~| expected `i32` - //~| found `[_; 3]` - //~| expected i32 - //~| found array of 3 elements + //~| expected type `i32` + //~| found type `[_; 3]` + //~| expected i32, found array of 3 elements let x: &[i32] = &[1, 2, 3]; let _y: &i32 = x; //~^ ERROR mismatched types - //~| expected `&i32` - //~| found `&[i32]` - //~| expected i32 - //~| found slice + //~| expected type `&i32` + //~| found type `&[i32]` + //~| expected i32, found slice } diff --git a/src/test/compile-fail/array_const_index-0.rs b/src/test/compile-fail/array_const_index-0.rs index 63a5cf65e3..1134dbfd1c 100644 --- a/src/test/compile-fail/array_const_index-0.rs +++ b/src/test/compile-fail/array_const_index-0.rs @@ -8,8 +8,9 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-static A: &'static [i32] = &[]; -static B: i32 = (&A)[1]; //~ ERROR: const index-expr is out of bounds +const A: &'static [i32] = &[]; +const B: i32 = (&A)[1]; +//~^ ERROR: array index out of bounds fn main() { let _ = B; diff --git a/src/test/compile-fail/array_const_index-1.rs b/src/test/compile-fail/array_const_index-1.rs index 1f56cd8e87..e59895cda4 100644 --- a/src/test/compile-fail/array_const_index-1.rs +++ b/src/test/compile-fail/array_const_index-1.rs @@ -9,7 +9,7 @@ // except according to those terms. const A: [i32; 0] = []; -const B: i32 = A[1]; //~ ERROR: const index-expr is out of bounds +const B: i32 = A[1]; //~ ERROR: array index out of bounds fn main() { let _ = B; diff --git a/src/test/compile-fail/associated-types-eq-3.rs b/src/test/compile-fail/associated-types-eq-3.rs index f01f2b111c..8c66160e8a 100644 --- a/src/test/compile-fail/associated-types-eq-3.rs +++ b/src/test/compile-fail/associated-types-eq-3.rs @@ -32,10 +32,9 @@ fn foo1>(x: I) { fn foo2(x: I) { let _: Bar = x.boo(); //~^ ERROR mismatched types - //~| expected `Bar` - //~| found `::A` - //~| expected struct `Bar` - //~| found associated type + //~| expected type `Bar` + //~| found type `::A` + //~| expected struct `Bar`, found associated type } diff --git a/src/test/compile-fail/associated-types-eq-hr.rs b/src/test/compile-fail/associated-types-eq-hr.rs index d5678c155f..52a2ca9082 100644 --- a/src/test/compile-fail/associated-types-eq-hr.rs +++ b/src/test/compile-fail/associated-types-eq-hr.rs @@ -40,6 +40,17 @@ impl<'a> TheTrait<&'a isize> for UintStruct { } } +struct Tuple { +} + +impl<'a> TheTrait<(&'a isize, &'a isize)> for Tuple { + type A = &'a isize; + + fn get(&self, t: (&'a isize, &'a isize)) -> &'a isize { + t.0 + } +} + fn foo() where T : for<'x> TheTrait<&'x isize, A = &'x isize> { @@ -52,10 +63,28 @@ fn bar() // ok for UintStruct, but not IntStruct } -fn baz() - where T : for<'x,'y> TheTrait<&'x isize, A = &'y isize> +fn tuple_one() + where T : for<'x,'y> TheTrait<(&'x isize, &'y isize), A = &'x isize> +{ + // not ok for tuple, two lifetimes and we pick first +} + +fn tuple_two() + where T : for<'x,'y> TheTrait<(&'x isize, &'y isize), A = &'y isize> { - // not ok for either struct, due to the use of two lifetimes + // not ok for tuple, two lifetimes and we pick second +} + +fn tuple_three() + where T : for<'x> TheTrait<(&'x isize, &'x isize), A = &'x isize> +{ + // ok for tuple +} + +fn tuple_four() + where T : for<'x,'y> TheTrait<(&'x isize, &'y isize)> +{ + // not ok for tuple, two lifetimes, and lifetime matching is invariant } pub fn main() { @@ -65,6 +94,16 @@ pub fn main() { bar::(); //~ ERROR type mismatch bar::(); - baz::(); //~ ERROR type mismatch - baz::(); //~ ERROR type mismatch + tuple_one::(); + //~^ ERROR E0277 + //~| ERROR type mismatch + + tuple_two::(); + //~^ ERROR E0277 + //~| ERROR type mismatch + + tuple_three::(); + + tuple_four::(); + //~^ ERROR E0277 } diff --git a/src/test/compile-fail/associated-types-path-2.rs b/src/test/compile-fail/associated-types-path-2.rs index 0c077e37e4..cdb7dff692 100644 --- a/src/test/compile-fail/associated-types-path-2.rs +++ b/src/test/compile-fail/associated-types-path-2.rs @@ -28,8 +28,7 @@ pub fn f2(a: T) -> T::A { pub fn f1_int_int() { f1(2i32, 4i32); //~^ ERROR mismatched types - //~| expected u32 - //~| found i32 + //~| expected u32, found i32 } pub fn f1_int_uint() { @@ -49,8 +48,7 @@ pub fn f1_uint_int() { pub fn f2_int() { let _: i32 = f2(2i32); //~^ ERROR mismatched types - //~| expected `i32` - //~| found `u32` + //~| 
expected i32, found u32 } pub fn main() { } diff --git a/src/test/compile-fail/associated-types/bound-lifetime-constrained.rs b/src/test/compile-fail/associated-types/bound-lifetime-constrained.rs new file mode 100644 index 0000000000..f60f06b4ec --- /dev/null +++ b/src/test/compile-fail/associated-types/bound-lifetime-constrained.rs @@ -0,0 +1,66 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// revisions: func object clause + +#![allow(dead_code)] +#![feature(rustc_attrs)] +#![feature(unboxed_closures)] +#![deny(hr_lifetime_in_assoc_type)] + +trait Foo<'a> { + type Item; +} + +impl<'a> Foo<'a> for() { + type Item = (); +} + +// Check that appearing in a projection input in the argument is not enough: +#[cfg(func)] +fn func1(_: for<'a> fn(<() as Foo<'a>>::Item) -> &'a i32) { + //[func]~^ ERROR return type references lifetime `'a` + //[func]~| WARNING previously accepted +} + +// Check that appearing in a projection input in the return still +// causes an error: +#[cfg(func)] +fn func2(_: for<'a> fn() -> <() as Foo<'a>>::Item) { + //[func]~^ ERROR return type references lifetime `'a` + //[func]~| WARNING previously accepted +} + +#[cfg(object)] +fn object1(_: Box Fn(<() as Foo<'a>>::Item) -> &'a i32>) { + //[object]~^ ERROR `Output` references lifetime `'a` + //[object]~| WARNING previously accepted +} + +#[cfg(object)] +fn object2(_: Box Fn() -> <() as Foo<'a>>::Item>) { + //[object]~^ ERROR `Output` references lifetime `'a` + //[object]~| WARNING previously accepted +} + +#[cfg(clause)] +fn clause1() where T: for<'a> Fn(<() as Foo<'a>>::Item) -> &'a i32 { + //[clause]~^ ERROR `Output` references lifetime `'a` + //[clause]~| WARNING previously accepted +} + +#[cfg(clause)] +fn clause2() where T: for<'a> Fn() -> <() as Foo<'a>>::Item { + //[clause]~^ ERROR `Output` references lifetime `'a` + //[clause]~| WARNING previously accepted +} + +#[rustc_error] +fn main() { } //[ok]~ ERROR compilation successful diff --git a/src/test/compile-fail/associated-types/bound-lifetime-in-binding-only.rs b/src/test/compile-fail/associated-types/bound-lifetime-in-binding-only.rs new file mode 100644 index 0000000000..020c9e5e1d --- /dev/null +++ b/src/test/compile-fail/associated-types/bound-lifetime-in-binding-only.rs @@ -0,0 +1,90 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// revisions: angle paren ok elision + +#![allow(dead_code)] +#![feature(rustc_attrs)] +#![feature(unboxed_closures)] +#![deny(hr_lifetime_in_assoc_type)] + +trait Foo { + type Item; +} + +#[cfg(angle)] +fn angle Foo>() { + //[angle]~^ ERROR binding for associated type `Item` references lifetime `'a` + //[angle]~| WARNING previously accepted +} + +#[cfg(angle)] +fn angle1() where T: for<'a> Foo { + //[angle]~^ ERROR binding for associated type `Item` references lifetime `'a` + //[angle]~| WARNING previously accepted +} + +#[cfg(angle)] +fn angle2() where for<'a> T: Foo { + //[angle]~^ ERROR binding for associated type `Item` references lifetime `'a` + //[angle]~| WARNING previously accepted +} + +#[cfg(angle)] +fn angle3(_: &for<'a> Foo) { + //[angle]~^ ERROR binding for associated type `Item` references lifetime `'a` + //[angle]~| WARNING previously accepted +} + +#[cfg(paren)] +fn paren Fn() -> &'a i32>() { + //[paren]~^ ERROR binding for associated type `Output` references lifetime `'a` + //[paren]~| WARNING previously accepted +} + +#[cfg(paren)] +fn paren1() where T: for<'a> Fn() -> &'a i32 { + //[paren]~^ ERROR binding for associated type `Output` references lifetime `'a` + //[paren]~| WARNING previously accepted +} + +#[cfg(paren)] +fn paren2() where for<'a> T: Fn() -> &'a i32 { + //[paren]~^ ERROR binding for associated type `Output` references lifetime `'a` + //[paren]~| WARNING previously accepted +} + +#[cfg(paren)] +fn paren3(_: &for<'a> Fn() -> &'a i32) { + //[paren]~^ ERROR binding for associated type `Output` references lifetime `'a` + //[paren]~| WARNING previously accepted +} + +#[cfg(elision)] +fn elision &i32>() { + //[elision]~^ ERROR E0106 +} + +struct Parameterized<'a> { x: &'a str } + +#[cfg(ok)] +fn ok1 Fn(&Parameterized<'a>) -> &'a i32>() { +} + +#[cfg(ok)] +fn ok2 Fn<(&'b Parameterized<'a>,), Output=&'a i32>>() { +} + +#[cfg(ok)] +fn ok3() where for<'a> Parameterized<'a>: Foo { +} + +#[rustc_error] +fn main() { } //[ok]~ ERROR compilation successful diff --git a/src/test/compile-fail/associated-types/bound-lifetime-in-return-only.rs b/src/test/compile-fail/associated-types/bound-lifetime-in-return-only.rs new file mode 100644 index 0000000000..0b4a9bf58a --- /dev/null +++ b/src/test/compile-fail/associated-types/bound-lifetime-in-return-only.rs @@ -0,0 +1,64 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// revisions: sig local structure ok elision + +#![allow(dead_code)] +#![feature(rustc_attrs)] +#![feature(unboxed_closures)] +#![deny(hr_lifetime_in_assoc_type)] + +trait Foo { + type Item; +} + +#[cfg(sig)] +fn sig1(_: for<'a> fn() -> &'a i32) { + //[sig]~^ ERROR return type references lifetime `'a` + //[sig]~| WARNING previously accepted +} + +#[cfg(sig)] +fn sig2(_: for<'a, 'b> fn(&'b i32) -> &'a i32) { + //[sig]~^ ERROR return type references lifetime `'a` + //[sig]~| WARNING previously accepted +} + +#[cfg(local)] +fn local1() { + let _: for<'a> fn() -> &'a i32 = loop { }; + //[local]~^ ERROR return type references lifetime `'a` + //[local]~| WARNING previously accepted +} + +#[cfg(structure)] +struct Struct1 { + x: for<'a> fn() -> &'a i32 + //[structure]~^ ERROR return type references lifetime `'a` + //[structure]~| WARNING previously accepted +} + +#[cfg(elision)] +fn elision(_: fn() -> &i32) { + //[elision]~^ ERROR E0106 +} + +struct Parameterized<'a> { x: &'a str } + +#[cfg(ok)] +fn ok1(_: &for<'a> Fn(&Parameterized<'a>) -> &'a i32) { +} + +#[cfg(ok)] +fn ok2(_: &for<'a,'b> Fn<(&'b Parameterized<'a>,), Output=&'a i32>) { +} + +#[rustc_error] +fn main() { } //[ok]~ ERROR compilation successful diff --git a/src/test/compile-fail/augmented-assignments.rs b/src/test/compile-fail/augmented-assignments.rs index 221015d512..92a8b10669 100644 --- a/src/test/compile-fail/augmented-assignments.rs +++ b/src/test/compile-fail/augmented-assignments.rs @@ -21,11 +21,15 @@ impl AddAssign for Int { fn main() { let mut x = Int(1); x //~ error: use of moved value: `x` + //~^ value used here after move + //~| note: move occurs because `x` has type `Int` += - x; //~ note: `x` moved here because it has type `Int`, which is non-copyable + x; //~ value moved here let y = Int(2); + //~^use `mut y` here to make mutable y //~ error: cannot borrow immutable local variable `y` as mutable + //~| cannot borrow += Int(1); } diff --git a/src/test/auxiliary/allocator-dylib.rs b/src/test/compile-fail/auxiliary/allocator-dylib.rs similarity index 100% rename from src/test/auxiliary/allocator-dylib.rs rename to src/test/compile-fail/auxiliary/allocator-dylib.rs diff --git a/src/test/auxiliary/allocator-dylib2.rs b/src/test/compile-fail/auxiliary/allocator-dylib2.rs similarity index 100% rename from src/test/auxiliary/allocator-dylib2.rs rename to src/test/compile-fail/auxiliary/allocator-dylib2.rs diff --git a/src/test/auxiliary/allocator1.rs b/src/test/compile-fail/auxiliary/allocator1.rs similarity index 100% rename from src/test/auxiliary/allocator1.rs rename to src/test/compile-fail/auxiliary/allocator1.rs diff --git a/src/test/auxiliary/allocator2.rs b/src/test/compile-fail/auxiliary/allocator2.rs similarity index 100% rename from src/test/auxiliary/allocator2.rs rename to src/test/compile-fail/auxiliary/allocator2.rs diff --git a/src/test/auxiliary/allocator3.rs b/src/test/compile-fail/auxiliary/allocator3.rs similarity index 100% rename from src/test/auxiliary/allocator3.rs rename to src/test/compile-fail/auxiliary/allocator3.rs diff --git a/src/test/auxiliary/ambig_impl_2_lib.rs b/src/test/compile-fail/auxiliary/ambig_impl_2_lib.rs similarity index 100% rename from src/test/auxiliary/ambig_impl_2_lib.rs rename to src/test/compile-fail/auxiliary/ambig_impl_2_lib.rs diff --git a/src/test/auxiliary/cci_class.rs b/src/test/compile-fail/auxiliary/cci_class.rs similarity index 100% rename from src/test/auxiliary/cci_class.rs rename to src/test/compile-fail/auxiliary/cci_class.rs diff --git 
a/src/test/auxiliary/cci_class_5.rs b/src/test/compile-fail/auxiliary/cci_class_5.rs similarity index 100% rename from src/test/auxiliary/cci_class_5.rs rename to src/test/compile-fail/auxiliary/cci_class_5.rs diff --git a/src/test/compile-fail/auxiliary/cdylib-dep.rs b/src/test/compile-fail/auxiliary/cdylib-dep.rs new file mode 100644 index 0000000000..a3d0222a14 --- /dev/null +++ b/src/test/compile-fail/auxiliary/cdylib-dep.rs @@ -0,0 +1,11 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "dylib"] diff --git a/src/test/auxiliary/changing-crates-a1.rs b/src/test/compile-fail/auxiliary/changing-crates-a1.rs similarity index 100% rename from src/test/auxiliary/changing-crates-a1.rs rename to src/test/compile-fail/auxiliary/changing-crates-a1.rs diff --git a/src/test/auxiliary/changing-crates-a2.rs b/src/test/compile-fail/auxiliary/changing-crates-a2.rs similarity index 100% rename from src/test/auxiliary/changing-crates-a2.rs rename to src/test/compile-fail/auxiliary/changing-crates-a2.rs diff --git a/src/test/auxiliary/changing-crates-b.rs b/src/test/compile-fail/auxiliary/changing-crates-b.rs similarity index 100% rename from src/test/auxiliary/changing-crates-b.rs rename to src/test/compile-fail/auxiliary/changing-crates-b.rs diff --git a/src/test/auxiliary/coherence_copy_like_lib.rs b/src/test/compile-fail/auxiliary/coherence_copy_like_lib.rs similarity index 100% rename from src/test/auxiliary/coherence_copy_like_lib.rs rename to src/test/compile-fail/auxiliary/coherence_copy_like_lib.rs diff --git a/src/test/auxiliary/coherence_inherent_cc_lib.rs b/src/test/compile-fail/auxiliary/coherence_inherent_cc_lib.rs similarity index 100% rename from src/test/auxiliary/coherence_inherent_cc_lib.rs rename to src/test/compile-fail/auxiliary/coherence_inherent_cc_lib.rs diff --git a/src/test/auxiliary/coherence_lib.rs b/src/test/compile-fail/auxiliary/coherence_lib.rs similarity index 100% rename from src/test/auxiliary/coherence_lib.rs rename to src/test/compile-fail/auxiliary/coherence_lib.rs diff --git a/src/test/auxiliary/coherence_orphan_lib.rs b/src/test/compile-fail/auxiliary/coherence_orphan_lib.rs similarity index 100% rename from src/test/auxiliary/coherence_orphan_lib.rs rename to src/test/compile-fail/auxiliary/coherence_orphan_lib.rs diff --git a/src/test/auxiliary/const_fn_lib.rs b/src/test/compile-fail/auxiliary/const_fn_lib.rs similarity index 100% rename from src/test/auxiliary/const_fn_lib.rs rename to src/test/compile-fail/auxiliary/const_fn_lib.rs diff --git a/src/test/auxiliary/crate_a1.rs b/src/test/compile-fail/auxiliary/crate_a1.rs similarity index 100% rename from src/test/auxiliary/crate_a1.rs rename to src/test/compile-fail/auxiliary/crate_a1.rs diff --git a/src/test/auxiliary/crate_a2.rs b/src/test/compile-fail/auxiliary/crate_a2.rs similarity index 100% rename from src/test/auxiliary/crate_a2.rs rename to src/test/compile-fail/auxiliary/crate_a2.rs diff --git a/src/test/auxiliary/crateresolve1-1.rs b/src/test/compile-fail/auxiliary/crateresolve1-1.rs similarity index 100% rename from src/test/auxiliary/crateresolve1-1.rs rename to src/test/compile-fail/auxiliary/crateresolve1-1.rs diff --git a/src/test/auxiliary/crateresolve1-2.rs 
b/src/test/compile-fail/auxiliary/crateresolve1-2.rs similarity index 100% rename from src/test/auxiliary/crateresolve1-2.rs rename to src/test/compile-fail/auxiliary/crateresolve1-2.rs diff --git a/src/test/auxiliary/crateresolve1-3.rs b/src/test/compile-fail/auxiliary/crateresolve1-3.rs similarity index 100% rename from src/test/auxiliary/crateresolve1-3.rs rename to src/test/compile-fail/auxiliary/crateresolve1-3.rs diff --git a/src/test/auxiliary/default_ty_param_cross_crate_crate.rs b/src/test/compile-fail/auxiliary/default_ty_param_cross_crate_crate.rs similarity index 100% rename from src/test/auxiliary/default_ty_param_cross_crate_crate.rs rename to src/test/compile-fail/auxiliary/default_ty_param_cross_crate_crate.rs diff --git a/src/test/auxiliary/deprecation-lint.rs b/src/test/compile-fail/auxiliary/deprecation-lint.rs similarity index 100% rename from src/test/auxiliary/deprecation-lint.rs rename to src/test/compile-fail/auxiliary/deprecation-lint.rs diff --git a/src/test/auxiliary/empty-struct.rs b/src/test/compile-fail/auxiliary/empty-struct.rs similarity index 100% rename from src/test/auxiliary/empty-struct.rs rename to src/test/compile-fail/auxiliary/empty-struct.rs diff --git a/src/test/auxiliary/go_trait.rs b/src/test/compile-fail/auxiliary/go_trait.rs similarity index 100% rename from src/test/auxiliary/go_trait.rs rename to src/test/compile-fail/auxiliary/go_trait.rs diff --git a/src/test/auxiliary/inherited_stability.rs b/src/test/compile-fail/auxiliary/inherited_stability.rs similarity index 100% rename from src/test/auxiliary/inherited_stability.rs rename to src/test/compile-fail/auxiliary/inherited_stability.rs diff --git a/src/test/auxiliary/internal_unstable.rs b/src/test/compile-fail/auxiliary/internal_unstable.rs similarity index 100% rename from src/test/auxiliary/internal_unstable.rs rename to src/test/compile-fail/auxiliary/internal_unstable.rs diff --git a/src/test/auxiliary/issue-19163.rs b/src/test/compile-fail/auxiliary/issue-19163.rs similarity index 100% rename from src/test/auxiliary/issue-19163.rs rename to src/test/compile-fail/auxiliary/issue-19163.rs diff --git a/src/test/auxiliary/issue-21146-inc.rs b/src/test/compile-fail/auxiliary/issue-21146-inc.rs similarity index 100% rename from src/test/auxiliary/issue-21146-inc.rs rename to src/test/compile-fail/auxiliary/issue-21146-inc.rs diff --git a/src/test/auxiliary/issue-21221-3.rs b/src/test/compile-fail/auxiliary/issue-21221-3.rs similarity index 100% rename from src/test/auxiliary/issue-21221-3.rs rename to src/test/compile-fail/auxiliary/issue-21221-3.rs diff --git a/src/test/auxiliary/issue-21221-4.rs b/src/test/compile-fail/auxiliary/issue-21221-4.rs similarity index 100% rename from src/test/auxiliary/issue-21221-4.rs rename to src/test/compile-fail/auxiliary/issue-21221-4.rs diff --git a/src/test/auxiliary/issue-29181.rs b/src/test/compile-fail/auxiliary/issue-29181.rs similarity index 100% rename from src/test/auxiliary/issue-29181.rs rename to src/test/compile-fail/auxiliary/issue-29181.rs diff --git a/src/test/auxiliary/issue-30535.rs b/src/test/compile-fail/auxiliary/issue-30535.rs similarity index 100% rename from src/test/auxiliary/issue-30535.rs rename to src/test/compile-fail/auxiliary/issue-30535.rs diff --git a/src/test/auxiliary/issue_11680.rs b/src/test/compile-fail/auxiliary/issue_11680.rs similarity index 100% rename from src/test/auxiliary/issue_11680.rs rename to src/test/compile-fail/auxiliary/issue_11680.rs diff --git a/src/test/auxiliary/issue_12612_1.rs 
b/src/test/compile-fail/auxiliary/issue_12612_1.rs similarity index 100% rename from src/test/auxiliary/issue_12612_1.rs rename to src/test/compile-fail/auxiliary/issue_12612_1.rs diff --git a/src/test/auxiliary/issue_16725.rs b/src/test/compile-fail/auxiliary/issue_16725.rs similarity index 100% rename from src/test/auxiliary/issue_16725.rs rename to src/test/compile-fail/auxiliary/issue_16725.rs diff --git a/src/test/auxiliary/issue_17718_const_privacy.rs b/src/test/compile-fail/auxiliary/issue_17718_const_privacy.rs similarity index 100% rename from src/test/auxiliary/issue_17718_const_privacy.rs rename to src/test/compile-fail/auxiliary/issue_17718_const_privacy.rs diff --git a/src/test/auxiliary/issue_21202.rs b/src/test/compile-fail/auxiliary/issue_21202.rs similarity index 100% rename from src/test/auxiliary/issue_21202.rs rename to src/test/compile-fail/auxiliary/issue_21202.rs diff --git a/src/test/auxiliary/issue_30123_aux.rs b/src/test/compile-fail/auxiliary/issue_30123_aux.rs similarity index 100% rename from src/test/auxiliary/issue_30123_aux.rs rename to src/test/compile-fail/auxiliary/issue_30123_aux.rs diff --git a/src/test/auxiliary/issue_3907.rs b/src/test/compile-fail/auxiliary/issue_3907.rs similarity index 100% rename from src/test/auxiliary/issue_3907.rs rename to src/test/compile-fail/auxiliary/issue_3907.rs diff --git a/src/test/auxiliary/issue_5844_aux.rs b/src/test/compile-fail/auxiliary/issue_5844_aux.rs similarity index 100% rename from src/test/auxiliary/issue_5844_aux.rs rename to src/test/compile-fail/auxiliary/issue_5844_aux.rs diff --git a/src/test/auxiliary/lifetime_bound_will_change_warning_lib.rs b/src/test/compile-fail/auxiliary/lifetime_bound_will_change_warning_lib.rs similarity index 100% rename from src/test/auxiliary/lifetime_bound_will_change_warning_lib.rs rename to src/test/compile-fail/auxiliary/lifetime_bound_will_change_warning_lib.rs diff --git a/src/test/auxiliary/lint_output_format.rs b/src/test/compile-fail/auxiliary/lint_output_format.rs similarity index 100% rename from src/test/auxiliary/lint_output_format.rs rename to src/test/compile-fail/auxiliary/lint_output_format.rs diff --git a/src/test/auxiliary/lint_stability.rs b/src/test/compile-fail/auxiliary/lint_stability.rs similarity index 100% rename from src/test/auxiliary/lint_stability.rs rename to src/test/compile-fail/auxiliary/lint_stability.rs diff --git a/src/test/auxiliary/lint_stability_fields.rs b/src/test/compile-fail/auxiliary/lint_stability_fields.rs similarity index 100% rename from src/test/auxiliary/lint_stability_fields.rs rename to src/test/compile-fail/auxiliary/lint_stability_fields.rs diff --git a/src/test/auxiliary/lint_unused_extern_crate.rs b/src/test/compile-fail/auxiliary/lint_unused_extern_crate.rs similarity index 100% rename from src/test/auxiliary/lint_unused_extern_crate.rs rename to src/test/compile-fail/auxiliary/lint_unused_extern_crate.rs diff --git a/src/test/auxiliary/macro_crate_nonterminal.rs b/src/test/compile-fail/auxiliary/macro_crate_nonterminal.rs similarity index 100% rename from src/test/auxiliary/macro_crate_nonterminal.rs rename to src/test/compile-fail/auxiliary/macro_crate_nonterminal.rs diff --git a/src/test/auxiliary/macro_non_reexport_2.rs b/src/test/compile-fail/auxiliary/macro_non_reexport_2.rs similarity index 100% rename from src/test/auxiliary/macro_non_reexport_2.rs rename to src/test/compile-fail/auxiliary/macro_non_reexport_2.rs diff --git a/src/test/compile-fail/auxiliary/macro_reexport_1.rs 
b/src/test/compile-fail/auxiliary/macro_reexport_1.rs new file mode 100644 index 0000000000..aaeccc6e89 --- /dev/null +++ b/src/test/compile-fail/auxiliary/macro_reexport_1.rs @@ -0,0 +1,15 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "dylib"] +#[macro_export] +macro_rules! reexported { + () => ( 3 ) +} diff --git a/src/test/auxiliary/namespaced_enums.rs b/src/test/compile-fail/auxiliary/namespaced_enums.rs similarity index 100% rename from src/test/auxiliary/namespaced_enums.rs rename to src/test/compile-fail/auxiliary/namespaced_enums.rs diff --git a/src/test/auxiliary/needs_allocator.rs b/src/test/compile-fail/auxiliary/needs_allocator.rs similarity index 100% rename from src/test/auxiliary/needs_allocator.rs rename to src/test/compile-fail/auxiliary/needs_allocator.rs diff --git a/src/test/auxiliary/no_method_suggested_traits.rs b/src/test/compile-fail/auxiliary/no_method_suggested_traits.rs similarity index 100% rename from src/test/auxiliary/no_method_suggested_traits.rs rename to src/test/compile-fail/auxiliary/no_method_suggested_traits.rs diff --git a/src/test/auxiliary/noexporttypelib.rs b/src/test/compile-fail/auxiliary/noexporttypelib.rs similarity index 100% rename from src/test/auxiliary/noexporttypelib.rs rename to src/test/compile-fail/auxiliary/noexporttypelib.rs diff --git a/src/test/auxiliary/orphan_check_diagnostics.rs b/src/test/compile-fail/auxiliary/orphan_check_diagnostics.rs similarity index 100% rename from src/test/auxiliary/orphan_check_diagnostics.rs rename to src/test/compile-fail/auxiliary/orphan_check_diagnostics.rs diff --git a/src/test/auxiliary/privacy_tuple_struct.rs b/src/test/compile-fail/auxiliary/privacy_tuple_struct.rs similarity index 100% rename from src/test/auxiliary/privacy_tuple_struct.rs rename to src/test/compile-fail/auxiliary/privacy_tuple_struct.rs diff --git a/src/test/auxiliary/private_trait_xc.rs b/src/test/compile-fail/auxiliary/private_trait_xc.rs similarity index 100% rename from src/test/auxiliary/private_trait_xc.rs rename to src/test/compile-fail/auxiliary/private_trait_xc.rs diff --git a/src/test/auxiliary/pub_static_array.rs b/src/test/compile-fail/auxiliary/pub_static_array.rs similarity index 92% rename from src/test/auxiliary/pub_static_array.rs rename to src/test/compile-fail/auxiliary/pub_static_array.rs index 4419a5ae83..7248d0e543 100644 --- a/src/test/auxiliary/pub_static_array.rs +++ b/src/test/compile-fail/auxiliary/pub_static_array.rs @@ -8,4 +8,4 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-pub static ARRAY: &'static [u8] = &[1]; +pub static ARRAY: [u8; 1] = [1]; diff --git a/src/test/auxiliary/regions_bounded_method_type_parameters_cross_crate_lib.rs b/src/test/compile-fail/auxiliary/rbmtp_cross_crate_lib.rs similarity index 100% rename from src/test/auxiliary/regions_bounded_method_type_parameters_cross_crate_lib.rs rename to src/test/compile-fail/auxiliary/rbmtp_cross_crate_lib.rs diff --git a/src/test/auxiliary/stability_attribute_issue.rs b/src/test/compile-fail/auxiliary/stability_attribute_issue.rs similarity index 100% rename from src/test/auxiliary/stability_attribute_issue.rs rename to src/test/compile-fail/auxiliary/stability_attribute_issue.rs diff --git a/src/test/auxiliary/stability_cfg1.rs b/src/test/compile-fail/auxiliary/stability_cfg1.rs similarity index 100% rename from src/test/auxiliary/stability_cfg1.rs rename to src/test/compile-fail/auxiliary/stability_cfg1.rs diff --git a/src/test/auxiliary/stability_cfg2.rs b/src/test/compile-fail/auxiliary/stability_cfg2.rs similarity index 100% rename from src/test/auxiliary/stability_cfg2.rs rename to src/test/compile-fail/auxiliary/stability_cfg2.rs diff --git a/src/test/auxiliary/static_priv_by_default.rs b/src/test/compile-fail/auxiliary/static_priv_by_default.rs similarity index 100% rename from src/test/auxiliary/static_priv_by_default.rs rename to src/test/compile-fail/auxiliary/static_priv_by_default.rs diff --git a/src/test/auxiliary/struct_field_privacy.rs b/src/test/compile-fail/auxiliary/struct_field_privacy.rs similarity index 100% rename from src/test/auxiliary/struct_field_privacy.rs rename to src/test/compile-fail/auxiliary/struct_field_privacy.rs diff --git a/src/test/auxiliary/struct_variant_privacy.rs b/src/test/compile-fail/auxiliary/struct_variant_privacy.rs similarity index 100% rename from src/test/auxiliary/struct_variant_privacy.rs rename to src/test/compile-fail/auxiliary/struct_variant_privacy.rs diff --git a/src/test/auxiliary/svh-a-base.rs b/src/test/compile-fail/auxiliary/svh-a-base.rs similarity index 100% rename from src/test/auxiliary/svh-a-base.rs rename to src/test/compile-fail/auxiliary/svh-a-base.rs diff --git a/src/test/auxiliary/svh-a-change-lit.rs b/src/test/compile-fail/auxiliary/svh-a-change-lit.rs similarity index 100% rename from src/test/auxiliary/svh-a-change-lit.rs rename to src/test/compile-fail/auxiliary/svh-a-change-lit.rs diff --git a/src/test/auxiliary/svh-a-change-significant-cfg.rs b/src/test/compile-fail/auxiliary/svh-a-change-significant-cfg.rs similarity index 100% rename from src/test/auxiliary/svh-a-change-significant-cfg.rs rename to src/test/compile-fail/auxiliary/svh-a-change-significant-cfg.rs diff --git a/src/test/auxiliary/svh-a-change-trait-bound.rs b/src/test/compile-fail/auxiliary/svh-a-change-trait-bound.rs similarity index 100% rename from src/test/auxiliary/svh-a-change-trait-bound.rs rename to src/test/compile-fail/auxiliary/svh-a-change-trait-bound.rs diff --git a/src/test/auxiliary/svh-a-change-type-arg.rs b/src/test/compile-fail/auxiliary/svh-a-change-type-arg.rs similarity index 100% rename from src/test/auxiliary/svh-a-change-type-arg.rs rename to src/test/compile-fail/auxiliary/svh-a-change-type-arg.rs diff --git a/src/test/auxiliary/svh-a-change-type-ret.rs b/src/test/compile-fail/auxiliary/svh-a-change-type-ret.rs similarity index 100% rename from src/test/auxiliary/svh-a-change-type-ret.rs rename to src/test/compile-fail/auxiliary/svh-a-change-type-ret.rs diff --git a/src/test/auxiliary/svh-a-change-type-static.rs 
b/src/test/compile-fail/auxiliary/svh-a-change-type-static.rs similarity index 100% rename from src/test/auxiliary/svh-a-change-type-static.rs rename to src/test/compile-fail/auxiliary/svh-a-change-type-static.rs diff --git a/src/test/auxiliary/svh-b.rs b/src/test/compile-fail/auxiliary/svh-b.rs similarity index 100% rename from src/test/auxiliary/svh-b.rs rename to src/test/compile-fail/auxiliary/svh-b.rs diff --git a/src/test/auxiliary/svh-uta-base.rs b/src/test/compile-fail/auxiliary/svh-uta-base.rs similarity index 100% rename from src/test/auxiliary/svh-uta-base.rs rename to src/test/compile-fail/auxiliary/svh-uta-base.rs diff --git a/src/test/auxiliary/svh-uta-change-use-trait.rs b/src/test/compile-fail/auxiliary/svh-uta-change-use-trait.rs similarity index 100% rename from src/test/auxiliary/svh-uta-change-use-trait.rs rename to src/test/compile-fail/auxiliary/svh-uta-change-use-trait.rs diff --git a/src/test/auxiliary/svh-utb.rs b/src/test/compile-fail/auxiliary/svh-utb.rs similarity index 100% rename from src/test/auxiliary/svh-utb.rs rename to src/test/compile-fail/auxiliary/svh-utb.rs diff --git a/src/test/auxiliary/typeck_default_trait_impl_cross_crate_coherence_lib.rs b/src/test/compile-fail/auxiliary/tdticc_coherence_lib.rs similarity index 100% rename from src/test/auxiliary/typeck_default_trait_impl_cross_crate_coherence_lib.rs rename to src/test/compile-fail/auxiliary/tdticc_coherence_lib.rs diff --git a/src/test/auxiliary/trait_bounds_on_structs_and_enums_xc.rs b/src/test/compile-fail/auxiliary/trait_bounds_on_structs_and_enums_xc.rs similarity index 100% rename from src/test/auxiliary/trait_bounds_on_structs_and_enums_xc.rs rename to src/test/compile-fail/auxiliary/trait_bounds_on_structs_and_enums_xc.rs diff --git a/src/test/auxiliary/trait_impl_conflict.rs b/src/test/compile-fail/auxiliary/trait_impl_conflict.rs similarity index 100% rename from src/test/auxiliary/trait_impl_conflict.rs rename to src/test/compile-fail/auxiliary/trait_impl_conflict.rs diff --git a/src/test/auxiliary/trait_safety_lib.rs b/src/test/compile-fail/auxiliary/trait_safety_lib.rs similarity index 100% rename from src/test/auxiliary/trait_safety_lib.rs rename to src/test/compile-fail/auxiliary/trait_safety_lib.rs diff --git a/src/test/auxiliary/trait_superkinds_in_metadata.rs b/src/test/compile-fail/auxiliary/trait_superkinds_in_metadata.rs similarity index 100% rename from src/test/auxiliary/trait_superkinds_in_metadata.rs rename to src/test/compile-fail/auxiliary/trait_superkinds_in_metadata.rs diff --git a/src/test/auxiliary/two_macros.rs b/src/test/compile-fail/auxiliary/two_macros.rs similarity index 100% rename from src/test/auxiliary/two_macros.rs rename to src/test/compile-fail/auxiliary/two_macros.rs diff --git a/src/test/auxiliary/unreachable_variant.rs b/src/test/compile-fail/auxiliary/unreachable_variant.rs similarity index 100% rename from src/test/auxiliary/unreachable_variant.rs rename to src/test/compile-fail/auxiliary/unreachable_variant.rs diff --git a/src/test/compile-fail/auxiliary/use_from_trait_xc.rs b/src/test/compile-fail/auxiliary/use_from_trait_xc.rs new file mode 100644 index 0000000000..7024c9dad7 --- /dev/null +++ b/src/test/compile-fail/auxiliary/use_from_trait_xc.rs @@ -0,0 +1,41 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(associated_consts)] + +pub use self::sub::{Bar, Baz}; + +pub trait Trait { + fn foo(&self); + type Assoc; + const CONST: u32; +} + +struct Foo; + +impl Foo { + pub fn new() {} + + pub const C: u32 = 0; +} + +mod sub { + pub struct Bar; + + impl Bar { + pub fn new() {} + } + + pub enum Baz {} + + impl Baz { + pub fn new() {} + } +} diff --git a/src/test/auxiliary/variant-namespacing.rs b/src/test/compile-fail/auxiliary/variant-namespacing.rs similarity index 100% rename from src/test/auxiliary/variant-namespacing.rs rename to src/test/compile-fail/auxiliary/variant-namespacing.rs diff --git a/src/test/auxiliary/weak-lang-items.rs b/src/test/compile-fail/auxiliary/weak-lang-items.rs similarity index 100% rename from src/test/auxiliary/weak-lang-items.rs rename to src/test/compile-fail/auxiliary/weak-lang-items.rs diff --git a/src/test/auxiliary/xc_private_method_lib.rs b/src/test/compile-fail/auxiliary/xc_private_method_lib.rs similarity index 100% rename from src/test/auxiliary/xc_private_method_lib.rs rename to src/test/compile-fail/auxiliary/xc_private_method_lib.rs diff --git a/src/test/auxiliary/xcrate_unit_struct.rs b/src/test/compile-fail/auxiliary/xcrate_unit_struct.rs similarity index 100% rename from src/test/auxiliary/xcrate_unit_struct.rs rename to src/test/compile-fail/auxiliary/xcrate_unit_struct.rs diff --git a/src/test/compile-fail/bad-const-type.rs b/src/test/compile-fail/bad-const-type.rs index f05c8c31f1..ee6ac33072 100644 --- a/src/test/compile-fail/bad-const-type.rs +++ b/src/test/compile-fail/bad-const-type.rs @@ -10,8 +10,7 @@ static i: String = 10; //~^ ERROR mismatched types -//~| expected `std::string::String` -//~| found `_` -//~| expected struct `std::string::String` -//~| found integral variable +//~| expected type `std::string::String` +//~| found type `_` +//~| expected struct `std::string::String`, found integral variable fn main() { println!("{}", i); } diff --git a/src/test/compile-fail/bad-intrinsic-monomorphization.rs b/src/test/compile-fail/bad-intrinsic-monomorphization.rs index 049552aa2d..cfb64f8076 100644 --- a/src/test/compile-fail/bad-intrinsic-monomorphization.rs +++ b/src/test/compile-fail/bad-intrinsic-monomorphization.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(repr_simd, platform_intrinsics, rustc_attrs, core_intrinsics)] +#![feature(repr_simd, platform_intrinsics, core_intrinsics)] #![allow(warnings)] // Bad monomorphizations could previously cause LLVM asserts even though the @@ -23,19 +23,16 @@ use std::intrinsics; #[derive(Copy, Clone)] struct Foo(i64); -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_cttz(v: Foo) -> Foo { intrinsics::cttz(v) //~^ ERROR `cttz` intrinsic: expected basic integer type, found `Foo` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_fadd_fast(a: Foo, b: Foo) -> Foo { intrinsics::fadd_fast(a, b) //~^ ERROR `fadd_fast` intrinsic: expected basic float type, found `Foo` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. 
unsafe fn test_simd_add(a: Foo, b: Foo) -> Foo { simd_add(a, b) //~^ ERROR `simd_add` intrinsic: expected SIMD input type, found non-SIMD `Foo` diff --git a/src/test/compile-fail/bad-main.rs b/src/test/compile-fail/bad-main.rs index 321dca8989..1253f7569e 100644 --- a/src/test/compile-fail/bad-main.rs +++ b/src/test/compile-fail/bad-main.rs @@ -8,4 +8,4 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -fn main(x: isize) { } //~ ERROR: main function expects type +fn main(x: isize) { } //~ ERROR: main function has wrong type diff --git a/src/test/compile-fail/bad-sized.rs b/src/test/compile-fail/bad-sized.rs index f62404e60e..8aaf752125 100644 --- a/src/test/compile-fail/bad-sized.rs +++ b/src/test/compile-fail/bad-sized.rs @@ -14,5 +14,4 @@ pub fn main() { let x: Vec = Vec::new(); //~^ ERROR `Trait + Sized: std::marker::Sized` is not satisfied //~| ERROR `Trait + Sized: std::marker::Sized` is not satisfied - //~| ERROR `Trait + Sized: std::marker::Sized` is not satisfied } diff --git a/src/test/compile-fail/binop-move-semantics.rs b/src/test/compile-fail/binop-move-semantics.rs index cff0064497..0cc6ea3e98 100644 --- a/src/test/compile-fail/binop-move-semantics.rs +++ b/src/test/compile-fail/binop-move-semantics.rs @@ -62,6 +62,7 @@ fn mut_plus_immut() { &mut f + &f; //~ ERROR: cannot borrow `f` as immutable because it is also borrowed as mutable + //~^ cannot borrow `f` as immutable because it is also borrowed as mutable } fn immut_plus_mut() { @@ -70,6 +71,7 @@ fn immut_plus_mut() { &f + &mut f; //~ ERROR: cannot borrow `f` as mutable because it is also borrowed as immutable + //~^ cannot borrow `f` as mutable because it is also borrowed as immutable } fn main() {} diff --git a/src/test/compile-fail/blind-item-block-middle.rs b/src/test/compile-fail/blind-item-block-middle.rs index 930f769771..287eab7a56 100644 --- a/src/test/compile-fail/blind-item-block-middle.rs +++ b/src/test/compile-fail/blind-item-block-middle.rs @@ -12,6 +12,6 @@ mod foo { pub struct bar; } fn main() { let bar = 5; - //~^ ERROR declaration of `bar` shadows an enum variant or unit-like struct in scope + //~^ ERROR cannot be named the same use foo::bar; } diff --git a/src/test/compile-fail/blind-item-item-shadow.rs b/src/test/compile-fail/blind-item-item-shadow.rs index b08c78e906..853282ff01 100644 --- a/src/test/compile-fail/blind-item-item-shadow.rs +++ b/src/test/compile-fail/blind-item-item-shadow.rs @@ -10,6 +10,8 @@ mod foo { pub mod foo { } } //~ NOTE previous definition of `foo` here -use foo::foo; //~ ERROR a module named `foo` has already been defined in this module +use foo::foo; +//~^ ERROR a module named `foo` has already been defined in this module +//~| was already imported fn main() {} diff --git a/src/test/compile-fail/block-must-not-have-result-do.rs b/src/test/compile-fail/block-must-not-have-result-do.rs index 30039a1c54..2a6c71dbe3 100644 --- a/src/test/compile-fail/block-must-not-have-result-do.rs +++ b/src/test/compile-fail/block-must-not-have-result-do.rs @@ -11,9 +11,5 @@ fn main() { loop { true //~ ERROR mismatched types - //~| expected () - //~| found bool - //~| expected () - //~| found bool } } diff --git a/src/test/compile-fail/block-must-not-have-result-res.rs b/src/test/compile-fail/block-must-not-have-result-res.rs index 6161660ddf..8728685fc8 100644 --- a/src/test/compile-fail/block-must-not-have-result-res.rs +++ b/src/test/compile-fail/block-must-not-have-result-res.rs @@ -13,10 +13,6 @@ struct r; impl Drop for r { fn 
drop(&mut self) { true //~ ERROR mismatched types - //~| expected () - //~| found bool - //~| expected () - //~| found bool } } diff --git a/src/test/compile-fail/block-must-not-have-result-while.rs b/src/test/compile-fail/block-must-not-have-result-while.rs index ba6340ed39..a0fb470e1e 100644 --- a/src/test/compile-fail/block-must-not-have-result-while.rs +++ b/src/test/compile-fail/block-must-not-have-result-while.rs @@ -11,9 +11,8 @@ fn main() { while true { true //~ ERROR mismatched types - //~| expected `()` - //~| found `bool` - //~| expected () - //~| found bool + //~| expected type `()` + //~| found type `bool` + //~| expected (), found bool } } diff --git a/src/test/compile-fail/borrowck/borrowck-borrowed-uniq-rvalue-2.rs b/src/test/compile-fail/borrowck/borrowck-borrowed-uniq-rvalue-2.rs index 309e286f48..7b811f581c 100644 --- a/src/test/compile-fail/borrowck/borrowck-borrowed-uniq-rvalue-2.rs +++ b/src/test/compile-fail/borrowck/borrowck-borrowed-uniq-rvalue-2.rs @@ -8,6 +8,8 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// error-pattern: borrowed value does not live long enough + struct defer<'a> { x: &'a [&'a str], } @@ -28,6 +30,5 @@ fn defer<'r>(x: &'r [&'r str]) -> defer<'r> { fn main() { let x = defer(&vec!("Goodbye", "world!")); - //~^ ERROR borrowed value does not live long enough x.x[0]; } diff --git a/src/test/compile-fail/borrowck/borrowck-box-insensitivity.rs b/src/test/compile-fail/borrowck/borrowck-box-insensitivity.rs index 7c3d632078..3fd71f7156 100644 --- a/src/test/compile-fail/borrowck/borrowck-box-insensitivity.rs +++ b/src/test/compile-fail/borrowck/borrowck-box-insensitivity.rs @@ -33,29 +33,37 @@ struct D { fn copy_after_move() { let a: Box<_> = box A { x: box 0, y: 1 }; let _x = a.x; + //~^ value moved here let _y = a.y; //~ ERROR use of moved - //~^^ NOTE `a` moved here (through moving `a.x`) + //~^ move occurs because `a.x` has type `Box` + //~| value used here after move } fn move_after_move() { let a: Box<_> = box B { x: box 0, y: box 1 }; let _x = a.x; + //~^ value moved here let _y = a.y; //~ ERROR use of moved - //~^^ NOTE `a` moved here (through moving `a.x`) + //~^ move occurs because `a.x` has type `Box` + //~| value used here after move } fn borrow_after_move() { let a: Box<_> = box A { x: box 0, y: 1 }; let _x = a.x; + //~^ value moved here let _y = &a.y; //~ ERROR use of moved - //~^^ NOTE `a` moved here (through moving `a.x`) + //~^ move occurs because `a.x` has type `Box` + //~| value used here after move } fn move_after_borrow() { let a: Box<_> = box B { x: box 0, y: box 1 }; let _x = &a.x; //~^ NOTE borrow of `a.x` occurs here - let _y = a.y; //~ ERROR cannot move + let _y = a.y; + //~^ ERROR cannot move + //~| move out of } fn copy_after_mut_borrow() { @@ -69,51 +77,63 @@ fn move_after_mut_borrow() { let mut a: Box<_> = box B { x: box 0, y: box 1 }; let _x = &mut a.x; //~^ NOTE borrow of `a.x` occurs here - let _y = a.y; //~ ERROR cannot move + let _y = a.y; + //~^ ERROR cannot move + //~| move out of } fn borrow_after_mut_borrow() { let mut a: Box<_> = box A { x: box 0, y: 1 }; let _x = &mut a.x; - //~^ NOTE previous borrow of `a` occurs here (through borrowing `a.x`); + //~^ NOTE mutable borrow occurs here (via `a.x`) let _y = &a.y; //~ ERROR cannot borrow + //~^ immutable borrow occurs here (via `a.y`) } -//~^ NOTE previous borrow ends here +//~^ NOTE mutable borrow ends here fn mut_borrow_after_borrow() { let mut a: Box<_> = box A { x: box 0, y: 1 }; let _x = &a.x; - //~^ 
NOTE previous borrow of `a` occurs here (through borrowing `a.x`) + //~^ NOTE immutable borrow occurs here (via `a.x`) let _y = &mut a.y; //~ ERROR cannot borrow + //~^ mutable borrow occurs here (via `a.y`) } -//~^ NOTE previous borrow ends here +//~^ NOTE immutable borrow ends here fn copy_after_move_nested() { let a: Box<_> = box C { x: box A { x: box 0, y: 1 }, y: 2 }; let _x = a.x.x; - //~^ NOTE `a.x.x` moved here because it has type `Box`, which is moved by default + //~^ value moved here let _y = a.y; //~ ERROR use of collaterally moved + //~^ NOTE move occurs because `a.x.x` has type `Box` + //~| value used here after move } fn move_after_move_nested() { let a: Box<_> = box D { x: box A { x: box 0, y: 1 }, y: box 2 }; let _x = a.x.x; - //~^ NOTE `a.x.x` moved here because it has type `Box`, which is moved by default + //~^ value moved here let _y = a.y; //~ ERROR use of collaterally moved + //~^ NOTE move occurs because `a.x.x` has type `Box` + //~| value used here after move } fn borrow_after_move_nested() { let a: Box<_> = box C { x: box A { x: box 0, y: 1 }, y: 2 }; let _x = a.x.x; - //~^ NOTE `a.x.x` moved here because it has type `Box`, which is moved by default + //~^ value moved here let _y = &a.y; //~ ERROR use of collaterally moved + //~^ NOTE move occurs because `a.x.x` has type `Box` + //~| value used here after move } fn move_after_borrow_nested() { let a: Box<_> = box D { x: box A { x: box 0, y: 1 }, y: box 2 }; let _x = &a.x.x; - //~^ NOTE borrow of `a.x.x` occurs here - let _y = a.y; //~ ERROR cannot move + //~^ borrow of `a.x.x` occurs here + let _y = a.y; + //~^ ERROR cannot move + //~| move out of } fn copy_after_mut_borrow_nested() { @@ -127,24 +147,28 @@ fn move_after_mut_borrow_nested() { let mut a: Box<_> = box D { x: box A { x: box 0, y: 1 }, y: box 2 }; let _x = &mut a.x.x; //~^ NOTE borrow of `a.x.x` occurs here - let _y = a.y; //~ ERROR cannot move + let _y = a.y; + //~^ ERROR cannot move + //~| move out of } fn borrow_after_mut_borrow_nested() { let mut a: Box<_> = box C { x: box A { x: box 0, y: 1 }, y: 2 }; let _x = &mut a.x.x; - //~^ NOTE previous borrow of `a.x.x` occurs here; the mutable borrow prevents + //~^ mutable borrow occurs here let _y = &a.y; //~ ERROR cannot borrow + //~^ immutable borrow occurs here } -//~^ NOTE previous borrow ends here +//~^ NOTE mutable borrow ends here fn mut_borrow_after_borrow_nested() { let mut a: Box<_> = box C { x: box A { x: box 0, y: 1 }, y: 2 }; let _x = &a.x.x; - //~^ NOTE previous borrow of `a.x.x` occurs here; the immutable borrow prevents + //~^ immutable borrow occurs here let _y = &mut a.y; //~ ERROR cannot borrow + //~^ mutable borrow occurs here } -//~^ NOTE previous borrow ends here +//~^ NOTE immutable borrow ends here fn main() { copy_after_move(); diff --git a/src/test/compile-fail/borrowck/borrowck-closures-mut-of-imm.rs b/src/test/compile-fail/borrowck/borrowck-closures-mut-of-imm.rs index 40f9be2dd8..dc2f0e8395 100644 --- a/src/test/compile-fail/borrowck/borrowck-closures-mut-of-imm.rs +++ b/src/test/compile-fail/borrowck/borrowck-closures-mut-of-imm.rs @@ -24,7 +24,7 @@ fn a(x: &isize) { //~^ ERROR cannot borrow let c2 = || set(&mut *x); //~^ ERROR cannot borrow - //~| ERROR closure requires unique access + //~| ERROR two closures require unique access to `x` at the same time } fn main() { diff --git a/src/test/compile-fail/borrowck/borrowck-closures-unique.rs b/src/test/compile-fail/borrowck/borrowck-closures-unique.rs index 3646a68f06..1b22dc4d2c 100644 --- 
a/src/test/compile-fail/borrowck/borrowck-closures-unique.rs +++ b/src/test/compile-fail/borrowck/borrowck-closures-unique.rs @@ -39,7 +39,7 @@ fn c(x: &mut isize) { fn d(x: &mut isize) { let c1 = || set(x); - let c2 = || set(x); //~ ERROR closure requires unique access to `x` + let c2 = || set(x); //~ ERROR two closures require unique access to `x` at the same time } fn e(x: &mut isize) { diff --git a/src/test/compile-fail/borrowck/borrowck-lend-flow-loop.rs b/src/test/compile-fail/borrowck/borrowck-lend-flow-loop.rs index f09e7ffd7e..56cbe0b187 100644 --- a/src/test/compile-fail/borrowck/borrowck-lend-flow-loop.rs +++ b/src/test/compile-fail/borrowck/borrowck-lend-flow-loop.rs @@ -109,6 +109,7 @@ fn while_aliased_mut_cond(cond: bool, cond2: bool) { borrow(&*v); //~ ERROR cannot borrow if cond2 { x = &mut v; //~ ERROR cannot borrow + //~^ ERROR cannot borrow } } } diff --git a/src/test/compile-fail/borrowck/borrowck-let-suggestion-suffixes.rs b/src/test/compile-fail/borrowck/borrowck-let-suggestion-suffixes.rs index f551a2aa81..02aa771c78 100644 --- a/src/test/compile-fail/borrowck/borrowck-let-suggestion-suffixes.rs +++ b/src/test/compile-fail/borrowck/borrowck-let-suggestion-suffixes.rs @@ -26,6 +26,7 @@ fn f() { v3.push(&'x'); // statement 6 //~^ ERROR borrowed value does not live long enough + //~| does not live long enough //~| NOTE ...but borrowed value is only valid for the statement //~| HELP consider using a `let` binding to increase its lifetime @@ -36,6 +37,7 @@ fn f() { v4.push(&'y'); //~^ ERROR borrowed value does not live long enough + //~| does not live long enough //~| NOTE ...but borrowed value is only valid for the statement //~| HELP consider using a `let` binding to increase its lifetime @@ -46,6 +48,7 @@ fn f() { v5.push(&'z'); //~^ ERROR borrowed value does not live long enough + //~| does not live long enough //~| NOTE ...but borrowed value is only valid for the statement //~| HELP consider using a `let` binding to increase its lifetime diff --git a/src/test/compile-fail/borrowck/borrowck-let-suggestion.rs b/src/test/compile-fail/borrowck/borrowck-let-suggestion.rs index 7e9d448275..866e72f1a5 100644 --- a/src/test/compile-fail/borrowck/borrowck-let-suggestion.rs +++ b/src/test/compile-fail/borrowck/borrowck-let-suggestion.rs @@ -9,10 +9,12 @@ // except according to those terms. 
fn f() { - let x = [1].iter(); //~ ERROR borrowed value does not live long enough - //~^ NOTE reference must be valid for the block suffix following statement - //~^^ HELP consider using a `let` binding to increase its lifetime - //~^^^ NOTE ...but borrowed value is only valid for the statement at 12:4 + let x = [1].iter(); + //~^ ERROR borrowed value does not live long enough + //~|does not live long enough + //~| NOTE reference must be valid for the block suffix following statement + //~| HELP consider using a `let` binding to increase its lifetime + //~| NOTE ...but borrowed value is only valid for the statement at 12:4 } fn main() { diff --git a/src/test/compile-fail/borrowck/borrowck-move-error-with-note.rs b/src/test/compile-fail/borrowck/borrowck-move-error-with-note.rs index e4b9fb2671..5d9c9d0bd4 100644 --- a/src/test/compile-fail/borrowck/borrowck-move-error-with-note.rs +++ b/src/test/compile-fail/borrowck/borrowck-move-error-with-note.rs @@ -19,7 +19,8 @@ enum Foo { fn blah() { let f = &Foo::Foo1(box 1, box 2); match *f { //~ ERROR cannot move out of - Foo::Foo1(num1, //~ NOTE attempting to move value to here + //~| cannot move out + Foo::Foo1(num1, //~ NOTE to prevent move num2) => (), //~ NOTE and here Foo::Foo2(num) => (), //~ NOTE and here Foo::Foo3 => () @@ -36,8 +37,9 @@ impl Drop for S { fn move_in_match() { match (S {f: "foo".to_string(), g: "bar".to_string()}) { - S { //~ ERROR cannot move out of type `S`, which defines the `Drop` trait - f: _s, //~ NOTE attempting to move value to here + S { //~ ERROR cannot move out of type `S`, which implements the `Drop` trait + //~| cannot move out of here + f: _s, //~ NOTE to prevent move g: _t //~ NOTE and here } => {} } @@ -53,7 +55,8 @@ fn free(_: T) {} fn blah2() { let a = &A { a: box 1 }; match a.a { //~ ERROR cannot move out of - n => { //~ NOTE attempting to move value to here + //~| cannot move out + n => { //~ NOTE to prevent move free(n) } } diff --git a/src/test/compile-fail/borrowck/borrowck-move-out-of-struct-with-dtor.rs b/src/test/compile-fail/borrowck/borrowck-move-out-of-struct-with-dtor.rs index 3d13cbe30c..16302d276c 100644 --- a/src/test/compile-fail/borrowck/borrowck-move-out-of-struct-with-dtor.rs +++ b/src/test/compile-fail/borrowck/borrowck-move-out-of-struct-with-dtor.rs @@ -16,17 +16,17 @@ impl Drop for S { fn move_in_match() { match (S {f:"foo".to_string()}) { S {f:_s} => {} - //~^ ERROR cannot move out of type `S`, which defines the `Drop` trait + //~^ ERROR cannot move out of type `S`, which implements the `Drop` trait } } fn move_in_let() { let S {f:_s} = S {f:"foo".to_string()}; - //~^ ERROR cannot move out of type `S`, which defines the `Drop` trait + //~^ ERROR cannot move out of type `S`, which implements the `Drop` trait } fn move_in_fn_arg(S {f:_s}: S) { - //~^ ERROR cannot move out of type `S`, which defines the `Drop` trait + //~^ ERROR cannot move out of type `S`, which implements the `Drop` trait } fn main() {} diff --git a/src/test/compile-fail/borrowck/borrowck-move-out-of-tuple-struct-with-dtor.rs b/src/test/compile-fail/borrowck/borrowck-move-out-of-tuple-struct-with-dtor.rs index 625f718490..f5fedb8d48 100644 --- a/src/test/compile-fail/borrowck/borrowck-move-out-of-tuple-struct-with-dtor.rs +++ b/src/test/compile-fail/borrowck/borrowck-move-out-of-tuple-struct-with-dtor.rs @@ -16,17 +16,17 @@ impl Drop for S { fn move_in_match() { match S("foo".to_string()) { S(_s) => {} - //~^ ERROR cannot move out of type `S`, which defines the `Drop` trait + //~^ ERROR cannot move out of type `S`, 
which implements the `Drop` trait } } fn move_in_let() { let S(_s) = S("foo".to_string()); - //~^ ERROR cannot move out of type `S`, which defines the `Drop` trait + //~^ ERROR cannot move out of type `S`, which implements the `Drop` trait } fn move_in_fn_arg(S(_s): S) { - //~^ ERROR cannot move out of type `S`, which defines the `Drop` trait + //~^ ERROR cannot move out of type `S`, which implements the `Drop` trait } fn main() {} diff --git a/src/test/compile-fail/borrowck/borrowck-move-out-of-vec-tail.rs b/src/test/compile-fail/borrowck/borrowck-move-out-of-vec-tail.rs index d9a2f89a9e..1577129574 100644 --- a/src/test/compile-fail/borrowck/borrowck-move-out-of-vec-tail.rs +++ b/src/test/compile-fail/borrowck/borrowck-move-out-of-vec-tail.rs @@ -27,10 +27,12 @@ pub fn main() { match x { [_, tail..] => { match tail { - [Foo { string: a }, //~ ERROR cannot move out of borrowed content + [Foo { string: a }, + //~^ ERROR cannot move out of borrowed content + //~| cannot move out + //~| to prevent move Foo { string: b }] => { - //~^^ NOTE attempting to move value to here - //~^^ NOTE and here + //~^ NOTE and here } _ => { unreachable!(); diff --git a/src/test/compile-fail/borrowck/borrowck-mut-borrow-linear-errors.rs b/src/test/compile-fail/borrowck/borrowck-mut-borrow-linear-errors.rs index 38e0e27a7b..f789d44016 100644 --- a/src/test/compile-fail/borrowck/borrowck-mut-borrow-linear-errors.rs +++ b/src/test/compile-fail/borrowck/borrowck-mut-borrow-linear-errors.rs @@ -19,6 +19,7 @@ fn main() { match 1 { 1 => { addr = &mut x; } //~^ ERROR cannot borrow `x` as mutable more than once at a time + //~| ERROR cannot borrow `x` as mutable more than once at a time 2 => { addr = &mut x; } //~^ ERROR cannot borrow `x` as mutable more than once at a time _ => { addr = &mut x; } diff --git a/src/test/compile-fail/borrowck/borrowck-report-with-custom-diagnostic.rs b/src/test/compile-fail/borrowck/borrowck-report-with-custom-diagnostic.rs index 2b1ff47ee3..3ca8cc431e 100644 --- a/src/test/compile-fail/borrowck/borrowck-report-with-custom-diagnostic.rs +++ b/src/test/compile-fail/borrowck/borrowck-report-with-custom-diagnostic.rs @@ -13,10 +13,11 @@ fn main() { // Original borrow ends at end of function let mut x = 1; let y = &mut x; - //~^ previous borrow of `x` occurs here; the mutable borrow prevents + //~^ mutable borrow occurs here let z = &x; //~ ERROR cannot borrow + //~^ immutable borrow occurs here } -//~^ NOTE previous borrow ends here +//~^ NOTE mutable borrow ends here fn foo() { match true { @@ -24,10 +25,11 @@ fn foo() { // Original borrow ends at end of match arm let mut x = 1; let y = &x; - //~^ previous borrow of `x` occurs here; the immutable borrow prevents + //~^ immutable borrow occurs here let z = &mut x; //~ ERROR cannot borrow + //~^ mutable borrow occurs here } - //~^ NOTE previous borrow ends here + //~^ NOTE immutable borrow ends here false => () } } @@ -37,8 +39,9 @@ fn bar() { || { let mut x = 1; let y = &mut x; - //~^ previous borrow of `x` occurs here; the mutable borrow prevents + //~^ first mutable borrow occurs here let z = &mut x; //~ ERROR cannot borrow + //~^ second mutable borrow occurs here }; - //~^ NOTE previous borrow ends here + //~^ NOTE first borrow ends here } diff --git a/src/test/compile-fail/borrowck/borrowck-struct-update-with-dtor.rs b/src/test/compile-fail/borrowck/borrowck-struct-update-with-dtor.rs index bf1497420e..c364788a9c 100644 --- a/src/test/compile-fail/borrowck/borrowck-struct-update-with-dtor.rs +++ 
b/src/test/compile-fail/borrowck/borrowck-struct-update-with-dtor.rs @@ -19,11 +19,13 @@ struct T { a: isize, mv: Box } impl Drop for T { fn drop(&mut self) { } } fn f(s0:S) { - let _s2 = S{a: 2, ..s0}; //~error: cannot move out of type `S`, which defines the `Drop` trait + let _s2 = S{a: 2, ..s0}; + //~^ error: cannot move out of type `S`, which implements the `Drop` trait } fn g(s0:T) { - let _s2 = T{a: 2, ..s0}; //~error: cannot move out of type `T`, which defines the `Drop` trait + let _s2 = T{a: 2, ..s0}; + //~^ error: cannot move out of type `T`, which implements the `Drop` trait } fn main() { } diff --git a/src/test/compile-fail/borrowck/borrowck-vec-pattern-nesting.rs b/src/test/compile-fail/borrowck/borrowck-vec-pattern-nesting.rs index 1a21b03a45..eec6c8473e 100644 --- a/src/test/compile-fail/borrowck/borrowck-vec-pattern-nesting.rs +++ b/src/test/compile-fail/borrowck/borrowck-vec-pattern-nesting.rs @@ -19,6 +19,7 @@ fn a() { [box ref _a, _, _] => { //~^ borrow of `vec[..]` occurs here vec[0] = box 4; //~ ERROR cannot assign + //~^ assignment to borrowed `vec[..]` occurs here } } } @@ -30,6 +31,7 @@ fn b() { [_b..] => { //~^ borrow of `vec[..]` occurs here vec[0] = box 4; //~ ERROR cannot assign + //~^ assignment to borrowed `vec[..]` occurs here } } } @@ -39,8 +41,9 @@ fn c() { let vec: &mut [Box] = &mut vec; match vec { [_a, //~ ERROR cannot move out - _b..] => { //~^ NOTE attempting to move value to here - + //~| cannot move out + //~| to prevent move + _b..] => { // Note: `_a` is *moved* here, but `b` is borrowing, // hence illegal. // @@ -50,7 +53,8 @@ fn c() { _ => {} } let a = vec[0]; //~ ERROR cannot move out - //~^ NOTE attempting to move value to here + //~^ NOTE to prevent move + //~| cannot move out of here } fn d() { @@ -58,11 +62,13 @@ fn d() { let vec: &mut [Box] = &mut vec; match vec { [_a.., //~ ERROR cannot move out - _b] => {} //~ NOTE attempting to move value to here + //~^ cannot move out + _b] => {} //~ NOTE to prevent move _ => {} } let a = vec[0]; //~ ERROR cannot move out - //~^ NOTE attempting to move value to here + //~^ NOTE to prevent move + //~| cannot move out of here } fn e() { @@ -70,13 +76,15 @@ fn e() { let vec: &mut [Box] = &mut vec; match vec { [_a, _b, _c] => {} //~ ERROR cannot move out - //~^ NOTE attempting to move value to here - //~^^ NOTE and here - //~^^^ NOTE and here + //~| cannot move out + //~| NOTE to prevent move + //~| NOTE and here + //~| NOTE and here _ => {} } let a = vec[0]; //~ ERROR cannot move out - //~^ NOTE attempting to move value to here + //~^ NOTE to prevent move + //~| cannot move out of here } fn main() {} diff --git a/src/test/compile-fail/cast-as-bool.rs b/src/test/compile-fail/cast-as-bool.rs index 4764ae380f..af42d5c275 100644 --- a/src/test/compile-fail/cast-as-bool.rs +++ b/src/test/compile-fail/cast-as-bool.rs @@ -11,6 +11,5 @@ fn main() { let u = 5 as bool; //~^ ERROR cannot cast as `bool` - //~^^ HELP compare with zero instead - //~^^^ HELP run `rustc --explain E0054` to see a detailed explanation + //~| HELP compare with zero instead } diff --git a/src/test/compile-fail/trace_macros-gate2.rs b/src/test/compile-fail/cast-rfc0401-2.rs similarity index 62% rename from src/test/compile-fail/trace_macros-gate2.rs rename to src/test/compile-fail/cast-rfc0401-2.rs index 71cc45e132..1598a9aa1f 100644 --- a/src/test/compile-fail/trace_macros-gate2.rs +++ b/src/test/compile-fail/cast-rfc0401-2.rs @@ -8,13 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// Test that the trace_macros feature gate is on. +// RFC 401 test extracted into distinct file. This is because some the +// change to suppress "derived" errors wound up suppressing this error +// message, since the fallback for `3` doesn't occur. fn main() { - // (Infrastructure does not attempt to detect uses in macro definitions.) - macro_rules! expando { - ($x: ident) => { trace_macros!($x) } - } - - expando!(true); //~ ERROR `trace_macros` is not stable + let _ = 3 as bool; + //~^ ERROR cannot cast as `bool` + //~| HELP compare with zero } diff --git a/src/test/compile-fail/cast-rfc0401.rs b/src/test/compile-fail/cast-rfc0401.rs index dcd49e34bb..05c531e91f 100644 --- a/src/test/compile-fail/cast-rfc0401.rs +++ b/src/test/compile-fail/cast-rfc0401.rs @@ -58,14 +58,12 @@ fn main() let _ = f as *const u8; //~^ ERROR casting //~^^ HELP through a usize first - let _ = 3 as bool; + let _ = 3_i32 as bool; //~^ ERROR cannot cast as `bool` - //~^^ HELP compare with zero - //~^^^ HELP run `rustc --explain E0054` to see a detailed explanation + //~| HELP compare with zero let _ = E::A as bool; //~^ ERROR cannot cast as `bool` - //~^^ HELP compare with zero - //~^^^ HELP run `rustc --explain E0054` to see a detailed explanation + //~| HELP compare with zero let _ = 0x61u32 as char; //~ ERROR only `u8` can be cast let _ = false as f32; @@ -92,9 +90,8 @@ fn main() let _ = v as *const [u8]; //~ ERROR cannot cast let _ = fat_v as *const Foo; //~^ ERROR the trait bound `[u8]: std::marker::Sized` is not satisfied - //~^^ HELP run `rustc --explain E0277` to see a detailed explanation - //~^^^ NOTE `[u8]` does not have a constant size known at compile-time - //~^^^^ NOTE required for the cast to the object type `Foo` + //~| NOTE `[u8]` does not have a constant size known at compile-time + //~| NOTE required for the cast to the object type `Foo` let _ = foo as *const str; //~ ERROR casting let _ = foo as *mut str; //~ ERROR casting let _ = main as *mut str; //~ ERROR casting @@ -107,9 +104,8 @@ fn main() let a : *const str = "hello"; let _ = a as *const Foo; //~^ ERROR the trait bound `str: std::marker::Sized` is not satisfied - //~^^ HELP run `rustc --explain E0277` to see a detailed explanation - //~^^^ NOTE `str` does not have a constant size known at compile-time - //~^^^^ NOTE required for the cast to the object type `Foo` + //~| NOTE `str` does not have a constant size known at compile-time + //~| NOTE required for the cast to the object type `Foo` // check no error cascade let _ = main.f as *const u32; //~ ERROR attempted access of field diff --git a/src/test/compile-fail/cdylib-deps-must-be-static.rs b/src/test/compile-fail/cdylib-deps-must-be-static.rs new file mode 100644 index 0000000000..4b160f26e9 --- /dev/null +++ b/src/test/compile-fail/cdylib-deps-must-be-static.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// error-pattern: dependency `cdylib_dep` not found in rlib format +// aux-build:cdylib-dep.rs +// ignore-musl + +#![crate_type = "cdylib"] + +extern crate cdylib_dep; diff --git a/src/test/compile-fail/changing-crates.rs b/src/test/compile-fail/changing-crates.rs index 0b42015848..f74855a084 100644 --- a/src/test/compile-fail/changing-crates.rs +++ b/src/test/compile-fail/changing-crates.rs @@ -17,7 +17,7 @@ extern crate a; extern crate b; //~ ERROR: found possibly newer version of crate `a` which `b` depends on -//~| NOTE: perhaps this crate needs to be recompiled +//~| NOTE: perhaps that crate needs to be recompiled //~| NOTE: crate `a` path #1: //~| NOTE: crate `b` path #1: diff --git a/src/test/compile-fail/check-static-immutable-mut-slices.rs b/src/test/compile-fail/check-static-immutable-mut-slices.rs index 1804b9e04c..370cfe9d55 100644 --- a/src/test/compile-fail/check-static-immutable-mut-slices.rs +++ b/src/test/compile-fail/check-static-immutable-mut-slices.rs @@ -12,5 +12,6 @@ static TEST: &'static mut [isize] = &mut []; //~^ ERROR references in statics may only refer to immutable values +//~^^ ERROR references in statics may only refer to immutable values pub fn main() { } diff --git a/src/test/compile-fail/check-static-values-constraints.rs b/src/test/compile-fail/check-static-values-constraints.rs index c3a1de1175..df22e2ea4d 100644 --- a/src/test/compile-fail/check-static-values-constraints.rs +++ b/src/test/compile-fail/check-static-values-constraints.rs @@ -37,7 +37,7 @@ static STATIC2: SafeEnum = SafeEnum::Variant2(0); // This one should fail static STATIC3: SafeEnum = SafeEnum::Variant3(WithDtor); -//~^ ERROR statics are not allowed to have destructors +//~^ ERROR destructors in statics are an unstable feature // This enum will be used to test that variants @@ -54,9 +54,9 @@ impl Drop for UnsafeEnum { static STATIC4: UnsafeEnum = UnsafeEnum::Variant5; -//~^ ERROR statics are not allowed to have destructors +//~^ ERROR destructors in statics are an unstable feature static STATIC5: UnsafeEnum = UnsafeEnum::Variant6(0); -//~^ ERROR statics are not allowed to have destructors +//~^ ERROR destructors in statics are an unstable feature struct SafeStruct { @@ -71,7 +71,7 @@ static STATIC6: SafeStruct = SafeStruct{field1: SafeEnum::Variant1, field2: Safe // field2 has an unsafe value, hence this should fail static STATIC7: SafeStruct = SafeStruct{field1: SafeEnum::Variant1, field2: SafeEnum::Variant3(WithDtor)}; -//~^ ERROR statics are not allowed to have destructors +//~^ ERROR destructors in statics are an unstable feature // Test variadic constructor for structs. The base struct should be examined // as well as every field present in the constructor. 
@@ -84,7 +84,7 @@ static STATIC8: SafeStruct = SafeStruct{field1: SafeEnum::Variant1, static STATIC9: SafeStruct = SafeStruct{field1: SafeEnum::Variant1, ..SafeStruct{field1: SafeEnum::Variant3(WithDtor), field2: SafeEnum::Variant1}}; -//~^^ ERROR statics are not allowed to have destructors +//~^^ ERROR destructors in statics are an unstable feature struct UnsafeStruct; @@ -94,7 +94,7 @@ impl Drop for UnsafeStruct { // Types with destructors are not allowed for statics static STATIC10: UnsafeStruct = UnsafeStruct; -//~^ ERROR statics are not allowed to have destructor +//~^ ERROR destructors in statics are an unstable feature struct MyOwned; @@ -105,19 +105,19 @@ static STATIC11: Box = box MyOwned; // to have types with destructors // These should fail static mut STATIC12: UnsafeStruct = UnsafeStruct; -//~^ ERROR mutable statics are not allowed to have destructors -//~^^ ERROR statics are not allowed to have destructors +//~^ ERROR destructors in statics are an unstable feature +//~^^ ERROR destructors in statics are an unstable feature static mut STATIC13: SafeStruct = SafeStruct{field1: SafeEnum::Variant1, -//~^ ERROR mutable statics are not allowed to have destructors +//~^ ERROR destructors in statics are an unstable feature field2: SafeEnum::Variant3(WithDtor)}; -//~^ ERROR: statics are not allowed to have destructors +//~^ ERROR: destructors in statics are an unstable feature static mut STATIC14: SafeStruct = SafeStruct { -//~^ ERROR mutable statics are not allowed to have destructors +//~^ ERROR destructors in statics are an unstable feature field1: SafeEnum::Variant1, field2: SafeEnum::Variant4("str".to_string()) -//~^ ERROR method calls in statics are limited to constant inherent methods +//~^ ERROR calls in statics are limited to constant functions }; static STATIC15: &'static [Box] = &[ @@ -131,7 +131,7 @@ static STATIC16: (&'static Box, &'static Box) = ( ); static mut STATIC17: SafeEnum = SafeEnum::Variant1; -//~^ ERROR mutable statics are not allowed to have destructors +//~^ ERROR destructors in statics are an unstable feature static STATIC19: Box = box 3; @@ -140,4 +140,5 @@ static STATIC19: Box = pub fn main() { let y = { static x: Box = box 3; x }; //~^ ERROR allocations are not allowed in statics + //~^^ ERROR cannot move out of static item } diff --git a/src/test/compile-fail/closure-wrong-kind.rs b/src/test/compile-fail/closure-wrong-kind.rs new file mode 100644 index 0000000000..a387e4c5ec --- /dev/null +++ b/src/test/compile-fail/closure-wrong-kind.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +/* Any copyright is dedicated to the Public Domain. 
+ * http://creativecommons.org/publicdomain/zero/1.0/ */ + +struct X; +fn foo(_: T) {} +fn bar(_: T) {} + +fn main() { + let x = X; + let closure = |_| foo(x); //~ ERROR E0525 + bar(closure); +} diff --git a/src/test/compile-fail/coerce-mut.rs b/src/test/compile-fail/coerce-mut.rs index 30c1b66a7b..634d12441a 100644 --- a/src/test/compile-fail/coerce-mut.rs +++ b/src/test/compile-fail/coerce-mut.rs @@ -14,7 +14,7 @@ fn main() { let x = 0; f(&x); //~^ ERROR mismatched types - //~| expected `&mut i32` - //~| found `&_` + //~| expected type `&mut i32` + //~| found type `&_` //~| values differ in mutability } diff --git a/src/test/compile-fail/coercion-slice.rs b/src/test/compile-fail/coercion-slice.rs index bb4d1693af..bd7e6c2a21 100644 --- a/src/test/compile-fail/coercion-slice.rs +++ b/src/test/compile-fail/coercion-slice.rs @@ -13,8 +13,7 @@ fn main() { let _: &[i32] = [0]; //~^ ERROR mismatched types - //~| expected `&[i32]` - //~| found `[_; 1]` - //~| expected &-ptr - //~| found array of 1 elements + //~| expected type `&[i32]` + //~| found type `[_; 1]` + //~| expected &-ptr, found array of 1 elements } diff --git a/src/test/compile-fail/coherence-conflicting-negative-trait-impl.rs b/src/test/compile-fail/coherence-conflicting-negative-trait-impl.rs index afc3b8d4cc..7fd1b17f29 100644 --- a/src/test/compile-fail/coherence-conflicting-negative-trait-impl.rs +++ b/src/test/compile-fail/coherence-conflicting-negative-trait-impl.rs @@ -20,7 +20,7 @@ impl !Send for TestType {} //~^ ERROR conflicting implementations of trait `std::marker::Send` unsafe impl Send for TestType {} -//~^ ERROR error: conflicting implementations of trait `std::marker::Send` +//~^ ERROR conflicting implementations of trait `std::marker::Send` impl !Send for TestType {} diff --git a/src/test/compile-fail/consider-removing-last-semi.rs b/src/test/compile-fail/consider-removing-last-semi.rs index 02148a138c..2e110cb3d0 100644 --- a/src/test/compile-fail/consider-removing-last-semi.rs +++ b/src/test/compile-fail/consider-removing-last-semi.rs @@ -9,13 +9,11 @@ // except according to those terms. 
fn f() -> String { //~ ERROR E0269 - //~^ HELP detailed explanation 0u8; "bla".to_string(); //~ HELP consider removing this semicolon } fn g() -> String { //~ ERROR E0269 - //~^ HELP detailed explanation "this won't work".to_string(); "removeme".to_string(); //~ HELP consider removing this semicolon } diff --git a/src/test/compile-fail/const-block-non-item-statement.rs b/src/test/compile-fail/const-block-non-item-statement.rs index 5ccfb1ddec..edb85023c9 100644 --- a/src/test/compile-fail/const-block-non-item-statement.rs +++ b/src/test/compile-fail/const-block-non-item-statement.rs @@ -21,6 +21,20 @@ const C: usize = { foo!(); 2 }; const D: usize = { let x = 4; 2 }; //~^ ERROR: blocks in constants are limited to items and tail expressions +//~^^ ERROR: blocks in constants are limited to items and tail expressions + +enum Foo { + Bar = { let x = 1; 3 } + //~^ ERROR: blocks in constants are limited to items and tail expressions + //~^^ ERROR: blocks in constants are limited to items and tail expressions +} + +type Array = [u32; { let x = 2; 5 }]; +//~^ ERROR: blocks in constants are limited to items and tail expressions +//~^^ ERROR: blocks in constants are limited to items and tail expressions pub fn main() { + let _: Array = [0; { let x = 3; 5 }]; + //~^ ERROR: blocks in constants are limited to items and tail expressions + //~^^ ERROR: blocks in constants are limited to items and tail expressions } diff --git a/src/test/compile-fail/const-err-early.rs b/src/test/compile-fail/const-err-early.rs index cdcdb919bd..7567791c24 100644 --- a/src/test/compile-fail/const-err-early.rs +++ b/src/test/compile-fail/const-err-early.rs @@ -18,5 +18,5 @@ pub const D: u8 = 42u8 - (42u8 + 1); //~ ERROR attempted to subtract with overfl pub const E: u8 = [5u8][1]; //~ ERROR index out of bounds fn main() { - let _e = [6u8][1]; + let _e = [6u8][1]; //~ ERROR: array index out of bounds } diff --git a/src/test/compile-fail/const-err-multi.rs b/src/test/compile-fail/const-err-multi.rs new file mode 100644 index 0000000000..7de93a213b --- /dev/null +++ b/src/test/compile-fail/const-err-multi.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![deny(const_err)] + +pub const A: i8 = -std::i8::MIN; //~ ERROR attempted to negate with overflow +pub const B: i8 = A; +pub const C: u8 = A as u8; +pub const D: i8 = 50 - A; + +fn main() { +} diff --git a/src/test/compile-fail/const-err.rs b/src/test/compile-fail/const-err.rs index 45e8fc37d8..a25255c010 100644 --- a/src/test/compile-fail/const-err.rs +++ b/src/test/compile-fail/const-err.rs @@ -8,28 +8,39 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +// these errors are not actually "const_err", they occur in trans/consts +// and are unconditional warnings that can't be denied or allowed + #![feature(rustc_attrs)] #![allow(exceeding_bitshifts)] +#![allow(const_err)] fn black_box(_: T) { unimplemented!() } +// Make sure that the two uses get two errors. +const FOO: u8 = [5u8][1]; +//~^ ERROR array index out of bounds +//~^^ ERROR array index out of bounds + #[rustc_no_mir] // FIXME #29769 MIR overflow checking is TBD. 
fn main() { let a = -std::i8::MIN; //~^ WARN attempted to negate with overflow let b = 200u8 + 200u8 + 200u8; //~^ WARN attempted to add with overflow - //~^^ WARN attempted to add with overflow + //~| WARN attempted to add with overflow let c = 200u8 * 4; //~^ WARN attempted to multiply with overflow let d = 42u8 - (42u8 + 1); //~^ WARN attempted to subtract with overflow let _e = [5u8][1]; - //~^ ERROR const index-expr is out of bounds + //~^ WARN array index out of bounds black_box(a); black_box(b); black_box(c); black_box(d); + + black_box((FOO, FOO)); } diff --git a/src/test/compile-fail/const-err2.rs b/src/test/compile-fail/const-err2.rs new file mode 100644 index 0000000000..f0d65f1424 --- /dev/null +++ b/src/test/compile-fail/const-err2.rs @@ -0,0 +1,34 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(rustc_attrs)] +#![allow(exceeding_bitshifts)] +#![deny(const_err)] + +fn black_box(_: T) { + unimplemented!() +} + +fn main() { + let a = -std::i8::MIN; + //~^ ERROR attempted to negate with overflow + let b = 200u8 + 200u8 + 200u8; + //~^ ERROR attempted to add with overflow + //~| ERROR attempted to add with overflow + let c = 200u8 * 4; + //~^ ERROR attempted to multiply with overflow + let d = 42u8 - (42u8 + 1); + //~^ ERROR attempted to subtract with overflow + let _e = [5u8][1]; + black_box(a); + black_box(b); + black_box(c); + black_box(d); +} diff --git a/src/test/compile-fail/const-eval-overflow-4b.rs b/src/test/compile-fail/const-eval-overflow-4b.rs index 5aa93cf638..31e1a72967 100644 --- a/src/test/compile-fail/const-eval-overflow-4b.rs +++ b/src/test/compile-fail/const-eval-overflow-4b.rs @@ -15,7 +15,6 @@ #![allow(unused_imports)] -use std::fmt; use std::{i8, i16, i32, i64, isize}; use std::{u8, u16, u32, u64, usize}; @@ -26,10 +25,15 @@ const A_I8_T //~| found `u8` [E0250] = [0; (i8::MAX as usize) + 1]; -fn main() { - foo(&A_I8_T[..]); -} -fn foo(x: T) { - println!("{:?}", x); -} +const A_CHAR_USIZE + : [u32; 5u8 as char as usize] + = [0; 5]; + + +const A_BAD_CHAR_USIZE + : [u32; 5i8 as char as usize] + //~^ ERROR only `u8` can be cast as `char`, not `i8` + = [0; 5]; + +fn main() {} diff --git a/src/test/compile-fail/const-fn-destructuring-arg.rs b/src/test/compile-fail/const-fn-destructuring-arg.rs index 1642c04106..c3d5975fe0 100644 --- a/src/test/compile-fail/const-fn-destructuring-arg.rs +++ b/src/test/compile-fail/const-fn-destructuring-arg.rs @@ -13,6 +13,11 @@ #![feature(const_fn)] // no destructuring -const fn i((a, b): (u32, u32)) -> u32 { a + b } //~ ERROR: E0022 +const fn i(( + a, //~ ERROR: E0022 + b //~ ERROR: E0022 + ): (u32, u32)) -> u32 { + a + b +} fn main() {} diff --git a/src/test/compile-fail/const-fn-error.rs b/src/test/compile-fail/const-fn-error.rs index cb6f2d0215..45a00de48e 100644 --- a/src/test/compile-fail/const-fn-error.rs +++ b/src/test/compile-fail/const-fn-error.rs @@ -8,19 +8,16 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// test that const fn signature and body errors are checked -// even in array lengths, which are evaluated before check_const - #![feature(const_fn)] const X : usize = 2; const fn f(x: usize) -> usize { - let mut sum = 0; //~ ERROR: E0016 - for i in 0..x { //~ ERROR: E0016 + let mut sum = 0; + for i in 0..x { sum += i; } - sum + sum //~ ERROR: E0250 } #[allow(unused_variables)] diff --git a/src/test/compile-fail/const-fn-not-safe-for-const.rs b/src/test/compile-fail/const-fn-not-safe-for-const.rs index f8381978dc..48877a60d2 100644 --- a/src/test/compile-fail/const-fn-not-safe-for-const.rs +++ b/src/test/compile-fail/const-fn-not-safe-for-const.rs @@ -29,7 +29,7 @@ static Y: u32 = 0; const fn get_Y() -> u32 { Y //~^ ERROR E0013 - //~| ERROR cannot refer to other statics by value + //~| ERROR cannot refer to statics by value } const fn get_Y_addr() -> &'static u32 { @@ -37,5 +37,11 @@ const fn get_Y_addr() -> &'static u32 { //~^ ERROR E0013 } +const fn get() -> u32 { + let x = 22; //~ ERROR E0016 + let y = 44; //~ ERROR E0016 + x + y +} + fn main() { } diff --git a/src/test/compile-fail/const-fn-not-safe-for-const2.rs b/src/test/compile-fail/const-fn-not-safe-for-const2.rs deleted file mode 100644 index a053847e88..0000000000 --- a/src/test/compile-fail/const-fn-not-safe-for-const2.rs +++ /dev/null @@ -1,44 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// Test that we can't call random fns in a const fn or do other bad things. - -#![feature(const_fn)] - -use std::mem::transmute; - -fn random() -> u32 { 0 } - -const fn sub(x: &u32) -> usize { - unsafe { transmute(x) } -} - -const fn sub1() -> u32 { - random() -} - -static Y: u32 = 0; - -const fn get_Y() -> u32 { - Y -} - -const fn get_Y_addr() -> &'static u32 { - &Y -} - -const fn get() -> u32 { - let x = 22; //~ ERROR E0016 - let y = 44; //~ ERROR E0016 - x + y -} - -fn main() { -} diff --git a/src/test/compile-fail/const-pattern-irrefutable.rs b/src/test/compile-fail/const-pattern-irrefutable.rs index bc395af962..392f391fb5 100644 --- a/src/test/compile-fail/const-pattern-irrefutable.rs +++ b/src/test/compile-fail/const-pattern-irrefutable.rs @@ -9,20 +9,20 @@ // except according to those terms. 
mod foo { - pub const b: u8 = 2; //~ NOTE constant defined here - pub const d: u8 = 2; //~ NOTE constant defined here + pub const b: u8 = 2; + pub const d: u8 = 2; } -use foo::b as c; //~ NOTE constant imported here -use foo::d; //~ NOTE constant imported here +use foo::b as c; //~ NOTE is imported here +use foo::d; //~ NOTE is imported here -const a: u8 = 2; //~ NOTE constant defined here +const a: u8 = 2; //~ NOTE is defined here fn main() { - let a = 4; //~ ERROR only irrefutable - //~^ NOTE there already is a constant in scope - let c = 4; //~ ERROR only irrefutable - //~^ NOTE there already is a constant in scope - let d = 4; //~ ERROR only irrefutable - //~^ NOTE there already is a constant in scope + let a = 4; //~ ERROR let variables cannot + //~^ NOTE cannot be named the same as a const variable + let c = 4; //~ ERROR let variables cannot + //~^ NOTE cannot be named the same as a const variable + let d = 4; //~ ERROR let variables cannot + //~^ NOTE cannot be named the same as a const variable } diff --git a/src/test/compile-fail/const-slice-oob.rs b/src/test/compile-fail/const-slice-oob.rs index 519c4917c7..b50468c33f 100644 --- a/src/test/compile-fail/const-slice-oob.rs +++ b/src/test/compile-fail/const-slice-oob.rs @@ -9,7 +9,7 @@ // except according to those terms. const FOO: &'static[u32] = &[1, 2, 3]; -const BAR: u32 = FOO[5]; //~ ERROR const index-expr is out of bounds +const BAR: u32 = FOO[5]; //~ ERROR array index out of bounds fn main() { let _ = BAR; diff --git a/src/test/compile-fail/cross-borrow-trait.rs b/src/test/compile-fail/cross-borrow-trait.rs index d60fb1d5d1..ea9a29c0e2 100644 --- a/src/test/compile-fail/cross-borrow-trait.rs +++ b/src/test/compile-fail/cross-borrow-trait.rs @@ -19,8 +19,7 @@ pub fn main() { // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. 
let x: Box = Box::new(Foo); let _y: &Trait = x; //~ ERROR mismatched types - //~| expected `&Trait` - //~| found `Box` - //~| expected &-ptr - //~| found box + //~| expected type `&Trait` + //~| found type `Box` + //~| expected &-ptr, found box } diff --git a/src/test/compile-fail/default_ty_param_conflict.rs b/src/test/compile-fail/default_ty_param_conflict.rs index 48c5cd1ff7..4702b504f1 100644 --- a/src/test/compile-fail/default_ty_param_conflict.rs +++ b/src/test/compile-fail/default_ty_param_conflict.rs @@ -23,6 +23,9 @@ fn main() { // Here, F is instantiated with $0=uint let x = foo(); //~^ ERROR: mismatched types + //~| expected type `usize` + //~| found type `isize` + //~| NOTE: conflicting type parameter defaults `usize` and `isize` //~| NOTE: conflicting type parameter defaults `usize` and `isize` //~| NOTE: ...that was applied to an unconstrained type variable here diff --git a/src/test/compile-fail/default_ty_param_conflict_cross_crate.rs b/src/test/compile-fail/default_ty_param_conflict_cross_crate.rs index fc2c49d65a..b608c6c99b 100644 --- a/src/test/compile-fail/default_ty_param_conflict_cross_crate.rs +++ b/src/test/compile-fail/default_ty_param_conflict_cross_crate.rs @@ -24,7 +24,11 @@ fn main() { //~^ NOTE: ...that also applies to the same type variable here meh(foo); - //~^ ERROR: mismatched types: + //~^ ERROR: mismatched types //~| NOTE: conflicting type parameter defaults `bool` and `char` + //~| NOTE: conflicting type parameter defaults `bool` and `char` + //~| a second default is defined on `default_param_test::bleh` //~| NOTE: ...that was applied to an unconstrained type variable here + //~| expected type `bool` + //~| found type `char` } diff --git a/src/test/compile-fail/dep-graph-trait-impl-two-traits-same-method.rs b/src/test/compile-fail/dep-graph-trait-impl-two-traits-same-method.rs index 1afecd80ff..5e4f43af66 100644 --- a/src/test/compile-fail/dep-graph-trait-impl-two-traits-same-method.rs +++ b/src/test/compile-fail/dep-graph-trait-impl-two-traits-same-method.rs @@ -15,6 +15,7 @@ #![feature(rustc_attrs)] #![allow(dead_code)] +#![allow(unused_imports)] fn main() { } diff --git a/src/test/compile-fail/issue-30580.rs b/src/test/compile-fail/derived-errors/issue-30580.rs similarity index 100% rename from src/test/compile-fail/issue-30580.rs rename to src/test/compile-fail/derived-errors/issue-30580.rs diff --git a/src/test/compile-fail/derived-errors/issue-31997-1.rs b/src/test/compile-fail/derived-errors/issue-31997-1.rs new file mode 100644 index 0000000000..7d79c48c06 --- /dev/null +++ b/src/test/compile-fail/derived-errors/issue-31997-1.rs @@ -0,0 +1,66 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Regression test for this example from #31997 -- main goal is to +// emit as minimal and precise an error set as possible. Ideally, we'd +// only emit the E0433 error below, but right now we emit two. 
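For contrast with the new issue-31997-1.rs test that follows (which deliberately leaves `HashMap` unresolved to trigger E0433), a hedged sketch of the working shape once the import is present; the key and value types here are placeholders:

```rust
// Sketch only: with `HashMap` actually imported, the E0433 resolution error
// (and the cascade of follow-on type errors the test guards against) disappears.
use std::collections::HashMap;

fn main() {
    let mut map: HashMap<String, u32> = HashMap::new();
    map.insert("i-abc123".to_owned(), 1);
    println!("{:?}", map);
}
```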
+ +use std::io::prelude::*; +// use std::collections::HashMap; +use std::io; + +#[derive(Debug)] +struct Instance { + name: String, + start: Option, + end: Option, +} + +fn main() { + let input = io::stdin(); + let mut input = input.lock(); + + let mut map = HashMap::new(); + //~^ ERROR E0433 + + for line in input.lines() { + let line = line.unwrap(); + println!("process: {}", line); + let mut parts = line.splitn(2, ":"); + let _logfile = parts.next().unwrap(); + let rest = parts.next().unwrap(); + let mut parts = line.split(" [-] "); + + let stamp = parts.next().unwrap(); + + let rest = parts.next().unwrap(); + let words = rest.split_whitespace().collect::>(); + + let instance = words.iter().find(|a| a.starts_with("i-")).unwrap(); + let name = words[1].to_owned(); + let mut entry = map.entry(instance.to_owned()).or_insert(Instance { + name: name, + start: None, + end: None, + }); + + if rest.contains("terminating") { + assert!(entry.end.is_none()); + entry.end = Some(stamp.to_string()); + } + if rest.contains("waiting for") { + assert!(entry.start.is_none()); + entry.start = Some(stamp.to_string()); + } + + } + + println!("{:?}", map); +} diff --git a/src/test/compile-fail/derived-errors/issue-31997.rs b/src/test/compile-fail/derived-errors/issue-31997.rs new file mode 100644 index 0000000000..cf283f6d3e --- /dev/null +++ b/src/test/compile-fail/derived-errors/issue-31997.rs @@ -0,0 +1,27 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test that the resolve failure does not lead to downstream type errors. +// See issue #31997. + +trait TheTrait { } + +fn closure(x: F) -> Result + where F: FnMut() -> T, T: TheTrait, +{ + unimplemented!() +} + +fn foo() -> Result<(), ()> { + try!(closure(|| bar(0 as *mut _))); //~ ERROR unresolved name `bar` + Ok(()) +} + +fn main() { } diff --git a/src/test/compile-fail/deriving-copyclone.rs b/src/test/compile-fail/deriving-copyclone.rs new file mode 100644 index 0000000000..92fb7c5737 --- /dev/null +++ b/src/test/compile-fail/deriving-copyclone.rs @@ -0,0 +1,48 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
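Before the new deriving-copyclone.rs test body that follows, a rough sketch (an approximation, not the exact code the compiler generates) of what `#[derive(Copy, Clone)]` expands to in the two cases the test's comments describe: a no-op, copy-based `Clone` for the plain `Copy` struct, and a field-by-field ("deep") `Clone` for the generic struct, which is why cloning fails when the field type is not itself `Clone`.

```rust
// Approximate expansions, for illustration only.

struct A { a: i32, b: i64 }

// For a non-generic Copy type, the derived Clone can simply copy `*self`.
impl Copy for A {}
impl Clone for A {
    fn clone(&self) -> A { *self }
}

struct B<T> { a: i32, b: T }

// For a generic type, the derived Clone clones every field and therefore
// needs `T: Clone` on the impl.
impl<T: Clone> Clone for B<T> {
    fn clone(&self) -> B<T> {
        B { a: self.a.clone(), b: self.b.clone() }
    }
}

fn main() {
    let a = A { a: 1, b: 2 };
    let b = B { a: 1, b: String::from("owned") };
    let _ = (a.clone(), b.clone());
}
```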
+ +// this will get a no-op Clone impl +#[derive(Copy, Clone)] +struct A { + a: i32, + b: i64 +} + +// this will get a deep Clone impl +#[derive(Copy, Clone)] +struct B { + a: i32, + b: T +} + +struct C; // not Copy or Clone +#[derive(Clone)] struct D; // Clone but not Copy + +fn is_copy(_: T) {} +fn is_clone(_: T) {} + +fn main() { + // A can be copied and cloned + is_copy(A { a: 1, b: 2 }); + is_clone(A { a: 1, b: 2 }); + + // B can be copied and cloned + is_copy(B { a: 1, b: 2 }); + is_clone(B { a: 1, b: 2 }); + + // B cannot be copied or cloned + is_copy(B { a: 1, b: C }); //~ERROR Copy + is_clone(B { a: 1, b: C }); //~ERROR Clone + + // B can be cloned but not copied + is_copy(B { a: 1, b: D }); //~ERROR Copy + is_clone(B { a: 1, b: D }); +} + diff --git a/src/test/compile-fail/destructure-trait-ref.rs b/src/test/compile-fail/destructure-trait-ref.rs index 68d9795710..d0a31fbce9 100644 --- a/src/test/compile-fail/destructure-trait-ref.rs +++ b/src/test/compile-fail/destructure-trait-ref.rs @@ -40,20 +40,17 @@ fn main() { // n > m let &&x = &1isize as &T; //~^ ERROR mismatched types - //~| expected `T` - //~| found `&_` - //~| expected trait T - //~| found &-ptr + //~| expected type `T` + //~| found type `&_` + //~| expected trait T, found &-ptr let &&&x = &(&1isize as &T); //~^ ERROR mismatched types - //~| expected `T` - //~| found `&_` - //~| expected trait T - //~| found &-ptr + //~| expected type `T` + //~| found type `&_` + //~| expected trait T, found &-ptr let box box x = box 1isize as Box; //~^ ERROR mismatched types - //~| expected `T` - //~| found `Box<_>` - //~| expected trait T - //~| found box + //~| expected type `T` + //~| found type `Box<_>` + //~| expected trait T, found box } diff --git a/src/test/compile-fail/disallowed-deconstructing-destructing-struct-match.rs b/src/test/compile-fail/disallowed-deconstructing-destructing-struct-match.rs index 5078009d4b..3804920990 100644 --- a/src/test/compile-fail/disallowed-deconstructing-destructing-struct-match.rs +++ b/src/test/compile-fail/disallowed-deconstructing-destructing-struct-match.rs @@ -23,6 +23,6 @@ fn main() { match x { X { x: y } => println!("contents: {}", y) - //~^ ERROR cannot move out of type `X`, which defines the `Drop` trait + //~^ ERROR cannot move out of type `X`, which implements the `Drop` trait } } diff --git a/src/test/compile-fail/dst-bad-assign.rs b/src/test/compile-fail/dst-bad-assign.rs index 2d21d0ebc7..9e71ad2417 100644 --- a/src/test/compile-fail/dst-bad-assign.rs +++ b/src/test/compile-fail/dst-bad-assign.rs @@ -45,9 +45,8 @@ pub fn main() { let z: Box = Box::new(Bar1 {f: 36}); f5.ptr = Bar1 {f: 36}; //~^ ERROR mismatched types - //~| expected `ToBar` - //~| found `Bar1` - //~| expected trait ToBar - //~| found struct `Bar1` + //~| expected type `ToBar` + //~| found type `Bar1` + //~| expected trait ToBar, found struct `Bar1` //~| ERROR `ToBar: std::marker::Sized` is not satisfied } diff --git a/src/test/compile-fail/dst-bad-coerce4.rs b/src/test/compile-fail/dst-bad-coerce4.rs index c1443bdbb3..9d4d56cf79 100644 --- a/src/test/compile-fail/dst-bad-coerce4.rs +++ b/src/test/compile-fail/dst-bad-coerce4.rs @@ -19,8 +19,7 @@ pub fn main() { let f1: &Fat<[isize]> = &Fat { ptr: [1, 2, 3] }; let f2: &Fat<[isize; 3]> = f1; //~^ ERROR mismatched types - //~| expected `&Fat<[isize; 3]>` - //~| found `&Fat<[isize]>` - //~| expected array of 3 elements - //~| found slice + //~| expected type `&Fat<[isize; 3]>` + //~| found type `&Fat<[isize]>` + //~| expected array of 3 elements, found slice } diff 
--git a/src/test/compile-fail/empty-struct-unit-pat.rs b/src/test/compile-fail/empty-struct-unit-pat.rs index a75290c940..05733762d3 100644 --- a/src/test/compile-fail/empty-struct-unit-pat.rs +++ b/src/test/compile-fail/empty-struct-unit-pat.rs @@ -37,11 +37,11 @@ fn main() { // } match e2 { Empty2(..) => () //~ ERROR `Empty2` does not name a tuple variant or a tuple struct - //~^ ERROR hard error + //~^ WARNING hard error } match xe2 { XEmpty2(..) => () //~ ERROR `XEmpty2` does not name a tuple variant or a tuple struct - //~^ ERROR hard error + //~^ WARNING hard error } // Rejected by parser as yet // match e4 { @@ -53,11 +53,11 @@ fn main() { // } match e4 { E::Empty4(..) => () //~ ERROR `E::Empty4` does not name a tuple variant or a tuple struct - //~^ ERROR hard error + //~^ WARNING hard error } match xe4 { XE::XEmpty4(..) => (), //~ ERROR `XE::XEmpty4` does not name a tuple variant or a tuple - //~^ ERROR hard error + //~^ WARNING hard error _ => {}, } } diff --git a/src/test/compile-fail/enum-in-scope.rs b/src/test/compile-fail/enum-in-scope.rs index 7be06ec7de..6dffd1999d 100644 --- a/src/test/compile-fail/enum-in-scope.rs +++ b/src/test/compile-fail/enum-in-scope.rs @@ -11,5 +11,5 @@ struct hello(isize); fn main() { - let hello = 0; //~ERROR declaration of `hello` shadows + let hello = 0; //~ERROR cannot be named the same } diff --git a/src/test/compile-fail/expanded-cfg.rs b/src/test/compile-fail/expanded-cfg.rs new file mode 100644 index 0000000000..2f74aeba9e --- /dev/null +++ b/src/test/compile-fail/expanded-cfg.rs @@ -0,0 +1,26 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(rustc_attrs)] + +macro_rules! mac { + {} => { + #[cfg(attr)] + mod m { + #[lang_item] + fn f() {} + } + } +} + +mac! {} + +#[rustc_error] +fn main() {} //~ ERROR compilation successful diff --git a/src/test/compile-fail/explicit-self-lifetime-mismatch.rs b/src/test/compile-fail/explicit-self-lifetime-mismatch.rs index 922e58698d..b5432fafb1 100644 --- a/src/test/compile-fail/explicit-self-lifetime-mismatch.rs +++ b/src/test/compile-fail/explicit-self-lifetime-mismatch.rs @@ -16,12 +16,12 @@ struct Foo<'a,'b> { impl<'a,'b> Foo<'a,'b> { fn bar(self: Foo<'b,'a>) {} //~^ ERROR mismatched types - //~| expected `Foo<'a, 'b>` - //~| found `Foo<'b, 'a>` + //~| expected type `Foo<'a, 'b>` + //~| found type `Foo<'b, 'a>` //~| lifetime mismatch //~| ERROR mismatched types - //~| expected `Foo<'a, 'b>` - //~| found `Foo<'b, 'a>` + //~| expected type `Foo<'a, 'b>` + //~| found type `Foo<'b, 'a>` //~| lifetime mismatch } diff --git a/src/test/compile-fail/extern-main-fn.rs b/src/test/compile-fail/extern-main-fn.rs index 05ce3eefda..11f299acef 100644 --- a/src/test/compile-fail/extern-main-fn.rs +++ b/src/test/compile-fail/extern-main-fn.rs @@ -8,4 +8,4 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-extern fn main() {} //~ ERROR: main function expects type +extern fn main() {} //~ ERROR: main function has wrong type diff --git a/src/test/compile-fail/fail-simple.rs b/src/test/compile-fail/fail-simple.rs index 97b709592a..e889d35477 100644 --- a/src/test/compile-fail/fail-simple.rs +++ b/src/test/compile-fail/fail-simple.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. - -// error-pattern:unexpected token fn main() { - panic!(@); + panic!(@); //~ ERROR expected expression, found `@` } diff --git a/src/test/compile-fail/feature-gate-allow-internal-unstable-nested-macro.rs b/src/test/compile-fail/feature-gate-allow-internal-unstable-nested-macro.rs index c9251c925c..9ebf8a9b74 100644 --- a/src/test/compile-fail/feature-gate-allow-internal-unstable-nested-macro.rs +++ b/src/test/compile-fail/feature-gate-allow-internal-unstable-nested-macro.rs @@ -11,8 +11,8 @@ macro_rules! bar { () => { // more layers don't help: - #[allow_internal_unstable] - macro_rules! baz { //~ ERROR allow_internal_unstable side-steps + #[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps + macro_rules! baz { () => {} } } diff --git a/src/test/compile-fail/feature-gate-allow-internal-unstable-struct.rs b/src/test/compile-fail/feature-gate-allow-internal-unstable-struct.rs new file mode 100644 index 0000000000..b186278ef8 --- /dev/null +++ b/src/test/compile-fail/feature-gate-allow-internal-unstable-struct.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// checks that this attribute is caught on non-macro items. +// this needs a different test since this is done after expansion + +#[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps +struct S; + +fn main() {} diff --git a/src/test/compile-fail/feature-gate-negate-unsigned.rs b/src/test/compile-fail/feature-gate-negate-unsigned.rs index 93e09c6d8d..98cc2fc0c3 100644 --- a/src/test/compile-fail/feature-gate-negate-unsigned.rs +++ b/src/test/compile-fail/feature-gate-negate-unsigned.rs @@ -16,7 +16,9 @@ impl std::ops::Neg for S { fn neg(self) -> u32 { 0 } } -const _MAX: usize = -1; +// FIXME(eddyb) move this back to a `-1` literal when +// MIR building stops eagerly erroring in that case. 
+const _MAX: usize = -(2 - 1); //~^ WARN unary negation of unsigned integer //~| ERROR unary negation of unsigned integer //~| HELP use a cast or the `!` operator diff --git a/src/test/compile-fail/fn-item-type.rs b/src/test/compile-fail/fn-item-type.rs index c90a7113f1..6217a9f16b 100644 --- a/src/test/compile-fail/fn-item-type.rs +++ b/src/test/compile-fail/fn-item-type.rs @@ -22,26 +22,22 @@ impl Foo for T { /* `foo` is still default here */ } fn main() { eq(foo::, bar::); //~^ ERROR mismatched types - //~| expected `fn(isize) -> isize {foo::}` - //~| found `fn(isize) -> isize {bar::}` - //~| expected fn item - //~| found a different fn item + //~| expected type `fn(isize) -> isize {foo::}` + //~| found type `fn(isize) -> isize {bar::}` + //~| expected fn item, found a different fn item eq(foo::, foo::); //~^ ERROR mismatched types - //~| expected `fn(isize) -> isize {foo::}` - //~| found `fn(isize) -> isize {foo::}` + //~| expected u8, found i8 eq(bar::, bar::>); //~^ ERROR mismatched types - //~| expected `fn(isize) -> isize {bar::}` - //~| found `fn(isize) -> isize {bar::>}` - //~| expected struct `std::string::String` - //~| found struct `std::vec::Vec` + //~| expected type `fn(isize) -> isize {bar::}` + //~| found type `fn(isize) -> isize {bar::>}` + //~| expected struct `std::string::String`, found struct `std::vec::Vec` // Make sure we distinguish between trait methods correctly. eq(::foo, ::foo); //~^ ERROR mismatched types - //~| expected `fn() {::foo}` - //~| found `fn() {::foo}` + //~| expected u8, found u16 } diff --git a/src/test/compile-fail/fn-trait-formatting.rs b/src/test/compile-fail/fn-trait-formatting.rs index 8cbfc520ff..fd140cd1d3 100644 --- a/src/test/compile-fail/fn-trait-formatting.rs +++ b/src/test/compile-fail/fn-trait-formatting.rs @@ -16,22 +16,19 @@ fn needs_fn(x: F) where F: Fn(isize) -> isize {} fn main() { let _: () = (box |_: isize| {}) as Box; //~^ ERROR mismatched types - //~| expected `()` - //~| found `Box` - //~| expected () - //~| found box + //~| expected type `()` + //~| found type `Box` + //~| expected (), found box let _: () = (box |_: isize, isize| {}) as Box; //~^ ERROR mismatched types - //~| expected `()` - //~| found `Box` - //~| expected () - //~| found box + //~| expected type `()` + //~| found type `Box` + //~| expected (), found box let _: () = (box || -> isize { unimplemented!() }) as Box isize>; //~^ ERROR mismatched types - //~| expected `()` - //~| found `Box isize>` - //~| expected () - //~| found box + //~| expected type `()` + //~| found type `Box isize>` + //~| expected (), found box needs_fn(1); //~^ ERROR : std::ops::Fn<(isize,)>` diff --git a/src/test/compile-fail/fully-qualified-type-name1.rs b/src/test/compile-fail/fully-qualified-type-name1.rs index fb787e8572..5ea8ce2264 100644 --- a/src/test/compile-fail/fully-qualified-type-name1.rs +++ b/src/test/compile-fail/fully-qualified-type-name1.rs @@ -14,8 +14,7 @@ fn main() { let x: Option; x = 5; //~^ ERROR mismatched types - //~| expected `std::option::Option` - //~| found `_` - //~| expected enum `std::option::Option` - //~| found integral variable + //~| expected type `std::option::Option` + //~| found type `_` + //~| expected enum `std::option::Option`, found integral variable } diff --git a/src/test/compile-fail/fully-qualified-type-name2.rs b/src/test/compile-fail/fully-qualified-type-name2.rs index ab542d9080..9ba8a11d53 100644 --- a/src/test/compile-fail/fully-qualified-type-name2.rs +++ b/src/test/compile-fail/fully-qualified-type-name2.rs @@ -21,10 +21,9 @@ mod y 
{ fn bar(x: x::foo) -> y::foo { return x; //~^ ERROR mismatched types - //~| expected `y::foo` - //~| found `x::foo` - //~| expected enum `y::foo` - //~| found enum `x::foo` + //~| expected type `y::foo` + //~| found type `x::foo` + //~| expected enum `y::foo`, found enum `x::foo` } fn main() { diff --git a/src/test/compile-fail/fully-qualified-type-name4.rs b/src/test/compile-fail/fully-qualified-type-name4.rs index 9242849efc..3c8fde751f 100644 --- a/src/test/compile-fail/fully-qualified-type-name4.rs +++ b/src/test/compile-fail/fully-qualified-type-name4.rs @@ -15,10 +15,9 @@ use std::option::Option; fn bar(x: usize) -> Option { return x; //~^ ERROR mismatched types - //~| expected `std::option::Option` - //~| found `usize` - //~| expected enum `std::option::Option` - //~| found usize + //~| expected type `std::option::Option` + //~| found type `usize` + //~| expected enum `std::option::Option`, found usize } fn main() { diff --git a/src/test/compile-fail/generic-type-params-name-repr.rs b/src/test/compile-fail/generic-type-params-name-repr.rs index adf9a98a05..71d7cf792e 100644 --- a/src/test/compile-fail/generic-type-params-name-repr.rs +++ b/src/test/compile-fail/generic-type-params-name-repr.rs @@ -22,46 +22,40 @@ fn main() { // Ensure that the printed type doesn't include the default type params... let _: Foo = (); //~^ ERROR mismatched types - //~| expected `Foo` - //~| found `()` - //~| expected struct `Foo` - //~| found () + //~| expected type `Foo` + //~| found type `()` + //~| expected struct `Foo`, found () // ...even when they're present, but the same types as the defaults. let _: Foo = (); //~^ ERROR mismatched types - //~| expected `Foo` - //~| found `()` - //~| expected struct `Foo` - //~| found () + //~| expected type `Foo` + //~| found type `()` + //~| expected struct `Foo`, found () // Including cases where the default is using previous type params. let _: HashMap = (); //~^ ERROR mismatched types - //~| expected `HashMap` - //~| found `()` - //~| expected struct `HashMap` - //~| found () + //~| expected type `HashMap` + //~| found type `()` + //~| expected struct `HashMap`, found () let _: HashMap> = (); //~^ ERROR mismatched types - //~| expected `HashMap` - //~| found `()` - //~| expected struct `HashMap` - //~| found () + //~| expected type `HashMap` + //~| found type `()` + //~| expected struct `HashMap`, found () // But not when there's a different type in between. let _: Foo = (); //~^ ERROR mismatched types - //~| expected `Foo` - //~| found `()` - //~| expected struct `Foo` - //~| found () + //~| expected type `Foo` + //~| found type `()` + //~| expected struct `Foo`, found () // And don't print <> at all when there's just defaults. 
let _: Foo = (); //~^ ERROR mismatched types - //~| expected `Foo` - //~| found `()` - //~| expected struct `Foo` - //~| found () + //~| expected type `Foo` + //~| found type `()` + //~| expected struct `Foo`, found () } diff --git a/src/test/compile-fail/if-branch-types.rs b/src/test/compile-fail/if-branch-types.rs index 2c730531b8..ca9803f66b 100644 --- a/src/test/compile-fail/if-branch-types.rs +++ b/src/test/compile-fail/if-branch-types.rs @@ -11,6 +11,5 @@ fn main() { let x = if true { 10i32 } else { 10u32 }; //~^ ERROR if and else have incompatible types - //~| expected `i32` - //~| found `u32` + //~| expected i32, found u32 } diff --git a/src/test/compile-fail/if-let-arm-types.rs b/src/test/compile-fail/if-let-arm-types.rs index d179ec015d..c7b1e1a62c 100644 --- a/src/test/compile-fail/if-let-arm-types.rs +++ b/src/test/compile-fail/if-let-arm-types.rs @@ -10,6 +10,9 @@ fn main() { if let Some(b) = None { //~ ERROR: `if let` arms have incompatible types + //~^ expected (), found integral variable + //~| expected type `()` + //~| found type `_` () } else { //~ NOTE: `if let` arm with an incompatible type 1 diff --git a/src/test/compile-fail/if-without-else-result.rs b/src/test/compile-fail/if-without-else-result.rs index a9567f4272..e8aa1f70ea 100644 --- a/src/test/compile-fail/if-without-else-result.rs +++ b/src/test/compile-fail/if-without-else-result.rs @@ -11,9 +11,8 @@ fn main() { let a = if true { true }; //~^ ERROR if may be missing an else clause - //~| expected `()` - //~| found `bool` - //~| expected () - //~| found bool + //~| expected type `()` + //~| found type `bool` + //~| expected (), found bool println!("{}", a); } diff --git a/src/test/compile-fail/impl-duplicate-methods.rs b/src/test/compile-fail/impl-duplicate-methods.rs index 148958ae12..981eddc9dd 100644 --- a/src/test/compile-fail/impl-duplicate-methods.rs +++ b/src/test/compile-fail/impl-duplicate-methods.rs @@ -9,9 +9,10 @@ // except according to those terms. struct Foo; + impl Foo { - fn orange(&self){} - fn orange(&self){} //~ ERROR duplicate definitions + fn orange(&self) {} //~ NOTE previous definition of `orange` here + fn orange(&self) {} //~ ERROR duplicate definitions with name `orange` } fn main() {} diff --git a/src/test/compile-fail/import-prefix-macro-1.rs b/src/test/compile-fail/import-prefix-macro-1.rs new file mode 100644 index 0000000000..beb15a11a9 --- /dev/null +++ b/src/test/compile-fail/import-prefix-macro-1.rs @@ -0,0 +1,26 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +mod a { + pub mod b { + pub mod c { + pub struct S; + pub struct Z; + } + } +} + +macro_rules! import { + ($p: path) => (use $p {S, Z}); //~ERROR expected one of `::`, `;`, or `as`, found `{` +} + +import! { a::b::c } + +fn main() {} diff --git a/src/test/compile-fail/import-prefix-macro-2.rs b/src/test/compile-fail/import-prefix-macro-2.rs new file mode 100644 index 0000000000..56c6273aa9 --- /dev/null +++ b/src/test/compile-fail/import-prefix-macro-2.rs @@ -0,0 +1,26 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +mod a { + pub mod b { + pub mod c { + pub struct S; + pub struct Z; + } + } +} + +macro_rules! import { + ($p: path) => (use ::$p {S, Z}); //~ERROR expected identifier, found `a::b::c` +} + +import! { a::b::c } + +fn main() {} diff --git a/src/test/compile-fail/import-ty-params.rs b/src/test/compile-fail/import-ty-params.rs new file mode 100644 index 0000000000..7344f31535 --- /dev/null +++ b/src/test/compile-fail/import-ty-params.rs @@ -0,0 +1,25 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +mod a { + pub mod b { + pub mod c { + pub struct S(T); + } + } +} + +macro_rules! import { + ($p: path) => (use $p;); +} + +import! { a::b::c::S } //~ERROR type or lifetime parameters in import path + +fn main() {} diff --git a/src/test/compile-fail/integer-literal-suffix-inference.rs b/src/test/compile-fail/integer-literal-suffix-inference.rs index 8f04b58b77..7a850d90a8 100644 --- a/src/test/compile-fail/integer-literal-suffix-inference.rs +++ b/src/test/compile-fail/integer-literal-suffix-inference.rs @@ -41,168 +41,132 @@ fn main() { id_i8(a8); // ok id_i8(a16); //~^ ERROR mismatched types - //~| expected `i8` - //~| found `i16` + //~| expected i8, found i16 id_i8(a32); //~^ ERROR mismatched types - //~| expected `i8` - //~| found `i32` + //~| expected i8, found i32 id_i8(a64); //~^ ERROR mismatched types - //~| expected `i8` - //~| found `i64` + //~| expected i8, found i64 id_i16(a8); //~^ ERROR mismatched types - //~| expected `i16` - //~| found `i8` + //~| expected i16, found i8 id_i16(a16); // ok id_i16(a32); //~^ ERROR mismatched types - //~| expected `i16` - //~| found `i32` + //~| expected i16, found i32 id_i16(a64); //~^ ERROR mismatched types - //~| expected `i16` - //~| found `i64` + //~| expected i16, found i64 id_i32(a8); //~^ ERROR mismatched types - //~| expected `i32` - //~| found `i8` + //~| expected i32, found i8 id_i32(a16); //~^ ERROR mismatched types - //~| expected `i32` - //~| found `i16` + //~| expected i32, found i16 id_i32(a32); // ok id_i32(a64); //~^ ERROR mismatched types - //~| expected `i32` - //~| found `i64` + //~| expected i32, found i64 id_i64(a8); //~^ ERROR mismatched types - //~| expected `i64` - //~| found `i8` + //~| expected i64, found i8 id_i64(a16); //~^ ERROR mismatched types - //~| expected `i64` - //~| found `i16` + //~| expected i64, found i16 id_i64(a32); //~^ ERROR mismatched types - //~| expected `i64` - //~| found `i32` + //~| expected i64, found i32 id_i64(a64); // ok id_i8(c8); // ok id_i8(c16); //~^ ERROR mismatched types - //~| expected `i8` - //~| found `i16` + //~| expected i8, found i16 id_i8(c32); //~^ ERROR mismatched types - //~| expected `i8` - //~| found `i32` + //~| expected i8, found i32 id_i8(c64); //~^ ERROR mismatched types - //~| expected `i8` - //~| found `i64` + //~| expected i8, found i64 id_i16(c8); //~^ ERROR mismatched types - //~| expected `i16` - //~| found `i8` + //~| expected i16, found i8 id_i16(c16); // ok id_i16(c32); //~^ ERROR mismatched types - //~| expected `i16` - //~| found `i32` + //~| expected i16, found i32 
id_i16(c64); //~^ ERROR mismatched types - //~| expected `i16` - //~| found `i64` + //~| expected i16, found i64 id_i32(c8); //~^ ERROR mismatched types - //~| expected `i32` - //~| found `i8` + //~| expected i32, found i8 id_i32(c16); //~^ ERROR mismatched types - //~| expected `i32` - //~| found `i16` + //~| expected i32, found i16 id_i32(c32); // ok id_i32(c64); //~^ ERROR mismatched types - //~| expected `i32` - //~| found `i64` + //~| expected i32, found i64 id_i64(a8); //~^ ERROR mismatched types - //~| expected `i64` - //~| found `i8` + //~| expected i64, found i8 id_i64(a16); //~^ ERROR mismatched types - //~| expected `i64` - //~| found i16 + //~| expected i64, found i16 id_i64(a32); //~^ ERROR mismatched types - //~| expected `i64` - //~| found `i32` + //~| expected i64, found i32 id_i64(a64); // ok id_u8(b8); // ok id_u8(b16); //~^ ERROR mismatched types - //~| expected `u8` - //~| found `u16` + //~| expected u8, found u16 id_u8(b32); //~^ ERROR mismatched types - //~| expected `u8` - //~| found `u32` + //~| expected u8, found u32 id_u8(b64); //~^ ERROR mismatched types - //~| expected `u8` - //~| found `u64` + //~| expected u8, found u64 id_u16(b8); //~^ ERROR mismatched types - //~| expected `u16` - //~| found `u8` + //~| expected u16, found u8 id_u16(b16); // ok id_u16(b32); //~^ ERROR mismatched types - //~| expected `u16` - //~| found `u32` + //~| expected u16, found u32 id_u16(b64); //~^ ERROR mismatched types - //~| expected `u16` - //~| found `u64` + //~| expected u16, found u64 id_u32(b8); //~^ ERROR mismatched types - //~| expected `u32` - //~| found `u8` + //~| expected u32, found u8 id_u32(b16); //~^ ERROR mismatched types - //~| expected `u32` - //~| found `u16` + //~| expected u32, found u16 id_u32(b32); // ok id_u32(b64); //~^ ERROR mismatched types - //~| expected `u32` - //~| found `u64` + //~| expected u32, found u64 id_u64(b8); //~^ ERROR mismatched types - //~| expected `u64` - //~| found `u8` + //~| expected u64, found u8 id_u64(b16); //~^ ERROR mismatched types - //~| expected `u64` - //~| found `u16` + //~| expected u64, found u16 id_u64(b32); //~^ ERROR mismatched types - //~| expected `u64` - //~| found `u32` + //~| expected u64, found u32 id_u64(b64); // ok } diff --git a/src/test/compile-fail/integral-variable-unification-error.rs b/src/test/compile-fail/integral-variable-unification-error.rs index 3374f71591..99f2d25166 100644 --- a/src/test/compile-fail/integral-variable-unification-error.rs +++ b/src/test/compile-fail/integral-variable-unification-error.rs @@ -12,8 +12,7 @@ fn main() { let mut x = 2; x = 5.0; //~^ ERROR mismatched types - //~| expected `_` - //~| found `_` - //~| expected integral variable - //~| found floating-point variable + //~| expected type `_` + //~| found type `_` + //~| expected integral variable, found floating-point variable } diff --git a/src/test/compile-fail/issue-10176.rs b/src/test/compile-fail/issue-10176.rs index 6e84e77789..434b795ff3 100644 --- a/src/test/compile-fail/issue-10176.rs +++ b/src/test/compile-fail/issue-10176.rs @@ -11,10 +11,9 @@ fn f() -> isize { (return 1, return 2) //~^ ERROR mismatched types -//~| expected `isize` -//~| found `(_, _)` -//~| expected isize -//~| found tuple +//~| expected type `isize` +//~| found type `(_, _)` +//~| expected isize, found tuple } fn main() {} diff --git a/src/test/compile-fail/issue-10636-2.rs b/src/test/compile-fail/issue-10636-2.rs index 747252d592..beaf9e5059 100644 --- a/src/test/compile-fail/issue-10636-2.rs +++ b/src/test/compile-fail/issue-10636-2.rs @@ 
-15,4 +15,4 @@ pub fn trace_option(option: Option) { option.map(|some| 42; //~ NOTE: unclosed delimiter //~^ ERROR: expected one of } //~ ERROR: incorrect close delimiter -//~^ ERROR: unexpected token +//~^ ERROR: expected expression, found `)` diff --git a/src/test/compile-fail/issue-11319.rs b/src/test/compile-fail/issue-11319.rs index d3e44b71b1..8242fa1c2e 100644 --- a/src/test/compile-fail/issue-11319.rs +++ b/src/test/compile-fail/issue-11319.rs @@ -10,11 +10,10 @@ fn main() { match Some(10) { - //~^ ERROR match arms have incompatible types: - //~| expected `bool` - //~| found `()` - //~| expected bool - //~| found () + //~^ ERROR match arms have incompatible types + //~| expected type `bool` + //~| found type `()` + //~| expected bool, found () Some(5) => false, Some(2) => true, None => (), //~ NOTE match arm with an incompatible type diff --git a/src/test/compile-fail/issue-11714.rs b/src/test/compile-fail/issue-11714.rs index 6dde59d4a2..998576097a 100644 --- a/src/test/compile-fail/issue-11714.rs +++ b/src/test/compile-fail/issue-11714.rs @@ -9,7 +9,6 @@ // except according to those terms. fn blah() -> i32 { //~ ERROR not all control paths return a value - //~^ HELP run `rustc --explain E0269` to see a detailed explanation 1 ; //~ HELP consider removing this semicolon: diff --git a/src/test/compile-fail/issue-12997-2.rs b/src/test/compile-fail/issue-12997-2.rs index 8b467c2ba1..436d9e91dc 100644 --- a/src/test/compile-fail/issue-12997-2.rs +++ b/src/test/compile-fail/issue-12997-2.rs @@ -15,7 +15,6 @@ #[bench] fn bar(x: isize) { } //~^ ERROR mismatched types -//~| expected `fn(&mut __test::test::Bencher)` -//~| found `fn(isize) {bar}` -//~| expected &-ptr -//~| found isize +//~| expected type `fn(&mut __test::test::Bencher)` +//~| found type `fn(isize) {bar}` +//~| expected &-ptr, found isize diff --git a/src/test/compile-fail/issue-13058.rs b/src/test/compile-fail/issue-13058.rs index b552d7678d..de578257e4 100644 --- a/src/test/compile-fail/issue-13058.rs +++ b/src/test/compile-fail/issue-13058.rs @@ -36,5 +36,4 @@ fn check<'r, I: Iterator, T: Itble<'r, usize, I>>(cont: &T) -> bool fn main() { check((3, 5)); //~^ ERROR mismatched types -//~| HELP run `rustc --explain E0308` to see a detailed explanation } diff --git a/src/test/compile-fail/issue-13359.rs b/src/test/compile-fail/issue-13359.rs index 775412a12c..e33859e8c1 100644 --- a/src/test/compile-fail/issue-13359.rs +++ b/src/test/compile-fail/issue-13359.rs @@ -15,11 +15,9 @@ fn bar(_s: u32) { } fn main() { foo(1*(1 as isize)); //~^ ERROR mismatched types - //~| expected `i16` - //~| found `isize` + //~| expected i16, found isize bar(1*(1 as usize)); //~^ ERROR mismatched types - //~| expected `u32` - //~| found `usize` + //~| expected u32, found usize } diff --git a/src/test/compile-fail/issue-13428.rs b/src/test/compile-fail/issue-13428.rs index 5b8ab08aef..c771970650 100644 --- a/src/test/compile-fail/issue-13428.rs +++ b/src/test/compile-fail/issue-13428.rs @@ -11,7 +11,6 @@ // Regression test for #13428 fn foo() -> String { //~ ERROR not all control paths return a value - //~^ HELP run `rustc --explain E0269` to see a detailed explanation format!("Hello {}", "world") // Put the trailing semicolon on its own line to test that the @@ -20,7 +19,6 @@ fn foo() -> String { //~ ERROR not all control paths return a value } fn bar() -> String { //~ ERROR not all control paths return a value - //~^ HELP run `rustc --explain E0269` to see a detailed explanation "foobar".to_string() ; //~ HELP consider removing this semicolon } 
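The two E0269 tests just above (issue-11714.rs and issue-13428.rs) exercise the same pattern as the E0269 test at the top of this section: a trailing semicolon turns what should be the tail expression into a statement, so the body evaluates to `()` instead of the declared return type. A hedged sketch of the broken shape and its fix (function names here are made up):

```rust
// The failing form, kept as a comment so this sketch still compiles:
//
//     fn broken() -> String {
//         "foobar".to_string();   // E0269: not all control paths return a value
//     }                           // HELP: consider removing this semicolon
//
// Dropping the semicolon makes the call the tail expression of the body:
fn fixed() -> String {
    "foobar".to_string()
}

fn main() {
    println!("{}", fixed());
}
```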
diff --git a/src/test/compile-fail/issue-13466.rs b/src/test/compile-fail/issue-13466.rs index d2c8b679ff..17b9641160 100644 --- a/src/test/compile-fail/issue-13466.rs +++ b/src/test/compile-fail/issue-13466.rs @@ -17,16 +17,14 @@ pub fn main() { let _x: usize = match Some(1) { Ok(u) => u, //~^ ERROR mismatched types - //~| expected `std::option::Option<_>` - //~| found `std::result::Result<_, _>` - //~| expected enum `std::option::Option` - //~| found enum `std::result::Result` + //~| expected type `std::option::Option<_>` + //~| found type `std::result::Result<_, _>` + //~| expected enum `std::option::Option`, found enum `std::result::Result` Err(e) => panic!(e) //~^ ERROR mismatched types - //~| expected `std::option::Option<_>` - //~| found `std::result::Result<_, _>` - //~| expected enum `std::option::Option` - //~| found enum `std::result::Result` + //~| expected type `std::option::Option<_>` + //~| found type `std::result::Result<_, _>` + //~| expected enum `std::option::Option`, found enum `std::result::Result` }; } diff --git a/src/test/compile-fail/issue-13482-2.rs b/src/test/compile-fail/issue-13482-2.rs index e1fe2d0699..fe03373a45 100644 --- a/src/test/compile-fail/issue-13482-2.rs +++ b/src/test/compile-fail/issue-13482-2.rs @@ -17,10 +17,9 @@ fn main() { let y = match x { [] => None, //~^ ERROR mismatched types -//~| expected `[_#1i; 2]` -//~| found `[_#7t; 0]` -//~| expected an array with a fixed size of 2 elements -//~| found one with 0 elements +//~| expected type `[_#1i; 2]` +//~| found type `[_#7t; 0]` +//~| expected an array with a fixed size of 2 elements, found one with 0 elements [a,_] => Some(a) }; } diff --git a/src/test/compile-fail/issue-13482.rs b/src/test/compile-fail/issue-13482.rs index 2fbfd6cc84..7ed7f5898b 100644 --- a/src/test/compile-fail/issue-13482.rs +++ b/src/test/compile-fail/issue-13482.rs @@ -15,9 +15,9 @@ fn main() { let y = match x { [] => None, //~^ ERROR mismatched types - //~| expected `[_; 2]` - //~| found `[_; 0]` - //~| expected array with a fixed size of 2 elements + //~| expected type `[_; 2]` + //~| found type `[_; 0]` + //~| expected an array with a fixed size of 2 elements [a,_] => Some(a) }; } diff --git a/src/test/compile-fail/issue-13624.rs b/src/test/compile-fail/issue-13624.rs index 2a5805790a..e4ed87c3cb 100644 --- a/src/test/compile-fail/issue-13624.rs +++ b/src/test/compile-fail/issue-13624.rs @@ -16,10 +16,9 @@ mod a { pub fn get_enum_struct_variant() -> () { Enum::EnumStructVariant { x: 1, y: 2, z: 3 } //~^ ERROR mismatched types - //~| expected `()` - //~| found `a::Enum` - //~| expected () - //~| found enum `a::Enum` + //~| expected type `()` + //~| found type `a::Enum` + //~| expected (), found enum `a::Enum` } } @@ -32,10 +31,9 @@ mod b { match enum_struct_variant { a::Enum::EnumStructVariant { x, y, z } => { //~^ ERROR mismatched types - //~| expected `()` - //~| found `a::Enum` - //~| expected () - // found enum `a::Enum` + //~| expected type `()` + //~| found type `a::Enum` + //~| expected (), found enum `a::Enum` } } } diff --git a/src/test/compile-fail/issue-13853.rs b/src/test/compile-fail/issue-13853.rs index 7643310298..86a6bdfd4d 100644 --- a/src/test/compile-fail/issue-13853.rs +++ b/src/test/compile-fail/issue-13853.rs @@ -35,7 +35,7 @@ impl Node for Stuff { fn iterate>(graph: &G) { for node in graph.iter() { //~ ERROR no method named `iter` found - node.zomg(); //~ error: the type of this value must be known in this context + node.zomg(); } } diff --git a/src/test/compile-fail/issue-14091.rs 
b/src/test/compile-fail/issue-14091.rs index 3ceb465cb4..9c594ef485 100644 --- a/src/test/compile-fail/issue-14091.rs +++ b/src/test/compile-fail/issue-14091.rs @@ -9,9 +9,5 @@ // except according to those terms. // error-pattern:mismatched types -// error-pattern:expected `bool` -// error-pattern:found `_` -// error-pattern:expected bool -// error-pattern:found integral variable fn main(){assert!(1,1);} diff --git a/src/test/compile-fail/issue-14541.rs b/src/test/compile-fail/issue-14541.rs index deb8f00cd0..84c600d220 100644 --- a/src/test/compile-fail/issue-14541.rs +++ b/src/test/compile-fail/issue-14541.rs @@ -14,10 +14,9 @@ struct vec3 { y: f32, z: f32 } fn make(v: vec2) { let vec3 { y: _, z: _ } = v; //~^ ERROR mismatched types - //~| expected `vec2` - //~| found `vec3` - //~| expected struct `vec2` - //~| found struct `vec3` + //~| expected type `vec2` + //~| found type `vec3` + //~| expected struct `vec2`, found struct `vec3` } fn main() { } diff --git a/src/test/compile-fail/issue-15167.rs b/src/test/compile-fail/issue-15167.rs index 898e6be6fc..2bd7da91d2 100644 --- a/src/test/compile-fail/issue-15167.rs +++ b/src/test/compile-fail/issue-15167.rs @@ -11,22 +11,26 @@ // macro f should not be able to inject a reference to 'n'. macro_rules! f { () => (n) } +//~^ ERROR unresolved name `n` +//~| ERROR unresolved name `n` +//~| ERROR unresolved name `n` +//~| ERROR unresolved name `n` fn main() -> (){ for n in 0..1 { - println!("{}", f!()); //~ ERROR unresolved name `n` + println!("{}", f!()); } if let Some(n) = None { - println!("{}", f!()); //~ ERROR unresolved name `n` + println!("{}", f!()); } if false { } else if let Some(n) = None { - println!("{}", f!()); //~ ERROR unresolved name `n` + println!("{}", f!()); } while let Some(n) = None { - println!("{}", f!()); //~ ERROR unresolved name `n` + println!("{}", f!()); } } diff --git a/src/test/compile-fail/issue-15783.rs b/src/test/compile-fail/issue-15783.rs index 9a139021e4..37a2f1582b 100644 --- a/src/test/compile-fail/issue-15783.rs +++ b/src/test/compile-fail/issue-15783.rs @@ -17,9 +17,8 @@ fn main() { let x = Some(&[name]); let msg = foo(x); //~^ ERROR mismatched types -//~| expected `std::option::Option<&[&str]>` -//~| found `std::option::Option<&[&str; 1]>` -//~| expected slice -//~| found array of 1 elements +//~| expected type `std::option::Option<&[&str]>` +//~| found type `std::option::Option<&[&str; 1]>` +//~| expected slice, found array of 1 elements assert_eq!(msg, 3); } diff --git a/src/test/compile-fail/issue-15896.rs b/src/test/compile-fail/issue-15896.rs index 7381ade263..35ef9ba2b4 100644 --- a/src/test/compile-fail/issue-15896.rs +++ b/src/test/compile-fail/issue-15896.rs @@ -20,10 +20,9 @@ fn main() { E::B( Tau{t: x}, //~^ ERROR mismatched types - //~| expected `main::R` - //~| found `main::Tau` - //~| expected enum `main::R` - //~| found struct `main::Tau` + //~| expected type `main::R` + //~| found type `main::Tau` + //~| expected enum `main::R`, found struct `main::Tau` _) => x, }; } diff --git a/src/test/compile-fail/issue-16338.rs b/src/test/compile-fail/issue-16338.rs index 30775a958b..da6d081a7a 100644 --- a/src/test/compile-fail/issue-16338.rs +++ b/src/test/compile-fail/issue-16338.rs @@ -13,8 +13,7 @@ use std::raw::Slice; fn main() { let Slice { data: data, len: len } = "foo"; //~^ ERROR mismatched types - //~| expected `&str` - //~| found `std::raw::Slice<_>` - //~| expected &-ptr - //~| found struct `std::raw::Slice` + //~| expected type `&str` + //~| found type `std::raw::Slice<_>` + //~| expected 
&-ptr, found struct `std::raw::Slice` } diff --git a/src/test/compile-fail/issue-16401.rs b/src/test/compile-fail/issue-16401.rs index a90f9fe26e..df272a71ce 100644 --- a/src/test/compile-fail/issue-16401.rs +++ b/src/test/compile-fail/issue-16401.rs @@ -14,10 +14,9 @@ fn main() { match () { Slice { data: data, len: len } => (), //~^ ERROR mismatched types - //~| expected `()` - //~| found `std::raw::Slice<_>` - //~| expected () - //~| found struct `std::raw::Slice` + //~| expected type `()` + //~| found type `std::raw::Slice<_>` + //~| expected (), found struct `std::raw::Slice` _ => unreachable!() } } diff --git a/src/test/compile-fail/issue-16747.rs b/src/test/compile-fail/issue-16747.rs index 0fdb5f74e8..dd7e8a869e 100644 --- a/src/test/compile-fail/issue-16747.rs +++ b/src/test/compile-fail/issue-16747.rs @@ -19,7 +19,6 @@ struct List<'a, T: ListItem<'a>> { //~^ ERROR the parameter type `T` may not live long enough //~| HELP consider adding an explicit lifetime bound //~| NOTE ...so that the reference type `&'a [T]` does not outlive the data it points at -//~| HELP run `rustc --explain E0309` to see a detailed explanation } impl<'a, T: ListItem<'a>> Collection for List<'a, T> { fn len(&self) -> usize { diff --git a/src/test/compile-fail/issue-17033.rs b/src/test/compile-fail/issue-17033.rs index 6010e20692..f0fe01b415 100644 --- a/src/test/compile-fail/issue-17033.rs +++ b/src/test/compile-fail/issue-17033.rs @@ -12,10 +12,9 @@ fn f<'r>(p: &'r mut fn(p: &mut ())) { (*p)(()) //~ ERROR mismatched types - //~| expected `&mut ()` - //~| found `()` - //~| expected &-ptr - //~| found () + //~| expected type `&mut ()` + //~| found type `()` + //~| expected &-ptr, found () } fn main() {} diff --git a/src/test/compile-fail/issue-17263.rs b/src/test/compile-fail/issue-17263.rs index 2320bc02ba..063afe285f 100644 --- a/src/test/compile-fail/issue-17263.rs +++ b/src/test/compile-fail/issue-17263.rs @@ -15,13 +15,15 @@ struct Foo { a: isize, b: isize } fn main() { let mut x: Box<_> = box Foo { a: 1, b: 2 }; let (a, b) = (&mut x.a, &mut x.b); - //~^ ERROR cannot borrow `x` (here through borrowing `x.b`) as mutable more than once at a time - //~^^ NOTE previous borrow of `x` occurs here (through borrowing `x.a`) + //~^ ERROR cannot borrow `x` (via `x.b`) as mutable more than once at a time + //~| NOTE first mutable borrow occurs here (via `x.a`) + //~| NOTE second mutable borrow occurs here (via `x.b`) let mut foo: Box<_> = box Foo { a: 1, b: 2 }; let (c, d) = (&mut foo.a, &foo.b); - //~^ ERROR cannot borrow `foo` (here through borrowing `foo.b`) as immutable - //~^^ NOTE previous borrow of `foo` occurs here (through borrowing `foo.a`) + //~^ ERROR cannot borrow `foo` (via `foo.b`) as immutable + //~| NOTE mutable borrow occurs here (via `foo.a`) + //~| NOTE immutable borrow occurs here (via `foo.b`) } -//~^ NOTE previous borrow ends here -//~^^ NOTE previous borrow ends here +//~^ NOTE first borrow ends here +//~^^ NOTE mutable borrow ends here diff --git a/src/test/compile-fail/issue-17283.rs b/src/test/compile-fail/issue-17283.rs index c7d6443663..98208bcfdb 100644 --- a/src/test/compile-fail/issue-17283.rs +++ b/src/test/compile-fail/issue-17283.rs @@ -24,28 +24,25 @@ fn main() { // `x { ... 
}` should not be interpreted as a struct literal here if x = x { //~^ ERROR mismatched types - //~| expected `bool` - //~| found `()` - //~| expected bool - //~| found () + //~| expected type `bool` + //~| found type `()` + //~| expected bool, found () println!("{}", x); } // Explicit parentheses on the left should match behavior of above if (x = x) { //~^ ERROR mismatched types - //~| expected `bool` - //~| found `()` - //~| expected bool - //~| found () + //~| expected type `bool` + //~| found type `()` + //~| expected bool, found () println!("{}", x); } // The struct literal interpretation is fine with explicit parentheses on the right if y = (Foo { foo: x }) { //~^ ERROR mismatched types - //~| expected `bool` - //~| found `()` - //~| expected bool - //~| found () + //~| expected type `bool` + //~| found type `()` + //~| expected bool, found () println!("{}", x); } } diff --git a/src/test/compile-fail/issue-17431-2.rs b/src/test/compile-fail/issue-17431-2.rs index edbc8c8243..f39fb0e31c 100644 --- a/src/test/compile-fail/issue-17431-2.rs +++ b/src/test/compile-fail/issue-17431-2.rs @@ -9,6 +9,7 @@ // except according to those terms. struct Baz { q: Option } +//~^ ERROR recursive type `Baz` has infinite size struct Foo { q: Option } //~^ ERROR recursive type `Foo` has infinite size diff --git a/src/test/compile-fail/issue-17718-borrow-interior.rs b/src/test/compile-fail/issue-17718-borrow-interior.rs index d33c12668f..31352c57f1 100644 --- a/src/test/compile-fail/issue-17718-borrow-interior.rs +++ b/src/test/compile-fail/issue-17718-borrow-interior.rs @@ -17,7 +17,8 @@ static C: &'static usize = &(A.a); static D: [usize; 1] = [1]; static E: usize = D[0]; -//~^ ERROR: cannot refer to other statics by value +//~^ ERROR: cannot refer to the interior of another static +//~^^ ERROR: cannot refer to other statics by value static F: &'static usize = &D[0]; //~^ ERROR: cannot refer to the interior of another static diff --git a/src/test/compile-fail/issue-17718-const-bad-values.rs b/src/test/compile-fail/issue-17718-const-bad-values.rs index 6ee869d65a..af356588ed 100644 --- a/src/test/compile-fail/issue-17718-const-bad-values.rs +++ b/src/test/compile-fail/issue-17718-const-bad-values.rs @@ -10,10 +10,13 @@ const C1: &'static mut [usize] = &mut []; //~^ ERROR: references in constants may only refer to immutable values +//~| ERROR: references in constants may only refer to immutable values static mut S: usize = 3; -const C2: &'static mut usize = &mut S; -//~^ ERROR: constants cannot refer to other statics -//~^^ ERROR: references in constants may only refer to immutable values +const C2: &'static mut usize = unsafe { &mut S }; +//~^ ERROR: constants cannot refer to statics +//~| ERROR: references in constants may only refer to immutable values +//~| ERROR: references in constants may only refer to immutable values +//~| ERROR: references in constants may only refer to immutable values fn main() {} diff --git a/src/test/compile-fail/issue-17718-references.rs b/src/test/compile-fail/issue-17718-references.rs index 9d8b116f56..c159168030 100644 --- a/src/test/compile-fail/issue-17718-references.rs +++ b/src/test/compile-fail/issue-17718-references.rs @@ -14,19 +14,19 @@ const C: usize = 1; static S: usize = 1; const T1: &'static usize = &C; -const T2: &'static usize = &S; //~ ERROR: constants cannot refer to other statics +const T2: &'static usize = &S; //~ ERROR: constants cannot refer to statics static T3: &'static usize = &C; static T4: &'static usize = &S; const T5: usize = C; -const T6: usize 
= S; //~ ERROR: constants cannot refer to other statics -//~^ cannot refer to other statics +const T6: usize = S; //~ ERROR: constants cannot refer to statics +//~^ cannot refer to statics static T7: usize = C; static T8: usize = S; //~ ERROR: cannot refer to other statics by value const T9: Struct = Struct { a: C }; -const T10: Struct = Struct { a: S }; //~ ERROR: cannot refer to other statics by value -//~^ ERROR: constants cannot refer to other statics +const T10: Struct = Struct { a: S }; //~ ERROR: cannot refer to statics by value +//~^ ERROR: constants cannot refer to statics static T11: Struct = Struct { a: C }; static T12: Struct = Struct { a: S }; //~ ERROR: cannot refer to other statics by value diff --git a/src/test/compile-fail/issue-17728.rs b/src/test/compile-fail/issue-17728.rs index 787eb7a3b8..f508d7123d 100644 --- a/src/test/compile-fail/issue-17728.rs +++ b/src/test/compile-fail/issue-17728.rs @@ -108,6 +108,9 @@ impl Debug for Player { fn str_to_direction(to_parse: &str) -> RoomDirection { match to_parse { //~ ERROR match arms have incompatible types + //~^ expected enum `RoomDirection`, found enum `std::option::Option` + //~| expected type `RoomDirection` + //~| found type `std::option::Option<_>` "w" | "west" => RoomDirection::West, "e" | "east" => RoomDirection::East, "n" | "north" => RoomDirection::North, diff --git a/src/test/compile-fail/issue-17740.rs b/src/test/compile-fail/issue-17740.rs index 4381bf22e2..6b9294b203 100644 --- a/src/test/compile-fail/issue-17740.rs +++ b/src/test/compile-fail/issue-17740.rs @@ -15,12 +15,12 @@ struct Foo<'a> { impl <'a> Foo<'a>{ fn bar(self: &mut Foo) { //~^ mismatched types - //~| expected `&mut Foo<'a>` - //~| found `&mut Foo<'_>` + //~| expected type `&mut Foo<'a>` + //~| found type `&mut Foo<'_>` //~| lifetime mismatch //~| mismatched types - //~| expected `&mut Foo<'a>` - //~| found `&mut Foo<'_>` + //~| expected type `&mut Foo<'a>` + //~| found type `&mut Foo<'_>` //~| lifetime mismatch } } diff --git a/src/test/compile-fail/issue-18118-2.rs b/src/test/compile-fail/issue-18118-2.rs index 1fbf48f5b2..6efe532b5f 100644 --- a/src/test/compile-fail/issue-18118-2.rs +++ b/src/test/compile-fail/issue-18118-2.rs @@ -12,6 +12,6 @@ pub fn main() { const z: &'static isize = { static p: isize = 3; &p - //~^ ERROR constants cannot refer to other statics, insert an intermediate constant instead + //~^ ERROR constants cannot refer to statics, use a constant instead }; } diff --git a/src/test/compile-fail/issue-18118.rs b/src/test/compile-fail/issue-18118.rs index 9c8ed314d2..3afb34f037 100644 --- a/src/test/compile-fail/issue-18118.rs +++ b/src/test/compile-fail/issue-18118.rs @@ -10,6 +10,7 @@ pub fn main() { const z: &'static isize = { + //~^ ERROR blocks in constants are limited to items and tail expressions let p = 3; //~^ ERROR blocks in constants are limited to items and tail expressions &p diff --git a/src/test/compile-fail/issue-19109.rs b/src/test/compile-fail/issue-19109.rs index 1ffffa9fc7..580684e2e1 100644 --- a/src/test/compile-fail/issue-19109.rs +++ b/src/test/compile-fail/issue-19109.rs @@ -12,11 +12,10 @@ trait Trait { } fn function(t: &mut Trait) { t as *mut Trait - //~^ ERROR: mismatched types: - //~| expected `()`, - //~| found `*mut Trait` - //~| (expected (), - //~| found *-ptr) [E0308] + //~^ ERROR: mismatched types + //~| NOTE: expected type `()` + //~| NOTE: found type `*mut Trait` + //~| NOTE: expected (), found *-ptr } fn main() { } diff --git a/src/test/compile-fail/issue-19498.rs 
b/src/test/compile-fail/issue-19498.rs index 87b79b5cd6..2e2115b711 100644 --- a/src/test/compile-fail/issue-19498.rs +++ b/src/test/compile-fail/issue-19498.rs @@ -11,11 +11,13 @@ use self::A; //~ NOTE previous import of `A` here use self::B; //~ NOTE previous import of `B` here mod A {} //~ ERROR a module named `A` has already been imported in this module +//~| `A` was already imported pub mod B {} //~ ERROR a module named `B` has already been imported in this module - +//~| `B` was already imported mod C { use C::D; //~ NOTE previous import of `D` here mod D {} //~ ERROR a module named `D` has already been imported in this module + //~| `D` was already imported } fn main() {} diff --git a/src/test/compile-fail/issue-19692.rs b/src/test/compile-fail/issue-19692.rs index 53ad241687..ca1715445e 100644 --- a/src/test/compile-fail/issue-19692.rs +++ b/src/test/compile-fail/issue-19692.rs @@ -12,7 +12,7 @@ struct Homura; fn akemi(homura: Homura) { let Some(ref madoka) = Some(homura.kaname()); //~ ERROR no method named `kaname` found - madoka.clone(); //~ ERROR the type of this value must be known + madoka.clone(); } fn main() { } diff --git a/src/test/compile-fail/issue-19707.rs b/src/test/compile-fail/issue-19707.rs index 814c1a4131..9affb44b74 100644 --- a/src/test/compile-fail/issue-19707.rs +++ b/src/test/compile-fail/issue-19707.rs @@ -13,10 +13,8 @@ type foo = fn(&u8, &u8) -> &u8; //~ ERROR missing lifetime specifier //~^ HELP the signature does not say whether it is borrowed from argument 1 or argument 2 -//~^^ HELP run `rustc --explain E0106` to see a detailed explanation fn bar &u8>(f: &F) {} //~ ERROR missing lifetime specifier //~^ HELP the signature does not say whether it is borrowed from argument 1 or argument 2 -//~^^ HELP run `rustc --explain E0106` to see a detailed explanation fn main() {} diff --git a/src/test/compile-fail/issue-19991.rs b/src/test/compile-fail/issue-19991.rs index 6c9b0004f7..b368daaaf5 100644 --- a/src/test/compile-fail/issue-19991.rs +++ b/src/test/compile-fail/issue-19991.rs @@ -13,10 +13,9 @@ fn main() { if let Some(homura) = Some("madoka") { //~ ERROR missing an else clause - //~| expected `()` - //~| found `_` - //~| expected () - //~| found integral variable + //~| expected type `()` + //~| found type `_` + //~| expected (), found integral variable 765 }; } diff --git a/src/test/compile-fail/issue-20261.rs b/src/test/compile-fail/issue-20261.rs index 09044b5b50..2f1910b26b 100644 --- a/src/test/compile-fail/issue-20261.rs +++ b/src/test/compile-fail/issue-20261.rs @@ -11,6 +11,5 @@ fn main() { for (ref i,) in [].iter() { //~ ERROR mismatched types i.clone(); - //~^ ERROR: the type of this value must be known in this context } } diff --git a/src/test/compile-fail/issue-20692.rs b/src/test/compile-fail/issue-20692.rs index 62d775adac..1c9e588cb2 100644 --- a/src/test/compile-fail/issue-20692.rs +++ b/src/test/compile-fail/issue-20692.rs @@ -14,6 +14,7 @@ fn f(x: &T) { let _ = x //~^ ERROR `Array` cannot be made into an object //~| NOTE the trait cannot require that `Self : Sized` + //~| NOTE requirements on the impl of `std::ops::CoerceUnsized<&Array>` as &Array; //~^ ERROR `Array` cannot be made into an object diff --git a/src/test/compile-fail/issue-20862.rs b/src/test/compile-fail/issue-20862.rs index 7293114163..9df6358399 100644 --- a/src/test/compile-fail/issue-20862.rs +++ b/src/test/compile-fail/issue-20862.rs @@ -10,11 +10,7 @@ fn foo(x: i32) { |y| x + y -//~^ ERROR: mismatched types: -//~| expected `()`, -//~| found closure -//~| (expected 
(), -//~| found closure) [E0308] +//~^ ERROR: mismatched types } fn main() { diff --git a/src/test/compile-fail/issue-21146.rs b/src/test/compile-fail/issue-21146.rs index 4c6059c132..02f128e1f5 100644 --- a/src/test/compile-fail/issue-21146.rs +++ b/src/test/compile-fail/issue-21146.rs @@ -9,5 +9,5 @@ // except according to those terms. // error-pattern: expected item, found `parse_error` -include!("../auxiliary/issue-21146-inc.rs"); +include!("auxiliary/issue-21146-inc.rs"); fn main() {} diff --git a/src/test/compile-fail/issue-21221-1.rs b/src/test/compile-fail/issue-21221-1.rs index c53d5a0922..2bc9ec3289 100644 --- a/src/test/compile-fail/issue-21221-1.rs +++ b/src/test/compile-fail/issue-21221-1.rs @@ -55,7 +55,6 @@ impl Mul for Foo { //~| HELP `mul1::Mul` //~| HELP `mul2::Mul` //~| HELP `std::ops::Mul` -//~| HELP run `rustc --explain E0405` to see a detailed explanation //~| HELP you can import several candidates into scope (`use ...;`): } @@ -77,22 +76,19 @@ fn getMul() -> Mul { //~| HELP `mul3::Mul` //~| HELP `mul4::Mul` //~| HELP and 2 other candidates -//~| HELP run `rustc --explain E0412` to see a detailed explanation //~| HELP you can import several candidates into scope (`use ...;`): } // Let's also test what happens if the trait doesn't exist: impl ThisTraitReallyDoesntExistInAnyModuleReally for Foo { //~^ ERROR trait `ThisTraitReallyDoesntExistInAnyModuleReally` is not in scope -//~^^ HELP run `rustc --explain E0405` to see a detailed explanation -//~^^^ HELP no candidates by the name of `ThisTraitReallyDoesntExistInAnyModuleReally` found +//~| HELP no candidates by the name of `ThisTraitReallyDoesntExistInAnyModuleReally` found } // Let's also test what happens if there's just one alternative: impl Div for Foo { //~^ ERROR trait `Div` is not in scope //~| HELP `use std::ops::Div;` -//~| HELP run `rustc --explain E0405` to see a detailed explanation } fn main() { diff --git a/src/test/compile-fail/issue-21221-2.rs b/src/test/compile-fail/issue-21221-2.rs index cf5c6e8a3b..861acf62d0 100644 --- a/src/test/compile-fail/issue-21221-2.rs +++ b/src/test/compile-fail/issue-21221-2.rs @@ -28,4 +28,3 @@ struct Foo; impl T for Foo { } //~^ ERROR trait `T` is not in scope //~| HELP you can import it into scope: `use foo::bar::T;`. -//~| HELP run `rustc --explain E0405` to see a detailed explanation diff --git a/src/test/compile-fail/issue-21221-3.rs b/src/test/compile-fail/issue-21221-3.rs index a1a712d142..05786e69ce 100644 --- a/src/test/compile-fail/issue-21221-3.rs +++ b/src/test/compile-fail/issue-21221-3.rs @@ -25,7 +25,6 @@ struct Foo; impl OuterTrait for Foo {} //~^ ERROR trait `OuterTrait` is not in scope //~| HELP you can import it into scope: `use issue_21221_3::outer::OuterTrait;`. -//~| HELP run `rustc --explain E0405` to see a detailed explanation fn main() { println!("Hello, world!"); } diff --git a/src/test/compile-fail/issue-21221-4.rs b/src/test/compile-fail/issue-21221-4.rs index 1ef205bd8b..bcbee16cdc 100644 --- a/src/test/compile-fail/issue-21221-4.rs +++ b/src/test/compile-fail/issue-21221-4.rs @@ -20,7 +20,6 @@ struct Foo; impl T for Foo {} //~^ ERROR trait `T` is not in scope //~| HELP you can import it into scope: `use issue_21221_4::T;`. 
-//~| HELP run `rustc --explain E0405` to see a detailed explanation fn main() { println!("Hello, world!"); diff --git a/src/test/compile-fail/issue-21600.rs b/src/test/compile-fail/issue-21600.rs index d9dcebfda6..1d0473ec4b 100644 --- a/src/test/compile-fail/issue-21600.rs +++ b/src/test/compile-fail/issue-21600.rs @@ -23,8 +23,6 @@ fn main() { call_it(|| x.gen()); call_it(|| x.gen_mut()); //~ ERROR cannot borrow data mutably in a captured outer //~^ ERROR cannot borrow data mutably in a captured outer - //~^^ HELP run `rustc --explain E0387` to see a detailed explanation - //~^^^ HELP run `rustc --explain E0387` to see a detailed explanation - //~^^^^ HELP consider changing this closure to take self by mutable reference + //~| HELP consider changing this closure to take self by mutable reference }); } diff --git a/src/test/compile-fail/issue-21659-show-relevant-trait-impls-1.rs b/src/test/compile-fail/issue-21659-show-relevant-trait-impls-1.rs index e880a8b212..99035209e1 100644 --- a/src/test/compile-fail/issue-21659-show-relevant-trait-impls-1.rs +++ b/src/test/compile-fail/issue-21659-show-relevant-trait-impls-1.rs @@ -36,5 +36,4 @@ fn main() { //~| help: the following implementations were found: //~| help: > //~| help: > - //~| help: run `rustc --explain E0277` } diff --git a/src/test/compile-fail/issue-21659-show-relevant-trait-impls-2.rs b/src/test/compile-fail/issue-21659-show-relevant-trait-impls-2.rs index 2c5b18a811..2009c32c85 100644 --- a/src/test/compile-fail/issue-21659-show-relevant-trait-impls-2.rs +++ b/src/test/compile-fail/issue-21659-show-relevant-trait-impls-2.rs @@ -43,5 +43,4 @@ fn main() { //~| help: > //~| help: > //~| help: and 2 others - //~| help: run `rustc --explain E0277` } diff --git a/src/test/compile-fail/issue-23716.rs b/src/test/compile-fail/issue-23716.rs new file mode 100644 index 0000000000..b0d36610b7 --- /dev/null +++ b/src/test/compile-fail/issue-23716.rs @@ -0,0 +1,29 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +static foo: i32 = 0; +//~^ NOTE static variable defined here + +fn bar(foo: i32) {} +//~^ ERROR static variables cannot be referenced in a pattern, use a `const` instead +//~| static variable used in pattern + +mod submod { + pub static answer: i32 = 42; +} + +use self::submod::answer; +//~^ NOTE static variable imported here + +fn question(answer: i32) {} +//~^ ERROR static variables cannot be referenced in a pattern, use a `const` instead +//~| static variable used in pattern +fn main() { +} diff --git a/src/test/compile-fail/issue-2392.rs b/src/test/compile-fail/issue-2392.rs index 47d50eb9d5..790b774bd2 100644 --- a/src/test/compile-fail/issue-2392.rs +++ b/src/test/compile-fail/issue-2392.rs @@ -81,11 +81,11 @@ impl FuncContainerOuter { fn run(&self) { unsafe { (*self.container).f1(1); //~ ERROR no method named `f1` found - //~^ NOTE use `(*self.container.f1)(...)` + //~^ NOTE use `((*self.container).f1)(...)` (*self.container).f2(1); //~ ERROR no method named `f2` found - //~^ NOTE use `(*self.container.f2)(...)` + //~^ NOTE use `((*self.container).f2)(...)` (*self.container).f3(1); //~ ERROR no method named `f3` found - //~^ NOTE use `(*self.container.f3)(...)` + //~^ NOTE use `((*self.container).f3)(...)` } } } diff --git a/src/test/compile-fail/issue-24036.rs b/src/test/compile-fail/issue-24036.rs index 28eebea749..ac7e0f2e9a 100644 --- a/src/test/compile-fail/issue-24036.rs +++ b/src/test/compile-fail/issue-24036.rs @@ -14,7 +14,9 @@ fn closure_to_loc() { //~^ ERROR mismatched types //~| NOTE no two closures, even if identical, have the same type //~| HELP consider boxing your closure and/or using it as a trait object - //~| HELP run `rustc --explain E0308` to see a detailed explanation + //~| expected closure, found a different closure + //~| expected type `[closure + //~| found type `[closure } fn closure_from_match() { @@ -27,7 +29,9 @@ fn closure_from_match() { //~^^^^^^ ERROR match arms have incompatible types //~| NOTE no two closures, even if identical, have the same type //~| HELP consider boxing your closure and/or using it as a trait object - //~| HELP run `rustc --explain E0308` to see a detailed explanation + //~| expected closure, found a different closure + //~| expected type `[closure + //~| found type `[closure } fn main() { } diff --git a/src/test/compile-fail/issue-24081.rs b/src/test/compile-fail/issue-24081.rs index 94fb300828..188716c5e9 100644 --- a/src/test/compile-fail/issue-24081.rs +++ b/src/test/compile-fail/issue-24081.rs @@ -15,9 +15,14 @@ use std::ops::Div; //~ NOTE previous import use std::ops::Rem; //~ NOTE previous import type Add = bool; //~ ERROR a trait named `Add` has already been imported in this module +//~| was already imported struct Sub { x: f32 } //~ ERROR a trait named `Sub` has already been imported in this module +//~| was already imported enum Mul { A, B } //~ ERROR a trait named `Mul` has already been imported in this module +//~| was already imported mod Div { } //~ ERROR a trait named `Div` has already been imported in this module +//~| was already imported trait Rem { } //~ ERROR a trait named `Rem` has already been imported in this module +//~| was already imported fn main() {} diff --git a/src/test/compile-fail/issue-24357.rs b/src/test/compile-fail/issue-24357.rs index f193a07b85..5d6b989fc9 100644 --- a/src/test/compile-fail/issue-24357.rs +++ b/src/test/compile-fail/issue-24357.rs @@ -12,7 +12,9 @@ struct NoCopy; fn main() { let x = NoCopy; let f = move || { let y = x; }; - //~^ NOTE `x` moved into closure environment 
here because it has type `NoCopy` + //~^ value moved (into closure) here let z = x; //~^ ERROR use of moved value: `x` + //~| value used here after move + //~| move occurs because `x` has type `NoCopy` } diff --git a/src/test/compile-fail/issue-24446.rs b/src/test/compile-fail/issue-24446.rs index cafe6d1bb5..cbeac77479 100644 --- a/src/test/compile-fail/issue-24446.rs +++ b/src/test/compile-fail/issue-24446.rs @@ -10,11 +10,7 @@ fn main() { static foo: Fn() -> u32 = || -> u32 { - //~^ ERROR: mismatched types: - //~| expected `std::ops::Fn() -> u32 + 'static`, - //~| found closure - //~| (expected trait std::ops::Fn, - //~| found closure) + //~^ ERROR: mismatched types 0 }; } diff --git a/src/test/compile-fail/issue-25901.rs b/src/test/compile-fail/issue-25901.rs index 3254f0b2aa..72fb2a682e 100644 --- a/src/test/compile-fail/issue-25901.rs +++ b/src/test/compile-fail/issue-25901.rs @@ -11,7 +11,8 @@ struct A; struct B; -static S: &'static B = &A; //~ ERROR user-defined dereference operators +static S: &'static B = &A; +//~^ ERROR calls in statics are limited to constant functions use std::ops::Deref; diff --git a/src/test/compile-fail/issue-26472.rs b/src/test/compile-fail/issue-26472.rs new file mode 100644 index 0000000000..0d59a897ef --- /dev/null +++ b/src/test/compile-fail/issue-26472.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +mod sub { + pub struct S { len: usize } + impl S { + pub fn new() -> S { S { len: 0 } } + pub fn len(&self) -> usize { self.len } + } +} + +fn main() { + let s = sub::S::new(); + let v = s.len; + //~^ ERROR field `len` of struct `sub::S` is private + //~| NOTE a method `len` also exists, perhaps you wish to call it +} diff --git a/src/test/compile-fail/issue-26480.rs b/src/test/compile-fail/issue-26480.rs index 903df42291..634a4014e1 100644 --- a/src/test/compile-fail/issue-26480.rs +++ b/src/test/compile-fail/issue-26480.rs @@ -25,19 +25,23 @@ macro_rules! write { write(stdout, $arr.as_ptr() as *const i8, $arr.len() * size_of($arr[0])); //~^ ERROR mismatched types + //~| expected u64, found usize + //~| expected type + //~| found type } }} } macro_rules! cast { - ($x:expr) => ($x as ()) - //~^ ERROR non-scalar cast: `i32` as `()` + ($x:expr) => ($x as ()) //~ ERROR non-scalar cast } fn main() { let hello = ['H', 'e', 'y']; write!(hello); //~^ NOTE in this expansion of write! + //~| NOTE in this expansion of write! + //~| NOTE in this expansion of write! cast!(2); //~^ NOTE in this expansion of cast! diff --git a/src/test/compile-fail/issue-26548.rs b/src/test/compile-fail/issue-26548.rs index 28080ae09e..2919b0b3ca 100644 --- a/src/test/compile-fail/issue-26548.rs +++ b/src/test/compile-fail/issue-26548.rs @@ -8,10 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
+// error-pattern: overflow representing the type `S` + trait Mirror { type It: ?Sized; } impl Mirror for T { type It = Self; } struct S(Option<::It>); -//~^ ERROR recursive type `S` has infinite size fn main() { let _s = S(None); diff --git a/src/test/compile-fail/issue-26638.rs b/src/test/compile-fail/issue-26638.rs index 9cbb64c231..f918f0aed7 100644 --- a/src/test/compile-fail/issue-26638.rs +++ b/src/test/compile-fail/issue-26638.rs @@ -11,18 +11,15 @@ fn parse_type(iter: Box+'static>) -> &str { iter.next() } //~^ ERROR missing lifetime specifier [E0106] //~^^ HELP 2 elided lifetimes -//~^^^ HELP run `rustc --explain E0106` to see a detailed explanation fn parse_type_2(iter: fn(&u8)->&u8) -> &str { iter() } //~^ ERROR missing lifetime specifier [E0106] //~^^ HELP lifetime cannot be derived -//~^^^ HELP run `rustc --explain E0106` to see a detailed explanation -//~^^^^ HELP consider giving it an explicit bounded or 'static lifetime +//~^^^ HELP consider giving it an explicit bounded or 'static lifetime fn parse_type_3() -> &str { unimplemented!() } //~^ ERROR missing lifetime specifier [E0106] //~^^ HELP no value for it to be borrowed from -//~^^^ HELP run `rustc --explain E0106` to see a detailed explanation -//~^^^^ HELP consider giving it a 'static lifetime +//~^^^ HELP consider giving it a 'static lifetime fn main() {} diff --git a/src/test/compile-fail/issue-27008.rs b/src/test/compile-fail/issue-27008.rs index 2a4b98563a..bdcbaf0917 100644 --- a/src/test/compile-fail/issue-27008.rs +++ b/src/test/compile-fail/issue-27008.rs @@ -13,9 +13,8 @@ struct S; fn main() { let b = [0; S]; //~^ ERROR mismatched types - //~| expected `usize` - //~| found `S` - //~| expected usize - //~| found struct `S` + //~| expected type `usize` + //~| found type `S` + //~| expected usize, found struct `S` //~| ERROR expected positive integer for repeat count, found struct } diff --git a/src/test/compile-fail/issue-27033.rs b/src/test/compile-fail/issue-27033.rs index 051edfe5f4..b0904dfeaa 100644 --- a/src/test/compile-fail/issue-27033.rs +++ b/src/test/compile-fail/issue-27033.rs @@ -10,11 +10,11 @@ fn main() { match Some(1) { - None @ _ => {} //~ ERROR declaration of `None` shadows an enum variant + None @ _ => {} //~ ERROR cannot be named the same }; const C: u8 = 1; match 1 { - C @ 2 => { //~ ERROR only irrefutable patterns allowed here + C @ 2 => { //~ ERROR cannot be named the same println!("{}", C); } _ => {} diff --git a/src/test/compile-fail/issue-27842.rs b/src/test/compile-fail/issue-27842.rs new file mode 100644 index 0000000000..28050a2ee9 --- /dev/null +++ b/src/test/compile-fail/issue-27842.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + let tup = (0, 1, 2); + // the case where we show a suggestion + let _ = tup[0]; + //~^ ERROR cannot index a value of type + //~| HELP to access tuple elements, use tuple indexing syntax as shown + //~| SUGGESTION let _ = tup.0 + + // the case where we show just a general hint + let i = 0_usize; + let _ = tup[i]; + //~^ ERROR cannot index a value of type + //~| HELP to access tuple elements, use tuple indexing syntax (e.g. 
`tuple.0`) +} diff --git a/src/test/compile-fail/issue-27895.rs b/src/test/compile-fail/issue-27895.rs index 959818b49c..3b3abc94a4 100644 --- a/src/test/compile-fail/issue-27895.rs +++ b/src/test/compile-fail/issue-27895.rs @@ -14,8 +14,7 @@ fn main() { match i { 0...index => println!("winner"), - //~^ ERROR paths in constants may only refer to constants or functions - //~| ERROR non-constant path in constant expression + //~^ ERROR non-constant path in constant expression _ => println!("hello"), } } diff --git a/src/test/compile-fail/issue-28113.rs b/src/test/compile-fail/issue-28113.rs index c5c4fb0701..5c697b69c8 100644 --- a/src/test/compile-fail/issue-28113.rs +++ b/src/test/compile-fail/issue-28113.rs @@ -9,7 +9,8 @@ // except according to those terms. const X: u8 = - || -> u8 { 5 }() //~ ERROR function calls in constants are limited + || -> u8 { 5 }() + //~^ ERROR calls in constants are limited to constant functions ; fn main() {} diff --git a/src/test/compile-fail/issue-29084.rs b/src/test/compile-fail/issue-29084.rs index 78913e759a..00d2969a0f 100644 --- a/src/test/compile-fail/issue-29084.rs +++ b/src/test/compile-fail/issue-29084.rs @@ -13,10 +13,13 @@ macro_rules! foo { fn bar(d: u8) { } bar(&mut $d); //~^ ERROR mismatched types + //~| expected u8, found &-ptr + //~| expected type `u8` + //~| found type `&mut u8` }} } fn main() { foo!(0u8); - //~^ NOTE in this expansion of foo! + //~^ in this expansion of foo! } diff --git a/src/test/compile-fail/issue-29124.rs b/src/test/compile-fail/issue-29124.rs index b3dc043f50..a72dac0d5d 100644 --- a/src/test/compile-fail/issue-29124.rs +++ b/src/test/compile-fail/issue-29124.rs @@ -25,11 +25,7 @@ fn main() { obj::func.x(); //~^ ERROR no method named `x` found for type `fn() -> ret {obj::func}` in the current scope //~^^ NOTE obj::func is a function, perhaps you wish to call it - //~^^^ HELP try calling the base function: - //~| SUGGESTION obj::func().x(); func.x(); //~^ ERROR no method named `x` found for type `fn() -> ret {func}` in the current scope //~^^ NOTE func is a function, perhaps you wish to call it - //~^^^ HELP try calling the base function: - //~| SUGGESTION func().x(); } diff --git a/src/test/compile-fail/issue-2951.rs b/src/test/compile-fail/issue-2951.rs index d0781b5658..11ff7ab247 100644 --- a/src/test/compile-fail/issue-2951.rs +++ b/src/test/compile-fail/issue-2951.rs @@ -12,10 +12,9 @@ fn foo(x: T, y: U) { let mut xx = x; xx = y; //~^ ERROR mismatched types - //~| expected `T` - //~| found `U` - //~| expected type parameter - //~| found a different type parameter + //~| expected type `T` + //~| found type `U` + //~| expected type parameter, found a different type parameter } fn main() { diff --git a/src/test/compile-fail/issue-30123.rs b/src/test/compile-fail/issue-30123.rs index cfd3cd3af3..ae1320c821 100644 --- a/src/test/compile-fail/issue-30123.rs +++ b/src/test/compile-fail/issue-30123.rs @@ -15,5 +15,5 @@ use issue_30123_aux::*; fn main() { let ug = Graph::::new_undirected(); - //~^ ERR no associated item named `new_undirected` found for type + //~^ ERROR no associated item named `new_undirected` found for type } diff --git a/src/test/compile-fail/issue-30255.rs b/src/test/compile-fail/issue-30255.rs new file mode 100644 index 0000000000..1daa6a61f7 --- /dev/null +++ b/src/test/compile-fail/issue-30255.rs @@ -0,0 +1,35 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +// +// Test that lifetime elision error messages correctly omit parameters +// with no elided lifetimes + +struct S<'a> { + field: &'a i32, +} + +fn f(a: &S, b: i32) -> &i32 { +//~^ ERROR missing lifetime specifier [E0106] +//~^^ HELP does not say which one of `a`'s 2 elided lifetimes it is borrowed from + panic!(); +} + +fn g(a: &S, b: bool, c: &i32) -> &i32 { +//~^ ERROR missing lifetime specifier [E0106] +//~^^ HELP does not say whether it is borrowed from one of `a`'s 2 elided lifetimes or `c` + panic!(); +} + +fn h(a: &bool, b: bool, c: &S, d: &i32) -> &i32 { +//~^ ERROR missing lifetime specifier [E0106] +//~^^ HELP does not say whether it is borrowed from `a`, one of `c`'s 2 elided lifetimes, or `d` + panic!(); +} + diff --git a/src/test/compile-fail/issue-30302.rs b/src/test/compile-fail/issue-30302.rs index 56f0b31da0..26508a4722 100644 --- a/src/test/compile-fail/issue-30302.rs +++ b/src/test/compile-fail/issue-30302.rs @@ -18,10 +18,8 @@ fn is_empty(s: Stack) -> bool { Nil => true, //~^ WARN pattern binding `Nil` is named the same as one of the variants of the type `Stack` //~| HELP consider making the path in the pattern qualified: `Stack::Nil` -//~| HELP run `rustc --explain E0170` to see a detailed explanation _ => false //~^ ERROR unreachable pattern -//~| HELP run `rustc --explain E0001` to see a detailed explanation } } diff --git a/src/test/compile-fail/issue-31221.rs b/src/test/compile-fail/issue-31221.rs new file mode 100644 index 0000000000..2b3df9ad1d --- /dev/null +++ b/src/test/compile-fail/issue-31221.rs @@ -0,0 +1,49 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +enum Enum { + Var1, + Var2, +} + +fn main() { + use Enum::*; + let s = Var1; + match s { + Var1 => (), + Var3 => (), + //~^ NOTE this pattern matches any value + Var2 => (), + //~^ ERROR unreachable pattern + }; + match &s { + &Var1 => (), + &Var3 => (), + //~^ NOTE this pattern matches any value + &Var2 => (), + //~^ ERROR unreachable pattern + }; + let t = (Var1, Var1); + match t { + (Var1, b) => (), + (c, d) => (), + //~^ NOTE this pattern matches any value + anything => () + //~^ ERROR unreachable pattern + }; + // `_` need not emit a note, it is pretty obvious already. + let t = (Var1, Var1); + match t { + (Var1, b) => (), + _ => (), + anything => () + //~^ ERROR unreachable pattern + }; +} diff --git a/src/test/compile-fail/issue-31424.rs b/src/test/compile-fail/issue-31424.rs new file mode 100644 index 0000000000..262efab22a --- /dev/null +++ b/src/test/compile-fail/issue-31424.rs @@ -0,0 +1,30 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// forbid-output: &mut mut self + +struct Struct; + +impl Struct { + fn foo(&mut self) { + (&mut self).bar(); + //~^ ERROR cannot borrow immutable argument `self` as mutable + // ... and no SUGGESTION that suggests `&mut mut self` + } + + // In this case we could keep the suggestion, but to distinguish the + // two cases is pretty hard. It's an obscure case anyway. + fn bar(self: &mut Self) { + (&mut self).bar(); + //~^ ERROR cannot borrow immutable argument `self` as mutable + } +} + +fn main () {} diff --git a/src/test/compile-fail/issue-31804.rs b/src/test/compile-fail/issue-31804.rs index b6a04bee85..cea52b11c5 100644 --- a/src/test/compile-fail/issue-31804.rs +++ b/src/test/compile-fail/issue-31804.rs @@ -13,4 +13,4 @@ fn main() { let -} //~ ERROR unexpected token: `}` +} //~ ERROR expected pattern, found `}` diff --git a/src/test/compile-fail/issue-32655.rs b/src/test/compile-fail/issue-32655.rs new file mode 100644 index 0000000000..edd7fe4a1e --- /dev/null +++ b/src/test/compile-fail/issue-32655.rs @@ -0,0 +1,33 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![allow(dead_code)] +#![feature(rustc_attrs)] + +macro_rules! foo ( + () => ( + #[derive_Clone] //~ WARN attributes of the form + struct T; + ); +); + +macro_rules! bar ( + ($e:item) => ($e) +); + +foo!(); + +bar!( + #[derive_Clone] //~ WARN attributes of the form + struct S; +); + +#[rustc_error] +fn main() {} //~ ERROR compilation successful diff --git a/src/test/compile-fail/issue-32709.rs b/src/test/compile-fail/issue-32709.rs new file mode 100644 index 0000000000..f9d11f3a17 --- /dev/null +++ b/src/test/compile-fail/issue-32709.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(question_mark)] + +// Make sure that the span of try shorthand does not include the trailing +// semicolon; +fn a() -> Result { + Err(5)?; //~ ERROR 16:5: 16:12 + Ok(1) +} + +fn main() {} diff --git a/src/test/compile-fail/issue-32782.rs b/src/test/compile-fail/issue-32782.rs new file mode 100644 index 0000000000..696ea0ef54 --- /dev/null +++ b/src/test/compile-fail/issue-32782.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +macro_rules! bar ( + () => () +); + +macro_rules! foo ( + () => ( + #[allow_internal_unstable] //~ ERROR allow_internal_unstable side-steps + bar!(); + ); +); + +foo!(); +fn main() {} diff --git a/src/test/compile-fail/issue-32833.rs b/src/test/compile-fail/issue-32833.rs new file mode 100644 index 0000000000..22261d98a1 --- /dev/null +++ b/src/test/compile-fail/issue-32833.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use bar::Foo; //~ ERROR There is no `Foo` in `bar` [E0432] +mod bar { + use Foo; //~ ERROR There is no `Foo` in the crate root [E0432] +} + +fn main() {} diff --git a/src/test/compile-fail/issue-32922.rs b/src/test/compile-fail/issue-32922.rs new file mode 100644 index 0000000000..491c087c10 --- /dev/null +++ b/src/test/compile-fail/issue-32922.rs @@ -0,0 +1,41 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(rustc_attrs)] +#![allow(warnings)] + +macro_rules! foo { () => { + let x = 1; + macro_rules! bar { () => {x} } + let _ = bar!(); +}} + +macro_rules! bar { // test issue #31856 + ($n:ident) => ( + let a = 1; + let $n = a; + ) +} + +macro_rules! baz { + ($i:ident) => { + let mut $i = 2; + $i = $i + 1; + } +} + +#[rustc_error] +fn main() { //~ ERROR compilation successful + foo! {}; + bar! {}; + + let mut a = true; + baz!(a); +} diff --git a/src/test/compile-fail/issue-32963.rs b/src/test/compile-fail/issue-32963.rs new file mode 100644 index 0000000000..c4e8f76611 --- /dev/null +++ b/src/test/compile-fail/issue-32963.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::mem; + +trait Misc {} + +fn size_of_copy() -> usize { mem::size_of::() } + +fn main() { + size_of_copy::(); + //~^ ERROR `Misc + Copy: std::marker::Copy` is not satisfied +} diff --git a/src/test/compile-fail/issue-33464.rs b/src/test/compile-fail/issue-33464.rs new file mode 100644 index 0000000000..992bd33ea2 --- /dev/null +++ b/src/test/compile-fail/issue-33464.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Make sure that the spans of import errors are correct. 
+ +use abc::one_el; +//~^ ERROR 13:5: 13:16 +use abc::{a, bbb, cccccc}; +//~^ ERROR 15:11: 15:12 +//~^^ ERROR 15:14: 15:17 +//~^^^ ERROR 15:19: 15:25 +use a_very_long_name::{el, el2}; +//~^ ERROR 19:24: 19:26 +//~^^ ERROR 19:28: 19:31 + +fn main() {} diff --git a/src/test/compile-fail/issue-3477.rs b/src/test/compile-fail/issue-3477.rs index 43ef1b59cc..0bad7372a1 100644 --- a/src/test/compile-fail/issue-3477.rs +++ b/src/test/compile-fail/issue-3477.rs @@ -11,6 +11,5 @@ fn main() { let _p: char = 100; //~^ ERROR mismatched types - //~| expected `char` - //~| found `u8` + //~| expected char, found u8 } diff --git a/src/test/compile-fail/issue-3563.rs b/src/test/compile-fail/issue-3563.rs index 29c1c584ee..7928c04b9d 100644 --- a/src/test/compile-fail/issue-3563.rs +++ b/src/test/compile-fail/issue-3563.rs @@ -13,10 +13,6 @@ trait A { || self.b() //~^ ERROR no method named `b` found for type `&Self` in the current scope //~| ERROR mismatched types - //~| expected `()` - //~| found closure - //~| expected () - //~| found closure } } fn main() {} diff --git a/src/test/compile-fail/issue-3680.rs b/src/test/compile-fail/issue-3680.rs index fc918c278e..e698e6da52 100644 --- a/src/test/compile-fail/issue-3680.rs +++ b/src/test/compile-fail/issue-3680.rs @@ -12,9 +12,8 @@ fn main() { match None { Err(_) => () //~^ ERROR mismatched types - //~| expected `std::option::Option<_>` - //~| found `std::result::Result<_, _>` - //~| expected enum `std::option::Option` - //~| found enum `std::result::Result` + //~| expected type `std::option::Option<_>` + //~| found type `std::result::Result<_, _>` + //~| expected enum `std::option::Option`, found enum `std::result::Result` } } diff --git a/src/test/compile-fail/issue-3907.rs b/src/test/compile-fail/issue-3907.rs index 1dbf211b26..c99ff1813e 100644 --- a/src/test/compile-fail/issue-3907.rs +++ b/src/test/compile-fail/issue-3907.rs @@ -11,14 +11,14 @@ // aux-build:issue_3907.rs extern crate issue_3907; -type Foo = issue_3907::Foo; //~ NOTE: type defined here +type Foo = issue_3907::Foo; //~ NOTE: type aliases cannot be used for traits struct S { name: isize } impl Foo for S { //~ ERROR: `Foo` is not a trait - //~^ NOTE: `type` aliases cannot be used for traits + //~| `Foo` is not a trait fn bar() { } } diff --git a/src/test/compile-fail/issue-3973.rs b/src/test/compile-fail/issue-3973.rs index 54eb2a9082..92456760b0 100644 --- a/src/test/compile-fail/issue-3973.rs +++ b/src/test/compile-fail/issue-3973.rs @@ -31,5 +31,5 @@ impl ToString_ for Point { fn main() { let p = Point::new(0.0, 0.0); //~^ ERROR no associated item named `new` found for type `Point` in the current scope - println!("{}", p.to_string()); //~ ERROR type of this value must be known + println!("{}", p.to_string()); } diff --git a/src/test/compile-fail/issue-4201.rs b/src/test/compile-fail/issue-4201.rs index b5af1f03b6..58423341cc 100644 --- a/src/test/compile-fail/issue-4201.rs +++ b/src/test/compile-fail/issue-4201.rs @@ -13,10 +13,9 @@ fn main() { 0 } else if false { //~^ ERROR if may be missing an else clause -//~| expected `()` -//~| found `_` -//~| expected () -//~| found integral variable +//~| expected type `()` +//~| found type `_` +//~| expected (), found integral variable 1 }; } diff --git a/src/test/compile-fail/issue-4517.rs b/src/test/compile-fail/issue-4517.rs index a1804b5a26..fbd8972cbf 100644 --- a/src/test/compile-fail/issue-4517.rs +++ b/src/test/compile-fail/issue-4517.rs @@ -14,8 +14,7 @@ fn main() { let foo: [u8; 4] = [1; 4]; bar(foo); //~^ ERROR mismatched types - 
//~| expected `usize` - //~| found `[u8; 4]` - //~| expected usize - //~| found array of 4 elements + //~| expected type `usize` + //~| found type `[u8; 4]` + //~| expected usize, found array of 4 elements } diff --git a/src/test/compile-fail/issue-4968.rs b/src/test/compile-fail/issue-4968.rs index e7cd20f38a..7c0905873d 100644 --- a/src/test/compile-fail/issue-4968.rs +++ b/src/test/compile-fail/issue-4968.rs @@ -14,8 +14,7 @@ const A: (isize,isize) = (4,2); fn main() { match 42 { A => () } //~^ ERROR mismatched types - //~| expected `_` - //~| found `(isize, isize)` - //~| expected integral variable - //~| found tuple + //~| expected type `_` + //~| found type `(isize, isize)` + //~| expected integral variable, found tuple } diff --git a/src/test/compile-fail/issue-5035.rs b/src/test/compile-fail/issue-5035.rs index dabeb50384..a186a399a1 100644 --- a/src/test/compile-fail/issue-5035.rs +++ b/src/test/compile-fail/issue-5035.rs @@ -9,7 +9,8 @@ // except according to those terms. trait I {} -type K = I; //~ NOTE: type defined here +type K = I; +//~^ NOTE: aliases cannot be used for traits impl K for isize {} //~ ERROR: `K` is not a trait -//~^ NOTE: `type` aliases cannot be used for traits +//~| is not a trait fn main() {} diff --git a/src/test/compile-fail/issue-5100.rs b/src/test/compile-fail/issue-5100.rs index 304b6f185f..9e78b7b947 100644 --- a/src/test/compile-fail/issue-5100.rs +++ b/src/test/compile-fail/issue-5100.rs @@ -16,48 +16,43 @@ enum A { B, C } fn main() { match (true, false) { A::B => (), -//~^ ERROR mismatched types: -//~| expected `(bool, bool)` -//~| found `A` -//~| expected tuple -//~| found enum `A` +//~^ ERROR mismatched types +//~| expected type `(bool, bool)` +//~| found type `A` +//~| expected tuple, found enum `A` _ => () } match (true, false) { (true, false, false) => () //~^ ERROR mismatched types -//~| expected `(bool, bool)` -//~| found `(_, _, _)` -//~| expected a tuple with 2 elements -//~| found one with 3 elements +//~| expected type `(bool, bool)` +//~| found type `(_, _, _)` +//~| expected a tuple with 2 elements, found one with 3 elements } match (true, false) { (true, false, false) => () //~^ ERROR mismatched types -//~| expected `(bool, bool)` -//~| found `(_, _, _)` -//~| expected a tuple with 2 elements -//~| found one with 3 elements +//~| expected type `(bool, bool)` +//~| found type `(_, _, _)` +//~| expected a tuple with 2 elements, found one with 3 elements } match (true, false) { box (true, false) => () //~^ ERROR mismatched types -//~| expected `(bool, bool)` -//~| found `Box<_>` -//~| expected tuple -//~| found box +//~| expected type `(bool, bool)` +//~| found type `Box<_>` +//~| expected tuple, found box } match (true, false) { &(true, false) => () //~^ ERROR mismatched types -//~| expected `(bool, bool)` -//~| found `&_` -//~| expected tuple -//~| found &-ptr +//~| expected type `(bool, bool)` +//~| found type `&_` +//~| expected tuple, found &-ptr } @@ -69,6 +64,5 @@ fn main() { // Make sure none of the errors above were fatal let x: char = true; //~ ERROR mismatched types - //~| expected `char` - //~| found `bool` + //~| expected char, found bool } diff --git a/src/test/compile-fail/issue-5358-1.rs b/src/test/compile-fail/issue-5358-1.rs index 32702d3e2f..d8aad54fd3 100644 --- a/src/test/compile-fail/issue-5358-1.rs +++ b/src/test/compile-fail/issue-5358-1.rs @@ -15,10 +15,9 @@ fn main() { match S(Either::Left(5)) { Either::Right(_) => {} //~^ ERROR mismatched types - //~| expected `S` - //~| found `Either<_, _>` - //~| expected 
struct `S` - //~| found enum `Either` + //~| expected type `S` + //~| found type `Either<_, _>` + //~| expected struct `S`, found enum `Either` _ => {} } } diff --git a/src/test/compile-fail/issue-5500.rs b/src/test/compile-fail/issue-5500.rs index 565634191b..cacbf7656d 100644 --- a/src/test/compile-fail/issue-5500.rs +++ b/src/test/compile-fail/issue-5500.rs @@ -11,8 +11,7 @@ fn main() { &panic!() //~^ ERROR mismatched types - //~| expected `()` - //~| found `&_` - //~| expected () - //~| found &-ptr + //~| expected type `()` + //~| found type `&_` + //~| expected (), found &-ptr } diff --git a/src/test/compile-fail/issue-6702.rs b/src/test/compile-fail/issue-6702.rs index 6cb825a9be..66ed817ffa 100644 --- a/src/test/compile-fail/issue-6702.rs +++ b/src/test/compile-fail/issue-6702.rs @@ -16,5 +16,4 @@ struct Monster { fn main() { let _m = Monster(); //~ ERROR `Monster` is the name of a struct or //~^ HELP did you mean to write: `Monster { /* fields */ }`? - //~| HELP run `rustc --explain E0423` to see a detailed explanation } diff --git a/src/test/compile-fail/issue-7061.rs b/src/test/compile-fail/issue-7061.rs index e261249bc9..1519d71dd3 100644 --- a/src/test/compile-fail/issue-7061.rs +++ b/src/test/compile-fail/issue-7061.rs @@ -13,10 +13,9 @@ struct BarStruct; impl<'a> BarStruct { fn foo(&'a mut self) -> Box { self } //~^ ERROR mismatched types - //~| expected `Box` - //~| found `&'a mut BarStruct` - //~| expected box - //~| found &-ptr + //~| expected type `Box` + //~| found type `&'a mut BarStruct` + //~| expected box, found &-ptr } fn main() {} diff --git a/src/test/compile-fail/issue-7092.rs b/src/test/compile-fail/issue-7092.rs index 4a278bbdeb..26e1597b1d 100644 --- a/src/test/compile-fail/issue-7092.rs +++ b/src/test/compile-fail/issue-7092.rs @@ -15,11 +15,10 @@ fn foo(x: Whatever) { match x { Some(field) => //~^ ERROR mismatched types -//~| expected `Whatever` -//~| found `std::option::Option<_>` -//~| expected enum `Whatever` -//~| found enum `std::option::Option` - field.access(), //~ ERROR the type of this value must be known in this context +//~| expected type `Whatever` +//~| found type `std::option::Option<_>` +//~| expected enum `Whatever`, found enum `std::option::Option` + field.access(), } } diff --git a/src/test/compile-fail/issue-7867.rs b/src/test/compile-fail/issue-7867.rs index 95513860b0..e0de860b0e 100644 --- a/src/test/compile-fail/issue-7867.rs +++ b/src/test/compile-fail/issue-7867.rs @@ -16,25 +16,22 @@ fn main() { match (true, false) { A::B => (), //~^ ERROR mismatched types - //~| expected `(bool, bool)` - //~| found `A` - //~| expected tuple - //~| found enum `A` + //~| expected type `(bool, bool)` + //~| found type `A` + //~| expected tuple, found enum `A` _ => () } match &Some(42) { Some(x) => (), //~^ ERROR mismatched types - //~| expected `&std::option::Option<_>` - //~| found `std::option::Option<_>` - //~| expected &-ptr - //~| found enum `std::option::Option` + //~| expected type `&std::option::Option<_>` + //~| found type `std::option::Option<_>` + //~| expected &-ptr, found enum `std::option::Option` None => () //~^ ERROR mismatched types - //~| expected `&std::option::Option<_>` - //~| found `std::option::Option<_>` - //~| expected &-ptr - //~| found enum `std::option::Option` + //~| expected type `&std::option::Option<_>` + //~| found type `std::option::Option<_>` + //~| expected &-ptr, found enum `std::option::Option` } } diff --git a/src/test/compile-fail/issue-9243.rs b/src/test/compile-fail/issue-9243.rs index 7424a45d04..58bdff6c04 
100644 --- a/src/test/compile-fail/issue-9243.rs +++ b/src/test/compile-fail/issue-9243.rs @@ -10,11 +10,12 @@ // Regression test for issue 9243 -struct Test { +pub struct Test { mem: isize, } -pub static g_test: Test = Test {mem: 0}; //~ ERROR statics are not allowed to have destructors +pub static g_test: Test = Test {mem: 0}; +//~^ ERROR destructors in statics are an unstable feature impl Drop for Test { fn drop(&mut self) {} diff --git a/src/test/compile-fail/issue-9575.rs b/src/test/compile-fail/issue-9575.rs index 94dd787f08..9295eeb177 100644 --- a/src/test/compile-fail/issue-9575.rs +++ b/src/test/compile-fail/issue-9575.rs @@ -12,6 +12,6 @@ #[start] fn start(argc: isize, argv: *const *const u8, crate_map: *const u8) -> isize { - //~^ ERROR incorrect number of function parameters + //~^ start function has wrong type 0 } diff --git a/src/test/compile-fail/issue32829.rs b/src/test/compile-fail/issue32829.rs new file mode 100644 index 0000000000..e0b847fc99 --- /dev/null +++ b/src/test/compile-fail/issue32829.rs @@ -0,0 +1,88 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +#![feature(const_fn)] + +const bad : u32 = { + { + 5; //~ ERROR: blocks in constants are limited to items and tail expressions + 0 + } +}; + +const bad_two : u32 = { + { + invalid(); + //~^ ERROR: blocks in constants are limited to items and tail expressions + //~^^ ERROR: calls in constants are limited to constant functions, struct and enum + 0 + } +}; + +const bad_three : u32 = { + { + valid(); + //~^ ERROR: blocks in constants are limited to items and tail expressions + 0 + } +}; + +static bad_four : u32 = { + { + 5; //~ ERROR: blocks in statics are limited to items and tail expressions + 0 + } +}; + +static bad_five : u32 = { + { + invalid(); + //~^ ERROR: blocks in statics are limited to items and tail expressions + //~^^ ERROR: calls in statics are limited to constant functions, struct and enum + 0 + } +}; + +static bad_six : u32 = { + { + valid(); + //~^ ERROR: blocks in statics are limited to items and tail expressions + 0 + } +}; + +static mut bad_seven : u32 = { + { + 5; //~ ERROR: blocks in statics are limited to items and tail expressions + 0 + } +}; + +static mut bad_eight : u32 = { + { + invalid(); + //~^ ERROR: blocks in statics are limited to items and tail expressions + //~^^ ERROR: calls in statics are limited to constant functions, struct and enum + 0 + } +}; + +static mut bad_nine : u32 = { + { + valid(); + //~^ ERROR: blocks in statics are limited to items and tail expressions + 0 + } +}; + + +fn invalid() {} +const fn valid() {} + +fn main() {} diff --git a/src/test/compile-fail/keyword-false-as-identifier.rs b/src/test/compile-fail/keyword-false-as-identifier.rs index d875898f8b..e8af94f16b 100644 --- a/src/test/compile-fail/keyword-false-as-identifier.rs +++ b/src/test/compile-fail/keyword-false-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// This file was auto-generated using 'src/etc/generate-keyword-tests.py false' - fn main() { - let false = "foo"; //~ error: ident + let false = "foo"; //~ error: mismatched types } diff --git a/src/test/compile-fail/keyword-true-as-identifier.rs b/src/test/compile-fail/keyword-true-as-identifier.rs index 048b640c0b..90414fa912 100644 --- a/src/test/compile-fail/keyword-true-as-identifier.rs +++ b/src/test/compile-fail/keyword-true-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// This file was auto-generated using 'src/etc/generate-keyword-tests.py true' - fn main() { - let true = "foo"; //~ error: ident + let true = "foo"; //~ error: mismatched types } diff --git a/src/test/compile-fail/kindck-copy.rs b/src/test/compile-fail/kindck-copy.rs index 08b4e1a45f..747fe2d204 100644 --- a/src/test/compile-fail/kindck-copy.rs +++ b/src/test/compile-fail/kindck-copy.rs @@ -45,15 +45,15 @@ fn test<'a,T,U:Copy>(_: &'a isize) { // borrowed object types are generally ok assert_copy::<&'a Dummy>(); - assert_copy::<&'a (Dummy+Copy)>(); - assert_copy::<&'static (Dummy+Copy)>(); + assert_copy::<&'a (Dummy+Send)>(); + assert_copy::<&'static (Dummy+Send)>(); // owned object types are not ok assert_copy::>(); //~ ERROR : std::marker::Copy` is not satisfied - assert_copy::>(); //~ ERROR : std::marker::Copy` is not satisfied + assert_copy::>(); //~ ERROR : std::marker::Copy` is not satisfied // mutable object types are not ok - assert_copy::<&'a mut (Dummy+Copy)>(); //~ ERROR : std::marker::Copy` is not satisfied + assert_copy::<&'a mut (Dummy+Send)>(); //~ ERROR : std::marker::Copy` is not satisfied // unsafe ptrs are ok assert_copy::<*const isize>(); diff --git a/src/test/compile-fail/kindck-impl-type-params.rs b/src/test/compile-fail/kindck-impl-type-params.rs index 53ad4d1163..2a86cdef98 100644 --- a/src/test/compile-fail/kindck-impl-type-params.rs +++ b/src/test/compile-fail/kindck-impl-type-params.rs @@ -27,12 +27,14 @@ fn f(val: T) { let t: S = S(marker::PhantomData); let a = &t as &Gettable; //~^ ERROR : std::marker::Send` is not satisfied + //~^^ ERROR : std::marker::Copy` is not satisfied } fn g(val: T) { let t: S = S(marker::PhantomData); let a: &Gettable = &t; //~^ ERROR : std::marker::Send` is not satisfied + //~^^ ERROR : std::marker::Copy` is not satisfied } fn foo<'a>() { diff --git a/src/test/compile-fail/lifetime-elision-return-type-requires-explicit-lifetime.rs b/src/test/compile-fail/lifetime-elision-return-type-requires-explicit-lifetime.rs index be4166e43b..7355c70ff9 100644 --- a/src/test/compile-fail/lifetime-elision-return-type-requires-explicit-lifetime.rs +++ b/src/test/compile-fail/lifetime-elision-return-type-requires-explicit-lifetime.rs @@ -11,7 +11,6 @@ // Lifetime annotation needed because we have no arguments. fn f() -> &isize { //~ ERROR missing lifetime specifier //~^ HELP there is no value for it to be borrowed from -//~| HELP run `rustc --explain E0106` to see a detailed explanation //~| HELP consider giving it a 'static lifetime panic!() } @@ -19,7 +18,6 @@ fn f() -> &isize { //~ ERROR missing lifetime specifier // Lifetime annotation needed because we have two by-reference parameters. 
fn g(_x: &isize, _y: &isize) -> &isize { //~ ERROR missing lifetime specifier //~^ HELP the signature does not say whether it is borrowed from `_x` or `_y` -//~| HELP run `rustc --explain E0106` to see a detailed explanation panic!() } @@ -31,13 +29,11 @@ struct Foo<'a> { // and one on the reference. fn h(_x: &Foo) -> &isize { //~ ERROR missing lifetime specifier //~^ HELP the signature does not say which one of `_x`'s 2 elided lifetimes it is borrowed from -//~| HELP run `rustc --explain E0106` to see a detailed explanation panic!() } fn i(_x: isize) -> &isize { //~ ERROR missing lifetime specifier //~^ HELP this function's return type contains a borrowed value -//~| HELP run `rustc --explain E0106` to see a detailed explanation //~| HELP consider giving it an explicit bounded or 'static lifetime panic!() } diff --git a/src/test/compile-fail/lint-change-warnings.rs b/src/test/compile-fail/lint-change-warnings.rs index 441a841070..19e253e3b8 100644 --- a/src/test/compile-fail/lint-change-warnings.rs +++ b/src/test/compile-fail/lint-change-warnings.rs @@ -27,5 +27,5 @@ fn bar() { #[forbid(warnings)] fn baz() { - while true {} //~ ERROR: warnings + while true {} //~ ERROR: infinite } diff --git a/src/test/compile-fail/lint-exceeding-bitshifts.rs b/src/test/compile-fail/lint-exceeding-bitshifts.rs index e1ed21877c..6d5abc944e 100644 --- a/src/test/compile-fail/lint-exceeding-bitshifts.rs +++ b/src/test/compile-fail/lint-exceeding-bitshifts.rs @@ -53,6 +53,7 @@ fn main() { let n = n << 8; //~ ERROR: bitshift exceeds the type's number of bits let n = 1u8 << -8; //~ ERROR: bitshift exceeds the type's number of bits + //~^ WARN: attempted to shift by a negative amount let n = 1u8 << (4+3); let n = 1u8 << (4+4); //~ ERROR: bitshift exceeds the type's number of bits diff --git a/src/test/compile-fail/lint-malformed.rs b/src/test/compile-fail/lint-malformed.rs index 592e2b1190..ad5e3aa3f0 100644 --- a/src/test/compile-fail/lint-malformed.rs +++ b/src/test/compile-fail/lint-malformed.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-#![deny = "foo"] //~ ERR malformed lint attribute -#![allow(bar = "baz")] //~ ERR malformed lint attribute +#![deny = "foo"] //~ ERROR malformed lint attribute +#![allow(bar = "baz")] //~ ERROR malformed lint attribute fn main() { } diff --git a/src/test/compile-fail/lint-removed-allow.rs b/src/test/compile-fail/lint-removed-allow.rs index 159a3d7460..1498ed4d17 100644 --- a/src/test/compile-fail/lint-removed-allow.rs +++ b/src/test/compile-fail/lint-removed-allow.rs @@ -14,4 +14,4 @@ #[deny(raw_pointer_derive)] #[allow(renamed_and_removed_lints)] #[deny(unused_variables)] -fn main() { let unused = (); } //~ ERR unused +fn main() { let unused = (); } //~ ERROR unused diff --git a/src/test/compile-fail/lint-removed.rs b/src/test/compile-fail/lint-removed.rs index 9069356604..aa7f535aa6 100644 --- a/src/test/compile-fail/lint-removed.rs +++ b/src/test/compile-fail/lint-removed.rs @@ -15,4 +15,4 @@ #[deny(raw_pointer_derive)] //~ WARN raw_pointer_derive has been removed #[deny(unused_variables)] -fn main() { let unused = (); } //~ ERR unused +fn main() { let unused = (); } //~ ERROR unused diff --git a/src/test/compile-fail/lint-renamed-allow.rs b/src/test/compile-fail/lint-renamed-allow.rs index a2426d80f7..ea26c3656e 100644 --- a/src/test/compile-fail/lint-renamed-allow.rs +++ b/src/test/compile-fail/lint-renamed-allow.rs @@ -14,4 +14,4 @@ #[deny(unknown_features)] #[allow(renamed_and_removed_lints)] #[deny(unused)] -fn main() { let unused = (); } //~ ERR unused +fn main() { let unused = (); } //~ ERROR unused diff --git a/src/test/compile-fail/lint-renamed.rs b/src/test/compile-fail/lint-renamed.rs index 2e85a323a1..9e10ddf89a 100644 --- a/src/test/compile-fail/lint-renamed.rs +++ b/src/test/compile-fail/lint-renamed.rs @@ -10,4 +10,4 @@ #[deny(unknown_features)] //~ WARN lint unknown_features has been renamed to unused_features #[deny(unused)] -fn main() { let unused = (); } //~ ERR unused +fn main() { let unused = (); } //~ ERROR unused diff --git a/src/test/compile-fail/lint-type-overflow2.rs b/src/test/compile-fail/lint-type-overflow2.rs index 83300f18c3..e99dfb9aa0 100644 --- a/src/test/compile-fail/lint-type-overflow2.rs +++ b/src/test/compile-fail/lint-type-overflow2.rs @@ -10,10 +10,12 @@ // #![deny(overflowing_literals)] +#![deny(const_err)] #[allow(unused_variables)] fn main() { let x2: i8 = --128; //~ error: literal out of range for i8 + //~^ error: attempted to negate with overflow let x = -3.40282348e+38_f32; //~ error: literal out of range for f32 let x = 3.40282348e+38_f32; //~ error: literal out of range for f32 diff --git a/src/test/compile-fail/lint-unknown-lint.rs b/src/test/compile-fail/lint-unknown-lint.rs index 8f20a2c8ab..2de8d849d1 100644 --- a/src/test/compile-fail/lint-unknown-lint.rs +++ b/src/test/compile-fail/lint-unknown-lint.rs @@ -10,4 +10,4 @@ #![allow(not_a_real_lint)] //~ WARN unknown lint #![deny(unused)] -fn main() { let unused = (); } //~ ERR unused variable +fn main() { let unused = (); } //~ ERROR unused variable diff --git a/src/test/compile-fail/lint-unused-imports.rs b/src/test/compile-fail/lint-unused-imports.rs index 3c1f4b0430..40322f5a5b 100644 --- a/src/test/compile-fail/lint-unused-imports.rs +++ b/src/test/compile-fail/lint-unused-imports.rs @@ -24,6 +24,8 @@ use test::A; //~ ERROR unused import // Be sure that if we just bring some methods into scope that they're also // counted as being used. use test::B; +// But only when actually used: do not get confused by the method with the same name. 
+use test::B2; //~ ERROR unused import // Make sure this import is warned about when at least one of its imported names // is unused @@ -37,6 +39,7 @@ mod test2 { mod test { pub trait A { fn a(&self) {} } pub trait B { fn b(&self) {} } + pub trait B2 { fn b(&self) {} } pub struct C; impl A for C {} impl B for C {} diff --git a/src/test/compile-fail/liveness-return-last-stmt-semi.rs b/src/test/compile-fail/liveness-return-last-stmt-semi.rs index 343622c5c1..03733cc2eb 100644 --- a/src/test/compile-fail/liveness-return-last-stmt-semi.rs +++ b/src/test/compile-fail/liveness-return-last-stmt-semi.rs @@ -12,19 +12,15 @@ macro_rules! test { () => { fn foo() -> i32 { 1; } } } //~^ ERROR not all control paths return a value - //~^^ HELP consider removing this semicolon - //~^^^ HELP run `rustc --explain E0269` to see a + //~| HELP consider removing this semicolon fn no_return() -> i32 {} //~ ERROR not all control paths return a value - //~^ HELP run `rustc --explain E0269` to see a detailed explanation fn bar(x: u32) -> u32 { //~ ERROR not all control paths return a value - //~^ HELP run `rustc --explain E0269` to see a detailed explanation x * 2; //~ HELP consider removing this semicolon } fn baz(x: u64) -> u32 { //~ ERROR not all control paths return a value - //~^ HELP run `rustc --explain E0269` to see a detailed explanation x * 2; } diff --git a/src/test/compile-fail/macro-backtrace-invalid-internals.rs b/src/test/compile-fail/macro-backtrace-invalid-internals.rs index 5069ec7d28..ebec204184 100644 --- a/src/test/compile-fail/macro-backtrace-invalid-internals.rs +++ b/src/test/compile-fail/macro-backtrace-invalid-internals.rs @@ -36,13 +36,13 @@ macro_rules! fake_method_expr { macro_rules! fake_field_expr { () => { - 1.fake + 1.fake //~ ERROR no field with that name } } macro_rules! fake_anon_field_expr { () => { - (1).0 + (1).0 //~ ERROR type was not a tuple } } @@ -52,8 +52,6 @@ fn main() { fake_anon_field_stmt!(); //~ NOTE in this expansion of let _ = fake_method_expr!(); //~ NOTE in this expansion of - let _ = fake_field_expr!(); //~ ERROR no field with that name - //~^ NOTE in this expansion of - let _ = fake_anon_field_expr!(); //~ ERROR type was not a tuple - //~^ NOTE in this expansion of + let _ = fake_field_expr!(); //~ NOTE in this expansion of + let _ = fake_anon_field_expr!(); //~ NOTE in this expansion of } diff --git a/src/test/compile-fail/macro-backtrace-nested.rs b/src/test/compile-fail/macro-backtrace-nested.rs index c935ccef05..c2a270ea9f 100644 --- a/src/test/compile-fail/macro-backtrace-nested.rs +++ b/src/test/compile-fail/macro-backtrace-nested.rs @@ -12,20 +12,19 @@ // we replace the span of the expanded expression with that of the call site. macro_rules! nested_expr { - () => (fake) + () => (fake) //~ ERROR unresolved name + //~^ ERROR unresolved name } macro_rules! call_nested_expr { - () => (nested_expr!()) + () => (nested_expr!()) //~ NOTE in this expansion of nested_expr! } macro_rules! call_nested_expr_sum { - () => { 1 + nested_expr!(); } //~ ERROR unresolved name - //~^ NOTE in this expansion of nested_expr! + () => { 1 + nested_expr!(); } //~ NOTE in this expansion of nested_expr! } fn main() { - 1 + call_nested_expr!(); //~ ERROR unresolved name - //~^ NOTE in this expansion of call_nested_expr! + 1 + call_nested_expr!(); //~ NOTE in this expansion of call_nested_expr! 
call_nested_expr_sum!(); //~ NOTE in this expansion of } diff --git a/src/test/compile-fail/macro-backtrace-println.rs b/src/test/compile-fail/macro-backtrace-println.rs index a485b9056d..c2277c3e6d 100644 --- a/src/test/compile-fail/macro-backtrace-println.rs +++ b/src/test/compile-fail/macro-backtrace-println.rs @@ -21,11 +21,11 @@ macro_rules! myprint { } macro_rules! myprintln { - ($fmt:expr) => (myprint!(concat!($fmt, "\n"))); //~ ERROR invalid reference to argument `0` - //~^ NOTE in this expansion of myprint! - //~^^ NOTE in this expansion of concat! + ($fmt:expr) => (myprint!(concat!($fmt, "\n"))); //~ NOTE in this expansion of myprint! + //~^ NOTE in this expansion of concat! } fn main() { - myprintln!("{}"); //~ NOTE in this expansion of + myprintln!("{}"); //~ ERROR invalid reference to argument `0` + //~^ NOTE in this expansion of } diff --git a/src/test/compile-fail/macro-context.rs b/src/test/compile-fail/macro-context.rs index 8fa5e0a708..5d07f0747f 100644 --- a/src/test/compile-fail/macro-context.rs +++ b/src/test/compile-fail/macro-context.rs @@ -12,7 +12,7 @@ // (typeof used because it's surprisingly hard to find an unparsed token after a stmt) macro_rules! m { - () => ( i ; typeof ); //~ ERROR `typeof` is a reserved keyword + () => ( i ; typeof ); //~ ERROR expected expression, found reserved keyword `typeof` //~| ERROR macro expansion ignores token `typeof` //~| ERROR macro expansion ignores token `;` //~| ERROR macro expansion ignores token `;` diff --git a/src/test/compile-fail/macro-follow.rs b/src/test/compile-fail/macro-follow.rs index 35944bada4..f985340c52 100644 --- a/src/test/compile-fail/macro-follow.rs +++ b/src/test/compile-fail/macro-follow.rs @@ -55,7 +55,7 @@ macro_rules! follow_expr { ($e:expr $m:meta) => {}; //~ERROR `$e:expr` is followed by `$m:meta` } // FOLLOW(ty) = {OpenDelim(Brace), Comma, FatArrow, Colon, Eq, Gt, Semi, Or, -// Ident(as), Ident(where), OpenDelim(Bracket)} +// Ident(as), Ident(where), OpenDelim(Bracket), Nonterminal(Block)} macro_rules! follow_ty { ($t:ty ()) => {}; //~WARN `$t:ty` is followed by `(` ($t:ty []) => {}; // ok (RFC 1462) @@ -67,7 +67,7 @@ macro_rules! follow_ty { ($t:ty $t:ty) => {}; //~ERROR `$t:ty` is followed by `$t:ty` ($t:ty $s:stmt) => {}; //~ERROR `$t:ty` is followed by `$s:stmt` ($t:ty $p:path) => {}; //~ERROR `$t:ty` is followed by `$p:path` - ($t:ty $b:block) => {}; //~ERROR `$t:ty` is followed by `$b:block` + ($t:ty $b:block) => {}; // ok (RFC 1494) ($t:ty $i:ident) => {}; //~ERROR `$t:ty` is followed by `$i:ident` ($t:ty $t:tt) => {}; //~ERROR `$t:ty` is followed by `$t:tt` ($t:ty $i:item) => {}; //~ERROR `$t:ty` is followed by `$i:item` @@ -109,7 +109,7 @@ macro_rules! follow_path { ($p:path $t:ty) => {}; //~ERROR `$p:path` is followed by `$t:ty` ($p:path $s:stmt) => {}; //~ERROR `$p:path` is followed by `$s:stmt` ($p:path $p:path) => {}; //~ERROR `$p:path` is followed by `$p:path` - ($p:path $b:block) => {}; //~ERROR `$p:path` is followed by `$b:block` + ($p:path $b:block) => {}; // ok (RFC 1494) ($p:path $i:ident) => {}; //~ERROR `$p:path` is followed by `$i:ident` ($p:path $t:tt) => {}; //~ERROR `$p:path` is followed by `$t:tt` ($p:path $i:item) => {}; //~ERROR `$p:path` is followed by `$i:item` diff --git a/src/test/compile-fail/macro-incomplete-parse.rs b/src/test/compile-fail/macro-incomplete-parse.rs index 0d5f907964..8d515622e5 100644 --- a/src/test/compile-fail/macro-incomplete-parse.rs +++ b/src/test/compile-fail/macro-incomplete-parse.rs @@ -19,7 +19,7 @@ macro_rules! 
ignored_item { } macro_rules! ignored_expr { - () => ( 1, //~ ERROR unexpected token: `,` + () => ( 1, //~ ERROR expected expression, found `,` 2 ) } diff --git a/src/test/compile-fail/main-wrong-type-2.rs b/src/test/compile-fail/main-wrong-type-2.rs index 09d5765a80..7434a6c960 100644 --- a/src/test/compile-fail/main-wrong-type-2.rs +++ b/src/test/compile-fail/main-wrong-type-2.rs @@ -9,5 +9,5 @@ // except according to those terms. fn main() -> char { -//~^ ERROR: main function expects type +//~^ ERROR: main function has wrong type } diff --git a/src/test/compile-fail/main-wrong-type.rs b/src/test/compile-fail/main-wrong-type.rs index d9c617a717..431b855d51 100644 --- a/src/test/compile-fail/main-wrong-type.rs +++ b/src/test/compile-fail/main-wrong-type.rs @@ -14,5 +14,5 @@ struct S { } fn main(foo: S) { -//~^ ERROR: main function expects type +//~^ ERROR: main function has wrong type } diff --git a/src/test/compile-fail/match-range-fail.rs b/src/test/compile-fail/match-range-fail.rs index 05b870b8f4..526aa83dec 100644 --- a/src/test/compile-fail/match-range-fail.rs +++ b/src/test/compile-fail/match-range-fail.rs @@ -28,6 +28,5 @@ fn main() { _ => { } }; //~^^^ ERROR mismatched types in range - //~| expected char - //~| found integral variable + //~| expected char, found integral variable } diff --git a/src/test/compile-fail/match-struct.rs b/src/test/compile-fail/match-struct.rs index 5bda378968..0dbdda1f9b 100644 --- a/src/test/compile-fail/match-struct.rs +++ b/src/test/compile-fail/match-struct.rs @@ -16,10 +16,9 @@ fn main() { match (S { a: 1 }) { E::C(_) => (), //~^ ERROR mismatched types - //~| expected `S` - //~| found `E` - //~| expected struct `S` - //~| found enum `E` + //~| expected type `S` + //~| found type `E` + //~| expected struct `S`, found enum `E` _ => () } } diff --git a/src/test/compile-fail/match-vec-mismatch-2.rs b/src/test/compile-fail/match-vec-mismatch-2.rs index 0bbba88612..2831499c73 100644 --- a/src/test/compile-fail/match-vec-mismatch-2.rs +++ b/src/test/compile-fail/match-vec-mismatch-2.rs @@ -14,9 +14,8 @@ fn main() { match () { [()] => { } //~^ ERROR mismatched types - //~| expected `()` - //~| found `&[_]` - //~| expected () - //~| found &-ptr + //~| expected type `()` + //~| found type `&[_]` + //~| expected (), found &-ptr } } diff --git a/src/test/compile-fail/method-self-arg-1.rs b/src/test/compile-fail/method-self-arg-1.rs index 57a96bb9a2..ffa5287d4b 100644 --- a/src/test/compile-fail/method-self-arg-1.rs +++ b/src/test/compile-fail/method-self-arg-1.rs @@ -19,13 +19,11 @@ impl Foo { fn main() { let x = Foo; Foo::bar(x); //~ ERROR mismatched types - //~| expected `&Foo` - //~| found `Foo` - //~| expected &-ptr - //~| found struct `Foo` + //~| expected type `&Foo` + //~| found type `Foo` + //~| expected &-ptr, found struct `Foo` Foo::bar(&42); //~ ERROR mismatched types - //~| expected `&Foo` - //~| found `&_` - //~| expected struct `Foo` - //~| found integral variable + //~| expected type `&Foo` + //~| found type `&_` + //~| expected struct `Foo`, found integral variable } diff --git a/src/test/compile-fail/moves-based-on-type-block-bad.rs b/src/test/compile-fail/moves-based-on-type-block-bad.rs index a1e876594c..deaff3c352 100644 --- a/src/test/compile-fail/moves-based-on-type-block-bad.rs +++ b/src/test/compile-fail/moves-based-on-type-block-bad.rs @@ -32,8 +32,10 @@ fn main() { loop { f(&s, |hellothere| { match hellothere.x { //~ ERROR cannot move out + //~| cannot move out of borrowed content box E::Foo(_) => {} - box E::Bar(x) => 
println!("{}", x.to_string()), //~ NOTE attempting to move value to here + box E::Bar(x) => println!("{}", x.to_string()), + //~^ NOTE to prevent move box E::Baz => {} } }) diff --git a/src/test/compile-fail/moves-based-on-type-distribute-copy-over-paren.rs b/src/test/compile-fail/moves-based-on-type-distribute-copy-over-paren.rs index f30360af46..02c09aa7d6 100644 --- a/src/test/compile-fail/moves-based-on-type-distribute-copy-over-paren.rs +++ b/src/test/compile-fail/moves-based-on-type-distribute-copy-over-paren.rs @@ -16,13 +16,17 @@ fn touch(_a: &A) {} fn f00() { let x = "hi".to_string(); - let _y = Foo { f:x }; //~ NOTE `x` moved here + let _y = Foo { f:x }; + //~^ value moved here touch(&x); //~ ERROR use of moved value: `x` + //~^ value used here after move + //~| move occurs because `x` has type `std::string::String` } fn f05() { let x = "hi".to_string(); - let _y = Foo { f:(((x))) }; //~ NOTE `x` moved here + let _y = Foo { f:(((x))) }; + //~^ value moved here touch(&x); //~ ERROR use of moved value: `x` } diff --git a/src/test/compile-fail/moves-based-on-type-match-bindings.rs b/src/test/compile-fail/moves-based-on-type-match-bindings.rs index 7d209467ca..bcbb8dbfad 100644 --- a/src/test/compile-fail/moves-based-on-type-match-bindings.rs +++ b/src/test/compile-fail/moves-based-on-type-match-bindings.rs @@ -24,6 +24,8 @@ fn f10() { }; touch(&x); //~ ERROR use of partially moved value: `x` + //~^ value used here after move + //~| move occurs because `x.f` has type `std::string::String` } fn main() {} diff --git a/src/test/compile-fail/mut-pattern-mismatched.rs b/src/test/compile-fail/mut-pattern-mismatched.rs index 9eb24c8196..63e7dbd30d 100644 --- a/src/test/compile-fail/mut-pattern-mismatched.rs +++ b/src/test/compile-fail/mut-pattern-mismatched.rs @@ -14,8 +14,8 @@ fn main() { // (separate lines to ensure the spans are accurate) let &_ //~ ERROR mismatched types - //~| expected `&mut _` - //~| found `&_` + //~| expected type `&mut _` + //~| found type `&_` //~| values differ in mutability = foo; let &mut _ = foo; @@ -23,8 +23,8 @@ fn main() { let bar = &1; let &_ = bar; let &mut _ //~ ERROR mismatched types - //~| expected `&_` - //~| found `&mut _` + //~| expected type `&_` + //~| found type `&mut _` //~| values differ in mutability = bar; } diff --git a/src/test/compile-fail/mut-suggestion.rs b/src/test/compile-fail/mut-suggestion.rs index efd796e119..242ad7aee8 100644 --- a/src/test/compile-fail/mut-suggestion.rs +++ b/src/test/compile-fail/mut-suggestion.rs @@ -17,14 +17,16 @@ impl S { } fn func(arg: S) { - //~^ HELP use `mut` as shown - //~| SUGGESTION fn func(mut arg: S) { - arg.mutate(); //~ ERROR cannot borrow immutable argument + //~^ here to make mutable + arg.mutate(); + //~^ ERROR cannot borrow immutable argument + //~| cannot borrow mutably } fn main() { let local = S; - //~^ HELP use `mut` as shown - //~| SUGGESTION let mut local = S; - local.mutate(); //~ ERROR cannot borrow immutable local variable + //~^ here to make mutable + local.mutate(); + //~^ ERROR cannot borrow immutable local variable + //~| cannot borrow mutably } diff --git a/src/test/compile-fail/name-clash-nullary.rs b/src/test/compile-fail/name-clash-nullary.rs index 1250318a72..662bb7bfe5 100644 --- a/src/test/compile-fail/name-clash-nullary.rs +++ b/src/test/compile-fail/name-clash-nullary.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. 
-// error-pattern:declaration of `None` shadows +// error-pattern:cannot be named the same use std::option::*; fn main() { diff --git a/src/test/compile-fail/noexporttypeexe.rs b/src/test/compile-fail/noexporttypeexe.rs index 687e1e49ee..c950ef5b68 100644 --- a/src/test/compile-fail/noexporttypeexe.rs +++ b/src/test/compile-fail/noexporttypeexe.rs @@ -19,8 +19,7 @@ fn main() { // not convertible to a path. let x: isize = noexporttypelib::foo(); //~^ ERROR mismatched types - //~| expected `isize` - //~| found `std::option::Option` - //~| expected isize - //~| found enum `std::option::Option` + //~| expected type `isize` + //~| found type `std::option::Option` + //~| expected isize, found enum `std::option::Option` } diff --git a/src/test/compile-fail/non-constant-in-const-path.rs b/src/test/compile-fail/non-constant-in-const-path.rs index 124a2ffc18..ee88168515 100644 --- a/src/test/compile-fail/non-constant-in-const-path.rs +++ b/src/test/compile-fail/non-constant-in-const-path.rs @@ -12,7 +12,6 @@ fn main() { let x = 0; match 1 { 0 ... x => {} - //~^ ERROR non-constant path in constant expr - //~| ERROR paths in constants may only refer to constants or functions + //~^ ERROR non-constant path in constant expression }; } diff --git a/src/test/compile-fail/non-interger-atomic.rs b/src/test/compile-fail/non-interger-atomic.rs index d2376eecd9..0b7b33de42 100644 --- a/src/test/compile-fail/non-interger-atomic.rs +++ b/src/test/compile-fail/non-interger-atomic.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(core_intrinsics, rustc_attrs)] +#![feature(core_intrinsics)] #![allow(warnings)] use std::intrinsics; @@ -18,97 +18,81 @@ struct Foo(i64); type Bar = &'static Fn(); type Quux = [u8; 100]; -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_bool_load(p: &mut bool, v: bool) { intrinsics::atomic_load(p); //~^ ERROR `atomic_load` intrinsic: expected basic integer type, found `bool` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_bool_store(p: &mut bool, v: bool) { intrinsics::atomic_store(p, v); //~^ ERROR `atomic_store` intrinsic: expected basic integer type, found `bool` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_bool_xchg(p: &mut bool, v: bool) { intrinsics::atomic_xchg(p, v); //~^ ERROR `atomic_xchg` intrinsic: expected basic integer type, found `bool` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_bool_cxchg(p: &mut bool, v: bool) { intrinsics::atomic_cxchg(p, v, v); //~^ ERROR `atomic_cxchg` intrinsic: expected basic integer type, found `bool` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_Foo_load(p: &mut Foo, v: Foo) { intrinsics::atomic_load(p); //~^ ERROR `atomic_load` intrinsic: expected basic integer type, found `Foo` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_Foo_store(p: &mut Foo, v: Foo) { intrinsics::atomic_store(p, v); //~^ ERROR `atomic_store` intrinsic: expected basic integer type, found `Foo` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_Foo_xchg(p: &mut Foo, v: Foo) { intrinsics::atomic_xchg(p, v); //~^ ERROR `atomic_xchg` intrinsic: expected basic integer type, found `Foo` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. 
unsafe fn test_Foo_cxchg(p: &mut Foo, v: Foo) { intrinsics::atomic_cxchg(p, v, v); //~^ ERROR `atomic_cxchg` intrinsic: expected basic integer type, found `Foo` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_Bar_load(p: &mut Bar, v: Bar) { intrinsics::atomic_load(p); //~^ ERROR expected basic integer type, found `&'static std::ops::Fn() + 'static` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_Bar_store(p: &mut Bar, v: Bar) { intrinsics::atomic_store(p, v); //~^ ERROR expected basic integer type, found `&'static std::ops::Fn() + 'static` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_Bar_xchg(p: &mut Bar, v: Bar) { intrinsics::atomic_xchg(p, v); //~^ ERROR expected basic integer type, found `&'static std::ops::Fn() + 'static` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_Bar_cxchg(p: &mut Bar, v: Bar) { intrinsics::atomic_cxchg(p, v, v); //~^ ERROR expected basic integer type, found `&'static std::ops::Fn() + 'static` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_Quux_load(p: &mut Quux, v: Quux) { intrinsics::atomic_load(p); //~^ ERROR `atomic_load` intrinsic: expected basic integer type, found `[u8; 100]` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_Quux_store(p: &mut Quux, v: Quux) { intrinsics::atomic_store(p, v); //~^ ERROR `atomic_store` intrinsic: expected basic integer type, found `[u8; 100]` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. unsafe fn test_Quux_xchg(p: &mut Quux, v: Quux) { intrinsics::atomic_xchg(p, v); //~^ ERROR `atomic_xchg` intrinsic: expected basic integer type, found `[u8; 100]` } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. 
unsafe fn test_Quux_cxchg(p: &mut Quux, v: Quux) { intrinsics::atomic_cxchg(p, v, v); //~^ ERROR `atomic_cxchg` intrinsic: expected basic integer type, found `[u8; 100]` diff --git a/src/test/compile-fail/not-panic-safe-2.rs b/src/test/compile-fail/not-panic-safe-2.rs index 922d70b801..58c0791b84 100644 --- a/src/test/compile-fail/not-panic-safe-2.rs +++ b/src/test/compile-fail/not-panic-safe-2.rs @@ -18,6 +18,7 @@ use std::cell::RefCell; fn assert() {} fn main() { - assert::>>(); //~ ERROR E0277 + assert::>>(); + //~^ ERROR `std::cell::UnsafeCell: std::panic::RefUnwindSafe` is not satisfied + //~^^ ERROR `std::cell::UnsafeCell: std::panic::RefUnwindSafe` is not satisfied } - diff --git a/src/test/compile-fail/not-panic-safe-3.rs b/src/test/compile-fail/not-panic-safe-3.rs index e5de03a084..481ffb8028 100644 --- a/src/test/compile-fail/not-panic-safe-3.rs +++ b/src/test/compile-fail/not-panic-safe-3.rs @@ -18,5 +18,7 @@ use std::cell::RefCell; fn assert() {} fn main() { - assert::>>(); //~ ERROR E0277 + assert::>>(); + //~^ ERROR `std::cell::UnsafeCell: std::panic::RefUnwindSafe` is not satisfied + //~^^ ERROR `std::cell::UnsafeCell: std::panic::RefUnwindSafe` is not satisfied } diff --git a/src/test/compile-fail/not-panic-safe-4.rs b/src/test/compile-fail/not-panic-safe-4.rs index c50e4b9d87..47302d3af7 100644 --- a/src/test/compile-fail/not-panic-safe-4.rs +++ b/src/test/compile-fail/not-panic-safe-4.rs @@ -17,5 +17,7 @@ use std::cell::RefCell; fn assert() {} fn main() { - assert::<&RefCell>(); //~ ERROR E0277 + assert::<&RefCell>(); + //~^ ERROR `std::cell::UnsafeCell: std::panic::RefUnwindSafe` is not satisfied + //~^^ ERROR `std::cell::UnsafeCell: std::panic::RefUnwindSafe` is not satisfied } diff --git a/src/test/compile-fail/not-panic-safe-6.rs b/src/test/compile-fail/not-panic-safe-6.rs index 0fc912dc95..fe13b0a75c 100644 --- a/src/test/compile-fail/not-panic-safe-6.rs +++ b/src/test/compile-fail/not-panic-safe-6.rs @@ -17,6 +17,7 @@ use std::cell::RefCell; fn assert() {} fn main() { - assert::<*mut RefCell>(); //~ ERROR E0277 + assert::<*mut RefCell>(); + //~^ ERROR `std::cell::UnsafeCell: std::panic::RefUnwindSafe` is not satisfied + //~^^ ERROR `std::cell::UnsafeCell: std::panic::RefUnwindSafe` is not satisfied } - diff --git a/src/test/compile-fail/occurs-check-2.rs b/src/test/compile-fail/occurs-check-2.rs index fd2903a85d..5cb60079fa 100644 --- a/src/test/compile-fail/occurs-check-2.rs +++ b/src/test/compile-fail/occurs-check-2.rs @@ -16,7 +16,7 @@ fn main() { g = f; f = box g; //~^ ERROR mismatched types - //~| expected `_` - //~| found `Box<_>` + //~| expected type `_` + //~| found type `Box<_>` //~| cyclic type of infinite size } diff --git a/src/test/compile-fail/occurs-check.rs b/src/test/compile-fail/occurs-check.rs index 036fcc1b9d..499124cb05 100644 --- a/src/test/compile-fail/occurs-check.rs +++ b/src/test/compile-fail/occurs-check.rs @@ -14,7 +14,7 @@ fn main() { let f; f = box f; //~^ ERROR mismatched types - //~| expected `_` - //~| found `Box<_>` + //~| expected type `_` + //~| found type `Box<_>` //~| cyclic type of infinite size } diff --git a/src/test/compile-fail/on-unimplemented-bad-anno.rs b/src/test/compile-fail/on-unimplemented/bad-annotation.rs similarity index 100% rename from src/test/compile-fail/on-unimplemented-bad-anno.rs rename to src/test/compile-fail/on-unimplemented/bad-annotation.rs diff --git a/src/test/compile-fail/on-unimplemented/multiple-impls.rs b/src/test/compile-fail/on-unimplemented/multiple-impls.rs new file mode 100644 index 
0000000000..0df8c41ffe --- /dev/null +++ b/src/test/compile-fail/on-unimplemented/multiple-impls.rs @@ -0,0 +1,55 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test if the on_unimplemented message override works + +#![feature(on_unimplemented)] +#![feature(rustc_attrs)] + +struct Foo(T); +struct Bar(T); + +#[rustc_on_unimplemented = "trait message"] +trait Index { + type Output: ?Sized; + fn index(&self, index: Idx) -> &Self::Output; +} + +#[rustc_on_unimplemented = "on impl for Foo"] +impl Index> for [i32] { + type Output = i32; + fn index(&self, _index: Foo) -> &i32 { + loop {} + } +} + +#[rustc_on_unimplemented = "on impl for Bar"] +impl Index> for [i32] { + type Output = i32; + fn index(&self, _index: Bar) -> &i32 { + loop {} + } +} + +#[rustc_error] +fn main() { + Index::index(&[] as &[i32], 2u32); + //~^ ERROR E0277 + //~| NOTE trait message + //~| NOTE required by + Index::index(&[] as &[i32], Foo(2u32)); + //~^ ERROR E0277 + //~| NOTE on impl for Foo + //~| NOTE required by + Index::index(&[] as &[i32], Bar(2u32)); + //~^ ERROR E0277 + //~| NOTE on impl for Bar + //~| NOTE required by +} diff --git a/src/test/compile-fail/on-unimplemented/on-impl.rs b/src/test/compile-fail/on-unimplemented/on-impl.rs new file mode 100644 index 0000000000..4471b625d7 --- /dev/null +++ b/src/test/compile-fail/on-unimplemented/on-impl.rs @@ -0,0 +1,35 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test if the on_unimplemented message override works + +#![feature(on_unimplemented)] +#![feature(rustc_attrs)] + +#[rustc_on_unimplemented = "invalid"] +trait Index { + type Output: ?Sized; + fn index(&self, index: Idx) -> &Self::Output; +} + +#[rustc_on_unimplemented = "a usize is required to index into a slice"] +impl Index for [i32] { + type Output = i32; + fn index(&self, index: usize) -> &i32 { + &self[index] + } +} + +#[rustc_error] +fn main() { + Index::::index(&[1, 2, 3] as &[i32], 2u32); //~ ERROR E0277 + //~| NOTE a usize is required + //~| NOTE required by +} diff --git a/src/test/compile-fail/on-unimplemented.rs b/src/test/compile-fail/on-unimplemented/on-trait.rs similarity index 100% rename from src/test/compile-fail/on-unimplemented.rs rename to src/test/compile-fail/on-unimplemented/on-trait.rs diff --git a/src/test/compile-fail/on-unimplemented/slice-index.rs b/src/test/compile-fail/on-unimplemented/slice-index.rs new file mode 100644 index 0000000000..6a8f9d471e --- /dev/null +++ b/src/test/compile-fail/on-unimplemented/slice-index.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// Test new Index error message for slices + +#![feature(rustc_attrs)] + +use std::ops::Index; + +#[rustc_error] +fn main() { + let x = &[1, 2, 3] as &[i32]; + x[1i32]; //~ ERROR E0277 + //~| NOTE slice indices are of type `usize` + x[..1i32]; //~ ERROR E0277 + //~| NOTE slice indices are of type `usize` +} diff --git a/src/test/compile-fail/panic-runtime/abort-link-to-unwind-dylib.rs b/src/test/compile-fail/panic-runtime/abort-link-to-unwind-dylib.rs new file mode 100644 index 0000000000..c3242a5082 --- /dev/null +++ b/src/test/compile-fail/panic-runtime/abort-link-to-unwind-dylib.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags:-C panic=abort -C prefer-dynamic +// ignore-musl - no dylibs here +// error-pattern:`panic_unwind` is not compiled with this crate's panic strategy + +// This is a test where the local crate, compiled with `panic=abort`, links to +// the standard library **dynamically** which is already linked against +// `panic=unwind`. We should fail because the linked panic runtime does not +// correspond with our `-C panic` option. +// +// Note that this test assumes that the dynamic version of the standard library +// is linked to `panic_unwind`, which is currently the case. + +fn main() { +} diff --git a/src/test/compile-fail/panic-runtime/auxiliary/needs-panic-runtime.rs b/src/test/compile-fail/panic-runtime/auxiliary/needs-panic-runtime.rs new file mode 100644 index 0000000000..d6c21fecf6 --- /dev/null +++ b/src/test/compile-fail/panic-runtime/auxiliary/needs-panic-runtime.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// no-prefer-dynamic + +#![feature(needs_panic_runtime)] +#![crate_type = "rlib"] +#![needs_panic_runtime] +#![no_std] diff --git a/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-abort.rs b/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-abort.rs new file mode 100644 index 0000000000..3b74156b6b --- /dev/null +++ b/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-abort.rs @@ -0,0 +1,27 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// compile-flags:-C panic=abort +// no-prefer-dynamic + +#![feature(panic_runtime)] +#![crate_type = "rlib"] + +#![no_std] +#![panic_runtime] + +#[no_mangle] +pub extern fn __rust_maybe_catch_panic() {} + +#[no_mangle] +pub extern fn __rust_start_panic() {} + +#[no_mangle] +pub extern fn rust_eh_personality() {} diff --git a/src/test/auxiliary/lang-item-public.rs b/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-lang-items.rs similarity index 62% rename from src/test/auxiliary/lang-item-public.rs rename to src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-lang-items.rs index 41ceb924ab..fbf70b3d3f 100644 --- a/src/test/auxiliary/lang-item-public.rs +++ b/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-lang-items.rs @@ -1,4 +1,4 @@ -// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -8,21 +8,16 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(libc)] +// no-prefer-dynamic + +#![crate_type = "rlib"] + #![no_std] #![feature(lang_items)] -extern crate core; -extern crate libc; - +#[lang = "panic_fmt"] +fn panic_fmt() {} #[lang = "eh_personality"] -extern fn eh_personality() {} - +fn eh_personality() {} #[lang = "eh_unwind_resume"] -extern fn eh_unwind_resume() {} - -#[lang = "panic_fmt"] -extern fn rust_begin_unwind(msg: core::fmt::Arguments, file: &'static str, - line: u32) -> ! { - loop {} -} +fn eh_unwind_resume() {} diff --git a/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-unwind.rs b/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-unwind.rs new file mode 100644 index 0000000000..4bb36839d9 --- /dev/null +++ b/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-unwind.rs @@ -0,0 +1,27 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags:-C panic=unwind +// no-prefer-dynamic + +#![feature(panic_runtime)] +#![crate_type = "rlib"] + +#![no_std] +#![panic_runtime] + +#[no_mangle] +pub extern fn __rust_maybe_catch_panic() {} + +#[no_mangle] +pub extern fn __rust_start_panic() {} + +#[no_mangle] +pub extern fn rust_eh_personality() {} diff --git a/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-unwind2.rs b/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-unwind2.rs new file mode 100644 index 0000000000..4bb36839d9 --- /dev/null +++ b/src/test/compile-fail/panic-runtime/auxiliary/panic-runtime-unwind2.rs @@ -0,0 +1,27 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+
+// compile-flags:-C panic=unwind
+// no-prefer-dynamic
+
+#![feature(panic_runtime)]
+#![crate_type = "rlib"]
+
+#![no_std]
+#![panic_runtime]
+
+#[no_mangle]
+pub extern fn __rust_maybe_catch_panic() {}
+
+#[no_mangle]
+pub extern fn __rust_start_panic() {}
+
+#[no_mangle]
+pub extern fn rust_eh_personality() {}
diff --git a/src/test/compile-fail/panic-runtime/auxiliary/runtime-depending-on-panic-runtime.rs b/src/test/compile-fail/panic-runtime/auxiliary/runtime-depending-on-panic-runtime.rs
new file mode 100644
index 0000000000..b90dec9281
--- /dev/null
+++ b/src/test/compile-fail/panic-runtime/auxiliary/runtime-depending-on-panic-runtime.rs
@@ -0,0 +1,18 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![feature(panic_runtime)]
+#![crate_type = "rlib"]
+#![panic_runtime]
+#![no_std]
+
+extern crate needs_panic_runtime;
diff --git a/src/test/compile-fail/panic-runtime/auxiliary/wants-panic-runtime-abort.rs b/src/test/compile-fail/panic-runtime/auxiliary/wants-panic-runtime-abort.rs
new file mode 100644
index 0000000000..e1902e44a6
--- /dev/null
+++ b/src/test/compile-fail/panic-runtime/auxiliary/wants-panic-runtime-abort.rs
@@ -0,0 +1,17 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags:-C panic=abort
+// no-prefer-dynamic
+
+#![crate_type = "rlib"]
+#![no_std]
+
+extern crate panic_runtime_abort;
diff --git a/src/test/compile-fail/panic-runtime/auxiliary/wants-panic-runtime-unwind.rs b/src/test/compile-fail/panic-runtime/auxiliary/wants-panic-runtime-unwind.rs
new file mode 100644
index 0000000000..2183338b24
--- /dev/null
+++ b/src/test/compile-fail/panic-runtime/auxiliary/wants-panic-runtime-unwind.rs
@@ -0,0 +1,16 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![crate_type = "rlib"]
+#![no_std]
+
+extern crate panic_runtime_unwind;
diff --git a/src/test/compile-fail/panic-runtime/bad-panic-flag1.rs b/src/test/compile-fail/panic-runtime/bad-panic-flag1.rs
new file mode 100644
index 0000000000..f067b6b834
--- /dev/null
+++ b/src/test/compile-fail/panic-runtime/bad-panic-flag1.rs
@@ -0,0 +1,14 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags:-C panic=foo
+// error-pattern:either `panic` or `abort` was expected
+
+fn main() {}
diff --git a/src/test/compile-fail/panic-runtime/bad-panic-flag2.rs b/src/test/compile-fail/panic-runtime/bad-panic-flag2.rs
new file mode 100644
index 0000000000..0ecf65f080
--- /dev/null
+++ b/src/test/compile-fail/panic-runtime/bad-panic-flag2.rs
@@ -0,0 +1,14 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-flags:-C panic
+// error-pattern:requires either `panic` or `abort`
+
+fn main() {}
diff --git a/src/test/compile-fail/panic-runtime/libtest-unwinds.rs b/src/test/compile-fail/panic-runtime/libtest-unwinds.rs
new file mode 100644
index 0000000000..5f6f4ecbd6
--- /dev/null
+++ b/src/test/compile-fail/panic-runtime/libtest-unwinds.rs
@@ -0,0 +1,20 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// error-pattern:is not compiled with this crate's panic strategy `abort`
+// compile-flags:-C panic=abort
+
+#![feature(test)]
+
+extern crate test;
+
+fn main() {
+}
+
diff --git a/src/test/compile-fail/panic-runtime/needs-gate.rs b/src/test/compile-fail/panic-runtime/needs-gate.rs
new file mode 100644
index 0000000000..02f3da58f1
--- /dev/null
+++ b/src/test/compile-fail/panic-runtime/needs-gate.rs
@@ -0,0 +1,14 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![panic_runtime] //~ ERROR: is an experimental feature
+#![needs_panic_runtime] //~ ERROR: is an experimental feature
+
+fn main() {}
diff --git a/src/test/compile-fail/panic-runtime/runtime-depend-on-needs-runtime.rs b/src/test/compile-fail/panic-runtime/runtime-depend-on-needs-runtime.rs
new file mode 100644
index 0000000000..0681f99106
--- /dev/null
+++ b/src/test/compile-fail/panic-runtime/runtime-depend-on-needs-runtime.rs
@@ -0,0 +1,15 @@
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+ +// aux-build:needs-panic-runtime.rs +// aux-build:runtime-depending-on-panic-runtime.rs +// error-pattern:cannot depend on a crate that needs a panic runtime + +extern crate runtime_depending_on_panic_runtime; diff --git a/src/test/compile-fail/panic-runtime/transitive-link-a-bunch.rs b/src/test/compile-fail/panic-runtime/transitive-link-a-bunch.rs new file mode 100644 index 0000000000..885b3e6dbb --- /dev/null +++ b/src/test/compile-fail/panic-runtime/transitive-link-a-bunch.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:panic-runtime-unwind.rs +// aux-build:panic-runtime-abort.rs +// aux-build:wants-panic-runtime-unwind.rs +// aux-build:wants-panic-runtime-abort.rs +// aux-build:panic-runtime-lang-items.rs +// error-pattern: is not compiled with this crate's panic strategy `unwind` + +#![no_std] + +extern crate wants_panic_runtime_unwind; +extern crate wants_panic_runtime_abort; +extern crate panic_runtime_lang_items; + +fn main() {} diff --git a/src/test/compile-fail/panic-runtime/two-panic-runtimes.rs b/src/test/compile-fail/panic-runtime/two-panic-runtimes.rs new file mode 100644 index 0000000000..0fe0da2fa2 --- /dev/null +++ b/src/test/compile-fail/panic-runtime/two-panic-runtimes.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// error-pattern:cannot link together two panic runtimes: panic_runtime_unwind and panic_runtime_unwind2 +// ignore-tidy-linelength +// aux-build:panic-runtime-unwind.rs +// aux-build:panic-runtime-unwind2.rs +// aux-build:panic-runtime-lang-items.rs + +#![no_std] + +extern crate panic_runtime_unwind; +extern crate panic_runtime_unwind2; +extern crate panic_runtime_lang_items; + +fn main() {} diff --git a/src/test/compile-fail/panic-runtime/want-abort-got-unwind.rs b/src/test/compile-fail/panic-runtime/want-abort-got-unwind.rs new file mode 100644 index 0000000000..b178006411 --- /dev/null +++ b/src/test/compile-fail/panic-runtime/want-abort-got-unwind.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// error-pattern:is not compiled with this crate's panic strategy `abort` +// aux-build:panic-runtime-unwind.rs +// compile-flags:-C panic=abort + +extern crate panic_runtime_unwind; + +fn main() {} diff --git a/src/test/compile-fail/panic-runtime/want-abort-got-unwind2.rs b/src/test/compile-fail/panic-runtime/want-abort-got-unwind2.rs new file mode 100644 index 0000000000..de8e010c3c --- /dev/null +++ b/src/test/compile-fail/panic-runtime/want-abort-got-unwind2.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// error-pattern:is not compiled with this crate's panic strategy `abort` +// aux-build:panic-runtime-unwind.rs +// aux-build:wants-panic-runtime-unwind.rs +// compile-flags:-C panic=abort + +extern crate wants_panic_runtime_unwind; + +fn main() {} diff --git a/src/test/compile-fail/panic-runtime/want-unwind-got-abort.rs b/src/test/compile-fail/panic-runtime/want-unwind-got-abort.rs new file mode 100644 index 0000000000..88ad36f310 --- /dev/null +++ b/src/test/compile-fail/panic-runtime/want-unwind-got-abort.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// error-pattern:is incompatible with this crate's strategy of `unwind` +// aux-build:panic-runtime-abort.rs +// aux-build:panic-runtime-lang-items.rs + +#![no_std] + +extern crate panic_runtime_abort; +extern crate panic_runtime_lang_items; + +fn main() {} diff --git a/src/test/compile-fail/panic-runtime/want-unwind-got-abort2.rs b/src/test/compile-fail/panic-runtime/want-unwind-got-abort2.rs new file mode 100644 index 0000000000..c42a25a553 --- /dev/null +++ b/src/test/compile-fail/panic-runtime/want-unwind-got-abort2.rs @@ -0,0 +1,21 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// error-pattern:is incompatible with this crate's strategy of `unwind` +// aux-build:panic-runtime-abort.rs +// aux-build:wants-panic-runtime-abort.rs +// aux-build:panic-runtime-lang-items.rs + +#![no_std] + +extern crate wants_panic_runtime_abort; +extern crate panic_runtime_lang_items; + +fn main() {} diff --git a/src/test/compile-fail/paren-span.rs b/src/test/compile-fail/paren-span.rs new file mode 100644 index 0000000000..8ed5050f3d --- /dev/null +++ b/src/test/compile-fail/paren-span.rs @@ -0,0 +1,31 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Be smart about span of parenthesized expression in macro. + +macro_rules! 
paren { + ($e:expr) => (($e)) + // ^^^^ do not highlight here +} + +mod m { + pub struct S { + x: i32 + } + pub fn make() -> S { + S { x: 0 } + } +} + +fn main() { + let s = m::make(); + paren!(s.x); //~ ERROR field `x` of struct `m::S` is private + // ^^^ highlight here +} diff --git a/src/test/compile-fail/pat-shadow-in-nested-binding.rs b/src/test/compile-fail/pat-shadow-in-nested-binding.rs index 526e4c1618..4a8513e10d 100644 --- a/src/test/compile-fail/pat-shadow-in-nested-binding.rs +++ b/src/test/compile-fail/pat-shadow-in-nested-binding.rs @@ -11,5 +11,5 @@ struct foo(usize); fn main() { - let (foo, _) = (2, 3); //~ ERROR declaration of `foo` shadows + let (foo, _) = (2, 3); //~ ERROR `foo` cannot be named the same as } diff --git a/src/test/compile-fail/pattern-error-continue.rs b/src/test/compile-fail/pattern-error-continue.rs index 9ebdcf1a9e..d9f3bb3c40 100644 --- a/src/test/compile-fail/pattern-error-continue.rs +++ b/src/test/compile-fail/pattern-error-continue.rs @@ -31,15 +31,17 @@ fn main() { match 'c' { S { .. } => (), //~^ ERROR mismatched types - //~| expected `char` - //~| found `S` - //~| expected char - //~| found struct `S` + //~| expected type `char` + //~| found type `S` + //~| expected char, found struct `S` _ => () } f(true); //~^ ERROR mismatched types - //~| expected `char` - //~| found `bool` + //~| expected char, found bool + + match () { + E::V => {} //~ ERROR failed to resolve. Use of undeclared type or module `E` + } } diff --git a/src/test/compile-fail/pptypedef.rs b/src/test/compile-fail/pptypedef.rs index 1a1c87ff47..7ece52e753 100644 --- a/src/test/compile-fail/pptypedef.rs +++ b/src/test/compile-fail/pptypedef.rs @@ -13,11 +13,9 @@ fn let_in(x: T, f: F) where F: FnOnce(T) {} fn main() { let_in(3u32, |i| { assert!(i == 3i32); }); //~^ ERROR mismatched types - //~| expected `u32` - //~| found `i32` + //~| expected u32, found i32 let_in(3i32, |i| { assert!(i == 3u32); }); //~^ ERROR mismatched types - //~| expected `i32` - //~| found `u32` + //~| expected i32, found u32 } diff --git a/src/test/compile-fail/privacy/restricted/auxiliary/pub_restricted.rs b/src/test/compile-fail/privacy/restricted/auxiliary/pub_restricted.rs new file mode 100644 index 0000000000..b1c88ce6ce --- /dev/null +++ b/src/test/compile-fail/privacy/restricted/auxiliary/pub_restricted.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted)] + +pub(crate) struct Crate; +#[derive(Default)] +pub struct Universe { + pub x: i32, + pub(crate) y: i32 +} + +impl Universe { + pub fn f(&self) {} + pub(crate) fn g(&self) {} +} diff --git a/src/test/compile-fail/privacy/restricted/feature-gate.rs b/src/test/compile-fail/privacy/restricted/feature-gate.rs new file mode 100644 index 0000000000..53ae439867 --- /dev/null +++ b/src/test/compile-fail/privacy/restricted/feature-gate.rs @@ -0,0 +1,25 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +pub(crate) //~ ERROR experimental +mod foo {} + +pub(self) //~ ERROR experimental +mod bar {} + +struct S { + pub(self) x: i32, //~ ERROR experimental +} +impl S { + pub(self) fn f() {} //~ ERROR experimental +} +extern { + pub(self) fn f(); //~ ERROR experimental +} diff --git a/src/test/compile-fail/privacy/restricted/lookup-ignores-private.rs b/src/test/compile-fail/privacy/restricted/lookup-ignores-private.rs new file mode 100644 index 0000000000..4e2a69cb79 --- /dev/null +++ b/src/test/compile-fail/privacy/restricted/lookup-ignores-private.rs @@ -0,0 +1,44 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(rustc_attrs, pub_restricted)] +#![allow(warnings)] + +mod foo { + pub use foo::bar::S; + mod bar { + #[derive(Default)] + pub struct S { + pub(foo) x: i32, + } + impl S { + pub(foo) fn f(&self) -> i32 { 0 } + } + + pub struct S2 { + pub(crate) x: bool, + } + impl S2 { + pub(crate) fn f(&self) -> bool { false } + } + + impl ::std::ops::Deref for S { + type Target = S2; + fn deref(&self) -> &S2 { unimplemented!() } + } + } +} + +#[rustc_error] +fn main() { //~ ERROR compilation successful + let s = foo::S::default(); + let _: bool = s.x; + let _: bool = s.f(); +} diff --git a/src/test/compile-fail/privacy/restricted/private-in-public.rs b/src/test/compile-fail/privacy/restricted/private-in-public.rs new file mode 100644 index 0000000000..84328ca387 --- /dev/null +++ b/src/test/compile-fail/privacy/restricted/private-in-public.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted)] + +mod foo { + struct Priv; + mod bar { + use foo::Priv; + pub(super) fn f(_: Priv) {} + pub(crate) fn g(_: Priv) {} //~ ERROR E0446 + } +} + +fn main() { } diff --git a/src/test/compile-fail/privacy/restricted/struct-literal-field.rs b/src/test/compile-fail/privacy/restricted/struct-literal-field.rs new file mode 100644 index 0000000000..e254e00565 --- /dev/null +++ b/src/test/compile-fail/privacy/restricted/struct-literal-field.rs @@ -0,0 +1,31 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(pub_restricted)] +#![deny(private_in_public)] +#![allow(warnings)] + +mod foo { + pub mod bar { + pub struct S { + pub(foo) x: i32, + } + } + + fn f() { + use foo::bar::S; + S { x: 0 }; // ok + } +} + +fn main() { + use foo::bar::S; + S { x: 0 }; //~ ERROR private +} diff --git a/src/test/compile-fail/privacy/restricted/test.rs b/src/test/compile-fail/privacy/restricted/test.rs new file mode 100644 index 0000000000..3e1bb76662 --- /dev/null +++ b/src/test/compile-fail/privacy/restricted/test.rs @@ -0,0 +1,62 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:pub_restricted.rs + +#![feature(pub_restricted)] +#![deny(private_in_public)] +#![allow(warnings)] +extern crate pub_restricted; + +mod foo { + pub mod bar { + pub(super) fn f() {} + #[derive(Default)] + pub struct S { + pub(super) x: i32, + } + impl S { + pub(super) fn f(&self) {} + pub(super) fn g() {} + } + } + fn f() { + use foo::bar::S; + pub(self) use foo::bar::f; // ok + pub(super) use foo::bar::f as g; //~ ERROR cannot be reexported + S::default().x; // ok + S::default().f(); // ok + S::g(); // ok + } +} + +fn f() { + use foo::bar::S; + use foo::bar::f; //~ ERROR private + S::default().x; //~ ERROR private + S::default().f(); //~ ERROR private + S::g(); //~ ERROR private +} + +fn main() { + use pub_restricted::Universe; + use pub_restricted::Crate; //~ ERROR private + + let u = Universe::default(); + let _ = u.x; + let _ = u.y; //~ ERROR private + u.f(); + u.g(); //~ ERROR private +} + +mod pathological { + pub(bad::path) mod m1 {} //~ ERROR failed to resolve module path + pub(foo) mod m2 {} //~ ERROR visibilities can only be restricted to ancestor modules +} diff --git a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs new file mode 100644 index 0000000000..9cc53386d4 --- /dev/null +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted, type_macros)] + +mod foo { + type T = (); + struct S1(pub(foo) (), pub(T), pub(crate) (), pub(((), T))); + struct S2(pub((foo)) ()); //~ ERROR expected one of `+` or `,`, found `(` + //~| ERROR expected one of `+`, `;`, or `where`, found `(` +} diff --git a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs new file mode 100644 index 0000000000..01466c6a85 --- /dev/null +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test2.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted, type_macros)] + +macro_rules! define_struct { + ($t:ty) => { + struct S1(pub $t); + struct S2(pub (foo) ()); + struct S3(pub $t ()); //~ ERROR expected one of `+` or `,`, found `(` + //~| ERROR expected one of `+`, `;`, or `where`, found `(` + } +} + +mod foo { + define_struct! { (foo) } +} diff --git a/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs new file mode 100644 index 0000000000..ef187a1dae --- /dev/null +++ b/src/test/compile-fail/privacy/restricted/tuple-struct-fields/test3.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted, type_macros)] + +macro_rules! define_struct { + ($t:ty) => { + struct S1(pub($t)); + struct S2(pub (foo) ()); + struct S3(pub($t) ()); //~ ERROR expected one of `+` or `,`, found `(` + //~| ERROR expected one of `+`, `;`, or `where`, found `(` + } +} + +mod foo { + define_struct! { foo } +} diff --git a/src/test/compile-fail/privacy/restricted/ty-params.rs b/src/test/compile-fail/privacy/restricted/ty-params.rs new file mode 100644 index 0000000000..ab423620d6 --- /dev/null +++ b/src/test/compile-fail/privacy/restricted/ty-params.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(pub_restricted)] + +macro_rules! 
m { + ($p: path) => (pub($p) struct Z;) +} + +struct S(T); +m!{ S } //~ ERROR type or lifetime parameters in visibility path + +mod foo { + struct S(pub(foo) ()); //~ ERROR type or lifetime parameters in visibility path +} + +fn main() {} diff --git a/src/test/compile-fail/ptr-coercion.rs b/src/test/compile-fail/ptr-coercion.rs index 18e210076c..ff627e69d4 100644 --- a/src/test/compile-fail/ptr-coercion.rs +++ b/src/test/compile-fail/ptr-coercion.rs @@ -15,19 +15,19 @@ pub fn main() { // *const -> *mut let x: *const isize = &42; let x: *mut isize = x; //~ ERROR mismatched types - //~| expected `*mut isize` - //~| found `*const isize` + //~| expected type `*mut isize` + //~| found type `*const isize` //~| values differ in mutability // & -> *mut let x: *mut isize = &42; //~ ERROR mismatched types - //~| expected `*mut isize` - //~| found `&isize` + //~| expected type `*mut isize` + //~| found type `&isize` //~| values differ in mutability let x: *const isize = &42; let x: *mut isize = x; //~ ERROR mismatched types - //~| expected `*mut isize` - //~| found `*const isize` + //~| expected type `*mut isize` + //~| found type `*const isize` //~| values differ in mutability } diff --git a/src/test/compile-fail/qualified-path-params-2.rs b/src/test/compile-fail/qualified-path-params-2.rs new file mode 100644 index 0000000000..5c661bfcdc --- /dev/null +++ b/src/test/compile-fail/qualified-path-params-2.rs @@ -0,0 +1,31 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Check that qualified paths with type parameters +// fail during type checking and not during parsing + +struct S; + +trait Tr { + type A; +} + +impl Tr for S { + type A = S; +} + +impl S { + fn f() {} +} + +type A = ::A::f; //~ ERROR type parameters are not allowed on this type +//~^ ERROR ambiguous associated type; specify the type using the syntax `<::A as Trait>::f` + +fn main() {} diff --git a/src/test/compile-fail/qualified-path-params.rs b/src/test/compile-fail/qualified-path-params.rs new file mode 100644 index 0000000000..002080f4cb --- /dev/null +++ b/src/test/compile-fail/qualified-path-params.rs @@ -0,0 +1,33 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Check that qualified paths with type parameters +// fail during type checking and not during parsing + +struct S; + +trait Tr { + type A; +} + +impl Tr for S { + type A = S; +} + +impl S { + fn f() {} +} + +fn main() { + match 10 { + ::A::f:: => {} //~ ERROR `f` is not an associated const + 0 ... ::A::f:: => {} //~ ERROR only char and numeric types are allowed in range + } +} diff --git a/src/test/compile-fail/range-1.rs b/src/test/compile-fail/range-1.rs index 895d2450cf..5b0dd256b4 100644 --- a/src/test/compile-fail/range-1.rs +++ b/src/test/compile-fail/range-1.rs @@ -17,11 +17,12 @@ pub fn main() { // Bool => does not implement iterator. 
for i in false..true {} - //~^ ERROR E0277 + //~^ ERROR `bool: std::num::One` is not satisfied + //~^^ ERROR `bool: std::iter::Step` is not satisfied + //~^^^ ERROR `for<'a> &'a bool: std::ops::Add` is not satisfied // Unsized type. let arr: &[_] = &[1, 2, 3]; let range = *arr..; //~^ ERROR `[_]: std::marker::Sized` is not satisfied - //~| ERROR `[_]: std::marker::Sized` is not satisfied } diff --git a/src/test/compile-fail/ref-suggestion.rs b/src/test/compile-fail/ref-suggestion.rs index 4625669d5e..0a0867195d 100644 --- a/src/test/compile-fail/ref-suggestion.rs +++ b/src/test/compile-fail/ref-suggestion.rs @@ -11,26 +11,17 @@ fn main() { let x = vec![1]; let y = x; - //~^ HELP use a `ref` binding as shown - //~| SUGGESTION let ref y = x; x; //~ ERROR use of moved value - //~^ HELP run `rustc --explain E0382` to see a detailed explanation let x = vec![1]; let mut y = x; - //~^ HELP use a `ref` binding as shown - //~| SUGGESTION let ref mut y = x; x; //~ ERROR use of moved value - //~^ HELP run `rustc --explain E0382` to see a detailed explanation let x = (Some(vec![1]), ()); match x { (Some(y), ()) => {}, - //~^ HELP use a `ref` binding as shown - //~| SUGGESTION (Some(ref y), ()) => {}, _ => {}, } x; //~ ERROR use of partially moved value - //~^ HELP run `rustc --explain E0382` to see a detailed explanation } diff --git a/src/test/compile-fail/region-invariant-static-error-reporting.rs b/src/test/compile-fail/region-invariant-static-error-reporting.rs new file mode 100644 index 0000000000..ac0167e08b --- /dev/null +++ b/src/test/compile-fail/region-invariant-static-error-reporting.rs @@ -0,0 +1,36 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// This test checks that the error messages you get for this example +// at least mention `'a` and `'static`. The precise messages can drift +// over time, but this test used to exhibit some pretty bogus messages +// that were not remotely helpful. + +// error-pattern:cannot infer +// error-pattern:cannot outlive the lifetime 'a +// error-pattern:must be valid for the static lifetime +// error-pattern:cannot infer +// error-pattern:cannot outlive the lifetime 'a +// error-pattern:must be valid for the static lifetime + +struct Invariant<'a>(Option<&'a mut &'a mut ()>); + +fn mk_static() -> Invariant<'static> { Invariant(None) } + +fn unify<'a>(x: Option>, f: fn(Invariant<'a>)) { + let bad = if x.is_some() { + x.unwrap() + } else { + mk_static() + }; + f(bad); +} + +fn main() {} diff --git a/src/test/compile-fail/regions-bounded-method-type-parameters-cross-crate.rs b/src/test/compile-fail/regions-bounded-method-type-parameters-cross-crate.rs index 82d05c5d71..1eb36e34ab 100644 --- a/src/test/compile-fail/regions-bounded-method-type-parameters-cross-crate.rs +++ b/src/test/compile-fail/regions-bounded-method-type-parameters-cross-crate.rs @@ -8,11 +8,11 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// aux-build:regions_bounded_method_type_parameters_cross_crate_lib.rs +// aux-build:rbmtp_cross_crate_lib.rs // Check explicit region bounds on methods in the cross crate case. 
-extern crate regions_bounded_method_type_parameters_cross_crate_lib as lib; +extern crate rbmtp_cross_crate_lib as lib; use lib::Inv; use lib::MaybeOwned; diff --git a/src/test/compile-fail/regions-bounds.rs b/src/test/compile-fail/regions-bounds.rs index 7f2889a327..64dbf27b78 100644 --- a/src/test/compile-fail/regions-bounds.rs +++ b/src/test/compile-fail/regions-bounds.rs @@ -17,15 +17,15 @@ struct a_class<'a> { x:&'a isize } fn a_fn1<'a,'b>(e: an_enum<'a>) -> an_enum<'b> { return e; //~ ERROR mismatched types - //~| expected `an_enum<'b>` - //~| found `an_enum<'a>` + //~| expected type `an_enum<'b>` + //~| found type `an_enum<'a>` //~| lifetime mismatch } fn a_fn3<'a,'b>(e: a_class<'a>) -> a_class<'b> { return e; //~ ERROR mismatched types - //~| expected `a_class<'b>` - //~| found `a_class<'a>` + //~| expected type `a_class<'b>` + //~| found type `a_class<'a>` //~| lifetime mismatch } diff --git a/src/test/compile-fail/regions-early-bound-error-method.rs b/src/test/compile-fail/regions-early-bound-error-method.rs index 8cc3527228..f6a0c86de6 100644 --- a/src/test/compile-fail/regions-early-bound-error-method.rs +++ b/src/test/compile-fail/regions-early-bound-error-method.rs @@ -29,8 +29,8 @@ impl<'a> Box<'a> { fn or<'b,G:GetRef<'b>>(&self, g2: G) -> &'a isize { g2.get() //~^ ERROR mismatched types - //~| expected `&'a isize` - //~| found `&'b isize` + //~| expected type `&'a isize` + //~| found type `&'b isize` //~| lifetime mismatch } diff --git a/src/test/compile-fail/regions-fn-subtyping-return-static.rs b/src/test/compile-fail/regions-fn-subtyping-return-static.rs index ebf7ca289f..c0116b2116 100644 --- a/src/test/compile-fail/regions-fn-subtyping-return-static.rs +++ b/src/test/compile-fail/regions-fn-subtyping-return-static.rs @@ -55,10 +55,9 @@ fn supply_G() { want_G(bar); want_G(baz); //~^ ERROR mismatched types - //~| expected `fn(&'cx S) -> &'static S` - //~| found `fn(&S) -> &S {baz}` - //~| expected concrete lifetime - //~| found bound lifetime parameter 'cx + //~| expected type `fn(&'cx S) -> &'static S` + //~| found type `fn(&S) -> &S {baz}` + //~| expected concrete lifetime, found bound lifetime parameter 'cx } pub fn main() { diff --git a/src/test/compile-fail/regions-infer-not-param.rs b/src/test/compile-fail/regions-infer-not-param.rs index 83b9d4633d..131b717095 100644 --- a/src/test/compile-fail/regions-infer-not-param.rs +++ b/src/test/compile-fail/regions-infer-not-param.rs @@ -27,10 +27,10 @@ fn take_direct<'a,'b>(p: direct<'a>) -> direct<'b> { p } //~ ERROR mismatched ty fn take_indirect1(p: indirect1) -> indirect1 { p } fn take_indirect2<'a,'b>(p: indirect2<'a>) -> indirect2<'b> { p } //~ ERROR mismatched types -//~| expected `indirect2<'b>` -//~| found `indirect2<'a>` +//~| expected type `indirect2<'b>` +//~| found type `indirect2<'a>` //~| ERROR mismatched types -//~| expected `indirect2<'b>` -//~| found `indirect2<'a>` +//~| expected type `indirect2<'b>` +//~| found type `indirect2<'a>` fn main() {} diff --git a/src/test/compile-fail/regions-infer-paramd-indirect.rs b/src/test/compile-fail/regions-infer-paramd-indirect.rs index 1d32e8fe7b..fad115c2ae 100644 --- a/src/test/compile-fail/regions-infer-paramd-indirect.rs +++ b/src/test/compile-fail/regions-infer-paramd-indirect.rs @@ -32,8 +32,8 @@ impl<'a> set_f<'a> for c<'a> { fn set_f_bad(&mut self, b: Box) { self.f = b; //~^ ERROR mismatched types - //~| expected `Box>` - //~| found `Box>` + //~| expected type `Box>` + //~| found type `Box>` //~| lifetime mismatch } } diff --git 
a/src/test/compile-fail/regions-wf-trait-object.rs b/src/test/compile-fail/regions-wf-trait-object.rs index 40b715cf3b..39b8482cfa 100644 --- a/src/test/compile-fail/regions-wf-trait-object.rs +++ b/src/test/compile-fail/regions-wf-trait-object.rs @@ -6,7 +6,7 @@ // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // , at your // option. This file may not be copied, modified, or distributed -// except according to those terms.t +// except according to those terms. // Check that the explicit lifetime bound (`'b`, in this example) must // outlive all the superbound from the trait (`'a`, in this example). diff --git a/src/test/compile-fail/reject-specialized-drops-8142.rs b/src/test/compile-fail/reject-specialized-drops-8142.rs index adc8702240..1ea956bbd5 100644 --- a/src/test/compile-fail/reject-specialized-drops-8142.rs +++ b/src/test/compile-fail/reject-specialized-drops-8142.rs @@ -38,8 +38,8 @@ impl<'ml> Drop for M<'ml> { fn drop(&mut self) { } } // AC impl Drop for N<'static> { fn drop(&mut self) { } } // REJECT //~^ ERROR mismatched types -//~| expected `N<'n>` -//~| found `N<'static>` +//~| expected type `N<'n>` +//~| found type `N<'static>` impl Drop for O { fn drop(&mut self) { } } // ACCEPT diff --git a/src/test/compile-fail/repeat_count.rs b/src/test/compile-fail/repeat_count.rs index 10b722946a..ab5af64d95 100644 --- a/src/test/compile-fail/repeat_count.rs +++ b/src/test/compile-fail/repeat_count.rs @@ -16,52 +16,45 @@ fn main() { //~^ ERROR expected constant integer for repeat count, found variable [E0307] let b = [0; ()]; //~^ ERROR mismatched types - //~| expected `usize` - //~| found `()` - //~| expected usize - //~| found ()) [E0308] + //~| expected type `usize` + //~| found type `()` + //~| expected usize, found () //~| ERROR expected positive integer for repeat count, found tuple [E0306] let c = [0; true]; //~^ ERROR mismatched types - //~| expected `usize` - //~| found `bool` + //~| expected usize, found bool //~| ERROR expected positive integer for repeat count, found boolean [E0306] let d = [0; 0.5]; //~^ ERROR mismatched types - //~| expected `usize` - //~| found `_` - //~| expected usize - //~| found floating-point variable) [E0308] + //~| expected type `usize` + //~| found type `_` + //~| expected usize, found floating-point variable //~| ERROR expected positive integer for repeat count, found float [E0306] let e = [0; "foo"]; //~^ ERROR mismatched types - //~| expected `usize` - //~| found `&'static str` - //~| expected usize - //~| found &-ptr) [E0308] + //~| expected type `usize` + //~| found type `&'static str` + //~| expected usize, found &-ptr //~| ERROR expected positive integer for repeat count, found string literal [E0306] let f = [0; -4_isize]; //~^ ERROR mismatched types //~| expected `usize` - //~| found `isize` [E0308] + //~| found `isize` //~| ERROR mismatched types: - //~| expected `usize`, - //~| found `isize` [E0307] + //~| expected usize, found isize let f = [0_usize; -1_isize]; //~^ ERROR mismatched types //~| expected `usize` - //~| found `isize` [E0308] + //~| found `isize` //~| ERROR mismatched types - //~| expected `usize` - //~| found `isize` [E0307] + //~| expected usize, found isize struct G { g: (), } let g = [0; G { g: () }]; //~^ ERROR mismatched types - //~| expected `usize` - //~| found `main::G` - //~| expected usize - //~| found struct `main::G`) [E0308] + //~| expected type `usize` + //~| found type `main::G` + //~| expected usize, found struct `main::G` //~| ERROR expected positive integer for repeat count, found 
struct [E0306] } diff --git a/src/test/compile-fail/reserved-become.rs b/src/test/compile-fail/reserved-become.rs index 82e9ebc10d..bcda61e363 100644 --- a/src/test/compile-fail/reserved-become.rs +++ b/src/test/compile-fail/reserved-become.rs @@ -10,5 +10,5 @@ fn main() { let become = 0; - //~^ ERROR `become` is a reserved keyword + //~^ ERROR expected pattern, found reserved keyword `become` } diff --git a/src/test/compile-fail/resolve-conflict-item-vs-import.rs b/src/test/compile-fail/resolve-conflict-item-vs-import.rs index dbd1ecf44f..5a068ce421 100644 --- a/src/test/compile-fail/resolve-conflict-item-vs-import.rs +++ b/src/test/compile-fail/resolve-conflict-item-vs-import.rs @@ -13,6 +13,6 @@ use std::mem::transmute; fn transmute() {} //~^ ERROR a value named `transmute` has already been imported in this module - +//~| was already imported fn main() { } diff --git a/src/test/compile-fail/rfc1592-deprecated.rs b/src/test/compile-fail/rfc1592-deprecated.rs new file mode 100644 index 0000000000..e766f97720 --- /dev/null +++ b/src/test/compile-fail/rfc1592-deprecated.rs @@ -0,0 +1,32 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::fmt; + +#[deny(warnings)] trait Foo { fn foo(&self) -> (Self, Self); } +//~^ ERROR the trait bound `Self: std::marker::Sized` is not satisfied +//~| WARNING hard error + +impl Foo for T { + fn foo(&self) -> (Self, Self) { + (*self, *self) + } +} + +#[deny(warnings)] +fn main() { + assert_eq!((11).foo(), (11, 11)); + + let junk: Box = Box::new(42); + //~^ ERROR the trait cannot require that `Self : Sized` + //~| WARNING hard error + let f = format!("{:?}", junk); + assert_eq!(f, "42"); +} diff --git a/src/test/compile-fail/self-infer.rs b/src/test/compile-fail/self-infer.rs new file mode 100644 index 0000000000..fd011318a4 --- /dev/null +++ b/src/test/compile-fail/self-infer.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +struct S; + +impl S { + fn f(self: _) {} //~ERROR the type placeholder `_` is not allowed within types on item sig + fn g(self: &_) {} //~ERROR the type placeholder `_` is not allowed within types on item sig +} + +fn main() {} diff --git a/src/test/compile-fail/self_type_keyword-2.rs b/src/test/compile-fail/self_type_keyword-2.rs new file mode 100644 index 0000000000..613f54eb33 --- /dev/null +++ b/src/test/compile-fail/self_type_keyword-2.rs @@ -0,0 +1,13 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +use self::Self as Foo; //~ ERROR unresolved import `self::Self` + +pub fn main() {} diff --git a/src/test/compile-fail/self_type_keyword.rs b/src/test/compile-fail/self_type_keyword.rs index 6296673787..b9c9d7a389 100644 --- a/src/test/compile-fail/self_type_keyword.rs +++ b/src/test/compile-fail/self_type_keyword.rs @@ -14,7 +14,7 @@ struct Self; //~^ ERROR expected identifier, found keyword `Self` struct Bar<'Self>; -//~^ ERROR invalid lifetime name +//~^ ERROR lifetimes cannot use keyword names pub fn main() { let Self = 5; @@ -39,9 +39,6 @@ pub fn main() { } } -use self::Self as Foo; -//~^ ERROR expected identifier, found keyword `Self` - use std::option::Option as Self; //~^ ERROR expected identifier, found keyword `Self` diff --git a/src/test/compile-fail/shift-various-bad-types.rs b/src/test/compile-fail/shift-various-bad-types.rs index 560af9193b..2d06161111 100644 --- a/src/test/compile-fail/shift-various-bad-types.rs +++ b/src/test/compile-fail/shift-various-bad-types.rs @@ -34,8 +34,7 @@ fn foo(p: &Panolpy) { // Type of the result follows the LHS, not the RHS: let _: i32 = 22_i64 >> 1_i32; //~^ ERROR mismatched types - //~| expected `i32` - //~| found `i64` + //~| expected i32, found i64 } fn main() { diff --git a/src/test/compile-fail/simd-intrinsic-generic-arithmetic.rs b/src/test/compile-fail/simd-intrinsic-generic-arithmetic.rs index 33954d23b1..35c368f4cb 100644 --- a/src/test/compile-fail/simd-intrinsic-generic-arithmetic.rs +++ b/src/test/compile-fail/simd-intrinsic-generic-arithmetic.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(repr_simd, platform_intrinsics, rustc_attrs)] +#![feature(repr_simd, platform_intrinsics)] #![allow(non_camel_case_types)] #[repr(simd)] #[derive(Copy, Clone)] @@ -34,7 +34,6 @@ extern "platform-intrinsic" { fn simd_xor(x: T, y: T) -> T; } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. fn main() { let x = i32x4(0, 0, 0, 0); let y = u32x4(0, 0, 0, 0); diff --git a/src/test/compile-fail/simd-intrinsic-generic-cast.rs b/src/test/compile-fail/simd-intrinsic-generic-cast.rs index cb3bed7209..4999b790b1 100644 --- a/src/test/compile-fail/simd-intrinsic-generic-cast.rs +++ b/src/test/compile-fail/simd-intrinsic-generic-cast.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(repr_simd, platform_intrinsics, rustc_attrs)] +#![feature(repr_simd, platform_intrinsics)] #[repr(simd)] #[derive(Copy, Clone)] @@ -35,7 +35,6 @@ extern "platform-intrinsic" { fn simd_cast(x: T) -> U; } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. fn main() { let x = i32x4(0, 0, 0, 0); diff --git a/src/test/compile-fail/simd-intrinsic-generic-comparison.rs b/src/test/compile-fail/simd-intrinsic-generic-comparison.rs index 0e7b2bd490..617b03a871 100644 --- a/src/test/compile-fail/simd-intrinsic-generic-comparison.rs +++ b/src/test/compile-fail/simd-intrinsic-generic-comparison.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(repr_simd, platform_intrinsics, rustc_attrs)] +#![feature(repr_simd, platform_intrinsics)] #[repr(simd)] #[derive(Copy, Clone)] @@ -29,7 +29,6 @@ extern "platform-intrinsic" { fn simd_ge(x: T, y: T) -> U; } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. 
fn main() { let x = i32x4(0, 0, 0, 0); diff --git a/src/test/compile-fail/simd-intrinsic-generic-elements.rs b/src/test/compile-fail/simd-intrinsic-generic-elements.rs index 1f4cc72ffe..78f7d42e25 100644 --- a/src/test/compile-fail/simd-intrinsic-generic-elements.rs +++ b/src/test/compile-fail/simd-intrinsic-generic-elements.rs @@ -56,7 +56,6 @@ extern "platform-intrinsic" { fn simd_shuffle8(x: T, y: T, idx: [u32; 8]) -> U; } -#[rustc_no_mir] // FIXME #27840 MIR doesn't provide precise spans for calls. fn main() { let x = i32x4(0, 0, 0, 0); diff --git a/src/test/compile-fail/sized-cycle-note.rs b/src/test/compile-fail/sized-cycle-note.rs index 3d7c4868e9..712b4ac22f 100644 --- a/src/test/compile-fail/sized-cycle-note.rs +++ b/src/test/compile-fail/sized-cycle-note.rs @@ -17,14 +17,9 @@ // 2. it should elaborate the steps that led to the cycle. struct Baz { q: Option } - +//~^ ERROR recursive type `Baz` has infinite size struct Foo { q: Option } //~^ ERROR recursive type `Foo` has infinite size -//~| NOTE type `Foo` is embedded within `std::option::Option`... -//~| NOTE ...which in turn is embedded within `std::option::Option`... -//~| NOTE ...which in turn is embedded within `Baz`... -//~| NOTE ...which in turn is embedded within `std::option::Option`... -//~| NOTE ...which in turn is embedded within `Foo`, completing the cycle. impl Foo { fn bar(&self) {} } diff --git a/src/test/compile-fail/slice-mut.rs b/src/test/compile-fail/slice-mut.rs index e6acc32545..874cca8cb3 100644 --- a/src/test/compile-fail/slice-mut.rs +++ b/src/test/compile-fail/slice-mut.rs @@ -16,7 +16,7 @@ fn main() { let y: &mut[_] = &x[2..4]; //~^ ERROR mismatched types - //~| expected `&mut [_]` - //~| found `&[isize]` + //~| expected type `&mut [_]` + //~| found type `&[isize]` //~| values differ in mutability } diff --git a/src/test/compile-fail/slightly-nice-generic-literal-messages.rs b/src/test/compile-fail/slightly-nice-generic-literal-messages.rs index 3c1c3796a2..3140bb6e57 100644 --- a/src/test/compile-fail/slightly-nice-generic-literal-messages.rs +++ b/src/test/compile-fail/slightly-nice-generic-literal-messages.rs @@ -16,10 +16,9 @@ fn main() { match Foo(1.1, marker::PhantomData) { 1 => {} //~^ ERROR mismatched types - //~| expected `Foo<_, _>` - //~| found `_` - //~| expected struct `Foo` - //~| found integral variable + //~| expected type `Foo<_, _>` + //~| found type `_` + //~| expected struct `Foo`, found integral variable } } diff --git a/src/test/compile-fail/specialization/specialization-polarity.rs b/src/test/compile-fail/specialization/specialization-polarity.rs old mode 100755 new mode 100644 diff --git a/src/test/compile-fail/static-array-across-crate.rs b/src/test/compile-fail/static-array-across-crate.rs index 04a731e847..d101432f6d 100644 --- a/src/test/compile-fail/static-array-across-crate.rs +++ b/src/test/compile-fail/static-array-across-crate.rs @@ -17,4 +17,11 @@ use array::ARRAY; static X: &'static u8 = &ARRAY[0]; //~^ ERROR: cannot refer to the interior of another static, use a constant +static Y: &'static u8 = &(&ARRAY)[0]; +//~^ ERROR: cannot refer to the interior of another static, use a constant + +static Z: u8 = (&ARRAY)[0]; +//~^ ERROR: cannot refer to the interior of another static, use a constant +//~^^ ERROR: cannot refer to other statics by value + pub fn main() {} diff --git a/src/test/compile-fail/static-mut-not-constant.rs b/src/test/compile-fail/static-mut-not-constant.rs index e3bb01e697..9b83c42609 100644 --- a/src/test/compile-fail/static-mut-not-constant.rs +++ 
b/src/test/compile-fail/static-mut-not-constant.rs @@ -12,6 +12,6 @@ static mut a: Box = box 3; //~^ ERROR allocations are not allowed in statics -//~^^ ERROR mutable statics are not allowed to have boxes +//~^^ ERROR destructors in statics are an unstable feature fn main() {} diff --git a/src/test/compile-fail/struct-base-wrong-type-2.rs b/src/test/compile-fail/struct-base-wrong-type-2.rs index 83e73b6bc3..1250d0dabc 100644 --- a/src/test/compile-fail/struct-base-wrong-type-2.rs +++ b/src/test/compile-fail/struct-base-wrong-type-2.rs @@ -19,13 +19,11 @@ struct Bar { x: isize } fn main() { let b = Bar { x: 5 }; let f = Foo { a: 2, ..b }; //~ ERROR mismatched types - //~| expected `Foo` - //~| found `Bar` - //~| expected struct `Foo` - //~| found struct `Bar` + //~| expected type `Foo` + //~| found type `Bar` + //~| expected struct `Foo`, found struct `Bar` let f__isize = Foo { a: 2, ..4 }; //~ ERROR mismatched types - //~| expected `Foo` - //~| found `_` - //~| expected struct `Foo` - //~| found integral variable + //~| expected type `Foo` + //~| found type `_` + //~| expected struct `Foo`, found integral variable } diff --git a/src/test/compile-fail/struct-base-wrong-type.rs b/src/test/compile-fail/struct-base-wrong-type.rs index c98131560d..4503e46584 100644 --- a/src/test/compile-fail/struct-base-wrong-type.rs +++ b/src/test/compile-fail/struct-base-wrong-type.rs @@ -18,15 +18,13 @@ struct Bar { x: isize } static bar: Bar = Bar { x: 5 }; static foo: Foo = Foo { a: 2, ..bar }; //~ ERROR mismatched types - //~| expected `Foo` - //~| found `Bar` - //~| expected struct `Foo` - //~| found struct `Bar` + //~| expected type `Foo` + //~| found type `Bar` + //~| expected struct `Foo`, found struct `Bar` static foo_i: Foo = Foo { a: 2, ..4 }; //~ ERROR mismatched types - //~| expected `Foo` - //~| found `_` - //~| expected struct `Foo` - //~| found integral variable + //~| expected type `Foo` + //~| found type `_` + //~| expected struct `Foo`, found integral variable fn main() { let b = Bar { x: 5 }; diff --git a/src/test/compile-fail/structure-constructor-type-mismatch.rs b/src/test/compile-fail/structure-constructor-type-mismatch.rs index 7a6b8ff662..87fc5ba93a 100644 --- a/src/test/compile-fail/structure-constructor-type-mismatch.rs +++ b/src/test/compile-fail/structure-constructor-type-mismatch.rs @@ -26,38 +26,32 @@ fn main() { let pt = PointF { x: 1, //~^ ERROR mismatched types - //~| expected f32 - //~| found integral variable + //~| expected f32, found integral variable y: 2, //~^ ERROR mismatched types - //~| expected f32 - //~| found integral variable + //~| expected f32, found integral variable }; let pt2 = Point:: { x: 3, //~^ ERROR mismatched types - //~| expected f32 - //~| found integral variable + //~| expected f32, found integral variable y: 4, //~^ ERROR mismatched types - //~| expected f32 - //~| found integral variable + //~| expected f32, found integral variable }; let pair = PairF { x: 5, //~^ ERROR mismatched types - //~| expected f32 - //~| found integral variable + //~| expected f32, found integral variable y: 6, }; let pair2 = PairF:: { x: 7, //~^ ERROR mismatched types - //~| expected f32 - //~| found integral variable + //~| expected f32, found integral variable y: 8, }; diff --git a/src/test/compile-fail/substs-ppaux.rs b/src/test/compile-fail/substs-ppaux.rs index 851e31b942..c857790e34 100644 --- a/src/test/compile-fail/substs-ppaux.rs +++ b/src/test/compile-fail/substs-ppaux.rs @@ -24,36 +24,36 @@ fn main() {} fn foo<'z>() where &'z (): Sized { let x: () = 
>::bar::<'static, char>; //[verbose]~^ ERROR mismatched types - //[verbose]~| expected `()` - //[verbose]~| found `fn() {>::bar::}` + //[verbose]~| expected type `()` + //[verbose]~| found type `fn() {>::bar::}` //[normal]~^^^^ ERROR mismatched types - //[normal]~| expected `()` - //[normal]~| found `fn() {>::bar::<'static, char>}` + //[normal]~| expected type `()` + //[normal]~| found type `fn() {>::bar::<'static, char>}` let x: () = >::bar::<'static, char>; //[verbose]~^ ERROR mismatched types - //[verbose]~| expected `()` - //[verbose]~| found `fn() {>::bar::}` + //[verbose]~| expected type `()` + //[verbose]~| found type `fn() {>::bar::}` //[normal]~^^^^ ERROR mismatched types - //[normal]~| expected `()` - //[normal]~| found `fn() {>::bar::<'static, char>}` + //[normal]~| expected type `()` + //[normal]~| found type `fn() {>::bar::<'static, char>}` let x: () = >::baz; //[verbose]~^ ERROR mismatched types - //[verbose]~| expected `()` - //[verbose]~| found `fn() {>::baz}` + //[verbose]~| expected type `()` + //[verbose]~| found type `fn() {>::baz}` //[normal]~^^^^ ERROR mismatched types - //[normal]~| expected `()` - //[normal]~| found `fn() {>::baz}` + //[normal]~| expected type `()` + //[normal]~| found type `fn() {>::baz}` let x: () = foo::<'static>; //[verbose]~^ ERROR mismatched types - //[verbose]~| expected `()` - //[verbose]~| found `fn() {foo::}` + //[verbose]~| expected type `()` + //[verbose]~| found type `fn() {foo::}` //[normal]~^^^^ ERROR mismatched types - //[normal]~| expected `()` - //[normal]~| found `fn() {foo::<'static>}` + //[normal]~| expected type `()` + //[normal]~| found type `fn() {foo::<'static>}` >::bar; //[verbose]~^ ERROR `str: std::marker::Sized` is not satisfied diff --git a/src/test/compile-fail/suggest-path-instead-of-mod-dot-item.rs b/src/test/compile-fail/suggest-path-instead-of-mod-dot-item.rs index 8877377a6e..412c90fd21 100644 --- a/src/test/compile-fail/suggest-path-instead-of-mod-dot-item.rs +++ b/src/test/compile-fail/suggest-path-instead-of-mod-dot-item.rs @@ -27,54 +27,50 @@ fn h1() -> i32 { a.I //~^ ERROR E0425 //~| HELP To reference an item from the `a` module, use `a::I` - //~| HELP run `rustc --explain E0425` to see a detailed explanation } fn h2() -> i32 { a.g() //~^ ERROR E0425 //~| HELP To call a function from the `a` module, use `a::g(..)` - //~| HELP run `rustc --explain E0425` to see a detailed explanation } fn h3() -> i32 { a.b.J //~^ ERROR E0425 //~| HELP To reference an item from the `a` module, use `a::b` - //~| HELP run `rustc --explain E0425` to see a detailed explanation } fn h4() -> i32 { a::b.J //~^ ERROR E0425 //~| HELP To reference an item from the `a::b` module, use `a::b::J` - //~| HELP run `rustc --explain E0425` to see a detailed explanation } -fn h5() -> i32 { - a.b.f() +fn h5() { + a.b.f(); //~^ ERROR E0425 //~| HELP To reference an item from the `a` module, use `a::b` - //~| HELP run `rustc --explain E0425` to see a detailed explanation + let v = Vec::new(); + v.push(a::b); + //~^ ERROR E0425 + //~| HELP Module `a::b` cannot be used as an expression } fn h6() -> i32 { a::b.f() //~^ ERROR E0425 //~| HELP To call a function from the `a::b` module, use `a::b::f(..)` - //~| HELP run `rustc --explain E0425` to see a detailed explanation } fn h7() { a::b //~^ ERROR E0425 - //~| HELP Module `a::b` cannot be the value of an expression - //~| HELP run `rustc --explain E0425` to see a detailed explanation + //~| HELP Module `a::b` cannot be used as an expression } fn h8() -> i32 { a::b() //~^ ERROR E0425 - //~| HELP No 
function corresponds to `a::b(..)` - //~| HELP run `rustc --explain E0425` to see a detailed explanation + //~| HELP Module `a::b` cannot be used as an expression } diff --git a/src/test/compile-fail/suppressed-error.rs b/src/test/compile-fail/suppressed-error.rs index 44de5d8cfe..9a4a52ced2 100644 --- a/src/test/compile-fail/suppressed-error.rs +++ b/src/test/compile-fail/suppressed-error.rs @@ -11,9 +11,8 @@ fn main() { let (x, y) = (); //~^ ERROR mismatched types -//~| expected `()` -//~| found `(_, _)` -//~| expected () -//~| found tuple +//~| expected type `()` +//~| found type `(_, _)` +//~| expected (), found tuple return x; } diff --git a/src/test/compile-fail/svh-change-lit.rs b/src/test/compile-fail/svh-change-lit.rs index eb92bcf065..1638caaa92 100644 --- a/src/test/compile-fail/svh-change-lit.rs +++ b/src/test/compile-fail/svh-change-lit.rs @@ -17,7 +17,7 @@ extern crate a; extern crate b; //~ ERROR: found possibly newer version of crate `a` which `b` depends on -//~| NOTE: perhaps this crate needs to be recompiled +//~| NOTE: perhaps that crate needs to be recompiled //~| NOTE: crate `a` path #1: //~| NOTE: crate `b` path #1: diff --git a/src/test/compile-fail/svh-change-significant-cfg.rs b/src/test/compile-fail/svh-change-significant-cfg.rs index 7c9e0d3a92..99523ca699 100644 --- a/src/test/compile-fail/svh-change-significant-cfg.rs +++ b/src/test/compile-fail/svh-change-significant-cfg.rs @@ -17,7 +17,7 @@ extern crate a; extern crate b; //~ ERROR: found possibly newer version of crate `a` which `b` depends on -//~| NOTE: perhaps this crate needs to be recompiled +//~| NOTE: perhaps that crate needs to be recompiled //~| NOTE: crate `a` path #1: //~| NOTE: crate `b` path #1: diff --git a/src/test/compile-fail/svh-change-trait-bound.rs b/src/test/compile-fail/svh-change-trait-bound.rs index 1e6a723290..dcf4859792 100644 --- a/src/test/compile-fail/svh-change-trait-bound.rs +++ b/src/test/compile-fail/svh-change-trait-bound.rs @@ -17,7 +17,7 @@ extern crate a; extern crate b; //~ ERROR: found possibly newer version of crate `a` which `b` depends on -//~| NOTE: perhaps this crate needs to be recompiled +//~| NOTE: perhaps that crate needs to be recompiled //~| NOTE: crate `a` path #1: //~| NOTE: crate `b` path #1: diff --git a/src/test/compile-fail/svh-change-type-arg.rs b/src/test/compile-fail/svh-change-type-arg.rs index 73c35ee6f8..7e51ca456b 100644 --- a/src/test/compile-fail/svh-change-type-arg.rs +++ b/src/test/compile-fail/svh-change-type-arg.rs @@ -17,7 +17,7 @@ extern crate a; extern crate b; //~ ERROR: found possibly newer version of crate `a` which `b` depends on -//~| NOTE: perhaps this crate needs to be recompiled +//~| NOTE: perhaps that crate needs to be recompiled //~| NOTE: crate `a` path #1: //~| NOTE: crate `b` path #1: diff --git a/src/test/compile-fail/svh-change-type-ret.rs b/src/test/compile-fail/svh-change-type-ret.rs index b8908e2cbd..54ca87d84c 100644 --- a/src/test/compile-fail/svh-change-type-ret.rs +++ b/src/test/compile-fail/svh-change-type-ret.rs @@ -17,7 +17,7 @@ extern crate a; extern crate b; //~ ERROR: found possibly newer version of crate `a` which `b` depends on -//~| NOTE: perhaps this crate needs to be recompiled +//~| NOTE: perhaps that crate needs to be recompiled //~| NOTE: crate `a` path #1: //~| NOTE: crate `b` path #1: diff --git a/src/test/compile-fail/svh-change-type-static.rs b/src/test/compile-fail/svh-change-type-static.rs index 291e441aa5..ea90faaf61 100644 --- a/src/test/compile-fail/svh-change-type-static.rs +++ 
b/src/test/compile-fail/svh-change-type-static.rs @@ -17,7 +17,7 @@ extern crate a; extern crate b; //~ ERROR: found possibly newer version of crate `a` which `b` depends on -//~| NOTE: perhaps this crate needs to be recompiled +//~| NOTE: perhaps that crate needs to be recompiled //~| NOTE: crate `a` path #1: //~| NOTE: crate `b` path #1: diff --git a/src/test/compile-fail/svh-use-trait.rs b/src/test/compile-fail/svh-use-trait.rs index ed816a93c5..c0a5a0a17e 100644 --- a/src/test/compile-fail/svh-use-trait.rs +++ b/src/test/compile-fail/svh-use-trait.rs @@ -22,7 +22,7 @@ extern crate uta; extern crate utb; //~ ERROR: found possibly newer version of crate `uta` which `utb` depends -//~| NOTE: perhaps this crate needs to be recompiled? +//~| NOTE: perhaps that crate needs to be recompiled? //~| NOTE: crate `uta` path #1: //~| NOTE: crate `utb` path #1: diff --git a/src/test/compile-fail/tag-that-dare-not-speak-its-name.rs b/src/test/compile-fail/tag-that-dare-not-speak-its-name.rs index 725234dfea..8f420f1ce4 100644 --- a/src/test/compile-fail/tag-that-dare-not-speak-its-name.rs +++ b/src/test/compile-fail/tag-that-dare-not-speak-its-name.rs @@ -21,8 +21,7 @@ fn main() { let y; let x : char = last(y); //~^ ERROR mismatched types - //~| expected `char` - //~| found `std::option::Option<_>` - //~| expected char - //~| found enum `std::option::Option` + //~| expected type `char` + //~| found type `std::option::Option<_>` + //~| expected char, found enum `std::option::Option` } diff --git a/src/test/compile-fail/terr-in-field.rs b/src/test/compile-fail/terr-in-field.rs index 60db35b879..4a21e13398 100644 --- a/src/test/compile-fail/terr-in-field.rs +++ b/src/test/compile-fail/terr-in-field.rs @@ -21,10 +21,9 @@ struct bar { fn want_foo(f: foo) {} fn have_bar(b: bar) { want_foo(b); //~ ERROR mismatched types - //~| expected `foo` - //~| found `bar` - //~| expected struct `foo` - //~| found struct `bar` + //~| expected type `foo` + //~| found type `bar` + //~| expected struct `foo`, found struct `bar` } fn main() {} diff --git a/src/test/compile-fail/terr-sorts.rs b/src/test/compile-fail/terr-sorts.rs index 231d2366b4..592d7b3929 100644 --- a/src/test/compile-fail/terr-sorts.rs +++ b/src/test/compile-fail/terr-sorts.rs @@ -19,10 +19,9 @@ type bar = Box; fn want_foo(f: foo) {} fn have_bar(b: bar) { want_foo(b); //~ ERROR mismatched types - //~| expected `foo` - //~| found `Box` - //~| expected struct `foo` - //~| found box + //~| expected type `foo` + //~| found type `Box` + //~| expected struct `foo`, found box } fn main() {} diff --git a/src/test/compile-fail/token-error-correct-3.rs b/src/test/compile-fail/token-error-correct-3.rs index f42c8d09a9..24627e9420 100644 --- a/src/test/compile-fail/token-error-correct-3.rs +++ b/src/test/compile-fail/token-error-correct-3.rs @@ -22,6 +22,9 @@ pub mod raw { callback(path.as_ref(); //~ NOTE: unclosed delimiter //~^ ERROR: expected one of fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types + //~^ expected (), found enum `std::result::Result` + //~| expected type `()` + //~| found type `std::result::Result` } else { //~ ERROR: incorrect close delimiter: `}` //~^ ERROR: expected one of Ok(false); diff --git a/src/test/compile-fail/token-error-correct.rs b/src/test/compile-fail/token-error-correct.rs index 6c54acd7bd..f5fecf3e17 100644 --- a/src/test/compile-fail/token-error-correct.rs +++ b/src/test/compile-fail/token-error-correct.rs @@ -13,8 +13,10 @@ fn main() { foo(bar(; //~ NOTE: unclosed delimiter //~^ NOTE: unclosed 
delimiter - //~^^ ERROR: unexpected token: `;` + //~^^ ERROR: expected expression, found `;` //~^^^ ERROR: unresolved name `bar` //~^^^^ ERROR: unresolved name `foo` + //~^^^^^ ERROR: expected one of `)`, `,`, `.`, `<`, `?` } //~ ERROR: incorrect close delimiter: `}` //~^ ERROR: incorrect close delimiter: `}` +//~^^ ERROR: expected expression, found `)` diff --git a/src/test/compile-fail/trace_macros-gate.rs b/src/test/compile-fail/trace_macros-gate.rs index 6473bcece9..d627de24d6 100644 --- a/src/test/compile-fail/trace_macros-gate.rs +++ b/src/test/compile-fail/trace_macros-gate.rs @@ -26,5 +26,5 @@ fn main() { ($x: ident) => { trace_macros!($x) } } - expando!(true); + expando!(true); //~ ERROR `trace_macros` is not stable } diff --git a/src/test/compile-fail/trace_macros-gate3.rs b/src/test/compile-fail/trace_macros-gate3.rs deleted file mode 100644 index 66d03cf9d8..0000000000 --- a/src/test/compile-fail/trace_macros-gate3.rs +++ /dev/null @@ -1,20 +0,0 @@ -// Copyright 2015 The Rust Project Developers. See the COPYRIGHT -// file at the top-level directory of this distribution and at -// http://rust-lang.org/COPYRIGHT. -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -// Test that the trace_macros feature gate is on. - -pub fn main() { - println!("arg: {}", trace_macros!()); //~ ERROR `trace_macros` is not stable - println!("arg: {}", trace_macros!(1)); //~ ERROR `trace_macros` is not stable - println!("arg: {}", trace_macros!(ident)); //~ ERROR `trace_macros` is not stable - println!("arg: {}", trace_macros!(for)); //~ ERROR `trace_macros` is not stable - println!("arg: {}", trace_macros!(true,)); //~ ERROR `trace_macros` is not stable - println!("arg: {}", trace_macros!(false 1)); //~ ERROR `trace_macros` is not stable -} diff --git a/src/test/compile-fail/trait-bounds-cant-coerce.rs b/src/test/compile-fail/trait-bounds-cant-coerce.rs index 836f08d0e7..1fff812af5 100644 --- a/src/test/compile-fail/trait-bounds-cant-coerce.rs +++ b/src/test/compile-fail/trait-bounds-cant-coerce.rs @@ -22,10 +22,9 @@ fn c(x: Box) { fn d(x: Box) { a(x); //~ ERROR mismatched types - //~| expected `Box` - //~| found `Box` - //~| expected bounds `Send` - //~| found no bounds + //~| expected type `Box` + //~| found type `Box` + //~| expected bounds `Send`, found no bounds } fn main() { } diff --git a/src/test/compile-fail/trait-bounds-impl-comparison-1.rs b/src/test/compile-fail/trait-bounds-impl-comparison-1.rs index 3fffb2e19f..9cf65a9d00 100644 --- a/src/test/compile-fail/trait-bounds-impl-comparison-1.rs +++ b/src/test/compile-fail/trait-bounds-impl-comparison-1.rs @@ -74,7 +74,7 @@ trait Trait { impl Trait for usize { fn method>(&self) {} - //~^ G : Getter` appears on the impl method but not on the corresponding trait method + //~^ ERROR `G: Getter` appears on the impl method } fn main() {} diff --git a/src/test/compile-fail/trait-object-reference-without-parens-suggestion.rs b/src/test/compile-fail/trait-object-reference-without-parens-suggestion.rs index 29360e58b5..fc2ed83b27 100644 --- a/src/test/compile-fail/trait-object-reference-without-parens-suggestion.rs +++ b/src/test/compile-fail/trait-object-reference-without-parens-suggestion.rs @@ -13,10 +13,8 @@ fn main() { //~^ ERROR expected a path //~| HELP try adding parentheses //~| SUGGESTION let _: &(Copy + 'static); - //~| HELP run `rustc --explain E0178` to see a detailed explanation let _: &'static 
Copy + 'static; //~^ ERROR expected a path //~| HELP try adding parentheses //~| SUGGESTION let _: &'static (Copy + 'static); - //~| HELP run `rustc --explain E0178` to see a detailed explanation } diff --git a/src/test/compile-fail/trait-suggest-where-clause.rs b/src/test/compile-fail/trait-suggest-where-clause.rs index 6950bce730..a8ff1bae7a 100644 --- a/src/test/compile-fail/trait-suggest-where-clause.rs +++ b/src/test/compile-fail/trait-suggest-where-clause.rs @@ -16,13 +16,11 @@ fn check() { // suggest a where-clause, if needed mem::size_of::(); //~^ ERROR `U: std::marker::Sized` is not satisfied - //~| HELP E0277 //~| HELP consider adding a `where U: std::marker::Sized` bound //~| NOTE required by `std::mem::size_of` mem::size_of::>(); //~^ ERROR `U: std::marker::Sized` is not satisfied - //~| HELP E0277 //~| HELP consider adding a `where U: std::marker::Sized` bound //~| NOTE required because it appears within the type `Misc` //~| NOTE required by `std::mem::size_of` @@ -31,13 +29,11 @@ fn check() { >::from; //~^ ERROR `u64: std::convert::From` is not satisfied - //~| HELP E0277 //~| HELP consider adding a `where u64: std::convert::From` bound //~| NOTE required by `std::convert::From::from` ::Item>>::from; //~^ ERROR `u64: std::convert::From<::Item>` is not satisfied - //~| HELP E0277 //~| HELP consider adding a `where u64: //~| NOTE required by `std::convert::From::from` @@ -45,20 +41,17 @@ fn check() { as From>::from; //~^ ERROR `Misc<_>: std::convert::From` is not satisfied - //~| HELP E0277 //~| NOTE required by `std::convert::From::from` // ... and also not if the error is not related to the type mem::size_of::<[T]>(); //~^ ERROR `[T]: std::marker::Sized` is not satisfied - //~| HELP E0277 //~| NOTE `[T]` does not have a constant size //~| NOTE required by `std::mem::size_of` mem::size_of::<[&U]>(); //~^ ERROR `[&U]: std::marker::Sized` is not satisfied - //~| HELP E0277 //~| NOTE `[&U]` does not have a constant size //~| NOTE required by `std::mem::size_of` } diff --git a/src/test/compile-fail/trait-test-2.rs b/src/test/compile-fail/trait-test-2.rs index 0cfcf6bb3f..2d4df77f96 100644 --- a/src/test/compile-fail/trait-test-2.rs +++ b/src/test/compile-fail/trait-test-2.rs @@ -21,7 +21,5 @@ fn main() { (box 10 as Box).dup(); //~^ ERROR E0038 //~| ERROR E0038 - //~| ERROR E0038 - //~| ERROR E0038 //~| ERROR E0277 } diff --git a/src/test/compile-fail/traits-inductive-overflow-simultaneous.rs b/src/test/compile-fail/traits-inductive-overflow-simultaneous.rs new file mode 100644 index 0000000000..2968e8a7ca --- /dev/null +++ b/src/test/compile-fail/traits-inductive-overflow-simultaneous.rs @@ -0,0 +1,30 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Regression test for #33344, initial version. This example allowed +// arbitrary trait bounds to be synthesized. 
+ +trait Tweedledum: IntoIterator {} +trait Tweedledee: IntoIterator {} + +impl Tweedledee for T {} +impl Tweedledum for T {} + +trait Combo: IntoIterator {} +impl Combo for T {} + +fn is_ee(t: T) { + t.into_iter(); +} + +fn main() { + is_ee(4); + //~^ ERROR overflow evaluating the requirement `_: Tweedle +} diff --git a/src/test/compile-fail/transmute-from-fn-item-types-lint.rs b/src/test/compile-fail/transmute-from-fn-item-types-lint.rs index 42c3cb7f18..08e660e878 100644 --- a/src/test/compile-fail/transmute-from-fn-item-types-lint.rs +++ b/src/test/compile-fail/transmute-from-fn-item-types-lint.rs @@ -15,15 +15,15 @@ use std::mem; unsafe fn foo() -> (isize, *const (), Option) { let i = mem::transmute(bar); //~^ ERROR is now zero-sized and has to be cast to a pointer before transmuting - //~^^ ERROR was previously accepted + //~^^ WARNING was previously accepted let p = mem::transmute(foo); //~^ ERROR is now zero-sized and has to be cast to a pointer before transmuting - //~^^ ERROR was previously accepted + //~^^ WARNING was previously accepted let of = mem::transmute(main); //~^ ERROR is now zero-sized and has to be cast to a pointer before transmuting - //~^^ ERROR was previously accepted + //~^^ WARNING was previously accepted (i, p, of) } @@ -31,11 +31,11 @@ unsafe fn foo() -> (isize, *const (), Option) { unsafe fn bar() { mem::transmute::<_, *mut ()>(foo); //~^ ERROR is now zero-sized and has to be cast to a pointer before transmuting - //~^^ ERROR was previously accepted + //~^^ WARNING was previously accepted mem::transmute::<_, fn()>(bar); //~^ ERROR is now zero-sized and has to be cast to a pointer before transmuting - //~^^ ERROR was previously accepted + //~^^ WARNING was previously accepted // No error if a coercion would otherwise occur. 
mem::transmute::(main); diff --git a/src/test/compile-fail/tuple-arity-mismatch.rs b/src/test/compile-fail/tuple-arity-mismatch.rs index 8ad9ca50e3..e62255a4e7 100644 --- a/src/test/compile-fail/tuple-arity-mismatch.rs +++ b/src/test/compile-fail/tuple-arity-mismatch.rs @@ -15,15 +15,13 @@ fn first((value, _): (isize, f64)) -> isize { value } fn main() { let y = first ((1,2.0,3)); //~^ ERROR mismatched types - //~| expected `(isize, f64)` - //~| found `(isize, f64, _)` - //~| expected a tuple with 2 elements - //~| found one with 3 elements + //~| expected type `(isize, f64)` + //~| found type `(isize, f64, _)` + //~| expected a tuple with 2 elements, found one with 3 elements let y = first ((1,)); //~^ ERROR mismatched types - //~| expected `(isize, f64)` - //~| found `(isize,)` - //~| expected a tuple with 2 elements - //~| found one with 1 elements + //~| expected type `(isize, f64)` + //~| found type `(isize,)` + //~| expected a tuple with 2 elements, found one with 1 elements } diff --git a/src/test/compile-fail/tutorial-suffix-inference-test.rs b/src/test/compile-fail/tutorial-suffix-inference-test.rs index 99d6437c02..dadf7eb91d 100644 --- a/src/test/compile-fail/tutorial-suffix-inference-test.rs +++ b/src/test/compile-fail/tutorial-suffix-inference-test.rs @@ -18,12 +18,10 @@ fn main() { identity_u8(x); // after this, `x` is assumed to have type `u8` identity_u16(x); //~^ ERROR mismatched types - //~| expected `u16` - //~| found `u8` + //~| expected u16, found u8 identity_u16(y); //~^ ERROR mismatched types - //~| expected `u16` - //~| found `i32` + //~| expected u16, found i32 let a = 3; @@ -32,6 +30,5 @@ fn main() { identity_i(a); // ok identity_u16(a); //~^ ERROR mismatched types - //~| expected `u16` - //~| found `isize` + //~| expected u16, found isize } diff --git a/src/test/compile-fail/type-mismatch-multiple.rs b/src/test/compile-fail/type-mismatch-multiple.rs index dd8f54cdab..0f174d99fe 100644 --- a/src/test/compile-fail/type-mismatch-multiple.rs +++ b/src/test/compile-fail/type-mismatch-multiple.rs @@ -12,11 +12,9 @@ fn main() { let a: bool = 1; let b: i32 = true; } //~^ ERROR mismatched types -//~| expected `bool` -//~| found `_` -//~| expected bool -//~| found integral variable +//~| expected type `bool` +//~| found type `_` +//~| expected bool, found integral variable //~| ERROR mismatched types -//~| expected `i32` -//~| found `bool` +//~| expected i32, found bool diff --git a/src/test/compile-fail/type-mismatch-same-crate-name.rs b/src/test/compile-fail/type-mismatch-same-crate-name.rs index 014fa35c30..e74acaa71b 100644 --- a/src/test/compile-fail/type-mismatch-same-crate-name.rs +++ b/src/test/compile-fail/type-mismatch-same-crate-name.rs @@ -23,11 +23,17 @@ fn main() { let bar2 = {extern crate crate_a2 as a; a::bar()}; { extern crate crate_a1 as a; - a::try_foo(foo2); //~ ERROR mismatched types - //~^ HELP run - //~^^ NOTE Perhaps two different versions of crate `crate_a1` - a::try_bar(bar2); //~ ERROR mismatched types - //~^ HELP run - //~^^ NOTE Perhaps two different versions of crate `crate_a1` + a::try_foo(foo2); + //~^ ERROR mismatched types + //~| Perhaps two different versions of crate `crate_a1` + //~| expected struct `main::a::Foo` + //~| expected type `main::a::Foo` + //~| found type `main::a::Foo` + a::try_bar(bar2); + //~^ ERROR mismatched types + //~| Perhaps two different versions of crate `crate_a1` + //~| expected trait `main::a::Bar` + //~| expected type `Box` + //~| found type `Box` } } diff --git 
a/src/test/compile-fail/type-parameter-invalid-lint.rs b/src/test/compile-fail/type-parameter-invalid-lint.rs index 9291329fac..b99ef92580 100644 --- a/src/test/compile-fail/type-parameter-invalid-lint.rs +++ b/src/test/compile-fail/type-parameter-invalid-lint.rs @@ -14,4 +14,10 @@ fn avg(_: T) {} //~^ ERROR defaults for type parameters are only allowed //~| WARNING hard error + +struct S(T); +impl S {} +//~^ ERROR defaults for type parameters are only allowed +//~| WARNING hard error + fn main() {} diff --git a/src/test/compile-fail/type-parameter-names.rs b/src/test/compile-fail/type-parameter-names.rs index 408bf72e97..11a2fc2665 100644 --- a/src/test/compile-fail/type-parameter-names.rs +++ b/src/test/compile-fail/type-parameter-names.rs @@ -14,10 +14,9 @@ fn foo(x: Foo) -> Bar { x //~^ ERROR mismatched types -//~| expected `Bar` -//~| found `Foo` -//~| expected type parameter -//~| found a different type parameter +//~| expected type `Bar` +//~| found type `Foo` +//~| expected type parameter, found a different type parameter } fn main() {} diff --git a/src/test/compile-fail/type-params-in-different-spaces-1.rs b/src/test/compile-fail/type-params-in-different-spaces-1.rs index 155b835bbc..26eac6adde 100644 --- a/src/test/compile-fail/type-params-in-different-spaces-1.rs +++ b/src/test/compile-fail/type-params-in-different-spaces-1.rs @@ -13,10 +13,9 @@ use std::ops::Add; trait BrokenAdd: Copy + Add { fn broken_add(&self, rhs: T) -> Self { *self + rhs //~ ERROR mismatched types - //~| expected `Self` - //~| found `T` - //~| expected Self - //~| found type parameter + //~| expected type `Self` + //~| found type `T` + //~| expected Self, found type parameter } } diff --git a/src/test/compile-fail/typeck-default-trait-impl-cross-crate-coherence.rs b/src/test/compile-fail/typeck-default-trait-impl-cross-crate-coherence.rs index b1febae768..b918b0dde4 100644 --- a/src/test/compile-fail/typeck-default-trait-impl-cross-crate-coherence.rs +++ b/src/test/compile-fail/typeck-default-trait-impl-cross-crate-coherence.rs @@ -8,14 +8,14 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// aux-build:typeck_default_trait_impl_cross_crate_coherence_lib.rs +// aux-build:tdticc_coherence_lib.rs // Test that we do not consider associated types to be sendable without // some applicable trait bound (and we don't ICE). 
#![feature(optin_builtin_traits)] -extern crate typeck_default_trait_impl_cross_crate_coherence_lib as lib; +extern crate tdticc_coherence_lib as lib; use lib::DefaultedTrait; diff --git a/src/test/compile-fail/typeck_type_placeholder_mismatch.rs b/src/test/compile-fail/typeck_type_placeholder_mismatch.rs index 1daea8f915..91e3c38322 100644 --- a/src/test/compile-fail/typeck_type_placeholder_mismatch.rs +++ b/src/test/compile-fail/typeck_type_placeholder_mismatch.rs @@ -22,18 +22,16 @@ pub fn main() { fn test1() { let x: Foo<_> = Bar::(PhantomData); //~^ ERROR mismatched types - //~| expected `Foo<_>` - //~| found `Bar` - //~| expected struct `Foo` - //~| found struct `Bar` + //~| expected type `Foo<_>` + //~| found type `Bar` + //~| expected struct `Foo`, found struct `Bar` let y: Foo = x; } fn test2() { let x: Foo<_> = Bar::(PhantomData); //~^ ERROR mismatched types - //~| expected `Foo<_>` - //~| found `Bar` - //~| expected struct `Foo` - //~| found struct `Bar` + //~| expected type `Foo<_>` + //~| found type `Bar` + //~| expected struct `Foo`, found struct `Bar` } diff --git a/src/test/compile-fail/ufcs-explicit-self-bad.rs b/src/test/compile-fail/ufcs-explicit-self-bad.rs index e54a7623cb..f14a3505cd 100644 --- a/src/test/compile-fail/ufcs-explicit-self-bad.rs +++ b/src/test/compile-fail/ufcs-explicit-self-bad.rs @@ -45,12 +45,12 @@ impl<'a, T> SomeTrait for &'a Bar { //~^ ERROR mismatched types fn dummy3(self: &&Bar) {} //~^ ERROR mismatched types - //~| expected `&&'a Bar` - //~| found `&&Bar` + //~| expected type `&&'a Bar` + //~| found type `&&Bar` //~| lifetime mismatch //~| ERROR mismatched types - //~| expected `&&'a Bar` - //~| found `&&Bar` + //~| expected type `&&'a Bar` + //~| found type `&&Bar` //~| lifetime mismatch } diff --git a/src/test/compile-fail/unsized6.rs b/src/test/compile-fail/unsized6.rs index 663cb0a171..d40c12f67a 100644 --- a/src/test/compile-fail/unsized6.rs +++ b/src/test/compile-fail/unsized6.rs @@ -14,9 +14,9 @@ trait T {} fn f1(x: &X) { let _: X; // <-- this is OK, no bindings created, no initializer. - let _: (isize, (X, isize)); // same + let _: (isize, (X, isize)); let y: X; //~ERROR `X: std::marker::Sized` is not satisfied - let y: (isize, (X, isize)); //~ERROR `X: std::marker::Sized` is not satisfied + let y: (isize, (X, usize)); //~ERROR `X: std::marker::Sized` is not satisfied } fn f2(x: &X) { let y: X; //~ERROR `X: std::marker::Sized` is not satisfied diff --git a/src/test/compile-fail/use-keyword.rs b/src/test/compile-fail/use-keyword.rs new file mode 100644 index 0000000000..040db02556 --- /dev/null +++ b/src/test/compile-fail/use-keyword.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Check that imports with nakes super and self don't fail during parsing +// FIXME: this shouldn't fail during name resolution either + +mod a { + mod b { + use self as A; //~ ERROR `self` imports are only allowed within a { } list + //~^ ERROR unresolved import `self`. There is no `self` in the crate root + use super as B; //~ ERROR unresolved import `super`. There is no `super` in the crate root + use super::{self as C}; //~ERROR unresolved import `super`. 
There is no `super` in the crate + } +} + +fn main() {} diff --git a/src/test/compile-fail/use-mod-2.rs b/src/test/compile-fail/use-mod-2.rs index e98224bee0..f2384912cd 100644 --- a/src/test/compile-fail/use-mod-2.rs +++ b/src/test/compile-fail/use-mod-2.rs @@ -10,10 +10,10 @@ mod foo { use self::{self}; - //~^ ERROR unresolved import `self`. There is no `self` in `???` + //~^ ERROR unresolved import `self`. There is no `self` in the crate root use super::{self}; - //~^ ERROR unresolved import `super`. There is no `super` in `???` + //~^ ERROR unresolved import `super`. There is no `super` in the crate root } fn main() {} diff --git a/src/test/parse-fail/use-mod-4.rs b/src/test/compile-fail/use-mod-4.rs similarity index 80% rename from src/test/parse-fail/use-mod-4.rs rename to src/test/compile-fail/use-mod-4.rs index bcafa4e9fd..146d37f41d 100644 --- a/src/test/parse-fail/use-mod-4.rs +++ b/src/test/compile-fail/use-mod-4.rs @@ -8,9 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// compile-flags: -Z parse-only - -use foo::self; -//~^ ERROR expected identifier, found keyword `self` +use foo::self; //~ ERROR unresolved import `foo::self` +//~^ ERROR `self` imports are only allowed within a { } list fn main() {} diff --git a/src/test/compile-fail/variadic-ffi-3.rs b/src/test/compile-fail/variadic-ffi-3.rs index 6e60562da6..b43159b0d9 100644 --- a/src/test/compile-fail/variadic-ffi-3.rs +++ b/src/test/compile-fail/variadic-ffi-3.rs @@ -21,17 +21,15 @@ fn main() { let x: unsafe extern "C" fn(f: isize, x: u8) = foo; //~^ ERROR: mismatched types - //~| expected `unsafe extern "C" fn(isize, u8)` - //~| found `unsafe extern "C" fn(isize, u8, ...) {foo}` - //~| expected non-variadic fn - //~| found variadic function + //~| expected type `unsafe extern "C" fn(isize, u8)` + //~| found type `unsafe extern "C" fn(isize, u8, ...) {foo}` + //~| NOTE: expected non-variadic fn, found variadic function let y: extern "C" fn(f: isize, x: u8, ...) = bar; //~^ ERROR: mismatched types - //~| expected `extern "C" fn(isize, u8, ...)` - //~| found `extern "C" fn(isize, u8) {bar}` - //~| expected variadic fn - //~| found non-variadic function + //~| expected type `extern "C" fn(isize, u8, ...)` + //~| found type `extern "C" fn(isize, u8) {bar}` + //~| NOTE: expected variadic fn, found non-variadic function foo(1, 2, 3f32); //~ ERROR: can't pass an `f32` to variadic function, cast to `c_double` foo(1, 2, true); //~ ERROR: can't pass `bool` to variadic function, cast to `c_int` diff --git a/src/test/compile-fail/variance-unused-type-param.rs b/src/test/compile-fail/variance-unused-type-param.rs index f7fed32cb5..862d842d62 100644 --- a/src/test/compile-fail/variance-unused-type-param.rs +++ b/src/test/compile-fail/variance-unused-type-param.rs @@ -16,18 +16,15 @@ struct SomeStruct { x: u32 } //~^ ERROR parameter `A` is never used //~| HELP PhantomData -//~| HELP run `rustc --explain E0392` to see a detailed explanation enum SomeEnum { Nothing } //~^ ERROR parameter `A` is never used //~| HELP PhantomData -//~| HELP run `rustc --explain E0392` to see a detailed explanation // Here T might *appear* used, but in fact it isn't. 
enum ListCell { //~^ ERROR parameter `T` is never used //~| HELP PhantomData -//~| HELP run `rustc --explain E0392` to see a detailed explanation Cons(Box>), Nil } diff --git a/src/test/compile-fail/variant-used-as-type.rs b/src/test/compile-fail/variant-used-as-type.rs new file mode 100644 index 0000000000..73defa6eef --- /dev/null +++ b/src/test/compile-fail/variant-used-as-type.rs @@ -0,0 +1,30 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test error message when enum variants are used as types + + +// issue 21225 +enum Ty { + A, + B(Ty::A), + //~^ ERROR: found value `Ty::A` used as a type +} + + +// issue 19197 +enum E { + A +} + +impl E::A {} +//~^ ERROR: found value `E::A` used as a type + +fn main() {} diff --git a/src/test/compile-fail/vec-macro-with-comma-only.rs b/src/test/compile-fail/vec-macro-with-comma-only.rs index 8c8e789cd9..346cf1ec55 100644 --- a/src/test/compile-fail/vec-macro-with-comma-only.rs +++ b/src/test/compile-fail/vec-macro-with-comma-only.rs @@ -9,5 +9,5 @@ // except according to those terms. pub fn main() { - vec!(,); //~ ERROR unexpected token + vec!(,); //~ ERROR expected expression, found `,` } diff --git a/src/test/auxiliary/cross_crate_debuginfo_type_uniquing.rs b/src/test/debuginfo/auxiliary/cross_crate_debuginfo_type_uniquing.rs similarity index 100% rename from src/test/auxiliary/cross_crate_debuginfo_type_uniquing.rs rename to src/test/debuginfo/auxiliary/cross_crate_debuginfo_type_uniquing.rs diff --git a/src/test/auxiliary/cross_crate_spans.rs b/src/test/debuginfo/auxiliary/cross_crate_spans.rs similarity index 100% rename from src/test/auxiliary/cross_crate_spans.rs rename to src/test/debuginfo/auxiliary/cross_crate_spans.rs diff --git a/src/test/auxiliary/issue13213aux.rs b/src/test/debuginfo/auxiliary/issue13213aux.rs similarity index 100% rename from src/test/auxiliary/issue13213aux.rs rename to src/test/debuginfo/auxiliary/issue13213aux.rs diff --git a/src/test/debuginfo/c-style-enum.rs b/src/test/debuginfo/c-style-enum.rs index 4eec26d335..dbd336d2dc 100644 --- a/src/test/debuginfo/c-style-enum.rs +++ b/src/test/debuginfo/c-style-enum.rs @@ -157,7 +157,8 @@ fn main() { zzz(); // #break - let a = SINGLE_VARIANT; + // Borrow to avoid an eager load of the constant value in the static. + let a = &SINGLE_VARIANT; let a = unsafe { AUTO_ONE }; let a = unsafe { MANUAL_ONE }; } diff --git a/src/test/debuginfo/function-prologue-stepping-no-stack-check.rs b/src/test/debuginfo/function-prologue-stepping-no-stack-check.rs index b5b6ca7572..f0ecda9299 100644 --- a/src/test/debuginfo/function-prologue-stepping-no-stack-check.rs +++ b/src/test/debuginfo/function-prologue-stepping-no-stack-check.rs @@ -247,10 +247,11 @@ // lldb-command:continue #![allow(dead_code, unused_assignments, unused_variables)] -#![feature(omit_gdb_pretty_printer_section)] +#![feature(omit_gdb_pretty_printer_section, rustc_attrs)] #![omit_gdb_pretty_printer_section] #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. fn immediate_args(a: isize, b: bool, c: f64) { println!(""); } @@ -267,43 +268,51 @@ struct BigStruct { } #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. 
fn non_immediate_args(a: BigStruct, b: BigStruct) { println!(""); } #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. fn binding(a: i64, b: u64, c: f64) { let x = 0; println!(""); } #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. fn assignment(mut a: u64, b: u64, c: f64) { a = b; println!(""); } #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. fn function_call(x: u64, y: u64, z: f64) { println!("Hi!") } #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. fn identifier(x: u64, y: u64, z: f64) -> u64 { x } #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. fn return_expr(x: u64, y: u64, z: f64) -> u64 { return x; } #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. fn arithmetic_expr(x: u64, y: u64, z: f64) -> u64 { x + y } #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. fn if_expr(x: u64, y: u64, z: f64) -> u64 { if x + y < 1000 { x @@ -313,6 +322,7 @@ fn if_expr(x: u64, y: u64, z: f64) -> u64 { } #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. fn while_expr(mut x: u64, y: u64, z: u64) -> u64 { while x + y < 1000 { x += z @@ -321,6 +331,7 @@ fn while_expr(mut x: u64, y: u64, z: u64) -> u64 { } #[no_stack_check] +#[rustc_no_mir] // FIXME(#32949) MIR debuginfo shadows arguments with uninit vars. fn loop_expr(mut x: u64, y: u64, z: u64) -> u64 { loop { x += z; diff --git a/src/test/debuginfo/no-debug-attribute.rs b/src/test/debuginfo/no-debug-attribute.rs index 6bdd68d5e2..15f2eae76c 100644 --- a/src/test/debuginfo/no-debug-attribute.rs +++ b/src/test/debuginfo/no-debug-attribute.rs @@ -27,15 +27,18 @@ #![feature(omit_gdb_pretty_printer_section)] #![omit_gdb_pretty_printer_section] +#[inline(never)] +fn id(x: T) -> T {x} + fn function_with_debuginfo() { let abc = 10_usize; - return (); // #break + id(abc); // #break } #[no_debug] fn function_without_debuginfo() { let abc = -57i32; - return (); // #break + id(abc); // #break } fn main() { diff --git a/src/test/debuginfo/struct-namespace.rs b/src/test/debuginfo/struct-namespace.rs new file mode 100644 index 0000000000..3fd4cf57b2 --- /dev/null +++ b/src/test/debuginfo/struct-namespace.rs @@ -0,0 +1,70 @@ +// Copyright 2013-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// ignore-gdb +// compile-flags:-g +// min-lldb-version: 310 + +// Check that structs get placed in the correct namespace + +// lldb-command:run +// lldb-command:p struct1 +// lldb-check:(struct_namespace::Struct1) $0 = [...] +// lldb-command:p struct2 +// lldb-check:(struct_namespace::Struct2) $1 = [...] + +// lldb-command:p mod1_struct1 +// lldb-check:(struct_namespace::mod1::Struct1) $2 = [...] +// lldb-command:p mod1_struct2 +// lldb-check:(struct_namespace::mod1::Struct2) $3 = [...] 
+ +#![allow(unused_variables)] +#![allow(dead_code)] +#![feature(omit_gdb_pretty_printer_section)] +#![omit_gdb_pretty_printer_section] + +struct Struct1 { + a: u32, + b: u64, +} + +struct Struct2(u32); + +mod mod1 { + + pub struct Struct1 { + pub a: u32, + pub b: u64, + } + + pub struct Struct2(pub u32); +} + + +fn main() { + let struct1 = Struct1 { + a: 0, + b: 1, + }; + + let struct2 = Struct2(2); + + let mod1_struct1 = mod1::Struct1 { + a: 3, + b: 4, + }; + + let mod1_struct2 = mod1::Struct2(5); + + zzz(); // #break +} + +#[inline(never)] +fn zzz() {()} diff --git a/src/libsyntax/owned_slice.rs b/src/test/incremental/callee_caller_cross_crate/auxiliary/a.rs similarity index 70% rename from src/libsyntax/owned_slice.rs rename to src/test/incremental/callee_caller_cross_crate/auxiliary/a.rs index 33a3d57859..d802c9a835 100644 --- a/src/libsyntax/owned_slice.rs +++ b/src/test/incremental/callee_caller_cross_crate/auxiliary/a.rs @@ -8,7 +8,17 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -/// A non-growable owned slice. -#[unstable(feature = "rustc_private", issue = "0")] -#[rustc_deprecated(since = "1.7.0", reason = "use `ptr::P<[T]>` instead")] -pub type OwnedSlice = ::ptr::P<[T]>; +#![crate_type="rlib"] + +#[cfg(rpass1)] +pub fn function0(x: u32) -> u32 { + x +} + +#[cfg(rpass2)] +pub fn function0(x: i32) -> i32 { + x +} + +pub fn function1(x: u32) { +} diff --git a/src/librustc_data_structures/obligation_forest/tree_index.rs b/src/test/incremental/callee_caller_cross_crate/b.rs similarity index 59% rename from src/librustc_data_structures/obligation_forest/tree_index.rs rename to src/test/incremental/callee_caller_cross_crate/b.rs index 499448634a..e81f828beb 100644 --- a/src/librustc_data_structures/obligation_forest/tree_index.rs +++ b/src/test/incremental/callee_caller_cross_crate/b.rs @@ -8,20 +8,21 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -use std::u32; +// aux-build:a.rs +// revisions:rpass1 rpass2 -#[derive(Copy, Clone, Debug, PartialEq, Eq)] -pub struct TreeIndex { - index: u32, -} +#![feature(rustc_attrs)] + +extern crate a; -impl TreeIndex { - pub fn new(value: usize) -> TreeIndex { - assert!(value < (u32::MAX as usize)); - TreeIndex { index: value as u32 } - } +#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")] +pub fn call_function0() { + a::function0(77); +} - pub fn get(self) -> usize { - self.index as usize - } +#[rustc_clean(label="TypeckItemBody", cfg="rpass2")] +pub fn call_function1() { + a::function1(77); } + +pub fn main() { } diff --git a/src/test/incremental/type_alias_cross_crate/auxiliary/a.rs b/src/test/incremental/type_alias_cross_crate/auxiliary/a.rs new file mode 100644 index 0000000000..2494dca050 --- /dev/null +++ b/src/test/incremental/type_alias_cross_crate/auxiliary/a.rs @@ -0,0 +1,21 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
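// Illustrative sketch (not part of the patch): the incremental-compilation tests
// above and below share one pattern. The crate is compiled twice under the
// revisions `rpass1` and `rpass2`; items gated on #[cfg(rpass1)] / #[cfg(rpass2)]
// differ between the two runs, and dependent functions carry #[rustc_dirty] or
// #[rustc_clean] to assert whether their cached type-checking results must be
// recomputed. A minimal single-crate illustration with hypothetical names (the
// real tests split the changing item into an auxiliary crate):
//
//   // revisions:rpass1 rpass2
//   #![feature(rustc_attrs)]
//
//   #[cfg(rpass1)] pub fn changed(x: u32) -> u32 { x }
//   #[cfg(rpass2)] pub fn changed(x: i32) -> i32 { x }  // signature changes
//
//   #[rustc_dirty(label="TypeckItemBody", cfg="rpass2")]
//   pub fn caller() { changed(77); }   // must be re-typechecked in rpass2
//
//   #[rustc_clean(label="TypeckItemBody", cfg="rpass2")]
//   pub fn untouched() { }             // unaffected, stays clean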
+ +#![crate_type="rlib"] + +#[cfg(rpass1)] +pub type X = u32; + +#[cfg(rpass2)] +pub type X = i32; + +pub type Y = char; + +pub fn foo() { } diff --git a/src/test/incremental/type_alias_cross_crate/b.rs b/src/test/incremental/type_alias_cross_crate/b.rs new file mode 100644 index 0000000000..b4e9b76010 --- /dev/null +++ b/src/test/incremental/type_alias_cross_crate/b.rs @@ -0,0 +1,29 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// aux-build:a.rs +// revisions:rpass1 rpass2 + +#![feature(rustc_attrs)] + +extern crate a; + +#[rustc_dirty(label="TypeckItemBody", cfg="rpass2")] +pub fn use_X() -> u32 { + let x: a::X = 22; + x as u32 +} + +#[rustc_clean(label="TypeckItemBody", cfg="rpass2")] +pub fn use_Y() { + let x: a::Y = 'c'; +} + +pub fn main() { } diff --git a/src/test/parse-fail/bad-pointer-type.rs b/src/test/parse-fail/bad-pointer-type.rs new file mode 100644 index 0000000000..cdb4d16fed --- /dev/null +++ b/src/test/parse-fail/bad-pointer-type.rs @@ -0,0 +1,15 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only + +fn foo(_: *()) { + //~^ expected mut or const in raw pointer type (use `*mut T` or `*const T` as appropriate) +} diff --git a/src/test/parse-fail/column-offset-1-based.rs b/src/test/parse-fail/column-offset-1-based.rs index bcadd1f39f..8caf2e0c0a 100644 --- a/src/test/parse-fail/column-offset-1-based.rs +++ b/src/test/parse-fail/column-offset-1-based.rs @@ -10,4 +10,4 @@ // compile-flags: -Z parse-only -# //~ ERROR 13:1: 13:2 error: expected `[`, found `` +# //~ ERROR 13:1: 13:2: expected `[`, found `` diff --git a/src/test/parse-fail/extern-no-fn.rs b/src/test/parse-fail/extern-no-fn.rs index bf5cbe0c45..acf7187cf4 100644 --- a/src/test/parse-fail/extern-no-fn.rs +++ b/src/test/parse-fail/extern-no-fn.rs @@ -11,7 +11,7 @@ // compile-flags: -Z parse-only extern { - f(); //~ ERROR expected one of `fn`, `pub`, `static`, `unsafe`, or `}`, found `f` + f(); //~ ERROR expected one of `fn`, `pub`, `static`, or `}`, found `f` } fn main() { diff --git a/src/test/parse-fail/issue-10412.rs b/src/test/parse-fail/issue-10412.rs index b75e7b12bb..fc2598d1e9 100644 --- a/src/test/parse-fail/issue-10412.rs +++ b/src/test/parse-fail/issue-10412.rs @@ -11,17 +11,17 @@ // compile-flags: -Z parse-only -Z continue-parse-after-error -trait Serializable<'self, T> { //~ ERROR no longer a special lifetime - fn serialize(val : &'self T) -> Vec ; //~ ERROR no longer a special lifetime - fn deserialize(repr : &[u8]) -> &'self T; //~ ERROR no longer a special lifetime +trait Serializable<'self, T> { //~ ERROR lifetimes cannot use keyword names + fn serialize(val : &'self T) -> Vec ; //~ ERROR lifetimes cannot use keyword names + fn deserialize(repr : &[u8]) -> &'self T; //~ ERROR lifetimes cannot use keyword names } -impl<'self> Serializable for &'self str { //~ ERROR no longer a special lifetime - //~^ ERROR no longer a special lifetime - fn serialize(val 
: &'self str) -> Vec { //~ ERROR no longer a special lifetime +impl<'self> Serializable for &'self str { //~ ERROR lifetimes cannot use keyword names + //~^ ERROR lifetimes cannot use keyword names + fn serialize(val : &'self str) -> Vec { //~ ERROR lifetimes cannot use keyword names vec!(1) } - fn deserialize(repr: &[u8]) -> &'self str { //~ ERROR no longer a special lifetime + fn deserialize(repr: &[u8]) -> &'self str { //~ ERROR lifetimes cannot use keyword names "hi" } } diff --git a/src/test/parse-fail/issue-14303-path.rs b/src/test/parse-fail/issue-14303-path.rs index 7c30b5f262..431a917c2d 100644 --- a/src/test/parse-fail/issue-14303-path.rs +++ b/src/test/parse-fail/issue-14303-path.rs @@ -12,4 +12,4 @@ fn bar<'a, T>(x: mymodule::X<'a, T, 'b, 'c>) {} //~^ ERROR lifetime parameters must be declared prior to type parameters -//~^^ ERROR unexpected token +//~^^ ERROR expected pattern, found `'c` diff --git a/src/test/parse-fail/issue-30318.rs b/src/test/parse-fail/issue-30318.rs new file mode 100644 index 0000000000..9ea0bb7828 --- /dev/null +++ b/src/test/parse-fail/issue-30318.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only + +fn foo() { } + +//! Misplaced comment... +//~^ ERROR expected outer doc comment +//~| NOTE inner doc comments like this (starting with `//!` or `/*!`) can only appear before items + +fn main() { } diff --git a/src/test/parse-fail/issue-32214.rs b/src/test/parse-fail/issue-32214.rs new file mode 100644 index 0000000000..3ba59c8ee9 --- /dev/null +++ b/src/test/parse-fail/issue-32214.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only -Z continue-parse-after-error + +pub fn test >() { + //~^ ERROR expected `=`, found `>` +} + +fn main() { } diff --git a/src/test/parse-fail/issue-32505.rs b/src/test/parse-fail/issue-32505.rs new file mode 100644 index 0000000000..246941ff25 --- /dev/null +++ b/src/test/parse-fail/issue-32505.rs @@ -0,0 +1,17 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// compile-flags: -Z parse-only -Z continue-parse-after-error + +pub fn test() { + foo(|_|) //~ ERROR expected expression, found `)` +} + +fn main() { } diff --git a/src/test/parse-fail/lifetime-obsoleted-self.rs b/src/test/parse-fail/issue-33262.rs similarity index 71% rename from src/test/parse-fail/lifetime-obsoleted-self.rs rename to src/test/parse-fail/issue-33262.rs index e8b76750eb..d6bbfdc59f 100644 --- a/src/test/parse-fail/lifetime-obsoleted-self.rs +++ b/src/test/parse-fail/issue-33262.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -10,6 +10,9 @@ // compile-flags: -Z parse-only -fn baz(a: &'self isize) { } //~ ERROR invalid lifetime name: 'self is no longer a special lifetime +// Issue #33262 -fn main() { } +pub fn main() { + for i in 0..a as { } + //~^ ERROR expected type, found `{` +} diff --git a/src/test/parse-fail/issue-33413.rs b/src/test/parse-fail/issue-33413.rs new file mode 100644 index 0000000000..699af8ca7a --- /dev/null +++ b/src/test/parse-fail/issue-33413.rs @@ -0,0 +1,16 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only + +impl S { + fn f(*, a: u8) -> u8 {} //~ ERROR expected pattern, found `*` + //~^ ERROR expected one of `)`, `-`, `box`, `false`, `mut`, `ref`, or `true`, found `*` +} diff --git a/src/test/parse-fail/keyword-abstract.rs b/src/test/parse-fail/keyword-abstract.rs index bd3fbbe79a..2db5a5c583 100644 --- a/src/test/parse-fail/keyword-abstract.rs +++ b/src/test/parse-fail/keyword-abstract.rs @@ -11,5 +11,5 @@ // compile-flags: -Z parse-only fn main() { - let abstract = (); //~ ERROR `abstract` is a reserved keyword + let abstract = (); //~ ERROR expected pattern, found reserved keyword `abstract` } diff --git a/src/test/parse-fail/keyword-as-as-identifier.rs b/src/test/parse-fail/keyword-as-as-identifier.rs index bc05a7c4f2..c6070c456e 100644 --- a/src/test/parse-fail/keyword-as-as-identifier.rs +++ b/src/test/parse-fail/keyword-as-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py as' fn main() { - let as = "foo"; //~ error: ident + let as = "foo"; //~ error: expected pattern, found keyword `as` } diff --git a/src/test/parse-fail/keyword-do-as-identifier.rs b/src/test/parse-fail/keyword-box-as-identifier.rs similarity index 78% rename from src/test/parse-fail/keyword-do-as-identifier.rs rename to src/test/parse-fail/keyword-box-as-identifier.rs index 5cc14dfef0..b5abe14dbe 100644 --- a/src/test/parse-fail/keyword-do-as-identifier.rs +++ b/src/test/parse-fail/keyword-box-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -11,5 +11,5 @@ // compile-flags: -Z parse-only fn main() { - let do = "bar"; //~ error: ident + let box = "foo"; //~ error: expected pattern, found `=` } diff --git a/src/test/parse-fail/keyword-break-as-identifier.rs b/src/test/parse-fail/keyword-break-as-identifier.rs index bd7527f399..65c775fa1b 100644 --- a/src/test/parse-fail/keyword-break-as-identifier.rs +++ b/src/test/parse-fail/keyword-break-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py break' fn main() { - let break = "foo"; //~ error: ident + let break = "foo"; //~ error: expected pattern, found keyword `break` } diff --git a/src/test/parse-fail/keyword-const-as-identifier.rs b/src/test/parse-fail/keyword-const-as-identifier.rs new file mode 100644 index 0000000000..6ecf14957e --- /dev/null +++ b/src/test/parse-fail/keyword-const-as-identifier.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only + +// This file was auto-generated using 'src/etc/generate-keyword-tests.py const' + +fn main() { + let const = "foo"; //~ error: expected pattern, found keyword `const` +} diff --git a/src/test/parse-fail/keyword-continue-as-identifier.rs b/src/test/parse-fail/keyword-continue-as-identifier.rs new file mode 100644 index 0000000000..87377ac836 --- /dev/null +++ b/src/test/parse-fail/keyword-continue-as-identifier.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only + +// This file was auto-generated using 'src/etc/generate-keyword-tests.py continue' + +fn main() { + let continue = "foo"; //~ error: expected pattern, found keyword `continue` +} diff --git a/src/test/parse-fail/keyword-crate-as-identifier.rs b/src/test/parse-fail/keyword-crate-as-identifier.rs new file mode 100644 index 0000000000..8a914ca7b1 --- /dev/null +++ b/src/test/parse-fail/keyword-crate-as-identifier.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// compile-flags: -Z parse-only + +// This file was auto-generated using 'src/etc/generate-keyword-tests.py crate' + +fn main() { + let crate = "foo"; //~ error: expected pattern, found keyword `crate` +} diff --git a/src/test/parse-fail/keyword-else-as-identifier.rs b/src/test/parse-fail/keyword-else-as-identifier.rs index 24bd18a738..6878f7fea0 100644 --- a/src/test/parse-fail/keyword-else-as-identifier.rs +++ b/src/test/parse-fail/keyword-else-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py else' fn main() { - let else = "foo"; //~ error: ident + let else = "foo"; //~ error: expected pattern, found keyword `else` } diff --git a/src/test/parse-fail/keyword-enum-as-identifier.rs b/src/test/parse-fail/keyword-enum-as-identifier.rs index e474527041..042a02d79e 100644 --- a/src/test/parse-fail/keyword-enum-as-identifier.rs +++ b/src/test/parse-fail/keyword-enum-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py enum' fn main() { - let enum = "foo"; //~ error: ident + let enum = "foo"; //~ error: expected pattern, found keyword `enum` } diff --git a/src/test/parse-fail/keyword-extern-as-identifier.rs b/src/test/parse-fail/keyword-extern-as-identifier.rs index 579cd9f916..3bbe24ed56 100644 --- a/src/test/parse-fail/keyword-extern-as-identifier.rs +++ b/src/test/parse-fail/keyword-extern-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py extern' fn main() { - let extern = "foo"; //~ error: ident + let extern = "foo"; //~ error: expected pattern, found keyword `extern` } diff --git a/src/test/parse-fail/keyword-final.rs b/src/test/parse-fail/keyword-final.rs index 4b06312d09..be29a73944 100644 --- a/src/test/parse-fail/keyword-final.rs +++ b/src/test/parse-fail/keyword-final.rs @@ -11,5 +11,5 @@ // compile-flags: -Z parse-only fn main() { - let final = (); //~ ERROR `final` is a reserved keyword + let final = (); //~ ERROR expected pattern, found reserved keyword `final` } diff --git a/src/test/parse-fail/keyword-fn-as-identifier.rs b/src/test/parse-fail/keyword-fn-as-identifier.rs index 0ace9ddf1f..0d454f67d1 100644 --- a/src/test/parse-fail/keyword-fn-as-identifier.rs +++ b/src/test/parse-fail/keyword-fn-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. 
// @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py fn' fn main() { - let fn = "foo"; //~ error: ident + let fn = "foo"; //~ error: expected pattern, found keyword `fn` } diff --git a/src/test/parse-fail/keyword-for-as-identifier.rs b/src/test/parse-fail/keyword-for-as-identifier.rs index 035c87b80b..d341669f72 100644 --- a/src/test/parse-fail/keyword-for-as-identifier.rs +++ b/src/test/parse-fail/keyword-for-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py for' fn main() { - let for = "foo"; //~ error: ident + let for = "foo"; //~ error: expected pattern, found keyword `for` } diff --git a/src/test/parse-fail/keyword-if-as-identifier.rs b/src/test/parse-fail/keyword-if-as-identifier.rs index 1aad0a780f..417e40425e 100644 --- a/src/test/parse-fail/keyword-if-as-identifier.rs +++ b/src/test/parse-fail/keyword-if-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py if' fn main() { - let if = "foo"; //~ error: ident + let if = "foo"; //~ error: expected pattern, found keyword `if` } diff --git a/src/test/parse-fail/keyword-impl-as-identifier.rs b/src/test/parse-fail/keyword-impl-as-identifier.rs index 585109505d..fe97c191f6 100644 --- a/src/test/parse-fail/keyword-impl-as-identifier.rs +++ b/src/test/parse-fail/keyword-impl-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py impl' fn main() { - let impl = "foo"; //~ error: ident + let impl = "foo"; //~ error: expected pattern, found keyword `impl` } diff --git a/src/test/parse-fail/keyword-priv-as-identifier.rs b/src/test/parse-fail/keyword-in-as-identifier.rs similarity index 78% rename from src/test/parse-fail/keyword-priv-as-identifier.rs rename to src/test/parse-fail/keyword-in-as-identifier.rs index e80feb66d6..c0f9396b98 100644 --- a/src/test/parse-fail/keyword-priv-as-identifier.rs +++ b/src/test/parse-fail/keyword-in-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. 
// @@ -10,8 +10,8 @@ // compile-flags: -Z parse-only -// This file was auto-generated using 'src/etc/generate-keyword-tests.py priv' +// This file was auto-generated using 'src/etc/generate-keyword-tests.py in' fn main() { - let priv = "foo"; //~ error: ident + let in = "foo"; //~ error: expected pattern, found keyword `in` } diff --git a/src/test/parse-fail/keyword-let-as-identifier.rs b/src/test/parse-fail/keyword-let-as-identifier.rs index 07bc790168..5d6dca78d7 100644 --- a/src/test/parse-fail/keyword-let-as-identifier.rs +++ b/src/test/parse-fail/keyword-let-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py let' fn main() { - let let = "foo"; //~ error: ident + let let = "foo"; //~ error: expected pattern, found keyword `let` } diff --git a/src/test/parse-fail/keyword-loop-as-identifier.rs b/src/test/parse-fail/keyword-loop-as-identifier.rs index 7b2b10a2d6..7c3d11d67f 100644 --- a/src/test/parse-fail/keyword-loop-as-identifier.rs +++ b/src/test/parse-fail/keyword-loop-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py loop' fn main() { - let loop = "foo"; //~ error: ident + let loop = "foo"; //~ error: expected pattern, found keyword `loop` } diff --git a/src/test/parse-fail/keyword-match-as-identifier.rs b/src/test/parse-fail/keyword-match-as-identifier.rs index 528873c179..7c727f44da 100644 --- a/src/test/parse-fail/keyword-match-as-identifier.rs +++ b/src/test/parse-fail/keyword-match-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py match' fn main() { - let match = "foo"; //~ error: ident + let match = "foo"; //~ error: expected pattern, found keyword `match` } diff --git a/src/test/parse-fail/keyword-mod-as-identifier.rs b/src/test/parse-fail/keyword-mod-as-identifier.rs index b29bcbc76c..85b4cc2e02 100644 --- a/src/test/parse-fail/keyword-mod-as-identifier.rs +++ b/src/test/parse-fail/keyword-mod-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py mod' fn main() { - let mod = "foo"; //~ error: ident + let mod = "foo"; //~ error: expected pattern, found keyword `mod` } diff --git a/src/test/parse-fail/keyword-move-as-identifier.rs b/src/test/parse-fail/keyword-move-as-identifier.rs new file mode 100644 index 0000000000..b785ac0058 --- /dev/null +++ b/src/test/parse-fail/keyword-move-as-identifier.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only + +// This file was auto-generated using 'src/etc/generate-keyword-tests.py move' + +fn main() { + let move = "foo"; //~ error: expected pattern, found keyword `move` +} diff --git a/src/test/parse-fail/keyword-mut-as-identifier.rs b/src/test/parse-fail/keyword-mut-as-identifier.rs index b637d07d8b..0aeca9b34a 100644 --- a/src/test/parse-fail/keyword-mut-as-identifier.rs +++ b/src/test/parse-fail/keyword-mut-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -10,8 +10,6 @@ // compile-flags: -Z parse-only -// This file was auto-generated using 'src/etc/generate-keyword-tests.py mut' - fn main() { - let mut = "foo"; //~ error: ident + let mut = "foo"; //~ error: expected identifier, found `=` } diff --git a/src/test/parse-fail/keyword-override.rs b/src/test/parse-fail/keyword-override.rs index 3f79e43718..60333762b3 100644 --- a/src/test/parse-fail/keyword-override.rs +++ b/src/test/parse-fail/keyword-override.rs @@ -11,5 +11,5 @@ // compile-flags: -Z parse-only fn main() { - let override = (); //~ ERROR `override` is a reserved keyword + let override = (); //~ ERROR expected pattern, found reserved keyword `override` } diff --git a/src/test/parse-fail/keyword-pub-as-identifier.rs b/src/test/parse-fail/keyword-pub-as-identifier.rs index 959bbfbf88..9233728697 100644 --- a/src/test/parse-fail/keyword-pub-as-identifier.rs +++ b/src/test/parse-fail/keyword-pub-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py pub' fn main() { - let pub = "foo"; //~ error: ident + let pub = "foo"; //~ error: expected pattern, found keyword `pub` } diff --git a/src/test/parse-fail/keyword-ref-as-identifier.rs b/src/test/parse-fail/keyword-ref-as-identifier.rs index 3db6d11c2e..a689c4eeea 100644 --- a/src/test/parse-fail/keyword-ref-as-identifier.rs +++ b/src/test/parse-fail/keyword-ref-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -10,8 +10,6 @@ // compile-flags: -Z parse-only -// This file was auto-generated using 'src/etc/generate-keyword-tests.py ref' - fn main() { - let ref = "foo"; //~ error: ident + let ref = "foo"; //~ error: expected identifier, found `=` } diff --git a/src/test/parse-fail/keyword-return-as-identifier.rs b/src/test/parse-fail/keyword-return-as-identifier.rs index df8aeba6d7..bcf7f13754 100644 --- a/src/test/parse-fail/keyword-return-as-identifier.rs +++ b/src/test/parse-fail/keyword-return-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. 
See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py return' fn main() { - let return = "foo"; //~ error: ident + let return = "foo"; //~ error: expected pattern, found keyword `return` } diff --git a/src/test/parse-fail/keyword-self-as-identifier.rs b/src/test/parse-fail/keyword-self-as-identifier.rs index 0e0d07ca6a..f8b93a1796 100644 --- a/src/test/parse-fail/keyword-self-as-identifier.rs +++ b/src/test/parse-fail/keyword-self-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -10,8 +10,6 @@ // compile-flags: -Z parse-only -// This file was auto-generated using 'src/etc/generate-keyword-tests.py self' - fn main() { - let self = "foo"; //~ error: ident + let Self = "foo"; //~ error: expected identifier, found keyword `Self` } diff --git a/src/test/parse-fail/keyword-static-as-identifier.rs b/src/test/parse-fail/keyword-static-as-identifier.rs index d5b529af4b..793262266a 100644 --- a/src/test/parse-fail/keyword-static-as-identifier.rs +++ b/src/test/parse-fail/keyword-static-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py static' fn main() { - let static = "foo"; //~ error: ident + let static = "foo"; //~ error: expected pattern, found keyword `static` } diff --git a/src/test/parse-fail/keyword-struct-as-identifier.rs b/src/test/parse-fail/keyword-struct-as-identifier.rs index 7d2160dfd2..591bd25db6 100644 --- a/src/test/parse-fail/keyword-struct-as-identifier.rs +++ b/src/test/parse-fail/keyword-struct-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py struct' fn main() { - let struct = "foo"; //~ error: ident + let struct = "foo"; //~ error: expected pattern, found keyword `struct` } diff --git a/src/test/parse-fail/keyword-super-as-identifier.rs b/src/test/parse-fail/keyword-super-as-identifier.rs index 4d86691886..a48683a4f5 100644 --- a/src/test/parse-fail/keyword-super-as-identifier.rs +++ b/src/test/parse-fail/keyword-super-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. 
// @@ -10,8 +10,6 @@ // compile-flags: -Z parse-only -// This file was auto-generated using 'src/etc/generate-keyword-tests.py super' - fn main() { - let super = "foo"; //~ error: ident + let super = "foo"; //~ error: expected identifier, found keyword `super` } diff --git a/src/test/parse-fail/keyword-trait-as-identifier.rs b/src/test/parse-fail/keyword-trait-as-identifier.rs index 7a8be0baa2..bdb5d264b0 100644 --- a/src/test/parse-fail/keyword-trait-as-identifier.rs +++ b/src/test/parse-fail/keyword-trait-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py trait' fn main() { - let trait = "foo"; //~ error: ident + let trait = "foo"; //~ error: expected pattern, found keyword `trait` } diff --git a/src/test/parse-fail/keyword-type-as-identifier.rs b/src/test/parse-fail/keyword-type-as-identifier.rs index c76bea89ab..2ba99d098d 100644 --- a/src/test/parse-fail/keyword-type-as-identifier.rs +++ b/src/test/parse-fail/keyword-type-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py type' fn main() { - let type = "foo"; //~ error: ident + let type = "foo"; //~ error: expected pattern, found keyword `type` } diff --git a/src/test/parse-fail/keyword-typeof.rs b/src/test/parse-fail/keyword-typeof.rs index 29ec4f5844..40e26bd375 100644 --- a/src/test/parse-fail/keyword-typeof.rs +++ b/src/test/parse-fail/keyword-typeof.rs @@ -11,5 +11,5 @@ // compile-flags: -Z parse-only fn main() { - let typeof = (); //~ ERROR `typeof` is a reserved keyword + let typeof = (); //~ ERROR expected pattern, found reserved keyword `typeof` } diff --git a/src/test/parse-fail/keyword-unsafe-as-identifier.rs b/src/test/parse-fail/keyword-unsafe-as-identifier.rs index d3c48c6ded..a72723e566 100644 --- a/src/test/parse-fail/keyword-unsafe-as-identifier.rs +++ b/src/test/parse-fail/keyword-unsafe-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py unsafe' fn main() { - let unsafe = "foo"; //~ error: ident + let unsafe = "foo"; //~ error: expected pattern, found keyword `unsafe` } diff --git a/src/test/parse-fail/keyword-use-as-identifier.rs b/src/test/parse-fail/keyword-use-as-identifier.rs index d3815c650a..de74907ff2 100644 --- a/src/test/parse-fail/keyword-use-as-identifier.rs +++ b/src/test/parse-fail/keyword-use-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. 
// @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py use' fn main() { - let use = "foo"; //~ error: ident + let use = "foo"; //~ error: expected pattern, found keyword `use` } diff --git a/src/test/parse-fail/keyword-where-as-identifier.rs b/src/test/parse-fail/keyword-where-as-identifier.rs new file mode 100644 index 0000000000..4b7c8920b1 --- /dev/null +++ b/src/test/parse-fail/keyword-where-as-identifier.rs @@ -0,0 +1,17 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -Z parse-only + +// This file was auto-generated using 'src/etc/generate-keyword-tests.py where' + +fn main() { + let where = "foo"; //~ error: expected pattern, found keyword `where` +} diff --git a/src/test/parse-fail/keyword-while-as-identifier.rs b/src/test/parse-fail/keyword-while-as-identifier.rs index 331fdc07cc..01793caa38 100644 --- a/src/test/parse-fail/keyword-while-as-identifier.rs +++ b/src/test/parse-fail/keyword-while-as-identifier.rs @@ -1,4 +1,4 @@ -// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // @@ -13,5 +13,5 @@ // This file was auto-generated using 'src/etc/generate-keyword-tests.py while' fn main() { - let while = "foo"; //~ error: ident + let while = "foo"; //~ error: expected pattern, found keyword `while` } diff --git a/src/test/parse-fail/keywords-followed-by-double-colon.rs b/src/test/parse-fail/keywords-followed-by-double-colon.rs index 5e27d3e4f3..bb8a1dfdb1 100644 --- a/src/test/parse-fail/keywords-followed-by-double-colon.rs +++ b/src/test/parse-fail/keywords-followed-by-double-colon.rs @@ -12,5 +12,5 @@ fn main() { struct::foo(); //~ ERROR expected identifier - mut::baz(); //~ ERROR expected identifier + mut::baz(); //~ ERROR expected expression, found keyword `mut` } diff --git a/src/test/parse-fail/lifetime-no-keyword.rs b/src/test/parse-fail/lifetime-no-keyword.rs index 84b02e6ba0..9ca81d9918 100644 --- a/src/test/parse-fail/lifetime-no-keyword.rs +++ b/src/test/parse-fail/lifetime-no-keyword.rs @@ -12,6 +12,7 @@ fn foo<'a>(a: &'a isize) { } fn bar(a: &'static isize) { } -fn baz(a: &'let isize) { } //~ ERROR invalid lifetime name +fn baz(a: &'let isize) { } //~ ERROR lifetimes cannot use keyword names +fn zab(a: &'self isize) { } //~ ERROR lifetimes cannot use keyword names fn main() { } diff --git a/src/test/parse-fail/match-arrows-block-then-binop.rs b/src/test/parse-fail/match-arrows-block-then-binop.rs index 3026e159a4..e8cfb77f05 100644 --- a/src/test/parse-fail/match-arrows-block-then-binop.rs +++ b/src/test/parse-fail/match-arrows-block-then-binop.rs @@ -14,6 +14,6 @@ fn main() { match 0 { 0 => { - } + 5 //~ ERROR unexpected token: `+` + } + 5 //~ ERROR expected pattern, found `+` } } diff --git a/src/test/parse-fail/no-unsafe-self.rs b/src/test/parse-fail/no-unsafe-self.rs index cbdf50a752..22baf88b43 100644 --- a/src/test/parse-fail/no-unsafe-self.rs +++ b/src/test/parse-fail/no-unsafe-self.rs @@ -11,14 +11,16 @@ // compile-flags: -Z parse-only -Z continue-parse-after-error trait A { - fn foo(*mut self); //~ ERROR cannot pass self by 
raw pointer - fn bar(*self); //~ ERROR cannot pass self by raw pointer + fn foo(*mut self); //~ ERROR cannot pass `self` by raw pointer + fn baz(*const self); //~ ERROR cannot pass `self` by raw pointer + fn bar(*self); //~ ERROR cannot pass `self` by raw pointer } struct X; impl A for X { - fn foo(*mut self) { } //~ ERROR cannot pass self by raw pointer - fn bar(*self) { } //~ ERROR cannot pass self by raw pointer + fn foo(*mut self) { } //~ ERROR cannot pass `self` by raw pointer + fn baz(*const self) { } //~ ERROR cannot pass `self` by raw pointer + fn bar(*self) { } //~ ERROR cannot pass `self` by raw pointer } fn main() { } diff --git a/src/test/parse-fail/obsolete-proc.rs b/src/test/parse-fail/obsolete-proc.rs index 1ef8cd2714..648c46d246 100644 --- a/src/test/parse-fail/obsolete-proc.rs +++ b/src/test/parse-fail/obsolete-proc.rs @@ -12,9 +12,8 @@ // Test that we generate obsolete syntax errors around usages of `proc`. -fn foo(p: proc()) { } //~ ERROR `proc` is a reserved keyword +fn foo(p: proc()) { } //~ ERROR expected type, found reserved keyword `proc` -fn bar() { proc() 1; } //~ ERROR `proc` is a reserved keyword - //~^ ERROR expected +fn bar() { proc() 1; } //~ ERROR expected expression, found reserved keyword `proc` fn main() { } diff --git a/src/test/parse-fail/removed-syntax-extern-const.rs b/src/test/parse-fail/removed-syntax-extern-const.rs index c42fae7123..e632af6c83 100644 --- a/src/test/parse-fail/removed-syntax-extern-const.rs +++ b/src/test/parse-fail/removed-syntax-extern-const.rs @@ -12,5 +12,5 @@ extern { const i: isize; - //~^ ERROR expected one of `fn`, `pub`, `static`, `unsafe`, or `}`, found `const` + //~^ ERROR expected one of `fn`, `pub`, `static`, or `}`, found `const` } diff --git a/src/test/parse-fail/removed-syntax-mode.rs b/src/test/parse-fail/removed-syntax-mode.rs index 4dafc36e91..6e99f8b3ee 100644 --- a/src/test/parse-fail/removed-syntax-mode.rs +++ b/src/test/parse-fail/removed-syntax-mode.rs @@ -10,4 +10,4 @@ // compile-flags: -Z parse-only -fn f(+x: isize) {} //~ ERROR unexpected token: `+` +fn f(+x: isize) {} //~ ERROR expected pattern, found `+` diff --git a/src/test/parse-fail/removed-syntax-mut-vec-expr.rs b/src/test/parse-fail/removed-syntax-mut-vec-expr.rs index 301bd0e8b1..7e5bd27b49 100644 --- a/src/test/parse-fail/removed-syntax-mut-vec-expr.rs +++ b/src/test/parse-fail/removed-syntax-mut-vec-expr.rs @@ -11,7 +11,5 @@ // compile-flags: -Z parse-only fn f() { - let v = [mut 1, 2, 3, 4]; - //~^ ERROR expected identifier, found keyword `mut` - //~^^ ERROR expected one of `!`, `,`, `.`, `::`, `;`, `?`, `]`, `{`, or an operator, found `1` + let v = [mut 1, 2, 3, 4]; //~ ERROR expected expression, found keyword `mut` } diff --git a/src/test/parse-fail/removed-syntax-mut-vec-ty.rs b/src/test/parse-fail/removed-syntax-mut-vec-ty.rs index 91918f01bb..0cdf1981a2 100644 --- a/src/test/parse-fail/removed-syntax-mut-vec-ty.rs +++ b/src/test/parse-fail/removed-syntax-mut-vec-ty.rs @@ -10,6 +10,4 @@ // compile-flags: -Z parse-only -type v = [mut isize]; - //~^ ERROR expected identifier, found keyword `mut` - //~^^ ERROR expected one of `!`, `(`, `+`, `::`, `;`, `<`, or `]`, found `isize` +type v = [mut isize]; //~ ERROR expected type, found keyword `mut` diff --git a/src/test/parse-fail/removed-syntax-uniq-mut-expr.rs b/src/test/parse-fail/removed-syntax-uniq-mut-expr.rs index 2f637cf0b4..b16c77ab6b 100644 --- a/src/test/parse-fail/removed-syntax-uniq-mut-expr.rs +++ b/src/test/parse-fail/removed-syntax-uniq-mut-expr.rs @@ -11,7 +11,5 @@ // 
compile-flags: -Z parse-only fn f() { - let a_box = box mut 42; - //~^ ERROR expected identifier, found keyword `mut` - //~^^ ERROR expected one of `!`, `.`, `::`, `;`, `?`, `{`, or an operator, found `42` + let a_box = box mut 42; //~ ERROR expected expression, found keyword `mut` } diff --git a/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs b/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs index e163790126..9bd8dc9b11 100644 --- a/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs +++ b/src/test/parse-fail/removed-syntax-uniq-mut-ty.rs @@ -10,6 +10,4 @@ // compile-flags: -Z parse-only -type mut_box = Box<mut isize>; - //~^ ERROR expected identifier, found keyword `mut` - //~^^ ERROR expected one of `!`, `(`, `+`, `,`, `::`, `<`, or `>`, found `isize` +type mut_box = Box<mut isize>; //~ ERROR expected type, found keyword `mut` diff --git a/src/test/parse-fail/unsized2.rs b/src/test/parse-fail/unsized2.rs index a4a4c0dcfd..f3af8740be 100644 --- a/src/test/parse-fail/unsized2.rs +++ b/src/test/parse-fail/unsized2.rs @@ -15,8 +15,5 @@ fn f() {} pub fn main() { - f<type>(); - //~^ ERROR expected identifier, found keyword `type` - //~^^ ERROR: chained comparison - //~^^^ HELP: use `::< + f<type>(); //~ ERROR expected expression, found keyword `type` } diff --git a/src/test/parse-fail/use-ends-with-mod-sep.rs b/src/test/parse-fail/use-ends-with-mod-sep.rs index 143886e233..e6a10d43e2 100644 --- a/src/test/parse-fail/use-ends-with-mod-sep.rs +++ b/src/test/parse-fail/use-ends-with-mod-sep.rs @@ -10,4 +10,4 @@ // compile-flags: -Z parse-only -use std::any::; //~ ERROR expected identifier or `{` or `*`, found `;` +use std::any::; //~ ERROR expected identifier, found `;` diff --git a/src/test/run-make/atomic-lock-free/Makefile b/src/test/run-make/atomic-lock-free/Makefile new file mode 100644 index 0000000000..78e7bb2313 --- /dev/null +++ b/src/test/run-make/atomic-lock-free/Makefile @@ -0,0 +1,30 @@ +-include ../tools.mk + +# This test ensures that atomic types are never lowered into runtime library calls that are not +# guaranteed to be lock-free.
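# Illustrative note (not part of the test): each `$(RUSTC) --target=...` / `nm | grep`
# pair below builds the crate for one target and then inspects the symbol table of
# the produced rlib for `__atomic_fetch_add`, the libatomic fallback routine; the
# intent is that every atomic add is lowered to an inline, lock-free instruction
# sequence on all of the listed targets.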
+ +all: +ifeq ($(UNAME),Linux) + $(RUSTC) --target=i686-unknown-linux-gnu atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add + $(RUSTC) --target=x86_64-unknown-linux-gnu atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add + $(RUSTC) --target=arm-unknown-linux-gnueabi atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add + $(RUSTC) --target=arm-unknown-linux-gnueabihf atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add + $(RUSTC) --target=armv7-unknown-linux-gnueabihf atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add + $(RUSTC) --target=aarch64-unknown-linux-gnu atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add + $(RUSTC) --target=mips-unknown-linux-gnu atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add + $(RUSTC) --target=mipsel-unknown-linux-gnu atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add + $(RUSTC) --target=powerpc-unknown-linux-gnu atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add + $(RUSTC) --target=powerpc64-unknown-linux-gnu atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add + $(RUSTC) --target=powerpc64le-unknown-linux-gnu atomic_lock_free.rs + nm "$(TMPDIR)/libatomic_lock_free.rlib" | grep -vq __atomic_fetch_add +endif diff --git a/src/test/run-make/atomic-lock-free/atomic_lock_free.rs b/src/test/run-make/atomic-lock-free/atomic_lock_free.rs new file mode 100644 index 0000000000..8731cd960f --- /dev/null +++ b/src/test/run-make/atomic-lock-free/atomic_lock_free.rs @@ -0,0 +1,62 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
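// Illustrative sketch (not part of the test): the file below uses the raw
// `atomic_xadd` intrinsic inside a `no_core` crate so it can be compiled for
// every target listed in the Makefile without needing std. On an ordinary std
// target the property being tested can be seen through the stable API; the
// expectation is that this compiles to an inline atomic add rather than a call
// to a helper such as `__atomic_fetch_add`:
//
//   use std::sync::atomic::{AtomicUsize, Ordering};
//
//   pub fn lock_free_add(counter: &AtomicUsize) -> usize {
//       counter.fetch_add(1, Ordering::SeqCst)
//   }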
+ +#![feature(cfg_target_has_atomic, no_core, intrinsics, lang_items)] +#![crate_type="rlib"] +#![no_core] + +extern "rust-intrinsic" { + fn atomic_xadd(dst: *mut T, src: T) -> T; +} + +#[lang = "sized"] +trait Sized {} + +#[cfg(target_has_atomic = "8")] +pub unsafe fn atomic_u8(x: *mut u8) { + atomic_xadd(x, 1); + atomic_xadd(x, 1); +} +#[cfg(target_has_atomic = "8")] +pub unsafe fn atomic_i8(x: *mut i8) { + atomic_xadd(x, 1); +} +#[cfg(target_has_atomic = "16")] +pub unsafe fn atomic_u16(x: *mut u16) { + atomic_xadd(x, 1); +} +#[cfg(target_has_atomic = "16")] +pub unsafe fn atomic_i16(x: *mut i16) { + atomic_xadd(x, 1); +} +#[cfg(target_has_atomic = "32")] +pub unsafe fn atomic_u32(x: *mut u32) { + atomic_xadd(x, 1); +} +#[cfg(target_has_atomic = "32")] +pub unsafe fn atomic_i32(x: *mut i32) { + atomic_xadd(x, 1); +} +#[cfg(target_has_atomic = "64")] +pub unsafe fn atomic_u64(x: *mut u64) { + atomic_xadd(x, 1); +} +#[cfg(target_has_atomic = "64")] +pub unsafe fn atomic_i64(x: *mut i64) { + atomic_xadd(x, 1); +} +#[cfg(target_has_atomic = "ptr")] +pub unsafe fn atomic_usize(x: *mut usize) { + atomic_xadd(x, 1); +} +#[cfg(target_has_atomic = "ptr")] +pub unsafe fn atomic_isize(x: *mut isize) { + atomic_xadd(x, 1); +} diff --git a/src/test/run-make/cdylib/Makefile b/src/test/run-make/cdylib/Makefile new file mode 100644 index 0000000000..ae3b82537d --- /dev/null +++ b/src/test/run-make/cdylib/Makefile @@ -0,0 +1,19 @@ +include ../tools.mk + +all: $(call RUN_BINFILE,foo) + $(call RUN,foo) + rm $(call DYLIB,foo) + $(RUSTC) foo.rs -C lto + $(call RUN,foo) + +ifdef IS_MSVC +$(call RUN_BINFILE,foo): $(call DYLIB,foo) + $(CC) $(CFLAGS) foo.c $(TMPDIR)/foo.dll.lib -Fe:`cygpath -w $@` +else +$(call RUN_BINFILE,foo): $(call DYLIB,foo) + $(CC) $(CFLAGS) foo.c -lfoo -o $(call RUN_BINFILE,foo) -L $(TMPDIR) +endif + +$(call DYLIB,foo): + $(RUSTC) bar.rs + $(RUSTC) foo.rs diff --git a/src/test/run-make/cdylib/bar.rs b/src/test/run-make/cdylib/bar.rs new file mode 100644 index 0000000000..2c97298604 --- /dev/null +++ b/src/test/run-make/cdylib/bar.rs @@ -0,0 +1,15 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "rlib"] + +pub fn bar() { + println!("hello!"); +} diff --git a/src/test/run-make/cdylib/foo.c b/src/test/run-make/cdylib/foo.c new file mode 100644 index 0000000000..1c950427c6 --- /dev/null +++ b/src/test/run-make/cdylib/foo.c @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#include + +extern void foo(); +extern unsigned bar(unsigned a, unsigned b); + +int main() { + foo(); + assert(bar(1, 2) == 3); + return 0; +} diff --git a/src/test/run-make/cdylib/foo.rs b/src/test/run-make/cdylib/foo.rs new file mode 100644 index 0000000000..cdac6d1903 --- /dev/null +++ b/src/test/run-make/cdylib/foo.rs @@ -0,0 +1,23 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "cdylib"] + +extern crate bar; + +#[no_mangle] +pub extern fn foo() { + bar::bar(); +} + +#[no_mangle] +pub extern fn bar(a: u32, b: u32) -> u32 { + a + b +} diff --git a/src/test/run-make/compiler-lookup-paths/Makefile b/src/test/run-make/compiler-lookup-paths/Makefile index 154e46c0ed..e22b937a08 100644 --- a/src/test/run-make/compiler-lookup-paths/Makefile +++ b/src/test/run-make/compiler-lookup-paths/Makefile @@ -18,13 +18,21 @@ all: $(TMPDIR)/libnative.a $(RUSTC) d.rs -L crate=$(TMPDIR)/native && exit 1 || exit 0 $(RUSTC) d.rs -L native=$(TMPDIR)/native $(RUSTC) d.rs -L all=$(TMPDIR)/native + # Deduplication tests: + # Same hash, no errors. mkdir -p $(TMPDIR)/e1 mkdir -p $(TMPDIR)/e2 $(RUSTC) e.rs -o $(TMPDIR)/e1/libe.rlib $(RUSTC) e.rs -o $(TMPDIR)/e2/libe.rlib + $(RUSTC) f.rs -L $(TMPDIR)/e1 -L $(TMPDIR)/e2 + $(RUSTC) f.rs -L crate=$(TMPDIR)/e1 -L $(TMPDIR)/e2 + $(RUSTC) f.rs -L crate=$(TMPDIR)/e1 -L crate=$(TMPDIR)/e2 + # Different hash, errors. + $(RUSTC) e2.rs -o $(TMPDIR)/e2/libe.rlib $(RUSTC) f.rs -L $(TMPDIR)/e1 -L $(TMPDIR)/e2 && exit 1 || exit 0 $(RUSTC) f.rs -L crate=$(TMPDIR)/e1 -L $(TMPDIR)/e2 && exit 1 || exit 0 $(RUSTC) f.rs -L crate=$(TMPDIR)/e1 -L crate=$(TMPDIR)/e2 && exit 1 || exit 0 + # Native/dependency paths don't cause errors. $(RUSTC) f.rs -L native=$(TMPDIR)/e1 -L $(TMPDIR)/e2 $(RUSTC) f.rs -L dependency=$(TMPDIR)/e1 -L $(TMPDIR)/e2 $(RUSTC) f.rs -L dependency=$(TMPDIR)/e1 -L crate=$(TMPDIR)/e2 diff --git a/src/test/run-make/compiler-lookup-paths/e2.rs b/src/test/run-make/compiler-lookup-paths/e2.rs new file mode 100644 index 0000000000..f8c8c029c0 --- /dev/null +++ b/src/test/run-make/compiler-lookup-paths/e2.rs @@ -0,0 +1,14 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_name = "e"] +#![crate_type = "rlib"] + +pub fn f() {} diff --git a/src/test/run-make/compiler-lookup-paths/native.c b/src/test/run-make/compiler-lookup-paths/native.c index e69de29bb2..3066947052 100644 --- a/src/test/run-make/compiler-lookup-paths/native.c +++ b/src/test/run-make/compiler-lookup-paths/native.c @@ -0,0 +1,9 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
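// Illustrative note (not part of the patch) on the `-L KIND=PATH` flags exercised
// by the compiler-lookup-paths Makefile above: the optional kind prefix restricts
// what a search path may supply, e.g.
//
//   rustc f.rs -L crate=$(TMPDIR)/e1 -L native=$(TMPDIR)/native
//
// `crate=` paths are searched only for Rust crates, `native=` only for native
// libraries, `dependency=` only for transitive dependencies, and a bare path
// (or `all=`) for everything. That is why two directories holding conflicting
// builds of `libe` only trigger an error when at least one of them is allowed
// to supply a crate.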
diff --git a/src/test/run-make/debug-assertions/Makefile b/src/test/run-make/debug-assertions/Makefile
index 7129756276..76ada90f1e 100644
--- a/src/test/run-make/debug-assertions/Makefile
+++ b/src/test/run-make/debug-assertions/Makefile
@@ -11,6 +11,10 @@ all:
 	$(call RUN,debug) good
 	$(RUSTC) debug.rs -C opt-level=3
 	$(call RUN,debug) good
+	$(RUSTC) debug.rs -C opt-level=s
+	$(call RUN,debug) good
+	$(RUSTC) debug.rs -C opt-level=z
+	$(call RUN,debug) good
 	$(RUSTC) debug.rs -O
 	$(call RUN,debug) good
 	$(RUSTC) debug.rs
diff --git a/src/test/run-make/dep-info-no-analysis/Makefile b/src/test/run-make/dep-info-no-analysis/Makefile
new file mode 100644
index 0000000000..5d2cfadfd0
--- /dev/null
+++ b/src/test/run-make/dep-info-no-analysis/Makefile
@@ -0,0 +1,6 @@
+-include ../tools.mk
+
+all:
+	$(RUSTC) -o $(TMPDIR)/input.dd -Z no-analysis --emit dep-info input.rs
+	sed -i'.bak' 's/^.*input.dd/input.dd/g' $(TMPDIR)/input.dd
+	diff -u $(TMPDIR)/input.dd input.dd
diff --git a/src/test/run-make/dep-info-no-analysis/input.dd b/src/test/run-make/dep-info-no-analysis/input.dd
new file mode 100644
index 0000000000..f2c8676515
--- /dev/null
+++ b/src/test/run-make/dep-info-no-analysis/input.dd
@@ -0,0 +1,3 @@
+input.dd: input.rs
+
+input.rs:
diff --git a/src/test/run-make/dep-info-no-analysis/input.rs b/src/test/run-make/dep-info-no-analysis/input.rs
new file mode 100644
index 0000000000..523b0f0670
--- /dev/null
+++ b/src/test/run-make/dep-info-no-analysis/input.rs
@@ -0,0 +1,14 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Tests that dep info can be emitted without resolving external crates.
+extern crate not_there; + +fn main() {} diff --git a/src/test/run-make/emit/Makefile b/src/test/run-make/emit/Makefile index be34028fe1..e0b57107e5 100644 --- a/src/test/run-make/emit/Makefile +++ b/src/test/run-make/emit/Makefile @@ -5,6 +5,8 @@ all: $(RUSTC) -Copt-level=1 --emit=llvm-bc,llvm-ir,asm,obj,link test-24876.rs $(RUSTC) -Copt-level=2 --emit=llvm-bc,llvm-ir,asm,obj,link test-24876.rs $(RUSTC) -Copt-level=3 --emit=llvm-bc,llvm-ir,asm,obj,link test-24876.rs + $(RUSTC) -Copt-level=s --emit=llvm-bc,llvm-ir,asm,obj,link test-24876.rs + $(RUSTC) -Copt-level=z --emit=llvm-bc,llvm-ir,asm,obj,link test-24876.rs $(RUSTC) -Copt-level=0 --emit=llvm-bc,llvm-ir,asm,obj,link test-26235.rs $(call RUN,test-26235) || exit 1 $(RUSTC) -Copt-level=1 --emit=llvm-bc,llvm-ir,asm,obj,link test-26235.rs @@ -13,3 +15,7 @@ all: $(call RUN,test-26235) || exit 1 $(RUSTC) -Copt-level=3 --emit=llvm-bc,llvm-ir,asm,obj,link test-26235.rs $(call RUN,test-26235) || exit 1 + $(RUSTC) -Copt-level=s --emit=llvm-bc,llvm-ir,asm,obj,link test-26235.rs + $(call RUN,test-26235) || exit 1 + $(RUSTC) -Copt-level=z --emit=llvm-bc,llvm-ir,asm,obj,link test-26235.rs + $(call RUN,test-26235) || exit 1 diff --git a/src/test/run-make/execution-engine/test.rs b/src/test/run-make/execution-engine/test.rs index 12cc475f12..0e84a0f522 100644 --- a/src/test/run-make/execution-engine/test.rs +++ b/src/test/run-make/execution-engine/test.rs @@ -33,8 +33,8 @@ use rustc::ty; use rustc::session::config::{self, basic_options, build_configuration, Input, Options}; use rustc::session::build_session; use rustc_driver::{driver, abort_on_err}; -use rustc::hir::lowering::{lower_crate, LoweringContext}; use rustc_resolve::MakeGlobMap; +use rustc_metadata::creader::read_local_crates; use rustc_metadata::cstore::CStore; use libc::c_void; @@ -223,8 +223,12 @@ fn compile_program(input: &str, sysroot: PathBuf) let handle = thread.spawn(move || { let opts = build_exec_options(sysroot); - let cstore = Rc::new(CStore::new(token::get_ident_interner())); - let sess = build_session(opts, None, Registry::new(&rustc::DIAGNOSTICS), + let dep_graph = DepGraph::new(opts.build_dep_graph()); + let cstore = Rc::new(CStore::new(&dep_graph, token::get_ident_interner())); + let sess = build_session(opts, + &dep_graph, + None, + Registry::new(&rustc::DIAGNOSTICS), cstore.clone()); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); @@ -238,15 +242,19 @@ fn compile_program(input: &str, sysroot: PathBuf) .expect("phase_2 returned `None`"); let krate = driver::assign_node_ids(&sess, krate); - let lcx = LoweringContext::new(&sess, Some(&krate)); - let dep_graph = DepGraph::new(sess.opts.build_dep_graph()); - let mut hir_forest = ast_map::Forest::new(lower_crate(&lcx, &krate), dep_graph); + let mut defs = ast_map::collect_definitions(&krate); + read_local_crates(&sess, &cstore, &defs, &krate, &id, &dep_graph); + let (analysis, resolutions, mut hir_forest) = { + driver::lower_and_resolve(&sess, &id, &mut defs, &krate, + &sess.dep_graph, MakeGlobMap::No) + }; + let arenas = ty::CtxtArenas::new(); - let ast_map = driver::make_map(&sess, &mut hir_forest); + let ast_map = ast_map::map_crate(&mut hir_forest, defs); abort_on_err(driver::phase_3_run_analysis_passes( - &sess, &cstore, ast_map, &arenas, &id, - MakeGlobMap::No, |tcx, mir_map, analysis, _| { + &sess, ast_map, analysis, resolutions, &arenas, &id, + |tcx, mir_map, analysis, _| { let trans = driver::phase_4_translate_to_llvm(tcx, mir_map.unwrap(), analysis); diff --git 
a/src/test/run-make/extern-flag-fun/Makefile b/src/test/run-make/extern-flag-fun/Makefile index ca5aa052a7..a9f2585335 100644 --- a/src/test/run-make/extern-flag-fun/Makefile +++ b/src/test/run-make/extern-flag-fun/Makefile @@ -3,14 +3,15 @@ all: $(RUSTC) bar.rs --crate-type=rlib $(RUSTC) bar.rs --crate-type=rlib -C extra-filename=-a + $(RUSTC) bar-alt.rs --crate-type=rlib $(RUSTC) foo.rs --extern hello && exit 1 || exit 0 $(RUSTC) foo.rs --extern bar=no-exist && exit 1 || exit 0 $(RUSTC) foo.rs --extern bar=foo.rs && exit 1 || exit 0 $(RUSTC) foo.rs \ --extern bar=$(TMPDIR)/libbar.rlib \ - --extern bar=$(TMPDIR)/libbar-a.rlib \ + --extern bar=$(TMPDIR)/libbar-alt.rlib \ && exit 1 || exit 0 $(RUSTC) foo.rs \ --extern bar=$(TMPDIR)/libbar.rlib \ - --extern bar=$(TMPDIR)/libbar.rlib + --extern bar=$(TMPDIR)/libbar-a.rlib $(RUSTC) foo.rs --extern bar=$(TMPDIR)/libbar.rlib diff --git a/src/test/auxiliary/no_std_crate.rs b/src/test/run-make/extern-flag-fun/bar-alt.rs similarity index 94% rename from src/test/auxiliary/no_std_crate.rs rename to src/test/run-make/extern-flag-fun/bar-alt.rs index 7cfae6d121..d6ebd9d896 100644 --- a/src/test/auxiliary/no_std_crate.rs +++ b/src/test/run-make/extern-flag-fun/bar-alt.rs @@ -8,6 +8,4 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![no_std] - -pub fn foo() {} +pub fn f() {} diff --git a/src/test/run-make/extern-multiple-copies2/Makefile b/src/test/run-make/extern-multiple-copies2/Makefile new file mode 100644 index 0000000000..567d7e78a5 --- /dev/null +++ b/src/test/run-make/extern-multiple-copies2/Makefile @@ -0,0 +1,10 @@ +-include ../tools.mk + +all: + $(RUSTC) foo1.rs + $(RUSTC) foo2.rs + mkdir $(TMPDIR)/foo + cp $(TMPDIR)/libfoo1.rlib $(TMPDIR)/foo/libfoo1.rlib + $(RUSTC) bar.rs \ + --extern foo1=$(TMPDIR)/foo/libfoo1.rlib \ + --extern foo2=$(TMPDIR)/libfoo2.rlib diff --git a/src/test/run-make/extern-multiple-copies2/bar.rs b/src/test/run-make/extern-multiple-copies2/bar.rs new file mode 100644 index 0000000000..b8ac34aa53 --- /dev/null +++ b/src/test/run-make/extern-multiple-copies2/bar.rs @@ -0,0 +1,18 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[macro_use] +extern crate foo2; // foo2 first to exhibit the bug +#[macro_use] +extern crate foo1; + +fn main() { + foo2::foo2(foo1::A); +} diff --git a/src/test/run-make/json-errors/foo.rs b/src/test/run-make/extern-multiple-copies2/foo1.rs similarity index 86% rename from src/test/run-make/json-errors/foo.rs rename to src/test/run-make/extern-multiple-copies2/foo1.rs index 4db33940d8..1787772053 100644 --- a/src/test/run-make/json-errors/foo.rs +++ b/src/test/run-make/extern-multiple-copies2/foo1.rs @@ -8,8 +8,10 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -fn main() { - let x = 42 + y; +#![crate_type = "rlib"] - 42u8 + 42i32; +pub struct A; + +pub fn foo1(a: A) { + drop(a); } diff --git a/src/test/run-make/extern-multiple-copies2/foo2.rs b/src/test/run-make/extern-multiple-copies2/foo2.rs new file mode 100644 index 0000000000..bad1030438 --- /dev/null +++ b/src/test/run-make/extern-multiple-copies2/foo2.rs @@ -0,0 +1,18 @@ +// Copyright 2015 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "rlib"] + +#[macro_use] +extern crate foo1; + +pub fn foo2(a: foo1::A) { + foo1::foo1(a); +} diff --git a/src/test/run-make/issue-11908/Makefile b/src/test/run-make/issue-11908/Makefile index 663a9f7125..cf6572c27a 100644 --- a/src/test/run-make/issue-11908/Makefile +++ b/src/test/run-make/issue-11908/Makefile @@ -9,15 +9,13 @@ all: mkdir $(TMPDIR)/other - $(RUSTC) foo.rs --crate-type=dylib + $(RUSTC) foo.rs --crate-type=dylib -C prefer-dynamic mv $(call DYLIB,foo) $(TMPDIR)/other - $(RUSTC) foo.rs --crate-type=dylib - $(RUSTC) bar.rs -L $(TMPDIR)/other 2>&1 | \ - grep "multiple dylib candidates" + $(RUSTC) foo.rs --crate-type=dylib -C prefer-dynamic + $(RUSTC) bar.rs -L $(TMPDIR)/other rm -rf $(TMPDIR) mkdir -p $(TMPDIR)/other $(RUSTC) foo.rs --crate-type=rlib mv $(TMPDIR)/libfoo.rlib $(TMPDIR)/other $(RUSTC) foo.rs --crate-type=rlib - $(RUSTC) bar.rs -L $(TMPDIR)/other 2>&1 | \ - grep "multiple rlib candidates" + $(RUSTC) bar.rs -L $(TMPDIR)/other diff --git a/src/test/run-make/issue-19371/foo.rs b/src/test/run-make/issue-19371/foo.rs index 80c06ca327..41d250eade 100644 --- a/src/test/run-make/issue-19371/foo.rs +++ b/src/test/run-make/issue-19371/foo.rs @@ -16,6 +16,7 @@ extern crate rustc_lint; extern crate rustc_metadata; extern crate syntax; +use rustc::dep_graph::DepGraph; use rustc::session::{build_session, Session}; use rustc::session::config::{basic_options, build_configuration, Input, OutputType}; use rustc_driver::driver::{compile_input, CompileController, anon_src}; @@ -54,8 +55,9 @@ fn basic_sess(sysroot: PathBuf) -> (Session, Rc) { opts.maybe_sysroot = Some(sysroot); let descriptions = Registry::new(&rustc::DIAGNOSTICS); - let cstore = Rc::new(CStore::new(token::get_ident_interner())); - let sess = build_session(opts, None, descriptions, cstore.clone()); + let dep_graph = DepGraph::new(opts.build_dep_graph()); + let cstore = Rc::new(CStore::new(&dep_graph, token::get_ident_interner())); + let sess = build_session(opts, &dep_graph, None, descriptions, cstore.clone()); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); (sess, cstore) } diff --git a/src/test/run-make/issue-22131/Makefile b/src/test/run-make/issue-22131/Makefile index ec1e282666..1e8568626a 100644 --- a/src/test/run-make/issue-22131/Makefile +++ b/src/test/run-make/issue-22131/Makefile @@ -2,6 +2,6 @@ all: foo.rs $(RUSTC) --cfg 'feature="bar"' --crate-type lib foo.rs - $(HOST_RPATH_ENV) $(RUSTDOC) --test --cfg 'feature="bar"' \ + $(HOST_RPATH_ENV) '$(RUSTDOC)' --test --cfg 'feature="bar"' \ -L $(TMPDIR) foo.rs |\ grep -q 'test foo_0 ... ok' diff --git a/src/test/run-make/json-errors/Makefile b/src/test/run-make/json-errors/Makefile deleted file mode 100644 index bb73fda67b..0000000000 --- a/src/test/run-make/json-errors/Makefile +++ /dev/null @@ -1,10 +0,0 @@ --include ../tools.mk - -LOG := $(TMPDIR)/foo.log - -all: - cp foo.rs $(TMPDIR) - cd $(TMPDIR) - -$(RUSTC) -Z unstable-options --error-format=json foo.rs 2>$(LOG) - grep -q '{"message":"unresolved name `y`","code":{"code":"E0425","explanation":"\\nAn unresolved name was used. 
Example of erroneous codes.*"},"level":"error","spans":\[{"file_name":"foo.rs","byte_start":496,"byte_end":497,"line_start":12,"line_end":12,"column_start":18,"column_end":19,"text":\[{"text":" let x = 42 + y;","highlight_start":18,"highlight_end":19}\]}\],"children":\[\]}' $(LOG) - grep -q '{"message":".*","code":{"code":"E0277","explanation":"\\nYou tried.*"},"level":"error","spans":\[{.*}\],"children":\[{"message":"the .*","code":null,"level":"help","spans":\[{"file_name":"foo.rs","byte_start":504,"byte_end":516,"line_start":14,"line_end":14,"column_start":0,"column_end":0,"text":\[{.*}\]}\],"children":\[\]},{"message":" ","code":null,"level":"help",' $(LOG) diff --git a/src/test/run-make/many-crates-but-no-match/Makefile b/src/test/run-make/many-crates-but-no-match/Makefile index edf8e9df46..0da4af34ef 100644 --- a/src/test/run-make/many-crates-but-no-match/Makefile +++ b/src/test/run-make/many-crates-but-no-match/Makefile @@ -28,7 +28,7 @@ all: # Ensure crateC fails to compile since A1 is "missing" and A2/A3 hashes do not match $(RUSTC) -L $(A2) -L $(A3) crateC.rs >$(LOG) 2>&1 || true grep "error: found possibly newer version of crate \`crateA\` which \`crateB\` depends on" $(LOG) - grep "note: perhaps this crate needs to be recompiled?" $(LOG) + grep "note: perhaps that crate needs to be recompiled?" $(LOG) grep "note: crate \`crateA\` path #1:" $(LOG) grep "note: crate \`crateA\` path #2:" $(LOG) grep "note: crate \`crateB\` path #1:" $(LOG) diff --git a/src/test/run-make/pretty-expanded-hygiene/input.pp.rs b/src/test/run-make/pretty-expanded-hygiene/input.pp.rs index c241241e18..0717af98b3 100644 --- a/src/test/run-make/pretty-expanded-hygiene/input.pp.rs +++ b/src/test/run-make/pretty-expanded-hygiene/input.pp.rs @@ -14,3 +14,5 @@ fn bar /* 62#0 */() { let x /* 59#2 */ = 1; y /* 61#4 */ + x /* 59#5 */ } + +fn y /* 61#0 */() { } diff --git a/src/test/run-make/pretty-expanded-hygiene/input.rs b/src/test/run-make/pretty-expanded-hygiene/input.rs index 56783b5609..422fbdb088 100644 --- a/src/test/run-make/pretty-expanded-hygiene/input.rs +++ b/src/test/run-make/pretty-expanded-hygiene/input.rs @@ -20,3 +20,5 @@ fn bar() { let x = 1; foo!(x) } + +fn y() {} diff --git a/src/test/run-make/print-cfg/Makefile b/src/test/run-make/print-cfg/Makefile index c74233d495..a820a463f4 100644 --- a/src/test/run-make/print-cfg/Makefile +++ b/src/test/run-make/print-cfg/Makefile @@ -5,6 +5,7 @@ all: default $(RUSTC) --target x86_64-pc-windows-gnu --print cfg | grep x86_64 $(RUSTC) --target i686-pc-windows-msvc --print cfg | grep msvc $(RUSTC) --target i686-apple-darwin --print cfg | grep macos + $(RUSTC) --target i686-unknown-linux-gnu --print cfg | grep sse2 ifdef IS_WINDOWS default: diff --git a/src/test/run-make/save-analysis/Makefile b/src/test/run-make/save-analysis/Makefile index 7296fb9cc5..3c636baaed 100644 --- a/src/test/run-make/save-analysis/Makefile +++ b/src/test/run-make/save-analysis/Makefile @@ -3,4 +3,5 @@ all: code krate2: krate2.rs $(RUSTC) $< code: foo.rs krate2 + $(RUSTC) foo.rs -Zsave-analysis-csv $(RUSTC) foo.rs -Zsave-analysis diff --git a/src/test/run-make/static-unwinding/main.rs b/src/test/run-make/static-unwinding/main.rs index ba4860be91..1cd785334f 100644 --- a/src/test/run-make/static-unwinding/main.rs +++ b/src/test/run-make/static-unwinding/main.rs @@ -25,7 +25,7 @@ fn main() { thread::spawn(move|| { let _a = A; lib::callback(|| panic!()); - }).join().err().unwrap(); + }).join().unwrap_err(); unsafe { assert_eq!(lib::statik, 1); diff --git 
a/src/test/run-make/symlinked-rlib/Makefile b/src/test/run-make/symlinked-rlib/Makefile new file mode 100644 index 0000000000..2709f786e0 --- /dev/null +++ b/src/test/run-make/symlinked-rlib/Makefile @@ -0,0 +1,14 @@ +-include ../tools.mk + +# ignore windows: `ln` is actually `cp` on msys. +ifndef IS_WINDOWS + +all: + $(RUSTC) foo.rs --crate-type=rlib -o $(TMPDIR)/foo.xxx + ln -nsf $(TMPDIR)/foo.xxx $(TMPDIR)/libfoo.rlib + $(RUSTC) bar.rs -L $(TMPDIR) + +else +all: + +endif diff --git a/src/test/run-make/symlinked-rlib/bar.rs b/src/test/run-make/symlinked-rlib/bar.rs new file mode 100644 index 0000000000..e8f0668086 --- /dev/null +++ b/src/test/run-make/symlinked-rlib/bar.rs @@ -0,0 +1,15 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +extern crate foo; + +fn main() { + foo::bar(); +} diff --git a/src/test/run-make/symlinked-rlib/foo.rs b/src/test/run-make/symlinked-rlib/foo.rs new file mode 100644 index 0000000000..5abbb1dcbc --- /dev/null +++ b/src/test/run-make/symlinked-rlib/foo.rs @@ -0,0 +1,11 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub fn bar() {} diff --git a/src/test/run-make/tools.mk b/src/test/run-make/tools.mk index dab6511eb9..38afa42a29 100644 --- a/src/test/run-make/tools.mk +++ b/src/test/run-make/tools.mk @@ -5,7 +5,7 @@ HOST_RPATH_ENV = \ TARGET_RPATH_ENV = \ $(LD_LIB_PATH_ENVVAR)="$(TMPDIR):$(TARGET_RPATH_DIR):$($(LD_LIB_PATH_ENVVAR))" -BARE_RUSTC := $(HOST_RPATH_ENV) $(RUSTC) +BARE_RUSTC := $(HOST_RPATH_ENV) '$(RUSTC)' RUSTC := $(BARE_RUSTC) --out-dir $(TMPDIR) -L $(TMPDIR) $(RUSTFLAGS) #CC := $(CC) -L $(TMPDIR) HTMLDOCCK := $(PYTHON) $(S)/src/etc/htmldocck.py diff --git a/src/test/auxiliary/custom_derive_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs similarity index 98% rename from src/test/auxiliary/custom_derive_plugin.rs rename to src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs index 5f0ef4de49..0132014de0 100644 --- a/src/test/auxiliary/custom_derive_plugin.rs +++ b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs @@ -58,6 +58,7 @@ fn expand(cx: &mut ExtCtxt, ret_ty: Literal(Path::new_local("isize")), attributes: vec![], is_unsafe: false, + unify_fieldless_variants: true, combine_substructure: combine_substructure(box |cx, span, substr| { let zero = cx.expr_isize(span, 0); cs_fold(false, diff --git a/src/test/auxiliary/custom_derive_plugin_attr.rs b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs similarity index 98% rename from src/test/auxiliary/custom_derive_plugin_attr.rs rename to src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs index 2878674f0e..6fa7891383 100644 --- a/src/test/auxiliary/custom_derive_plugin_attr.rs +++ b/src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs @@ -60,6 +60,7 @@ fn expand(cx: &mut ExtCtxt, ret_ty: Literal(Path::new_local("isize")), attributes: vec![], is_unsafe: false, + unify_fieldless_variants: true, 
combine_substructure: combine_substructure(Box::new(totalsum_substructure)), }, ], diff --git a/src/test/auxiliary/dummy_mir_pass.rs b/src/test/run-pass-fulldeps/auxiliary/dummy_mir_pass.rs similarity index 87% rename from src/test/auxiliary/dummy_mir_pass.rs rename to src/test/run-pass-fulldeps/auxiliary/dummy_mir_pass.rs index b5234af937..0abf71ba44 100644 --- a/src/test/auxiliary/dummy_mir_pass.rs +++ b/src/test/run-pass-fulldeps/auxiliary/dummy_mir_pass.rs @@ -18,21 +18,20 @@ extern crate rustc_plugin; extern crate rustc_const_math; extern crate syntax; -use rustc::mir::transform::{self, MirPass}; +use rustc::mir::transform::{self, MirPass, MirSource}; use rustc::mir::repr::{Mir, Literal}; use rustc::mir::visit::MutVisitor; -use rustc::ty; +use rustc::ty::TyCtxt; use rustc::middle::const_val::ConstVal; use rustc_const_math::ConstInt; use rustc_plugin::Registry; -use syntax::ast::NodeId; - struct Pass; impl transform::Pass for Pass {} impl<'tcx> MirPass<'tcx> for Pass { - fn run_pass(&mut self, _: &ty::TyCtxt<'tcx>, _: NodeId, mir: &mut Mir<'tcx>) { + fn run_pass<'a>(&mut self, _: TyCtxt<'a, 'tcx, 'tcx>, + _: MirSource, mir: &mut Mir<'tcx>) { Visitor.visit_mir(mir) } } diff --git a/src/test/auxiliary/issue-13560-1.rs b/src/test/run-pass-fulldeps/auxiliary/issue-13560-1.rs similarity index 100% rename from src/test/auxiliary/issue-13560-1.rs rename to src/test/run-pass-fulldeps/auxiliary/issue-13560-1.rs diff --git a/src/test/auxiliary/issue-13560-2.rs b/src/test/run-pass-fulldeps/auxiliary/issue-13560-2.rs similarity index 100% rename from src/test/auxiliary/issue-13560-2.rs rename to src/test/run-pass-fulldeps/auxiliary/issue-13560-2.rs diff --git a/src/test/auxiliary/issue-13560-3.rs b/src/test/run-pass-fulldeps/auxiliary/issue-13560-3.rs similarity index 100% rename from src/test/auxiliary/issue-13560-3.rs rename to src/test/run-pass-fulldeps/auxiliary/issue-13560-3.rs diff --git a/src/test/auxiliary/issue-16822.rs b/src/test/run-pass-fulldeps/auxiliary/issue-16822.rs similarity index 100% rename from src/test/auxiliary/issue-16822.rs rename to src/test/run-pass-fulldeps/auxiliary/issue-16822.rs diff --git a/src/test/auxiliary/issue-18502.rs b/src/test/run-pass-fulldeps/auxiliary/issue-18502.rs similarity index 100% rename from src/test/auxiliary/issue-18502.rs rename to src/test/run-pass-fulldeps/auxiliary/issue-18502.rs diff --git a/src/test/auxiliary/issue_16723_multiple_items_syntax_ext.rs b/src/test/run-pass-fulldeps/auxiliary/issue_16723_multiple_items_syntax_ext.rs similarity index 100% rename from src/test/auxiliary/issue_16723_multiple_items_syntax_ext.rs rename to src/test/run-pass-fulldeps/auxiliary/issue_16723_multiple_items_syntax_ext.rs diff --git a/src/test/auxiliary/linkage-visibility.rs b/src/test/run-pass-fulldeps/auxiliary/linkage-visibility.rs similarity index 100% rename from src/test/auxiliary/linkage-visibility.rs rename to src/test/run-pass-fulldeps/auxiliary/linkage-visibility.rs diff --git a/src/test/run-pass-fulldeps/auxiliary/lint_for_crate.rs b/src/test/run-pass-fulldeps/auxiliary/lint_for_crate.rs new file mode 100644 index 0000000000..a424517da1 --- /dev/null +++ b/src/test/run-pass-fulldeps/auxiliary/lint_for_crate.rs @@ -0,0 +1,47 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +// force-host + +#![feature(plugin_registrar, rustc_private)] +#![feature(box_syntax)] + +#[macro_use] extern crate rustc; +extern crate rustc_plugin; +extern crate syntax; + +use rustc::lint::{LateContext, LintContext, LintPass, LateLintPass, LateLintPassObject, LintArray}; +use rustc_plugin::Registry; +use rustc::hir; +use syntax::attr; + +declare_lint!(CRATE_NOT_OKAY, Warn, "crate not marked with #![crate_okay]"); + +struct Pass; + +impl LintPass for Pass { + fn get_lints(&self) -> LintArray { + lint_array!(CRATE_NOT_OKAY) + } +} + +impl LateLintPass for Pass { + fn check_crate(&mut self, cx: &LateContext, krate: &hir::Crate) { + if !attr::contains_name(&krate.attrs, "crate_okay") { + cx.span_lint(CRATE_NOT_OKAY, krate.span, + "crate is not marked with #![crate_okay]"); + } + } +} + +#[plugin_registrar] +pub fn plugin_registrar(reg: &mut Registry) { + reg.register_late_lint_pass(box Pass as LateLintPassObject); +} diff --git a/src/test/run-pass-fulldeps/auxiliary/lint_group_plugin_test.rs b/src/test/run-pass-fulldeps/auxiliary/lint_group_plugin_test.rs new file mode 100644 index 0000000000..1e9a77724a --- /dev/null +++ b/src/test/run-pass-fulldeps/auxiliary/lint_group_plugin_test.rs @@ -0,0 +1,51 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// force-host + +#![feature(plugin_registrar)] +#![feature(box_syntax, rustc_private)] + +// Load rustc as a plugin to get macros +#[macro_use] +extern crate rustc; +extern crate rustc_plugin; + +use rustc::hir; +use rustc::lint::{LateContext, LintContext, LintPass, LateLintPass, LateLintPassObject, LintArray}; +use rustc_plugin::Registry; + +declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'"); + +declare_lint!(PLEASE_LINT, Warn, "Warn about items named 'pleaselintme'"); + +struct Pass; + +impl LintPass for Pass { + fn get_lints(&self) -> LintArray { + lint_array!(TEST_LINT, PLEASE_LINT) + } +} + +impl LateLintPass for Pass { + fn check_item(&mut self, cx: &LateContext, it: &hir::Item) { + match &*it.name.as_str() { + "lintme" => cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'"), + "pleaselintme" => cx.span_lint(PLEASE_LINT, it.span, "item is named 'pleaselintme'"), + _ => {} + } + } +} + +#[plugin_registrar] +pub fn plugin_registrar(reg: &mut Registry) { + reg.register_late_lint_pass(box Pass as LateLintPassObject); + reg.register_lint_group("lint_me", vec![TEST_LINT, PLEASE_LINT]); +} diff --git a/src/test/run-pass-fulldeps/auxiliary/lint_plugin_test.rs b/src/test/run-pass-fulldeps/auxiliary/lint_plugin_test.rs new file mode 100644 index 0000000000..8ea131da33 --- /dev/null +++ b/src/test/run-pass-fulldeps/auxiliary/lint_plugin_test.rs @@ -0,0 +1,48 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// force-host + +#![feature(plugin_registrar)] +#![feature(box_syntax, rustc_private)] + +extern crate syntax; + +// Load rustc as a plugin to get macros +#[macro_use] +extern crate rustc; +extern crate rustc_plugin; + +use rustc::lint::{EarlyContext, LintContext, LintPass, EarlyLintPass, + EarlyLintPassObject, LintArray}; +use rustc_plugin::Registry; +use syntax::ast; +declare_lint!(TEST_LINT, Warn, "Warn about items named 'lintme'"); + +struct Pass; + +impl LintPass for Pass { + fn get_lints(&self) -> LintArray { + lint_array!(TEST_LINT) + } +} + +impl EarlyLintPass for Pass { + fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) { + if it.ident.name.as_str() == "lintme" { + cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'"); + } + } +} + +#[plugin_registrar] +pub fn plugin_registrar(reg: &mut Registry) { + reg.register_early_lint_pass(box Pass as EarlyLintPassObject); +} diff --git a/src/test/auxiliary/llvm_pass_plugin.rs b/src/test/run-pass-fulldeps/auxiliary/llvm_pass_plugin.rs similarity index 100% rename from src/test/auxiliary/llvm_pass_plugin.rs rename to src/test/run-pass-fulldeps/auxiliary/llvm_pass_plugin.rs diff --git a/src/test/auxiliary/logging_right_crate.rs b/src/test/run-pass-fulldeps/auxiliary/logging_right_crate.rs similarity index 100% rename from src/test/auxiliary/logging_right_crate.rs rename to src/test/run-pass-fulldeps/auxiliary/logging_right_crate.rs diff --git a/src/test/auxiliary/lto-syntax-extension-lib.rs b/src/test/run-pass-fulldeps/auxiliary/lto-syntax-extension-lib.rs similarity index 100% rename from src/test/auxiliary/lto-syntax-extension-lib.rs rename to src/test/run-pass-fulldeps/auxiliary/lto-syntax-extension-lib.rs diff --git a/src/test/auxiliary/lto-syntax-extension-plugin.rs b/src/test/run-pass-fulldeps/auxiliary/lto-syntax-extension-plugin.rs similarity index 100% rename from src/test/auxiliary/lto-syntax-extension-plugin.rs rename to src/test/run-pass-fulldeps/auxiliary/lto-syntax-extension-plugin.rs diff --git a/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs b/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs new file mode 100644 index 0000000000..3516f566e8 --- /dev/null +++ b/src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs @@ -0,0 +1,141 @@ +// Copyright 2013-2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// force-host + +#![feature(plugin_registrar, quote, rustc_private)] + +extern crate syntax; +extern crate rustc; +extern crate rustc_plugin; + +use syntax::ast::{self, TokenTree, Item, MetaItem, ImplItem, TraitItem, ItemKind}; +use syntax::codemap::Span; +use syntax::ext::base::*; +use syntax::parse::{self, token}; +use syntax::ptr::P; +use rustc_plugin::Registry; + +#[macro_export] +macro_rules! exported_macro { () => (2) } +macro_rules! unexported_macro { () => (3) } + +#[plugin_registrar] +pub fn plugin_registrar(reg: &mut Registry) { + reg.register_macro("make_a_1", expand_make_a_1); + reg.register_macro("identity", expand_identity); + reg.register_syntax_extension( + token::intern("into_multi_foo"), + // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. 
+ MultiModifier(Box::new(expand_into_foo_multi))); + reg.register_syntax_extension( + token::intern("duplicate"), + // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. + MultiDecorator(Box::new(expand_duplicate))); +} + +fn expand_make_a_1(cx: &mut ExtCtxt, sp: Span, tts: &[TokenTree]) + -> Box { + if !tts.is_empty() { + cx.span_fatal(sp, "make_a_1 takes no arguments"); + } + MacEager::expr(quote_expr!(cx, 1)) +} + +// See Issue #15750 +fn expand_identity(cx: &mut ExtCtxt, _span: Span, tts: &[TokenTree]) + -> Box { + // Parse an expression and emit it unchanged. + let mut parser = parse::new_parser_from_tts(cx.parse_sess(), + cx.cfg(), tts.to_vec()); + let expr = parser.parse_expr().unwrap(); + MacEager::expr(quote_expr!(&mut *cx, $expr)) +} + +fn expand_into_foo_multi(cx: &mut ExtCtxt, + sp: Span, + attr: &MetaItem, + it: Annotatable) -> Annotatable { + match it { + Annotatable::Item(it) => { + Annotatable::Item(P(Item { + attrs: it.attrs.clone(), + ..(*quote_item!(cx, enum Foo2 { Bar2, Baz2 }).unwrap()).clone() + })) + } + Annotatable::ImplItem(it) => { + quote_item!(cx, impl X { fn foo(&self) -> i32 { 42 } }).unwrap().and_then(|i| { + match i.node { + ItemKind::Impl(_, _, _, _, _, mut items) => { + Annotatable::ImplItem(P(items.pop().expect("impl method not found"))) + } + _ => unreachable!("impl parsed to something other than impl") + } + }) + } + Annotatable::TraitItem(it) => { + quote_item!(cx, trait X { fn foo(&self) -> i32 { 0 } }).unwrap().and_then(|i| { + match i.node { + ItemKind::Trait(_, _, _, mut items) => { + Annotatable::TraitItem(P(items.pop().expect("trait method not found"))) + } + _ => unreachable!("trait parsed to something other than trait") + } + }) + } + } +} + +// Create a duplicate of the annotatable, based on the MetaItem +fn expand_duplicate(cx: &mut ExtCtxt, + sp: Span, + mi: &MetaItem, + it: &Annotatable, + push: &mut FnMut(Annotatable)) +{ + let copy_name = match mi.node { + ast::MetaItemKind::List(_, ref xs) => { + if let ast::MetaItemKind::Word(ref w) = xs[0].node { + token::str_to_ident(&w) + } else { + cx.span_err(mi.span, "Expected word"); + return; + } + } + _ => { + cx.span_err(mi.span, "Expected list"); + return; + } + }; + + // Duplicate the item but replace its ident by the MetaItem + match it.clone() { + Annotatable::Item(it) => { + let mut new_it = (*it).clone(); + new_it.attrs.clear(); + new_it.ident = copy_name; + push(Annotatable::Item(P(new_it))); + } + Annotatable::ImplItem(it) => { + let mut new_it = (*it).clone(); + new_it.attrs.clear(); + new_it.ident = copy_name; + push(Annotatable::ImplItem(P(new_it))); + } + Annotatable::TraitItem(tt) => { + let mut new_it = (*tt).clone(); + new_it.attrs.clear(); + new_it.ident = copy_name; + push(Annotatable::TraitItem(P(new_it))); + } + } +} + +pub fn foo() {} diff --git a/src/test/auxiliary/plugin_args.rs b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs similarity index 97% rename from src/test/auxiliary/plugin_args.rs rename to src/test/run-pass-fulldeps/auxiliary/plugin_args.rs index f6e80266a1..99321ad424 100644 --- a/src/test/auxiliary/plugin_args.rs +++ b/src/test/run-pass-fulldeps/auxiliary/plugin_args.rs @@ -45,7 +45,7 @@ impl TTMacroExpander for Expander { #[plugin_registrar] pub fn plugin_registrar(reg: &mut Registry) { - let args = reg.args().clone(); + let args = reg.args().to_owned(); reg.register_syntax_extension(token::intern("plugin_args"), // FIXME (#22405): Replace `Box::new` with `box` here when/if possible. 
NormalTT(Box::new(Expander { args: args, }), None, false)); diff --git a/src/test/auxiliary/plugin_crate_outlive_expansion_phase.rs b/src/test/run-pass-fulldeps/auxiliary/plugin_crate_outlive_expansion_phase.rs similarity index 100% rename from src/test/auxiliary/plugin_crate_outlive_expansion_phase.rs rename to src/test/run-pass-fulldeps/auxiliary/plugin_crate_outlive_expansion_phase.rs diff --git a/src/test/auxiliary/plugin_with_plugin_lib.rs b/src/test/run-pass-fulldeps/auxiliary/plugin_with_plugin_lib.rs similarity index 100% rename from src/test/auxiliary/plugin_with_plugin_lib.rs rename to src/test/run-pass-fulldeps/auxiliary/plugin_with_plugin_lib.rs diff --git a/src/test/auxiliary/procedural_mbe_matching.rs b/src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs similarity index 100% rename from src/test/auxiliary/procedural_mbe_matching.rs rename to src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs diff --git a/src/test/auxiliary/roman_numerals.rs b/src/test/run-pass-fulldeps/auxiliary/roman_numerals.rs similarity index 97% rename from src/test/auxiliary/roman_numerals.rs rename to src/test/run-pass-fulldeps/auxiliary/roman_numerals.rs index c262b0dba2..839ece49c3 100644 --- a/src/test/auxiliary/roman_numerals.rs +++ b/src/test/run-pass-fulldeps/auxiliary/roman_numerals.rs @@ -48,7 +48,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree]) } let text = match args[0] { - TokenTree::Token(_, token::Ident(s, _)) => s.to_string(), + TokenTree::Token(_, token::Ident(s)) => s.to_string(), _ => { cx.span_err(sp, "argument should be a single identifier"); return DummyResult::any(sp); diff --git a/src/test/auxiliary/syntax_extension_with_dll_deps_1.rs b/src/test/run-pass-fulldeps/auxiliary/syntax_extension_with_dll_deps_1.rs similarity index 100% rename from src/test/auxiliary/syntax_extension_with_dll_deps_1.rs rename to src/test/run-pass-fulldeps/auxiliary/syntax_extension_with_dll_deps_1.rs diff --git a/src/test/auxiliary/syntax_extension_with_dll_deps_2.rs b/src/test/run-pass-fulldeps/auxiliary/syntax_extension_with_dll_deps_2.rs similarity index 100% rename from src/test/auxiliary/syntax_extension_with_dll_deps_2.rs rename to src/test/run-pass-fulldeps/auxiliary/syntax_extension_with_dll_deps_2.rs diff --git a/src/test/run-pass-fulldeps/compiler-calls.rs b/src/test/run-pass-fulldeps/compiler-calls.rs index 42784e009e..af641d717e 100644 --- a/src/test/run-pass-fulldeps/compiler-calls.rs +++ b/src/test/run-pass-fulldeps/compiler-calls.rs @@ -69,7 +69,10 @@ impl<'a> CompilerCalls<'a> for TestCalls { panic!("This shouldn't happen"); } - fn build_controller(&mut self, _: &Session) -> driver::CompileController<'a> { + fn build_controller(&mut self, + _: &Session, + _: &getopts::Matches) + -> driver::CompileController<'a> { panic!("This shouldn't be called"); } } diff --git a/src/test/run-pass-fulldeps/rustc_encodable_hygiene.rs b/src/test/run-pass-fulldeps/rustc_encodable_hygiene.rs new file mode 100644 index 0000000000..655b08225a --- /dev/null +++ b/src/test/run-pass-fulldeps/rustc_encodable_hygiene.rs @@ -0,0 +1,32 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(rustc_private)] + +#[allow(dead_code)] + +extern crate serialize as rustc_serialize; + +#[derive(RustcDecodable, RustcEncodable,Debug)] +struct A { + a: String, +} + +trait Trait { + fn encode(&self); +} + +impl Trait for T { + fn encode(&self) { + unimplemented!() + } +} + +fn main() {} diff --git a/src/test/run-pass/associated-const-outer-ty-refs.rs b/src/test/run-pass/associated-const-outer-ty-refs.rs new file mode 100644 index 0000000000..a603b22513 --- /dev/null +++ b/src/test/run-pass/associated-const-outer-ty-refs.rs @@ -0,0 +1,21 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. +#![feature(associated_consts)] + +trait Lattice { + const BOTTOM: Self; +} + +// FIXME(#33573): this should work without the 'static lifetime bound. +impl Lattice for Option { + const BOTTOM: Option = None; +} + +fn main(){} diff --git a/src/test/run-pass/associated-types-in-bound-type-arg.rs b/src/test/run-pass/associated-types-in-bound-type-arg.rs new file mode 100644 index 0000000000..18803d1571 --- /dev/null +++ b/src/test/run-pass/associated-types-in-bound-type-arg.rs @@ -0,0 +1,26 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test the case where we resolve `C::Result` and the trait bound +// itself includes a `Self::Item` shorthand. +// +// Regression test for issue #33425. 
+ +trait ParallelIterator { + type Item; + fn drive_unindexed(self, consumer: C) -> C::Result + where C: Consumer; +} + +pub trait Consumer { + type Result; +} + +fn main() { } diff --git a/src/test/auxiliary/allocator-dummy.rs b/src/test/run-pass/auxiliary/allocator-dummy.rs similarity index 100% rename from src/test/auxiliary/allocator-dummy.rs rename to src/test/run-pass/auxiliary/allocator-dummy.rs diff --git a/src/test/auxiliary/anon-extern-mod-cross-crate-1.rs b/src/test/run-pass/auxiliary/anon-extern-mod-cross-crate-1.rs similarity index 100% rename from src/test/auxiliary/anon-extern-mod-cross-crate-1.rs rename to src/test/run-pass/auxiliary/anon-extern-mod-cross-crate-1.rs diff --git a/src/test/auxiliary/anon_trait_static_method_lib.rs b/src/test/run-pass/auxiliary/anon_trait_static_method_lib.rs similarity index 100% rename from src/test/auxiliary/anon_trait_static_method_lib.rs rename to src/test/run-pass/auxiliary/anon_trait_static_method_lib.rs diff --git a/src/test/auxiliary/associated-const-cc-lib.rs b/src/test/run-pass/auxiliary/associated-const-cc-lib.rs similarity index 100% rename from src/test/auxiliary/associated-const-cc-lib.rs rename to src/test/run-pass/auxiliary/associated-const-cc-lib.rs diff --git a/src/test/auxiliary/associated-types-cc-lib.rs b/src/test/run-pass/auxiliary/associated-types-cc-lib.rs similarity index 100% rename from src/test/auxiliary/associated-types-cc-lib.rs rename to src/test/run-pass/auxiliary/associated-types-cc-lib.rs diff --git a/src/test/auxiliary/augmented_assignments.rs b/src/test/run-pass/auxiliary/augmented_assignments.rs similarity index 100% rename from src/test/auxiliary/augmented_assignments.rs rename to src/test/run-pass/auxiliary/augmented_assignments.rs diff --git a/src/test/auxiliary/blind-item-mixed-crate-use-item-foo.rs b/src/test/run-pass/auxiliary/blind-item-mixed-crate-use-item-foo.rs similarity index 100% rename from src/test/auxiliary/blind-item-mixed-crate-use-item-foo.rs rename to src/test/run-pass/auxiliary/blind-item-mixed-crate-use-item-foo.rs diff --git a/src/test/auxiliary/blind-item-mixed-crate-use-item-foo2.rs b/src/test/run-pass/auxiliary/blind-item-mixed-crate-use-item-foo2.rs similarity index 100% rename from src/test/auxiliary/blind-item-mixed-crate-use-item-foo2.rs rename to src/test/run-pass/auxiliary/blind-item-mixed-crate-use-item-foo2.rs diff --git a/src/test/auxiliary/cci_borrow_lib.rs b/src/test/run-pass/auxiliary/cci_borrow_lib.rs similarity index 100% rename from src/test/auxiliary/cci_borrow_lib.rs rename to src/test/run-pass/auxiliary/cci_borrow_lib.rs diff --git a/src/test/auxiliary/cci_capture_clause.rs b/src/test/run-pass/auxiliary/cci_capture_clause.rs similarity index 100% rename from src/test/auxiliary/cci_capture_clause.rs rename to src/test/run-pass/auxiliary/cci_capture_clause.rs diff --git a/src/test/run-pass/auxiliary/cci_class.rs b/src/test/run-pass/auxiliary/cci_class.rs new file mode 100644 index 0000000000..08a13fd8bc --- /dev/null +++ b/src/test/run-pass/auxiliary/cci_class.rs @@ -0,0 +1,24 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +pub mod kitties { + pub struct cat { + meows : usize, + + pub how_hungry : isize, + } + + pub fn cat(in_x : usize, in_y : isize) -> cat { + cat { + meows: in_x, + how_hungry: in_y + } + } +} diff --git a/src/test/auxiliary/cci_class_2.rs b/src/test/run-pass/auxiliary/cci_class_2.rs similarity index 100% rename from src/test/auxiliary/cci_class_2.rs rename to src/test/run-pass/auxiliary/cci_class_2.rs diff --git a/src/test/auxiliary/cci_class_3.rs b/src/test/run-pass/auxiliary/cci_class_3.rs similarity index 100% rename from src/test/auxiliary/cci_class_3.rs rename to src/test/run-pass/auxiliary/cci_class_3.rs diff --git a/src/test/auxiliary/cci_class_4.rs b/src/test/run-pass/auxiliary/cci_class_4.rs similarity index 100% rename from src/test/auxiliary/cci_class_4.rs rename to src/test/run-pass/auxiliary/cci_class_4.rs diff --git a/src/test/auxiliary/cci_class_6.rs b/src/test/run-pass/auxiliary/cci_class_6.rs similarity index 100% rename from src/test/auxiliary/cci_class_6.rs rename to src/test/run-pass/auxiliary/cci_class_6.rs diff --git a/src/test/auxiliary/cci_class_cast.rs b/src/test/run-pass/auxiliary/cci_class_cast.rs similarity index 100% rename from src/test/auxiliary/cci_class_cast.rs rename to src/test/run-pass/auxiliary/cci_class_cast.rs diff --git a/src/test/auxiliary/cci_class_trait.rs b/src/test/run-pass/auxiliary/cci_class_trait.rs similarity index 100% rename from src/test/auxiliary/cci_class_trait.rs rename to src/test/run-pass/auxiliary/cci_class_trait.rs diff --git a/src/test/auxiliary/cci_const.rs b/src/test/run-pass/auxiliary/cci_const.rs similarity index 100% rename from src/test/auxiliary/cci_const.rs rename to src/test/run-pass/auxiliary/cci_const.rs diff --git a/src/test/auxiliary/cci_const_block.rs b/src/test/run-pass/auxiliary/cci_const_block.rs similarity index 100% rename from src/test/auxiliary/cci_const_block.rs rename to src/test/run-pass/auxiliary/cci_const_block.rs diff --git a/src/test/auxiliary/cci_impl_lib.rs b/src/test/run-pass/auxiliary/cci_impl_lib.rs similarity index 100% rename from src/test/auxiliary/cci_impl_lib.rs rename to src/test/run-pass/auxiliary/cci_impl_lib.rs diff --git a/src/test/auxiliary/cci_intrinsic.rs b/src/test/run-pass/auxiliary/cci_intrinsic.rs similarity index 100% rename from src/test/auxiliary/cci_intrinsic.rs rename to src/test/run-pass/auxiliary/cci_intrinsic.rs diff --git a/src/test/auxiliary/cci_iter_lib.rs b/src/test/run-pass/auxiliary/cci_iter_lib.rs similarity index 100% rename from src/test/auxiliary/cci_iter_lib.rs rename to src/test/run-pass/auxiliary/cci_iter_lib.rs diff --git a/src/test/auxiliary/cci_nested_lib.rs b/src/test/run-pass/auxiliary/cci_nested_lib.rs similarity index 100% rename from src/test/auxiliary/cci_nested_lib.rs rename to src/test/run-pass/auxiliary/cci_nested_lib.rs diff --git a/src/test/auxiliary/cci_no_inline_lib.rs b/src/test/run-pass/auxiliary/cci_no_inline_lib.rs similarity index 100% rename from src/test/auxiliary/cci_no_inline_lib.rs rename to src/test/run-pass/auxiliary/cci_no_inline_lib.rs diff --git a/src/test/auxiliary/cfg_inner_static.rs b/src/test/run-pass/auxiliary/cfg_inner_static.rs similarity index 100% rename from src/test/auxiliary/cfg_inner_static.rs rename to src/test/run-pass/auxiliary/cfg_inner_static.rs diff --git a/src/test/auxiliary/cgu_test.rs b/src/test/run-pass/auxiliary/cgu_test.rs similarity index 100% rename from src/test/auxiliary/cgu_test.rs rename to src/test/run-pass/auxiliary/cgu_test.rs diff --git a/src/test/auxiliary/cgu_test_a.rs 
b/src/test/run-pass/auxiliary/cgu_test_a.rs similarity index 100% rename from src/test/auxiliary/cgu_test_a.rs rename to src/test/run-pass/auxiliary/cgu_test_a.rs diff --git a/src/test/auxiliary/cgu_test_b.rs b/src/test/run-pass/auxiliary/cgu_test_b.rs similarity index 100% rename from src/test/auxiliary/cgu_test_b.rs rename to src/test/run-pass/auxiliary/cgu_test_b.rs diff --git a/src/test/auxiliary/check_static_recursion_foreign_helper.rs b/src/test/run-pass/auxiliary/check_static_recursion_foreign_helper.rs similarity index 100% rename from src/test/auxiliary/check_static_recursion_foreign_helper.rs rename to src/test/run-pass/auxiliary/check_static_recursion_foreign_helper.rs diff --git a/src/test/run-pass/auxiliary/coherence_copy_like_lib.rs b/src/test/run-pass/auxiliary/coherence_copy_like_lib.rs new file mode 100644 index 0000000000..d3d389c6a8 --- /dev/null +++ b/src/test/run-pass/auxiliary/coherence_copy_like_lib.rs @@ -0,0 +1,20 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "rlib"] +#![feature(fundamental)] + +pub trait MyCopy { } +impl MyCopy for i32 { } + +pub struct MyStruct(T); + +#[fundamental] +pub struct MyFundamentalStruct(T); diff --git a/src/test/run-pass/auxiliary/coherence_lib.rs b/src/test/run-pass/auxiliary/coherence_lib.rs new file mode 100644 index 0000000000..daa123849e --- /dev/null +++ b/src/test/run-pass/auxiliary/coherence_lib.rs @@ -0,0 +1,25 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type="lib"] + +pub trait Remote { + fn foo(&self) { } +} + +pub trait Remote1 { + fn foo(&self, t: T) { } +} + +pub trait Remote2 { + fn foo(&self, t: T, u: U) { } +} + +pub struct Pair(T,U); diff --git a/src/test/run-pass/auxiliary/const_fn_lib.rs b/src/test/run-pass/auxiliary/const_fn_lib.rs new file mode 100644 index 0000000000..b0d5a6b127 --- /dev/null +++ b/src/test/run-pass/auxiliary/const_fn_lib.rs @@ -0,0 +1,16 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Crate that exports a const fn. Used for testing cross-crate. 
+ +#![crate_type="rlib"] +#![feature(const_fn)] + +pub const fn foo() -> usize { 22 } //~ ERROR const fn is unstable diff --git a/src/test/auxiliary/crate-attributes-using-cfg_attr.rs b/src/test/run-pass/auxiliary/crate-attributes-using-cfg_attr.rs similarity index 100% rename from src/test/auxiliary/crate-attributes-using-cfg_attr.rs rename to src/test/run-pass/auxiliary/crate-attributes-using-cfg_attr.rs diff --git a/src/test/auxiliary/crate-method-reexport-grrrrrrr2.rs b/src/test/run-pass/auxiliary/crate-method-reexport-grrrrrrr2.rs similarity index 100% rename from src/test/auxiliary/crate-method-reexport-grrrrrrr2.rs rename to src/test/run-pass/auxiliary/crate-method-reexport-grrrrrrr2.rs diff --git a/src/test/auxiliary/default_type_params_xc.rs b/src/test/run-pass/auxiliary/default_type_params_xc.rs similarity index 100% rename from src/test/auxiliary/default_type_params_xc.rs rename to src/test/run-pass/auxiliary/default_type_params_xc.rs diff --git a/src/test/auxiliary/derive-no-std.rs b/src/test/run-pass/auxiliary/derive-no-std.rs similarity index 100% rename from src/test/auxiliary/derive-no-std.rs rename to src/test/run-pass/auxiliary/derive-no-std.rs diff --git a/src/test/run-pass/auxiliary/empty-struct.rs b/src/test/run-pass/auxiliary/empty-struct.rs new file mode 100644 index 0000000000..22f65c2b0d --- /dev/null +++ b/src/test/run-pass/auxiliary/empty-struct.rs @@ -0,0 +1,17 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub struct XEmpty1 {} +pub struct XEmpty2; + +pub enum XE { + XEmpty3 {}, + XEmpty4, +} diff --git a/src/test/auxiliary/explicit_self_xcrate.rs b/src/test/run-pass/auxiliary/explicit_self_xcrate.rs similarity index 100% rename from src/test/auxiliary/explicit_self_xcrate.rs rename to src/test/run-pass/auxiliary/explicit_self_xcrate.rs diff --git a/src/test/auxiliary/extern-crosscrate-source.rs b/src/test/run-pass/auxiliary/extern-crosscrate-source.rs similarity index 100% rename from src/test/auxiliary/extern-crosscrate-source.rs rename to src/test/run-pass/auxiliary/extern-crosscrate-source.rs diff --git a/src/test/auxiliary/extern-take-value.rs b/src/test/run-pass/auxiliary/extern-take-value.rs similarity index 100% rename from src/test/auxiliary/extern-take-value.rs rename to src/test/run-pass/auxiliary/extern-take-value.rs diff --git a/src/test/auxiliary/extern_calling_convention.rs b/src/test/run-pass/auxiliary/extern_calling_convention.rs similarity index 100% rename from src/test/auxiliary/extern_calling_convention.rs rename to src/test/run-pass/auxiliary/extern_calling_convention.rs diff --git a/src/test/auxiliary/extern_mod_ordering_lib.rs b/src/test/run-pass/auxiliary/extern_mod_ordering_lib.rs similarity index 100% rename from src/test/auxiliary/extern_mod_ordering_lib.rs rename to src/test/run-pass/auxiliary/extern_mod_ordering_lib.rs diff --git a/src/test/auxiliary/fat_drop.rs b/src/test/run-pass/auxiliary/fat_drop.rs similarity index 100% rename from src/test/auxiliary/fat_drop.rs rename to src/test/run-pass/auxiliary/fat_drop.rs diff --git a/src/test/auxiliary/fn-abi.rs b/src/test/run-pass/auxiliary/fn-abi.rs similarity index 100% rename from src/test/auxiliary/fn-abi.rs rename to src/test/run-pass/auxiliary/fn-abi.rs diff --git 
a/src/test/auxiliary/foreign_lib.rs b/src/test/run-pass/auxiliary/foreign_lib.rs similarity index 100% rename from src/test/auxiliary/foreign_lib.rs rename to src/test/run-pass/auxiliary/foreign_lib.rs diff --git a/src/test/run-pass/auxiliary/go_trait.rs b/src/test/run-pass/auxiliary/go_trait.rs new file mode 100644 index 0000000000..044bb606b4 --- /dev/null +++ b/src/test/run-pass/auxiliary/go_trait.rs @@ -0,0 +1,53 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(specialization)] + +// Common code used for tests that model the Fn/FnMut/FnOnce hierarchy. + +pub trait Go { + fn go(&self, arg: isize); +} + +pub fn go(this: &G, arg: isize) { + this.go(arg) +} + +pub trait GoMut { + fn go_mut(&mut self, arg: isize); +} + +pub fn go_mut(this: &mut G, arg: isize) { + this.go_mut(arg) +} + +pub trait GoOnce { + fn go_once(self, arg: isize); +} + +pub fn go_once(this: G, arg: isize) { + this.go_once(arg) +} + +impl GoMut for G + where G : Go +{ + default fn go_mut(&mut self, arg: isize) { + go(&*self, arg) + } +} + +impl GoOnce for G + where G : GoMut +{ + default fn go_once(mut self, arg: isize) { + go_mut(&mut self, arg) + } +} diff --git a/src/test/auxiliary/i8.rs b/src/test/run-pass/auxiliary/i8.rs similarity index 100% rename from src/test/auxiliary/i8.rs rename to src/test/run-pass/auxiliary/i8.rs diff --git a/src/test/auxiliary/impl_privacy_xc_1.rs b/src/test/run-pass/auxiliary/impl_privacy_xc_1.rs similarity index 100% rename from src/test/auxiliary/impl_privacy_xc_1.rs rename to src/test/run-pass/auxiliary/impl_privacy_xc_1.rs diff --git a/src/test/auxiliary/impl_privacy_xc_2.rs b/src/test/run-pass/auxiliary/impl_privacy_xc_2.rs similarity index 100% rename from src/test/auxiliary/impl_privacy_xc_2.rs rename to src/test/run-pass/auxiliary/impl_privacy_xc_2.rs diff --git a/src/test/auxiliary/inline_dtor.rs b/src/test/run-pass/auxiliary/inline_dtor.rs similarity index 100% rename from src/test/auxiliary/inline_dtor.rs rename to src/test/run-pass/auxiliary/inline_dtor.rs diff --git a/src/test/auxiliary/inner_static.rs b/src/test/run-pass/auxiliary/inner_static.rs similarity index 100% rename from src/test/auxiliary/inner_static.rs rename to src/test/run-pass/auxiliary/inner_static.rs diff --git a/src/test/auxiliary/iss.rs b/src/test/run-pass/auxiliary/iss.rs similarity index 100% rename from src/test/auxiliary/iss.rs rename to src/test/run-pass/auxiliary/iss.rs diff --git a/src/test/auxiliary/issue-10028.rs b/src/test/run-pass/auxiliary/issue-10028.rs similarity index 100% rename from src/test/auxiliary/issue-10028.rs rename to src/test/run-pass/auxiliary/issue-10028.rs diff --git a/src/test/auxiliary/issue-11224.rs b/src/test/run-pass/auxiliary/issue-11224.rs similarity index 100% rename from src/test/auxiliary/issue-11224.rs rename to src/test/run-pass/auxiliary/issue-11224.rs diff --git a/src/test/auxiliary/issue-11225-1.rs b/src/test/run-pass/auxiliary/issue-11225-1.rs similarity index 100% rename from src/test/auxiliary/issue-11225-1.rs rename to src/test/run-pass/auxiliary/issue-11225-1.rs diff --git a/src/test/auxiliary/issue-11225-2.rs b/src/test/run-pass/auxiliary/issue-11225-2.rs similarity index 100% rename from src/test/auxiliary/issue-11225-2.rs rename to 
src/test/run-pass/auxiliary/issue-11225-2.rs diff --git a/src/test/auxiliary/issue-11225-3.rs b/src/test/run-pass/auxiliary/issue-11225-3.rs similarity index 100% rename from src/test/auxiliary/issue-11225-3.rs rename to src/test/run-pass/auxiliary/issue-11225-3.rs diff --git a/src/test/auxiliary/issue-11508.rs b/src/test/run-pass/auxiliary/issue-11508.rs similarity index 100% rename from src/test/auxiliary/issue-11508.rs rename to src/test/run-pass/auxiliary/issue-11508.rs diff --git a/src/test/auxiliary/issue-11529.rs b/src/test/run-pass/auxiliary/issue-11529.rs similarity index 100% rename from src/test/auxiliary/issue-11529.rs rename to src/test/run-pass/auxiliary/issue-11529.rs diff --git a/src/test/auxiliary/issue-12133-dylib.rs b/src/test/run-pass/auxiliary/issue-12133-dylib.rs similarity index 100% rename from src/test/auxiliary/issue-12133-dylib.rs rename to src/test/run-pass/auxiliary/issue-12133-dylib.rs diff --git a/src/test/auxiliary/issue-12133-dylib2.rs b/src/test/run-pass/auxiliary/issue-12133-dylib2.rs similarity index 100% rename from src/test/auxiliary/issue-12133-dylib2.rs rename to src/test/run-pass/auxiliary/issue-12133-dylib2.rs diff --git a/src/test/auxiliary/issue-12133-rlib.rs b/src/test/run-pass/auxiliary/issue-12133-rlib.rs similarity index 100% rename from src/test/auxiliary/issue-12133-rlib.rs rename to src/test/run-pass/auxiliary/issue-12133-rlib.rs diff --git a/src/test/auxiliary/issue-12660-aux.rs b/src/test/run-pass/auxiliary/issue-12660-aux.rs similarity index 100% rename from src/test/auxiliary/issue-12660-aux.rs rename to src/test/run-pass/auxiliary/issue-12660-aux.rs diff --git a/src/test/auxiliary/issue-13620-1.rs b/src/test/run-pass/auxiliary/issue-13620-1.rs similarity index 100% rename from src/test/auxiliary/issue-13620-1.rs rename to src/test/run-pass/auxiliary/issue-13620-1.rs diff --git a/src/test/auxiliary/issue-13620-2.rs b/src/test/run-pass/auxiliary/issue-13620-2.rs similarity index 100% rename from src/test/auxiliary/issue-13620-2.rs rename to src/test/run-pass/auxiliary/issue-13620-2.rs diff --git a/src/test/auxiliary/issue-13872-1.rs b/src/test/run-pass/auxiliary/issue-13872-1.rs similarity index 100% rename from src/test/auxiliary/issue-13872-1.rs rename to src/test/run-pass/auxiliary/issue-13872-1.rs diff --git a/src/test/auxiliary/issue-13872-2.rs b/src/test/run-pass/auxiliary/issue-13872-2.rs similarity index 100% rename from src/test/auxiliary/issue-13872-2.rs rename to src/test/run-pass/auxiliary/issue-13872-2.rs diff --git a/src/test/auxiliary/issue-13872-3.rs b/src/test/run-pass/auxiliary/issue-13872-3.rs similarity index 100% rename from src/test/auxiliary/issue-13872-3.rs rename to src/test/run-pass/auxiliary/issue-13872-3.rs diff --git a/src/test/auxiliary/issue-14344-1.rs b/src/test/run-pass/auxiliary/issue-14344-1.rs similarity index 100% rename from src/test/auxiliary/issue-14344-1.rs rename to src/test/run-pass/auxiliary/issue-14344-1.rs diff --git a/src/test/auxiliary/issue-14344-2.rs b/src/test/run-pass/auxiliary/issue-14344-2.rs similarity index 100% rename from src/test/auxiliary/issue-14344-2.rs rename to src/test/run-pass/auxiliary/issue-14344-2.rs diff --git a/src/test/auxiliary/issue-14421.rs b/src/test/run-pass/auxiliary/issue-14421.rs similarity index 100% rename from src/test/auxiliary/issue-14421.rs rename to src/test/run-pass/auxiliary/issue-14421.rs diff --git a/src/test/auxiliary/issue-14422.rs b/src/test/run-pass/auxiliary/issue-14422.rs similarity index 100% rename from src/test/auxiliary/issue-14422.rs 
rename to src/test/run-pass/auxiliary/issue-14422.rs diff --git a/src/test/auxiliary/issue-15562.rs b/src/test/run-pass/auxiliary/issue-15562.rs similarity index 100% rename from src/test/auxiliary/issue-15562.rs rename to src/test/run-pass/auxiliary/issue-15562.rs diff --git a/src/test/auxiliary/issue-16643.rs b/src/test/run-pass/auxiliary/issue-16643.rs similarity index 100% rename from src/test/auxiliary/issue-16643.rs rename to src/test/run-pass/auxiliary/issue-16643.rs diff --git a/src/test/auxiliary/issue-17662.rs b/src/test/run-pass/auxiliary/issue-17662.rs similarity index 100% rename from src/test/auxiliary/issue-17662.rs rename to src/test/run-pass/auxiliary/issue-17662.rs diff --git a/src/test/auxiliary/issue-17718-aux.rs b/src/test/run-pass/auxiliary/issue-17718-aux.rs similarity index 100% rename from src/test/auxiliary/issue-17718-aux.rs rename to src/test/run-pass/auxiliary/issue-17718-aux.rs diff --git a/src/test/auxiliary/issue-18501.rs b/src/test/run-pass/auxiliary/issue-18501.rs similarity index 100% rename from src/test/auxiliary/issue-18501.rs rename to src/test/run-pass/auxiliary/issue-18501.rs diff --git a/src/test/auxiliary/issue-18514.rs b/src/test/run-pass/auxiliary/issue-18514.rs similarity index 100% rename from src/test/auxiliary/issue-18514.rs rename to src/test/run-pass/auxiliary/issue-18514.rs diff --git a/src/test/auxiliary/issue-18711.rs b/src/test/run-pass/auxiliary/issue-18711.rs similarity index 100% rename from src/test/auxiliary/issue-18711.rs rename to src/test/run-pass/auxiliary/issue-18711.rs diff --git a/src/test/auxiliary/issue-18913-1.rs b/src/test/run-pass/auxiliary/issue-18913-1.rs similarity index 100% rename from src/test/auxiliary/issue-18913-1.rs rename to src/test/run-pass/auxiliary/issue-18913-1.rs diff --git a/src/test/auxiliary/issue-18913-2.rs b/src/test/run-pass/auxiliary/issue-18913-2.rs similarity index 100% rename from src/test/auxiliary/issue-18913-2.rs rename to src/test/run-pass/auxiliary/issue-18913-2.rs diff --git a/src/test/auxiliary/issue-19340-1.rs b/src/test/run-pass/auxiliary/issue-19340-1.rs similarity index 100% rename from src/test/auxiliary/issue-19340-1.rs rename to src/test/run-pass/auxiliary/issue-19340-1.rs diff --git a/src/test/auxiliary/issue-2380.rs b/src/test/run-pass/auxiliary/issue-2380.rs similarity index 100% rename from src/test/auxiliary/issue-2380.rs rename to src/test/run-pass/auxiliary/issue-2380.rs diff --git a/src/test/auxiliary/issue-2414-a.rs b/src/test/run-pass/auxiliary/issue-2414-a.rs similarity index 100% rename from src/test/auxiliary/issue-2414-a.rs rename to src/test/run-pass/auxiliary/issue-2414-a.rs diff --git a/src/test/auxiliary/issue-2414-b.rs b/src/test/run-pass/auxiliary/issue-2414-b.rs similarity index 100% rename from src/test/auxiliary/issue-2414-b.rs rename to src/test/run-pass/auxiliary/issue-2414-b.rs diff --git a/src/test/auxiliary/issue-25185-1.rs b/src/test/run-pass/auxiliary/issue-25185-1.rs similarity index 100% rename from src/test/auxiliary/issue-25185-1.rs rename to src/test/run-pass/auxiliary/issue-25185-1.rs diff --git a/src/test/auxiliary/issue-25185-2.rs b/src/test/run-pass/auxiliary/issue-25185-2.rs similarity index 100% rename from src/test/auxiliary/issue-25185-2.rs rename to src/test/run-pass/auxiliary/issue-25185-2.rs diff --git a/src/test/auxiliary/issue-2526.rs b/src/test/run-pass/auxiliary/issue-2526.rs similarity index 100% rename from src/test/auxiliary/issue-2526.rs rename to src/test/run-pass/auxiliary/issue-2526.rs diff --git 
a/src/test/auxiliary/issue-25467.rs b/src/test/run-pass/auxiliary/issue-25467.rs similarity index 100% rename from src/test/auxiliary/issue-25467.rs rename to src/test/run-pass/auxiliary/issue-25467.rs diff --git a/src/test/auxiliary/issue-2631-a.rs b/src/test/run-pass/auxiliary/issue-2631-a.rs similarity index 100% rename from src/test/auxiliary/issue-2631-a.rs rename to src/test/run-pass/auxiliary/issue-2631-a.rs diff --git a/src/test/auxiliary/issue-29485.rs b/src/test/run-pass/auxiliary/issue-29485.rs similarity index 100% rename from src/test/auxiliary/issue-29485.rs rename to src/test/run-pass/auxiliary/issue-29485.rs diff --git a/src/test/auxiliary/issue-3012-1.rs b/src/test/run-pass/auxiliary/issue-3012-1.rs similarity index 100% rename from src/test/auxiliary/issue-3012-1.rs rename to src/test/run-pass/auxiliary/issue-3012-1.rs diff --git a/src/test/auxiliary/issue-31702-1.rs b/src/test/run-pass/auxiliary/issue-31702-1.rs similarity index 100% rename from src/test/auxiliary/issue-31702-1.rs rename to src/test/run-pass/auxiliary/issue-31702-1.rs diff --git a/src/test/auxiliary/issue-31702-2.rs b/src/test/run-pass/auxiliary/issue-31702-2.rs similarity index 100% rename from src/test/auxiliary/issue-31702-2.rs rename to src/test/run-pass/auxiliary/issue-31702-2.rs diff --git a/src/test/auxiliary/issue-4208-cc.rs b/src/test/run-pass/auxiliary/issue-4208-cc.rs similarity index 100% rename from src/test/auxiliary/issue-4208-cc.rs rename to src/test/run-pass/auxiliary/issue-4208-cc.rs diff --git a/src/test/auxiliary/issue-4545.rs b/src/test/run-pass/auxiliary/issue-4545.rs similarity index 100% rename from src/test/auxiliary/issue-4545.rs rename to src/test/run-pass/auxiliary/issue-4545.rs diff --git a/src/test/auxiliary/issue-5518.rs b/src/test/run-pass/auxiliary/issue-5518.rs similarity index 100% rename from src/test/auxiliary/issue-5518.rs rename to src/test/run-pass/auxiliary/issue-5518.rs diff --git a/src/test/auxiliary/issue-5521.rs b/src/test/run-pass/auxiliary/issue-5521.rs similarity index 100% rename from src/test/auxiliary/issue-5521.rs rename to src/test/run-pass/auxiliary/issue-5521.rs diff --git a/src/test/auxiliary/issue-7178.rs b/src/test/run-pass/auxiliary/issue-7178.rs similarity index 100% rename from src/test/auxiliary/issue-7178.rs rename to src/test/run-pass/auxiliary/issue-7178.rs diff --git a/src/test/auxiliary/issue-7899.rs b/src/test/run-pass/auxiliary/issue-7899.rs similarity index 100% rename from src/test/auxiliary/issue-7899.rs rename to src/test/run-pass/auxiliary/issue-7899.rs diff --git a/src/test/auxiliary/issue-8044.rs b/src/test/run-pass/auxiliary/issue-8044.rs similarity index 100% rename from src/test/auxiliary/issue-8044.rs rename to src/test/run-pass/auxiliary/issue-8044.rs diff --git a/src/test/auxiliary/issue-8259.rs b/src/test/run-pass/auxiliary/issue-8259.rs similarity index 100% rename from src/test/auxiliary/issue-8259.rs rename to src/test/run-pass/auxiliary/issue-8259.rs diff --git a/src/test/auxiliary/issue-9906.rs b/src/test/run-pass/auxiliary/issue-9906.rs similarity index 100% rename from src/test/auxiliary/issue-9906.rs rename to src/test/run-pass/auxiliary/issue-9906.rs diff --git a/src/test/auxiliary/issue-9968.rs b/src/test/run-pass/auxiliary/issue-9968.rs similarity index 100% rename from src/test/auxiliary/issue-9968.rs rename to src/test/run-pass/auxiliary/issue-9968.rs diff --git a/src/test/auxiliary/issue13507.rs b/src/test/run-pass/auxiliary/issue13507.rs similarity index 100% rename from src/test/auxiliary/issue13507.rs 
rename to src/test/run-pass/auxiliary/issue13507.rs diff --git a/src/test/auxiliary/issue2170lib.rs b/src/test/run-pass/auxiliary/issue2170lib.rs similarity index 100% rename from src/test/auxiliary/issue2170lib.rs rename to src/test/run-pass/auxiliary/issue2170lib.rs diff --git a/src/test/auxiliary/issue_10031_aux.rs b/src/test/run-pass/auxiliary/issue_10031_aux.rs similarity index 100% rename from src/test/auxiliary/issue_10031_aux.rs rename to src/test/run-pass/auxiliary/issue_10031_aux.rs diff --git a/src/test/auxiliary/issue_3907_1.rs b/src/test/run-pass/auxiliary/issue_12612_1.rs similarity index 93% rename from src/test/auxiliary/issue_3907_1.rs rename to src/test/run-pass/auxiliary/issue_12612_1.rs index 25d2e3399c..a0234c1185 100644 --- a/src/test/auxiliary/issue_3907_1.rs +++ b/src/test/run-pass/auxiliary/issue_12612_1.rs @@ -8,6 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -pub trait Foo { - fn bar(); +pub mod bar { + pub fn foo() {} } diff --git a/src/test/auxiliary/issue_12612_2.rs b/src/test/run-pass/auxiliary/issue_12612_2.rs similarity index 100% rename from src/test/auxiliary/issue_12612_2.rs rename to src/test/run-pass/auxiliary/issue_12612_2.rs diff --git a/src/test/auxiliary/issue_19293.rs b/src/test/run-pass/auxiliary/issue_19293.rs similarity index 100% rename from src/test/auxiliary/issue_19293.rs rename to src/test/run-pass/auxiliary/issue_19293.rs diff --git a/src/test/auxiliary/issue_20389.rs b/src/test/run-pass/auxiliary/issue_20389.rs similarity index 100% rename from src/test/auxiliary/issue_20389.rs rename to src/test/run-pass/auxiliary/issue_20389.rs diff --git a/src/test/auxiliary/issue_2316_a.rs b/src/test/run-pass/auxiliary/issue_2316_a.rs similarity index 100% rename from src/test/auxiliary/issue_2316_a.rs rename to src/test/run-pass/auxiliary/issue_2316_a.rs diff --git a/src/test/auxiliary/issue_2316_b.rs b/src/test/run-pass/auxiliary/issue_2316_b.rs similarity index 100% rename from src/test/auxiliary/issue_2316_b.rs rename to src/test/run-pass/auxiliary/issue_2316_b.rs diff --git a/src/test/auxiliary/issue_2472_b.rs b/src/test/run-pass/auxiliary/issue_2472_b.rs similarity index 100% rename from src/test/auxiliary/issue_2472_b.rs rename to src/test/run-pass/auxiliary/issue_2472_b.rs diff --git a/src/test/auxiliary/issue_2723_a.rs b/src/test/run-pass/auxiliary/issue_2723_a.rs similarity index 100% rename from src/test/auxiliary/issue_2723_a.rs rename to src/test/run-pass/auxiliary/issue_2723_a.rs diff --git a/src/test/auxiliary/issue_3136_a.rc b/src/test/run-pass/auxiliary/issue_3136_a.rc similarity index 100% rename from src/test/auxiliary/issue_3136_a.rc rename to src/test/run-pass/auxiliary/issue_3136_a.rc diff --git a/src/test/auxiliary/issue_3136_a.rs b/src/test/run-pass/auxiliary/issue_3136_a.rs similarity index 100% rename from src/test/auxiliary/issue_3136_a.rs rename to src/test/run-pass/auxiliary/issue_3136_a.rs diff --git a/src/test/auxiliary/issue_3979_traits.rs b/src/test/run-pass/auxiliary/issue_3979_traits.rs similarity index 100% rename from src/test/auxiliary/issue_3979_traits.rs rename to src/test/run-pass/auxiliary/issue_3979_traits.rs diff --git a/src/test/auxiliary/issue_8401.rs b/src/test/run-pass/auxiliary/issue_8401.rs similarity index 100% rename from src/test/auxiliary/issue_8401.rs rename to src/test/run-pass/auxiliary/issue_8401.rs diff --git a/src/test/auxiliary/issue_9123.rs b/src/test/run-pass/auxiliary/issue_9123.rs similarity index 100% rename from 
src/test/auxiliary/issue_9123.rs rename to src/test/run-pass/auxiliary/issue_9123.rs diff --git a/src/test/auxiliary/issue_9155.rs b/src/test/run-pass/auxiliary/issue_9155.rs similarity index 100% rename from src/test/auxiliary/issue_9155.rs rename to src/test/run-pass/auxiliary/issue_9155.rs diff --git a/src/test/auxiliary/issue_9188.rs b/src/test/run-pass/auxiliary/issue_9188.rs similarity index 100% rename from src/test/auxiliary/issue_9188.rs rename to src/test/run-pass/auxiliary/issue_9188.rs diff --git a/src/test/auxiliary/kinds_in_metadata.rs b/src/test/run-pass/auxiliary/kinds_in_metadata.rs similarity index 100% rename from src/test/auxiliary/kinds_in_metadata.rs rename to src/test/run-pass/auxiliary/kinds_in_metadata.rs diff --git a/src/test/auxiliary/linkage1.rs b/src/test/run-pass/auxiliary/linkage1.rs similarity index 100% rename from src/test/auxiliary/linkage1.rs rename to src/test/run-pass/auxiliary/linkage1.rs diff --git a/src/test/auxiliary/macro-include-items-expr.rs b/src/test/run-pass/auxiliary/macro-include-items-expr.rs similarity index 100% rename from src/test/auxiliary/macro-include-items-expr.rs rename to src/test/run-pass/auxiliary/macro-include-items-expr.rs diff --git a/src/test/auxiliary/macro-include-items-item.rs b/src/test/run-pass/auxiliary/macro-include-items-item.rs similarity index 100% rename from src/test/auxiliary/macro-include-items-item.rs rename to src/test/run-pass/auxiliary/macro-include-items-item.rs diff --git a/src/test/auxiliary/macro_crate_def_only.rs b/src/test/run-pass/auxiliary/macro_crate_def_only.rs similarity index 100% rename from src/test/auxiliary/macro_crate_def_only.rs rename to src/test/run-pass/auxiliary/macro_crate_def_only.rs diff --git a/src/test/run-pass/auxiliary/macro_crate_nonterminal.rs b/src/test/run-pass/auxiliary/macro_crate_nonterminal.rs new file mode 100644 index 0000000000..4f75e2b5d7 --- /dev/null +++ b/src/test/run-pass/auxiliary/macro_crate_nonterminal.rs @@ -0,0 +1,22 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +pub fn increment(x: usize) -> usize { + x + 1 +} + +#[macro_export] +macro_rules! increment { + ($x:expr) => ($crate::increment($x)) +} + +pub fn check_local() { + assert_eq!(increment!(3), 4); +} diff --git a/src/test/auxiliary/macro_export_inner_module.rs b/src/test/run-pass/auxiliary/macro_export_inner_module.rs similarity index 100% rename from src/test/auxiliary/macro_export_inner_module.rs rename to src/test/run-pass/auxiliary/macro_export_inner_module.rs diff --git a/src/test/run-pass/auxiliary/macro_reexport_1.rs b/src/test/run-pass/auxiliary/macro_reexport_1.rs new file mode 100644 index 0000000000..aaeccc6e89 --- /dev/null +++ b/src/test/run-pass/auxiliary/macro_reexport_1.rs @@ -0,0 +1,15 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "dylib"] +#[macro_export] +macro_rules! 
reexported { + () => ( 3 ) +} diff --git a/src/test/auxiliary/macro_reexport_2.rs b/src/test/run-pass/auxiliary/macro_reexport_2.rs similarity index 100% rename from src/test/auxiliary/macro_reexport_2.rs rename to src/test/run-pass/auxiliary/macro_reexport_2.rs diff --git a/src/test/auxiliary/macro_reexport_2_no_use.rs b/src/test/run-pass/auxiliary/macro_reexport_2_no_use.rs similarity index 100% rename from src/test/auxiliary/macro_reexport_2_no_use.rs rename to src/test/run-pass/auxiliary/macro_reexport_2_no_use.rs diff --git a/src/test/auxiliary/macro_with_super_1.rs b/src/test/run-pass/auxiliary/macro_with_super_1.rs similarity index 100% rename from src/test/auxiliary/macro_with_super_1.rs rename to src/test/run-pass/auxiliary/macro_with_super_1.rs diff --git a/src/test/auxiliary/method_self_arg1.rs b/src/test/run-pass/auxiliary/method_self_arg1.rs similarity index 100% rename from src/test/auxiliary/method_self_arg1.rs rename to src/test/run-pass/auxiliary/method_self_arg1.rs diff --git a/src/test/auxiliary/method_self_arg2.rs b/src/test/run-pass/auxiliary/method_self_arg2.rs similarity index 100% rename from src/test/auxiliary/method_self_arg2.rs rename to src/test/run-pass/auxiliary/method_self_arg2.rs diff --git a/src/test/auxiliary/mir_external_refs.rs b/src/test/run-pass/auxiliary/mir_external_refs.rs similarity index 100% rename from src/test/auxiliary/mir_external_refs.rs rename to src/test/run-pass/auxiliary/mir_external_refs.rs diff --git a/src/test/auxiliary/moves_based_on_type_lib.rs b/src/test/run-pass/auxiliary/moves_based_on_type_lib.rs similarity index 100% rename from src/test/auxiliary/moves_based_on_type_lib.rs rename to src/test/run-pass/auxiliary/moves_based_on_type_lib.rs diff --git a/src/test/auxiliary/msvc-data-only-lib.rs b/src/test/run-pass/auxiliary/msvc-data-only-lib.rs similarity index 100% rename from src/test/auxiliary/msvc-data-only-lib.rs rename to src/test/run-pass/auxiliary/msvc-data-only-lib.rs diff --git a/src/test/auxiliary/namespaced_enum_emulate_flat.rs b/src/test/run-pass/auxiliary/namespaced_enum_emulate_flat.rs similarity index 100% rename from src/test/auxiliary/namespaced_enum_emulate_flat.rs rename to src/test/run-pass/auxiliary/namespaced_enum_emulate_flat.rs diff --git a/src/test/run-pass/auxiliary/namespaced_enums.rs b/src/test/run-pass/auxiliary/namespaced_enums.rs new file mode 100644 index 0000000000..3bf39b788d --- /dev/null +++ b/src/test/run-pass/auxiliary/namespaced_enums.rs @@ -0,0 +1,20 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +pub enum Foo { + A, + B(isize), + C { a: isize }, +} + +impl Foo { + pub fn foo() {} + pub fn bar(&self) {} +} diff --git a/src/test/auxiliary/nested_item.rs b/src/test/run-pass/auxiliary/nested_item.rs similarity index 100% rename from src/test/auxiliary/nested_item.rs rename to src/test/run-pass/auxiliary/nested_item.rs diff --git a/src/test/auxiliary/newtype_struct_xc.rs b/src/test/run-pass/auxiliary/newtype_struct_xc.rs similarity index 100% rename from src/test/auxiliary/newtype_struct_xc.rs rename to src/test/run-pass/auxiliary/newtype_struct_xc.rs diff --git a/src/test/auxiliary/overloaded_autoderef_xc.rs b/src/test/run-pass/auxiliary/overloaded_autoderef_xc.rs similarity index 100% rename from src/test/auxiliary/overloaded_autoderef_xc.rs rename to src/test/run-pass/auxiliary/overloaded_autoderef_xc.rs diff --git a/src/test/auxiliary/packed.rs b/src/test/run-pass/auxiliary/packed.rs similarity index 100% rename from src/test/auxiliary/packed.rs rename to src/test/run-pass/auxiliary/packed.rs diff --git a/src/test/auxiliary/priv-impl-prim-ty.rs b/src/test/run-pass/auxiliary/priv-impl-prim-ty.rs similarity index 100% rename from src/test/auxiliary/priv-impl-prim-ty.rs rename to src/test/run-pass/auxiliary/priv-impl-prim-ty.rs diff --git a/src/test/auxiliary/privacy_reexport.rs b/src/test/run-pass/auxiliary/privacy_reexport.rs similarity index 100% rename from src/test/auxiliary/privacy_reexport.rs rename to src/test/run-pass/auxiliary/privacy_reexport.rs diff --git a/src/test/auxiliary/pub_use_mods_xcrate.rs b/src/test/run-pass/auxiliary/pub_use_mods_xcrate.rs similarity index 100% rename from src/test/auxiliary/pub_use_mods_xcrate.rs rename to src/test/run-pass/auxiliary/pub_use_mods_xcrate.rs diff --git a/src/test/auxiliary/pub_use_xcrate1.rs b/src/test/run-pass/auxiliary/pub_use_xcrate1.rs similarity index 100% rename from src/test/auxiliary/pub_use_xcrate1.rs rename to src/test/run-pass/auxiliary/pub_use_xcrate1.rs diff --git a/src/test/auxiliary/pub_use_xcrate2.rs b/src/test/run-pass/auxiliary/pub_use_xcrate2.rs similarity index 100% rename from src/test/auxiliary/pub_use_xcrate2.rs rename to src/test/run-pass/auxiliary/pub_use_xcrate2.rs diff --git a/src/test/auxiliary/reachable-unnameable-items.rs b/src/test/run-pass/auxiliary/reachable-unnameable-items.rs similarity index 100% rename from src/test/auxiliary/reachable-unnameable-items.rs rename to src/test/run-pass/auxiliary/reachable-unnameable-items.rs diff --git a/src/test/auxiliary/reexport-should-still-link.rs b/src/test/run-pass/auxiliary/reexport-should-still-link.rs similarity index 100% rename from src/test/auxiliary/reexport-should-still-link.rs rename to src/test/run-pass/auxiliary/reexport-should-still-link.rs diff --git a/src/test/auxiliary/reexported_static_methods.rs b/src/test/run-pass/auxiliary/reexported_static_methods.rs similarity index 100% rename from src/test/auxiliary/reexported_static_methods.rs rename to src/test/run-pass/auxiliary/reexported_static_methods.rs diff --git a/src/test/auxiliary/sepcomp-extern-lib.rs b/src/test/run-pass/auxiliary/sepcomp-extern-lib.rs similarity index 100% rename from src/test/auxiliary/sepcomp-extern-lib.rs rename to src/test/run-pass/auxiliary/sepcomp-extern-lib.rs diff --git a/src/test/auxiliary/sepcomp_cci_lib.rs b/src/test/run-pass/auxiliary/sepcomp_cci_lib.rs similarity index 100% rename from src/test/auxiliary/sepcomp_cci_lib.rs rename to src/test/run-pass/auxiliary/sepcomp_cci_lib.rs diff --git a/src/test/auxiliary/sepcomp_lib.rs 
b/src/test/run-pass/auxiliary/sepcomp_lib.rs similarity index 100% rename from src/test/auxiliary/sepcomp_lib.rs rename to src/test/run-pass/auxiliary/sepcomp_lib.rs diff --git a/src/test/auxiliary/static-function-pointer-aux.rs b/src/test/run-pass/auxiliary/static-function-pointer-aux.rs similarity index 100% rename from src/test/auxiliary/static-function-pointer-aux.rs rename to src/test/run-pass/auxiliary/static-function-pointer-aux.rs diff --git a/src/test/auxiliary/static-methods-crate.rs b/src/test/run-pass/auxiliary/static-methods-crate.rs similarity index 100% rename from src/test/auxiliary/static-methods-crate.rs rename to src/test/run-pass/auxiliary/static-methods-crate.rs diff --git a/src/test/auxiliary/static_fn_inline_xc_aux.rs b/src/test/run-pass/auxiliary/static_fn_inline_xc_aux.rs similarity index 100% rename from src/test/auxiliary/static_fn_inline_xc_aux.rs rename to src/test/run-pass/auxiliary/static_fn_inline_xc_aux.rs diff --git a/src/test/auxiliary/static_fn_trait_xc_aux.rs b/src/test/run-pass/auxiliary/static_fn_trait_xc_aux.rs similarity index 100% rename from src/test/auxiliary/static_fn_trait_xc_aux.rs rename to src/test/run-pass/auxiliary/static_fn_trait_xc_aux.rs diff --git a/src/test/auxiliary/static_mut_xc.rs b/src/test/run-pass/auxiliary/static_mut_xc.rs similarity index 100% rename from src/test/auxiliary/static_mut_xc.rs rename to src/test/run-pass/auxiliary/static_mut_xc.rs diff --git a/src/test/auxiliary/struct_destructuring_cross_crate.rs b/src/test/run-pass/auxiliary/struct_destructuring_cross_crate.rs similarity index 100% rename from src/test/auxiliary/struct_destructuring_cross_crate.rs rename to src/test/run-pass/auxiliary/struct_destructuring_cross_crate.rs diff --git a/src/test/auxiliary/struct_variant_xc_aux.rs b/src/test/run-pass/auxiliary/struct_variant_xc_aux.rs similarity index 100% rename from src/test/auxiliary/struct_variant_xc_aux.rs rename to src/test/run-pass/auxiliary/struct_variant_xc_aux.rs diff --git a/src/test/auxiliary/svh-a-no-change.rs b/src/test/run-pass/auxiliary/svh-a-base.rs similarity index 100% rename from src/test/auxiliary/svh-a-no-change.rs rename to src/test/run-pass/auxiliary/svh-a-base.rs diff --git a/src/test/auxiliary/svh-a-comment.rs b/src/test/run-pass/auxiliary/svh-a-comment.rs similarity index 100% rename from src/test/auxiliary/svh-a-comment.rs rename to src/test/run-pass/auxiliary/svh-a-comment.rs diff --git a/src/test/auxiliary/svh-a-doc.rs b/src/test/run-pass/auxiliary/svh-a-doc.rs similarity index 100% rename from src/test/auxiliary/svh-a-doc.rs rename to src/test/run-pass/auxiliary/svh-a-doc.rs diff --git a/src/test/auxiliary/svh-a-macro.rs b/src/test/run-pass/auxiliary/svh-a-macro.rs similarity index 100% rename from src/test/auxiliary/svh-a-macro.rs rename to src/test/run-pass/auxiliary/svh-a-macro.rs diff --git a/src/test/run-pass/auxiliary/svh-a-no-change.rs b/src/test/run-pass/auxiliary/svh-a-no-change.rs new file mode 100644 index 0000000000..31a97f695f --- /dev/null +++ b/src/test/run-pass/auxiliary/svh-a-no-change.rs @@ -0,0 +1,35 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! The `svh-a-*.rs` files are all deviations from the base file +//! 
svh-a-base.rs with some difference (usually in `fn foo`) that +//! should not affect the strict version hash (SVH) computation +//! (#14132). + +#![crate_name = "a"] + +macro_rules! three { + () => { 3 } +} + +pub trait U {} +pub trait V {} +impl U for () {} +impl V for () {} + +static A_CONSTANT : isize = 2; + +pub fn foo<T:U>(_: isize) -> isize { + 3 +} + +pub fn an_unused_name() -> isize { + 4 +} diff --git a/src/test/auxiliary/svh-a-redundant-cfg.rs b/src/test/run-pass/auxiliary/svh-a-redundant-cfg.rs similarity index 100% rename from src/test/auxiliary/svh-a-redundant-cfg.rs rename to src/test/run-pass/auxiliary/svh-a-redundant-cfg.rs diff --git a/src/test/auxiliary/svh-a-whitespace.rs b/src/test/run-pass/auxiliary/svh-a-whitespace.rs similarity index 100% rename from src/test/auxiliary/svh-a-whitespace.rs rename to src/test/run-pass/auxiliary/svh-a-whitespace.rs diff --git a/src/test/run-pass/auxiliary/svh-b.rs b/src/test/run-pass/auxiliary/svh-b.rs new file mode 100644 index 0000000000..b8946fdc99 --- /dev/null +++ b/src/test/run-pass/auxiliary/svh-b.rs @@ -0,0 +1,23 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! This is a client of the `a` crate defined in "svn-a-base.rs". The +//! rpass and cfail tests (such as "run-pass/svh-add-comment.rs") use +//! it by swapping in a different object code library crate built from +//! some variant of "svn-a-base.rs", and then we are checking if the +//! compiler properly ignores or accepts the change, based on whether +//! the change could affect the downstream crate content or not +//! (#14132). 
+ +#![crate_name = "b"] + +extern crate a; + +pub fn foo() { assert_eq!(a::foo::<()>(0), 3); } diff --git a/src/test/auxiliary/thread-local-extern-static.rs b/src/test/run-pass/auxiliary/thread-local-extern-static.rs similarity index 100% rename from src/test/auxiliary/thread-local-extern-static.rs rename to src/test/run-pass/auxiliary/thread-local-extern-static.rs diff --git a/src/test/auxiliary/trait_default_method_xc_aux.rs b/src/test/run-pass/auxiliary/trait_default_method_xc_aux.rs similarity index 100% rename from src/test/auxiliary/trait_default_method_xc_aux.rs rename to src/test/run-pass/auxiliary/trait_default_method_xc_aux.rs diff --git a/src/test/auxiliary/trait_default_method_xc_aux_2.rs b/src/test/run-pass/auxiliary/trait_default_method_xc_aux_2.rs similarity index 100% rename from src/test/auxiliary/trait_default_method_xc_aux_2.rs rename to src/test/run-pass/auxiliary/trait_default_method_xc_aux_2.rs diff --git a/src/test/auxiliary/trait_inheritance_auto_xc_2_aux.rs b/src/test/run-pass/auxiliary/trait_inheritance_auto_xc_2_aux.rs similarity index 100% rename from src/test/auxiliary/trait_inheritance_auto_xc_2_aux.rs rename to src/test/run-pass/auxiliary/trait_inheritance_auto_xc_2_aux.rs diff --git a/src/test/auxiliary/trait_inheritance_auto_xc_aux.rs b/src/test/run-pass/auxiliary/trait_inheritance_auto_xc_aux.rs similarity index 100% rename from src/test/auxiliary/trait_inheritance_auto_xc_aux.rs rename to src/test/run-pass/auxiliary/trait_inheritance_auto_xc_aux.rs diff --git a/src/test/auxiliary/trait_inheritance_cross_trait_call_xc_aux.rs b/src/test/run-pass/auxiliary/trait_inheritance_cross_trait_call_xc_aux.rs similarity index 100% rename from src/test/auxiliary/trait_inheritance_cross_trait_call_xc_aux.rs rename to src/test/run-pass/auxiliary/trait_inheritance_cross_trait_call_xc_aux.rs diff --git a/src/test/auxiliary/trait_inheritance_overloading_xc.rs b/src/test/run-pass/auxiliary/trait_inheritance_overloading_xc.rs similarity index 100% rename from src/test/auxiliary/trait_inheritance_overloading_xc.rs rename to src/test/run-pass/auxiliary/trait_inheritance_overloading_xc.rs diff --git a/src/test/run-pass/auxiliary/trait_safety_lib.rs b/src/test/run-pass/auxiliary/trait_safety_lib.rs new file mode 100644 index 0000000000..585a756fd0 --- /dev/null +++ b/src/test/run-pass/auxiliary/trait_safety_lib.rs @@ -0,0 +1,19 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Simple smoke test that unsafe traits can be compiled etc. + +pub unsafe trait Foo { + fn foo(&self) -> isize; +} + +unsafe impl Foo for isize { + fn foo(&self) -> isize { *self } +} diff --git a/src/test/run-pass/auxiliary/trait_superkinds_in_metadata.rs b/src/test/run-pass/auxiliary/trait_superkinds_in_metadata.rs new file mode 100644 index 0000000000..0fa2d3459f --- /dev/null +++ b/src/test/run-pass/auxiliary/trait_superkinds_in_metadata.rs @@ -0,0 +1,18 @@ +// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +// Test library crate for cross-crate usages of traits inheriting +// from the builtin kinds. Mostly tests metadata correctness. + +#![crate_type="lib"] + +pub trait RequiresShare : Sync { } +pub trait RequiresRequiresShareAndSend : RequiresShare + Send { } +pub trait RequiresCopy : Copy { } diff --git a/src/test/auxiliary/traitimpl.rs b/src/test/run-pass/auxiliary/traitimpl.rs similarity index 100% rename from src/test/auxiliary/traitimpl.rs rename to src/test/run-pass/auxiliary/traitimpl.rs diff --git a/src/test/run-pass/auxiliary/two_macros.rs b/src/test/run-pass/auxiliary/two_macros.rs new file mode 100644 index 0000000000..060960f0db --- /dev/null +++ b/src/test/run-pass/auxiliary/two_macros.rs @@ -0,0 +1,15 @@ +// Copyright 2015 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[macro_export] +macro_rules! macro_one { () => ("one") } + +#[macro_export] +macro_rules! macro_two { () => ("two") } diff --git a/src/test/auxiliary/typeid-intrinsic-aux1.rs b/src/test/run-pass/auxiliary/typeid-intrinsic-aux1.rs similarity index 100% rename from src/test/auxiliary/typeid-intrinsic-aux1.rs rename to src/test/run-pass/auxiliary/typeid-intrinsic-aux1.rs diff --git a/src/test/auxiliary/typeid-intrinsic-aux2.rs b/src/test/run-pass/auxiliary/typeid-intrinsic-aux2.rs similarity index 100% rename from src/test/auxiliary/typeid-intrinsic-aux2.rs rename to src/test/run-pass/auxiliary/typeid-intrinsic-aux2.rs diff --git a/src/test/auxiliary/unboxed-closures-cross-crate.rs b/src/test/run-pass/auxiliary/unboxed-closures-cross-crate.rs similarity index 100% rename from src/test/auxiliary/unboxed-closures-cross-crate.rs rename to src/test/run-pass/auxiliary/unboxed-closures-cross-crate.rs diff --git a/src/test/run-pass/auxiliary/weak-lang-items.rs b/src/test/run-pass/auxiliary/weak-lang-items.rs new file mode 100644 index 0000000000..6434e62b6f --- /dev/null +++ b/src/test/run-pass/auxiliary/weak-lang-items.rs @@ -0,0 +1,32 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// no-prefer-dynamic + +// This aux-file will require the eh_personality function to be codegen'd, but +// it hasn't been defined just yet. Make sure we don't explode. 
+ +#![no_std] +#![crate_type = "rlib"] + +struct A; + +impl core::ops::Drop for A { + fn drop(&mut self) {} +} + +pub fn foo() { + let _a = A; + panic!("wut"); +} + +mod std { + pub use core::{option, fmt}; +} diff --git a/src/test/auxiliary/where_clauses_xc.rs b/src/test/run-pass/auxiliary/where_clauses_xc.rs similarity index 100% rename from src/test/auxiliary/where_clauses_xc.rs rename to src/test/run-pass/auxiliary/where_clauses_xc.rs diff --git a/src/test/auxiliary/xcrate-trait-lifetime-param.rs b/src/test/run-pass/auxiliary/xcrate-trait-lifetime-param.rs similarity index 100% rename from src/test/auxiliary/xcrate-trait-lifetime-param.rs rename to src/test/run-pass/auxiliary/xcrate-trait-lifetime-param.rs diff --git a/src/test/auxiliary/xcrate_address_insignificant.rs b/src/test/run-pass/auxiliary/xcrate_address_insignificant.rs similarity index 100% rename from src/test/auxiliary/xcrate_address_insignificant.rs rename to src/test/run-pass/auxiliary/xcrate_address_insignificant.rs diff --git a/src/test/auxiliary/xcrate_associated_type_defaults.rs b/src/test/run-pass/auxiliary/xcrate_associated_type_defaults.rs similarity index 100% rename from src/test/auxiliary/xcrate_associated_type_defaults.rs rename to src/test/run-pass/auxiliary/xcrate_associated_type_defaults.rs diff --git a/src/test/auxiliary/xcrate_static_addresses.rs b/src/test/run-pass/auxiliary/xcrate_static_addresses.rs similarity index 100% rename from src/test/auxiliary/xcrate_static_addresses.rs rename to src/test/run-pass/auxiliary/xcrate_static_addresses.rs diff --git a/src/test/auxiliary/xcrate_struct_aliases.rs b/src/test/run-pass/auxiliary/xcrate_struct_aliases.rs similarity index 100% rename from src/test/auxiliary/xcrate_struct_aliases.rs rename to src/test/run-pass/auxiliary/xcrate_struct_aliases.rs diff --git a/src/test/run-pass/auxiliary/xcrate_unit_struct.rs b/src/test/run-pass/auxiliary/xcrate_unit_struct.rs new file mode 100644 index 0000000000..7a69be2b06 --- /dev/null +++ b/src/test/run-pass/auxiliary/xcrate_unit_struct.rs @@ -0,0 +1,38 @@ +// Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![crate_type = "lib"] + +// used by the rpass test + +#[derive(Copy, Clone)] +pub struct Struct; + +#[derive(Copy, Clone)] +pub enum Unit { + UnitVariant, + Argument(Struct) +} + +#[derive(Copy, Clone)] +pub struct TupleStruct(pub usize, pub &'static str); + +// used by the cfail test + +#[derive(Copy, Clone)] +pub struct StructWithFields { + foo: isize, +} + +#[derive(Copy, Clone)] +pub enum EnumWithVariants { + EnumVariant, + EnumVariantArg(isize) +} diff --git a/src/test/run-pass/backtrace-debuginfo-aux.rs b/src/test/run-pass/backtrace-debuginfo-aux.rs index b80c938fed..48df600214 100644 --- a/src/test/run-pass/backtrace-debuginfo-aux.rs +++ b/src/test/run-pass/backtrace-debuginfo-aux.rs @@ -11,7 +11,6 @@ // ignore-test: not a test, used by backtrace-debuginfo.rs to test file!() #[inline(never)] -#[rustc_no_mir] // FIXME #31005 MIR missing debuginfo currently. pub fn callback(f: F) where F: FnOnce((&'static str, u32)) { f((file!(), line!())) } @@ -21,7 +20,6 @@ pub fn callback(f: F) where F: FnOnce((&'static str, u32)) { // this case. 
#[cfg_attr(not(target_env = "msvc"), inline(always))] #[cfg_attr(target_env = "msvc", inline(never))] -#[rustc_no_mir] // FIXME #31005 MIR missing debuginfo currently. pub fn callback_inlined<F>(f: F) where F: FnOnce((&'static str, u32)) { f((file!(), line!())) } diff --git a/src/test/run-pass/backtrace-debuginfo.rs b/src/test/run-pass/backtrace-debuginfo.rs index 7867c94ce4..f42a6ab162 100644 --- a/src/test/run-pass/backtrace-debuginfo.rs +++ b/src/test/run-pass/backtrace-debuginfo.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(rustc_attrs)] - // We disable tail merging here because it can't preserve debuginfo and thus // potentially breaks the backtraces. Also, subtle changes can decide whether // tail merging suceeds, so the test might work today but fail tomorrow due to a @@ -78,7 +76,6 @@ fn dump_filelines(filelines: &[Pos]) { } #[inline(never)] -#[rustc_no_mir] // FIXME #31005 MIR missing debuginfo currently. fn inner(counter: &mut i32, main_pos: Pos, outer_pos: Pos) { check!(counter; main_pos, outer_pos); check!(counter; main_pos, outer_pos); @@ -95,7 +92,6 @@ fn inner(counter: &mut i32, main_pos: Pos, outer_pos: Pos) { // this case. #[cfg_attr(not(target_env = "msvc"), inline(always))] #[cfg_attr(target_env = "msvc", inline(never))] -#[rustc_no_mir] // FIXME #31005 MIR missing debuginfo currently. fn inner_inlined(counter: &mut i32, main_pos: Pos, outer_pos: Pos) { check!(counter; main_pos, outer_pos); check!(counter; main_pos, outer_pos); @@ -121,7 +117,6 @@ fn inner_inlined(counter: &mut i32, main_pos: Pos, outer_pos: Pos) { } #[inline(never)] -#[rustc_no_mir] // FIXME #31005 MIR missing debuginfo currently. fn outer(mut counter: i32, main_pos: Pos) { inner(&mut counter, main_pos, pos!()); inner_inlined(&mut counter, main_pos, pos!()); @@ -166,7 +161,6 @@ fn run_test(me: &str) { } #[inline(never)] -#[rustc_no_mir] // FIXME #31005 MIR missing debuginfo currently. fn main() { let args: Vec<String> = env::args().collect(); if args.len() >= 2 { diff --git a/src/test/run-pass/backtrace.rs b/src/test/run-pass/backtrace.rs index d38f10dd33..ad38dc8f45 100644 --- a/src/test/run-pass/backtrace.rs +++ b/src/test/run-pass/backtrace.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(rustc_attrs)] - // no-pretty-expanded FIXME #15189 // ignore-android FIXME #17520 // compile-flags:-g @@ -18,8 +16,6 @@ use std::env; use std::process::{Command, Stdio}; use std::str; -// FIXME #31005 MIR missing debuginfo currently. -#[cfg_attr(target_env = "msvc", rustc_no_mir)] #[inline(never)] fn foo() { let _v = vec![1, 2, 3]; @@ -28,8 +24,6 @@ fn foo() { } } -// FIXME #31005 MIR missing debuginfo currently. -#[cfg_attr(target_env = "msvc", rustc_no_mir)] #[inline(never)] fn double() { struct Double; diff --git a/src/test/run-pass/cast-to-infer-ty.rs b/src/test/run-pass/cast-to-infer-ty.rs new file mode 100644 index 0000000000..2aa0d9c62f --- /dev/null +++ b/src/test/run-pass/cast-to-infer-ty.rs @@ -0,0 +1,17 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or +// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license +// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +// Check that we allow a cast to `_` so long as the target type can be +// inferred elsewhere. + +pub fn main() { + let i: *const i32 = 0 as _; + assert!(i.is_null()); +} diff --git a/src/test/run-pass/check-static-mut-slices.rs b/src/test/run-pass/check-static-mut-slices.rs index 5959dd4c38..1cfe5bdaeb 100644 --- a/src/test/run-pass/check-static-mut-slices.rs +++ b/src/test/run-pass/check-static-mut-slices.rs @@ -12,6 +12,7 @@ static mut TEST: &'static mut [isize] = &mut [1]; +static mut EMPTY: &'static mut [isize] = &mut []; pub fn main() { unsafe { diff --git a/src/test/run-pass/coerce-expect-unsized.rs b/src/test/run-pass/coerce-expect-unsized.rs index f846ee8f3d..e4792e7936 100644 --- a/src/test/run-pass/coerce-expect-unsized.rs +++ b/src/test/run-pass/coerce-expect-unsized.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// pretty-expanded FIXME #23616 - #![allow(unknown_features)] #![feature(box_syntax)] diff --git a/src/test/run-pass/command-before-exec.rs b/src/test/run-pass/command-before-exec.rs index 16560637b6..72f952fb6c 100644 --- a/src/test/run-pass/command-before-exec.rs +++ b/src/test/run-pass/command-before-exec.rs @@ -62,7 +62,7 @@ fn main() { let output = Command::new(&me).arg("bad").before_exec(|| { Err(Error::from_raw_os_error(102)) - }).output().err().unwrap(); + }).output().unwrap_err(); assert_eq!(output.raw_os_error(), Some(102)); let pid = unsafe { libc::getpid() }; diff --git a/src/test/run-pass/const-autoderef.rs b/src/test/run-pass/const-autoderef.rs index 69173e35e2..6b3970e4f1 100644 --- a/src/test/run-pass/const-autoderef.rs +++ b/src/test/run-pass/const-autoderef.rs @@ -9,10 +9,10 @@ // except according to those terms. -static A: [u8; 1] = ['h' as u8]; -static B: u8 = (&A)[0]; -static C: &'static &'static &'static &'static [u8; 1] = & & & &A; -static D: u8 = (&C)[0]; +const A: [u8; 1] = ['h' as u8]; +const B: u8 = (&A)[0]; +const C: &'static &'static &'static &'static [u8; 1] = & & & &A; +const D: u8 = (&C)[0]; pub fn main() { assert_eq!(B, A[0]); diff --git a/src/test/run-pass/const-err.rs b/src/test/run-pass/const-err.rs new file mode 100644 index 0000000000..9f4ae1ad92 --- /dev/null +++ b/src/test/run-pass/const-err.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// check for const_err regressions + +#![deny(const_err)] + +const X: *const u8 = b"" as _; + +fn main() { + let _ = ((-1 as i8) << 8 - 1) as f32; + let _ = 0u8 as char; +} diff --git a/src/test/run-pass/const-meth-pattern.rs b/src/test/run-pass/const-meth-pattern.rs new file mode 100644 index 0000000000..3b27987f19 --- /dev/null +++ b/src/test/run-pass/const-meth-pattern.rs @@ -0,0 +1,27 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+#![feature(const_fn)] + +struct A; + +impl A { + const fn banana() -> bool { + true + } +} + +const ABANANA: bool = A::banana(); + +fn main() { + match true { + ABANANA => {}, + _ => panic!("what?") + } +} diff --git a/src/test/run-pass/const-str-ptr.rs b/src/test/run-pass/const-str-ptr.rs index 1736ab5bb8..f58bf4fc39 100644 --- a/src/test/run-pass/const-str-ptr.rs +++ b/src/test/run-pass/const-str-ptr.rs @@ -8,17 +8,12 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(rustc_attrs)] - -// ignore-pretty : (#23623) problems when ending with // comments - use std::{str, string}; const A: [u8; 2] = ['h' as u8, 'i' as u8]; const B: &'static [u8; 2] = &A; const C: *const u8 = B as *const u8; -#[rustc_no_mir] // FIXME #27840 MIR can't do rvalue promotion yet. pub fn main() { unsafe { let foo = &A as *const u8; diff --git a/src/test/run-pass/copy-out-of-array-1.rs b/src/test/run-pass/copy-out-of-array-1.rs index 5c5765454d..54147c73ff 100644 --- a/src/test/run-pass/copy-out-of-array-1.rs +++ b/src/test/run-pass/copy-out-of-array-1.rs @@ -12,8 +12,6 @@ // // (Compare with compile-fail/move-out-of-array-1.rs) -// pretty-expanded FIXME #23616 - #[derive(Copy, Clone)] struct C { _x: u8 } diff --git a/src/test/run-pass/deriving-copyclone.rs b/src/test/run-pass/deriving-copyclone.rs new file mode 100644 index 0000000000..96d0406d9e --- /dev/null +++ b/src/test/run-pass/deriving-copyclone.rs @@ -0,0 +1,48 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +//! Test that #[derive(Copy, Clone)] produces a shallow copy +//! even when a member violates RFC 1521 + +use std::sync::atomic::{AtomicBool, ATOMIC_BOOL_INIT, Ordering}; + +/// A struct that pretends to be Copy, but actually does something +/// in its Clone impl +#[derive(Copy)] +struct Liar; + +/// Static cooperating with the rogue Clone impl +static CLONED: AtomicBool = ATOMIC_BOOL_INIT; + +impl Clone for Liar { + fn clone(&self) -> Self { + // this makes Clone vs Copy observable + CLONED.store(true, Ordering::SeqCst); + + *self + } +} + +/// This struct is actually Copy... at least, it thinks it is! +#[derive(Copy, Clone)] +struct Innocent(Liar); + +impl Innocent { + fn new() -> Self { + Innocent(Liar) + } +} + +fn main() { + let _ = Innocent::new().clone(); + // if Innocent was byte-for-byte copied, CLONED will still be false + assert!(!CLONED.load(Ordering::SeqCst)); +} + diff --git a/src/test/run-pass/deriving-via-extension-hash-enum.rs b/src/test/run-pass/deriving-via-extension-hash-enum.rs index 249661f003..cbe23ea052 100644 --- a/src/test/run-pass/deriving-via-extension-hash-enum.rs +++ b/src/test/run-pass/deriving-via-extension-hash-enum.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// pretty-expanded FIXME #23616 - #[derive(Hash)] enum Foo { Bar(isize, char), diff --git a/src/test/run-pass/foreign-dupe.rs b/src/test/run-pass/foreign-dupe.rs index 163ee617d6..fb162d8793 100644 --- a/src/test/run-pass/foreign-dupe.rs +++ b/src/test/run-pass/foreign-dupe.rs @@ -13,6 +13,7 @@ // Check that we can still call duplicated extern (imported) functions // which were declared in another crate. 
See issues #32740 and #32783. + extern crate foreign_lib; pub fn main() { diff --git a/src/test/run-pass/foreign-truncated-arguments.rs b/src/test/run-pass/foreign-truncated-arguments.rs new file mode 100644 index 0000000000..a983a4a959 --- /dev/null +++ b/src/test/run-pass/foreign-truncated-arguments.rs @@ -0,0 +1,29 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -O +// Regression test for https://github.com/rust-lang/rust/issues/33868 + +#[repr(C)] +pub struct S { + a: u32, + b: f32, + c: u32 +} + +#[no_mangle] +#[inline(never)] +pub extern "C" fn test(s: S) -> u32 { + s.c +} + +fn main() { + assert_eq!(test(S{a: 0, b: 0.0, c: 42}), 42); +} diff --git a/src/test/run-pass/hrtb-opt-in-copy.rs b/src/test/run-pass/hrtb-opt-in-copy.rs index b40f4d27a9..f0214d3f37 100644 --- a/src/test/run-pass/hrtb-opt-in-copy.rs +++ b/src/test/run-pass/hrtb-opt-in-copy.rs @@ -16,8 +16,6 @@ // did not consider that a match (something I would like to revise in // a later PR). -// pretty-expanded FIXME #23616 - #![allow(dead_code)] use std::marker::PhantomData; diff --git a/src/test/auxiliary/crate_with_invalid_spans.rs b/src/test/run-pass/import-crate-with-invalid-spans/auxiliary/crate_with_invalid_spans.rs similarity index 100% rename from src/test/auxiliary/crate_with_invalid_spans.rs rename to src/test/run-pass/import-crate-with-invalid-spans/auxiliary/crate_with_invalid_spans.rs diff --git a/src/test/auxiliary/crate_with_invalid_spans_macros.rs b/src/test/run-pass/import-crate-with-invalid-spans/auxiliary/crate_with_invalid_spans_macros.rs similarity index 100% rename from src/test/auxiliary/crate_with_invalid_spans_macros.rs rename to src/test/run-pass/import-crate-with-invalid-spans/auxiliary/crate_with_invalid_spans_macros.rs diff --git a/src/test/run-pass/import-crate-with-invalid-spans.rs b/src/test/run-pass/import-crate-with-invalid-spans/main.rs similarity index 100% rename from src/test/run-pass/import-crate-with-invalid-spans.rs rename to src/test/run-pass/import-crate-with-invalid-spans/main.rs diff --git a/src/test/run-pass/import-prefix-macro.rs b/src/test/run-pass/import-prefix-macro.rs new file mode 100644 index 0000000000..cfe4ff78e6 --- /dev/null +++ b/src/test/run-pass/import-prefix-macro.rs @@ -0,0 +1,35 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +mod a { + pub mod b { + pub mod c { + pub struct S; + pub struct Z; + } + pub struct W; + } +} + +macro_rules! import { + (1 $p: path) => (use $p;); + (2 $p: path) => (use $p::{Z};); + (3 $p: path) => (use $p::*;); +} + +import! { 1 a::b::c::S } +import! { 2 a::b::c } +import! { 3 a::b } + +fn main() { + let s = S; + let z = Z; + let w = W; +} diff --git a/src/test/run-pass/issue-11577.rs b/src/test/run-pass/issue-11577.rs index c1997fac74..a64fbb6afd 100644 --- a/src/test/run-pass/issue-11577.rs +++ b/src/test/run-pass/issue-11577.rs @@ -1,4 +1,4 @@ - // Copyright 2014 The Rust Project Developers. 
See the COPYRIGHT +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // diff --git a/src/test/run-pass/issue-13264.rs b/src/test/run-pass/issue-13264.rs index 7acabf31c8..383c1aef23 100644 --- a/src/test/run-pass/issue-13264.rs +++ b/src/test/run-pass/issue-13264.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// pretty-expanded FIXME #23616 - use std::ops::Deref; struct Root { diff --git a/src/test/run-pass/issue-23338-ensure-param-drop-order.rs b/src/test/run-pass/issue-23338-ensure-param-drop-order.rs index 73c52a0843..fb84e7bae5 100644 --- a/src/test/run-pass/issue-23338-ensure-param-drop-order.rs +++ b/src/test/run-pass/issue-23338-ensure-param-drop-order.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(rustc_attrs)] - // ignore-pretty : (#23623) problems when ending with // comments // This test is ensuring that parameters are indeed dropped after @@ -42,11 +40,11 @@ pub fn main() { // | | | | eval tail of foo // | | | +-- Make D(de_5, 6) // | | | | +-- Make D(de_6, 7) - 6, // | | | +-- Drop D(de_5, 6) - // | | | | | - 5, // | | | | +-- Drop D(de_4, 5) - // | | | | + 5, // | | | | | +-- Drop D(de_4, 5) + // | | | | | 2, // | | +-- Drop D(de_2, 2) + // | | | | + 6, // | | +-- Drop D(de_5, 6) // | | | 1, // | +-- Drop D(de_1, 1) // | | @@ -66,8 +64,8 @@ fn test<'a>(log: d::Log<'a>) { d::println(&format!("result {}", result)); } -#[rustc_no_mir] // FIXME #29855 MIR doesn't handle all drops correctly. -fn foo<'a>(da0: D<'a>, de1: D<'a>) -> D<'a> { +// FIXME(#33490) Remove the double braces when old trans is gone. +fn foo<'a>(da0: D<'a>, de1: D<'a>) -> D<'a> {{ d::println("entered foo"); let de2 = de1.incr(); // creates D(de_2, 2) let de4 = { @@ -76,7 +74,7 @@ fn foo<'a>(da0: D<'a>, de1: D<'a>) -> D<'a> { }; d::println("eval tail of foo"); de4.incr().incr() // creates D(de_5, 6) and D(de_6, 7) -} +}} // This module provides simultaneous printouts of the dynamic extents // of all of the D values, in addition to logging the order that each diff --git a/src/test/run-pass/issue-25343.rs b/src/test/run-pass/issue-25343.rs index 9e01d57727..64e7350fb8 100644 --- a/src/test/run-pass/issue-25343.rs +++ b/src/test/run-pass/issue-25343.rs @@ -8,9 +8,24 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. +#[allow(unused)] fn main() { || { 'label: loop { } }; + + // More cases added from issue 31754 + + 'label2: loop { + break; + } + + let closure = || { + 'label2: loop {} + }; + + fn inner_fn() { + 'label2: loop {} + } } diff --git a/src/test/run-pass/issue-3121.rs b/src/test/run-pass/issue-3121.rs index 777e5bf7a6..6e9ee7fb15 100644 --- a/src/test/run-pass/issue-3121.rs +++ b/src/test/run-pass/issue-3121.rs @@ -8,8 +8,6 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -// pretty-expanded FIXME #23616 - #![allow(unknown_features)] #![feature(box_syntax)] diff --git a/src/test/run-pass/issue-31299.rs b/src/test/run-pass/issue-31299.rs new file mode 100644 index 0000000000..6c04e66068 --- /dev/null +++ b/src/test/run-pass/issue-31299.rs @@ -0,0 +1,43 @@ +// Copyright 2012 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. 
+// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Regression test for #31299. This was generating an overflow error +// because of eager normalization: +// +// proving `M: Sized` requires +// - proving `PtrBack>: Sized` requires +// - normalizing `Vec< as Front>::Back>>: Sized` requires +// - proving `Vec: Front` requires +// - `M: Sized` <-- cycle! +// +// If we skip the normalization step, though, everything goes fine. +// +// This could be fixed by implementing lazy normalization everywhere. +// +// However, we want this to work before then. For that, when checking +// whether a type is Sized we only check that the tails are Sized. As +// PtrBack does not have a tail, we don't need to normalize anything +// and this compiles + +trait Front { + type Back; +} + +impl Front for Vec { + type Back = Vec; +} + +struct PtrBack(Vec); + +struct M(PtrBack>); + +fn main() { + std::mem::size_of::(); +} diff --git a/src/test/run-pass/issue-33096.rs b/src/test/run-pass/issue-33096.rs new file mode 100644 index 0000000000..c3cf008c82 --- /dev/null +++ b/src/test/run-pass/issue-33096.rs @@ -0,0 +1,27 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags: -g + +use std::ops::Deref; + +trait Foo { + fn foo() {} +} + +impl Foo for u8 {} + +fn bar() where T::Target: Foo { + <::Target as Foo>::foo() +} + +fn main() { + bar::<&u8>(); +} diff --git a/src/test/run-pass/issue-33202.rs b/src/test/run-pass/issue-33202.rs new file mode 100644 index 0000000000..eb4192942f --- /dev/null +++ b/src/test/run-pass/issue-33202.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#[repr(C)] +pub enum CPOption { + PSome(T), +} + +fn main() { + println!("sizeof CPOption {}", std::mem::size_of::>()); +} diff --git a/src/test/run-pass/issue-33387.rs b/src/test/run-pass/issue-33387.rs new file mode 100644 index 0000000000..a4b85bc7a0 --- /dev/null +++ b/src/test/run-pass/issue-33387.rs @@ -0,0 +1,44 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +#![feature(rustc_attrs)] + +use std::sync::Arc; + +trait Foo { + fn get(&self) -> [u8; 2]; +} + +impl Foo for [u8; 2] { + fn get(&self) -> [u8; 2] { + *self + } +} + +struct Bar(T); + +#[rustc_mir] +fn unsize_fat_ptr<'a>(x: &'a Bar) -> &'a Bar { + x +} + +#[rustc_mir] +fn unsize_nested_fat_ptr(x: Arc) -> Arc { + x +} + +#[rustc_mir] +fn main() { + let x: Box> = Box::new(Bar([1,2])); + assert_eq!(unsize_fat_ptr(&*x).0.get(), [1, 2]); + + let x: Arc = Arc::new([3, 4]); + assert_eq!(unsize_nested_fat_ptr(x).get(), [3, 4]); +} diff --git a/src/test/run-pass/issue-33537.rs b/src/test/run-pass/issue-33537.rs new file mode 100644 index 0000000000..24f4c9f590 --- /dev/null +++ b/src/test/run-pass/issue-33537.rs @@ -0,0 +1,24 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(const_fn)] + +const fn foo() -> *const i8 { + b"foo" as *const _ as *const i8 +} + +const fn bar() -> i32 { + *&{(1, 2, 3).1} +} + +fn main() { + assert_eq!(foo(), b"foo" as *const _ as *const i8); + assert_eq!(bar(), 2); +} diff --git a/src/test/run-pass/issue-34503.rs b/src/test/run-pass/issue-34503.rs new file mode 100644 index 0000000000..e6217243ee --- /dev/null +++ b/src/test/run-pass/issue-34503.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +fn main() { + struct X; + trait Foo { + fn foo(&self) where (T, Option): Ord {} + fn bar(&self, x: &Option) -> bool + where Option: Ord { *x < *x } + } + impl Foo for () {} + let _ = &() as &Foo; +} diff --git a/src/test/run-pass/issue-9382.rs b/src/test/run-pass/issue-9382.rs index 2c84e202b2..fb7ffdcd51 100644 --- a/src/test/run-pass/issue-9382.rs +++ b/src/test/run-pass/issue-9382.rs @@ -1,6 +1,6 @@ // pretty-expanded FIXME #23616 - // Copyright 2013 The Rust Project Developers. See the COPYRIGHT +// Copyright 2013 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. 
// diff --git a/src/test/auxiliary/issue24687_lib.rs b/src/test/run-pass/issue24687-embed-debuginfo/auxiliary/issue24687_lib.rs similarity index 100% rename from src/test/auxiliary/issue24687_lib.rs rename to src/test/run-pass/issue24687-embed-debuginfo/auxiliary/issue24687_lib.rs diff --git a/src/test/auxiliary/issue24687_mbcs_in_comments.rs b/src/test/run-pass/issue24687-embed-debuginfo/auxiliary/issue24687_mbcs_in_comments.rs similarity index 100% rename from src/test/auxiliary/issue24687_mbcs_in_comments.rs rename to src/test/run-pass/issue24687-embed-debuginfo/auxiliary/issue24687_mbcs_in_comments.rs diff --git a/src/test/run-pass/issue24687-embed-debuginfo.rs b/src/test/run-pass/issue24687-embed-debuginfo/main.rs similarity index 100% rename from src/test/run-pass/issue24687-embed-debuginfo.rs rename to src/test/run-pass/issue24687-embed-debuginfo/main.rs diff --git a/src/test/run-pass/macro-follow.rs b/src/test/run-pass/macro-follow.rs index ce6498f67f..dca676f8cf 100644 --- a/src/test/run-pass/macro-follow.rs +++ b/src/test/run-pass/macro-follow.rs @@ -26,7 +26,7 @@ macro_rules! follow_expr { ($e:expr ;) => {}; } // FOLLOW(ty) = {OpenDelim(Brace), Comma, FatArrow, Colon, Eq, Gt, Semi, Or, -// Ident(as), Ident(where), OpenDelim(Bracket)} +// Ident(as), Ident(where), OpenDelim(Bracket), Nonterminal(Block)} macro_rules! follow_ty { ($t:ty {}) => {}; ($t:ty ,) => {}; @@ -39,6 +39,7 @@ macro_rules! follow_ty { ($t:ty as) => {}; ($t:ty where) => {}; ($t:ty []) => {}; + ($t:ty $b:block) => {}; } // FOLLOW(stmt) = FOLLOW(expr) macro_rules! follow_stmt { @@ -59,6 +60,7 @@ macro_rules! follow_path { ($p:path as) => {}; ($p:path where) => {}; ($p:path []) => {}; + ($p:path $b:block) => {}; } // FOLLOW(block) = any token macro_rules! follow_block { diff --git a/src/test/run-pass/macro-include-items.rs b/src/test/run-pass/macro-include-items.rs index 9e2f431c3e..1e31c85afa 100644 --- a/src/test/run-pass/macro-include-items.rs +++ b/src/test/run-pass/macro-include-items.rs @@ -12,9 +12,9 @@ fn bar() {} -include!(concat!("", "", "../auxiliary/", "macro-include-items-item.rs")); +include!(concat!("", "", "auxiliary/", "macro-include-items-item.rs")); fn main() { foo(); - assert_eq!(include!(concat!("", "../auxiliary/", "macro-include-items-expr.rs")), 1_usize); + assert_eq!(include!(concat!("", "auxiliary/", "macro-include-items-expr.rs")), 1_usize); } diff --git a/src/test/run-pass/mir_ascription_coercion.rs b/src/test/run-pass/mir_ascription_coercion.rs new file mode 100644 index 0000000000..b227be9c54 --- /dev/null +++ b/src/test/run-pass/mir_ascription_coercion.rs @@ -0,0 +1,20 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Tests that the result of type ascription has adjustments applied + +#![feature(rustc_attrs, type_ascription)] + +#[rustc_mir] +fn main() { + let x = [1, 2, 3]; + // The RHS should coerce to &[i32] + let _y : &[i32] = &x : &[i32; 3]; +} diff --git a/src/test/run-pass/mir_coercion_casts.rs b/src/test/run-pass/mir_coercion_casts.rs new file mode 100644 index 0000000000..4d5c59276d --- /dev/null +++ b/src/test/run-pass/mir_coercion_casts.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. 
See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Tests the coercion casts are handled properly + +#![feature(rustc_attrs)] + +#[rustc_mir] +fn main() { + // This should produce only a reification of f, + // not a fn -> fn cast as well + let _ = f as fn(&()); +} + +fn f<'a>(_: &'a ()) { } diff --git a/src/test/run-pass/mir_coercions.rs b/src/test/run-pass/mir_coercions.rs index c1897f79f2..09dd52e30b 100644 --- a/src/test/run-pass/mir_coercions.rs +++ b/src/test/run-pass/mir_coercions.rs @@ -55,6 +55,13 @@ fn coerce_fat_ptr_wrapper(p: PtrWrapper u32+Send>) p } +#[rustc_mir] +fn coerce_ptr_wrapper_poly<'a, T, Trait: ?Sized>(p: PtrWrapper<'a, T>) + -> PtrWrapper<'a, Trait> + where PtrWrapper<'a, T>: CoerceUnsized> +{ + p +} fn main() { let a = [0,1,2]; @@ -73,4 +80,8 @@ fn main() { let z = coerce_fat_ptr_wrapper(PtrWrapper(2,3,(),&square_local)); assert_eq!((z.3)(6), 36); + + let z: PtrWrapper u32> = + coerce_ptr_wrapper_poly(PtrWrapper(2,3,(),&square_local)); + assert_eq!((z.3)(6), 36); } diff --git a/src/test/run-pass/mir_constval_adts.rs b/src/test/run-pass/mir_constval_adts.rs index 8a1f68dbea..4e9c0bce64 100644 --- a/src/test/run-pass/mir_constval_adts.rs +++ b/src/test/run-pass/mir_constval_adts.rs @@ -14,6 +14,7 @@ struct Point { _x: i32, _y: i32, } + const STRUCT: Point = Point { _x: 42, _y: 42 }; const TUPLE1: (i32, i32) = (42, 42); const TUPLE2: (&'static str, &'static str) = ("hello","world"); @@ -26,7 +27,19 @@ fn mir() -> (Point, (i32, i32), (&'static str, &'static str)){ (struct1, tuple1, tuple2) } +#[derive(PartialEq, Eq, Debug)] +struct Newtype(T); + +const NEWTYPE: Newtype<&'static str> = Newtype("foobar"); + +#[rustc_mir] +fn test_promoted_newtype_str_ref() { + let x = &NEWTYPE; + assert_eq!(x, &Newtype("foobar")); +} + fn main(){ assert_eq!(mir(), (STRUCT, TUPLE1, TUPLE2)); + test_promoted_newtype_str_ref(); } diff --git a/src/test/run-pass/mir_raw_fat_ptr.rs b/src/test/run-pass/mir_raw_fat_ptr.rs index c0ba7a76db..a632f00d9e 100644 --- a/src/test/run-pass/mir_raw_fat_ptr.rs +++ b/src/test/run-pass/mir_raw_fat_ptr.rs @@ -121,7 +121,6 @@ impl Foo for T { struct S(u32, T); -#[rustc_no_mir] // FIXME #27840 MIR can't do rvalue promotion yet. 
fn main() { let array = [0,1,2,3,4]; let array2 = [5,6,7,8,9]; diff --git a/src/test/run-pass/mir_trans_calls.rs b/src/test/run-pass/mir_trans_calls.rs index b8b7ecbf03..31e2c89257 100644 --- a/src/test/run-pass/mir_trans_calls.rs +++ b/src/test/run-pass/mir_trans_calls.rs @@ -138,6 +138,15 @@ fn test_fn_nil_call(f: &F) -> i32 f() } +#[rustc_mir] +fn test_fn_transmute_zst(x: ()) -> [(); 1] { + fn id(x: T) -> T {x} + + id(unsafe { + std::mem::transmute(x) + }) +} + fn main() { assert_eq!(test1(1, (2, 3), &[4, 5, 6]), (1, (2, 3), &[4, 5, 6][..])); assert_eq!(test2(98), 98); @@ -159,4 +168,5 @@ fn main() { assert_eq!(test_fn_direct_call(&closure, 100, 4), 324); assert_eq!(test_fn_nil_call(&(|| 42)), 42); + assert_eq!(test_fn_transmute_zst(()), [()]); } diff --git a/src/test/run-pass/mir_trans_calls_variadic.rs b/src/test/run-pass/mir_trans_calls_variadic.rs index ff66daace3..7f711b2758 100644 --- a/src/test/run-pass/mir_trans_calls_variadic.rs +++ b/src/test/run-pass/mir_trans_calls_variadic.rs @@ -16,17 +16,17 @@ extern { } #[rustc_mir] -fn test(a: i64, b: i64, c: i64, d: i64, e: i64, f: i64) -> i64 { +fn test(a: i64, b: i64, c: i64, d: i64, e: i64, f: T, g: U) -> i64 { unsafe { rust_interesting_average(6, a, a as f64, b, b as f64, c, c as f64, d, d as f64, e, e as f64, - f, f as f64) as i64 + f, g) as i64 } } fn main(){ - assert_eq!(test(10, 20, 30, 40, 50, 60), 70); + assert_eq!(test(10, 20, 30, 40, 50, 60_i64, 60.0_f64), 70); } diff --git a/src/test/run-pass/no-landing-pads.rs b/src/test/run-pass/no-landing-pads.rs index 8445bccf13..e718046ebb 100644 --- a/src/test/run-pass/no-landing-pads.rs +++ b/src/test/run-pass/no-landing-pads.rs @@ -27,6 +27,6 @@ fn main() { thread::spawn(move|| -> () { let _a = A; panic!(); - }).join().err().unwrap(); + }).join().unwrap_err(); assert!(unsafe { !HIT }); } diff --git a/src/test/run-pass/out-of-stack.rs b/src/test/run-pass/out-of-stack.rs index 5ac0378318..a7748b6d6a 100644 --- a/src/test/run-pass/out-of-stack.rs +++ b/src/test/run-pass/out-of-stack.rs @@ -47,8 +47,7 @@ fn check_status(status: std::process::ExitStatus) use std::os::unix::process::ExitStatusExt; assert!(!status.success()); - assert!(status.signal() != Some(libc::SIGSEGV) - && status.signal() != Some(libc::SIGBUS)); + assert_eq!(status.signal(), Some(libc::SIGABRT)); } #[cfg(not(unix))] diff --git a/src/test/run-pass/panic-recover-propagate.rs b/src/test/run-pass/panic-recover-propagate.rs index d420ef9986..2c87c6b926 100644 --- a/src/test/run-pass/panic-recover-propagate.rs +++ b/src/test/run-pass/panic-recover-propagate.rs @@ -28,10 +28,10 @@ fn main() { panic!("hi there"); }); - panic::propagate(result.err().unwrap()); + panic::propagate(result.unwrap_err()); }).join(); - let msg = *result.err().unwrap().downcast::<&'static str>().unwrap(); + let msg = *result.unwrap_err().downcast::<&'static str>().unwrap(); assert_eq!("hi there", msg); assert_eq!(1, A.load(Ordering::SeqCst)); } diff --git a/src/test/run-pass/panic-runtime/abort-link-to-unwinding-crates.rs b/src/test/run-pass/panic-runtime/abort-link-to-unwinding-crates.rs new file mode 100644 index 0000000000..71c1a61062 --- /dev/null +++ b/src/test/run-pass/panic-runtime/abort-link-to-unwinding-crates.rs @@ -0,0 +1,35 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. 
This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags:-C panic=abort +// aux-build:exit-success-if-unwind.rs +// no-prefer-dynamic + +extern crate exit_success_if_unwind; + +use std::process::Command; +use std::env; + +fn main() { + let mut args = env::args_os(); + let me = args.next().unwrap(); + + if let Some(s) = args.next() { + if &*s == "foo" { + exit_success_if_unwind::bar(do_panic); + } + } + let s = Command::new(env::args_os().next().unwrap()).arg("foo").status(); + assert!(s.unwrap().code() != Some(0)); +} + +fn do_panic() { + panic!("try to catch me"); +} diff --git a/src/test/run-pass/panic-runtime/abort.rs b/src/test/run-pass/panic-runtime/abort.rs new file mode 100644 index 0000000000..2fc9d6cfd0 --- /dev/null +++ b/src/test/run-pass/panic-runtime/abort.rs @@ -0,0 +1,39 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags:-C panic=abort +// no-prefer-dynamic + +use std::process::Command; +use std::env; + +struct Bomb; + +impl Drop for Bomb { + fn drop(&mut self) { + std::process::exit(0); + } +} + +fn main() { + let mut args = env::args_os(); + let me = args.next().unwrap(); + + if let Some(s) = args.next() { + if &*s == "foo" { + + let _bomb = Bomb; + + panic!("try to catch me"); + } + } + let s = Command::new(env::args_os().next().unwrap()).arg("foo").status(); + assert!(s.unwrap().code() != Some(0)); +} diff --git a/src/test/run-pass/panic-runtime/auxiliary/exit-success-if-unwind.rs b/src/test/run-pass/panic-runtime/auxiliary/exit-success-if-unwind.rs new file mode 100644 index 0000000000..9e5fc592b1 --- /dev/null +++ b/src/test/run-pass/panic-runtime/auxiliary/exit-success-if-unwind.rs @@ -0,0 +1,26 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// no-prefer-dynamic + +#![crate_type = "rlib"] + +struct Bomb; + +impl Drop for Bomb { + fn drop(&mut self) { + std::process::exit(0); + } +} + +pub fn bar(f: fn()) { + let _bomb = Bomb; + f(); +} diff --git a/src/test/run-pass/panic-runtime/link-to-abort.rs b/src/test/run-pass/panic-runtime/link-to-abort.rs new file mode 100644 index 0000000000..71e35e41fc --- /dev/null +++ b/src/test/run-pass/panic-runtime/link-to-abort.rs @@ -0,0 +1,19 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
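The abort tests above (abort.rs, abort-link-to-unwinding-crates.rs) and lto-abort.rs below all rely on the same trick; here is a condensed sketch of it with hypothetical names and without the compiletest harness, assuming the program is built with -C panic=abort:

use std::env;
use std::process::Command;

// Drop guard that would mask the failure: if unwinding ran this destructor,
// the child would exit with status 0 and the parent's assertion would fail.
struct ExitZeroOnDrop;

impl Drop for ExitZeroOnDrop {
    fn drop(&mut self) {
        std::process::exit(0);
    }
}

fn main() {
    let mut args = env::args();
    let me = args.next().unwrap();

    if let Some(arg) = args.next() {
        if arg == "child" {
            let _guard = ExitZeroOnDrop;
            // With -C panic=abort this aborts immediately; the guard never drops.
            panic!("must abort, not unwind");
        }
    }

    // Re-run ourselves as the child and require a non-zero exit status.
    let status = Command::new(&me).arg("child").status().unwrap();
    assert!(status.code() != Some(0));
}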
+ +// compile-flags:-C panic=abort +// no-prefer-dynamic + +#![feature(panic_abort)] + +extern crate panic_abort; + +fn main() { +} diff --git a/src/test/run-pass/panic-runtime/link-to-unwind.rs b/src/test/run-pass/panic-runtime/link-to-unwind.rs new file mode 100644 index 0000000000..dec8f738d3 --- /dev/null +++ b/src/test/run-pass/panic-runtime/link-to-unwind.rs @@ -0,0 +1,18 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// no-prefer-dynamic + +#![feature(panic_unwind)] + +extern crate panic_unwind; + +fn main() { +} diff --git a/src/test/run-pass/panic-runtime/lto-abort.rs b/src/test/run-pass/panic-runtime/lto-abort.rs new file mode 100644 index 0000000000..09e33b8818 --- /dev/null +++ b/src/test/run-pass/panic-runtime/lto-abort.rs @@ -0,0 +1,39 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags:-C lto -C panic=abort +// no-prefer-dynamic + +use std::process::Command; +use std::env; + +struct Bomb; + +impl Drop for Bomb { + fn drop(&mut self) { + std::process::exit(0); + } +} + +fn main() { + let mut args = env::args_os(); + let me = args.next().unwrap(); + + if let Some(s) = args.next() { + if &*s == "foo" { + + let _bomb = Bomb; + + panic!("try to catch me"); + } + } + let s = Command::new(env::args_os().next().unwrap()).arg("foo").status(); + assert!(s.unwrap().code() != Some(0)); +} diff --git a/src/test/run-pass/panic-runtime/lto-unwind.rs b/src/test/run-pass/panic-runtime/lto-unwind.rs new file mode 100644 index 0000000000..10e633b377 --- /dev/null +++ b/src/test/run-pass/panic-runtime/lto-unwind.rs @@ -0,0 +1,41 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// compile-flags:-C lto -C panic=unwind +// no-prefer-dynamic + +use std::process::Command; +use std::env; + +struct Bomb; + +impl Drop for Bomb { + fn drop(&mut self) { + println!("hurray you ran me"); + } +} + +fn main() { + let mut args = env::args_os(); + let me = args.next().unwrap(); + + if let Some(s) = args.next() { + if &*s == "foo" { + + let _bomb = Bomb; + + panic!("try to catch me"); + } + } + let s = Command::new(env::args_os().next().unwrap()).arg("foo").output(); + let s = s.unwrap(); + assert!(!s.status.success()); + assert!(String::from_utf8_lossy(&s.stdout).contains("hurray you ran me")); +} diff --git a/src/test/run-pass/regions-lub-ref-ref-rc.rs b/src/test/run-pass/regions-lub-ref-ref-rc.rs index 41c64197ac..ade742863a 100644 --- a/src/test/run-pass/regions-lub-ref-ref-rc.rs +++ b/src/test/run-pass/regions-lub-ref-ref-rc.rs @@ -9,7 +9,7 @@ // except according to those terms. // Test a corner case of LUB coercion. 
In this case, one arm of the -// match requires a deref coercion and other other doesn't, and there +// match requires a deref coercion and the other doesn't, and there // is an extra `&` on the `rc`. We want to be sure that the lifetime // assigned to this `&rc` value is not `'a` but something smaller. In // other words, the type from `rc` is `&'a Rc` and the type diff --git a/src/test/run-pass/rfc1592-deprecated.rs b/src/test/run-pass/rfc1592-deprecated.rs new file mode 100644 index 0000000000..81bf025878 --- /dev/null +++ b/src/test/run-pass/rfc1592-deprecated.rs @@ -0,0 +1,29 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +use std::fmt; + +trait Foo { + fn foo(&self) -> (Self, Self); +} + +impl Foo for T { + fn foo(&self) -> (Self, Self) { + (*self, *self) + } +} + +fn main() { + assert_eq!((11).foo(), (11, 11)); + + let junk: Box = Box::new(42); + let f = format!("{:?}", junk); + assert_eq!(f, "42"); +} diff --git a/src/test/run-pass/sepcomp-unwind.rs b/src/test/run-pass/sepcomp-unwind.rs index 96e9c1ed2c..3a93845a06 100644 --- a/src/test/run-pass/sepcomp-unwind.rs +++ b/src/test/run-pass/sepcomp-unwind.rs @@ -39,5 +39,5 @@ mod b { } fn main() { - thread::spawn(move|| { ::b::g() }).join().err().unwrap(); + thread::spawn(move|| { ::b::g() }).join().unwrap_err(); } diff --git a/src/test/run-pass/simd-intrinsic-generic-elements.rs b/src/test/run-pass/simd-intrinsic-generic-elements.rs index ffb9e6072d..5cb57b63ad 100644 --- a/src/test/run-pass/simd-intrinsic-generic-elements.rs +++ b/src/test/run-pass/simd-intrinsic-generic-elements.rs @@ -8,7 +8,7 @@ // option. This file may not be copied, modified, or distributed // except according to those terms. -#![feature(repr_simd, rustc_attrs, platform_intrinsics)] +#![feature(repr_simd, platform_intrinsics)] // ignore-pretty : (#23623) problems when ending with // comments @@ -52,7 +52,6 @@ macro_rules! all_eq { }} } -#[rustc_no_mir] // FIXME #27840 MIR doesn't handle shuffle constants. fn main() { let x2 = i32x2(20, 21); let x3 = i32x3(30, 31, 32); diff --git a/src/test/run-pass/specialization/auxiliary/go_trait.rs b/src/test/run-pass/specialization/auxiliary/go_trait.rs new file mode 100644 index 0000000000..044bb606b4 --- /dev/null +++ b/src/test/run-pass/specialization/auxiliary/go_trait.rs @@ -0,0 +1,53 @@ +// Copyright 2014 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(specialization)] + +// Common code used for tests that model the Fn/FnMut/FnOnce hierarchy. 
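A sketch of how a test crate on the other side of this auxiliary crate can lean on the specializable defaults defined just below: implementing only `Go` is enough to reach the `GoMut` and `GoOnce` entry points. The `Counter` type and the `aux-build` wiring here are illustrative, in the spirit of specialization_cross_crate_defaults.rs.

// aux-build:go_trait.rs

extern crate go_trait;

use go_trait::{Go, go, go_mut, go_once};

struct Counter {
    calls: isize,
}

impl Go for Counter {
    fn go(&self, arg: isize) {
        // A real test would record the call; here we just touch the fields.
        let _ = (self.calls, arg);
    }
}

fn main() {
    let mut c = Counter { calls: 0 };
    go(&c, 1);         // direct `Go` impl
    go_mut(&mut c, 2); // via the blanket `default fn go_mut` for `G: Go`
    go_once(c, 3);     // via the blanket `default fn go_once` for `G: GoMut`
}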
+ +pub trait Go { + fn go(&self, arg: isize); +} + +pub fn go(this: &G, arg: isize) { + this.go(arg) +} + +pub trait GoMut { + fn go_mut(&mut self, arg: isize); +} + +pub fn go_mut(this: &mut G, arg: isize) { + this.go_mut(arg) +} + +pub trait GoOnce { + fn go_once(self, arg: isize); +} + +pub fn go_once(this: G, arg: isize) { + this.go_once(arg) +} + +impl GoMut for G + where G : Go +{ + default fn go_mut(&mut self, arg: isize) { + go(&*self, arg) + } +} + +impl GoOnce for G + where G : GoMut +{ + default fn go_once(mut self, arg: isize) { + go_mut(&mut self, arg) + } +} diff --git a/src/test/auxiliary/specialization_cross_crate.rs b/src/test/run-pass/specialization/auxiliary/specialization_cross_crate.rs similarity index 100% rename from src/test/auxiliary/specialization_cross_crate.rs rename to src/test/run-pass/specialization/auxiliary/specialization_cross_crate.rs diff --git a/src/test/auxiliary/specialization_cross_crate_defaults.rs b/src/test/run-pass/specialization/auxiliary/specialization_cross_crate_defaults.rs old mode 100755 new mode 100644 similarity index 100% rename from src/test/auxiliary/specialization_cross_crate_defaults.rs rename to src/test/run-pass/specialization/auxiliary/specialization_cross_crate_defaults.rs diff --git a/src/test/run-pass/sse2.rs b/src/test/run-pass/sse2.rs new file mode 100644 index 0000000000..78d91b2f31 --- /dev/null +++ b/src/test/run-pass/sse2.rs @@ -0,0 +1,18 @@ +// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +#![feature(cfg_target_feature)] + +pub fn main() { + if cfg!(any(target_arch = "x86", target_arch = "x86_64")) { + assert!(cfg!(target_feature = "sse2"), + "SSE2 was not detected as available on an x86 platform"); + } +} diff --git a/src/test/run-pass/tcp-stress.rs b/src/test/run-pass/tcp-stress.rs index 52c920b6f5..dfc86497c9 100644 --- a/src/test/run-pass/tcp-stress.rs +++ b/src/test/run-pass/tcp-stress.rs @@ -18,48 +18,54 @@ use std::io::prelude::*; use std::net::{TcpListener, TcpStream}; use std::process; use std::sync::mpsc::channel; +use std::time::Duration; use std::thread::{self, Builder}; fn main() { // This test has a chance to time out, try to not let it time out thread::spawn(move|| -> () { - thread::sleep_ms(30 * 1000); + thread::sleep(Duration::from_secs(30)); process::exit(1); }); - let mut listener = TcpListener::bind("127.0.0.1:0").unwrap(); + let listener = TcpListener::bind("127.0.0.1:0").unwrap(); let addr = listener.local_addr().unwrap(); thread::spawn(move || -> () { loop { let mut stream = match listener.accept() { Ok(stream) => stream.0, - Err(error) => continue, + Err(_) => continue, }; - stream.read(&mut [0]); - stream.write(&[2]); + let _ = stream.read(&mut [0]); + let _ = stream.write(&[2]); } }); let (tx, rx) = channel(); + let mut spawned_cnt = 0; for _ in 0..1000 { let tx = tx.clone(); - Builder::new().stack_size(64 * 1024).spawn(move|| { + let res = Builder::new().stack_size(64 * 1024).spawn(move|| { match TcpStream::connect(addr) { Ok(mut stream) => { - stream.write(&[1]); - stream.read(&mut [0]); + let _ = stream.write(&[1]); + let _ = stream.read(&mut [0]); }, Err(..) 
=> {} } tx.send(()).unwrap(); }); + if let Ok(_) = res { + spawned_cnt += 1; + }; } // Wait for all clients to exit, but don't wait for the server to exit. The // server just runs infinitely. drop(tx); - for _ in 0..1000 { + for _ in 0..spawned_cnt { rx.recv().unwrap(); } + assert_eq!(spawned_cnt, 1000); process::exit(0); } diff --git a/src/test/run-pass/terminate-in-initializer.rs b/src/test/run-pass/terminate-in-initializer.rs index 2875f73fc6..c9133bae85 100644 --- a/src/test/run-pass/terminate-in-initializer.rs +++ b/src/test/run-pass/terminate-in-initializer.rs @@ -24,13 +24,13 @@ fn test_ret() { let _x: Box = return; } fn test_panic() { fn f() { let _x: Box = panic!(); } - thread::spawn(move|| f() ).join().err().unwrap(); + thread::spawn(move|| f() ).join().unwrap_err(); } fn test_panic_indirect() { fn f() -> ! { panic!(); } fn g() { let _x: Box = f(); } - thread::spawn(move|| g() ).join().err().unwrap(); + thread::spawn(move|| g() ).join().unwrap_err(); } pub fn main() { diff --git a/src/test/run-pass/trait-object-exclusion.rs b/src/test/run-pass/trait-object-exclusion.rs new file mode 100644 index 0000000000..13b725b7c9 --- /dev/null +++ b/src/test/run-pass/trait-object-exclusion.rs @@ -0,0 +1,28 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +trait Future: 'static { + // The requirement for Self: Sized must prevent instantiation of + // Future::forget in vtables, otherwise there's an infinite type + // recursion through as Future>::forget. + fn forget(self) where Self: Sized { + Box::new(Map(self)) as Box; + } +} + +struct Map(A); +impl Future for Map {} + +pub struct Promise; +impl Future for Promise {} + +fn main() { + Promise.forget(); +} diff --git a/src/test/run-pass/unit-like-struct-drop-run.rs b/src/test/run-pass/unit-like-struct-drop-run.rs index eaee3505a6..ec37be9420 100644 --- a/src/test/run-pass/unit-like-struct-drop-run.rs +++ b/src/test/run-pass/unit-like-struct-drop-run.rs @@ -30,6 +30,6 @@ pub fn main() { let _b = Foo; }).join(); - let s = x.err().unwrap().downcast::<&'static str>().unwrap(); + let s = x.unwrap_err().downcast::<&'static str>().unwrap(); assert_eq!(&**s, "This panic should happen."); } diff --git a/src/test/run-pass/use-keyword-2.rs b/src/test/run-pass/use-keyword-2.rs new file mode 100644 index 0000000000..60016f5959 --- /dev/null +++ b/src/test/run-pass/use-keyword-2.rs @@ -0,0 +1,30 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
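Several hunks above (no-landing-pads.rs, panic-recover-propagate.rs, sepcomp-unwind.rs, terminate-in-initializer.rs, unit-like-struct-drop-run.rs) replace `.err().unwrap()` with `.unwrap_err()`; a minimal standalone illustration of what that call does on a `join()` result:

use std::thread;

fn main() {
    // join() yields Err(payload) when the spawned thread panicked;
    // unwrap_err() asserts that in one step and hands back the payload.
    let payload = thread::spawn(move || -> () { panic!("boom") }).join().unwrap_err();
    let msg = payload.downcast::<&'static str>().unwrap();
    assert_eq!(*msg, "boom");
}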
+ +pub struct A; + +mod test { + pub use super :: A; + + pub use self :: A as B; +} + +impl A { + fn f() {} + fn g() { + Self :: f() + } +} + +fn main() { + let a: A = test::A; + let b: A = test::B; + let c: () = A::g(); +} diff --git a/src/test/auxiliary/empty.rs b/src/test/rustdoc/auxiliary/empty.rs similarity index 100% rename from src/test/auxiliary/empty.rs rename to src/test/rustdoc/auxiliary/empty.rs diff --git a/src/test/auxiliary/inline-default-methods.rs b/src/test/rustdoc/auxiliary/inline-default-methods.rs similarity index 100% rename from src/test/auxiliary/inline-default-methods.rs rename to src/test/rustdoc/auxiliary/inline-default-methods.rs diff --git a/src/test/auxiliary/issue-13698.rs b/src/test/rustdoc/auxiliary/issue-13698.rs similarity index 100% rename from src/test/auxiliary/issue-13698.rs rename to src/test/rustdoc/auxiliary/issue-13698.rs diff --git a/src/test/auxiliary/issue-15318.rs b/src/test/rustdoc/auxiliary/issue-15318.rs similarity index 100% rename from src/test/auxiliary/issue-15318.rs rename to src/test/rustdoc/auxiliary/issue-15318.rs diff --git a/src/test/auxiliary/issue-17476.rs b/src/test/rustdoc/auxiliary/issue-17476.rs similarity index 100% rename from src/test/auxiliary/issue-17476.rs rename to src/test/rustdoc/auxiliary/issue-17476.rs diff --git a/src/test/auxiliary/issue-19190-3.rs b/src/test/rustdoc/auxiliary/issue-19190-3.rs similarity index 100% rename from src/test/auxiliary/issue-19190-3.rs rename to src/test/rustdoc/auxiliary/issue-19190-3.rs diff --git a/src/test/auxiliary/issue-20646.rs b/src/test/rustdoc/auxiliary/issue-20646.rs similarity index 100% rename from src/test/auxiliary/issue-20646.rs rename to src/test/rustdoc/auxiliary/issue-20646.rs diff --git a/src/test/auxiliary/issue-20727.rs b/src/test/rustdoc/auxiliary/issue-20727.rs similarity index 100% rename from src/test/auxiliary/issue-20727.rs rename to src/test/rustdoc/auxiliary/issue-20727.rs diff --git a/src/test/auxiliary/issue-21092.rs b/src/test/rustdoc/auxiliary/issue-21092.rs similarity index 100% rename from src/test/auxiliary/issue-21092.rs rename to src/test/rustdoc/auxiliary/issue-21092.rs diff --git a/src/test/auxiliary/issue-21801.rs b/src/test/rustdoc/auxiliary/issue-21801.rs similarity index 100% rename from src/test/auxiliary/issue-21801.rs rename to src/test/rustdoc/auxiliary/issue-21801.rs diff --git a/src/test/auxiliary/issue-22025.rs b/src/test/rustdoc/auxiliary/issue-22025.rs similarity index 100% rename from src/test/auxiliary/issue-22025.rs rename to src/test/rustdoc/auxiliary/issue-22025.rs diff --git a/src/test/auxiliary/issue-23207-1.rs b/src/test/rustdoc/auxiliary/issue-23207-1.rs similarity index 100% rename from src/test/auxiliary/issue-23207-1.rs rename to src/test/rustdoc/auxiliary/issue-23207-1.rs diff --git a/src/test/auxiliary/issue-23207-2.rs b/src/test/rustdoc/auxiliary/issue-23207-2.rs similarity index 100% rename from src/test/auxiliary/issue-23207-2.rs rename to src/test/rustdoc/auxiliary/issue-23207-2.rs diff --git a/src/test/auxiliary/issue-26606-macro.rs b/src/test/rustdoc/auxiliary/issue-26606-macro.rs similarity index 100% rename from src/test/auxiliary/issue-26606-macro.rs rename to src/test/rustdoc/auxiliary/issue-26606-macro.rs diff --git a/src/test/auxiliary/issue-27362.rs b/src/test/rustdoc/auxiliary/issue-27362.rs similarity index 100% rename from src/test/auxiliary/issue-27362.rs rename to src/test/rustdoc/auxiliary/issue-27362.rs diff --git a/src/test/auxiliary/issue-28927-1.rs b/src/test/rustdoc/auxiliary/issue-28927-1.rs 
similarity index 100% rename from src/test/auxiliary/issue-28927-1.rs rename to src/test/rustdoc/auxiliary/issue-28927-1.rs diff --git a/src/test/auxiliary/issue-28927-2.rs b/src/test/rustdoc/auxiliary/issue-28927-2.rs similarity index 100% rename from src/test/auxiliary/issue-28927-2.rs rename to src/test/rustdoc/auxiliary/issue-28927-2.rs diff --git a/src/test/auxiliary/issue-29584.rs b/src/test/rustdoc/auxiliary/issue-29584.rs similarity index 100% rename from src/test/auxiliary/issue-29584.rs rename to src/test/rustdoc/auxiliary/issue-29584.rs diff --git a/src/test/auxiliary/issue-30109-1.rs b/src/test/rustdoc/auxiliary/issue-30109-1.rs similarity index 100% rename from src/test/auxiliary/issue-30109-1.rs rename to src/test/rustdoc/auxiliary/issue-30109-1.rs diff --git a/src/test/auxiliary/reexp_stripped.rs b/src/test/rustdoc/auxiliary/reexp_stripped.rs similarity index 100% rename from src/test/auxiliary/reexp_stripped.rs rename to src/test/rustdoc/auxiliary/reexp_stripped.rs diff --git a/src/test/auxiliary/rustdoc-default-impl.rs b/src/test/rustdoc/auxiliary/rustdoc-default-impl.rs similarity index 100% rename from src/test/auxiliary/rustdoc-default-impl.rs rename to src/test/rustdoc/auxiliary/rustdoc-default-impl.rs diff --git a/src/test/auxiliary/rustdoc-extern-default-method.rs b/src/test/rustdoc/auxiliary/rustdoc-extern-default-method.rs similarity index 100% rename from src/test/auxiliary/rustdoc-extern-default-method.rs rename to src/test/rustdoc/auxiliary/rustdoc-extern-default-method.rs diff --git a/src/test/auxiliary/rustdoc-extern-method.rs b/src/test/rustdoc/auxiliary/rustdoc-extern-method.rs similarity index 100% rename from src/test/auxiliary/rustdoc-extern-method.rs rename to src/test/rustdoc/auxiliary/rustdoc-extern-method.rs diff --git a/src/test/auxiliary/rustdoc-ffi.rs b/src/test/rustdoc/auxiliary/rustdoc-ffi.rs similarity index 100% rename from src/test/auxiliary/rustdoc-ffi.rs rename to src/test/rustdoc/auxiliary/rustdoc-ffi.rs diff --git a/src/test/auxiliary/rustdoc-impl-parts-crosscrate.rs b/src/test/rustdoc/auxiliary/rustdoc-impl-parts-crosscrate.rs similarity index 100% rename from src/test/auxiliary/rustdoc-impl-parts-crosscrate.rs rename to src/test/rustdoc/auxiliary/rustdoc-impl-parts-crosscrate.rs diff --git a/src/test/auxiliary/variant-struct.rs b/src/test/rustdoc/auxiliary/variant-struct.rs similarity index 100% rename from src/test/auxiliary/variant-struct.rs rename to src/test/rustdoc/auxiliary/variant-struct.rs diff --git a/src/test/rustdoc/duplicate_impls/impls.rs b/src/test/rustdoc/duplicate_impls/impls.rs new file mode 100644 index 0000000000..72e54fe733 --- /dev/null +++ b/src/test/rustdoc/duplicate_impls/impls.rs @@ -0,0 +1,22 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. 
+ +pub struct Foo; + +// just so that `Foo` doesn't show up on `Bar`s sidebar +pub mod bar { + pub trait Bar {} +} + +impl Foo { + pub fn new() -> Foo { Foo } +} + +impl bar::Bar for Foo {} diff --git a/src/test/rustdoc/duplicate_impls/issue-33054.rs b/src/test/rustdoc/duplicate_impls/issue-33054.rs new file mode 100644 index 0000000000..df6ebcae10 --- /dev/null +++ b/src/test/rustdoc/duplicate_impls/issue-33054.rs @@ -0,0 +1,21 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// @has issue_33054/impls/struct.Foo.html +// @has - '//code' 'impl Foo' +// @has - '//code' 'impl Bar for Foo' +// @count - '//*[@class="impl"]' 2 +// @has issue_33054/impls/bar/trait.Bar.html +// @has - '//code' 'impl Bar for Foo' +// @count - '//*[@class="struct"]' 1 +pub mod impls; + +#[doc(inline)] +pub use impls as impls2; diff --git a/src/test/rustdoc/escape-rust-expr.rs b/src/test/rustdoc/escape-rust-expr.rs new file mode 100644 index 0000000000..7f9a2bf175 --- /dev/null +++ b/src/test/rustdoc/escape-rust-expr.rs @@ -0,0 +1,15 @@ +// Copyright 2016 The Rust Project Developers. See the COPYRIGHT +// file at the top-level directory of this distribution and at +// http://rust-lang.org/COPYRIGHT. +// +// Licensed under the Apache License, Version 2.0 or the MIT license +// , at your +// option. This file may not be copied, modified, or distributed +// except according to those terms. + +// Test that we HTML-escape Rust expressions, where HTML special chars +// can occur, and we know it's definitely not markup. + +// @has escape_rust_expr/constant.CONST_S.html '//pre[@class="rust const"]' '"

    (&mut self, mut predicate: P) -> Option where - P: FnMut(Self::Item) -> bool, - Self: Sized + ExactSizeIterator + DoubleEndedIterator - { - let mut i = self.len(); - - while let Some(v) = self.next_back() { - if predicate(v) { - return Some(i - 1); - } - // No need for an overflow check here, because `ExactSizeIterator` - // implies that the number of elements fits into a `usize`. - i -= 1; - } - None - } - - /// Returns the maximum element of an iterator. - /// - /// If the two elements are equally maximum, the latest element is - /// returned. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// assert_eq!(a.iter().max(), Some(&3)); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn max(self) -> Option where Self: Sized, Self::Item: Ord - { - select_fold1(self, - |_| (), - // switch to y even if it is only equal, to preserve - // stability. - |_, x, _, y| *x <= *y) - .map(|(_, x)| x) - } - - /// Returns the minimum element of an iterator. - /// - /// If the two elements are equally minimum, the first element is - /// returned. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// assert_eq!(a.iter().min(), Some(&1)); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn min(self) -> Option where Self: Sized, Self::Item: Ord - { - select_fold1(self, - |_| (), - // only switch to y if it is strictly smaller, to - // preserve stability. - |_, x, _, y| *x > *y) - .map(|(_, x)| x) - } - - /// Returns the element that gives the maximum value from the - /// specified function. - /// - /// Returns the rightmost element if the comparison determines two elements - /// to be equally maximum. - /// - /// # Examples - /// - /// ``` - /// let a = [-3_i32, 0, 1, 5, -10]; - /// assert_eq!(*a.iter().max_by_key(|x| x.abs()).unwrap(), -10); - /// ``` - #[inline] - #[stable(feature = "iter_cmp_by_key", since = "1.6.0")] - fn max_by_key(self, f: F) -> Option - where Self: Sized, F: FnMut(&Self::Item) -> B, - { - select_fold1(self, - f, - // switch to y even if it is only equal, to preserve - // stability. - |x_p, _, y_p, _| x_p <= y_p) - .map(|(_, x)| x) - } - - /// Returns the element that gives the minimum value from the - /// specified function. - /// - /// Returns the latest element if the comparison determines two elements - /// to be equally minimum. - /// - /// # Examples - /// - /// ``` - /// let a = [-3_i32, 0, 1, 5, -10]; - /// assert_eq!(*a.iter().min_by_key(|x| x.abs()).unwrap(), 0); - /// ``` - #[stable(feature = "iter_cmp_by_key", since = "1.6.0")] - fn min_by_key(self, f: F) -> Option - where Self: Sized, F: FnMut(&Self::Item) -> B, - { - select_fold1(self, - f, - // only switch to y if it is strictly smaller, to - // preserve stability. - |x_p, _, y_p, _| x_p > y_p) - .map(|(_, x)| x) - } - - /// Reverses an iterator's direction. - /// - /// Usually, iterators iterate from left to right. After using `rev()`, - /// an iterator will instead iterate from right to left. - /// - /// This is only possible if the iterator has an end, so `rev()` only - /// works on [`DoubleEndedIterator`]s. 
- /// - /// [`DoubleEndedIterator`]: trait.DoubleEndedIterator.html - /// - /// # Examples - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut iter = a.iter().rev(); - /// - /// assert_eq!(iter.next(), Some(&3)); - /// assert_eq!(iter.next(), Some(&2)); - /// assert_eq!(iter.next(), Some(&1)); - /// - /// assert_eq!(iter.next(), None); - /// ``` - #[inline] - #[stable(feature = "rust1", since = "1.0.0")] - fn rev(self) -> Rev where Self: Sized + DoubleEndedIterator { - Rev{iter: self} - } - - /// Converts an iterator of pairs into a pair of containers. - /// - /// `unzip()` consumes an entire iterator of pairs, producing two - /// collections: one from the left elements of the pairs, and one - /// from the right elements. - /// - /// This function is, in some sense, the opposite of [`zip()`]. - /// - /// [`zip()`]: #method.zip - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [(1, 2), (3, 4)]; - /// - /// let (left, right): (Vec<_>, Vec<_>) = a.iter().cloned().unzip(); - /// - /// assert_eq!(left, [1, 3]); - /// assert_eq!(right, [2, 4]); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - fn unzip(self) -> (FromA, FromB) where - FromA: Default + Extend, - FromB: Default + Extend, - Self: Sized + Iterator, - { - struct SizeHint(usize, Option, marker::PhantomData); - impl Iterator for SizeHint { - type Item = A; - - fn next(&mut self) -> Option { None } - fn size_hint(&self) -> (usize, Option) { - (self.0, self.1) - } - } - - let (lo, hi) = self.size_hint(); - let mut ts: FromA = Default::default(); - let mut us: FromB = Default::default(); - - ts.extend(SizeHint(lo, hi, marker::PhantomData)); - us.extend(SizeHint(lo, hi, marker::PhantomData)); - - for (t, u) in self { - ts.extend(Some(t)); - us.extend(Some(u)); - } - - (ts, us) - } - - /// Creates an iterator which `clone()`s all of its elements. - /// - /// This is useful when you have an iterator over `&T`, but you need an - /// iterator over `T`. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let v_cloned: Vec<_> = a.iter().cloned().collect(); - /// - /// // cloned is the same as .map(|&x| x), for integers - /// let v_map: Vec<_> = a.iter().map(|&x| x).collect(); - /// - /// assert_eq!(v_cloned, vec![1, 2, 3]); - /// assert_eq!(v_map, vec![1, 2, 3]); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - fn cloned<'a, T: 'a>(self) -> Cloned - where Self: Sized + Iterator, T: Clone - { - Cloned { it: self } - } - - /// Repeats an iterator endlessly. - /// - /// Instead of stopping at `None`, the iterator will instead start again, - /// from the beginning. After iterating again, it will start at the - /// beginning again. And again. And again. Forever. - /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// let a = [1, 2, 3]; - /// - /// let mut it = a.iter().cycle(); - /// - /// assert_eq!(it.next(), Some(&1)); - /// assert_eq!(it.next(), Some(&2)); - /// assert_eq!(it.next(), Some(&3)); - /// assert_eq!(it.next(), Some(&1)); - /// assert_eq!(it.next(), Some(&2)); - /// assert_eq!(it.next(), Some(&3)); - /// assert_eq!(it.next(), Some(&1)); - /// ``` - #[stable(feature = "rust1", since = "1.0.0")] - #[inline] - fn cycle(self) -> Cycle where Self: Sized + Clone { - Cycle{orig: self.clone(), iter: self} - } - - /// Sums the elements of an iterator. - /// - /// Takes each element, adds them together, and returns the result. - /// - /// An empty iterator returns the zero value of the type. 
- /// - /// # Examples - /// - /// Basic usage: - /// - /// ``` - /// #![feature(iter_arith)] - /// - /// let a = [1, 2, 3]; - /// let sum: i32 = a.iter().sum(); - /// - /// assert_eq!(sum, 6); - /// ``` - #[unstable(feature = "iter_arith", reason = "bounds recently changed", - issue = "27739")] - fn sum(self) -> S where - S: Add + Zero, - Self: Sized, - { - self.fold(Zero::zero(), |s, e| s + e) - } - - /// Iterates over the entire iterator, multiplying all the elements - /// - /// An empty iterator returns the one value of the type. - /// - /// # Examples - /// - /// ``` - /// #![feature(iter_arith)] - /// - /// fn factorial(n: u32) -> u32 { - /// (1..).take_while(|&i| i <= n).product() - /// } - /// assert_eq!(factorial(0), 1); - /// assert_eq!(factorial(1), 1); - /// assert_eq!(factorial(5), 120); - /// ``` - #[unstable(feature="iter_arith", reason = "bounds recently changed", - issue = "27739")] - fn product