
Merge branch 'master' into escape-reason-docs

Esteban Küber 2016-12-27 17:02:52 -08:00
commit e766c465d2
601 changed files with 19295 additions and 10334 deletions


@@ -8,6 +8,8 @@ git:
   depth: 1
   submodules: false
+osx_image: xcode8.2
 matrix:
   include:
     # Linux builders, all docker images
@@ -27,28 +29,37 @@ matrix:
     # OSX builders
     - env: >
         RUST_CHECK_TARGET=check
-        RUST_CONFIGURE_ARGS=--target=x86_64-apple-darwin
+        RUST_CONFIGURE_ARGS=--build=x86_64-apple-darwin
         SRC=.
       os: osx
-      install: brew install ccache
+      install: &osx_install_sccache >
curl -L https://api.pub.build.mozilla.org/tooltool/sha512/d0025b286468cc5ada83b23d3fafbc936b9f190eaa7d4a981715b18e8e3bf720a7bcee7bfe758cfdeb8268857f6098fd52dcdd8818232692a30ce91039936596 |
tar xJf - -C /usr/local/bin --strip-components=1
     - env: >
         RUST_CHECK_TARGET=check
-        RUST_CONFIGURE_ARGS=--target=i686-apple-darwin
+        RUST_CONFIGURE_ARGS=--build=i686-apple-darwin
         SRC=.
       os: osx
-      install: brew install ccache
+      install: *osx_install_sccache
     - env: >
         RUST_CHECK_TARGET=check
-        RUST_CONFIGURE_ARGS=--target=x86_64-apple-darwin --disable-rustbuild
+        RUST_CONFIGURE_ARGS=--build=x86_64-apple-darwin --disable-rustbuild
         SRC=.
       os: osx
-      install: brew install ccache
+      install: *osx_install_sccache
     - env: >
         RUST_CHECK_TARGET=
         RUST_CONFIGURE_ARGS=--target=aarch64-apple-ios,armv7-apple-ios,armv7s-apple-ios,i386-apple-ios,x86_64-apple-ios
         SRC=.
       os: osx
-      install: brew install ccache
+      install: *osx_install_sccache
env:
global:
- SCCACHE_BUCKET=rust-lang-ci-sccache
- AWS_ACCESS_KEY_ID=AKIAIMX7VLAS3PZAVLUQ
# AWS_SECRET_ACCESS_KEY=...
- secure: "Pixhh0hXDqGCdOyLtGFjli3J2AtDWIpyb2btIrLe956nCBDRutRoMm6rv5DI9sFZN07Mms7VzNNvhc9wCW1y63JAm414d2Co7Ob8kWMZlz9l9t7ACHuktUiis8yr+S4Quq1Vqd6pqi7pf2J++UxC8R/uLeqVrubzr6+X7AbmEFE="
 script:
   - >
@@ -77,5 +88,3 @@ notifications:
 cache:
   directories:
     - $HOME/docker
-    - $HOME/.ccache
-    - $HOME/.cargo


@@ -212,12 +212,13 @@ Some common invocations of `x.py` are:
   & everything builds in the correct manner.
 - `x.py test src/libstd --stage 1` - test the standard library without
   recompiling stage 2.
-- `x.py test src/test/run-pass --filter TESTNAME` - Run a matching set of tests.
+- `x.py test src/test/run-pass --test-args TESTNAME` - Run a matching set of
+  tests.
   - `TESTNAME` should be a substring of the tests to match against e.g. it could
     be the fully qualified test name, or just a part of it.
     `TESTNAME=collections::hash::map::test_map::test_capacity_not_less_than_len`
     or `TESTNAME=test_capacity_not_less_than_len`.
-- `x.py test src/test/run-pass --stage 1 --filter <substring-of-test-name>` -
+- `x.py test src/test/run-pass --stage 1 --test-args <substring-of-test-name>` -
   Run a single rpass test with the stage1 compiler (this will be quicker than
   running the command above as we only build the stage1 compiler, not the entire
   thing). You can also leave off the directory argument to run all stage1 test
@@ -284,7 +285,7 @@ been approved. The PR then enters the [merge queue][merge-queue], where @bors
 will run all the tests on every platform we support. If it all works out,
 @bors will merge your code into `master` and close the pull request.
-[merge-queue]: http://buildbot.rust-lang.org/homu/queue/rust
+[merge-queue]: https://buildbot.rust-lang.org/homu/queue/rust
 Speaking of tests, Rust has a comprehensive test suite. More information about
 it can be found
@@ -403,4 +404,4 @@ are:
 [tlgba]: http://tomlee.co/2014/04/a-more-detailed-tour-of-the-rust-compiler/
 [ro]: http://www.rustaceans.org/
 [rctd]: ./COMPILER_TESTS.md
-[cheatsheet]: http://buildbot.rust-lang.org/homu/
+[cheatsheet]: https://buildbot.rust-lang.org/homu/


@@ -1,3 +1,222 @@
Version 1.14.0 (2016-12-22)
===========================
Language
--------
* [`..` matches multiple tuple fields in enum variants, structs
and tuples][36843]. [RFC 1492].
* [Safe `fn` items can be coerced to `unsafe fn` pointers][37389]
* [`use *` and `use ::*` both glob-import from the crate root][37367]
* [It's now possible to call a `Vec<Box<Fn()>>` without explicit
dereferencing][36822]
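A minimal sketch, not part of the release notes, illustrating two of the language changes above (the `Version` struct is purely illustrative; assumes Rust 1.14 or later):

```rust
// RFC 1492: `..` may now appear anywhere in a tuple or tuple-struct pattern.
struct Version(u32, u32, u32);

fn main() {
    let Version(major, .., patch) = Version(1, 14, 0);
    println!("major = {}, patch = {}", major, patch);

    // A safe `fn` item now coerces to an `unsafe fn` pointer.
    fn noop() {}
    let f: unsafe fn() = noop;
    unsafe { f() };
}
```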
Compiler
--------
* [Mark enums with non-zero discriminant as non-zero][37224]
* [Lower-case `static mut` names are linted like other
statics and consts][37162]
* [Fix ICE on some macros in const integer positions
(e.g. `[u8; m!()]`)][36819]
* [Improve error message and snippet for "did you mean `x`"][36798]
* [Add a panic-strategy field to the target specification][36794]
* [Include LLVM version in `--version --verbose`][37200]
Compile-time Optimizations
--------------------------
* [Improve macro expansion performance][37569]
* [Shrink `Expr_::ExprInlineAsm`][37445]
* [Replace all uses of SHA-256 with BLAKE2b][37439]
* [Reduce the number of bytes hashed by `IchHasher`][37427]
* [Avoid more allocations when compiling html5ever][37373]
* [Use `SmallVector` in `CombineFields::instantiate`][37322]
* [Avoid some allocations in the macro parser][37318]
* [Use a faster deflate setting][37298]
* [Add `ArrayVec` and `AccumulateVec` to reduce heap allocations
during interning of slices][37270]
* [Optimize `write_metadata`][37267]
* [Don't process obligation forest cycles when stalled][37231]
* [Avoid many `CrateConfig` clones][37161]
* [Optimize `Substs::super_fold_with`][37108]
* [Optimize `ObligationForest`'s `NodeState` handling][36993]
* [Speed up `plug_leaks`][36917]
Libraries
---------
* [`println!()`, with no arguments, prints newline][36825].
Previously, an empty string was required to achieve the same.
* [`Wrapping` impls standard binary and unary operators, as well as
the `Sum` and `Product` iterators][37356]
* [Implement `From<Cow<str>> for String` and `From<Cow<[T]>> for
Vec<T>`][37326]
* [Improve `fold` performance for `chain`, `cloned`, `map`, and
`VecDeque` iterators][37315]
* [Improve `SipHasher` performance on small values][37312]
* [Add Iterator trait TrustedLen to enable better FromIterator /
Extend][37306]
* [Expand `.zip()` specialization to `.map()` and `.cloned()`][37230]
* [`ReadDir` implements `Debug`][37221]
* [Implement `RefUnwindSafe` for atomic types][37178]
* [Specialize `Vec::extend` to `Vec::extend_from_slice`][37094]
* [Avoid allocations in `Decoder::read_str`][37064]
* [`io::Error` implements `From<io::ErrorKind>`][37037]
* [Impl `Debug` for raw pointers to unsized data][36880]
* [Don't reuse `HashMap` random seeds][37470]
* [The internal memory layout of `HashMap` is more cache-friendly, for
significant improvements in some operations][36692]
* [`HashMap` uses less memory on 32-bit architectures][36595]
* [Impl `Add<{str, Cow<str>}>` for `Cow<str>`][36430]
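A small hedged sketch exercising a few of the library additions listed above, assuming the Rust 1.14 standard library:

```rust
use std::borrow::Cow;
use std::io;

fn main() {
    // `println!()` with no arguments now prints a bare newline.
    println!();

    // `String: From<Cow<str>>` (and likewise `Vec<T>: From<Cow<[T]>>`).
    let s = String::from(Cow::Borrowed("hello"));
    assert_eq!(s, "hello");

    // `io::Error: From<io::ErrorKind>`.
    let err = io::Error::from(io::ErrorKind::NotFound);
    assert_eq!(err.kind(), io::ErrorKind::NotFound);
}
```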
Cargo
-----
* [Expose rustc cfg values to build scripts][cargo/3243]
* [Allow cargo to work with read-only `CARGO_HOME`][cargo/3259]
* [Fix passing --features when testing multiple packages][cargo/3280]
* [Use a single profile set per workspace][cargo/3249]
* [Load `replace` sections from lock files][cargo/3220]
* [Ignore `panic` configuration for test/bench profiles][cargo/3175]
Tooling
-------
* [rustup is the recommended Rust installation method][1.14rustup]
* This release includes host (rustc) builds for Linux on MIPS, PowerPC, and
S390x. These are [tier 2] platforms and may have major defects. Follow the
instructions on the website to install, or add the targets to an existing
installation with `rustup target add`. The new target triples are:
- `mips-unknown-linux-gnu`
- `mipsel-unknown-linux-gnu`
- `mips64-unknown-linux-gnuabi64`
  - `mips64el-unknown-linux-gnuabi64`
- `powerpc-unknown-linux-gnu`
- `powerpc64-unknown-linux-gnu`
- `powerpc64le-unknown-linux-gnu`
  - `s390x-unknown-linux-gnu`
* This release includes target (std) builds for ARM Linux running MUSL
libc. These are [tier 2] platforms and may have major defects. Add the
following triples to an existing rustup installation with `rustup target add`:
- `arm-unknown-linux-musleabi`
- `arm-unknown-linux-musleabihf`
- `armv7-unknown-linux-musleabihf`
* This release includes [experimental support for WebAssembly][1.14wasm], via
the `wasm32-unknown-emscripten` target. This target is known to have major
defects. Please test, report, and fix.
* rustup no longer installs documentation by default. Run `rustup
component add rust-docs` to install.
* [Fix line stepping in debugger][37310]
* [Enable line number debuginfo in releases][37280]
Misc
----
* [Disable jemalloc on aarch64/powerpc/mips][37392]
* [Add support for Fuchsia OS][37313]
* [Detect local-rebuild by only MAJOR.MINOR version][37273]
Compatibility Notes
-------------------
* [A number of forward-compatibility lints used by the compiler
to gradually introduce language changes have been converted
to deny by default][36894]:
- ["use of inaccessible extern crate erroneously allowed"][36886]
- ["type parameter default erroneously allowed in invalid location"][36887]
- ["detects super or self keywords at the beginning of global path"][36888]
- ["two overlapping inherent impls define an item with the same name
were erroneously allowed"][36889]
- ["floating-point constants cannot be used in patterns"][36890]
- ["constants of struct or enum type can only be used in a pattern if
the struct or enum has `#[derive(PartialEq, Eq)]`"][36891]
- ["lifetimes or labels named `'_` were erroneously allowed"][36892]
* [Prohibit patterns in trait methods without bodies][37378]
* [The atomic `Ordering` enum may not be matched exhaustively][37351]
* [Future-proofing `#[no_link]` breaks some obscure cases][37247]
* [The `$crate` macro variable is accepted in fewer locations][37213]
* [Impls specifying extra region requirements beyond the trait
they implement are rejected][37167]
* [Enums may not be unsized][37111]. Unsized enums are intended to
work but never have. For now they are forbidden.
* [Enforce the shadowing restrictions from RFC 1560 for today's macros][36767]
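For the atomic `Ordering` note above, a minimal sketch of the pattern that keeps compiling after the change: matches must include a wildcard arm rather than listing every variant.

```rust
use std::sync::atomic::Ordering;

fn is_relaxed(order: Ordering) -> bool {
    match order {
        Ordering::Relaxed => true,
        // A wildcard arm is required; an exhaustive list of the known
        // variants is rejected after this change.
        _ => false,
    }
}

fn main() {
    assert!(is_relaxed(Ordering::Relaxed));
    assert!(!is_relaxed(Ordering::SeqCst));
}
```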
[tier 2]: https://forge.rust-lang.org/platform-support.html
[1.14rustup]: https://internals.rust-lang.org/t/beta-testing-rustup-rs/3316/204
[1.14wasm]: https://users.rust-lang.org/t/compiling-to-the-web-with-rust-and-emscripten/7627
[36430]: https://github.com/rust-lang/rust/pull/36430
[36595]: https://github.com/rust-lang/rust/pull/36595
[36692]: https://github.com/rust-lang/rust/pull/36692
[36767]: https://github.com/rust-lang/rust/pull/36767
[36794]: https://github.com/rust-lang/rust/pull/36794
[36798]: https://github.com/rust-lang/rust/pull/36798
[36819]: https://github.com/rust-lang/rust/pull/36819
[36822]: https://github.com/rust-lang/rust/pull/36822
[36825]: https://github.com/rust-lang/rust/pull/36825
[36843]: https://github.com/rust-lang/rust/pull/36843
[36880]: https://github.com/rust-lang/rust/pull/36880
[36886]: https://github.com/rust-lang/rust/issues/36886
[36887]: https://github.com/rust-lang/rust/issues/36887
[36888]: https://github.com/rust-lang/rust/issues/36888
[36889]: https://github.com/rust-lang/rust/issues/36889
[36890]: https://github.com/rust-lang/rust/issues/36890
[36891]: https://github.com/rust-lang/rust/issues/36891
[36892]: https://github.com/rust-lang/rust/issues/36892
[36894]: https://github.com/rust-lang/rust/pull/36894
[36917]: https://github.com/rust-lang/rust/pull/36917
[36993]: https://github.com/rust-lang/rust/pull/36993
[37037]: https://github.com/rust-lang/rust/pull/37037
[37064]: https://github.com/rust-lang/rust/pull/37064
[37094]: https://github.com/rust-lang/rust/pull/37094
[37108]: https://github.com/rust-lang/rust/pull/37108
[37111]: https://github.com/rust-lang/rust/pull/37111
[37161]: https://github.com/rust-lang/rust/pull/37161
[37162]: https://github.com/rust-lang/rust/pull/37162
[37167]: https://github.com/rust-lang/rust/pull/37167
[37178]: https://github.com/rust-lang/rust/pull/37178
[37200]: https://github.com/rust-lang/rust/pull/37200
[37213]: https://github.com/rust-lang/rust/pull/37213
[37221]: https://github.com/rust-lang/rust/pull/37221
[37224]: https://github.com/rust-lang/rust/pull/37224
[37230]: https://github.com/rust-lang/rust/pull/37230
[37231]: https://github.com/rust-lang/rust/pull/37231
[37247]: https://github.com/rust-lang/rust/pull/37247
[37267]: https://github.com/rust-lang/rust/pull/37267
[37270]: https://github.com/rust-lang/rust/pull/37270
[37273]: https://github.com/rust-lang/rust/pull/37273
[37280]: https://github.com/rust-lang/rust/pull/37280
[37298]: https://github.com/rust-lang/rust/pull/37298
[37306]: https://github.com/rust-lang/rust/pull/37306
[37310]: https://github.com/rust-lang/rust/pull/37310
[37312]: https://github.com/rust-lang/rust/pull/37312
[37313]: https://github.com/rust-lang/rust/pull/37313
[37315]: https://github.com/rust-lang/rust/pull/37315
[37318]: https://github.com/rust-lang/rust/pull/37318
[37322]: https://github.com/rust-lang/rust/pull/37322
[37326]: https://github.com/rust-lang/rust/pull/37326
[37351]: https://github.com/rust-lang/rust/pull/37351
[37356]: https://github.com/rust-lang/rust/pull/37356
[37367]: https://github.com/rust-lang/rust/pull/37367
[37373]: https://github.com/rust-lang/rust/pull/37373
[37378]: https://github.com/rust-lang/rust/pull/37378
[37389]: https://github.com/rust-lang/rust/pull/37389
[37392]: https://github.com/rust-lang/rust/pull/37392
[37427]: https://github.com/rust-lang/rust/pull/37427
[37439]: https://github.com/rust-lang/rust/pull/37439
[37445]: https://github.com/rust-lang/rust/pull/37445
[37470]: https://github.com/rust-lang/rust/pull/37470
[37569]: https://github.com/rust-lang/rust/pull/37569
[RFC 1492]: https://github.com/rust-lang/rfcs/blob/master/text/1492-dotdot-in-patterns.md
[cargo/3175]: https://github.com/rust-lang/cargo/pull/3175
[cargo/3220]: https://github.com/rust-lang/cargo/pull/3220
[cargo/3243]: https://github.com/rust-lang/cargo/pull/3243
[cargo/3249]: https://github.com/rust-lang/cargo/pull/3249
[cargo/3259]: https://github.com/rust-lang/cargo/pull/3259
[cargo/3280]: https://github.com/rust-lang/cargo/pull/3280
Version 1.13.0 (2016-11-10)
===========================


@@ -1,4 +1,9 @@
 environment:
SCCACHE_BUCKET: rust-lang-ci-sccache
AWS_ACCESS_KEY_ID: AKIAIMX7VLAS3PZAVLUQ
AWS_SECRET_ACCESS_KEY:
secure: 1UkmbiDd15tWtYbMm5O2Uqm0b0Ur8v1MoSlydxl4ojcroPeerRMlUges0l57py8c
SCCACHE_DIGEST: f808afabb4a4eb1d7112bcb3fa6be03b61e93412890c88e177c667eb37f46353d7ec294e559b16f9f4b5e894f2185fe7670a0df15fd064889ecbd80f0c34166c
 matrix:
   # 32/64 bit MSVC
   - MSYS_BITS: 64
@@ -84,6 +89,13 @@ install:
   # Otherwise pull in the MinGW installed on appveyor
   - if NOT defined MINGW_URL set PATH=C:\msys64\mingw%MSYS_BITS%\bin;C:\msys64\usr\bin;%PATH%
# Download and install sccache
- appveyor DownloadFile https://api.pub.build.mozilla.org/tooltool/sha512/%SCCACHE_DIGEST%
- mv %SCCACHE_DIGEST% sccache.tar.bz2
- 7z x -y sccache.tar.bz2 > nul
- 7z x -y sccache.tar > nul
- set PATH=%PATH%;%CD%\sccache2
 test_script:
   - git submodule update --init
   - set SRC=.

configure

@@ -621,6 +621,7 @@ opt llvm-assertions 0 "build LLVM with assertions"
 opt debug-assertions 0 "build with debugging assertions"
 opt fast-make 0 "use .gitmodules as timestamp for submodule deps"
 opt ccache 0 "invoke gcc/clang via ccache to reuse object files between builds"
+opt sccache 0 "invoke gcc/clang via sccache to reuse object files between builds"
 opt local-rust 0 "use an installed rustc rather than downloading a snapshot"
 opt local-rebuild 0 "assume local-rust matches the current version, for rebuilds; implies local-rust, and is implied if local-rust already matches the current version"
 opt llvm-static-stdcpp 0 "statically link to libstdc++ for LLVM"
@@ -916,6 +917,18 @@ case $CFG_BUILD in
 esac
 putvar CFG_LLDB_PYTHON
# Do some sanity checks if running on buildbot
# (these env vars are set by rust-buildbot)
if [ -n "$RUST_DIST_SERVER" -a -n "$ALLOW_NONZERO_RLIMIT_CORE" ]; then
# Frequently the llvm submodule directory is broken by the build
# being killed
llvm_lock="${CFG_SRC_DIR}/.git/modules/src/llvm/index.lock"
if [ -e "$llvm_lock" ]; then
step_msg "removing $llvm_lock"
rm -f "$llvm_lock"
fi
fi
 step_msg "looking for target specific programs"
 probe CFG_ADB adb
@@ -1677,11 +1690,23 @@ do
         LLVM_CC_64_ARG1="gcc"
         ;;
       ("gcc")
-        LLVM_CXX_32="g++"
-        LLVM_CC_32="gcc"
-        LLVM_CXX_64="g++"
-        LLVM_CC_64="gcc"
+        if [ -z "$CFG_ENABLE_SCCACHE" ]; then
+            LLVM_CXX_32="g++"
+            LLVM_CC_32="gcc"
+            LLVM_CXX_64="g++"
+            LLVM_CC_64="gcc"
+        else
+            LLVM_CXX_32="sccache"
+            LLVM_CC_32="sccache"
+            LLVM_CXX_32_ARG1="g++"
+            LLVM_CC_32_ARG1="gcc"
+            LLVM_CXX_64="sccache"
+            LLVM_CC_64="sccache"
+            LLVM_CXX_64_ARG1="g++"
+            LLVM_CC_64_ARG1="gcc"
+        fi
         ;;
       (*)


@@ -0,0 +1 @@
# rustbuild-only target


@@ -13,7 +13,7 @@
 ######################################################################
 # The version number
-CFG_RELEASE_NUM=1.15.0
+CFG_RELEASE_NUM=1.16.0
 # An optional number to put after the label, e.g. '.2' -> '-beta.2'
 # NB Make sure it starts with a dot to conform to semver pre-release


@@ -22,7 +22,7 @@ $(eval $(call RUST_CRATE,coretest))
 DEPS_collectionstest :=
 $(eval $(call RUST_CRATE,collectionstest))
-TEST_TARGET_CRATES = $(filter-out core rustc_unicode alloc_system libc \
+TEST_TARGET_CRATES = $(filter-out core std_unicode alloc_system libc \
                        alloc_jemalloc panic_unwind \
                        panic_abort,$(TARGET_CRATES)) \
                      collectionstest coretest

src/Cargo.lock

@@ -19,7 +19,7 @@ version = "0.0.0"
 dependencies = [
  "build_helper 0.1.0",
  "core 0.0.0",
- "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.0.0",
 ]
@@ -42,7 +42,7 @@ dependencies = [
  "build_helper 0.1.0",
  "cmake 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)",
  "filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
  "getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)",
  "num_cpus 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -63,7 +63,7 @@ name = "cmake"
 version = "0.1.18"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -80,7 +80,7 @@ name = "compiler_builtins"
 version = "0.0.0"
 dependencies = [
  "core 0.0.0",
- "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -121,7 +121,7 @@ name = "flate"
 version = "0.0.0"
 dependencies = [
  "build_helper 0.1.0",
- "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 [[package]]
@@ -130,7 +130,7 @@ version = "0.0.0"
 [[package]]
 name = "gcc"
-version = "0.3.38"
+version = "0.3.40"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 [[package]]
@@ -402,7 +402,7 @@ name = "rustc_llvm"
 version = "0.0.0"
 dependencies = [
  "build_helper 0.1.0",
- "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc_bitflags 0.0.0",
 ]
@@ -551,7 +551,7 @@ version = "0.0.0"
 dependencies = [
  "arena 0.0.0",
  "build_helper 0.1.0",
- "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.0.0",
  "rustc 0.0.0",
  "rustc_back 0.0.0",
@@ -587,7 +587,7 @@ dependencies = [
  "collections 0.0.0",
  "compiler_builtins 0.0.0",
  "core 0.0.0",
- "gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)",
+ "gcc 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.0.0",
  "panic_abort 0.0.0",
  "panic_unwind 0.0.0",
@@ -677,7 +677,7 @@ dependencies = [
 "checksum cmake 0.1.18 (registry+https://github.com/rust-lang/crates.io-index)" = "0e5bcf27e097a184c1df4437654ed98df3d7a516e8508a6ba45d8b092bbdf283"
 "checksum env_logger 0.3.5 (registry+https://github.com/rust-lang/crates.io-index)" = "15abd780e45b3ea4f76b4e9a26ff4843258dd8a3eed2775a0e7368c2e7936c2f"
 "checksum filetime 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "5363ab8e4139b8568a6237db5248646e5a8a2f89bd5ccb02092182b11fd3e922"
-"checksum gcc 0.3.38 (registry+https://github.com/rust-lang/crates.io-index)" = "553f11439bdefe755bf366b264820f1da70f3aaf3924e594b886beb9c831bcf5"
+"checksum gcc 0.3.40 (registry+https://github.com/rust-lang/crates.io-index)" = "872db9e59486ef2b14f8e8c10e9ef02de2bccef6363d7f34835dedb386b3d950"
 "checksum getopts 0.2.14 (registry+https://github.com/rust-lang/crates.io-index)" = "d9047cfbd08a437050b363d35ef160452c5fe8ea5187ae0a624708c91581d685"
 "checksum libc 0.2.17 (registry+https://github.com/rust-lang/crates.io-index)" = "044d1360593a78f5c8e5e710beccdc24ab71d1f01bc19a29bcacdba22e8475d8"
 "checksum log 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "ab83497bf8bf4ed2a74259c1c802351fcd67a65baa86394b6ba73c36f4838054"


@@ -22,7 +22,7 @@ Note that if you're on Unix you should be able to execute the script directly:
 ./x.py build
 ```
-The script accepts commands, flags, and filters to determine what to do:
+The script accepts commands, flags, and arguments to determine what to do:
 * `build` - a general purpose command for compiling code. Alone `build` will
   bootstrap the entire compiler, and otherwise arguments passed indicate what to
@@ -42,6 +42,15 @@ The script accepts commands, flags, and filters to determine what to do:
   ./x.py build --stage 0 src/libtest
 ```
  If files that would normally be rebuilt from stage 0 are dirty, the rebuild can
  be overridden using `--keep-stage 0`. Using `--keep-stage n` will skip all steps
  that belong to stage n or earlier:
```
# keep old build products for stage 0 and build stage 1
./x.py build --keep-stage 0 --stage 1
```
 * `test` - a command for executing unit tests. Like the `build` command this
   will execute the entire test suite by default, and otherwise it can be used to
   select which test suite is run:
@@ -54,7 +63,7 @@ The script accepts commands, flags, and filters to determine what to do:
 ./x.py test src/test/run-pass
 # execute only some tests in the run-pass test suite
-./x.py test src/test/run-pass --filter my-filter
+./x.py test src/test/run-pass --test-args substring-of-test-name
 # execute tests in the standard library in stage0
 ./x.py test --stage 0 src/libstd
@@ -107,6 +116,42 @@ compiler. What actually happens when you invoke rustbuild is:
 The goal of each stage is to (a) leverage Cargo as much as possible and failing
 that (b) leverage Rust as much as possible!
## Incremental builds
You can configure rustbuild to use incremental compilation. Because
incremental is new and evolving rapidly, if you want to use it, it is
recommended that you replace the snapshot with a locally installed
nightly build of rustc. You will want to keep this up to date.
To follow this course of action, the first thing you will want to do is
install a nightly, presumably using `rustup`. You will then want to
configure your directory to use this build, like so:
```
# configure to use local rust instead of downloading a beta.
# `--local-rust-root` is optional here. If elided, we will
# use whatever rustc we find on your PATH.
> configure --enable-rustbuild --local-rust-root=~/.cargo/ --enable-local-rebuild
```
After that, you can use the `--incremental` flag to actually do
incremental builds:
```
> ../x.py build --incremental
```
The `--incremental` flag will store incremental compilation artifacts
in `build/<host>/stage0-incremental`. Note that we only use incremental
compilation for the stage0 -> stage1 compilation -- this is because
the stage1 compiler is changing, and we don't try to cache and reuse
incremental artifacts across different versions of the compiler. For
this reason, `--incremental` defaults to `--stage 1` (though you can
manually select a higher stage, if you prefer).
You can always drop the `--incremental` to build as normal (but you
will still be using the local nightly as your bootstrap).
 ## Directory Layout
 This build system houses all output under the `build` directory, which looks


@@ -25,12 +25,17 @@
 //! switching compilers for the bootstrap and for build scripts will probably
 //! never get replaced.
+#![deny(warnings)]
 extern crate bootstrap;
 use std::env;
 use std::ffi::OsString;
use std::io;
use std::io::prelude::*;
use std::str::FromStr;
 use std::path::PathBuf;
-use std::process::Command;
+use std::process::{Command, ExitStatus};
 fn main() {
     let args = env::args_os().skip(1).collect::<Vec<_>>();
@@ -41,6 +46,11 @@ fn main() {
         .and_then(|w| w[1].to_str());
     let version = args.iter().find(|w| &**w == "-vV");
let verbose = match env::var("RUSTC_VERBOSE") {
Ok(s) => usize::from_str(&s).expect("RUSTC_VERBOSE should be an integer"),
Err(_) => 0,
};
     // Build scripts always use the snapshot compiler which is guaranteed to be
     // able to produce an executable, whereas intermediate compilers may not
     // have the standard library built yet and may not be able to produce an
@@ -95,6 +105,15 @@
         cmd.args(&s.split(" ").filter(|s| !s.is_empty()).collect::<Vec<_>>());
     }
// Pass down incremental directory, if any.
if let Ok(dir) = env::var("RUSTC_INCREMENTAL") {
cmd.arg(format!("-Zincremental={}", dir));
if verbose > 0 {
cmd.arg("-Zincremental-info");
}
}
     // If we're compiling specifically the `panic_abort` crate then we pass
     // the `-C panic=abort` option. Note that we do not do this for any
     // other crate intentionally as this is the only crate for now that we
@@ -158,6 +177,15 @@
         // to change a flag in a binary?
         if env::var("RUSTC_RPATH") == Ok("true".to_string()) {
             let rpath = if target.contains("apple") {
// Note that we need to take one extra step on OSX to also pass
// `-Wl,-install_name,@rpath/...` to get things to work right. To
// do that we pass a weird flag to the compiler to get it to do
// so. Note that this is definitely a hack, and we should likely
// flesh out rpath support more fully in the future.
if stage != "0" {
cmd.arg("-Z").arg("osx-rpath-install-name");
}
Some("-Wl,-rpath,@loader_path/../lib") Some("-Wl,-rpath,@loader_path/../lib")
} else if !target.contains("windows") { } else if !target.contains("windows") {
Some("-Wl,-rpath,$ORIGIN/../lib") Some("-Wl,-rpath,$ORIGIN/../lib")
@ -167,12 +195,33 @@ fn main() {
if let Some(rpath) = rpath { if let Some(rpath) = rpath {
cmd.arg("-C").arg(format!("link-args={}", rpath)); cmd.arg("-C").arg(format!("link-args={}", rpath));
} }
if let Ok(s) = env::var("RUSTFLAGS") {
for flag in s.split_whitespace() {
cmd.arg(flag);
}
}
         }
     }
if verbose > 1 {
writeln!(&mut io::stderr(), "rustc command: {:?}", cmd).unwrap();
}
     // Actually run the compiler!
-    std::process::exit(match cmd.status() {
-        Ok(s) => s.code().unwrap_or(1),
+    std::process::exit(match exec_cmd(&mut cmd) {
+        Ok(s) => s.code().unwrap_or(0xfe),
         Err(e) => panic!("\n\nfailed to run {:?}: {}\n\n", cmd, e),
     })
 }
#[cfg(unix)]
fn exec_cmd(cmd: &mut Command) -> ::std::io::Result<ExitStatus> {
use std::os::unix::process::CommandExt;
Err(cmd.exec())
}
#[cfg(not(unix))]
fn exec_cmd(cmd: &mut Command) -> ::std::io::Result<ExitStatus> {
cmd.status()
}


@@ -12,6 +12,8 @@
 //!
 //! See comments in `src/bootstrap/rustc.rs` for more information.
+#![deny(warnings)]
 extern crate bootstrap;
 use std::env;


@@ -81,7 +81,7 @@ def verify(path, sha_path, verbose):
     with open(path, "rb") as f:
         found = hashlib.sha256(f.read()).hexdigest()
     with open(sha_path, "r") as f:
-        expected, _ = f.readline().split()
+        expected = f.readline().split()[0]
     verified = found == expected
     if not verified:
         print("invalid checksum:\n"
@@ -146,7 +146,7 @@ class RustBuild(object):
     def download_stage0(self):
         cache_dst = os.path.join(self.build_dir, "cache")
         rustc_cache = os.path.join(cache_dst, self.stage0_rustc_date())
-        cargo_cache = os.path.join(cache_dst, self.stage0_cargo_date())
+        cargo_cache = os.path.join(cache_dst, self.stage0_cargo_rev())
         if not os.path.exists(rustc_cache):
             os.makedirs(rustc_cache)
         if not os.path.exists(cargo_cache):
@@ -179,21 +179,17 @@ class RustBuild(object):
         if self.cargo().startswith(self.bin_root()) and \
                 (not os.path.exists(self.cargo()) or self.cargo_out_of_date()):
             self.print_what_it_means_to_bootstrap()
-            channel = self.stage0_cargo_channel()
-            filename = "cargo-{}-{}.tar.gz".format(channel, self.build)
-            url = "https://static.rust-lang.org/cargo-dist/" + self.stage0_cargo_date()
+            filename = "cargo-nightly-{}.tar.gz".format(self.build)
+            url = "https://s3.amazonaws.com/rust-lang-ci/cargo-builds/" + self.stage0_cargo_rev()
             tarball = os.path.join(cargo_cache, filename)
             if not os.path.exists(tarball):
                 get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
             unpack(tarball, self.bin_root(), match="cargo", verbose=self.verbose)
             with open(self.cargo_stamp(), 'w') as f:
-                f.write(self.stage0_cargo_date())
+                f.write(self.stage0_cargo_rev())
-    def stage0_cargo_date(self):
-        return self._cargo_date
-    def stage0_cargo_channel(self):
-        return self._cargo_channel
+    def stage0_cargo_rev(self):
+        return self._cargo_rev
     def stage0_rustc_date(self):
         return self._rustc_date
@@ -217,7 +213,7 @@ class RustBuild(object):
         if not os.path.exists(self.cargo_stamp()) or self.clean:
             return True
         with open(self.cargo_stamp(), 'r') as f:
-            return self.stage0_cargo_date() != f.read()
+            return self.stage0_cargo_rev() != f.read()
     def bin_root(self):
         return os.path.join(self.build_dir, self.build, "stage0")
@@ -294,6 +290,8 @@ class RustBuild(object):
             env["DYLD_LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib")
         env["PATH"] = os.path.join(self.bin_root(), "bin") + \
             os.pathsep + env["PATH"]
if not os.path.isfile(self.cargo()):
raise Exception("no cargo executable found at `%s`" % self.cargo())
         args = [self.cargo(), "build", "--manifest-path",
                 os.path.join(self.rust_root, "src/bootstrap/Cargo.toml")]
         if self.use_vendored_sources:
@@ -467,7 +465,7 @@ def main():
     data = stage0_data(rb.rust_root)
     rb._rustc_channel, rb._rustc_date = data['rustc'].split('-', 1)
-    rb._cargo_channel, rb._cargo_date = data['cargo'].split('-', 1)
+    rb._cargo_rev = data['cargo']
     start_time = time()


@@ -15,7 +15,7 @@
 //! `package_vers`, and otherwise indicating to the compiler what it should
 //! print out as part of its version information.
-use std::fs::{self, File};
+use std::fs::File;
 use std::io::prelude::*;
 use std::process::Command;
@@ -69,7 +69,7 @@ pub fn collect(build: &mut Build) {
     // If we have a git directory, add in some various SHA information of what
     // commit this compiler was compiled from.
-    if fs::metadata(build.src.join(".git")).is_ok() {
+    if build.src.join(".git").is_dir() {
         let ver_date = output(Command::new("git").current_dir(&build.src)
             .arg("log").arg("-1")
             .arg("--date=short")


@@ -13,6 +13,8 @@
 //! This file implements the various regression test suites that we execute on
 //! our CI.
extern crate build_helper;
 use std::collections::HashSet;
 use std::env;
 use std::fmt;
@@ -190,7 +192,7 @@ pub fn compiletest(build: &Build,
     cmd.args(&build.flags.cmd.test_args());
-    if build.config.verbose || build.flags.verbose {
+    if build.config.verbose() || build.flags.verbose() {
         cmd.arg("--verbose");
     }
@@ -299,6 +301,7 @@ fn markdown_test(build: &Build, compiler: &Compiler, markdown: &Path) {
     build.add_rust_test_threads(&mut cmd);
     cmd.arg("--test");
     cmd.arg(markdown);
cmd.env("RUSTC_BOOTSTRAP", "1");
     let mut test_args = build.flags.cmd.test_args().join(" ");
     if build.config.quiet_tests {
@@ -542,7 +545,7 @@ pub fn distcheck(build: &Build) {
     build.run(&mut cmd);
     build.run(Command::new("./configure")
                  .current_dir(&dir));
-    build.run(Command::new("make")
+    build.run(Command::new(build_helper::make(&build.config.build))
                  .arg("check")
                  .current_dir(&dir));
 }


@@ -38,9 +38,9 @@ use util::push_exe_path;
 /// `src/bootstrap/config.toml.example`.
 #[derive(Default)]
 pub struct Config {
-    pub ccache: bool,
+    pub ccache: Option<String>,
     pub ninja: bool,
-    pub verbose: bool,
+    pub verbose: usize,
     pub submodules: bool,
     pub compiler_docs: bool,
     pub docs: bool,
@@ -113,6 +113,7 @@ pub struct Target {
 #[derive(RustcDecodable, Default)]
 struct TomlConfig {
     build: Option<Build>,
install: Option<Install>,
     llvm: Option<Llvm>,
     rust: Option<Rust>,
     target: Option<HashMap<String, TomlTarget>>,
@@ -135,10 +136,16 @@ struct Build {
     python: Option<String>,
 }
/// TOML representation of various global install decisions.
#[derive(RustcDecodable, Default, Clone)]
struct Install {
prefix: Option<String>,
}
 /// TOML representation of how the LLVM build is configured.
 #[derive(RustcDecodable, Default)]
 struct Llvm {
-    ccache: Option<bool>,
+    ccache: Option<StringOrBool>,
     ninja: Option<bool>,
     assertions: Option<bool>,
     optimize: Option<bool>,
@@ -147,6 +154,18 @@ struct Llvm {
     static_libstdcpp: Option<bool>,
 }
#[derive(RustcDecodable)]
enum StringOrBool {
String(String),
Bool(bool),
}
impl Default for StringOrBool {
fn default() -> StringOrBool {
StringOrBool::Bool(false)
}
}
 /// TOML representation of how the Rust build is configured.
 #[derive(RustcDecodable, Default)]
 struct Rust {
@@ -246,8 +265,20 @@ impl Config {
         set(&mut config.submodules, build.submodules);
         set(&mut config.vendor, build.vendor);
if let Some(ref install) = toml.install {
config.prefix = install.prefix.clone();
}
         if let Some(ref llvm) = toml.llvm {
-            set(&mut config.ccache, llvm.ccache);
+            match llvm.ccache {
Some(StringOrBool::String(ref s)) => {
config.ccache = Some(s.to_string())
}
Some(StringOrBool::Bool(true)) => {
config.ccache = Some("ccache".to_string());
}
Some(StringOrBool::Bool(false)) | None => {}
}
             set(&mut config.ninja, llvm.ninja);
             set(&mut config.llvm_assertions, llvm.assertions);
             set(&mut config.llvm_optimize, llvm.optimize);
@@ -255,6 +286,7 @@
             set(&mut config.llvm_version_check, llvm.version_check);
             set(&mut config.llvm_static_stdcpp, llvm.static_libstdcpp);
         }
         if let Some(ref rust) = toml.rust {
             set(&mut config.rust_debug_assertions, rust.debug_assertions);
             set(&mut config.rust_debuginfo, rust.debuginfo);
@@ -338,7 +370,6 @@
         }
         check! {
-            ("CCACHE", self.ccache),
             ("MANAGE_SUBMODULES", self.submodules),
             ("COMPILER_DOCS", self.compiler_docs),
             ("DOCS", self.docs),
@@ -475,10 +506,24 @@ impl Config {
                     let path = parse_configure_path(value);
                     self.python = Some(path);
                 }
"CFG_ENABLE_CCACHE" if value == "1" => {
self.ccache = Some("ccache".to_string());
}
"CFG_ENABLE_SCCACHE" if value == "1" => {
self.ccache = Some("sccache".to_string());
}
                 _ => {}
             }
         }
     }
pub fn verbose(&self) -> bool {
self.verbose > 0
}
pub fn very_verbose(&self) -> bool {
self.verbose > 1
}
 }
#[cfg(not(windows))] #[cfg(not(windows))]


@@ -25,6 +25,8 @@
 # Indicates whether ccache is used when building LLVM
 #ccache = false
# or alternatively ...
#ccache = "/path/to/ccache"
 # If an external LLVM root is specified, we automatically check the version by
 # default to make sure it's within the range that we're expecting, but setting
@@ -98,6 +100,14 @@
 # Indicate whether the vendored sources are used for Rust dependencies or not
 #vendor = false
# =============================================================================
# General install configuration options
# =============================================================================
[install]
# Instead of installing to /usr/local, install to this path instead.
#prefix = "/path/to/install"
 # =============================================================================
 # Options for compiling Rust code itself
 # =============================================================================


@@ -48,6 +48,11 @@ pub fn tmpdir(build: &Build) -> PathBuf {
 /// Slurps up documentation from the `stage`'s `host`.
 pub fn docs(build: &Build, stage: u32, host: &str) {
     println!("Dist docs stage{} ({})", stage, host);
if !build.config.docs {
println!("\tskipping - docs disabled");
return
}
let name = format!("rust-docs-{}", package_vers(build)); let name = format!("rust-docs-{}", package_vers(build));
let image = tmpdir(build).join(format!("{}-{}-image", name, name)); let image = tmpdir(build).join(format!("{}-{}-image", name, name));
let _ = fs::remove_dir_all(&image); let _ = fs::remove_dir_all(&image);
@ -92,6 +97,7 @@ pub fn mingw(build: &Build, host: &str) {
let name = format!("rust-mingw-{}", package_vers(build)); let name = format!("rust-mingw-{}", package_vers(build));
let image = tmpdir(build).join(format!("{}-{}-image", name, host)); let image = tmpdir(build).join(format!("{}-{}-image", name, host));
let _ = fs::remove_dir_all(&image); let _ = fs::remove_dir_all(&image);
t!(fs::create_dir_all(&image));
     // The first argument to the script is a "temporary directory" which is just
     // thrown away (this contains the runtime DLLs included in the rustc package
@@ -260,6 +266,14 @@ pub fn debugger_scripts(build: &Build,
 pub fn std(build: &Build, compiler: &Compiler, target: &str) {
     println!("Dist std stage{} ({} -> {})", compiler.stage, compiler.host,
              target);
// The only true set of target libraries came from the build triple, so
// let's reduce redundant work by only producing archives from that host.
if compiler.host != build.config.build {
println!("\tskipping, not a build host");
return
}
let name = format!("rust-std-{}", package_vers(build)); let name = format!("rust-std-{}", package_vers(build));
let image = tmpdir(build).join(format!("{}-{}-image", name, target)); let image = tmpdir(build).join(format!("{}-{}-image", name, target));
let _ = fs::remove_dir_all(&image); let _ = fs::remove_dir_all(&image);
@ -294,10 +308,15 @@ pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
println!("Dist analysis"); println!("Dist analysis");
if build.config.channel != "nightly" { if build.config.channel != "nightly" {
println!("Skipping dist-analysis - not on nightly channel"); println!("\tskipping - not on nightly channel");
return; return;
} }
if compiler.host != build.config.build {
println!("\tskipping - not a build host");
return
}
     if compiler.stage != 2 {
println!("\tskipping - not stage2");
         return
     }
@@ -324,18 +343,17 @@
        .arg("--legacy-manifest-dirs=rustlib,cargo");
     build.run(&mut cmd);
     t!(fs::remove_dir_all(&image));
-    // Create plain source tarball
-    let mut cmd = Command::new("tar");
-    cmd.arg("-czf").arg(sanitize_sh(&distdir(build).join(&format!("{}.tar.gz", name))))
-       .arg("analysis")
-       .current_dir(&src);
-    build.run(&mut cmd);
 }
 /// Creates the `rust-src` installer component and the plain source tarball
-pub fn rust_src(build: &Build) {
+pub fn rust_src(build: &Build, host: &str) {
     println!("Dist src");
if host != build.config.build {
println!("\tskipping, not a build host");
return
}
     let plain_name = format!("rustc-{}-src", package_vers(build));
     let name = format!("rust-src-{}", package_vers(build));
     let image = tmpdir(build).join(format!("{}-image", name));


@@ -27,8 +27,9 @@ use step;
 /// Deserialized version of all flags for this compile.
 pub struct Flags {
-    pub verbose: bool,
+    pub verbose: usize, // verbosity level: 0 == not verbose, 1 == verbose, 2 == very verbose
     pub stage: Option<u32>,
+    pub keep_stage: Option<u32>,
     pub build: String,
     pub host: Vec<String>,
     pub target: Vec<String>,
@@ -36,6 +37,17 @@ pub struct Flags {
     pub src: Option<PathBuf>,
     pub jobs: Option<u32>,
     pub cmd: Subcommand,
pub incremental: bool,
}
impl Flags {
pub fn verbose(&self) -> bool {
self.verbose > 0
}
pub fn very_verbose(&self) -> bool {
self.verbose > 1
}
 }
 pub enum Subcommand {
@@ -62,12 +74,14 @@
 impl Flags {
     pub fn parse(args: &[String]) -> Flags {
         let mut opts = Options::new();
-        opts.optflag("v", "verbose", "use verbose output");
+        opts.optflagmulti("v", "verbose", "use verbose output (-vv for very verbose)");
opts.optflag("i", "incremental", "use incremental compilation");
opts.optopt("", "config", "TOML configuration file for build", "FILE"); opts.optopt("", "config", "TOML configuration file for build", "FILE");
opts.optopt("", "build", "build target of the stage0 compiler", "BUILD"); opts.optopt("", "build", "build target of the stage0 compiler", "BUILD");
opts.optmulti("", "host", "host targets to build", "HOST"); opts.optmulti("", "host", "host targets to build", "HOST");
opts.optmulti("", "target", "target targets to build", "TARGET"); opts.optmulti("", "target", "target targets to build", "TARGET");
opts.optopt("", "stage", "stage to build", "N"); opts.optopt("", "stage", "stage to build", "N");
opts.optopt("", "keep-stage", "stage to keep without recompiling", "N");
opts.optopt("", "src", "path to the root of the rust checkout", "DIR"); opts.optopt("", "src", "path to the root of the rust checkout", "DIR");
opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS"); opts.optopt("j", "jobs", "number of jobs to run in parallel", "JOBS");
opts.optflag("h", "help", "print this help message"); opts.optflag("h", "help", "print this help message");
@ -108,7 +122,6 @@ Arguments:
tests that should be compiled and run. For example: tests that should be compiled and run. For example:
./x.py test src/test/run-pass ./x.py test src/test/run-pass
./x.py test src/test/run-pass/assert-*
./x.py test src/libstd --test-args hash_map ./x.py test src/libstd --test-args hash_map
./x.py test src/libstd --stage 0 ./x.py test src/libstd --stage 0
@ -255,9 +268,20 @@ To learn more about a subcommand, run `./x.py <command> -h`
} }
}); });
let mut stage = m.opt_str("stage").map(|j| j.parse().unwrap());
let incremental = m.opt_present("i");
if incremental {
if stage.is_none() {
stage = Some(1);
}
}
         Flags {
-            verbose: m.opt_present("v"),
-            stage: m.opt_str("stage").map(|j| j.parse().unwrap()),
+            verbose: m.opt_count("v"),
+            stage: stage,
keep_stage: m.opt_str("keep-stage").map(|j| j.parse().unwrap()),
build: m.opt_str("build").unwrap_or_else(|| { build: m.opt_str("build").unwrap_or_else(|| {
env::var("BUILD").unwrap() env::var("BUILD").unwrap()
}), }),
@ -267,6 +291,7 @@ To learn more about a subcommand, run `./x.py <command> -h`
src: m.opt_str("src").map(PathBuf::from), src: m.opt_str("src").map(PathBuf::from),
jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()), jobs: m.opt_str("jobs").map(|j| j.parse().unwrap()),
cmd: cmd, cmd: cmd,
incremental: incremental,
         }
     }
 }


@@ -64,6 +64,8 @@
 //! More documentation can be found in each respective module below, and you can
 //! also check out the `src/bootstrap/README.md` file for more information.
#![deny(warnings)]
 extern crate build_helper;
 extern crate cmake;
 extern crate filetime;
@@ -74,6 +76,7 @@ extern crate rustc_serialize;
 extern crate toml;
 use std::collections::HashMap;
use std::cmp;
 use std::env;
 use std::ffi::OsString;
 use std::fs::{self, File};
@@ -497,6 +500,17 @@ impl Build {
         cargo.env("RUSTC_BOOTSTRAP", "1");
         self.add_rust_test_threads(&mut cargo);
// Ignore incremental modes except for stage0, since we're
// not guaranteeing correctness across builds if the compiler
// is changing under your feet.
if self.flags.incremental && compiler.stage == 0 {
let incr_dir = self.incremental_dir(compiler);
cargo.env("RUSTC_INCREMENTAL", incr_dir);
}
let verbose = cmp::max(self.config.verbose, self.flags.verbose);
cargo.env("RUSTC_VERBOSE", format!("{}", verbose));
         // Specify some various options for build scripts used throughout
         // the build.
         //
@@ -516,7 +530,7 @@ impl Build {
         // FIXME: should update code to not require this env var
         cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
-        if self.config.verbose || self.flags.verbose {
+        if self.config.verbose() || self.flags.verbose() {
             cargo.arg("-v");
         }
         // FIXME: cargo bench does not accept `--release`
@@ -630,6 +644,12 @@ impl Build {
         }
     }
/// Get the directory for incremental by-products when using the
/// given compiler.
fn incremental_dir(&self, compiler: &Compiler) -> PathBuf {
self.out.join(compiler.host).join(format!("stage{}-incremental", compiler.stage))
}
     /// Returns the libdir where the standard library and other artifacts are
     /// found for a compiler's sysroot.
     fn sysroot_libdir(&self, compiler: &Compiler, target: &str) -> PathBuf {
@@ -703,7 +723,8 @@ impl Build {
     fn llvm_filecheck(&self, target: &str) -> PathBuf {
         let target_config = self.config.target_config.get(target);
         if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
-            s.parent().unwrap().join(exe("FileCheck", target))
+            let llvm_bindir = output(Command::new(s).arg("--bindir"));
+            Path::new(llvm_bindir.trim()).join(exe("FileCheck", target))
         } else {
             let base = self.llvm_out(&self.config.build).join("build");
             let exe = exe("FileCheck", target);
@ -768,7 +789,7 @@ impl Build {
/// Prints a message if this build is configured in verbose mode. /// Prints a message if this build is configured in verbose mode.
fn verbose(&self, msg: &str) { fn verbose(&self, msg: &str) {
if self.flags.verbose || self.config.verbose { if self.flags.verbose() || self.config.verbose() {
println!("{}", msg); println!("{}", msg);
} }
} }


@ -56,7 +56,8 @@ check-cargotest:
dist: dist:
$(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS) $(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)
distcheck: distcheck:
$(Q)$(BOOTSTRAP) test distcheck $(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)
$(Q)$(BOOTSTRAP) test distcheck $(BOOTSTRAP_ARGS)
install: install:
$(Q)$(BOOTSTRAP) dist --install $(BOOTSTRAP_ARGS) $(Q)$(BOOTSTRAP) dist --install $(BOOTSTRAP_ARGS)
tidy: tidy:
@ -65,7 +66,7 @@ tidy:
check-stage2-T-arm-linux-androideabi-H-x86_64-unknown-linux-gnu: check-stage2-T-arm-linux-androideabi-H-x86_64-unknown-linux-gnu:
$(Q)$(BOOTSTRAP) test --target arm-linux-androideabi $(Q)$(BOOTSTRAP) test --target arm-linux-androideabi
check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu: check-stage2-T-x86_64-unknown-linux-musl-H-x86_64-unknown-linux-gnu:
$(Q)$(BOOTSTRAP) test --target x86_64-unknown-linux-gnu $(Q)$(BOOTSTRAP) test --target x86_64-unknown-linux-musl
.PHONY: dist .PHONY: dist


@ -81,7 +81,7 @@ pub fn llvm(build: &Build, target: &str) {
.profile(profile) .profile(profile)
.define("LLVM_ENABLE_ASSERTIONS", assertions) .define("LLVM_ENABLE_ASSERTIONS", assertions)
.define("LLVM_TARGETS_TO_BUILD", .define("LLVM_TARGETS_TO_BUILD",
"X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430") "X86;ARM;AArch64;Mips;PowerPC;SystemZ;JSBackend;MSP430;Sparc")
.define("LLVM_INCLUDE_EXAMPLES", "OFF") .define("LLVM_INCLUDE_EXAMPLES", "OFF")
.define("LLVM_INCLUDE_TESTS", "OFF") .define("LLVM_INCLUDE_TESTS", "OFF")
.define("LLVM_INCLUDE_DOCS", "OFF") .define("LLVM_INCLUDE_DOCS", "OFF")
@ -109,10 +109,10 @@ pub fn llvm(build: &Build, target: &str) {
// MSVC handles compiler business itself // MSVC handles compiler business itself
if !target.contains("msvc") { if !target.contains("msvc") {
if build.config.ccache { if let Some(ref ccache) = build.config.ccache {
cfg.define("CMAKE_C_COMPILER", "ccache") cfg.define("CMAKE_C_COMPILER", ccache)
.define("CMAKE_C_COMPILER_ARG1", build.cc(target)) .define("CMAKE_C_COMPILER_ARG1", build.cc(target))
.define("CMAKE_CXX_COMPILER", "ccache") .define("CMAKE_CXX_COMPILER", ccache)
.define("CMAKE_CXX_COMPILER_ARG1", build.cxx(target)); .define("CMAKE_CXX_COMPILER_ARG1", build.cxx(target));
} else { } else {
cfg.define("CMAKE_C_COMPILER", build.cc(target)) cfg.define("CMAKE_C_COMPILER", build.cc(target))


@ -143,7 +143,7 @@ pub fn check(build: &mut Build) {
// Externally configured LLVM requires FileCheck to exist // Externally configured LLVM requires FileCheck to exist
let filecheck = build.llvm_filecheck(&build.config.build); let filecheck = build.llvm_filecheck(&build.config.build);
if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests { if !filecheck.starts_with(&build.out) && !filecheck.exists() && build.config.codegen_tests {
panic!("filecheck executable {:?} does not exist", filecheck); panic!("FileCheck executable {:?} does not exist", filecheck);
} }
for target in build.config.target.iter() { for target in build.config.target.iter() {
@ -223,4 +223,8 @@ $ pacman -R cmake && pacman -S mingw-w64-x86_64-cmake
if build.lldb_version.is_some() { if build.lldb_version.is_some() {
build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok(); build.lldb_python_dir = run(Command::new("lldb").arg("-P")).ok();
} }
if let Some(ref s) = build.config.ccache {
need_cmd(s.as_ref());
}
} }


@ -86,7 +86,7 @@ pub fn build_rules(build: &Build) -> Rules {
// //
// To handle this we do a bit of dynamic dispatch to see what the dependency // To handle this we do a bit of dynamic dispatch to see what the dependency
// is. If we're building a LLVM for the build triple, then we don't actually // is. If we're building a LLVM for the build triple, then we don't actually
// have any dependencies! To do that we return a dependency on the "dummy" // have any dependencies! To do that we return a dependency on the `Step::noop()`
// target which does nothing. // target which does nothing.
// //
// If we're build a cross-compiled LLVM, however, we need to assemble the // If we're build a cross-compiled LLVM, however, we need to assemble the
@ -104,7 +104,7 @@ pub fn build_rules(build: &Build) -> Rules {
.host(true) .host(true)
.dep(move |s| { .dep(move |s| {
if s.target == build.config.build { if s.target == build.config.build {
dummy(s, build) Step::noop()
} else { } else {
s.target(&build.config.build) s.target(&build.config.build)
} }
@ -115,14 +115,11 @@ pub fn build_rules(build: &Build) -> Rules {
// going on here. You can check out the API docs below and also see a bunch // going on here. You can check out the API docs below and also see a bunch
// more examples of rules directly below as well. // more examples of rules directly below as well.
// dummy rule to do nothing, useful when a dep maps to no deps
rules.build("dummy", "path/to/nowhere");
// the compiler with no target libraries ready to go // the compiler with no target libraries ready to go
rules.build("rustc", "src/rustc") rules.build("rustc", "src/rustc")
.dep(move |s| { .dep(move |s| {
if s.stage == 0 { if s.stage == 0 {
dummy(s, build) Step::noop()
} else { } else {
s.name("librustc") s.name("librustc")
.host(&build.config.build) .host(&build.config.build)
@ -165,7 +162,7 @@ pub fn build_rules(build: &Build) -> Rules {
.dep(move |s| s.name("rustc").host(&build.config.build).target(s.host)) .dep(move |s| s.name("rustc").host(&build.config.build).target(s.host))
.dep(move |s| { .dep(move |s| {
if s.host == build.config.build { if s.host == build.config.build {
dummy(s, build) Step::noop()
} else { } else {
s.host(&build.config.build) s.host(&build.config.build)
} }
@ -183,7 +180,7 @@ pub fn build_rules(build: &Build) -> Rules {
.dep(|s| s.name("libstd")) .dep(|s| s.name("libstd"))
.dep(move |s| { .dep(move |s| {
if s.host == build.config.build { if s.host == build.config.build {
dummy(s, build) Step::noop()
} else { } else {
s.host(&build.config.build) s.host(&build.config.build)
} }
@ -203,7 +200,7 @@ pub fn build_rules(build: &Build) -> Rules {
.dep(move |s| s.name("llvm").host(&build.config.build).stage(0)) .dep(move |s| s.name("llvm").host(&build.config.build).stage(0))
.dep(move |s| { .dep(move |s| {
if s.host == build.config.build { if s.host == build.config.build {
dummy(s, build) Step::noop()
} else { } else {
s.host(&build.config.build) s.host(&build.config.build)
} }
@ -233,7 +230,7 @@ pub fn build_rules(build: &Build) -> Rules {
if s.target.contains("android") { if s.target.contains("android") {
s.name("android-copy-libs") s.name("android-copy-libs")
} else { } else {
dummy(s, build) Step::noop()
} }
}) })
.default(true) .default(true)
@ -270,16 +267,18 @@ pub fn build_rules(build: &Build) -> Rules {
// nothing to do for debuginfo tests // nothing to do for debuginfo tests
} else if build.config.build.contains("apple") { } else if build.config.build.contains("apple") {
rules.test("check-debuginfo", "src/test/debuginfo") rules.test("check-debuginfo", "src/test/debuginfo")
.default(true)
.dep(|s| s.name("libtest")) .dep(|s| s.name("libtest"))
.dep(|s| s.name("tool-compiletest").host(s.host)) .dep(|s| s.name("tool-compiletest").target(s.host))
.dep(|s| s.name("test-helpers")) .dep(|s| s.name("test-helpers"))
.dep(|s| s.name("debugger-scripts")) .dep(|s| s.name("debugger-scripts"))
.run(move |s| check::compiletest(build, &s.compiler(), s.target, .run(move |s| check::compiletest(build, &s.compiler(), s.target,
"debuginfo-lldb", "debuginfo")); "debuginfo-lldb", "debuginfo"));
} else { } else {
rules.test("check-debuginfo", "src/test/debuginfo") rules.test("check-debuginfo", "src/test/debuginfo")
.default(true)
.dep(|s| s.name("libtest")) .dep(|s| s.name("libtest"))
.dep(|s| s.name("tool-compiletest").host(s.host)) .dep(|s| s.name("tool-compiletest").target(s.host))
.dep(|s| s.name("test-helpers")) .dep(|s| s.name("test-helpers"))
.dep(|s| s.name("debugger-scripts")) .dep(|s| s.name("debugger-scripts"))
.run(move |s| check::compiletest(build, &s.compiler(), s.target, .run(move |s| check::compiletest(build, &s.compiler(), s.target,
@ -458,7 +457,7 @@ pub fn build_rules(build: &Build) -> Rules {
for (krate, path, default) in krates("test_shim") { for (krate, path, default) in krates("test_shim") {
rules.doc(&krate.doc_step, path) rules.doc(&krate.doc_step, path)
.dep(|s| s.name("libtest")) .dep(|s| s.name("libtest"))
.default(default && build.config.docs) .default(default && build.config.compiler_docs)
.run(move |s| doc::test(build, s.stage, s.target)); .run(move |s| doc::test(build, s.stage, s.target));
} }
for (krate, path, default) in krates("rustc-main") { for (krate, path, default) in krates("rustc-main") {
@ -490,16 +489,21 @@ pub fn build_rules(build: &Build) -> Rules {
.default(true) .default(true)
.run(move |s| dist::std(build, &s.compiler(), s.target)); .run(move |s| dist::std(build, &s.compiler(), s.target));
rules.dist("dist-mingw", "path/to/nowhere") rules.dist("dist-mingw", "path/to/nowhere")
.run(move |s| dist::mingw(build, s.target)); .default(true)
.run(move |s| {
if s.target.contains("pc-windows-gnu") {
dist::mingw(build, s.target)
}
});
rules.dist("dist-src", "src") rules.dist("dist-src", "src")
.default(true) .default(true)
.host(true) .host(true)
.run(move |_| dist::rust_src(build)); .run(move |s| dist::rust_src(build, s.target));
rules.dist("dist-docs", "src/doc") rules.dist("dist-docs", "src/doc")
.default(true) .default(true)
.dep(|s| s.name("default:doc")) .dep(|s| s.name("default:doc"))
.run(move |s| dist::docs(build, s.stage, s.target)); .run(move |s| dist::docs(build, s.stage, s.target));
rules.dist("dist-analysis", "src/libstd") rules.dist("dist-analysis", "analysis")
.dep(|s| s.name("dist-std")) .dep(|s| s.name("dist-std"))
.default(true) .default(true)
.run(move |s| dist::analysis(build, &s.compiler(), s.target)); .run(move |s| dist::analysis(build, &s.compiler(), s.target));
@ -509,12 +513,6 @@ pub fn build_rules(build: &Build) -> Rules {
rules.verify(); rules.verify();
return rules; return rules;
fn dummy<'a>(s: &Step<'a>, build: &'a Build) -> Step<'a> {
s.name("dummy").stage(0)
.target(&build.config.build)
.host(&build.config.build)
}
} }
#[derive(PartialEq, Eq, Hash, Clone, Debug)] #[derive(PartialEq, Eq, Hash, Clone, Debug)]
@ -538,6 +536,10 @@ struct Step<'a> {
} }
impl<'a> Step<'a> { impl<'a> Step<'a> {
fn noop() -> Step<'a> {
Step { name: "", stage: 0, host: "", target: "" }
}
/// Creates a new step which is the same as this, except has a new name. /// Creates a new step which is the same as this, except has a new name.
fn name(&self, name: &'a str) -> Step<'a> { fn name(&self, name: &'a str) -> Step<'a> {
Step { name: name, ..*self } Step { name: name, ..*self }
@ -733,6 +735,9 @@ impl<'a> Rules<'a> {
if self.rules.contains_key(&dep.name) || dep.name.starts_with("default:") { if self.rules.contains_key(&dep.name) || dep.name.starts_with("default:") {
continue continue
} }
if dep == Step::noop() {
continue
}
panic!("\ panic!("\
invalid rule dependency graph detected, was a rule added and maybe typo'd? invalid rule dependency graph detected, was a rule added and maybe typo'd?
@ -817,7 +822,16 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
let hosts = if self.build.flags.host.len() > 0 { let hosts = if self.build.flags.host.len() > 0 {
&self.build.flags.host &self.build.flags.host
} else { } else {
&self.build.config.host if kind == Kind::Dist {
// For 'dist' steps we only distribute artifacts built from
// the build platform, so only consider that in the hosts
// array.
// NOTE: This relies on the fact that the build triple is
// always placed first, as done in `config.rs`.
&self.build.config.host[..1]
} else {
&self.build.config.host
}
}; };
let targets = if self.build.flags.target.len() > 0 { let targets = if self.build.flags.target.len() > 0 {
&self.build.flags.target &self.build.flags.target
@ -859,6 +873,7 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
// of what we need to do. // of what we need to do.
let mut order = Vec::new(); let mut order = Vec::new();
let mut added = HashSet::new(); let mut added = HashSet::new();
added.insert(Step::noop());
for step in steps.iter().cloned() { for step in steps.iter().cloned() {
self.fill(step, &mut order, &mut added); self.fill(step, &mut order, &mut added);
} }
@ -871,6 +886,10 @@ invalid rule dependency graph detected, was a rule added and maybe typo'd?
// And finally, iterate over everything and execute it. // And finally, iterate over everything and execute it.
for step in order.iter() { for step in order.iter() {
if self.build.flags.keep_stage.map_or(false, |s| step.stage <= s) {
self.build.verbose(&format!("keeping step {:?}", step));
continue;
}
self.build.verbose(&format!("executing step {:?}", step)); self.build.verbose(&format!("executing step {:?}", step));
(self.rules[step.name].run)(step); (self.rules[step.name].run)(step);
} }

View file

@ -41,6 +41,12 @@ pub fn mtime(path: &Path) -> FileTime {
/// Copies a file from `src` to `dst`, attempting to use hard links and then /// Copies a file from `src` to `dst`, attempting to use hard links and then
/// falling back to an actual filesystem copy if necessary. /// falling back to an actual filesystem copy if necessary.
pub fn copy(src: &Path, dst: &Path) { pub fn copy(src: &Path, dst: &Path) {
// A call to `hard_link` will fail if `dst` exists, so remove it if it
// already exists so we can try to help `hard_link` succeed.
let _ = fs::remove_file(&dst);
// Attempt to "easy copy" by creating a hard link (symlinks don't work on
// windows), but if that fails just fall back to a slow `copy` operation.
let res = fs::hard_link(src, dst); let res = fs::hard_link(src, dst);
let res = res.or_else(|_| fs::copy(src, dst).map(|_| ())); let res = res.or_else(|_| fs::copy(src, dst).map(|_| ()));
if let Err(e) = res { if let Err(e) = res {
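
Taken out of the bootstrap context, a minimal self-contained sketch of the hard-link-then-copy pattern shown in this hunk (the file names below are invented for illustration):

```rust
use std::fs;
use std::io;
use std::path::Path;

// Try to "copy" cheaply with a hard link; if the destination already
// exists or hard links are unsupported, fall back to a real file copy.
fn copy(src: &Path, dst: &Path) -> io::Result<()> {
    // `hard_link` fails if `dst` exists, so clear it first.
    let _ = fs::remove_file(dst);
    fs::hard_link(src, dst).or_else(|_| fs::copy(src, dst).map(|_| ()))
}

fn main() -> io::Result<()> {
    fs::write("a.txt", "hello")?;
    copy(Path::new("a.txt"), Path::new("b.txt"))?;
    assert_eq!(fs::read_to_string("b.txt")?, "hello");
    Ok(())
}
```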

View file

@ -47,6 +47,8 @@ pub fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {
None None
} else if target.contains("musl") { } else if target.contains("musl") {
Some(PathBuf::from("ar")) Some(PathBuf::from("ar"))
} else if target.contains("openbsd") {
Some(PathBuf::from("ar"))
} else { } else {
let parent = cc.parent().unwrap(); let parent = cc.parent().unwrap();
let file = cc.file_name().unwrap().to_str().unwrap(); let file = cc.file_name().unwrap().to_str().unwrap();
@ -61,6 +63,16 @@ pub fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {
} }
} }
pub fn make(host: &str) -> PathBuf {
if host.contains("bitrig") || host.contains("dragonfly") ||
host.contains("freebsd") || host.contains("netbsd") ||
host.contains("openbsd") {
PathBuf::from("gmake")
} else {
PathBuf::from("make")
}
}
pub fn output(cmd: &mut Command) -> String { pub fn output(cmd: &mut Command) -> String {
let output = match cmd.stderr(Stdio::inherit()).output() { let output = match cmd.stderr(Stdio::inherit()).output() {
Ok(status) => status, Ok(status) => status,


@ -16,7 +16,8 @@ RUN dpkg --add-architecture i386 && \
expect \ expect \
openjdk-9-jre \ openjdk-9-jre \
sudo \ sudo \
libstdc++6:i386 libstdc++6:i386 \
xz-utils
WORKDIR /android/ WORKDIR /android/
ENV PATH=$PATH:/android/ndk-arm-9/bin:/android/sdk/tools:/android/sdk/platform-tools ENV PATH=$PATH:/android/ndk-arm-9/bin:/android/sdk/tools:/android/sdk/platform-tools
@ -25,8 +26,17 @@ COPY install-ndk.sh install-sdk.sh accept-licenses.sh /android/
RUN sh /android/install-ndk.sh RUN sh /android/install-ndk.sh
RUN sh /android/install-sdk.sh RUN sh /android/install-sdk.sh
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
COPY start-emulator.sh /android/ COPY start-emulator.sh /android/
ENTRYPOINT ["/android/start-emulator.sh"]
ENTRYPOINT ["/usr/bin/dumb-init", "--", "/android/start-emulator.sh"]
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
ENV TARGETS=arm-linux-androideabi ENV TARGETS=arm-linux-androideabi
ENV TARGETS=$TARGETS,i686-linux-android ENV TARGETS=$TARGETS,i686-linux-android


@ -21,7 +21,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
gcc-powerpc-linux-gnu libc6-dev-powerpc-cross \ gcc-powerpc-linux-gnu libc6-dev-powerpc-cross \
gcc-powerpc64-linux-gnu libc6-dev-ppc64-cross \ gcc-powerpc64-linux-gnu libc6-dev-ppc64-cross \
gcc-powerpc64le-linux-gnu libc6-dev-ppc64el-cross \ gcc-powerpc64le-linux-gnu libc6-dev-ppc64el-cross \
gcc-s390x-linux-gnu libc6-dev-s390x-cross gcc-s390x-linux-gnu libc6-dev-s390x-cross \
xz-utils
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV TARGETS=aarch64-unknown-linux-gnu ENV TARGETS=aarch64-unknown-linux-gnu
ENV TARGETS=$TARGETS,arm-unknown-linux-gnueabi ENV TARGETS=$TARGETS,arm-unknown-linux-gnueabi


@ -11,7 +11,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
cmake \ cmake \
ccache \ ccache \
sudo \ sudo \
gdb gdb \
xz-utils
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu --disable-optimize-tests ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu --disable-optimize-tests
ENV RUST_CHECK_TARGET check ENV RUST_CHECK_TARGET check


@ -11,7 +11,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
cmake \ cmake \
ccache \ ccache \
sudo \ sudo \
gdb gdb \
xz-utils
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu ENV RUST_CONFIGURE_ARGS --build=i686-unknown-linux-gnu
ENV RUST_CHECK_TARGET check ENV RUST_CHECK_TARGET check


@ -25,18 +25,26 @@ docker \
-t rust-ci \ -t rust-ci \
"`dirname "$script"`/$image" "`dirname "$script"`/$image"
mkdir -p $HOME/.ccache
mkdir -p $HOME/.cargo mkdir -p $HOME/.cargo
mkdir -p $root_dir/obj mkdir -p $root_dir/obj
args=
if [ "$SCCACHE_BUCKET" != "" ]; then
args="$args --env SCCACHE_BUCKET=$SCCACHE_BUCKET"
args="$args --env AWS_ACCESS_KEY_ID=$AWS_ACCESS_KEY_ID"
args="$args --env AWS_SECRET_ACCESS_KEY=$AWS_SECRET_ACCESS_KEY"
else
mkdir -p $HOME/.cache/sccache
args="$args --env SCCACHE_DIR=/sccache --volume $HOME/.cache/sccache:/sccache"
fi
exec docker \ exec docker \
run \ run \
--volume "$root_dir:/checkout:ro" \ --volume "$root_dir:/checkout:ro" \
--volume "$root_dir/obj:/checkout/obj" \ --volume "$root_dir/obj:/checkout/obj" \
--workdir /checkout/obj \ --workdir /checkout/obj \
--env SRC=/checkout \ --env SRC=/checkout \
--env CCACHE_DIR=/ccache \ $args \
--volume "$HOME/.ccache:/ccache" \
--env CARGO_HOME=/cargo \ --env CARGO_HOME=/cargo \
--env LOCAL_USER_ID=`id -u` \ --env LOCAL_USER_ID=`id -u` \
--volume "$HOME/.cargo:/cargo" \ --volume "$HOME/.cargo:/cargo" \


@ -18,6 +18,15 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
COPY build-toolchain.sh /tmp/ COPY build-toolchain.sh /tmp/
RUN sh /tmp/build-toolchain.sh RUN sh /tmp/build-toolchain.sh
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
ENV \ ENV \
AR_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-ar \ AR_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-ar \
CC_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-gcc CC_x86_64_unknown_freebsd=x86_64-unknown-freebsd10-gcc


@ -11,7 +11,18 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
cmake \ cmake \
ccache \ ccache \
libssl-dev \ libssl-dev \
sudo sudo \
xz-utils \
pkg-config
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu
ENV RUST_CHECK_TARGET check-cargotest ENV RUST_CHECK_TARGET check-cargotest


@ -11,7 +11,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
cmake \ cmake \
ccache \ ccache \
sudo \ sudo \
gdb gdb \
xz-utils
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS \ ENV RUST_CONFIGURE_ARGS \
--build=x86_64-unknown-linux-gnu \ --build=x86_64-unknown-linux-gnu \


@ -14,7 +14,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
gdb \ gdb \
llvm-3.7-tools \ llvm-3.7-tools \
libedit-dev \ libedit-dev \
zlib1g-dev zlib1g-dev \
xz-utils
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS \ ENV RUST_CONFIGURE_ARGS \
--build=x86_64-unknown-linux-gnu \ --build=x86_64-unknown-linux-gnu \


@ -11,7 +11,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
cmake \ cmake \
ccache \ ccache \
sudo \ sudo \
gdb gdb \
xz-utils
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --disable-rustbuild ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --disable-rustbuild
ENV RUST_CHECK_TARGET check ENV RUST_CHECK_TARGET check


@ -11,7 +11,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
cmake \ cmake \
ccache \ ccache \
sudo \ sudo \
gdb gdb \
xz-utils
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --disable-optimize-tests ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu --disable-optimize-tests
ENV RUST_CHECK_TARGET check ENV RUST_CHECK_TARGET check


@ -11,7 +11,17 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
cmake \ cmake \
ccache \ ccache \
sudo \ sudo \
gdb gdb \
xz-utils
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu ENV RUST_CONFIGURE_ARGS --build=x86_64-unknown-linux-gnu
ENV RUST_CHECK_TARGET check ENV RUST_CHECK_TARGET check


@ -18,6 +18,15 @@ WORKDIR /build/
COPY build-musl.sh /build/ COPY build-musl.sh /build/
RUN sh /build/build-musl.sh && rm -rf /build RUN sh /build/build-musl.sh && rm -rf /build
RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
dpkg -i dumb-init_*.deb && \
rm dumb-init_*.deb
ENTRYPOINT ["/usr/bin/dumb-init", "--"]
ENV SCCACHE_DIGEST=7237e38e029342fa27b7ac25412cb9d52554008b12389727320bd533fd7f05b6a96d55485f305caf95e5c8f5f97c3313e10012ccad3e752aba2518f3522ba783
RUN curl -L https://api.pub.build.mozilla.org/tooltool/sha512/$SCCACHE_DIGEST | \
tar xJf - -C /usr/local/bin --strip-components=1
ENV RUST_CONFIGURE_ARGS \ ENV RUST_CONFIGURE_ARGS \
--target=x86_64-unknown-linux-musl \ --target=x86_64-unknown-linux-musl \
--musl-root-x86_64=/musl-x86_64 --musl-root-x86_64=/musl-x86_64


@ -26,17 +26,13 @@ if [ "$NO_VENDOR" = "" ]; then
ENABLE_VENDOR=--enable-vendor ENABLE_VENDOR=--enable-vendor
fi fi
if [ "$NO_CCACHE" = "" ]; then
ENABLE_CCACHE=--enable-ccache
fi
set -ex set -ex
$SRC/configure \ $SRC/configure \
--disable-manage-submodules \ --disable-manage-submodules \
--enable-debug-assertions \ --enable-debug-assertions \
--enable-quiet-tests \ --enable-quiet-tests \
$ENABLE_CCACHE \ --enable-sccache \
$ENABLE_VENDOR \ $ENABLE_VENDOR \
$ENABLE_LLVM_ASSERTIONS \ $ENABLE_LLVM_ASSERTIONS \
$RUST_CONFIGURE_ARGS $RUST_CONFIGURE_ARGS


@ -16,18 +16,18 @@ function result.
The most common case of coercion is removing mutability from a reference: The most common case of coercion is removing mutability from a reference:
* `&mut T` to `&T` * `&mut T` to `&T`
An analogous conversion is to remove mutability from a An analogous conversion is to remove mutability from a
[raw pointer](raw-pointers.md): [raw pointer](raw-pointers.md):
* `*mut T` to `*const T` * `*mut T` to `*const T`
References can also be coerced to raw pointers: References can also be coerced to raw pointers:
* `&T` to `*const T` * `&T` to `*const T`
* `&mut T` to `*mut T` * `&mut T` to `*mut T`
Custom coercions may be defined using [`Deref`](deref-coercions.md). Custom coercions may be defined using [`Deref`](deref-coercions.md).
@ -59,11 +59,11 @@ A cast `e as U` is valid if `e` has type `T` and `T` *coerces* to `U`.
A cast `e as U` is also valid in any of the following cases: A cast `e as U` is also valid in any of the following cases:
* `e` has type `T` and `T` and `U` are any numeric types; *numeric-cast* * `e` has type `T` and `T` and `U` are any numeric types; *numeric-cast*
* `e` is a C-like enum (with no data attached to the variants), * `e` is a C-like enum (with no data attached to the variants),
and `U` is an integer type; *enum-cast* and `U` is an integer type; *enum-cast*
* `e` has type `bool` or `char` and `U` is an integer type; *prim-int-cast* * `e` has type `bool` or `char` and `U` is an integer type; *prim-int-cast*
* `e` has type `u8` and `U` is `char`; *u8-char-cast* * `e` has type `u8` and `U` is `char`; *u8-char-cast*
For example For example
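
As an illustrative sketch (not taken from the chapter itself), the cast categories listed above can be exercised like this:

```rust
// C-like enum: no data attached to its variants.
#[allow(dead_code)]
enum Direction {
    North,
    South,
}

fn main() {
    let x: i32 = 300;
    let y = x as u8;                 // numeric-cast (truncates to 44)
    let d = Direction::South as i32; // enum-cast
    let t = true as u8;              // prim-int-cast
    let c = 65u8 as char;            // u8-char-cast yields 'A'
    println!("{} {} {} {}", y, d, t, c);
}
```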


@ -460,8 +460,9 @@ not actually pass as a test.
``` ```
The `no_run` attribute will compile your code, but not run it. This is The `no_run` attribute will compile your code, but not run it. This is
important for examples such as "Here's how to start up a network service," important for examples such as "Here's how to retrieve a web page,"
which you would want to make sure compile, but might run in an infinite loop! which you would want to ensure compiles, but might be run in a test
environment that has no network access.
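
As a hedged illustration (the host name and use of `TcpStream` here are invented), the body of an example you would fence with `no_run` is typically something that compiles everywhere but should not actually execute during testing:

```rust
use std::net::TcpStream;

fn main() {
    // Compiles fine, but would fail or hang in a test environment
    // with no network access, hence the `no_run` marker.
    let _page = TcpStream::connect("www.rust-lang.org:80").unwrap();
}
```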
### Documenting modules ### Documenting modules


@ -24,41 +24,40 @@ see the website][platform-support].
[platform-support]: https://forge.rust-lang.org/platform-support.html [platform-support]: https://forge.rust-lang.org/platform-support.html
## Installing on Linux or Mac ## Installing Rust
If we're on Linux or a Mac, all we need to do is open a terminal and type this: All you need to do on Unix systems like Linux and macOS is open a
terminal and type this:
```bash ```bash
$ curl -sSf https://static.rust-lang.org/rustup.sh | sh $ curl https://sh.rustup.rs -sSf | sh
``` ```
This will download a script, and start the installation. If it all goes well, It will download a script, and start the installation. If everything
youll see this appear: goes well, youll see this appear:
```text ```text
Rust is ready to roll. Rust is installed now. Great!
``` ```
From here, press `y` for yes, and then follow the rest of the prompts. Installing on Windows is nearly as easy: download and run
[rustup-init.exe]. It will start the installation in a console and
present the above message on success.
## Installing on Windows For other installation options and information, visit the [install]
page of the Rust website.
If you're on Windows, please download the appropriate [installer][install-page]. [rustup-init.exe]: https://win.rustup.rs
[install]: https://www.rust-lang.org/install.html
[install-page]: https://www.rust-lang.org/install.html
## Uninstalling ## Uninstalling
Uninstalling Rust is as easy as installing it. On Linux or Mac, run Uninstalling Rust is as easy as installing it:
the uninstall script:
```bash ```bash
$ sudo /usr/local/lib/rustlib/uninstall.sh $ rustup self uninstall
``` ```
If we used the Windows installer, we can re-run the `.msi` and it will give us
an uninstall option.
## Troubleshooting ## Troubleshooting
If we've got Rust installed, we can open up a shell, and type this: If we've got Rust installed, we can open up a shell, and type this:
@ -71,12 +70,15 @@ You should see the version number, commit hash, and commit date.
If you do, Rust has been installed successfully! Congrats! If you do, Rust has been installed successfully! Congrats!
If you don't and you're on Windows, check that Rust is in your %PATH% system If you don't, that probably means that the `PATH` environment variable
variable: `$ echo %PATH%`. If it isn't, run the installer again, select "Change" doesn't include Cargo's binary directory, `~/.cargo/bin` on Unix, or
on the "Change, repair, or remove installation" page and ensure "Add to PATH" is `%USERPROFILE%\.cargo\bin` on Windows. This is the directory where
installed on the local hard drive. If you need to configure your path manually, Rust development tools live, and most Rust developers keep it in their
you can find the Rust executables in a directory like `PATH` environment variable, which makes it possible to run `rustc` on
`"C:\Program Files\Rust stable GNU 1.x\bin"`. the command line. Due to differences in operating systems, command
shells, and bugs in installation, you may need to restart your shell,
log out of the system, or configure `PATH` manually as appropriate for
your operating environment.
Rust does not do its own linking, and so youll need to have a linker Rust does not do its own linking, and so youll need to have a linker
installed. Doing so will depend on your specific system. For installed. Doing so will depend on your specific system. For
@ -106,9 +108,7 @@ resources include [the users forum][users] and [Stack Overflow][stackoverflow
[stackoverflow]: http://stackoverflow.com/questions/tagged/rust [stackoverflow]: http://stackoverflow.com/questions/tagged/rust
This installer also installs a copy of the documentation locally, so we can This installer also installs a copy of the documentation locally, so we can
read it offline. On UNIX systems, `/usr/local/share/doc/rust` is the location. read it offline. It's only a `rustup doc` away!
On Windows, it's in a `share/doc` directory, inside the directory to which Rust
was installed.
# Hello, world! # Hello, world!


@ -589,11 +589,10 @@ please see the [Documentation chapter](documentation.html).
# Testing and concurrency # Testing and concurrency
One thing that is important to note when writing tests is that they may be run It is important to note that tests are run concurrently using threads. For this
concurrently using threads. For this reason you should take care that your tests reason, care should be taken to ensure your tests do not depend on each-other,
are written in such a way as to not depend on each-other, or on any shared or on any shared state. "Shared state" can also include the environment, such
state. "Shared state" can also include the environment, such as the current as the current working directory, or environment variables.
working directory, or environment variables.
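
As an illustrative sketch (the environment variable name is made up), two tests that mutate the same process-wide state like this can interfere with each other when run on separate threads:

```rust
use std::env;

// Both tests write to the same process-wide environment variable,
// so either assertion can observe the other test's value.
#[test]
fn configures_mode_a() {
    env::set_var("APP_MODE", "a");
    assert_eq!(env::var("APP_MODE").unwrap(), "a");
}

#[test]
fn configures_mode_b() {
    env::set_var("APP_MODE", "b");
    assert_eq!(env::var("APP_MODE").unwrap(), "b");
}
```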
If this is an issue it is possible to control this concurrency, either by If this is an issue it is possible to control this concurrency, either by
setting the environment variable `RUST_TEST_THREADS`, or by passing the argument setting the environment variable `RUST_TEST_THREADS`, or by passing the argument


@ -1731,7 +1731,8 @@ of an item to see whether it should be allowed or not. This is where privacy
warnings are generated, or otherwise "you used a private item of another module warnings are generated, or otherwise "you used a private item of another module
and weren't allowed to." and weren't allowed to."
By default, everything in Rust is *private*, with one exception. Enum variants By default, everything in Rust is *private*, with two exceptions: Associated
items in a `pub` Trait are public by default; Enum variants
in a `pub` enum are also public by default. When an item is declared as `pub`, in a `pub` enum are also public by default. When an item is declared as `pub`,
it can be thought of as being accessible to the outside world. For example: it can be thought of as being accessible to the outside world. For example:
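
A self-contained sketch of both exceptions (module and item names invented for illustration):

```rust
mod outer {
    pub enum Signal {
        // Variants of a `pub` enum need no `pub` of their own.
        Go,
        Stop,
    }

    pub trait Greet {
        // Associated items in a `pub` trait are public as well.
        fn hello(&self) -> &'static str {
            "hello"
        }
    }

    pub struct Greeter;
    impl Greet for Greeter {}
}

use outer::Greet;

fn main() {
    match outer::Signal::Go {
        outer::Signal::Go => println!("go"),
        outer::Signal::Stop => println!("stop"),
    }
    println!("{}", outer::Greeter.hello());
}
```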


@ -44,7 +44,9 @@
font-family: 'Source Code Pro'; font-family: 'Source Code Pro';
font-style: normal; font-style: normal;
font-weight: 400; font-weight: 400;
src: local('Source Code Pro'), url("SourceCodePro-Regular.woff") format('woff'); /* Avoid using locally installed font because bad versions are in circulation:
* see https://github.com/rust-lang/rust/issues/24355 */
src: url("SourceCodePro-Regular.woff") format('woff');
} }
*:not(body) { *:not(body) {


@ -392,8 +392,6 @@ impl<T: ?Sized> Arc<T> {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(arc_counts)]
///
/// use std::sync::Arc; /// use std::sync::Arc;
/// ///
/// let five = Arc::new(5); /// let five = Arc::new(5);
@ -404,8 +402,7 @@ impl<T: ?Sized> Arc<T> {
/// assert_eq!(1, Arc::weak_count(&five)); /// assert_eq!(1, Arc::weak_count(&five));
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "arc_counts", reason = "not clearly useful, and racy", #[stable(feature = "arc_counts", since = "1.15.0")]
issue = "28356")]
pub fn weak_count(this: &Self) -> usize { pub fn weak_count(this: &Self) -> usize {
this.inner().weak.load(SeqCst) - 1 this.inner().weak.load(SeqCst) - 1
} }
@ -421,8 +418,6 @@ impl<T: ?Sized> Arc<T> {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(arc_counts)]
///
/// use std::sync::Arc; /// use std::sync::Arc;
/// ///
/// let five = Arc::new(5); /// let five = Arc::new(5);
@ -433,8 +428,7 @@ impl<T: ?Sized> Arc<T> {
/// assert_eq!(2, Arc::strong_count(&five)); /// assert_eq!(2, Arc::strong_count(&five));
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "arc_counts", reason = "not clearly useful, and racy", #[stable(feature = "arc_counts", since = "1.15.0")]
issue = "28356")]
pub fn strong_count(this: &Self) -> usize { pub fn strong_count(this: &Self) -> usize {
this.inner().strong.load(SeqCst) this.inner().strong.load(SeqCst)
} }
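
With `Arc::strong_count` and `Arc::weak_count` stabilized by this hunk, a usage sketch mirroring the doc examples above:

```rust
use std::sync::Arc;

fn main() {
    let five = Arc::new(5);
    let also_five = five.clone();
    let weak_five = Arc::downgrade(&five);

    // The counts are snapshots; with other threads holding clones
    // they can be stale by the time they are read.
    assert_eq!(Arc::strong_count(&five), 2);
    assert_eq!(Arc::weak_count(&five), 1);

    drop(weak_five);
    drop(also_five);
    assert_eq!(Arc::strong_count(&five), 1);
}
```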


@ -127,6 +127,7 @@ pub fn usable_size(size: usize, align: usize) -> usize {
pub const EMPTY: *mut () = 0x1 as *mut (); pub const EMPTY: *mut () = 0x1 as *mut ();
/// The allocator for unique pointers. /// The allocator for unique pointers.
// This function must not unwind. If it does, MIR trans will fail.
#[cfg(not(test))] #[cfg(not(test))]
#[lang = "exchange_malloc"] #[lang = "exchange_malloc"]
#[inline] #[inline]
@ -143,6 +144,7 @@ unsafe fn exchange_malloc(size: usize, align: usize) -> *mut u8 {
} }
#[cfg(not(test))] #[cfg(not(test))]
#[cfg(stage0)]
#[lang = "exchange_free"] #[lang = "exchange_free"]
#[inline] #[inline]
unsafe fn exchange_free(ptr: *mut u8, old_size: usize, align: usize) { unsafe fn exchange_free(ptr: *mut u8, old_size: usize, align: usize) {


@ -320,7 +320,7 @@ impl<T> Rc<T> {
#[inline] #[inline]
#[stable(feature = "rc_unique", since = "1.4.0")] #[stable(feature = "rc_unique", since = "1.4.0")]
pub fn try_unwrap(this: Self) -> Result<T, Self> { pub fn try_unwrap(this: Self) -> Result<T, Self> {
if Rc::would_unwrap(&this) { if Rc::strong_count(&this) == 1 {
unsafe { unsafe {
let val = ptr::read(&*this); // copy the contained object let val = ptr::read(&*this); // copy the contained object
@ -343,26 +343,10 @@ impl<T> Rc<T> {
/// ///
/// [try_unwrap]: struct.Rc.html#method.try_unwrap /// [try_unwrap]: struct.Rc.html#method.try_unwrap
/// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok /// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
///
/// # Examples
///
/// ```
/// #![feature(rc_would_unwrap)]
///
/// use std::rc::Rc;
///
/// let x = Rc::new(3);
/// assert!(Rc::would_unwrap(&x));
/// assert_eq!(Rc::try_unwrap(x), Ok(3));
///
/// let x = Rc::new(4);
/// let _y = x.clone();
/// assert!(!Rc::would_unwrap(&x));
/// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
/// ```
#[unstable(feature = "rc_would_unwrap", #[unstable(feature = "rc_would_unwrap",
reason = "just added for niche usecase", reason = "just added for niche usecase",
issue = "28356")] issue = "28356")]
#[rustc_deprecated(since = "1.15.0", reason = "too niche; use `strong_count` instead")]
pub fn would_unwrap(this: &Self) -> bool { pub fn would_unwrap(this: &Self) -> bool {
Rc::strong_count(&this) == 1 Rc::strong_count(&this) == 1
} }
@ -482,8 +466,6 @@ impl<T: ?Sized> Rc<T> {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(rc_counts)]
///
/// use std::rc::Rc; /// use std::rc::Rc;
/// ///
/// let five = Rc::new(5); /// let five = Rc::new(5);
@ -492,8 +474,7 @@ impl<T: ?Sized> Rc<T> {
/// assert_eq!(1, Rc::weak_count(&five)); /// assert_eq!(1, Rc::weak_count(&five));
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "rc_counts", reason = "not clearly useful", #[stable(feature = "rc_counts", since = "1.15.0")]
issue = "28356")]
pub fn weak_count(this: &Self) -> usize { pub fn weak_count(this: &Self) -> usize {
this.weak() - 1 this.weak() - 1
} }
@ -503,8 +484,6 @@ impl<T: ?Sized> Rc<T> {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(rc_counts)]
///
/// use std::rc::Rc; /// use std::rc::Rc;
/// ///
/// let five = Rc::new(5); /// let five = Rc::new(5);
@ -513,8 +492,7 @@ impl<T: ?Sized> Rc<T> {
/// assert_eq!(2, Rc::strong_count(&five)); /// assert_eq!(2, Rc::strong_count(&five));
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "rc_counts", reason = "not clearly useful", #[stable(feature = "rc_counts", since = "1.15.0")]
issue = "28356")]
pub fn strong_count(this: &Self) -> usize { pub fn strong_count(this: &Self) -> usize {
this.strong() this.strong()
} }
@ -523,21 +501,11 @@ impl<T: ?Sized> Rc<T> {
/// this inner value. /// this inner value.
/// ///
/// [weak]: struct.Weak.html /// [weak]: struct.Weak.html
///
/// # Examples
///
/// ```
/// #![feature(rc_counts)]
///
/// use std::rc::Rc;
///
/// let five = Rc::new(5);
///
/// assert!(Rc::is_unique(&five));
/// ```
#[inline] #[inline]
#[unstable(feature = "rc_counts", reason = "uniqueness has unclear meaning", #[unstable(feature = "is_unique", reason = "uniqueness has unclear meaning",
issue = "28356")] issue = "28356")]
#[rustc_deprecated(since = "1.15.0",
reason = "too niche; use `strong_count` and `weak_count` instead")]
pub fn is_unique(this: &Self) -> bool { pub fn is_unique(this: &Self) -> bool {
Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1 Rc::weak_count(this) == 0 && Rc::strong_count(this) == 1
} }
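
Since `would_unwrap` is deprecated in favour of `strong_count`, the doc example removed above can be reworked roughly as:

```rust
use std::rc::Rc;

fn main() {
    let x = Rc::new(3);
    // A single strong reference: unwrapping succeeds.
    assert_eq!(Rc::strong_count(&x), 1);
    assert_eq!(Rc::try_unwrap(x), Ok(3));

    let y = Rc::new(4);
    let _also_y = y.clone();
    // Two strong references: unwrapping fails and hands the Rc back.
    assert_eq!(*Rc::try_unwrap(y).unwrap_err(), 4);
}
```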


@ -36,7 +36,8 @@ fn main() {
// targets, which means we have to build the alloc_jemalloc crate // targets, which means we have to build the alloc_jemalloc crate
// for targets like emscripten, even if we don't use it. // for targets like emscripten, even if we don't use it.
if target.contains("rumprun") || target.contains("bitrig") || target.contains("openbsd") || if target.contains("rumprun") || target.contains("bitrig") || target.contains("openbsd") ||
target.contains("msvc") || target.contains("emscripten") || target.contains("fuchsia") { target.contains("msvc") || target.contains("emscripten") || target.contains("fuchsia") ||
target.contains("redox") {
println!("cargo:rustc-cfg=dummy_jemalloc"); println!("cargo:rustc-cfg=dummy_jemalloc");
return; return;
} }
@ -151,7 +152,7 @@ fn main() {
cmd.arg(format!("--build={}", build_helper::gnu_target(&host))); cmd.arg(format!("--build={}", build_helper::gnu_target(&host)));
run(&mut cmd); run(&mut cmd);
let mut make = Command::new("make"); let mut make = Command::new(build_helper::make(&host));
make.current_dir(&build_dir) make.current_dir(&build_dir)
.arg("build_lib_static"); .arg("build_lib_static");


@ -19,7 +19,7 @@
issue = "27783")] issue = "27783")]
#![feature(allocator)] #![feature(allocator)]
#![feature(staged_api)] #![feature(staged_api)]
#![cfg_attr(unix, feature(libc))] #![cfg_attr(any(unix, target_os = "redox"), feature(libc))]
// The minimum alignment guaranteed by the architecture. This value is used to // The minimum alignment guaranteed by the architecture. This value is used to
// add fast paths for low alignment values. In practice, the alignment is a // add fast paths for low alignment values. In practice, the alignment is a
@ -71,7 +71,7 @@ pub extern "C" fn __rust_usable_size(size: usize, align: usize) -> usize {
imp::usable_size(size, align) imp::usable_size(size, align)
} }
#[cfg(unix)] #[cfg(any(unix, target_os = "redox"))]
mod imp { mod imp {
extern crate libc; extern crate libc;
@ -87,7 +87,7 @@ mod imp {
} }
} }
#[cfg(target_os = "android")] #[cfg(any(target_os = "android", target_os = "redox"))]
unsafe fn aligned_malloc(size: usize, align: usize) -> *mut u8 { unsafe fn aligned_malloc(size: usize, align: usize) -> *mut u8 {
// On android we currently target API level 9 which unfortunately // On android we currently target API level 9 which unfortunately
// doesn't have the `posix_memalign` API used below. Instead we use // doesn't have the `posix_memalign` API used below. Instead we use
@ -109,7 +109,7 @@ mod imp {
libc::memalign(align as libc::size_t, size as libc::size_t) as *mut u8 libc::memalign(align as libc::size_t, size as libc::size_t) as *mut u8
} }
#[cfg(not(target_os = "android"))] #[cfg(not(any(target_os = "android", target_os = "redox")))]
unsafe fn aligned_malloc(size: usize, align: usize) -> *mut u8 { unsafe fn aligned_malloc(size: usize, align: usize) -> *mut u8 {
let mut out = ptr::null_mut(); let mut out = ptr::null_mut();
let ret = libc::posix_memalign(&mut out, align as libc::size_t, size as libc::size_t); let ret = libc::posix_memalign(&mut out, align as libc::size_t, size as libc::size_t);


@ -225,7 +225,7 @@ pub struct BinaryHeap<T> {
/// [`peek_mut()`]: struct.BinaryHeap.html#method.peek_mut /// [`peek_mut()`]: struct.BinaryHeap.html#method.peek_mut
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
pub struct PeekMut<'a, T: 'a + Ord> { pub struct PeekMut<'a, T: 'a + Ord> {
heap: &'a mut BinaryHeap<T> heap: &'a mut BinaryHeap<T>,
} }
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")] #[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
@ -385,9 +385,7 @@ impl<T: Ord> BinaryHeap<T> {
if self.is_empty() { if self.is_empty() {
None None
} else { } else {
Some(PeekMut { Some(PeekMut { heap: self })
heap: self
})
} }
} }
@ -1126,7 +1124,9 @@ impl<T: Ord> IntoIterator for BinaryHeap<T> {
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> IntoIterator for &'a BinaryHeap<T> where T: Ord { impl<'a, T> IntoIterator for &'a BinaryHeap<T>
where T: Ord
{
type Item = &'a T; type Item = &'a T;
type IntoIter = Iter<'a, T>; type IntoIter = Iter<'a, T>;
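
A hedged usage sketch of the `PeekMut` guard touched in this hunk:

```rust
use std::collections::BinaryHeap;

fn main() {
    let mut heap = BinaryHeap::from(vec![1, 5, 2]);

    // `peek_mut` returns a `PeekMut` guard; when the guard is dropped
    // the heap restores its ordering if the greatest element changed.
    if let Some(mut top) = heap.peek_mut() {
        *top = 0;
    }
    assert_eq!(heap.peek(), Some(&2));
}
```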


@ -63,7 +63,9 @@ pub trait ToOwned {
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
impl<T> ToOwned for T where T: Clone { impl<T> ToOwned for T
where T: Clone
{
type Owned = T; type Owned = T;
fn to_owned(&self) -> T { fn to_owned(&self) -> T {
self.clone() self.clone()
@ -117,17 +119,19 @@ pub enum Cow<'a, B: ?Sized + 'a>
{ {
/// Borrowed data. /// Borrowed data.
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
Borrowed(#[stable(feature = "rust1", since = "1.0.0")] &'a B), Borrowed(#[stable(feature = "rust1", since = "1.0.0")]
&'a B),
/// Owned data. /// Owned data.
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
Owned( Owned(#[stable(feature = "rust1", since = "1.0.0")]
#[stable(feature = "rust1", since = "1.0.0")] <B as ToOwned>::Owned <B as ToOwned>::Owned),
),
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> Clone for Cow<'a, B> where B: ToOwned { impl<'a, B: ?Sized> Clone for Cow<'a, B>
where B: ToOwned
{
fn clone(&self) -> Cow<'a, B> { fn clone(&self) -> Cow<'a, B> {
match *self { match *self {
Borrowed(b) => Borrowed(b), Borrowed(b) => Borrowed(b),
@ -139,7 +143,9 @@ impl<'a, B: ?Sized> Clone for Cow<'a, B> where B: ToOwned {
} }
} }
impl<'a, B: ?Sized> Cow<'a, B> where B: ToOwned { impl<'a, B: ?Sized> Cow<'a, B>
where B: ToOwned
{
/// Acquires a mutable reference to the owned form of the data. /// Acquires a mutable reference to the owned form of the data.
/// ///
/// Clones the data if it is not already owned. /// Clones the data if it is not already owned.
@ -194,7 +200,9 @@ impl<'a, B: ?Sized> Cow<'a, B> where B: ToOwned {
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> Deref for Cow<'a, B> where B: ToOwned { impl<'a, B: ?Sized> Deref for Cow<'a, B>
where B: ToOwned
{
type Target = B; type Target = B;
fn deref(&self) -> &B { fn deref(&self) -> &B {
@ -209,7 +217,9 @@ impl<'a, B: ?Sized> Deref for Cow<'a, B> where B: ToOwned {
impl<'a, B: ?Sized> Eq for Cow<'a, B> where B: Eq + ToOwned {} impl<'a, B: ?Sized> Eq for Cow<'a, B> where B: Eq + ToOwned {}
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> Ord for Cow<'a, B> where B: Ord + ToOwned { impl<'a, B: ?Sized> Ord for Cow<'a, B>
where B: Ord + ToOwned
{
#[inline] #[inline]
fn cmp(&self, other: &Cow<'a, B>) -> Ordering { fn cmp(&self, other: &Cow<'a, B>) -> Ordering {
Ord::cmp(&**self, &**other) Ord::cmp(&**self, &**other)
@ -228,7 +238,9 @@ impl<'a, 'b, B: ?Sized, C: ?Sized> PartialEq<Cow<'b, C>> for Cow<'a, B>
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> PartialOrd for Cow<'a, B> where B: PartialOrd + ToOwned { impl<'a, B: ?Sized> PartialOrd for Cow<'a, B>
where B: PartialOrd + ToOwned
{
#[inline] #[inline]
fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> { fn partial_cmp(&self, other: &Cow<'a, B>) -> Option<Ordering> {
PartialOrd::partial_cmp(&**self, &**other) PartialOrd::partial_cmp(&**self, &**other)
@ -273,7 +285,9 @@ impl<'a, B: ?Sized> Default for Cow<'a, B>
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
impl<'a, B: ?Sized> Hash for Cow<'a, B> where B: Hash + ToOwned { impl<'a, B: ?Sized> Hash for Cow<'a, B>
where B: Hash + ToOwned
{
#[inline] #[inline]
fn hash<H: Hasher>(&self, state: &mut H) { fn hash<H: Hasher>(&self, state: &mut H) {
Hash::hash(&**self, state) Hash::hash(&**self, state)
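
A short, self-contained sketch (function name invented) of the borrow-or-own behaviour these `Cow` impls support:

```rust
use std::borrow::Cow;

// Borrows the input when nothing needs to change, allocates otherwise.
fn remove_spaces(input: &str) -> Cow<str> {
    if input.contains(' ') {
        Cow::Owned(input.replace(' ', ""))
    } else {
        Cow::Borrowed(input)
    }
}

fn main() {
    // Clone, Deref, Eq, Ord and Hash from this file all apply to the result.
    assert_eq!(remove_spaces("a b c"), remove_spaces("abc"));
    assert!(matches!(remove_spaces("abc"), Cow::Borrowed(_)));
}
```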


@ -74,24 +74,44 @@ pub struct BTreeSet<T> {
map: BTreeMap<T, ()>, map: BTreeMap<T, ()>,
} }
/// An iterator over a BTreeSet's items. /// An iterator over a `BTreeSet`'s items.
///
/// This structure is created by the [`iter`] method on [`BTreeSet`].
///
/// [`BTreeSet`]: struct.BTreeSet.html
/// [`iter`]: struct.BTreeSet.html#method.iter
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> { pub struct Iter<'a, T: 'a> {
iter: Keys<'a, T, ()>, iter: Keys<'a, T, ()>,
} }
/// An owning iterator over a BTreeSet's items. /// An owning iterator over a `BTreeSet`'s items.
///
/// This structure is created by the `into_iter` method on [`BTreeSet`]
/// (provided by the `IntoIterator` trait).
///
/// [`BTreeSet`]: struct.BTreeSet.html
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<T> { pub struct IntoIter<T> {
iter: ::btree_map::IntoIter<T, ()>, iter: ::btree_map::IntoIter<T, ()>,
} }
/// An iterator over a sub-range of BTreeSet's items. /// An iterator over a sub-range of `BTreeSet`'s items.
///
/// This structure is created by the [`range`] method on [`BTreeSet`].
///
/// [`BTreeSet`]: struct.BTreeSet.html
/// [`range`]: struct.BTreeSet.html#method.range
pub struct Range<'a, T: 'a> { pub struct Range<'a, T: 'a> {
iter: ::btree_map::Range<'a, T, ()>, iter: ::btree_map::Range<'a, T, ()>,
} }
/// A lazy iterator producing elements in the set difference (in-order). /// A lazy iterator producing elements in the set difference (in-order).
///
/// This structure is created by the [`difference`] method on [`BTreeSet`].
///
/// [`BTreeSet`]: struct.BTreeSet.html
/// [`difference`]: struct.BTreeSet.html#method.difference
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct Difference<'a, T: 'a> { pub struct Difference<'a, T: 'a> {
a: Peekable<Iter<'a, T>>, a: Peekable<Iter<'a, T>>,
@ -99,6 +119,12 @@ pub struct Difference<'a, T: 'a> {
} }
/// A lazy iterator producing elements in the set symmetric difference (in-order). /// A lazy iterator producing elements in the set symmetric difference (in-order).
///
/// This structure is created by the [`symmetric_difference`] method on
/// [`BTreeSet`].
///
/// [`BTreeSet`]: struct.BTreeSet.html
/// [`symmetric_difference`]: struct.BTreeSet.html#method.symmetric_difference
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct SymmetricDifference<'a, T: 'a> { pub struct SymmetricDifference<'a, T: 'a> {
a: Peekable<Iter<'a, T>>, a: Peekable<Iter<'a, T>>,
@ -106,6 +132,11 @@ pub struct SymmetricDifference<'a, T: 'a> {
} }
/// A lazy iterator producing elements in the set intersection (in-order). /// A lazy iterator producing elements in the set intersection (in-order).
///
/// This structure is created by the [`intersection`] method on [`BTreeSet`].
///
/// [`BTreeSet`]: struct.BTreeSet.html
/// [`intersection`]: struct.BTreeSet.html#method.intersection
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct Intersection<'a, T: 'a> { pub struct Intersection<'a, T: 'a> {
a: Peekable<Iter<'a, T>>, a: Peekable<Iter<'a, T>>,
@ -113,6 +144,11 @@ pub struct Intersection<'a, T: 'a> {
} }
/// A lazy iterator producing elements in the set union (in-order). /// A lazy iterator producing elements in the set union (in-order).
///
/// This structure is created by the [`union`] method on [`BTreeSet`].
///
/// [`BTreeSet`]: struct.BTreeSet.html
/// [`union`]: struct.BTreeSet.html#method.union
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct Union<'a, T: 'a> { pub struct Union<'a, T: 'a> {
a: Peekable<Iter<'a, T>>, a: Peekable<Iter<'a, T>>,
@ -120,7 +156,7 @@ pub struct Union<'a, T: 'a> {
} }
impl<T: Ord> BTreeSet<T> { impl<T: Ord> BTreeSet<T> {
/// Makes a new BTreeSet with a reasonable choice of B. /// Makes a new `BTreeSet` with a reasonable choice of B.
/// ///
/// # Examples /// # Examples
/// ///
@ -137,21 +173,32 @@ impl<T: Ord> BTreeSet<T> {
} }
impl<T> BTreeSet<T> { impl<T> BTreeSet<T> {
/// Gets an iterator over the BTreeSet's contents. /// Gets an iterator that visits the values in the `BTreeSet` in ascending order.
/// ///
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// use std::collections::BTreeSet; /// use std::collections::BTreeSet;
/// ///
/// let set: BTreeSet<usize> = [1, 2, 3, 4].iter().cloned().collect(); /// let set: BTreeSet<usize> = [1, 2, 3].iter().cloned().collect();
/// let mut set_iter = set.iter();
/// assert_eq!(set_iter.next(), Some(&1));
/// assert_eq!(set_iter.next(), Some(&2));
/// assert_eq!(set_iter.next(), Some(&3));
/// assert_eq!(set_iter.next(), None);
/// ```
/// ///
/// for x in set.iter() { /// Values returned by the iterator are returned in ascending order:
/// println!("{}", x);
/// }
/// ///
/// let v: Vec<_> = set.iter().cloned().collect(); /// ```
/// assert_eq!(v, [1, 2, 3, 4]); /// use std::collections::BTreeSet;
///
/// let set: BTreeSet<usize> = [3, 1, 2].iter().cloned().collect();
/// let mut set_iter = set.iter();
/// assert_eq!(set_iter.next(), Some(&1));
/// assert_eq!(set_iter.next(), Some(&2));
/// assert_eq!(set_iter.next(), Some(&3));
/// assert_eq!(set_iter.next(), None);
/// ``` /// ```
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<T> { pub fn iter(&self) -> Iter<T> {
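
The lazy set-operation iterators documented above can be driven like this (a hedged sketch):

```rust
use std::collections::BTreeSet;

fn main() {
    let a: BTreeSet<_> = [1, 2, 3].iter().cloned().collect();
    let b: BTreeSet<_> = [3, 4].iter().cloned().collect();

    // Each method returns one of the lazy iterators documented above,
    // yielding values in ascending order.
    let difference: Vec<_> = a.difference(&b).cloned().collect();
    let union: Vec<_> = a.union(&b).cloned().collect();

    assert_eq!(difference, [1, 2]);
    assert_eq!(union, [1, 2, 3, 4]);
}
```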


@ -276,7 +276,8 @@ impl<E: CLike> FromIterator<E> for EnumSet<E> {
} }
} }
impl<'a, E> IntoIterator for &'a EnumSet<E> where E: CLike impl<'a, E> IntoIterator for &'a EnumSet<E>
where E: CLike
{ {
type Item = E; type Item = E;
type IntoIter = Iter<E>; type IntoIter = Iter<E>;

View file

@ -10,8 +10,15 @@
//! A doubly-linked list with owned nodes. //! A doubly-linked list with owned nodes.
//! //!
//! The `LinkedList` allows pushing and popping elements at either end and is thus //! The `LinkedList` allows pushing and popping elements at either end
//! efficiently usable as a double-ended queue. //! in constant time.
//!
//! Almost always it is better to use `Vec` or [`VecDeque`] instead of
//! [`LinkedList`]. In general, array-based containers are faster,
//! more memory efficient and make better use of CPU cache.
//!
//! [`LinkedList`]: ../linked_list/struct.LinkedList.html
//! [`VecDeque`]: ../vec_deque/struct.VecDeque.html
#![stable(feature = "rust1", since = "1.0.0")] #![stable(feature = "rust1", since = "1.0.0")]
@ -27,7 +34,14 @@ use core::ptr::{self, Shared};
use super::SpecExtend; use super::SpecExtend;
/// A doubly-linked list. /// A doubly-linked list with owned nodes.
///
/// The `LinkedList` allows pushing and popping elements at either end
/// in constant time.
///
/// Almost always it is better to use `Vec` or `VecDeque` instead of
/// `LinkedList`. In general, array-based containers are faster,
/// more memory efficient and make better use of CPU cache.
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct LinkedList<T> { pub struct LinkedList<T> {
head: Option<Shared<Node<T>>>, head: Option<Shared<Node<T>>>,
@ -225,15 +239,17 @@ impl<T> LinkedList<T> {
pub fn append(&mut self, other: &mut Self) { pub fn append(&mut self, other: &mut Self) {
match self.tail { match self.tail {
None => mem::swap(self, other), None => mem::swap(self, other),
Some(tail) => if let Some(other_head) = other.head.take() { Some(tail) => {
unsafe { if let Some(other_head) = other.head.take() {
(**tail).next = Some(other_head); unsafe {
(**other_head).prev = Some(tail); (**tail).next = Some(other_head);
} (**other_head).prev = Some(tail);
}
self.tail = other.tail.take(); self.tail = other.tail.take();
self.len += mem::replace(&mut other.len, 0); self.len += mem::replace(&mut other.len, 0);
}, }
}
} }
} }
@ -674,7 +690,10 @@ impl<T> LinkedList<T> {
reason = "method name and placement protocol are subject to change", reason = "method name and placement protocol are subject to change",
issue = "30172")] issue = "30172")]
pub fn front_place(&mut self) -> FrontPlace<T> { pub fn front_place(&mut self) -> FrontPlace<T> {
FrontPlace { list: self, node: IntermediateBox::make_place() } FrontPlace {
list: self,
node: IntermediateBox::make_place(),
}
} }
/// Returns a place for insertion at the back of the list. /// Returns a place for insertion at the back of the list.
@ -699,7 +718,10 @@ impl<T> LinkedList<T> {
reason = "method name and placement protocol are subject to change", reason = "method name and placement protocol are subject to change",
issue = "30172")] issue = "30172")]
pub fn back_place(&mut self) -> BackPlace<T> { pub fn back_place(&mut self) -> BackPlace<T> {
BackPlace { list: self, node: IntermediateBox::make_place() } BackPlace {
list: self,
node: IntermediateBox::make_place(),
}
} }
} }
@ -852,7 +874,7 @@ impl<'a, T> IterMut<'a, T> {
(**head).prev = node; (**head).prev = node;
self.list.len += 1; self.list.len += 1;
} },
} }
} }
@ -1135,9 +1157,15 @@ impl<'a, T> InPlace<T> for BackPlace<'a, T> {
// Ensure that `LinkedList` and its read-only iterators are covariant in their type parameters. // Ensure that `LinkedList` and its read-only iterators are covariant in their type parameters.
#[allow(dead_code)] #[allow(dead_code)]
fn assert_covariance() { fn assert_covariance() {
fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> { x } fn a<'a>(x: LinkedList<&'static str>) -> LinkedList<&'a str> {
fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> { x } x
fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> { x } }
fn b<'i, 'a>(x: Iter<'i, &'static str>) -> Iter<'i, &'a str> {
x
}
fn c<'a>(x: IntoIter<&'static str>) -> IntoIter<&'a str> {
x
}
} }
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
@ -1298,10 +1326,10 @@ mod tests {
fn test_send() { fn test_send() {
let n = list_from(&[1, 2, 3]); let n = list_from(&[1, 2, 3]);
thread::spawn(move || { thread::spawn(move || {
check_links(&n); check_links(&n);
let a: &[_] = &[&1, &2, &3]; let a: &[_] = &[&1, &2, &3];
assert_eq!(a, &n.iter().collect::<Vec<_>>()[..]); assert_eq!(a, &n.iter().collect::<Vec<_>>()[..]);
}) })
.join() .join()
.ok() .ok()
.unwrap(); .unwrap();
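To illustrate the constant-time push/pop at both ends and the `append` method reindented in this diff, a small usage sketch (assumed usage, not part of the commit):

```
use std::collections::LinkedList;

fn main() {
    let mut a: LinkedList<i32> = LinkedList::new();
    a.push_back(1);
    a.push_back(2);

    let mut b: LinkedList<i32> = LinkedList::new();
    b.push_back(3);
    b.push_front(0); // b is now [0, 3]

    // `append` splices all nodes of `b` onto the end of `a`, leaving `b` empty.
    a.append(&mut b);
    assert!(b.is_empty());
    assert_eq!(a.into_iter().collect::<Vec<_>>(), vec![1, 2, 0, 3]);
}
```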

View file

@ -1496,10 +1496,10 @@ unsafe fn merge<T, F>(v: &mut [T], mid: usize, buf: *mut T, compare: &mut F)
/// The algorithm identifies strictly descending and non-descending subsequences, which are called /// The algorithm identifies strictly descending and non-descending subsequences, which are called
/// natural runs. There is a stack of pending runs yet to be merged. Each newly found run is pushed /// natural runs. There is a stack of pending runs yet to be merged. Each newly found run is pushed
/// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are /// onto the stack, and then some pairs of adjacent runs are merged until these two invariants are
/// satisfied, for every `i` in `0 .. runs.len() - 2`: /// satisfied:
/// ///
/// 1. `runs[i].len > runs[i + 1].len` /// 1. for every `i` in `1..runs.len()`: `runs[i - 1].len > runs[i].len`
/// 2. `runs[i].len > runs[i + 1].len + runs[i + 2].len` /// 2. for every `i` in `2..runs.len()`: `runs[i - 2].len > runs[i - 1].len + runs[i].len`
/// ///
/// The invariants ensure that the total running time is `O(n log n)` worst-case. /// The invariants ensure that the total running time is `O(n log n)` worst-case.
fn merge_sort<T, F>(v: &mut [T], mut compare: F) fn merge_sort<T, F>(v: &mut [T], mut compare: F)
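A hedged sketch of what the two run-stack invariants stated above mean in practice, checked on run lengths only (illustrative; the real implementation tracks run start/length descriptors):

```
// Checks the two invariants from the merge_sort doc comment on a stack of run lengths.
fn invariants_hold(runs: &[usize]) -> bool {
    let strictly_decreasing = (1..runs.len()).all(|i| runs[i - 1] > runs[i]);
    let sum_bounded = (2..runs.len()).all(|i| runs[i - 2] > runs[i - 1] + runs[i]);
    strictly_decreasing && sum_bounded
}

fn main() {
    // Run lengths shrink at least as fast as Fibonacci numbers, so the stack
    // stays O(log n) deep and total work stays O(n log n).
    assert!(invariants_hold(&[13, 8, 4]));  // 13 > 8 > 4 and 13 > 8 + 4
    assert!(!invariants_hold(&[8, 5, 4]));  // 8 is not > 5 + 4, so a merge is due
}
```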

View file

@ -1697,11 +1697,7 @@ impl str {
debug_assert!('Σ'.len_utf8() == 2); debug_assert!('Σ'.len_utf8() == 2);
let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev()) && let is_word_final = case_ignoreable_then_cased(from[..i].chars().rev()) &&
!case_ignoreable_then_cased(from[i + 2..].chars()); !case_ignoreable_then_cased(from[i + 2..].chars());
to.push_str(if is_word_final { to.push_str(if is_word_final { "ς" } else { "σ" });
"ς"
} else {
"σ"
});
} }
fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool { fn case_ignoreable_then_cased<I: Iterator<Item = char>>(iter: I) -> bool {
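The branch above only fires for capital sigma. A tiny sketch of the visible effect through `str::to_lowercase`, assuming it applies the word-final rule as written here (the expected characters are spelled as escapes to keep medial and final sigma unambiguous):

```
fn main() {
    // '\u{3c3}' is σ (medial sigma), '\u{3c2}' is ς (final sigma).
    assert_eq!("ΔΣ".to_lowercase(), "δ\u{3c2}"); // word-final Σ lowercases to ς
    assert_eq!("ΣΔ".to_lowercase(), "\u{3c3}δ"); // non-final Σ lowercases to σ
}
```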

View file

@ -542,11 +542,7 @@ impl String {
unsafe { *xs.get_unchecked(i) } unsafe { *xs.get_unchecked(i) }
} }
fn safe_get(xs: &[u8], i: usize, total: usize) -> u8 { fn safe_get(xs: &[u8], i: usize, total: usize) -> u8 {
if i >= total { if i >= total { 0 } else { unsafe_get(xs, i) }
0
} else {
unsafe_get(xs, i)
}
} }
let mut res = String::with_capacity(total); let mut res = String::with_capacity(total);
@ -976,7 +972,7 @@ impl String {
pub fn push(&mut self, ch: char) { pub fn push(&mut self, ch: char) {
match ch.len_utf8() { match ch.len_utf8() {
1 => self.vec.push(ch as u8), 1 => self.vec.push(ch as u8),
_ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0;4]).as_bytes()), _ => self.vec.extend_from_slice(ch.encode_utf8(&mut [0; 4]).as_bytes()),
} }
} }
@ -1935,7 +1931,7 @@ impl<'a> FromIterator<String> for Cow<'a, str> {
#[stable(feature = "from_string_for_vec_u8", since = "1.14.0")] #[stable(feature = "from_string_for_vec_u8", since = "1.14.0")]
impl From<String> for Vec<u8> { impl From<String> for Vec<u8> {
fn from(string : String) -> Vec<u8> { fn from(string: String) -> Vec<u8> {
string.into_bytes() string.into_bytes()
} }
} }
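The `From<String> for Vec<u8>` impl whose argument spacing is fixed above can be used wherever a `From`/`Into` bound is required; a minimal sketch:

```
fn main() {
    let s = String::from("hello");

    // Equivalent to `s.into_bytes()`, but available through the
    // From/Into conversion traits.
    let bytes: Vec<u8> = Vec::from(s);
    assert_eq!(bytes, b"hello");
}
```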

View file

@ -1902,14 +1902,13 @@ impl<T> IntoIter<T> {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// # #![feature(vec_into_iter_as_slice)]
/// let vec = vec!['a', 'b', 'c']; /// let vec = vec!['a', 'b', 'c'];
/// let mut into_iter = vec.into_iter(); /// let mut into_iter = vec.into_iter();
/// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
/// let _ = into_iter.next().unwrap(); /// let _ = into_iter.next().unwrap();
/// assert_eq!(into_iter.as_slice(), &['b', 'c']); /// assert_eq!(into_iter.as_slice(), &['b', 'c']);
/// ``` /// ```
#[unstable(feature = "vec_into_iter_as_slice", issue = "35601")] #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
pub fn as_slice(&self) -> &[T] { pub fn as_slice(&self) -> &[T] {
unsafe { unsafe {
slice::from_raw_parts(self.ptr, self.len()) slice::from_raw_parts(self.ptr, self.len())
@ -1921,7 +1920,6 @@ impl<T> IntoIter<T> {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// # #![feature(vec_into_iter_as_slice)]
/// let vec = vec!['a', 'b', 'c']; /// let vec = vec!['a', 'b', 'c'];
/// let mut into_iter = vec.into_iter(); /// let mut into_iter = vec.into_iter();
/// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']); /// assert_eq!(into_iter.as_slice(), &['a', 'b', 'c']);
@ -1930,7 +1928,7 @@ impl<T> IntoIter<T> {
/// assert_eq!(into_iter.next().unwrap(), 'b'); /// assert_eq!(into_iter.next().unwrap(), 'b');
/// assert_eq!(into_iter.next().unwrap(), 'z'); /// assert_eq!(into_iter.next().unwrap(), 'z');
/// ``` /// ```
#[unstable(feature = "vec_into_iter_as_slice", issue = "35601")] #[stable(feature = "vec_into_iter_as_slice", since = "1.15.0")]
pub fn as_mut_slice(&self) -> &mut [T] { pub fn as_mut_slice(&self) -> &mut [T] {
unsafe { unsafe {
slice::from_raw_parts_mut(self.ptr as *mut T, self.len()) slice::from_raw_parts_mut(self.ptr as *mut T, self.len())

View file

@ -206,11 +206,7 @@ impl<T> VecDeque<T> {
unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) { unsafe fn wrap_copy(&self, dst: usize, src: usize, len: usize) {
#[allow(dead_code)] #[allow(dead_code)]
fn diff(a: usize, b: usize) -> usize { fn diff(a: usize, b: usize) -> usize {
if a <= b { if a <= b { b - a } else { a - b }
b - a
} else {
a - b
}
} }
debug_assert!(cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(), debug_assert!(cmp::min(diff(dst, src), self.cap() - diff(dst, src)) + len <= self.cap(),
"wrc dst={} src={} len={} cap={}", "wrc dst={} src={} len={} cap={}",
@ -552,8 +548,8 @@ impl<T> VecDeque<T> {
let old_cap = self.cap(); let old_cap = self.cap();
let used_cap = self.len() + 1; let used_cap = self.len() + 1;
let new_cap = used_cap.checked_add(additional) let new_cap = used_cap.checked_add(additional)
.and_then(|needed_cap| needed_cap.checked_next_power_of_two()) .and_then(|needed_cap| needed_cap.checked_next_power_of_two())
.expect("capacity overflow"); .expect("capacity overflow");
if new_cap > self.capacity() { if new_cap > self.capacity() {
self.buf.reserve_exact(used_cap, new_cap - used_cap); self.buf.reserve_exact(used_cap, new_cap - used_cap);
@ -1293,9 +1289,7 @@ impl<T> VecDeque<T> {
let contiguous = self.is_contiguous(); let contiguous = self.is_contiguous();
match (contiguous, match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
distance_to_tail <= distance_to_head,
idx >= self.tail) {
(true, true, _) if index == 0 => { (true, true, _) if index == 0 => {
// push_front // push_front
// //
@ -1513,9 +1507,7 @@ impl<T> VecDeque<T> {
let contiguous = self.is_contiguous(); let contiguous = self.is_contiguous();
match (contiguous, match (contiguous, distance_to_tail <= distance_to_head, idx >= self.tail) {
distance_to_tail <= distance_to_head,
idx >= self.tail) {
(true, true, _) => { (true, true, _) => {
unsafe { unsafe {
// contiguous, remove closer to tail: // contiguous, remove closer to tail:
@ -1812,7 +1804,7 @@ fn wrap_index(index: usize, size: usize) -> usize {
} }
/// Returns the two slices that cover the VecDeque's valid range /// Returns the two slices that cover the VecDeque's valid range
trait RingSlices : Sized { trait RingSlices: Sized {
fn slice(self, from: usize, to: usize) -> Self; fn slice(self, from: usize, to: usize) -> Self;
fn split_at(self, i: usize) -> (Self, Self); fn split_at(self, i: usize) -> (Self, Self);
@ -1895,7 +1887,7 @@ impl<'a, T> Iterator for Iter<'a, T> {
} }
fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
where F: FnMut(Acc, Self::Item) -> Acc, where F: FnMut(Acc, Self::Item) -> Acc
{ {
let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
accum = front.iter().fold(accum, &mut f); accum = front.iter().fold(accum, &mut f);
@ -1959,7 +1951,7 @@ impl<'a, T> Iterator for IterMut<'a, T> {
} }
fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc fn fold<Acc, F>(self, mut accum: Acc, mut f: F) -> Acc
where F: FnMut(Acc, Self::Item) -> Acc, where F: FnMut(Acc, Self::Item) -> Acc
{ {
let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail); let (front, back) = RingSlices::ring_slices(self.ring, self.head, self.tail);
accum = front.iter_mut().fold(accum, &mut f); accum = front.iter_mut().fold(accum, &mut f);
@ -2082,17 +2074,15 @@ impl<'a, T: 'a> Drop for Drain<'a, T> {
(_, 0) => { (_, 0) => {
source_deque.head = drain_tail; source_deque.head = drain_tail;
} }
_ => { _ => unsafe {
unsafe { if tail_len <= head_len {
if tail_len <= head_len { source_deque.tail = source_deque.wrap_sub(drain_head, tail_len);
source_deque.tail = source_deque.wrap_sub(drain_head, tail_len); source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len);
source_deque.wrap_copy(source_deque.tail, orig_tail, tail_len); } else {
} else { source_deque.head = source_deque.wrap_add(drain_tail, head_len);
source_deque.head = source_deque.wrap_add(drain_tail, head_len); source_deque.wrap_copy(drain_tail, drain_head, head_len);
source_deque.wrap_copy(drain_tail, drain_head, head_len);
}
} }
} },
} }
} }
} }
@ -2288,10 +2278,8 @@ impl<T> From<Vec<T>> for VecDeque<T> {
// We need to extend the buf if it's not a power of two, too small // We need to extend the buf if it's not a power of two, too small
// or doesn't have at least one free space // or doesn't have at least one free space
if !buf.cap().is_power_of_two() if !buf.cap().is_power_of_two() || (buf.cap() < (MINIMUM_CAPACITY + 1)) ||
|| (buf.cap() < (MINIMUM_CAPACITY + 1)) (buf.cap() == len) {
|| (buf.cap() == len)
{
let cap = cmp::max(buf.cap() + 1, MINIMUM_CAPACITY + 1).next_power_of_two(); let cap = cmp::max(buf.cap() + 1, MINIMUM_CAPACITY + 1).next_power_of_two();
buf.reserve_exact(len, cap - len); buf.reserve_exact(len, cap - len);
} }
@ -2299,7 +2287,7 @@ impl<T> From<Vec<T>> for VecDeque<T> {
VecDeque { VecDeque {
tail: 0, tail: 0,
head: len, head: len,
buf: buf buf: buf,
} }
} }
} }
@ -2324,18 +2312,17 @@ impl<T> From<VecDeque<T>> for Vec<T> {
// do this in at most three copy moves. // do this in at most three copy moves.
if (cap - tail) > head { if (cap - tail) > head {
// right hand block is the long one; move that enough for the left // right hand block is the long one; move that enough for the left
ptr::copy( ptr::copy(buf.offset(tail as isize),
buf.offset(tail as isize), buf.offset((tail - head) as isize),
buf.offset((tail - head) as isize), cap - tail);
cap - tail);
// copy left in the end // copy left in the end
ptr::copy(buf, buf.offset((cap - head) as isize), head); ptr::copy(buf, buf.offset((cap - head) as isize), head);
// shift the new thing to the start // shift the new thing to the start
ptr::copy(buf.offset((tail-head) as isize), buf, len); ptr::copy(buf.offset((tail - head) as isize), buf, len);
} else { } else {
// left hand block is the long one, we can do it in two! // left hand block is the long one, we can do it in two!
ptr::copy(buf, buf.offset((cap-tail) as isize), head); ptr::copy(buf, buf.offset((cap - tail) as isize), head);
ptr::copy(buf.offset(tail as isize), buf, cap-tail); ptr::copy(buf.offset(tail as isize), buf, cap - tail);
} }
} else { } else {
// Need to use N swaps to move the ring // Need to use N swaps to move the ring
@ -2576,8 +2563,8 @@ mod tests {
// We should see the correct values in the VecDeque // We should see the correct values in the VecDeque
let expected: VecDeque<_> = (0..drain_start) let expected: VecDeque<_> = (0..drain_start)
.chain(drain_end..len) .chain(drain_end..len)
.collect(); .collect();
assert_eq!(expected, tester); assert_eq!(expected, tester);
} }
} }
@ -2693,19 +2680,19 @@ mod tests {
let cap = (2i32.pow(cap_pwr) - 1) as usize; let cap = (2i32.pow(cap_pwr) - 1) as usize;
// In these cases there is enough free space to solve it with copies // In these cases there is enough free space to solve it with copies
for len in 0..((cap+1)/2) { for len in 0..((cap + 1) / 2) {
// Test contiguous cases // Test contiguous cases
for offset in 0..(cap-len) { for offset in 0..(cap - len) {
create_vec_and_test_convert(cap, offset, len) create_vec_and_test_convert(cap, offset, len)
} }
// Test cases where block at end of buffer is bigger than block at start // Test cases where block at end of buffer is bigger than block at start
for offset in (cap-len)..(cap-(len/2)) { for offset in (cap - len)..(cap - (len / 2)) {
create_vec_and_test_convert(cap, offset, len) create_vec_and_test_convert(cap, offset, len)
} }
// Test cases where block at start of buffer is bigger than block at end // Test cases where block at start of buffer is bigger than block at end
for offset in (cap-(len/2))..cap { for offset in (cap - (len / 2))..cap {
create_vec_and_test_convert(cap, offset, len) create_vec_and_test_convert(cap, offset, len)
} }
} }
@ -2714,19 +2701,19 @@ mod tests {
// the ring will use swapping when: // the ring will use swapping when:
// (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len)) // (cap + 1 - offset) > (cap + 1 - len) && (len - (cap + 1 - offset)) > (cap + 1 - len))
// right block size > free space && left block size > free space // right block size > free space && left block size > free space
for len in ((cap+1)/2)..cap { for len in ((cap + 1) / 2)..cap {
// Test contiguous cases // Test contiguous cases
for offset in 0..(cap-len) { for offset in 0..(cap - len) {
create_vec_and_test_convert(cap, offset, len) create_vec_and_test_convert(cap, offset, len)
} }
// Test cases where block at end of buffer is bigger than block at start // Test cases where block at end of buffer is bigger than block at start
for offset in (cap-len)..(cap-(len/2)) { for offset in (cap - len)..(cap - (len / 2)) {
create_vec_and_test_convert(cap, offset, len) create_vec_and_test_convert(cap, offset, len)
} }
// Test cases where block at start of buffer is bigger than block at end // Test cases where block at start of buffer is bigger than block at end
for offset in (cap-(len/2))..cap { for offset in (cap - (len / 2))..cap {
create_vec_and_test_convert(cap, offset, len) create_vec_and_test_convert(cap, offset, len)
} }
} }
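The `Vec<T>` ↔ `VecDeque<T>` conversions reformatted above can be driven directly; a small sketch of the round trip (assuming the stable `From` impls shown in this file):

```
use std::collections::VecDeque;

fn main() {
    // Vec -> VecDeque reuses the allocation, growing it only when the
    // capacity is not a suitable power of two (as the code above checks).
    let mut deque: VecDeque<i32> = VecDeque::from(vec![1, 2, 3]);
    deque.push_front(0);

    // VecDeque -> Vec rotates the ring buffer back into a contiguous slice,
    // using the copy/swap strategies shown above.
    let v: Vec<i32> = Vec::from(deque);
    assert_eq!(v, [0, 1, 2, 3]);
}
```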

View file

@ -29,7 +29,6 @@
#![feature(test)] #![feature(test)]
#![feature(unboxed_closures)] #![feature(unboxed_closures)]
#![feature(unicode)] #![feature(unicode)]
#![feature(vec_into_iter_as_slice)]
extern crate collections; extern crate collections;
extern crate test; extern crate test;

View file

@ -242,7 +242,7 @@ fn main() {
"atomic_thread_fence.c"]); "atomic_thread_fence.c"]);
} }
if !target.contains("windows") { if !target.contains("redox") && !target.contains("windows") {
sources.extend(&["emutls.c"]); sources.extend(&["emutls.c"]);
} }

View file

@ -393,6 +393,8 @@ pub struct RefCell<T: ?Sized> {
/// An enumeration of values returned from the `state` method on a `RefCell<T>`. /// An enumeration of values returned from the `state` method on a `RefCell<T>`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)] #[derive(Copy, Clone, PartialEq, Eq, Debug)]
#[unstable(feature = "borrow_state", issue = "27733")] #[unstable(feature = "borrow_state", issue = "27733")]
#[rustc_deprecated(since = "1.15.0", reason = "use `try_borrow` instead")]
#[allow(deprecated)]
pub enum BorrowState { pub enum BorrowState {
/// The cell is currently being read, there is at least one active `borrow`. /// The cell is currently being read, there is at least one active `borrow`.
Reading, Reading,
@ -511,6 +513,8 @@ impl<T: ?Sized> RefCell<T> {
/// } /// }
/// ``` /// ```
#[unstable(feature = "borrow_state", issue = "27733")] #[unstable(feature = "borrow_state", issue = "27733")]
#[rustc_deprecated(since = "1.15.0", reason = "use `try_borrow` instead")]
#[allow(deprecated)]
#[inline] #[inline]
pub fn borrow_state(&self) -> BorrowState { pub fn borrow_state(&self) -> BorrowState {
match self.borrow.get() { match self.borrow.get() {
@ -888,9 +892,7 @@ impl<'b, T: ?Sized> Ref<'b, T> {
/// `Ref::clone(...)`. A `Clone` implementation or a method would interfere /// `Ref::clone(...)`. A `Clone` implementation or a method would interfere
/// with the widespread use of `r.borrow().clone()` to clone the contents of /// with the widespread use of `r.borrow().clone()` to clone the contents of
/// a `RefCell`. /// a `RefCell`.
#[unstable(feature = "cell_extras", #[stable(feature = "cell_extras", since = "1.15.0")]
reason = "likely to be moved to a method, pending language changes",
issue = "27746")]
#[inline] #[inline]
pub fn clone(orig: &Ref<'b, T>) -> Ref<'b, T> { pub fn clone(orig: &Ref<'b, T>) -> Ref<'b, T> {
Ref { Ref {
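Since `Ref::clone` is stabilized here, a quick sketch of the associated-function call style its doc comment asks for:

```
use std::cell::{Ref, RefCell};

fn main() {
    let cell = RefCell::new(5);
    let a = cell.borrow();

    // Called as `Ref::clone(&a)`, not `a.clone()`, so that `.clone()` on a
    // borrow keeps meaning "clone the contents of the RefCell".
    let b = Ref::clone(&a);
    assert_eq!(*a + *b, 10);
}
```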

View file

@ -327,9 +327,9 @@ pub trait CharExt {
fn len_utf8(self) -> usize; fn len_utf8(self) -> usize;
#[stable(feature = "core", since = "1.6.0")] #[stable(feature = "core", since = "1.6.0")]
fn len_utf16(self) -> usize; fn len_utf16(self) -> usize;
#[unstable(feature = "unicode", issue = "27784")] #[stable(feature = "unicode_encode_char", since = "1.15.0")]
fn encode_utf8(self, dst: &mut [u8]) -> &mut str; fn encode_utf8(self, dst: &mut [u8]) -> &mut str;
#[unstable(feature = "unicode", issue = "27784")] #[stable(feature = "unicode_encode_char", since = "1.15.0")]
fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16]; fn encode_utf16(self, dst: &mut [u16]) -> &mut [u16];
} }
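With `encode_utf8` and `encode_utf16` stabilized as `unicode_encode_char`, a minimal usage sketch (buffer sizes are the caller's responsibility; on toolchains without this stabilization the `unicode` feature gate would still be needed):

```
fn main() {
    // The destination buffer must be at least len_utf8() / len_utf16() long.
    let mut utf8 = [0u8; 4];
    let s = 'ß'.encode_utf8(&mut utf8);
    assert_eq!(s, "ß");
    assert_eq!(s.len(), 2);

    let mut utf16 = [0u16; 2];
    let units = 'ß'.encode_utf16(&mut utf16);
    assert_eq!(units.len(), 1);
}
```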

View file

@ -12,7 +12,7 @@
#![stable(feature = "rust1", since = "1.0.0")] #![stable(feature = "rust1", since = "1.0.0")]
use cell::{UnsafeCell, Cell, RefCell, Ref, RefMut, BorrowState}; use cell::{UnsafeCell, Cell, RefCell, Ref, RefMut};
use marker::PhantomData; use marker::PhantomData;
use mem; use mem;
use num::flt2dec; use num::flt2dec;
@ -1634,13 +1634,13 @@ impl<T: Copy + Debug> Debug for Cell<T> {
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized + Debug> Debug for RefCell<T> { impl<T: ?Sized + Debug> Debug for RefCell<T> {
fn fmt(&self, f: &mut Formatter) -> Result { fn fmt(&self, f: &mut Formatter) -> Result {
match self.borrow_state() { match self.try_borrow() {
BorrowState::Unused | BorrowState::Reading => { Ok(borrow) => {
f.debug_struct("RefCell") f.debug_struct("RefCell")
.field("value", &self.borrow()) .field("value", &borrow)
.finish() .finish()
} }
BorrowState::Writing => { Err(_) => {
f.debug_struct("RefCell") f.debug_struct("RefCell")
.field("value", &"<borrowed>") .field("value", &"<borrowed>")
.finish() .finish()
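With the `Debug` impl now routed through `try_borrow`, formatting a mutably borrowed `RefCell` prints the placeholder instead of panicking; a small sketch of the observable behaviour:

```
use std::cell::RefCell;

fn main() {
    let cell = RefCell::new(5);
    assert_eq!(format!("{:?}", cell), "RefCell { value: 5 }");

    let _guard = cell.borrow_mut();
    // While mutably borrowed the value cannot be read, so the "<borrowed>"
    // placeholder from the Err branch above is printed instead.
    let shown = format!("{:?}", cell);
    assert!(shown.contains("borrowed"));
}
```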

View file

@ -255,10 +255,44 @@ pub trait BuildHasher {
fn build_hasher(&self) -> Self::Hasher; fn build_hasher(&self) -> Self::Hasher;
} }
/// A structure which implements `BuildHasher` for all `Hasher` types which also /// The `BuildHasherDefault` structure is used in scenarios where one has a
/// implement `Default`. /// type that implements [`Hasher`] and [`Default`], but needs that type to
/// implement [`BuildHasher`].
/// ///
/// This struct is 0-sized and does not need construction. /// This structure is zero-sized and does not need construction.
///
/// # Examples
///
/// Using `BuildHasherDefault` to specify a custom [`BuildHasher`] for
/// [`HashMap`]:
///
/// ```
/// use std::collections::HashMap;
/// use std::hash::{BuildHasherDefault, Hasher};
///
/// #[derive(Default)]
/// struct MyHasher;
///
/// impl Hasher for MyHasher {
/// fn write(&mut self, bytes: &[u8]) {
/// // Your hashing algorithm goes here!
/// unimplemented!()
/// }
///
/// fn finish(&self) -> u64 {
/// // Your hashing algorithm goes here!
/// unimplemented!()
/// }
/// }
///
/// type MyBuildHasher = BuildHasherDefault<MyHasher>;
///
/// let hash_map = HashMap::<u32, u32, MyBuildHasher>::default();
/// ```
///
/// [`BuildHasher`]: trait.BuildHasher.html
/// [`Default`]: ../default/trait.Default.html
/// [`Hasher`]: trait.Hasher.html
#[stable(since = "1.7.0", feature = "build_hasher")] #[stable(since = "1.7.0", feature = "build_hasher")]
pub struct BuildHasherDefault<H>(marker::PhantomData<H>); pub struct BuildHasherDefault<H>(marker::PhantomData<H>);

View file

@ -1696,12 +1696,11 @@ pub trait Iterator {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(iter_max_by)]
/// let a = [-3_i32, 0, 1, 5, -10]; /// let a = [-3_i32, 0, 1, 5, -10];
/// assert_eq!(*a.iter().max_by(|x, y| x.cmp(y)).unwrap(), 5); /// assert_eq!(*a.iter().max_by(|x, y| x.cmp(y)).unwrap(), 5);
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "iter_max_by", issue="36105")] #[stable(feature = "iter_max_by", since = "1.15.0")]
fn max_by<F>(self, mut compare: F) -> Option<Self::Item> fn max_by<F>(self, mut compare: F) -> Option<Self::Item>
where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering, where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering,
{ {
@ -1746,12 +1745,11 @@ pub trait Iterator {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(iter_min_by)]
/// let a = [-3_i32, 0, 1, 5, -10]; /// let a = [-3_i32, 0, 1, 5, -10];
/// assert_eq!(*a.iter().min_by(|x, y| x.cmp(y)).unwrap(), -10); /// assert_eq!(*a.iter().min_by(|x, y| x.cmp(y)).unwrap(), -10);
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "iter_min_by", issue="36105")] #[stable(feature = "iter_min_by", since = "1.15.0")]
fn min_by<F>(self, mut compare: F) -> Option<Self::Item> fn min_by<F>(self, mut compare: F) -> Option<Self::Item>
where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering, where Self: Sized, F: FnMut(&Self::Item, &Self::Item) -> Ordering,
{ {

View file

@ -117,6 +117,8 @@ pub unsafe fn replace<T>(dest: *mut T, mut src: T) -> T {
/// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use /// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use
/// because it will attempt to drop the value previously at `*src`. /// because it will attempt to drop the value previously at `*src`.
/// ///
/// The pointer must be aligned; use `read_unaligned` if that is not the case.
///
/// # Examples /// # Examples
/// ///
/// Basic usage: /// Basic usage:
@ -137,6 +139,44 @@ pub unsafe fn read<T>(src: *const T) -> T {
tmp tmp
} }
/// Reads the value from `src` without moving it. This leaves the
/// memory in `src` unchanged.
///
/// Unlike `read`, the pointer may be unaligned.
///
/// # Safety
///
/// Beyond accepting a raw pointer, this is unsafe because it semantically
/// moves the value out of `src` without preventing further usage of `src`.
/// If `T` is not `Copy`, then care must be taken to ensure that the value at
/// `src` is not used before the data is overwritten again (e.g. with `write`,
/// `zero_memory`, or `copy_memory`). Note that `*src = foo` counts as a use
/// because it will attempt to drop the value previously at `*src`.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// #![feature(ptr_unaligned)]
///
/// let x = 12;
/// let y = &x as *const i32;
///
/// unsafe {
/// assert_eq!(std::ptr::read_unaligned(y), 12);
/// }
/// ```
#[inline(always)]
#[unstable(feature = "ptr_unaligned", issue = "37955")]
pub unsafe fn read_unaligned<T>(src: *const T) -> T {
let mut tmp: T = mem::uninitialized();
copy_nonoverlapping(src as *const u8,
&mut tmp as *mut T as *mut u8,
mem::size_of::<T>());
tmp
}
/// Overwrites a memory location with the given value without reading or /// Overwrites a memory location with the given value without reading or
/// dropping the old value. /// dropping the old value.
/// ///
@ -151,6 +191,8 @@ pub unsafe fn read<T>(src: *const T) -> T {
/// This is appropriate for initializing uninitialized memory, or overwriting /// This is appropriate for initializing uninitialized memory, or overwriting
/// memory that has previously been `read` from. /// memory that has previously been `read` from.
/// ///
/// The pointer must be aligned; use `write_unaligned` if that is not the case.
///
/// # Examples /// # Examples
/// ///
/// Basic usage: /// Basic usage:
@ -171,6 +213,47 @@ pub unsafe fn write<T>(dst: *mut T, src: T) {
intrinsics::move_val_init(&mut *dst, src) intrinsics::move_val_init(&mut *dst, src)
} }
/// Overwrites a memory location with the given value without reading or
/// dropping the old value.
///
/// Unlike `write`, the pointer may be unaligned.
///
/// # Safety
///
/// This operation is marked unsafe because it accepts a raw pointer.
///
/// It does not drop the contents of `dst`. This is safe, but it could leak
/// allocations or resources, so care must be taken not to overwrite an object
/// that should be dropped.
///
/// This is appropriate for initializing uninitialized memory, or overwriting
/// memory that has previously been `read` from.
///
/// # Examples
///
/// Basic usage:
///
/// ```
/// #![feature(ptr_unaligned)]
///
/// let mut x = 0;
/// let y = &mut x as *mut i32;
/// let z = 12;
///
/// unsafe {
/// std::ptr::write_unaligned(y, z);
/// assert_eq!(std::ptr::read_unaligned(y), 12);
/// }
/// ```
#[inline]
#[unstable(feature = "ptr_unaligned", issue = "37955")]
pub unsafe fn write_unaligned<T>(dst: *mut T, src: T) {
copy_nonoverlapping(&src as *const T as *const u8,
dst as *mut u8,
mem::size_of::<T>());
mem::forget(src);
}
/// Performs a volatile read of the value from `src` without moving it. This /// Performs a volatile read of the value from `src` without moving it. This
/// leaves the memory in `src` unchanged. /// leaves the memory in `src` unchanged.
/// ///
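A sketch of the motivating case for `read_unaligned`/`write_unaligned`: data living at an arbitrary byte offset is not suitably aligned, so the aligned `read`/`write` would be undefined behaviour there. This assumes a toolchain where the functions are available; on the nightly this diff targets the `ptr_unaligned` feature gate shown above is still required.

```
use std::ptr;

fn main() {
    // A u32 stored at offset 1 inside a byte buffer, so the pointer to it is
    // not 4-byte aligned.
    let mut buf = [0u8; 8];
    let unaligned = unsafe { buf.as_mut_ptr().offset(1) as *mut u32 };

    unsafe {
        // The aligned `read`/`write` would be UB here; the unaligned variants
        // copy byte-by-byte via `copy_nonoverlapping`, as shown above.
        ptr::write_unaligned(unaligned, 0xDEAD_BEEF);
        assert_eq!(ptr::read_unaligned(unaligned), 0xDEAD_BEEF);
    }
}
```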

View file

@ -501,6 +501,8 @@ impl<T, E> Result<T, E> {
/// Returns an iterator over the possibly contained value. /// Returns an iterator over the possibly contained value.
/// ///
/// The iterator yields one value if the result is [`Ok`], otherwise none.
///
/// # Examples /// # Examples
/// ///
/// Basic usage: /// Basic usage:
@ -512,6 +514,8 @@ impl<T, E> Result<T, E> {
/// let x: Result<u32, &str> = Err("nothing!"); /// let x: Result<u32, &str> = Err("nothing!");
/// assert_eq!(x.iter().next(), None); /// assert_eq!(x.iter().next(), None);
/// ``` /// ```
///
/// [`Ok`]: enum.Result.html#variant.Ok
#[inline] #[inline]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub fn iter(&self) -> Iter<T> { pub fn iter(&self) -> Iter<T> {
@ -520,6 +524,8 @@ impl<T, E> Result<T, E> {
/// Returns a mutable iterator over the possibly contained value. /// Returns a mutable iterator over the possibly contained value.
/// ///
/// The iterator yields one value if the result is [`Ok`], otherwise none.
///
/// # Examples /// # Examples
/// ///
/// Basic usage: /// Basic usage:
@ -535,6 +541,8 @@ impl<T, E> Result<T, E> {
/// let mut x: Result<u32, &str> = Err("nothing!"); /// let mut x: Result<u32, &str> = Err("nothing!");
/// assert_eq!(x.iter_mut().next(), None); /// assert_eq!(x.iter_mut().next(), None);
/// ``` /// ```
///
/// [`Ok`]: enum.Result.html#variant.Ok
#[inline] #[inline]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub fn iter_mut(&mut self) -> IterMut<T> { pub fn iter_mut(&mut self) -> IterMut<T> {
@ -848,6 +856,8 @@ impl<T, E> IntoIterator for Result<T, E> {
/// Returns a consuming iterator over the possibly contained value. /// Returns a consuming iterator over the possibly contained value.
/// ///
/// The iterator yields one value if the result is [`Ok`], otherwise none.
///
/// # Examples /// # Examples
/// ///
/// Basic usage: /// Basic usage:
@ -861,6 +871,8 @@ impl<T, E> IntoIterator for Result<T, E> {
/// let v: Vec<u32> = x.into_iter().collect(); /// let v: Vec<u32> = x.into_iter().collect();
/// assert_eq!(v, []); /// assert_eq!(v, []);
/// ``` /// ```
///
/// [`Ok`]: enum.Result.html#variant.Ok
#[inline] #[inline]
fn into_iter(self) -> IntoIter<T> { fn into_iter(self) -> IntoIter<T> {
IntoIter { inner: self.ok() } IntoIter { inner: self.ok() }
@ -893,8 +905,13 @@ impl<'a, T, E> IntoIterator for &'a mut Result<T, E> {
/// An iterator over a reference to the [`Ok`] variant of a [`Result`]. /// An iterator over a reference to the [`Ok`] variant of a [`Result`].
/// ///
/// The iterator yields one value if the result is [`Ok`], otherwise none.
///
/// Created by [`Result::iter`].
///
/// [`Ok`]: enum.Result.html#variant.Ok /// [`Ok`]: enum.Result.html#variant.Ok
/// [`Result`]: enum.Result.html /// [`Result`]: enum.Result.html
/// [`Result::iter`]: enum.Result.html#method.iter
#[derive(Debug)] #[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> { inner: Option<&'a T> } pub struct Iter<'a, T: 'a> { inner: Option<&'a T> }
@ -934,8 +951,11 @@ impl<'a, T> Clone for Iter<'a, T> {
/// An iterator over a mutable reference to the [`Ok`] variant of a [`Result`]. /// An iterator over a mutable reference to the [`Ok`] variant of a [`Result`].
/// ///
/// Created by [`Result::iter_mut`].
///
/// [`Ok`]: enum.Result.html#variant.Ok /// [`Ok`]: enum.Result.html#variant.Ok
/// [`Result`]: enum.Result.html /// [`Result`]: enum.Result.html
/// [`Result::iter_mut`]: enum.Result.html#method.iter_mut
#[derive(Debug)] #[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")] #[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, T: 'a> { inner: Option<&'a mut T> } pub struct IterMut<'a, T: 'a> { inner: Option<&'a mut T> }
@ -968,9 +988,12 @@ impl<'a, T> FusedIterator for IterMut<'a, T> {}
#[unstable(feature = "trusted_len", issue = "37572")] #[unstable(feature = "trusted_len", issue = "37572")]
unsafe impl<'a, A> TrustedLen for IterMut<'a, A> {} unsafe impl<'a, A> TrustedLen for IterMut<'a, A> {}
/// An iterator over the value in a [`Ok`] variant of a [`Result`]. This struct is /// An iterator over the value in a [`Ok`] variant of a [`Result`].
/// created by the [`into_iter`] method on [`Result`][`Result`] (provided by ///
/// the [`IntoIterator`] trait). /// The iterator yields one value if the result is [`Ok`], otherwise none.
///
/// This struct is created by the [`into_iter`] method on
/// [`Result`][`Result`] (provided by the [`IntoIterator`] trait).
/// ///
/// [`Ok`]: enum.Result.html#variant.Ok /// [`Ok`]: enum.Result.html#variant.Ok
/// [`Result`]: enum.Result.html /// [`Result`]: enum.Result.html

View file

@ -33,6 +33,7 @@
// * The `raw` and `bytes` submodules. // * The `raw` and `bytes` submodules.
// * Boilerplate trait implementations. // * Boilerplate trait implementations.
use borrow::Borrow;
use cmp::Ordering::{self, Less, Equal, Greater}; use cmp::Ordering::{self, Less, Equal, Greater};
use cmp; use cmp;
use fmt; use fmt;
@ -100,15 +101,17 @@ pub trait SliceExt {
#[stable(feature = "core", since = "1.6.0")] #[stable(feature = "core", since = "1.6.0")]
fn as_ptr(&self) -> *const Self::Item; fn as_ptr(&self) -> *const Self::Item;
#[stable(feature = "core", since = "1.6.0")] #[stable(feature = "core", since = "1.6.0")]
fn binary_search(&self, x: &Self::Item) -> Result<usize, usize> fn binary_search<Q: ?Sized>(&self, x: &Q) -> Result<usize, usize>
where Self::Item: Ord; where Self::Item: Borrow<Q>,
Q: Ord;
#[stable(feature = "core", since = "1.6.0")] #[stable(feature = "core", since = "1.6.0")]
fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize> fn binary_search_by<'a, F>(&'a self, f: F) -> Result<usize, usize>
where F: FnMut(&'a Self::Item) -> Ordering; where F: FnMut(&'a Self::Item) -> Ordering;
#[stable(feature = "slice_binary_search_by_key", since = "1.10.0")] #[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
fn binary_search_by_key<'a, B, F>(&'a self, b: &B, f: F) -> Result<usize, usize> fn binary_search_by_key<'a, B, F, Q: ?Sized>(&'a self, b: &Q, f: F) -> Result<usize, usize>
where F: FnMut(&'a Self::Item) -> B, where F: FnMut(&'a Self::Item) -> B,
B: Ord; B: Borrow<Q>,
Q: Ord;
#[stable(feature = "core", since = "1.6.0")] #[stable(feature = "core", since = "1.6.0")]
fn len(&self) -> usize; fn len(&self) -> usize;
#[stable(feature = "core", since = "1.6.0")] #[stable(feature = "core", since = "1.6.0")]
@ -493,8 +496,8 @@ impl<T> SliceExt for [T] {
m >= n && needle == &self[m-n..] m >= n && needle == &self[m-n..]
} }
fn binary_search(&self, x: &T) -> Result<usize, usize> where T: Ord { fn binary_search<Q: ?Sized>(&self, x: &Q) -> Result<usize, usize> where T: Borrow<Q>, Q: Ord {
self.binary_search_by(|p| p.cmp(x)) self.binary_search_by(|p| p.borrow().cmp(x))
} }
#[inline] #[inline]
@ -522,11 +525,12 @@ impl<T> SliceExt for [T] {
} }
#[inline] #[inline]
fn binary_search_by_key<'a, B, F>(&'a self, b: &B, mut f: F) -> Result<usize, usize> fn binary_search_by_key<'a, B, F, Q: ?Sized>(&'a self, b: &Q, mut f: F) -> Result<usize, usize>
where F: FnMut(&'a Self::Item) -> B, where F: FnMut(&'a Self::Item) -> B,
B: Ord B: Borrow<Q>,
Q: Ord
{ {
self.binary_search_by(|k| f(k).cmp(b)) self.binary_search_by(|k| f(k).borrow().cmp(b))
} }
} }
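The `Borrow`-based bounds above are meant to let a slice be searched with a borrowed form of its element type. A sketch of the call pattern these signatures are intended to allow (this relies on the relaxed bounds in the diff; on toolchains without this change the probe must be a `&String`):

```
fn main() {
    // With `T: Borrow<Q>`, a sorted Vec<String> can be probed with a &str
    // instead of allocating a String just for the search key.
    let names: Vec<String> = vec!["alpha".to_string(),
                                  "beta".to_string(),
                                  "gamma".to_string()];

    assert_eq!(names.binary_search("beta"), Ok(1));
    assert_eq!(names.binary_search("delta"), Err(2));
}
```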

View file

@ -203,7 +203,6 @@ impl AtomicBool {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(atomic_access)]
/// use std::sync::atomic::{AtomicBool, Ordering}; /// use std::sync::atomic::{AtomicBool, Ordering};
/// ///
/// let mut some_bool = AtomicBool::new(true); /// let mut some_bool = AtomicBool::new(true);
@ -212,7 +211,7 @@ impl AtomicBool {
/// assert_eq!(some_bool.load(Ordering::SeqCst), false); /// assert_eq!(some_bool.load(Ordering::SeqCst), false);
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "atomic_access", issue = "35603")] #[stable(feature = "atomic_access", since = "1.15.0")]
pub fn get_mut(&mut self) -> &mut bool { pub fn get_mut(&mut self) -> &mut bool {
unsafe { &mut *(self.v.get() as *mut bool) } unsafe { &mut *(self.v.get() as *mut bool) }
} }
@ -225,14 +224,13 @@ impl AtomicBool {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(atomic_access)]
/// use std::sync::atomic::AtomicBool; /// use std::sync::atomic::AtomicBool;
/// ///
/// let some_bool = AtomicBool::new(true); /// let some_bool = AtomicBool::new(true);
/// assert_eq!(some_bool.into_inner(), true); /// assert_eq!(some_bool.into_inner(), true);
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "atomic_access", issue = "35603")] #[stable(feature = "atomic_access", since = "1.15.0")]
pub fn into_inner(self) -> bool { pub fn into_inner(self) -> bool {
unsafe { self.v.into_inner() != 0 } unsafe { self.v.into_inner() != 0 }
} }
@ -588,7 +586,6 @@ impl<T> AtomicPtr<T> {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(atomic_access)]
/// use std::sync::atomic::{AtomicPtr, Ordering}; /// use std::sync::atomic::{AtomicPtr, Ordering};
/// ///
/// let mut atomic_ptr = AtomicPtr::new(&mut 10); /// let mut atomic_ptr = AtomicPtr::new(&mut 10);
@ -596,7 +593,7 @@ impl<T> AtomicPtr<T> {
/// assert_eq!(unsafe { *atomic_ptr.load(Ordering::SeqCst) }, 5); /// assert_eq!(unsafe { *atomic_ptr.load(Ordering::SeqCst) }, 5);
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "atomic_access", issue = "35603")] #[stable(feature = "atomic_access", since = "1.15.0")]
pub fn get_mut(&mut self) -> &mut *mut T { pub fn get_mut(&mut self) -> &mut *mut T {
unsafe { &mut *self.p.get() } unsafe { &mut *self.p.get() }
} }
@ -609,14 +606,13 @@ impl<T> AtomicPtr<T> {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(atomic_access)]
/// use std::sync::atomic::AtomicPtr; /// use std::sync::atomic::AtomicPtr;
/// ///
/// let atomic_ptr = AtomicPtr::new(&mut 5); /// let atomic_ptr = AtomicPtr::new(&mut 5);
/// assert_eq!(unsafe { *atomic_ptr.into_inner() }, 5); /// assert_eq!(unsafe { *atomic_ptr.into_inner() }, 5);
/// ``` /// ```
#[inline] #[inline]
#[unstable(feature = "atomic_access", issue = "35603")] #[stable(feature = "atomic_access", since = "1.15.0")]
pub fn into_inner(self) -> *mut T { pub fn into_inner(self) -> *mut T {
unsafe { self.p.into_inner() } unsafe { self.p.into_inner() }
} }
@ -883,7 +879,6 @@ macro_rules! atomic_int {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(atomic_access)]
/// use std::sync::atomic::{AtomicIsize, Ordering}; /// use std::sync::atomic::{AtomicIsize, Ordering};
/// ///
/// let mut some_isize = AtomicIsize::new(10); /// let mut some_isize = AtomicIsize::new(10);
@ -905,7 +900,6 @@ macro_rules! atomic_int {
/// # Examples /// # Examples
/// ///
/// ``` /// ```
/// #![feature(atomic_access)]
/// use std::sync::atomic::AtomicIsize; /// use std::sync::atomic::AtomicIsize;
/// ///
/// let some_isize = AtomicIsize::new(5); /// let some_isize = AtomicIsize::new(5);
@ -1261,7 +1255,7 @@ atomic_int!{
stable(feature = "rust1", since = "1.0.0"), stable(feature = "rust1", since = "1.0.0"),
stable(feature = "extended_compare_and_swap", since = "1.10.0"), stable(feature = "extended_compare_and_swap", since = "1.10.0"),
stable(feature = "atomic_debug", since = "1.3.0"), stable(feature = "atomic_debug", since = "1.3.0"),
unstable(feature = "atomic_access", issue = "35603"), stable(feature = "atomic_access", since = "1.15.0"),
isize AtomicIsize ATOMIC_ISIZE_INIT isize AtomicIsize ATOMIC_ISIZE_INIT
} }
#[cfg(target_has_atomic = "ptr")] #[cfg(target_has_atomic = "ptr")]
@ -1269,7 +1263,7 @@ atomic_int!{
stable(feature = "rust1", since = "1.0.0"), stable(feature = "rust1", since = "1.0.0"),
stable(feature = "extended_compare_and_swap", since = "1.10.0"), stable(feature = "extended_compare_and_swap", since = "1.10.0"),
stable(feature = "atomic_debug", since = "1.3.0"), stable(feature = "atomic_debug", since = "1.3.0"),
unstable(feature = "atomic_access", issue = "35603"), stable(feature = "atomic_access", since = "1.15.0"),
usize AtomicUsize ATOMIC_USIZE_INIT usize AtomicUsize ATOMIC_USIZE_INIT
} }

View file

@ -59,22 +59,22 @@ fn double_imm_borrow() {
fn no_mut_then_imm_borrow() { fn no_mut_then_imm_borrow() {
let x = RefCell::new(0); let x = RefCell::new(0);
let _b1 = x.borrow_mut(); let _b1 = x.borrow_mut();
assert_eq!(x.borrow_state(), BorrowState::Writing); assert!(x.try_borrow().is_err());
} }
#[test] #[test]
fn no_imm_then_borrow_mut() { fn no_imm_then_borrow_mut() {
let x = RefCell::new(0); let x = RefCell::new(0);
let _b1 = x.borrow(); let _b1 = x.borrow();
assert_eq!(x.borrow_state(), BorrowState::Reading); assert!(x.try_borrow_mut().is_err());
} }
#[test] #[test]
fn no_double_borrow_mut() { fn no_double_borrow_mut() {
let x = RefCell::new(0); let x = RefCell::new(0);
assert_eq!(x.borrow_state(), BorrowState::Unused); assert!(x.try_borrow().is_ok());
let _b1 = x.borrow_mut(); let _b1 = x.borrow_mut();
assert_eq!(x.borrow_state(), BorrowState::Writing); assert!(x.try_borrow().is_err());
} }
#[test] #[test]
@ -102,7 +102,8 @@ fn double_borrow_single_release_no_borrow_mut() {
{ {
let _b2 = x.borrow(); let _b2 = x.borrow();
} }
assert_eq!(x.borrow_state(), BorrowState::Reading); assert!(x.try_borrow().is_ok());
assert!(x.try_borrow_mut().is_err());
} }
#[test] #[test]
@ -119,14 +120,18 @@ fn ref_clone_updates_flag() {
let x = RefCell::new(0); let x = RefCell::new(0);
{ {
let b1 = x.borrow(); let b1 = x.borrow();
assert_eq!(x.borrow_state(), BorrowState::Reading); assert!(x.try_borrow().is_ok());
assert!(x.try_borrow_mut().is_err());
{ {
let _b2 = Ref::clone(&b1); let _b2 = Ref::clone(&b1);
assert_eq!(x.borrow_state(), BorrowState::Reading); assert!(x.try_borrow().is_ok());
assert!(x.try_borrow_mut().is_err());
} }
assert_eq!(x.borrow_state(), BorrowState::Reading); assert!(x.try_borrow().is_ok());
assert!(x.try_borrow_mut().is_err());
} }
assert_eq!(x.borrow_state(), BorrowState::Unused); assert!(x.try_borrow().is_ok());
assert!(x.try_borrow_mut().is_ok());
} }
#[test] #[test]
@ -134,15 +139,19 @@ fn ref_map_does_not_update_flag() {
let x = RefCell::new(Some(5)); let x = RefCell::new(Some(5));
{ {
let b1: Ref<Option<u32>> = x.borrow(); let b1: Ref<Option<u32>> = x.borrow();
assert_eq!(x.borrow_state(), BorrowState::Reading); assert!(x.try_borrow().is_ok());
assert!(x.try_borrow_mut().is_err());
{ {
let b2: Ref<u32> = Ref::map(b1, |o| o.as_ref().unwrap()); let b2: Ref<u32> = Ref::map(b1, |o| o.as_ref().unwrap());
assert_eq!(*b2, 5); assert_eq!(*b2, 5);
assert_eq!(x.borrow_state(), BorrowState::Reading); assert!(x.try_borrow().is_ok());
assert!(x.try_borrow_mut().is_err());
} }
assert_eq!(x.borrow_state(), BorrowState::Unused); assert!(x.try_borrow().is_ok());
assert!(x.try_borrow_mut().is_ok());
} }
assert_eq!(x.borrow_state(), BorrowState::Unused); assert!(x.try_borrow().is_ok());
assert!(x.try_borrow_mut().is_ok());
} }
#[test] #[test]
@ -247,5 +256,3 @@ fn refcell_ref_coercion() {
assert_eq!(&*coerced, comp); assert_eq!(&*coerced, comp);
} }
} }

View file

@ -10,9 +10,7 @@
#![deny(warnings)] #![deny(warnings)]
#![feature(borrow_state)]
#![feature(box_syntax)] #![feature(box_syntax)]
#![feature(cell_extras)]
#![feature(char_escape_debug)] #![feature(char_escape_debug)]
#![feature(const_fn)] #![feature(const_fn)]
#![feature(core_private_bignum)] #![feature(core_private_bignum)]
@ -32,10 +30,9 @@
#![feature(try_from)] #![feature(try_from)]
#![feature(unicode)] #![feature(unicode)]
#![feature(unique)] #![feature(unique)]
#![feature(iter_max_by)]
#![feature(iter_min_by)]
#![feature(ordering_chaining)] #![feature(ordering_chaining)]
#![feature(result_unwrap_or_default)] #![feature(result_unwrap_or_default)]
#![feature(ptr_unaligned)]
extern crate core; extern crate core;
extern crate test; extern crate test;

View file

@ -9,6 +9,7 @@
// except according to those terms. // except according to those terms.
use core::ptr::*; use core::ptr::*;
use core::cell::RefCell;
#[test] #[test]
fn test() { fn test() {
@ -189,3 +190,25 @@ pub fn test_variadic_fnptr() {
let mut s = SipHasher::new(); let mut s = SipHasher::new();
assert_eq!(p.hash(&mut s), q.hash(&mut s)); assert_eq!(p.hash(&mut s), q.hash(&mut s));
} }
#[test]
fn write_unaligned_drop() {
thread_local! {
static DROPS: RefCell<Vec<u32>> = RefCell::new(Vec::new());
}
struct Dropper(u32);
impl Drop for Dropper {
fn drop(&mut self) {
DROPS.with(|d| d.borrow_mut().push(self.0));
}
}
{
let c = Dropper(0);
let mut t = Dropper(1);
unsafe { write_unaligned(&mut t, c); }
}
DROPS.with(|d| assert_eq!(*d.borrow(), [0]));
}

@ -1 +1 @@
Subproject commit 0ac39c5ccf6a04395b7c40dd62321cb91f63f160 Subproject commit e49e9bb7c3d9c7f2fd893f0ee0db81617b8db21f

View file

@ -28,7 +28,7 @@
#![panic_runtime] #![panic_runtime]
#![feature(panic_runtime)] #![feature(panic_runtime)]
#![cfg_attr(unix, feature(libc))] #![cfg_attr(unix, feature(libc))]
#![cfg_attr(windows, feature(core_intrinsics))] #![cfg_attr(any(target_os = "redox", windows), feature(core_intrinsics))]
// Rust's "try" function, but if we're aborting on panics we just call the // Rust's "try" function, but if we're aborting on panics we just call the
// function as there's nothing else we need to do here. // function as there's nothing else we need to do here.
@ -61,7 +61,7 @@ pub unsafe extern fn __rust_start_panic(_data: usize, _vtable: usize) -> u32 {
libc::abort(); libc::abort();
} }
#[cfg(windows)] #[cfg(any(target_os = "redox", windows))]
unsafe fn abort() -> ! { unsafe fn abort() -> ! {
core::intrinsics::abort(); core::intrinsics::abort();
} }

View file

@ -69,6 +69,7 @@ mod imp;
// i686-pc-windows-gnu and all others // i686-pc-windows-gnu and all others
#[cfg(any(all(unix, not(target_os = "emscripten")), #[cfg(any(all(unix, not(target_os = "emscripten")),
target_os = "redox",
all(windows, target_arch = "x86", target_env = "gnu")))] all(windows, target_arch = "x86", target_env = "gnu")))]
#[path = "gcc.rs"] #[path = "gcc.rs"]
mod imp; mod imp;

View file

@ -418,7 +418,7 @@ to see something like:
Hir(foo) -> Collect(bar) Hir(foo) -> Collect(bar)
Collect(bar) -> TypeckItemBody(bar) Collect(bar) -> TypeckItemBody(bar)
That first edge looks suspicious to you. So you set That first edge looks suspicious to you. So you set
`RUST_FORBID_DEP_GRAPH_EDGE` to `Hir&foo -> Collect&bar`, re-run, and `RUST_FORBID_DEP_GRAPH_EDGE` to `Hir&foo -> Collect&bar`, re-run, and
then observe the backtrace. Voila, bug fixed! then observe the backtrace. Voila, bug fixed!
@ -440,6 +440,4 @@ To achieve this, the HIR map will detect if the def-id originates in
an inlined node and add a dependency to a suitable `MetaData` node an inlined node and add a dependency to a suitable `MetaData` node
instead. If you are reading a HIR node and are not sure if it may be instead. If you are reading a HIR node and are not sure if it may be
inlined or not, you can use `tcx.map.read(node_id)` and it will detect inlined or not, you can use `tcx.map.read(node_id)` and it will detect
whether the node is inlined or not and do the right thing. You can whether the node is inlined or not and do the right thing.
also use `tcx.map.is_inlined_def_id()` and
`tcx.map.is_inlined_node_id()` to test.

View file

@ -27,7 +27,7 @@
//! created. See `./README.md` for details. //! created. See `./README.md` for details.
use hir::def_id::DefId; use hir::def_id::DefId;
use std::cell::{BorrowState, RefCell}; use std::cell::RefCell;
use std::env; use std::env;
use super::DepNode; use super::DepNode;
@ -71,15 +71,11 @@ impl ShadowGraph {
pub fn enqueue(&self, message: &DepMessage) { pub fn enqueue(&self, message: &DepMessage) {
if ENABLED { if ENABLED {
match self.stack.borrow_state() { if self.stack.try_borrow().is_err() {
BorrowState::Unused => {} // When we apply edge filters, that invokes the Debug trait on
_ => { // DefIds, which in turn reads from various bits of state and
// When we apply edge filters, that invokes the // creates reads! Ignore those recursive reads.
// Debug trait on DefIds, which in turn reads from return;
// various bits of state and creates reads! Ignore
// those recursive reads.
return;
}
} }
let mut stack = self.stack.borrow_mut(); let mut stack = self.stack.borrow_mut();
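The `try_borrow().is_err()` check above is a general pattern for detecting an in-progress borrow without panicking; a tiny sketch of the same re-entrancy guard outside the dep-graph code (hypothetical names, for illustration only):

```
use std::cell::RefCell;

fn log(stack: &RefCell<Vec<String>>, msg: &str) {
    // If the stack is already borrowed we are being called re-entrantly
    // (e.g. from a Debug impl invoked while pushing); skip silently, as
    // ShadowGraph::enqueue does above.
    if stack.try_borrow().is_err() {
        return;
    }
    stack.borrow_mut().push(msg.to_string());
}

fn main() {
    let stack = RefCell::new(Vec::new());
    log(&stack, "first");
    log(&stack, "second");
    assert_eq!(stack.borrow().len(), 2);
}
```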

View file

@ -40,7 +40,6 @@ pub fn visit_all_item_likes_in_krate<'a, 'tcx, V, F>(tcx: TyCtxt<'a, 'tcx, 'tcx>
let task_id = (self.dep_node_fn)(item_def_id); let task_id = (self.dep_node_fn)(item_def_id);
let _task = self.tcx.dep_graph.in_task(task_id.clone()); let _task = self.tcx.dep_graph.in_task(task_id.clone());
debug!("Started task {:?}", task_id); debug!("Started task {:?}", task_id);
assert!(!self.tcx.map.is_inlined_def_id(item_def_id));
self.tcx.dep_graph.read(DepNode::Hir(item_def_id)); self.tcx.dep_graph.read(DepNode::Hir(item_def_id));
self.visitor.visit_item(i); self.visitor.visit_item(i);
debug!("Ended task {:?}", task_id); debug!("Ended task {:?}", task_id);
@ -51,7 +50,6 @@ pub fn visit_all_item_likes_in_krate<'a, 'tcx, V, F>(tcx: TyCtxt<'a, 'tcx, 'tcx>
let task_id = (self.dep_node_fn)(impl_item_def_id); let task_id = (self.dep_node_fn)(impl_item_def_id);
let _task = self.tcx.dep_graph.in_task(task_id.clone()); let _task = self.tcx.dep_graph.in_task(task_id.clone());
debug!("Started task {:?}", task_id); debug!("Started task {:?}", task_id);
assert!(!self.tcx.map.is_inlined_def_id(impl_item_def_id));
self.tcx.dep_graph.read(DepNode::Hir(impl_item_def_id)); self.tcx.dep_graph.read(DepNode::Hir(impl_item_def_id));
self.visitor.visit_impl_item(i); self.visitor.visit_impl_item(i);
debug!("Ended task {:?}", task_id); debug!("Ended task {:?}", task_id);

View file

@ -1236,6 +1236,23 @@ struct Foo<'a, T: 'a> {
foo: &'a T foo: &'a T
} }
``` ```
To see why this is important, consider the case where `T` is itself a reference
(e.g., `T = &str`). If we don't include the restriction that `T: 'a`, the
following code would be perfectly legal:
```compile_fail,E0309
struct Foo<'a, T> {
foo: &'a T
}
fn main() {
let v = "42".to_string();
let f = Foo{foo: &v};
drop(v);
println!("{}", f.foo); // but we've already dropped v!
}
```
"##, "##,
E0310: r##" E0310: r##"

View file

@ -120,9 +120,7 @@ impl fmt::Debug for DefId {
ty::tls::with_opt(|opt_tcx| { ty::tls::with_opt(|opt_tcx| {
if let Some(tcx) = opt_tcx { if let Some(tcx) = opt_tcx {
if let Some(def_path) = tcx.opt_def_path(*self) { write!(f, " => {}", tcx.def_path(*self).to_string(tcx))?;
write!(f, " => {}", def_path.to_string(tcx))?;
}
} }
Ok(()) Ok(())
})?; })?;

View file

@ -365,7 +365,6 @@ pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) {
pub fn walk_macro_def<'v, V: Visitor<'v>>(visitor: &mut V, macro_def: &'v MacroDef) { pub fn walk_macro_def<'v, V: Visitor<'v>>(visitor: &mut V, macro_def: &'v MacroDef) {
visitor.visit_id(macro_def.id); visitor.visit_id(macro_def.id);
visitor.visit_name(macro_def.span, macro_def.name); visitor.visit_name(macro_def.span, macro_def.name);
walk_opt_name(visitor, macro_def.span, macro_def.imported_from);
walk_list!(visitor, visit_attribute, &macro_def.attrs); walk_list!(visitor, visit_attribute, &macro_def.attrs);
} }

View file

@ -81,7 +81,7 @@ pub struct LoweringContext<'a> {
} }
pub trait Resolver { pub trait Resolver {
// Resolve a global hir path generated by the lowerer when expanding `for`, `if let`, etc. // Resolve a hir path generated by the lowerer when expanding `for`, `if let`, etc.
fn resolve_hir_path(&mut self, path: &mut hir::Path, is_value: bool); fn resolve_hir_path(&mut self, path: &mut hir::Path, is_value: bool);
// Obtain the resolution for a node id // Obtain the resolution for a node id
@ -337,7 +337,6 @@ impl<'a> LoweringContext<'a> {
let proj_start = p.segments.len() - resolution.depth; let proj_start = p.segments.len() - resolution.depth;
let path = P(hir::Path { let path = P(hir::Path {
global: p.global,
def: resolution.base_def, def: resolution.base_def,
segments: p.segments[..proj_start].iter().enumerate().map(|(i, segment)| { segments: p.segments[..proj_start].iter().enumerate().map(|(i, segment)| {
let param_mode = match (qself_position, param_mode) { let param_mode = match (qself_position, param_mode) {
@ -404,12 +403,17 @@ impl<'a> LoweringContext<'a> {
id: NodeId, id: NodeId,
p: &Path, p: &Path,
name: Option<Name>, name: Option<Name>,
param_mode: ParamMode) param_mode: ParamMode,
defaults_to_global: bool)
-> hir::Path { -> hir::Path {
let mut segments = p.segments.iter();
if defaults_to_global && p.is_global() {
segments.next();
}
hir::Path { hir::Path {
global: p.global,
def: self.expect_full_def(id), def: self.expect_full_def(id),
segments: p.segments.iter().map(|segment| { segments: segments.map(|segment| {
self.lower_path_segment(segment, param_mode) self.lower_path_segment(segment, param_mode)
}).chain(name.map(|name| { }).chain(name.map(|name| {
hir::PathSegment { hir::PathSegment {
@ -424,22 +428,29 @@ impl<'a> LoweringContext<'a> {
fn lower_path(&mut self, fn lower_path(&mut self,
id: NodeId, id: NodeId,
p: &Path, p: &Path,
param_mode: ParamMode) param_mode: ParamMode,
defaults_to_global: bool)
-> hir::Path { -> hir::Path {
self.lower_path_extra(id, p, None, param_mode) self.lower_path_extra(id, p, None, param_mode, defaults_to_global)
} }
fn lower_path_segment(&mut self, fn lower_path_segment(&mut self,
segment: &PathSegment, segment: &PathSegment,
param_mode: ParamMode) param_mode: ParamMode)
-> hir::PathSegment { -> hir::PathSegment {
let parameters = match segment.parameters { let parameters = if let Some(ref parameters) = segment.parameters {
PathParameters::AngleBracketed(ref data) => { match **parameters {
let data = self.lower_angle_bracketed_parameter_data(data, param_mode); PathParameters::AngleBracketed(ref data) => {
hir::AngleBracketedParameters(data) let data = self.lower_angle_bracketed_parameter_data(data, param_mode);
hir::AngleBracketedParameters(data)
}
PathParameters::Parenthesized(ref data) => {
hir::ParenthesizedParameters(self.lower_parenthesized_parameter_data(data))
}
} }
PathParameters::Parenthesized(ref data) => } else {
hir::ParenthesizedParameters(self.lower_parenthesized_parameter_data(data)), let data = self.lower_angle_bracketed_parameter_data(&Default::default(), param_mode);
hir::AngleBracketedParameters(data)
}; };
hir::PathSegment { hir::PathSegment {
@ -596,8 +607,8 @@ impl<'a> LoweringContext<'a> {
// Check if the where clause type is a plain type parameter. // Check if the where clause type is a plain type parameter.
match bound_pred.bounded_ty.node { match bound_pred.bounded_ty.node {
TyKind::Path(None, ref path) TyKind::Path(None, ref path)
if !path.global && path.segments.len() == 1 && if path.segments.len() == 1 &&
bound_pred.bound_lifetimes.is_empty() => { bound_pred.bound_lifetimes.is_empty() => {
if let Some(Def::TyParam(def_id)) = if let Some(Def::TyParam(def_id)) =
self.resolver.get_resolution(bound_pred.bounded_ty.id) self.resolver.get_resolution(bound_pred.bounded_ty.id)
.map(|d| d.base_def) { .map(|d| d.base_def) {
@ -671,7 +682,7 @@ impl<'a> LoweringContext<'a> {
span}) => { span}) => {
hir::WherePredicate::EqPredicate(hir::WhereEqPredicate { hir::WherePredicate::EqPredicate(hir::WhereEqPredicate {
id: id, id: id,
path: self.lower_path(id, path, ParamMode::Explicit), path: self.lower_path(id, path, ParamMode::Explicit, false),
ty: self.lower_ty(ty), ty: self.lower_ty(ty),
span: span, span: span,
}) })
@ -701,7 +712,7 @@ impl<'a> LoweringContext<'a> {
fn lower_trait_ref(&mut self, p: &TraitRef) -> hir::TraitRef { fn lower_trait_ref(&mut self, p: &TraitRef) -> hir::TraitRef {
hir::TraitRef { hir::TraitRef {
path: self.lower_path(p.ref_id, &p.path, ParamMode::Explicit), path: self.lower_path(p.ref_id, &p.path, ParamMode::Explicit, false),
ref_id: p.ref_id, ref_id: p.ref_id,
} }
} }
@ -794,7 +805,7 @@ impl<'a> LoweringContext<'a> {
}; };
let mut path = self.lower_path_extra(import.id, path, suffix, let mut path = self.lower_path_extra(import.id, path, suffix,
ParamMode::Explicit); ParamMode::Explicit, true);
path.span = span; path.span = span;
self.items.insert(import.id, hir::Item { self.items.insert(import.id, hir::Item {
id: import.id, id: import.id,
@ -808,7 +819,7 @@ impl<'a> LoweringContext<'a> {
path path
} }
}; };
let path = P(self.lower_path(id, path, ParamMode::Explicit)); let path = P(self.lower_path(id, path, ParamMode::Explicit, true));
let kind = match view_path.node { let kind = match view_path.node {
ViewPathSimple(ident, _) => { ViewPathSimple(ident, _) => {
*name = ident.name; *name = ident.name;
@ -987,8 +998,6 @@ impl<'a> LoweringContext<'a> {
attrs: self.lower_attrs(&m.attrs), attrs: self.lower_attrs(&m.attrs),
id: m.id, id: m.id,
span: m.span, span: m.span,
imported_from: m.imported_from.map(|x| x.name),
allow_internal_unstable: m.allow_internal_unstable,
body: m.body.clone().into(), body: m.body.clone().into(),
} }
} }
@ -1131,7 +1140,6 @@ impl<'a> LoweringContext<'a> {
Some(def) => { Some(def) => {
hir::PatKind::Path(hir::QPath::Resolved(None, P(hir::Path { hir::PatKind::Path(hir::QPath::Resolved(None, P(hir::Path {
span: pth1.span, span: pth1.span,
global: false,
def: def, def: def,
segments: hir_vec![ segments: hir_vec![
hir::PathSegment::from_name(pth1.node.name) hir::PathSegment::from_name(pth1.node.name)
@ -1874,7 +1882,7 @@ impl<'a> LoweringContext<'a> {
Visibility::Crate(_) => hir::Visibility::Crate, Visibility::Crate(_) => hir::Visibility::Crate,
Visibility::Restricted { ref path, id } => { Visibility::Restricted { ref path, id } => {
hir::Visibility::Restricted { hir::Visibility::Restricted {
path: P(self.lower_path(id, path, ParamMode::Explicit)), path: P(self.lower_path(id, path, ParamMode::Explicit, true)),
id: id id: id
} }
} }
@ -1967,7 +1975,6 @@ impl<'a> LoweringContext<'a> {
let expr_path = hir::ExprPath(hir::QPath::Resolved(None, P(hir::Path { let expr_path = hir::ExprPath(hir::QPath::Resolved(None, P(hir::Path {
span: span, span: span,
global: false,
def: def, def: def,
segments: hir_vec![hir::PathSegment::from_name(id)], segments: hir_vec![hir::PathSegment::from_name(id)],
}))); })));
@ -2135,17 +2142,12 @@ impl<'a> LoweringContext<'a> {
/// `fld.cx.use_std`, and `::core::b::c::d` otherwise. /// `fld.cx.use_std`, and `::core::b::c::d` otherwise.
/// The path is also resolved according to `is_value`. /// The path is also resolved according to `is_value`.
fn std_path(&mut self, span: Span, components: &[&str], is_value: bool) -> hir::Path { fn std_path(&mut self, span: Span, components: &[&str], is_value: bool) -> hir::Path {
let idents = self.crate_root.iter().chain(components);
let segments: Vec<_> = idents.map(|name| {
hir::PathSegment::from_name(Symbol::intern(name))
}).collect();
let mut path = hir::Path { let mut path = hir::Path {
span: span, span: span,
global: true,
def: Def::Err, def: Def::Err,
segments: segments.into(), segments: iter::once(keywords::CrateRoot.name()).chain({
self.crate_root.into_iter().chain(components.iter().cloned()).map(Symbol::intern)
}).map(hir::PathSegment::from_name).collect(),
}; };
self.resolver.resolve_hir_path(&mut path, is_value); self.resolver.resolve_hir_path(&mut path, is_value);
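
A standalone approximation of how the rewritten `std_path` now assembles its segments: the crate-root marker comes first, then the optional crate root name (`std` or `core`), then the remaining components. The function below uses placeholder strings, not the compiler's interned symbols.

    use std::iter;

    const CRATE_ROOT: &'static str = "{{root}}";

    fn std_path(crate_root: Option<&str>, components: &[&str]) -> Vec<String> {
        iter::once(CRATE_ROOT)
            .chain(crate_root.into_iter().chain(components.iter().cloned()))
            .map(String::from)
            .collect()
    }

    fn main() {
        // Roughly what gets built for `::core::b::c::d` when `use_std` is off.
        assert_eq!(std_path(Some("core"), &["b", "c", "d"]),
                   ["{{root}}", "core", "b", "c", "d"]);
    }
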

View file

@ -11,7 +11,6 @@
use super::*; use super::*;
use hir::intravisit::{Visitor, NestedVisitorMap}; use hir::intravisit::{Visitor, NestedVisitorMap};
use hir::def_id::DefId;
use middle::cstore::InlinedItem; use middle::cstore::InlinedItem;
use std::iter::repeat; use std::iter::repeat;
use syntax::ast::{NodeId, CRATE_NODE_ID}; use syntax::ast::{NodeId, CRATE_NODE_ID};
@ -47,8 +46,6 @@ impl<'ast> NodeCollector<'ast> {
pub fn extend(krate: &'ast Crate, pub fn extend(krate: &'ast Crate,
parent: &'ast InlinedItem, parent: &'ast InlinedItem,
parent_node: NodeId, parent_node: NodeId,
parent_def_path: DefPath,
parent_def_id: DefId,
map: Vec<MapEntry<'ast>>) map: Vec<MapEntry<'ast>>)
-> NodeCollector<'ast> { -> NodeCollector<'ast> {
let mut collector = NodeCollector { let mut collector = NodeCollector {
@ -58,7 +55,6 @@ impl<'ast> NodeCollector<'ast> {
ignore_nested_items: true ignore_nested_items: true
}; };
assert_eq!(parent_def_path.krate, parent_def_id.krate);
collector.insert_entry(parent_node, RootInlinedParent(parent)); collector.insert_entry(parent_node, RootInlinedParent(parent));
collector collector

View file

@ -9,12 +9,7 @@
// except according to those terms. // except according to those terms.
use hir::map::definitions::*; use hir::map::definitions::*;
use hir::def_id::{CRATE_DEF_INDEX, DefIndex};
use hir;
use hir::intravisit::{self, Visitor, NestedVisitorMap};
use hir::def_id::{CRATE_DEF_INDEX, DefId, DefIndex};
use middle::cstore::InlinedItem;
use syntax::ast::*; use syntax::ast::*;
use syntax::ext::hygiene::Mark; use syntax::ext::hygiene::Mark;
@ -23,9 +18,6 @@ use syntax::symbol::{Symbol, keywords};
/// Creates def ids for nodes in the HIR. /// Creates def ids for nodes in the HIR.
pub struct DefCollector<'a> { pub struct DefCollector<'a> {
// If we are walking HIR (c.f., AST), we need to keep a reference to the
// crate.
hir_crate: Option<&'a hir::Crate>,
definitions: &'a mut Definitions, definitions: &'a mut Definitions,
parent_def: Option<DefIndex>, parent_def: Option<DefIndex>,
pub visit_macro_invoc: Option<&'a mut FnMut(MacroInvocationData)>, pub visit_macro_invoc: Option<&'a mut FnMut(MacroInvocationData)>,
@ -40,43 +32,16 @@ pub struct MacroInvocationData {
impl<'a> DefCollector<'a> { impl<'a> DefCollector<'a> {
pub fn new(definitions: &'a mut Definitions) -> Self { pub fn new(definitions: &'a mut Definitions) -> Self {
DefCollector { DefCollector {
hir_crate: None,
definitions: definitions, definitions: definitions,
parent_def: None, parent_def: None,
visit_macro_invoc: None, visit_macro_invoc: None,
} }
} }
pub fn extend(parent_node: NodeId,
parent_def_path: DefPath,
parent_def_id: DefId,
definitions: &'a mut Definitions)
-> Self {
let mut collector = DefCollector::new(definitions);
assert_eq!(parent_def_path.krate, parent_def_id.krate);
let root_path = Box::new(InlinedRootPath {
data: parent_def_path.data,
def_id: parent_def_id,
});
let def = collector.create_def(parent_node, DefPathData::InlinedRoot(root_path));
collector.parent_def = Some(def);
collector
}
pub fn collect_root(&mut self) { pub fn collect_root(&mut self) {
let root = self.create_def_with_parent(None, CRATE_NODE_ID, DefPathData::CrateRoot); let root = self.create_def_with_parent(None, CRATE_NODE_ID, DefPathData::CrateRoot);
assert_eq!(root, CRATE_DEF_INDEX); assert_eq!(root, CRATE_DEF_INDEX);
self.parent_def = Some(root); self.parent_def = Some(root);
self.create_def_with_parent(Some(CRATE_DEF_INDEX), DUMMY_NODE_ID, DefPathData::Misc);
}
pub fn walk_item(&mut self, ii: &'a InlinedItem, krate: &'a hir::Crate) {
self.hir_crate = Some(krate);
ii.visit(self);
} }
fn create_def(&mut self, node_id: NodeId, data: DefPathData) -> DefIndex { fn create_def(&mut self, node_id: NodeId, data: DefPathData) -> DefIndex {
@ -114,16 +79,6 @@ impl<'a> DefCollector<'a> {
self.create_def(expr.id, DefPathData::Initializer); self.create_def(expr.id, DefPathData::Initializer);
} }
fn visit_hir_const_integer(&mut self, expr: &hir::Expr) {
// FIXME(eddyb) Closures should have separate
// function definition IDs and expression IDs.
if let hir::ExprClosure(..) = expr.node {
return;
}
self.create_def(expr.id, DefPathData::Initializer);
}
fn visit_macro_invoc(&mut self, id: NodeId, const_integer: bool) { fn visit_macro_invoc(&mut self, id: NodeId, const_integer: bool) {
if let Some(ref mut visit) = self.visit_macro_invoc { if let Some(ref mut visit) = self.visit_macro_invoc {
visit(MacroInvocationData { visit(MacroInvocationData {
@ -324,169 +279,3 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
} }
} }
} }
// We walk the HIR rather than the AST when reading items from metadata.
impl<'ast> Visitor<'ast> for DefCollector<'ast> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'ast> {
// note however that we override `visit_body` below
NestedVisitorMap::None
}
fn visit_body(&mut self, id: hir::ExprId) {
if let Some(krate) = self.hir_crate {
self.visit_expr(krate.expr(id));
}
}
fn visit_item(&mut self, i: &'ast hir::Item) {
debug!("visit_item: {:?}", i);
// Pick the def data. This need not be unique, but the more
// information we encapsulate into
let def_data = match i.node {
hir::ItemDefaultImpl(..) | hir::ItemImpl(..) =>
DefPathData::Impl,
hir::ItemEnum(..) | hir::ItemStruct(..) | hir::ItemUnion(..) |
hir::ItemTrait(..) | hir::ItemExternCrate(..) | hir::ItemMod(..) |
hir::ItemForeignMod(..) | hir::ItemTy(..) =>
DefPathData::TypeNs(i.name.as_str()),
hir::ItemStatic(..) | hir::ItemConst(..) | hir::ItemFn(..) =>
DefPathData::ValueNs(i.name.as_str()),
hir::ItemUse(..) => DefPathData::Misc,
};
let def = self.create_def(i.id, def_data);
self.with_parent(def, |this| {
match i.node {
hir::ItemEnum(ref enum_definition, _) => {
for v in &enum_definition.variants {
let variant_def_index =
this.create_def(v.node.data.id(),
DefPathData::EnumVariant(v.node.name.as_str()));
this.with_parent(variant_def_index, |this| {
for field in v.node.data.fields() {
this.create_def(field.id,
DefPathData::Field(field.name.as_str()));
}
if let Some(ref expr) = v.node.disr_expr {
this.visit_hir_const_integer(expr);
}
});
}
}
hir::ItemStruct(ref struct_def, _) |
hir::ItemUnion(ref struct_def, _) => {
// If this is a tuple-like struct, register the constructor.
if !struct_def.is_struct() {
this.create_def(struct_def.id(),
DefPathData::StructCtor);
}
for field in struct_def.fields() {
this.create_def(field.id, DefPathData::Field(field.name.as_str()));
}
}
_ => {}
}
intravisit::walk_item(this, i);
});
}
fn visit_foreign_item(&mut self, foreign_item: &'ast hir::ForeignItem) {
let def = self.create_def(foreign_item.id,
DefPathData::ValueNs(foreign_item.name.as_str()));
self.with_parent(def, |this| {
intravisit::walk_foreign_item(this, foreign_item);
});
}
fn visit_generics(&mut self, generics: &'ast hir::Generics) {
for ty_param in generics.ty_params.iter() {
self.create_def(ty_param.id, DefPathData::TypeParam(ty_param.name.as_str()));
}
intravisit::walk_generics(self, generics);
}
fn visit_trait_item(&mut self, ti: &'ast hir::TraitItem) {
let def_data = match ti.node {
hir::MethodTraitItem(..) | hir::ConstTraitItem(..) =>
DefPathData::ValueNs(ti.name.as_str()),
hir::TypeTraitItem(..) => DefPathData::TypeNs(ti.name.as_str()),
};
let def = self.create_def(ti.id, def_data);
self.with_parent(def, |this| {
if let hir::ConstTraitItem(_, Some(ref expr)) = ti.node {
this.create_def(expr.id, DefPathData::Initializer);
}
intravisit::walk_trait_item(this, ti);
});
}
fn visit_impl_item(&mut self, ii: &'ast hir::ImplItem) {
let def_data = match ii.node {
hir::ImplItemKind::Method(..) | hir::ImplItemKind::Const(..) =>
DefPathData::ValueNs(ii.name.as_str()),
hir::ImplItemKind::Type(..) => DefPathData::TypeNs(ii.name.as_str()),
};
let def = self.create_def(ii.id, def_data);
self.with_parent(def, |this| {
if let hir::ImplItemKind::Const(_, ref expr) = ii.node {
this.create_def(expr.id, DefPathData::Initializer);
}
intravisit::walk_impl_item(this, ii);
});
}
fn visit_pat(&mut self, pat: &'ast hir::Pat) {
let parent_def = self.parent_def;
if let hir::PatKind::Binding(_, _, name, _) = pat.node {
let def = self.create_def(pat.id, DefPathData::Binding(name.node.as_str()));
self.parent_def = Some(def);
}
intravisit::walk_pat(self, pat);
self.parent_def = parent_def;
}
fn visit_expr(&mut self, expr: &'ast hir::Expr) {
let parent_def = self.parent_def;
if let hir::ExprRepeat(_, ref count) = expr.node {
self.visit_hir_const_integer(count);
}
if let hir::ExprClosure(..) = expr.node {
let def = self.create_def(expr.id, DefPathData::ClosureExpr);
self.parent_def = Some(def);
}
intravisit::walk_expr(self, expr);
self.parent_def = parent_def;
}
fn visit_ty(&mut self, ty: &'ast hir::Ty) {
if let hir::TyArray(_, ref length) = ty.node {
self.visit_hir_const_integer(length);
}
if let hir::TyImplTrait(..) = ty.node {
self.create_def(ty.id, DefPathData::ImplTrait);
}
intravisit::walk_ty(self, ty);
}
fn visit_lifetime_def(&mut self, def: &'ast hir::LifetimeDef) {
self.create_def(def.lifetime.id, DefPathData::LifetimeDef(def.lifetime.name.as_str()));
}
fn visit_macro_def(&mut self, macro_def: &'ast hir::MacroDef) {
self.create_def(macro_def.id, DefPathData::MacroDef(macro_def.name.as_str()));
}
}

View file

@ -8,22 +8,119 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
//! For each definition, we track the following data. A definition
//! here is defined somewhat circularly as "something with a def-id",
//! but it generally corresponds to things like structs, enums, etc.
//! There are also some rather random cases (like const initializer
//! expressions) that are mostly just leftovers.
use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE}; use hir::def_id::{CrateNum, DefId, DefIndex, LOCAL_CRATE};
use rustc_data_structures::fx::FxHashMap; use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::StableHasher;
use serialize::{Encodable, Decodable, Encoder, Decoder};
use std::fmt::Write; use std::fmt::Write;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::collections::hash_map::DefaultHasher;
use syntax::ast; use syntax::ast;
use syntax::symbol::{Symbol, InternedString}; use syntax::symbol::{Symbol, InternedString};
use ty::TyCtxt; use ty::TyCtxt;
use util::nodemap::NodeMap; use util::nodemap::NodeMap;
/// The definition table containing node definitions /// The DefPathTable maps DefIndexes to DefKeys and vice versa.
/// Internally the DefPathTable holds a tree of DefKeys, where each DefKey
/// stores the DefIndex of its parent.
/// There is one DefPathTable for each crate.
#[derive(Clone)]
pub struct DefPathTable {
index_to_key: Vec<DefKey>,
key_to_index: FxHashMap<DefKey, DefIndex>,
}
impl DefPathTable {
fn insert(&mut self, key: DefKey) -> DefIndex {
let index = DefIndex::new(self.index_to_key.len());
debug!("DefPathTable::insert() - {:?} <-> {:?}", key, index);
self.index_to_key.push(key.clone());
self.key_to_index.insert(key, index);
index
}
#[inline(always)]
pub fn def_key(&self, index: DefIndex) -> DefKey {
self.index_to_key[index.as_usize()].clone()
}
#[inline(always)]
pub fn def_index_for_def_key(&self, key: &DefKey) -> Option<DefIndex> {
self.key_to_index.get(key).cloned()
}
#[inline(always)]
pub fn contains_key(&self, key: &DefKey) -> bool {
self.key_to_index.contains_key(key)
}
pub fn retrace_path(&self,
path_data: &[DisambiguatedDefPathData])
-> Option<DefIndex> {
let root_key = DefKey {
parent: None,
disambiguated_data: DisambiguatedDefPathData {
data: DefPathData::CrateRoot,
disambiguator: 0,
},
};
let root_index = self.key_to_index
.get(&root_key)
.expect("no root key?")
.clone();
debug!("retrace_path: root_index={:?}", root_index);
let mut index = root_index;
for data in path_data {
let key = DefKey { parent: Some(index), disambiguated_data: data.clone() };
debug!("retrace_path: key={:?}", key);
match self.key_to_index.get(&key) {
Some(&i) => index = i,
None => return None,
}
}
Some(index)
}
}
impl Encodable for DefPathTable {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
self.index_to_key.encode(s)
}
}
impl Decodable for DefPathTable {
fn decode<D: Decoder>(d: &mut D) -> Result<DefPathTable, D::Error> {
let index_to_key: Vec<DefKey> = Decodable::decode(d)?;
let key_to_index = index_to_key.iter()
.enumerate()
.map(|(index, key)| (key.clone(), DefIndex::new(index)))
.collect();
Ok(DefPathTable {
index_to_key: index_to_key,
key_to_index: key_to_index,
})
}
}
/// The definition table containing node definitions.
/// It holds the DefPathTable for local DefIds/DefPaths and it also stores a
/// mapping from NodeIds to local DefIds.
#[derive(Clone)] #[derive(Clone)]
pub struct Definitions { pub struct Definitions {
data: Vec<DefData>, table: DefPathTable,
key_map: FxHashMap<DefKey, DefIndex>, node_to_def_index: NodeMap<DefIndex>,
node_map: NodeMap<DefIndex>, def_index_to_node: Vec<ast::NodeId>,
} }
/// A unique identifier that we can use to lookup a definition /// A unique identifier that we can use to lookup a definition
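
To make the new `DefPathTable` shape easier to follow, here is a heavily simplified standalone model of its two-way mapping and of `retrace_path`. A plain `String` stands in for the path data and `usize` for `DefIndex`; none of this is the compiler's real API.

    use std::collections::HashMap;

    #[derive(Clone, PartialEq, Eq, Hash, Debug)]
    struct DefKey {
        parent: Option<usize>,
        data: String, // stand-in for DisambiguatedDefPathData
    }

    #[derive(Default)]
    struct DefPathTable {
        index_to_key: Vec<DefKey>,
        key_to_index: HashMap<DefKey, usize>,
    }

    impl DefPathTable {
        fn insert(&mut self, key: DefKey) -> usize {
            let index = self.index_to_key.len();
            self.index_to_key.push(key.clone());
            self.key_to_index.insert(key, index);
            index
        }

        fn def_key(&self, index: usize) -> DefKey {
            self.index_to_key[index].clone()
        }

        // Walk a parent-less path description from the crate root downwards,
        // returning the index it lands on, if every step is present.
        fn retrace_path(&self, path_data: &[&str]) -> Option<usize> {
            let root = DefKey { parent: None, data: "CrateRoot".to_string() };
            let mut index = *self.key_to_index.get(&root)?;
            for data in path_data {
                let key = DefKey { parent: Some(index), data: data.to_string() };
                index = *self.key_to_index.get(&key)?;
            }
            Some(index)
        }
    }

    fn main() {
        let mut table = DefPathTable::default();
        let root = table.insert(DefKey { parent: None, data: "CrateRoot".to_string() });
        let module = table.insert(DefKey { parent: Some(root), data: "my_mod".to_string() });
        assert_eq!(table.retrace_path(&["my_mod"]), Some(module));
        assert_eq!(table.def_key(module).parent, Some(root));
        assert_eq!(table.retrace_path(&["missing"]), None);
    }
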
@ -50,19 +147,6 @@ pub struct DisambiguatedDefPathData {
pub disambiguator: u32 pub disambiguator: u32
} }
/// For each definition, we track the following data. A definition
/// here is defined somewhat circularly as "something with a def-id",
/// but it generally corresponds to things like structs, enums, etc.
/// There are also some rather random cases (like const initializer
/// expressions) that are mostly just leftovers.
#[derive(Clone, Debug)]
pub struct DefData {
pub key: DefKey,
/// Local ID within the HIR.
pub node_id: ast::NodeId,
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] #[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct DefPath { pub struct DefPath {
/// the path leading from the crate root to the item /// the path leading from the crate root to the item
@ -77,12 +161,11 @@ impl DefPath {
self.krate == LOCAL_CRATE self.krate == LOCAL_CRATE
} }
pub fn make<FN>(start_krate: CrateNum, pub fn make<FN>(krate: CrateNum,
start_index: DefIndex, start_index: DefIndex,
mut get_key: FN) -> DefPath mut get_key: FN) -> DefPath
where FN: FnMut(DefIndex) -> DefKey where FN: FnMut(DefIndex) -> DefKey
{ {
let mut krate = start_krate;
let mut data = vec![]; let mut data = vec![];
let mut index = Some(start_index); let mut index = Some(start_index);
loop { loop {
@ -95,13 +178,6 @@ impl DefPath {
assert!(key.parent.is_none()); assert!(key.parent.is_none());
break; break;
} }
DefPathData::InlinedRoot(ref p) => {
assert!(key.parent.is_none());
assert!(!p.def_id.is_local());
data.extend(p.data.iter().cloned().rev());
krate = p.def_id.krate;
break;
}
_ => { _ => {
data.push(key.disambiguated_data); data.push(key.disambiguated_data);
index = key.parent; index = key.parent;
@ -131,7 +207,8 @@ impl DefPath {
} }
pub fn deterministic_hash(&self, tcx: TyCtxt) -> u64 { pub fn deterministic_hash(&self, tcx: TyCtxt) -> u64 {
let mut state = DefaultHasher::new(); debug!("deterministic_hash({:?})", self);
let mut state = StableHasher::new();
self.deterministic_hash_to(tcx, &mut state); self.deterministic_hash_to(tcx, &mut state);
state.finish() state.finish()
} }
@ -143,31 +220,6 @@ impl DefPath {
} }
} }
/// Root of an inlined item. We track the `DefPath` of the item within
/// the original crate but also its def-id. This is kind of an
/// augmented version of a `DefPath` that includes a `DefId`. This is
/// all sort of ugly but the hope is that inlined items will be going
/// away soon anyway.
///
/// Some of the constraints that led to the current approach:
///
/// - I don't want to have a `DefId` in the main `DefPath` because
/// that gets serialized for incr. comp., and when reloaded the
/// `DefId` is no longer valid. I'd rather maintain the invariant
/// that every `DefId` is valid, and a potentially outdated `DefId` is
/// represented as a `DefPath`.
/// - (We don't serialize def-paths from inlined items, so it's ok to have one here.)
/// - We need to be able to extract the def-id from inline items to
/// make the symbol name. In theory we could retrace it from the
/// data, but the metadata doesn't have the required indices, and I
/// don't want to write the code to create one just for this.
/// - It may be that we don't actually need `data` at all. We'll have
/// to see about that.
#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct InlinedRootPath {
pub data: Vec<DisambiguatedDefPathData>,
pub def_id: DefId,
}
#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] #[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum DefPathData { pub enum DefPathData {
@ -175,8 +227,6 @@ pub enum DefPathData {
// they are treated specially by the `def_path` function. // they are treated specially by the `def_path` function.
/// The crate root (marker) /// The crate root (marker)
CrateRoot, CrateRoot,
/// An inlined root
InlinedRoot(Box<InlinedRootPath>),
// Catch-all for random DefId things like DUMMY_NODE_ID // Catch-all for random DefId things like DUMMY_NODE_ID
Misc, Misc,
@ -218,23 +268,30 @@ impl Definitions {
/// Create new empty definition map. /// Create new empty definition map.
pub fn new() -> Definitions { pub fn new() -> Definitions {
Definitions { Definitions {
data: vec![], table: DefPathTable {
key_map: FxHashMap(), index_to_key: vec![],
node_map: NodeMap(), key_to_index: FxHashMap(),
},
node_to_def_index: NodeMap(),
def_index_to_node: vec![],
} }
} }
pub fn def_path_table(&self) -> &DefPathTable {
&self.table
}
/// Get the number of definitions. /// Get the number of definitions.
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
self.data.len() self.def_index_to_node.len()
} }
pub fn def_key(&self, index: DefIndex) -> DefKey { pub fn def_key(&self, index: DefIndex) -> DefKey {
self.data[index.as_usize()].key.clone() self.table.def_key(index)
} }
pub fn def_index_for_def_key(&self, key: DefKey) -> Option<DefIndex> { pub fn def_index_for_def_key(&self, key: DefKey) -> Option<DefIndex> {
self.key_map.get(&key).cloned() self.table.def_index_for_def_key(&key)
} }
/// Returns the path from the crate root to `index`. The root /// Returns the path from the crate root to `index`. The root
@ -247,7 +304,7 @@ impl Definitions {
} }
pub fn opt_def_index(&self, node: ast::NodeId) -> Option<DefIndex> { pub fn opt_def_index(&self, node: ast::NodeId) -> Option<DefIndex> {
self.node_map.get(&node).cloned() self.node_to_def_index.get(&node).cloned()
} }
pub fn opt_local_def_id(&self, node: ast::NodeId) -> Option<DefId> { pub fn opt_local_def_id(&self, node: ast::NodeId) -> Option<DefId> {
@ -260,8 +317,8 @@ impl Definitions {
pub fn as_local_node_id(&self, def_id: DefId) -> Option<ast::NodeId> { pub fn as_local_node_id(&self, def_id: DefId) -> Option<ast::NodeId> {
if def_id.krate == LOCAL_CRATE { if def_id.krate == LOCAL_CRATE {
assert!(def_id.index.as_usize() < self.data.len()); assert!(def_id.index.as_usize() < self.def_index_to_node.len());
Some(self.data[def_id.index.as_usize()].node_id) Some(self.def_index_to_node[def_id.index.as_usize()])
} else { } else {
None None
} }
@ -276,16 +333,13 @@ impl Definitions {
debug!("create_def_with_parent(parent={:?}, node_id={:?}, data={:?})", debug!("create_def_with_parent(parent={:?}, node_id={:?}, data={:?})",
parent, node_id, data); parent, node_id, data);
assert!(!self.node_map.contains_key(&node_id), assert!(!self.node_to_def_index.contains_key(&node_id),
"adding a def'n for node-id {:?} and data {:?} but a previous def'n exists: {:?}", "adding a def'n for node-id {:?} and data {:?} but a previous def'n exists: {:?}",
node_id, node_id,
data, data,
self.data[self.node_map[&node_id].as_usize()]); self.table.def_key(self.node_to_def_index[&node_id]));
assert!(parent.is_some() ^ match data { assert!(parent.is_some() ^ (data == DefPathData::CrateRoot));
DefPathData::CrateRoot | DefPathData::InlinedRoot(_) => true,
_ => false,
});
// Find a unique DefKey. This basically means incrementing the disambiguator // Find a unique DefKey. This basically means incrementing the disambiguator
// until we get no match. // until we get no match.
@ -297,20 +351,18 @@ impl Definitions {
} }
}; };
while self.key_map.contains_key(&key) { while self.table.contains_key(&key) {
key.disambiguated_data.disambiguator += 1; key.disambiguated_data.disambiguator += 1;
} }
debug!("create_def_with_parent: after disambiguation, key = {:?}", key); debug!("create_def_with_parent: after disambiguation, key = {:?}", key);
// Create the definition. // Create the definition.
let index = DefIndex::new(self.data.len()); let index = self.table.insert(key);
self.data.push(DefData { key: key.clone(), node_id: node_id }); debug!("create_def_with_parent: def_index_to_node[{:?} <-> {:?}", index, node_id);
debug!("create_def_with_parent: node_map[{:?}] = {:?}", node_id, index); self.node_to_def_index.insert(node_id, index);
self.node_map.insert(node_id, index); assert_eq!(index.as_usize(), self.def_index_to_node.len());
debug!("create_def_with_parent: key_map[{:?}] = {:?}", key, index); self.def_index_to_node.push(node_id);
self.key_map.insert(key, index);
index index
} }
@ -332,7 +384,6 @@ impl DefPathData {
Impl | Impl |
CrateRoot | CrateRoot |
InlinedRoot(_) |
Misc | Misc |
ClosureExpr | ClosureExpr |
StructCtor | StructCtor |
@ -359,9 +410,6 @@ impl DefPathData {
// note that this does not show up in user printouts // note that this does not show up in user printouts
CrateRoot => "{{root}}", CrateRoot => "{{root}}",
// note that this does not show up in user printouts
InlinedRoot(_) => "{{inlined-root}}",
Impl => "{{impl}}", Impl => "{{impl}}",
Misc => "{{?}}", Misc => "{{?}}",
ClosureExpr => "{{closure}}", ClosureExpr => "{{closure}}",
@ -377,4 +425,3 @@ impl DefPathData {
self.as_interned_str().to_string() self.as_interned_str().to_string()
} }
} }

View file

@ -13,7 +13,7 @@ use self::MapEntry::*;
use self::collector::NodeCollector; use self::collector::NodeCollector;
pub use self::def_collector::{DefCollector, MacroInvocationData}; pub use self::def_collector::{DefCollector, MacroInvocationData};
pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData, pub use self::definitions::{Definitions, DefKey, DefPath, DefPathData,
DisambiguatedDefPathData, InlinedRootPath}; DisambiguatedDefPathData};
use dep_graph::{DepGraph, DepNode}; use dep_graph::{DepGraph, DepNode};
@ -221,22 +221,14 @@ pub struct Map<'ast> {
/// plain old integers. /// plain old integers.
map: RefCell<Vec<MapEntry<'ast>>>, map: RefCell<Vec<MapEntry<'ast>>>,
definitions: RefCell<Definitions>, definitions: Definitions,
/// All NodeIds that are numerically greater or equal to this value come /// All NodeIds that are numerically greater or equal to this value come
/// from inlined items. /// from inlined items.
local_node_id_watermark: NodeId, local_node_id_watermark: NodeId,
/// All def-indices that are numerically greater or equal to this value come
/// from inlined items.
local_def_id_watermark: usize,
} }
impl<'ast> Map<'ast> { impl<'ast> Map<'ast> {
pub fn is_inlined_def_id(&self, id: DefId) -> bool {
id.is_local() && id.index.as_usize() >= self.local_def_id_watermark
}
pub fn is_inlined_node_id(&self, id: NodeId) -> bool { pub fn is_inlined_node_id(&self, id: NodeId) -> bool {
id >= self.local_node_id_watermark id >= self.local_node_id_watermark
} }
@ -262,7 +254,6 @@ impl<'ast> Map<'ast> {
EntryItem(_, item) => { EntryItem(_, item) => {
assert_eq!(id, item.id); assert_eq!(id, item.id);
let def_id = self.local_def_id(id); let def_id = self.local_def_id(id);
assert!(!self.is_inlined_def_id(def_id));
if let Some(last_id) = last_expr { if let Some(last_id) = last_expr {
// The body of the item may have a separate dep node // The body of the item may have a separate dep node
@ -278,7 +269,6 @@ impl<'ast> Map<'ast> {
EntryImplItem(_, item) => { EntryImplItem(_, item) => {
let def_id = self.local_def_id(id); let def_id = self.local_def_id(id);
assert!(!self.is_inlined_def_id(def_id));
if let Some(last_id) = last_expr { if let Some(last_id) = last_expr {
// The body of the item may have a separate dep node // The body of the item may have a separate dep node
@ -392,12 +382,16 @@ impl<'ast> Map<'ast> {
} }
pub fn num_local_def_ids(&self) -> usize { pub fn num_local_def_ids(&self) -> usize {
self.definitions.borrow().len() self.definitions.len()
}
pub fn definitions(&self) -> &Definitions {
&self.definitions
} }
pub fn def_key(&self, def_id: DefId) -> DefKey { pub fn def_key(&self, def_id: DefId) -> DefKey {
assert!(def_id.is_local()); assert!(def_id.is_local());
self.definitions.borrow().def_key(def_id.index) self.definitions.def_key(def_id.index)
} }
pub fn def_path_from_id(&self, id: NodeId) -> Option<DefPath> { pub fn def_path_from_id(&self, id: NodeId) -> Option<DefPath> {
@ -408,11 +402,11 @@ impl<'ast> Map<'ast> {
pub fn def_path(&self, def_id: DefId) -> DefPath { pub fn def_path(&self, def_id: DefId) -> DefPath {
assert!(def_id.is_local()); assert!(def_id.is_local());
self.definitions.borrow().def_path(def_id.index) self.definitions.def_path(def_id.index)
} }
pub fn def_index_for_def_key(&self, def_key: DefKey) -> Option<DefIndex> { pub fn def_index_for_def_key(&self, def_key: DefKey) -> Option<DefIndex> {
self.definitions.borrow().def_index_for_def_key(def_key) self.definitions.def_index_for_def_key(def_key)
} }
pub fn local_def_id(&self, node: NodeId) -> DefId { pub fn local_def_id(&self, node: NodeId) -> DefId {
@ -423,11 +417,11 @@ impl<'ast> Map<'ast> {
} }
pub fn opt_local_def_id(&self, node: NodeId) -> Option<DefId> { pub fn opt_local_def_id(&self, node: NodeId) -> Option<DefId> {
self.definitions.borrow().opt_local_def_id(node) self.definitions.opt_local_def_id(node)
} }
pub fn as_local_node_id(&self, def_id: DefId) -> Option<NodeId> { pub fn as_local_node_id(&self, def_id: DefId) -> Option<NodeId> {
self.definitions.borrow().as_local_node_id(def_id) self.definitions.as_local_node_id(def_id)
} }
fn entry_count(&self) -> usize { fn entry_count(&self) -> usize {
@ -930,23 +924,19 @@ pub fn map_crate<'ast>(forest: &'ast mut Forest,
} }
let local_node_id_watermark = NodeId::new(map.len()); let local_node_id_watermark = NodeId::new(map.len());
let local_def_id_watermark = definitions.len();
Map { Map {
forest: forest, forest: forest,
dep_graph: forest.dep_graph.clone(), dep_graph: forest.dep_graph.clone(),
map: RefCell::new(map), map: RefCell::new(map),
definitions: RefCell::new(definitions), definitions: definitions,
local_node_id_watermark: local_node_id_watermark, local_node_id_watermark: local_node_id_watermark,
local_def_id_watermark: local_def_id_watermark,
} }
} }
/// Used for items loaded from external crate that are being inlined into this /// Used for items loaded from external crate that are being inlined into this
/// crate. /// crate.
pub fn map_decoded_item<'ast>(map: &Map<'ast>, pub fn map_decoded_item<'ast>(map: &Map<'ast>,
parent_def_path: DefPath,
parent_def_id: DefId,
ii: InlinedItem, ii: InlinedItem,
ii_parent_id: NodeId) ii_parent_id: NodeId)
-> &'ast InlinedItem { -> &'ast InlinedItem {
@ -954,18 +944,9 @@ pub fn map_decoded_item<'ast>(map: &Map<'ast>,
let ii = map.forest.inlined_items.alloc(ii); let ii = map.forest.inlined_items.alloc(ii);
let defs = &mut *map.definitions.borrow_mut();
let mut def_collector = DefCollector::extend(ii_parent_id,
parent_def_path.clone(),
parent_def_id,
defs);
def_collector.walk_item(ii, map.krate());
let mut collector = NodeCollector::extend(map.krate(), let mut collector = NodeCollector::extend(map.krate(),
ii, ii,
ii_parent_id, ii_parent_id,
parent_def_path,
parent_def_id,
mem::replace(&mut *map.map.borrow_mut(), vec![])); mem::replace(&mut *map.map.borrow_mut(), vec![]));
ii.visit(&mut collector); ii.visit(&mut collector);
*map.map.borrow_mut() = collector.map; *map.map.borrow_mut() = collector.map;

View file

@ -105,15 +105,18 @@ pub struct LifetimeDef {
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)] #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash)]
pub struct Path { pub struct Path {
pub span: Span, pub span: Span,
/// A `::foo` path, is relative to the crate root rather than current
/// module (like paths in an import).
pub global: bool,
/// The definition that the path resolved to. /// The definition that the path resolved to.
pub def: Def, pub def: Def,
/// The segments in the path: the things separated by `::`. /// The segments in the path: the things separated by `::`.
pub segments: HirVec<PathSegment>, pub segments: HirVec<PathSegment>,
} }
impl Path {
pub fn is_global(&self) -> bool {
!self.segments.is_empty() && self.segments[0].name == keywords::CrateRoot.name()
}
}
impl fmt::Debug for Path { impl fmt::Debug for Path {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "path({})", print::path_to_string(self)) write!(f, "path({})", print::path_to_string(self))
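
The new `is_global` helper replaces the removed `global: bool` flag: a path is now global exactly when its first segment is the crate-root keyword. A trivial standalone equivalent, with segment names as plain strings:

    const CRATE_ROOT: &'static str = "{{root}}";

    fn is_global(segments: &[&str]) -> bool {
        !segments.is_empty() && segments[0] == CRATE_ROOT
    }

    fn main() {
        assert!(is_global(&[CRATE_ROOT, "std", "vec", "Vec"])); // `::std::vec::Vec`
        assert!(!is_global(&["std", "vec", "Vec"]));            // `std::vec::Vec`
    }
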
@ -475,8 +478,6 @@ pub struct MacroDef {
pub attrs: HirVec<Attribute>, pub attrs: HirVec<Attribute>,
pub id: NodeId, pub id: NodeId,
pub span: Span, pub span: Span,
pub imported_from: Option<Name>,
pub allow_internal_unstable: bool,
pub body: HirVec<TokenTree>, pub body: HirVec<TokenTree>,
} }
@ -1533,8 +1534,6 @@ pub struct ItemId {
pub id: NodeId, pub id: NodeId,
} }
// FIXME (#3300): Should allow items to be anonymous. Right now
// we just use dummy names for anon items.
/// An item /// An item
/// ///
/// The name might be a dummy name in case of anonymous items /// The name might be a dummy name in case of anonymous items

View file

@ -1643,17 +1643,14 @@ impl<'a> State<'a> {
-> io::Result<()> { -> io::Result<()> {
self.maybe_print_comment(path.span.lo)?; self.maybe_print_comment(path.span.lo)?;
let mut first = !path.global; for (i, segment) in path.segments.iter().enumerate() {
for segment in &path.segments { if i > 0 {
if first {
first = false
} else {
word(&mut self.s, "::")? word(&mut self.s, "::")?
} }
if segment.name != keywords::CrateRoot.name() && segment.name != "$crate" {
self.print_name(segment.name)?; self.print_name(segment.name)?;
self.print_path_parameters(&segment.parameters, colons_before_params)?;
self.print_path_parameters(&segment.parameters, colons_before_params)?; }
} }
Ok(()) Ok(())
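
The pretty-printer change can be read as: print `::` between segments as before, but render the crate-root and `$crate` segments themselves as empty, so a global path still comes out with a leading `::`. A simplified sketch of that logic, working on plain strings:

    fn path_to_string(segments: &[&str]) -> String {
        let mut out = String::new();
        for (i, segment) in segments.iter().enumerate() {
            if i > 0 {
                out.push_str("::");
            }
            // The crate-root marker and `$crate` print as nothing.
            if *segment != "{{root}}" && *segment != "$crate" {
                out.push_str(segment);
            }
        }
        out
    }

    fn main() {
        assert_eq!(path_to_string(&["{{root}}", "foo", "bar"]), "::foo::bar");
        assert_eq!(path_to_string(&["foo", "bar"]), "foo::bar");
    }
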
@ -1673,15 +1670,14 @@ impl<'a> State<'a> {
space(&mut self.s)?; space(&mut self.s)?;
self.word_space("as")?; self.word_space("as")?;
let mut first = !path.global; for (i, segment) in path.segments[..path.segments.len() - 1].iter().enumerate() {
for segment in &path.segments[..path.segments.len() - 1] { if i > 0 {
if first {
first = false
} else {
word(&mut self.s, "::")? word(&mut self.s, "::")?
} }
self.print_name(segment.name)?; if segment.name != keywords::CrateRoot.name() && segment.name != "$crate" {
self.print_path_parameters(&segment.parameters, colons_before_params)?; self.print_name(segment.name)?;
self.print_path_parameters(&segment.parameters, colons_before_params)?;
}
} }
word(&mut self.s, ">")?; word(&mut self.s, ">")?;

View file

@ -1620,7 +1620,6 @@ impl<'a, 'gcx, 'tcx> Rebuilder<'a, 'gcx, 'tcx> {
new_segs.push(new_seg); new_segs.push(new_seg);
hir::Path { hir::Path {
span: path.span, span: path.span,
global: path.global,
def: path.def, def: path.def,
segments: new_segs.into() segments: new_segs.into()
} }

View file

@ -1367,9 +1367,10 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
cause: &ObligationCause<'tcx>, cause: &ObligationCause<'tcx>,
expected: Ty<'tcx>, expected: Ty<'tcx>,
actual: Ty<'tcx>, actual: Ty<'tcx>,
err: TypeError<'tcx>) { err: TypeError<'tcx>)
-> DiagnosticBuilder<'tcx> {
let trace = TypeTrace::types(cause, true, expected, actual); let trace = TypeTrace::types(cause, true, expected, actual);
self.report_and_explain_type_error(trace, &err).emit(); self.report_and_explain_type_error(trace, &err)
} }
pub fn report_conflicting_default_types(&self, pub fn report_conflicting_default_types(&self,
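
`report_mismatched_types` now returns the diagnostic builder instead of emitting it, so callers can attach extra notes before calling `.emit()`. A self-contained sketch of that pattern with a toy builder, not rustc's `DiagnosticBuilder`:

    struct DiagnosticBuilder {
        lines: Vec<String>,
    }

    impl DiagnosticBuilder {
        fn new(msg: &str) -> Self {
            DiagnosticBuilder { lines: vec![format!("error: {}", msg)] }
        }
        fn note(&mut self, msg: &str) -> &mut Self {
            self.lines.push(format!("note: {}", msg));
            self
        }
        fn emit(&self) {
            for line in &self.lines {
                eprintln!("{}", line);
            }
        }
    }

    // Returning the builder lets the caller decorate the error before emitting it.
    fn report_mismatched_types(expected: &str, actual: &str) -> DiagnosticBuilder {
        DiagnosticBuilder::new(&format!("mismatched types: expected `{}`, found `{}`", expected, actual))
    }

    fn main() {
        let mut err = report_mismatched_types("u32", "&str");
        err.note("expected due to the function's return type");
        err.emit();
    }
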

View file

@ -24,7 +24,6 @@
#![cfg_attr(not(stage0), deny(warnings))] #![cfg_attr(not(stage0), deny(warnings))]
#![feature(associated_consts)] #![feature(associated_consts)]
#![feature(borrow_state)]
#![feature(box_patterns)] #![feature(box_patterns)]
#![feature(box_syntax)] #![feature(box_syntax)]
#![feature(collections)] #![feature(collections)]

View file

@ -211,6 +211,12 @@ declare_lint! {
not named `mod.rs`" not named `mod.rs`"
} }
declare_lint! {
pub LEGACY_IMPORTS,
Warn,
"detects names that resolve to ambiguous glob imports with RFC 1560"
}
declare_lint! { declare_lint! {
pub DEPRECATED, pub DEPRECATED,
Warn, Warn,
@ -257,6 +263,7 @@ impl LintPass for HardwiredLints {
PATTERNS_IN_FNS_WITHOUT_BODY, PATTERNS_IN_FNS_WITHOUT_BODY,
EXTRA_REQUIREMENT_IN_IMPL, EXTRA_REQUIREMENT_IN_IMPL,
LEGACY_DIRECTORY_OWNERSHIP, LEGACY_DIRECTORY_OWNERSHIP,
LEGACY_IMPORTS,
DEPRECATED DEPRECATED
) )
} }

View file

@ -25,7 +25,7 @@
use hir::def::{self, Def}; use hir::def::{self, Def};
use hir::def_id::{CrateNum, DefId, DefIndex}; use hir::def_id::{CrateNum, DefId, DefIndex};
use hir::map as hir_map; use hir::map as hir_map;
use hir::map::definitions::{Definitions, DefKey}; use hir::map::definitions::{Definitions, DefKey, DisambiguatedDefPathData};
use hir::svh::Svh; use hir::svh::Svh;
use middle::lang_items; use middle::lang_items;
use ty::{self, Ty, TyCtxt}; use ty::{self, Ty, TyCtxt};
@ -298,8 +298,7 @@ pub trait CrateStore<'tcx> {
// trait/impl-item info // trait/impl-item info
fn trait_of_item(&self, def_id: DefId) -> Option<DefId>; fn trait_of_item(&self, def_id: DefId) -> Option<DefId>;
fn associated_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) fn associated_item(&self, def: DefId) -> Option<ty::AssociatedItem>;
-> Option<ty::AssociatedItem>;
// flags // flags
fn is_const_fn(&self, did: DefId) -> bool; fn is_const_fn(&self, did: DefId) -> bool;
@ -336,12 +335,12 @@ pub trait CrateStore<'tcx> {
fn is_no_builtins(&self, cnum: CrateNum) -> bool; fn is_no_builtins(&self, cnum: CrateNum) -> bool;
// resolve // resolve
fn def_index_for_def_key(&self, fn retrace_path(&self,
cnum: CrateNum, cnum: CrateNum,
def: DefKey) path_data: &[DisambiguatedDefPathData])
-> Option<DefIndex>; -> Option<DefId>;
fn def_key(&self, def: DefId) -> hir_map::DefKey; fn def_key(&self, def: DefId) -> DefKey;
fn relative_def_path(&self, def: DefId) -> Option<hir_map::DefPath>; fn def_path(&self, def: DefId) -> hir_map::DefPath;
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>; fn struct_field_names(&self, def: DefId) -> Vec<ast::Name>;
fn item_children(&self, did: DefId) -> Vec<def::Export>; fn item_children(&self, did: DefId) -> Vec<def::Export>;
fn load_macro(&self, did: DefId, sess: &Session) -> LoadedMacro; fn load_macro(&self, did: DefId, sess: &Session) -> LoadedMacro;
@ -442,12 +441,6 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore {
// trait info // trait info
fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId> { vec![] } fn implementations_of_trait(&self, filter: Option<DefId>) -> Vec<DefId> { vec![] }
fn def_index_for_def_key(&self,
cnum: CrateNum,
def: DefKey)
-> Option<DefIndex> {
None
}
// impl info // impl info
fn associated_item_def_ids(&self, def_id: DefId) -> Vec<DefId> fn associated_item_def_ids(&self, def_id: DefId) -> Vec<DefId>
@ -462,8 +455,7 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore {
// trait/impl-item info // trait/impl-item info
fn trait_of_item(&self, def_id: DefId) -> Option<DefId> { bug!("trait_of_item") } fn trait_of_item(&self, def_id: DefId) -> Option<DefId> { bug!("trait_of_item") }
fn associated_item<'a>(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>, def: DefId) fn associated_item(&self, def: DefId) -> Option<ty::AssociatedItem> { bug!("associated_item") }
-> Option<ty::AssociatedItem> { bug!("associated_item") }
// flags // flags
fn is_const_fn(&self, did: DefId) -> bool { bug!("is_const_fn") } fn is_const_fn(&self, did: DefId) -> bool { bug!("is_const_fn") }
@ -508,8 +500,15 @@ impl<'tcx> CrateStore<'tcx> for DummyCrateStore {
fn is_no_builtins(&self, cnum: CrateNum) -> bool { bug!("is_no_builtins") } fn is_no_builtins(&self, cnum: CrateNum) -> bool { bug!("is_no_builtins") }
// resolve // resolve
fn def_key(&self, def: DefId) -> hir_map::DefKey { bug!("def_key") } fn retrace_path(&self,
fn relative_def_path(&self, def: DefId) -> Option<hir_map::DefPath> { cnum: CrateNum,
path_data: &[DisambiguatedDefPathData])
-> Option<DefId> {
None
}
fn def_key(&self, def: DefId) -> DefKey { bug!("def_key") }
fn def_path(&self, def: DefId) -> hir_map::DefPath {
bug!("relative_def_path") bug!("relative_def_path")
} }
fn struct_field_names(&self, def: DefId) -> Vec<ast::Name> { bug!("struct_field_names") } fn struct_field_names(&self, def: DefId) -> Vec<ast::Name> { bug!("struct_field_names") }

View file

@ -86,20 +86,7 @@ impl<'a, 'tcx> MarkSymbolVisitor<'a, 'tcx> {
} }
} }
fn handle_definition(&mut self, id: ast::NodeId, def: Def) { fn handle_definition(&mut self, def: Def) {
// If `bar` is a trait item, make sure to mark Foo as alive in `Foo::bar`
match def {
Def::AssociatedTy(..) | Def::Method(_) | Def::AssociatedConst(_)
if self.tcx.trait_of_item(def.def_id()).is_some() => {
if let Some(substs) = self.tcx.tables().item_substs.get(&id) {
if let ty::TyAdt(tyid, _) = substs.substs.type_at(0).sty {
self.check_def_id(tyid.did);
}
}
}
_ => {}
}
match def { match def {
Def::Const(_) | Def::AssociatedConst(..) => { Def::Const(_) | Def::AssociatedConst(..) => {
self.check_def_id(def.def_id()); self.check_def_id(def.def_id());
@ -241,7 +228,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> {
match expr.node { match expr.node {
hir::ExprPath(ref qpath @ hir::QPath::TypeRelative(..)) => { hir::ExprPath(ref qpath @ hir::QPath::TypeRelative(..)) => {
let def = self.tcx.tables().qpath_def(qpath, expr.id); let def = self.tcx.tables().qpath_def(qpath, expr.id);
self.handle_definition(expr.id, def); self.handle_definition(def);
} }
hir::ExprMethodCall(..) => { hir::ExprMethodCall(..) => {
self.lookup_and_handle_method(expr.id); self.lookup_and_handle_method(expr.id);
@ -281,7 +268,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> {
} }
PatKind::Path(ref qpath @ hir::QPath::TypeRelative(..)) => { PatKind::Path(ref qpath @ hir::QPath::TypeRelative(..)) => {
let def = self.tcx.tables().qpath_def(qpath, pat.id); let def = self.tcx.tables().qpath_def(qpath, pat.id);
self.handle_definition(pat.id, def); self.handle_definition(def);
} }
_ => () _ => ()
} }
@ -291,8 +278,8 @@ impl<'a, 'tcx> Visitor<'tcx> for MarkSymbolVisitor<'a, 'tcx> {
self.ignore_non_const_paths = false; self.ignore_non_const_paths = false;
} }
fn visit_path(&mut self, path: &'tcx hir::Path, id: ast::NodeId) { fn visit_path(&mut self, path: &'tcx hir::Path, _: ast::NodeId) {
self.handle_definition(id, path.def); self.handle_definition(path.def);
intravisit::walk_path(self, path); intravisit::walk_path(self, path);
} }
} }
@ -426,6 +413,7 @@ impl<'a, 'tcx> DeadVisitor<'a, 'tcx> {
hir::ItemStatic(..) hir::ItemStatic(..)
| hir::ItemConst(..) | hir::ItemConst(..)
| hir::ItemFn(..) | hir::ItemFn(..)
| hir::ItemTy(..)
| hir::ItemEnum(..) | hir::ItemEnum(..)
| hir::ItemStruct(..) | hir::ItemStruct(..)
| hir::ItemUnion(..) => true, | hir::ItemUnion(..) => true,

View file

@ -328,7 +328,6 @@ language_item_table! {
PanicFmtLangItem, "panic_fmt", panic_fmt; PanicFmtLangItem, "panic_fmt", panic_fmt;
ExchangeMallocFnLangItem, "exchange_malloc", exchange_malloc_fn; ExchangeMallocFnLangItem, "exchange_malloc", exchange_malloc_fn;
ExchangeFreeFnLangItem, "exchange_free", exchange_free_fn;
BoxFreeFnLangItem, "box_free", box_free_fn; BoxFreeFnLangItem, "box_free", box_free_fn;
StrDupUniqFnLangItem, "strdup_uniq", strdup_uniq_fn; StrDupUniqFnLangItem, "strdup_uniq", strdup_uniq_fn;

View file

@ -302,9 +302,7 @@ impl<'a, 'tcx> Visitor<'tcx> for Annotator<'a, 'tcx> {
} }
fn visit_macro_def(&mut self, md: &'tcx hir::MacroDef) { fn visit_macro_def(&mut self, md: &'tcx hir::MacroDef) {
if md.imported_from.is_none() { self.annotate(md.id, &md.attrs, md.span, AnnotationKind::Required, |_| {});
self.annotate(md.id, &md.attrs, md.span, AnnotationKind::Required, |_| {});
}
} }
} }
@ -373,9 +371,7 @@ impl<'a, 'tcx> Visitor<'tcx> for MissingStabilityAnnotations<'a, 'tcx> {
} }
fn visit_macro_def(&mut self, md: &'tcx hir::MacroDef) { fn visit_macro_def(&mut self, md: &'tcx hir::MacroDef) {
if md.imported_from.is_none() { self.check_missing_stability(md.id, md.span);
self.check_missing_stability(md.id, md.span);
}
} }
} }

View file

@ -142,7 +142,12 @@ impl CodeStats {
max_variant_size = cmp::max(max_variant_size, size); max_variant_size = cmp::max(max_variant_size, size);
let mut min_offset = discr_size; let mut min_offset = discr_size;
for field in fields {
// We want to print fields by increasing offset.
let mut fields = fields.clone();
fields.sort_by_key(|f| f.offset);
for field in fields.iter() {
let FieldInfo { ref name, offset, size, align } = *field; let FieldInfo { ref name, offset, size, align } = *field;
// Include field alignment in output only if it caused padding injection // Include field alignment in output only if it caused padding injection
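
This hunk simply orders a variant's fields by their layout offset before printing them, rather than using declaration order. A minimal standalone equivalent with an invented `FieldInfo` stand-in:

    #[derive(Clone, Debug, PartialEq)]
    struct FieldInfo {
        name: String,
        offset: u64,
        size: u64,
    }

    // Print fields in increasing offset order rather than declaration order.
    fn by_offset(fields: &[FieldInfo]) -> Vec<FieldInfo> {
        let mut fields = fields.to_vec();
        fields.sort_by_key(|f| f.offset);
        fields
    }

    fn main() {
        let fields = vec![
            FieldInfo { name: "b".into(), offset: 8, size: 4 },
            FieldInfo { name: "a".into(), offset: 0, size: 8 },
        ];
        let sorted = by_offset(&fields);
        assert_eq!(sorted[0].name, "a");
    }
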

Some files were not shown because too many files have changed in this diff.