
Auto merge of #39353 - alexcrichton:rollup, r=alexcrichton

Rollup of 21 pull requests

- Successful merges: #38617, #39284, #39285, #39290, #39302, #39305, #39306, #39307, #39311, #39313, #39314, #39321, #39325, #39332, #39335, #39344, #39345, #39346, #39348, #39350, #39351
- Failed merges:
bors 2017-01-28 02:50:51 +00:00
commit 0f8a296475
62 changed files with 782 additions and 447 deletions


@ -43,8 +43,8 @@ Brian Anderson <banderson@mozilla.com> <andersrb@gmail.com>
Brian Dawn <brian.t.dawn@gmail.com>
Brian Leibig <brian@brianleibig.com> Brian Leibig <brian.leibig@gmail.com>
Carl-Anton Ingmarsson <mail@carlanton.se> <ca.ingmarsson@gmail.com>
Carol (Nichols || Goulding) <carol.nichols@gmail.com> Carol Nichols <carol.nichols@gmail.com>
Carol (Nichols || Goulding) <carol.nichols@gmail.com> Carol Nichols <cnichols@thinkthroughmath.com>
Carol (Nichols || Goulding) <carol.nichols@gmail.com>
Carol (Nichols || Goulding) <cnichols@thinkthroughmath.com>
Carol Willing <carolcode@willingconsulting.com>
Chris C Cerami <chrisccerami@users.noreply.github.com> Chris C Cerami <chrisccerami@gmail.com>
Chris Pressey <cpressey@gmail.com>
@ -53,6 +53,7 @@ Clark Gaebel <cg.wowus.cg@gmail.com> <cgaebel@mozilla.com>
Clinton Ryan <clint.ryan3@gmail.com>
Corey Farwell <coreyf+rust@rwell.org> Corey Farwell <coreyf@rwell.org>
Corey Richardson <corey@octayn.net> Elaine "See More" Nemo <corey@octayn.net>
Cyryl Płotnicki <cyplo@cyplo.net>
Damien Schoof <damien.schoof@gmail.com>
Daniel Ramos <dan@daramos.com>
David Klein <david.klein@baesystemsdetica.com>
@ -102,6 +103,7 @@ Jason Toffaletti <toffaletti@gmail.com> Jason Toffaletti <jason@topsy.com>
Jauhien Piatlicki <jauhien@gentoo.org> Jauhien Piatlicki <jpiatlicki@zertisa.com>
Jay True <glacjay@gmail.com>
Jeremy Letang <letang.jeremy@gmail.com>
Jethro Beekman <github@jbeekman.nl>
Jihyun Yu <j.yu@navercorp.com> <yjh0502@gmail.com>
Jihyun Yu <j.yu@navercorp.com> jihyun <jihyun@nablecomm.com>
Jihyun Yu <j.yu@navercorp.com> Jihyun Yu <jihyun@nclab.kaist.ac.kr>


@ -110,9 +110,9 @@ before_deploy:
- mkdir -p deploy/$TRAVIS_COMMIT
- >
if [ "$TRAVIS_OS_NAME" == "osx" ]; then
cp build/dist/*.tar.gz deploy/$TRAVIS_COMMIT;
cp -r build/dist deploy/$TRAVIS_COMMIT;
else
cp obj/build/dist/*.tar.gz deploy/$TRAVIS_COMMIT;
cp -r obj/build/dist deploy/$TRAVIS_COMMIT;
fi
deploy:


@ -137,7 +137,7 @@ branches:
before_deploy:
- ps: |
New-Item -Path deploy -ItemType directory
Get-ChildItem -Path build\dist -Filter '*.tar.gz' | Move-Item -Destination deploy
Get-ChildItem -Path build\dist | Move-Item -Destination deploy
Get-ChildItem -Path deploy | Foreach-Object {
Push-AppveyorArtifact $_.FullName -FileName ${env:APPVEYOR_REPO_COMMIT}/$_
}
@ -151,7 +151,7 @@ deploy:
bucket: rust-lang-ci
set_public: true
region: us-east-1
artifact: /.*\.tar.gz/
artifact: /.*/
folder: rustc-builds
on:
branch: auto

src/Cargo.lock generated

@ -50,6 +50,14 @@ dependencies = [
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "build-manifest"
version = "0.1.0"
dependencies = [
"rustc-serialize 0.3.19 (registry+https://github.com/rust-lang/crates.io-index)",
"toml 0.1.30 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "build_helper"
version = "0.1.0"


@ -10,6 +10,7 @@ members = [
"tools/linkchecker",
"tools/rustbook",
"tools/tidy",
"tools/build-manifest",
]
# Curiously, compiletest will segfault if compiled with opt-level=3 on 64-bit


@ -78,6 +78,11 @@ pub struct Config {
pub cargo: Option<PathBuf>,
pub local_rebuild: bool,
// dist misc
pub dist_sign_folder: Option<PathBuf>,
pub dist_upload_addr: Option<String>,
pub dist_gpg_password_file: Option<PathBuf>,
// libstd features
pub debug_jemalloc: bool,
pub use_jemalloc: bool,
@ -123,6 +128,7 @@ struct TomlConfig {
llvm: Option<Llvm>,
rust: Option<Rust>,
target: Option<HashMap<String, TomlTarget>>,
dist: Option<Dist>,
}
/// TOML representation of various global build decisions.
@ -166,6 +172,13 @@ struct Llvm {
targets: Option<String>,
}
#[derive(RustcDecodable, Default, Clone)]
struct Dist {
sign_folder: Option<String>,
gpg_password_file: Option<String>,
upload_addr: Option<String>,
}
#[derive(RustcDecodable)]
enum StringOrBool {
String(String),
@ -352,6 +365,12 @@ impl Config {
}
}
if let Some(ref t) = toml.dist {
config.dist_sign_folder = t.sign_folder.clone().map(PathBuf::from);
config.dist_gpg_password_file = t.gpg_password_file.clone().map(PathBuf::from);
config.dist_upload_addr = t.upload_addr.clone();
}
return config
}


@ -242,3 +242,33 @@
# that this option only makes sense for MUSL targets that produce statically
# linked binaries
#musl-root = "..."
# =============================================================================
# Distribution options
#
# These options are related to distribution, mostly for the Rust project itself.
# You probably won't need to concern yourself with any of these options
# =============================================================================
[dist]
# This is the folder of artifacts that the build system will sign. All files in
# this directory will be signed with the default gpg key using the system `gpg`
# binary. The `asc` and `sha256` files will all be output into the standard dist
# output folder (currently `build/dist`)
#
# This folder should be populated ahead of time before the build system is
# invoked.
#sign-folder = "path/to/folder/to/sign"
# This is a file which contains the password of the default gpg key. This will
# be passed to `gpg` down the road when signing all files in `sign-folder`
# above. This should be stored in plaintext.
#gpg-password-file = "path/to/gpg/password"
# The remote address that all artifacts will eventually be uploaded to. The
# build system generates manifests which will point to these urls, and for the
# manifests to be correct they'll have to have the right URLs encoded.
#
# Note that this address should not contain a trailing slash as file names will
# be appended to it.
#upload-addr = "https://example.com/folder"


@ -22,7 +22,7 @@ use std::env;
use std::fs::{self, File};
use std::io::{Read, Write};
use std::path::{PathBuf, Path};
use std::process::Command;
use std::process::{Command, Stdio};
use build_helper::output;
@ -876,3 +876,34 @@ fn add_env(build: &Build, cmd: &mut Command, target: &str) {
cmd.env("CFG_PLATFORM", "x86");
}
}
pub fn hash_and_sign(build: &Build) {
let compiler = Compiler::new(0, &build.config.build);
let mut cmd = build.tool_cmd(&compiler, "build-manifest");
let sign = build.config.dist_sign_folder.as_ref().unwrap_or_else(|| {
panic!("\n\nfailed to specify `dist.sign-folder` in `config.toml`\n\n")
});
let addr = build.config.dist_upload_addr.as_ref().unwrap_or_else(|| {
panic!("\n\nfailed to specify `dist.upload-addr` in `config.toml`\n\n")
});
let file = build.config.dist_gpg_password_file.as_ref().unwrap_or_else(|| {
panic!("\n\nfailed to specify `dist.gpg-password-file` in `config.toml`\n\n")
});
let mut pass = String::new();
t!(t!(File::open(&file)).read_to_string(&mut pass));
let today = output(Command::new("date").arg("+%Y-%m-%d"));
cmd.arg(sign);
cmd.arg(distdir(build));
cmd.arg(today.trim());
cmd.arg(package_vers(build));
cmd.arg(addr);
t!(fs::create_dir_all(distdir(build)));
let mut child = t!(cmd.stdin(Stdio::piped()).spawn());
t!(child.stdin.take().unwrap().write_all(pass.as_bytes()));
let status = t!(child.wait());
assert!(status.success());
}


@ -513,6 +513,9 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
rules.build("tool-compiletest", "src/tools/compiletest")
.dep(|s| s.name("libtest"))
.run(move |s| compile::tool(build, s.stage, s.target, "compiletest"));
rules.build("tool-build-manifest", "src/tools/build-manifest")
.dep(|s| s.name("libstd"))
.run(move |s| compile::tool(build, s.stage, s.target, "build-manifest"));
// ========================================================================
// Documentation targets
@ -633,6 +636,13 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
.dep(|d| d.name("dist-cargo"))
.run(move |s| dist::extended(build, s.stage, s.target));
rules.dist("dist-sign", "hash-and-sign")
.host(true)
.only_build(true)
.only_host_build(true)
.dep(move |s| s.name("tool-build-manifest").target(&build.config.build).stage(0))
.run(move |_| dist::hash_and_sign(build));
rules.verify();
return rules;
}


@ -710,7 +710,7 @@ Please note that [`catch_unwind()`] will only catch unwinding panics, not
those that abort the process. See the documentation of [`catch_unwind()`]
for more information.
[`catch_unwind()`]: https://doc.rust-lang.org/std/panic/fn.catch_unwind.html
[`catch_unwind()`]: ../std/panic/fn.catch_unwind.html
# Representing opaque structs
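
The note above says `catch_unwind()` only catches unwinding panics, not aborts. A minimal sketch of that behaviour (the closure and panic message are illustrative, not part of the diff):

```rust
use std::panic;

fn main() {
    // An unwinding panic inside the closure is converted into an Err value.
    let caught = panic::catch_unwind(|| {
        panic!("boom");
    });
    assert!(caught.is_err());
    // Aborting panics (e.g. builds with panic = "abort") are not caught this way.
}
```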


@ -26,7 +26,7 @@ this is totally fine.
For instance, a custom implementation of `Box` might write `Drop` like this:
```rust
#![feature(alloc, heap_api, drop_in_place, unique)]
#![feature(alloc, heap_api, unique)]
extern crate alloc;
@ -57,7 +57,7 @@ use-after-free the `ptr` because when drop exits, it becomes inaccessible.
However this wouldn't work:
```rust
#![feature(alloc, heap_api, drop_in_place, unique)]
#![feature(alloc, heap_api, unique)]
extern crate alloc;
@ -135,7 +135,7 @@ The classic safe solution to overriding recursive drop and allowing moving out
of Self during `drop` is to use an Option:
```rust
#![feature(alloc, heap_api, drop_in_place, unique)]
#![feature(alloc, heap_api, unique)]
extern crate alloc;


@ -59,7 +59,7 @@ const MAX_REFCOUNT: usize = (isize::MAX) as usize;
/// as long as `T` implements [`Send`] and [`Sync`][sync]. The disadvantage is
/// that atomic operations are more expensive than ordinary memory accesses.
/// If you are not sharing reference-counted values between threads, consider
/// using [`rc::Rc`] for lower overhead. [`Rc`] is a safe default, because
/// using [`rc::Rc`][`Rc`] for lower overhead. [`Rc`] is a safe default, because
/// the compiler will catch any attempt to send an [`Rc`] between threads.
/// However, a library might choose `Arc` in order to give library consumers
/// more flexibility.
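
A minimal sketch of the trade-off described in the doc comment above: an `Arc` clone can be moved to another thread, whereas the same code with `Rc` would be rejected because `Rc` is not `Send` (the vector contents are arbitrary):

```rust
use std::sync::Arc;
use std::thread;

fn main() {
    let data = Arc::new(vec![1, 2, 3]);

    // Clone the Arc (a cheap reference-count bump) and move the clone into a thread.
    let handle = {
        let data = Arc::clone(&data);
        thread::spawn(move || data.iter().sum::<i32>())
    };

    assert_eq!(handle.join().unwrap(), 6);
    // Replacing Arc with Rc here is a compile error: Rc<Vec<i32>> is not Send.
}
```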


@ -17,6 +17,8 @@
reason = "matches collection reform specification, \
waiting for dust to settle",
issue = "37966")]
#![rustc_deprecated(since = "1.16.0", reason = "long since replaced")]
#![allow(deprecated)]
use core::marker;
use core::fmt;


@ -79,6 +79,7 @@ pub use btree_set::BTreeSet;
#[doc(no_inline)]
pub use linked_list::LinkedList;
#[doc(no_inline)]
#[allow(deprecated)]
pub use enum_set::EnumSet;
#[doc(no_inline)]
pub use vec_deque::VecDeque;


@ -509,7 +509,7 @@ impl<T> [T] {
core_slice::SliceExt::swap(self, a, b)
}
/// Reverse the order of elements in a slice, in place.
/// Reverses the order of elements in a slice, in place.
///
/// # Example
///
@ -1062,7 +1062,7 @@ impl<T> [T] {
core_slice::SliceExt::binary_search_by_key(self, b, f)
}
/// This is equivalent to `self.sort_by(|a, b| a.cmp(b))`.
/// Sorts the slice.
///
/// This sort is stable (i.e. does not reorder equal elements) and `O(n log n)` worst-case.
///
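
The reworded doc comments above describe `sort` as a stable sort equivalent to `sort_by(|a, b| a.cmp(b))`. A minimal sketch of both points (the data and the `sort_by_key` call are illustrative only):

```rust
fn main() {
    // Plain sort, equivalent to v.sort_by(|a, b| a.cmp(b)).
    let mut v = [5, 4, 1, 3, 2];
    v.sort();
    assert_eq!(v, [1, 2, 3, 4, 5]);

    // Stability: elements with equal keys keep their original relative order.
    let mut pairs = [(1, "first"), (0, "zero"), (1, "second")];
    pairs.sort_by_key(|&(k, _)| k);
    assert_eq!(pairs, [(0, "zero"), (1, "first"), (1, "second")]);
}
```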


@ -1607,7 +1607,6 @@ impl str {
/// Basic usage:
///
/// ```
/// # #![feature(str_replacen)]
/// let s = "foo foo 123 foo";
/// assert_eq!("new new 123 foo", s.replacen("foo", "new", 2));
/// assert_eq!("faa fao 123 foo", s.replacen('o', "a", 3));
@ -1617,13 +1616,10 @@ impl str {
/// When the pattern doesn't match:
///
/// ```
/// # #![feature(str_replacen)]
/// let s = "this is old";
/// assert_eq!(s, s.replacen("cookie monster", "little lamb", 10));
/// ```
#[unstable(feature = "str_replacen",
issue = "36436",
reason = "only need to replace first N matches")]
#[stable(feature = "str_replacen", since = "1.16.0")]
pub fn replacen<'a, P: Pattern<'a>>(&'a self, pat: P, to: &str, count: usize) -> String {
// Hope to reduce the times of re-allocation
let mut result = String::with_capacity(32);
@ -1795,11 +1791,9 @@ impl str {
/// Basic usage:
///
/// ```
/// #![feature(repeat_str)]
///
/// assert_eq!("abc".repeat(4), String::from("abcabcabcabc"));
/// ```
#[unstable(feature = "repeat_str", issue = "37079")]
#[stable(feature = "repeat_str", since = "1.16.0")]
pub fn repeat(&self, n: usize) -> String {
let mut s = String::with_capacity(self.len() * n);
s.extend((0..n).map(|_| self));


@ -1166,8 +1166,6 @@ impl String {
/// Basic usage:
///
/// ```
/// #![feature(insert_str)]
///
/// let mut s = String::from("bar");
///
/// s.insert_str(0, "foo");
@ -1175,9 +1173,7 @@ impl String {
/// assert_eq!("foobar", s);
/// ```
#[inline]
#[unstable(feature = "insert_str",
reason = "recent addition",
issue = "35553")]
#[stable(feature = "insert_str", since = "1.16.0")]
pub fn insert_str(&mut self, idx: usize, string: &str) {
assert!(self.is_char_boundary(idx));
@ -1270,7 +1266,6 @@ impl String {
/// # Examples
///
/// ```
/// # #![feature(string_split_off)]
/// # fn main() {
/// let mut hello = String::from("Hello, World!");
/// let world = hello.split_off(7);
@ -1279,7 +1274,7 @@ impl String {
/// # }
/// ```
#[inline]
#[unstable(feature = "string_split_off", issue = "38080")]
#[stable(feature = "string_split_off", since = "1.16.0")]
pub fn split_off(&mut self, mid: usize) -> String {
assert!(self.is_char_boundary(mid));
let other = self.vec.split_off(mid);


@ -820,15 +820,13 @@ impl<T> Vec<T> {
/// # Examples
///
/// ```
/// #![feature(dedup_by)]
///
/// let mut vec = vec![10, 20, 21, 30, 20];
///
/// vec.dedup_by_key(|i| *i / 10);
///
/// assert_eq!(vec, [10, 20, 30, 20]);
/// ```
#[unstable(feature = "dedup_by", reason = "recently added", issue = "37087")]
#[stable(feature = "dedup_by", since = "1.16.0")]
#[inline]
pub fn dedup_by_key<F, K>(&mut self, mut key: F) where F: FnMut(&mut T) -> K, K: PartialEq {
self.dedup_by(|a, b| key(a) == key(b))
@ -841,7 +839,6 @@ impl<T> Vec<T> {
/// # Examples
///
/// ```
/// #![feature(dedup_by)]
/// use std::ascii::AsciiExt;
///
/// let mut vec = vec!["foo", "bar", "Bar", "baz", "bar"];
@ -850,7 +847,7 @@ impl<T> Vec<T> {
///
/// assert_eq!(vec, ["foo", "bar", "baz", "bar"]);
/// ```
#[unstable(feature = "dedup_by", reason = "recently added", issue = "37087")]
#[stable(feature = "dedup_by", since = "1.16.0")]
pub fn dedup_by<F>(&mut self, mut same_bucket: F) where F: FnMut(&mut T, &mut T) -> bool {
unsafe {
// Although we have a mutable reference to `self`, we cannot make


@ -643,8 +643,6 @@ impl<T> VecDeque<T> {
/// # Examples
///
/// ```
/// #![feature(deque_extras)]
///
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
@ -655,9 +653,7 @@ impl<T> VecDeque<T> {
/// assert_eq!(buf.len(), 1);
/// assert_eq!(Some(&5), buf.get(0));
/// ```
#[unstable(feature = "deque_extras",
reason = "matches collection reform specification; waiting on panic semantics",
issue = "27788")]
#[stable(feature = "deque_extras", since = "1.16.0")]
pub fn truncate(&mut self, len: usize) {
for _ in len..self.len() {
self.pop_back();
@ -1779,8 +1775,6 @@ impl<T: Clone> VecDeque<T> {
/// # Examples
///
/// ```
/// #![feature(deque_extras)]
///
/// use std::collections::VecDeque;
///
/// let mut buf = VecDeque::new();
@ -1793,9 +1787,7 @@ impl<T: Clone> VecDeque<T> {
/// assert_eq!(a, b);
/// }
/// ```
#[unstable(feature = "deque_extras",
reason = "matches collection reform specification; waiting on panic semantics",
issue = "27788")]
#[stable(feature = "deque_extras", since = "1.16.0")]
pub fn resize(&mut self, new_len: usize, value: T) {
let len = self.len();


@ -1,268 +0,0 @@
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::mem;
use collections::enum_set::{CLike, EnumSet};
use self::Foo::*;
#[derive(Copy, Clone, PartialEq, Debug)]
#[repr(usize)]
enum Foo {
A,
B,
C,
}
impl CLike for Foo {
fn to_usize(&self) -> usize {
*self as usize
}
fn from_usize(v: usize) -> Foo {
unsafe { mem::transmute(v) }
}
}
#[test]
fn test_new() {
let e: EnumSet<Foo> = EnumSet::new();
assert!(e.is_empty());
}
#[test]
fn test_show() {
let mut e = EnumSet::new();
assert!(format!("{:?}", e) == "{}");
e.insert(A);
assert!(format!("{:?}", e) == "{A}");
e.insert(C);
assert!(format!("{:?}", e) == "{A, C}");
}
#[test]
fn test_len() {
let mut e = EnumSet::new();
assert_eq!(e.len(), 0);
e.insert(A);
e.insert(B);
e.insert(C);
assert_eq!(e.len(), 3);
e.remove(&A);
assert_eq!(e.len(), 2);
e.clear();
assert_eq!(e.len(), 0);
}
///////////////////////////////////////////////////////////////////////////
// intersect
#[test]
fn test_two_empties_do_not_intersect() {
let e1: EnumSet<Foo> = EnumSet::new();
let e2: EnumSet<Foo> = EnumSet::new();
assert!(e1.is_disjoint(&e2));
}
#[test]
fn test_empty_does_not_intersect_with_full() {
let e1: EnumSet<Foo> = EnumSet::new();
let mut e2: EnumSet<Foo> = EnumSet::new();
e2.insert(A);
e2.insert(B);
e2.insert(C);
assert!(e1.is_disjoint(&e2));
}
#[test]
fn test_disjoint_intersects() {
let mut e1: EnumSet<Foo> = EnumSet::new();
e1.insert(A);
let mut e2: EnumSet<Foo> = EnumSet::new();
e2.insert(B);
assert!(e1.is_disjoint(&e2));
}
#[test]
fn test_overlapping_intersects() {
let mut e1: EnumSet<Foo> = EnumSet::new();
e1.insert(A);
let mut e2: EnumSet<Foo> = EnumSet::new();
e2.insert(A);
e2.insert(B);
assert!(!e1.is_disjoint(&e2));
}
///////////////////////////////////////////////////////////////////////////
// contains and contains_elem
#[test]
fn test_superset() {
let mut e1: EnumSet<Foo> = EnumSet::new();
e1.insert(A);
let mut e2: EnumSet<Foo> = EnumSet::new();
e2.insert(A);
e2.insert(B);
let mut e3: EnumSet<Foo> = EnumSet::new();
e3.insert(C);
assert!(e1.is_subset(&e2));
assert!(e2.is_superset(&e1));
assert!(!e3.is_superset(&e2));
assert!(!e2.is_superset(&e3))
}
#[test]
fn test_contains() {
let mut e1: EnumSet<Foo> = EnumSet::new();
e1.insert(A);
assert!(e1.contains(&A));
assert!(!e1.contains(&B));
assert!(!e1.contains(&C));
e1.insert(A);
e1.insert(B);
assert!(e1.contains(&A));
assert!(e1.contains(&B));
assert!(!e1.contains(&C));
}
///////////////////////////////////////////////////////////////////////////
// iter
#[test]
fn test_iterator() {
let mut e1: EnumSet<Foo> = EnumSet::new();
let elems: Vec<Foo> = e1.iter().collect();
assert!(elems.is_empty());
e1.insert(A);
let elems: Vec<_> = e1.iter().collect();
assert_eq!(elems, [A]);
e1.insert(C);
let elems: Vec<_> = e1.iter().collect();
assert_eq!(elems, [A, C]);
e1.insert(C);
let elems: Vec<_> = e1.iter().collect();
assert_eq!(elems, [A, C]);
e1.insert(B);
let elems: Vec<_> = e1.iter().collect();
assert_eq!(elems, [A, B, C]);
}
///////////////////////////////////////////////////////////////////////////
// operators
#[test]
fn test_operators() {
let mut e1: EnumSet<Foo> = EnumSet::new();
e1.insert(A);
e1.insert(C);
let mut e2: EnumSet<Foo> = EnumSet::new();
e2.insert(B);
e2.insert(C);
let e_union = e1 | e2;
let elems: Vec<_> = e_union.iter().collect();
assert_eq!(elems, [A, B, C]);
let e_intersection = e1 & e2;
let elems: Vec<_> = e_intersection.iter().collect();
assert_eq!(elems, [C]);
// Another way to express intersection
let e_intersection = e1 - (e1 - e2);
let elems: Vec<_> = e_intersection.iter().collect();
assert_eq!(elems, [C]);
let e_subtract = e1 - e2;
let elems: Vec<_> = e_subtract.iter().collect();
assert_eq!(elems, [A]);
// Bitwise XOR of two sets, aka symmetric difference
let e_symmetric_diff = e1 ^ e2;
let elems: Vec<_> = e_symmetric_diff.iter().collect();
assert_eq!(elems, [A, B]);
// Another way to express symmetric difference
let e_symmetric_diff = (e1 - e2) | (e2 - e1);
let elems: Vec<_> = e_symmetric_diff.iter().collect();
assert_eq!(elems, [A, B]);
// Yet another way to express symmetric difference
let e_symmetric_diff = (e1 | e2) - (e1 & e2);
let elems: Vec<_> = e_symmetric_diff.iter().collect();
assert_eq!(elems, [A, B]);
}
#[test]
#[should_panic]
fn test_overflow() {
#[allow(dead_code)]
#[derive(Copy, Clone)]
#[repr(usize)]
enum Bar {
V00, V01, V02, V03, V04, V05, V06, V07, V08, V09,
V10, V11, V12, V13, V14, V15, V16, V17, V18, V19,
V20, V21, V22, V23, V24, V25, V26, V27, V28, V29,
V30, V31, V32, V33, V34, V35, V36, V37, V38, V39,
V40, V41, V42, V43, V44, V45, V46, V47, V48, V49,
V50, V51, V52, V53, V54, V55, V56, V57, V58, V59,
V60, V61, V62, V63, V64, V65, V66, V67, V68, V69,
}
impl CLike for Bar {
fn to_usize(&self) -> usize {
*self as usize
}
fn from_usize(v: usize) -> Bar {
unsafe { mem::transmute(v) }
}
}
let mut set = EnumSet::new();
set.insert(Bar::V64);
}
#[test]
fn test_extend_ref() {
let mut a = EnumSet::new();
a.insert(A);
a.extend(&[A, C]);
assert_eq!(a.len(), 2);
assert!(a.contains(&A));
assert!(a.contains(&C));
let mut b = EnumSet::new();
b.insert(B);
a.extend(&b);
assert_eq!(a.len(), 3);
assert!(a.contains(&A));
assert!(a.contains(&B));
assert!(a.contains(&C));
}


@ -18,17 +18,12 @@
#![feature(collections)]
#![feature(collections_bound)]
#![feature(const_fn)]
#![feature(dedup_by)]
#![feature(enumset)]
#![feature(exact_size_is_empty)]
#![feature(pattern)]
#![feature(placement_in_syntax)]
#![feature(rand)]
#![feature(repeat_str)]
#![feature(step_by)]
#![feature(str_escape)]
#![feature(str_replacen)]
#![feature(string_split_off)]
#![feature(test)]
#![feature(unboxed_closures)]
#![feature(unicode)]
@ -47,7 +42,6 @@ mod bench;
mod binary_heap;
mod btree;
mod cow_str;
mod enum_set;
mod fmt;
mod linked_list;
mod slice;


@ -307,6 +307,7 @@ pub trait BuildHasher {
/// [`BuildHasher`]: trait.BuildHasher.html
/// [`Default`]: ../default/trait.Default.html
/// [`Hasher`]: trait.Hasher.html
/// [`HashMap`]: ../../std/collections/struct.HashMap.html
#[stable(since = "1.7.0", feature = "build_hasher")]
pub struct BuildHasherDefault<H>(marker::PhantomData<H>);


@ -1108,9 +1108,9 @@ pub trait Iterator {
///
/// One of the keys to `collect()`'s power is that many things you might
/// not think of as 'collections' actually are. For example, a [`String`]
/// is a collection of [`char`]s. And a collection of [`Result<T, E>`] can
/// be thought of as single [`Result`]`<Collection<T>, E>`. See the examples
/// below for more.
/// is a collection of [`char`]s. And a collection of
/// [`Result<T, E>`][`Result`] can be thought of as single
/// [`Result`]`<Collection<T>, E>`. See the examples below for more.
///
/// Because `collect()` is so general, it can cause problems with type
/// inference. As such, `collect()` is one of the few times you'll see
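
The rewrapped sentence above describes collecting an iterator of `Result`s into a single `Result`. A minimal sketch of that behaviour (the values and the error string are made up):

```rust
fn main() {
    // All Ok: the values are gathered into the inner collection.
    let ok_values = vec![Ok(1), Ok(2), Ok(3)];
    let collected: Result<Vec<i32>, &str> = ok_values.into_iter().collect();
    assert_eq!(collected, Ok(vec![1, 2, 3]));

    // The first Err short-circuits and becomes the result of collect().
    let with_error: Vec<Result<i32, &str>> = vec![Ok(1), Err("nope"), Ok(3)];
    let collected: Result<Vec<i32>, &str> = with_error.into_iter().collect();
    assert_eq!(collected, Err("nope"));
}
```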


@ -448,7 +448,6 @@ impl<T: ?Sized> *const T {
/// Basic usage:
///
/// ```
/// #![feature(ptr_wrapping_offset)]
/// // Iterate using a raw pointer in increments of two elements
/// let data = [1u8, 2, 3, 4, 5];
/// let mut ptr: *const u8 = data.as_ptr();
@ -463,7 +462,7 @@ impl<T: ?Sized> *const T {
/// ptr = ptr.wrapping_offset(step);
/// }
/// ```
#[unstable(feature = "ptr_wrapping_offset", issue = "37570")]
#[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
#[inline]
pub fn wrapping_offset(self, count: isize) -> *const T where T: Sized {
unsafe {
@ -572,7 +571,6 @@ impl<T: ?Sized> *mut T {
/// Basic usage:
///
/// ```
/// #![feature(ptr_wrapping_offset)]
/// // Iterate using a raw pointer in increments of two elements
/// let mut data = [1u8, 2, 3, 4, 5];
/// let mut ptr: *mut u8 = data.as_mut_ptr();
@ -587,7 +585,7 @@ impl<T: ?Sized> *mut T {
/// }
/// assert_eq!(&data, &[0, 2, 0, 4, 0]);
/// ```
#[unstable(feature = "ptr_wrapping_offset", issue = "37570")]
#[stable(feature = "ptr_wrapping_offset", since = "1.16.0")]
#[inline]
pub fn wrapping_offset(self, count: isize) -> *mut T where T: Sized {
unsafe {


@ -840,8 +840,6 @@ impl<T: Default, E> Result<T, E> {
/// `Err` on error.
///
/// ```
/// #![feature(result_unwrap_or_default)]
///
/// let good_year_from_input = "1909";
/// let bad_year_from_input = "190blarg";
/// let good_year = good_year_from_input.parse().unwrap_or_default();
@ -854,7 +852,7 @@ impl<T: Default, E> Result<T, E> {
/// [`FromStr`]: ../../std/str/trait.FromStr.html
/// ```
#[inline]
#[unstable(feature = "result_unwrap_or_default", issue = "37516")]
#[stable(feature = "result_unwrap_or_default", since = "1.16.0")]
pub fn unwrap_or_default(self) -> T {
match self {
Ok(x) => x,


@ -24,7 +24,7 @@
//! same as [LLVM atomic orderings][1]. For more information see the [nomicon][2].
//!
//! [1]: http://llvm.org/docs/LangRef.html#memory-model-for-concurrent-operations
//! [2]: https://doc.rust-lang.org/nomicon/atomics.html
//! [2]: ../../../nomicon/atomics.html
//!
//! Atomic variables are safe to share between threads (they implement `Sync`)
//! but they do not themselves provide the mechanism for sharing and follow the
@ -144,7 +144,7 @@ unsafe impl<T> Sync for AtomicPtr<T> {}
/// LLVM's](http://llvm.org/docs/LangRef.html#memory-model-for-concurrent-operations).
///
/// For more information see the [nomicon][1].
/// [1]: https://doc.rust-lang.org/nomicon/atomics.html
/// [1]: ../../../nomicon/atomics.html
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Copy, Clone, Debug)]
pub enum Ordering {


@ -32,7 +32,6 @@
#![feature(unicode)]
#![feature(unique)]
#![feature(ordering_chaining)]
#![feature(result_unwrap_or_default)]
#![feature(ptr_unaligned)]
extern crate core;


@ -231,7 +231,11 @@ pub struct TypeckTables<'tcx> {
/// of the struct - this is needed because it is non-trivial to
/// normalize while preserving regions. This table is used only in
/// MIR construction and hence is not serialized to metadata.
pub fru_field_types: NodeMap<Vec<Ty<'tcx>>>
pub fru_field_types: NodeMap<Vec<Ty<'tcx>>>,
/// Maps a cast expression to its kind. This is keyed on the
/// *from* expression of the cast, not the cast itself.
pub cast_kinds: NodeMap<ty::cast::CastKind>,
}
impl<'tcx> TypeckTables<'tcx> {
@ -246,7 +250,8 @@ impl<'tcx> TypeckTables<'tcx> {
closure_tys: NodeMap(),
closure_kinds: NodeMap(),
liberated_fn_sigs: NodeMap(),
fru_field_types: NodeMap()
fru_field_types: NodeMap(),
cast_kinds: NodeMap(),
}
}
@ -533,10 +538,6 @@ pub struct GlobalCtxt<'tcx> {
/// expression defining the closure.
pub closure_kinds: RefCell<DepTrackingMap<maps::ClosureKinds<'tcx>>>,
/// Maps a cast expression to its kind. This is keyed on the
/// *from* expression of the cast, not the cast itself.
pub cast_kinds: RefCell<NodeMap<ty::cast::CastKind>>,
/// Maps Fn items to a collection of fragment infos.
///
/// The main goal is to identify data (each of which may be moved
@ -792,7 +793,6 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
custom_coerce_unsized_kinds: RefCell::new(DefIdMap()),
closure_tys: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
closure_kinds: RefCell::new(DepTrackingMap::new(dep_graph.clone())),
cast_kinds: RefCell::new(NodeMap()),
fragment_infos: RefCell::new(DefIdMap()),
crate_name: Symbol::intern(crate_name),
data_layout: data_layout,


@ -191,11 +191,7 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> {
}
}
TyRef(_, ref tm) => {
if tcx.sess.features.borrow().never_type {
tm.ty.uninhabited_from(visited, tcx)
} else {
DefIdForest::empty()
}
tm.ty.uninhabited_from(visited, tcx)
}
_ => DefIdForest::empty(),


@ -514,7 +514,7 @@ pub enum BorrowKind {
/// Data must be immutable but not aliasable. This kind of borrow
/// cannot currently be expressed by the user and is used only in
/// implicit closure bindings. It is needed when you the closure
/// implicit closure bindings. It is needed when the closure
/// is borrowing or mutating a mutable referent, e.g.:
///
/// let x: &mut isize = ...;


@ -379,19 +379,24 @@ impl<'tcx> Witness<'tcx> {
fn all_constructors<'a, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
pcx: PatternContext<'tcx>) -> Vec<Constructor>
{
let check_inhabited = cx.tcx.sess.features.borrow().never_type;
debug!("all_constructors({:?})", pcx.ty);
match pcx.ty.sty {
ty::TyBool =>
[true, false].iter().map(|b| ConstantValue(ConstVal::Bool(*b))).collect(),
ty::TySlice(ref sub_ty) => {
if sub_ty.is_uninhabited_from(cx.module, cx.tcx) {
if sub_ty.is_uninhabited_from(cx.module, cx.tcx)
&& check_inhabited
{
vec![Slice(0)]
} else {
(0..pcx.max_slice_length+1).map(|length| Slice(length)).collect()
}
}
ty::TyArray(ref sub_ty, length) => {
if length == 0 || !sub_ty.is_uninhabited_from(cx.module, cx.tcx) {
if length == 0 || !(sub_ty.is_uninhabited_from(cx.module, cx.tcx)
&& check_inhabited)
{
vec![Slice(length)]
} else {
vec![]
@ -403,7 +408,9 @@ fn all_constructors<'a, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
let forest = v.uninhabited_from(&mut visited,
cx.tcx, substs,
AdtKind::Enum);
if forest.contains(cx.tcx, cx.module) {
if forest.contains(cx.tcx, cx.module)
&& check_inhabited
{
None
} else {
Some(Variant(v.did))
@ -411,7 +418,9 @@ fn all_constructors<'a, 'tcx: 'a>(cx: &mut MatchCheckCtxt<'a, 'tcx>,
}).collect()
}
_ => {
if pcx.ty.is_uninhabited_from(cx.module, cx.tcx) {
if pcx.ty.is_uninhabited_from(cx.module, cx.tcx)
&& check_inhabited
{
vec![]
} else {
vec![Single]


@ -99,20 +99,24 @@ impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
}
PatternKind::Variant { adt_def, substs, variant_index, ref subpatterns } => {
let irrefutable = adt_def.variants.iter().enumerate().all(|(i, v)| {
i == variant_index || {
let mut visited = FxHashSet::default();
let node_set = v.uninhabited_from(&mut visited,
self.hir.tcx(),
substs,
adt_def.adt_kind());
!node_set.is_empty()
if self.hir.tcx().sess.features.borrow().never_type {
let irrefutable = adt_def.variants.iter().enumerate().all(|(i, v)| {
i == variant_index || {
let mut visited = FxHashSet::default();
let node_set = v.uninhabited_from(&mut visited,
self.hir.tcx(),
substs,
adt_def.adt_kind());
!node_set.is_empty()
}
});
if irrefutable {
let lvalue = match_pair.lvalue.downcast(adt_def, variant_index);
candidate.match_pairs.extend(self.field_match_pairs(lvalue, subpatterns));
Ok(())
} else {
Err(match_pair)
}
});
if irrefutable {
let lvalue = match_pair.lvalue.downcast(adt_def, variant_index);
candidate.match_pairs.extend(self.field_match_pairs(lvalue, subpatterns));
Ok(())
} else {
Err(match_pair)
}


@ -663,7 +663,7 @@ fn make_mirror_unadjusted<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
hir::ExprCast(ref source, _) => {
// Check to see if this cast is a "coercion cast", where the cast is actually done
// using a coercion (or is a no-op).
if let Some(&TyCastKind::CoercionCast) = cx.tcx.cast_kinds.borrow().get(&source.id) {
if let Some(&TyCastKind::CoercionCast) = cx.tables().cast_kinds.get(&source.id) {
// Convert the lexpr to a vexpr.
ExprKind::Use { source: source.to_ref() }
} else {


@ -92,7 +92,7 @@ pub fn write_mir_pretty<'a, 'b, 'tcx, I>(tcx: TyCtxt<'b, 'tcx, 'tcx>,
where I: Iterator<Item=DefId>, 'tcx: 'a
{
let mut first = true;
for def_id in iter {
for def_id in iter.filter(DefId::is_local) {
let mir = &tcx.item_mir(def_id);
if first {


@ -314,7 +314,7 @@ fn check_expr<'a, 'tcx>(v: &mut CheckCrateVisitor<'a, 'tcx>, e: &hir::Expr, node
}
hir::ExprCast(ref from, _) => {
debug!("Checking const cast(id={})", from.id);
match v.tcx.cast_kinds.borrow().get(&from.id) {
match v.tables.cast_kinds.get(&from.id) {
None => span_bug!(e.span, "no kind for cast"),
Some(&CastKind::PtrAddrCast) | Some(&CastKind::FnPtrAddrCast) => {
v.promotable = false;


@ -111,11 +111,16 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> {
fn nest_tables<F>(&mut self, item_id: NodeId, f: F)
where F: FnOnce(&mut DumpVisitor<'l, 'tcx, 'll, D>)
{
let old_tables = self.save_ctxt.tables;
let item_def_id = self.tcx.hir.local_def_id(item_id);
self.save_ctxt.tables = self.tcx.item_tables(item_def_id);
f(self);
self.save_ctxt.tables = old_tables;
match self.tcx.tables.borrow().get(&item_def_id) {
Some(tables) => {
let old_tables = self.save_ctxt.tables;
self.save_ctxt.tables = tables;
f(self);
self.save_ctxt.tables = old_tables;
}
None => f(self),
}
}
pub fn dump_crate_info(&mut self, name: &str, krate: &ast::Crate) {


@ -234,7 +234,8 @@ pub fn C_integral(t: Type, u: u64, sign_extend: bool) -> ValueRef {
pub fn C_big_integral(t: Type, u: u128, sign_extend: bool) -> ValueRef {
if ::std::mem::size_of::<u128>() == 16 {
unsafe {
llvm::LLVMConstIntOfArbitraryPrecision(t.to_ref(), 2, &u as *const u128 as *const u64)
let words = [u as u64, u.wrapping_shr(64) as u64];
llvm::LLVMConstIntOfArbitraryPrecision(t.to_ref(), 2, words.as_ptr())
}
} else {
// SNAP: remove after snapshot
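
The change above constructs the 128-bit constant from an explicit two-element word array, least-significant word first, instead of reinterpreting the `u128`'s bytes through a pointer cast. A small sketch of that split, with an arbitrary value:

```rust
fn main() {
    // Split a u128 into two u64 words, low word first, mirroring
    // `[u as u64, u.wrapping_shr(64) as u64]` from the diff above.
    let u: u128 = 0x0123_4567_89ab_cdef_0000_0000_dead_beef;
    let words = [u as u64, u.wrapping_shr(64) as u64];
    assert_eq!(words[0], 0x0000_0000_dead_beef);
    assert_eq!(words[1], 0x0123_4567_89ab_cdef);
}
```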


@ -348,12 +348,12 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> {
} else if self.try_coercion_cast(fcx) {
self.trivial_cast_lint(fcx);
debug!(" -> CoercionCast");
fcx.tcx.cast_kinds.borrow_mut().insert(self.expr.id, CastKind::CoercionCast);
fcx.tables.borrow_mut().cast_kinds.insert(self.expr.id, CastKind::CoercionCast);
} else {
match self.do_check(fcx) {
Ok(k) => {
debug!(" -> {:?}", k);
fcx.tcx.cast_kinds.borrow_mut().insert(self.expr.id, k);
fcx.tables.borrow_mut().cast_kinds.insert(self.expr.id, k);
}
Err(e) => self.report_cast_error(fcx, e),
};


@ -12,7 +12,7 @@
use super::FnCtxt;
use hir::def_id::DefId;
use rustc::ty::{Ty, TypeFoldable, PreferMutLvalue};
use rustc::ty::{Ty, TypeFoldable, PreferMutLvalue, TypeVariants};
use rustc::infer::type_variable::TypeVariableOrigin;
use syntax::ast;
use syntax::symbol::Symbol;
@ -204,6 +204,22 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
"binary operation `{}` cannot be applied to type `{}`",
op.node.as_str(),
lhs_ty);
if let TypeVariants::TyRef(_, ref ty_mut) = lhs_ty.sty {
if !self.infcx.type_moves_by_default(ty_mut.ty, lhs_expr.span) &&
self.lookup_op_method(expr, ty_mut.ty, vec![rhs_ty_var],
Symbol::intern(name), trait_def_id,
lhs_expr).is_ok() {
err.span_note(
lhs_expr.span,
&format!(
"this is a reference of type that `{}` can be applied to, \
you need to dereference this variable once for this \
operation to work",
op.node.as_str()));
}
}
let missing_trait = match op.node {
hir::BiAdd => Some("std::ops::Add"),
hir::BiSub => Some("std::ops::Sub"),


@ -51,6 +51,7 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
wbcx.visit_anon_types();
wbcx.visit_deferred_obligations(item_id);
wbcx.visit_type_nodes();
wbcx.visit_cast_types();
let tables = self.tcx.alloc_tables(wbcx.tables);
self.tcx.tables.borrow_mut().insert(item_def_id, tables);
@ -291,6 +292,15 @@ impl<'cx, 'gcx, 'tcx> WritebackCx<'cx, 'gcx, 'tcx> {
}
}
fn visit_cast_types(&mut self) {
if self.fcx.writeback_errors.get() {
return
}
self.tables.cast_kinds.extend(
self.fcx.tables.borrow().cast_kinds.iter().map(|(&key, &value)| (key, value)));
}
fn visit_anon_types(&self) {
if self.fcx.writeback_errors.get() {
return


@ -263,6 +263,7 @@ impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for OrphanChecker<'cx, 'tcx> {
.span_label(item.span, &format!("impl doesn't use types inside crate"))
.note(&format!("the impl does not reference any types defined in \
this crate"))
.note("define and implement a trait or new type instead")
.emit();
return;
}


@ -11,11 +11,9 @@
//! Implementations of serialization for structures found in libcollections
use std::hash::{Hash, BuildHasher};
use std::mem;
use {Decodable, Encodable, Decoder, Encoder};
use std::collections::{LinkedList, VecDeque, BTreeMap, BTreeSet, HashMap, HashSet};
use collections::enum_set::{EnumSet, CLike};
impl<
T: Encodable
@ -128,33 +126,6 @@ impl<
}
}
impl<
T: Encodable + CLike
> Encodable for EnumSet<T> {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
let mut bits = 0;
for item in self {
bits |= 1 << item.to_usize();
}
s.emit_usize(bits)
}
}
impl<
T: Decodable + CLike
> Decodable for EnumSet<T> {
fn decode<D: Decoder>(d: &mut D) -> Result<EnumSet<T>, D::Error> {
let bits = d.read_usize()?;
let mut set = EnumSet::new();
for bit in 0..(mem::size_of::<usize>()*8) {
if bits & (1 << bit) != 0 {
set.insert(CLike::from_usize(bit));
}
}
Ok(set)
}
}
impl<K, V, S> Encodable for HashMap<K, V, S>
where K: Encodable + Hash + Eq,
V: Encodable,


@ -30,7 +30,6 @@ Core encoding and decoding interfaces.
#![feature(box_syntax)]
#![feature(collections)]
#![feature(core_intrinsics)]
#![feature(enumset)]
#![feature(specialization)]
#![feature(staged_api)]
#![cfg_attr(test, feature(test))]


@ -222,7 +222,7 @@ fn _var_os(key: &OsStr) -> Option<OsString> {
/// Possible errors from the [`env::var`] function.
///
/// [env::var]: fn.var.html
/// [`env::var`]: fn.var.html
#[derive(Debug, PartialEq, Eq, Clone)]
#[stable(feature = "env", since = "1.0.0")]
pub enum VarError {


@ -404,7 +404,6 @@ impl File {
/// # Examples
///
/// ```
/// #![feature(set_permissions_atomic)]
/// # fn foo() -> std::io::Result<()> {
/// use std::fs::File;
///
@ -415,7 +414,7 @@ impl File {
/// # Ok(())
/// # }
/// ```
#[unstable(feature = "set_permissions_atomic", issue="37916")]
#[stable(feature = "set_permissions_atomic", since = "1.16.0")]
pub fn set_permissions(&self, perm: Permissions) -> io::Result<()> {
self.inner.set_permissions(perm.0)
}


@ -148,8 +148,6 @@ impl SocketAddr {
/// # Examples
///
/// ```
/// #![feature(sockaddr_checker)]
///
/// use std::net::{IpAddr, Ipv4Addr, SocketAddr};
///
/// fn main() {
@ -158,7 +156,7 @@ impl SocketAddr {
/// assert_eq!(socket.is_ipv6(), false);
/// }
/// ```
#[unstable(feature = "sockaddr_checker", issue = "36949")]
#[stable(feature = "sockaddr_checker", since = "1.16.0")]
pub fn is_ipv4(&self) -> bool {
match *self {
SocketAddr::V4(_) => true,
@ -172,8 +170,6 @@ impl SocketAddr {
/// # Examples
///
/// ```
/// #![feature(sockaddr_checker)]
///
/// use std::net::{IpAddr, Ipv6Addr, SocketAddr};
///
/// fn main() {
@ -183,7 +179,7 @@ impl SocketAddr {
/// assert_eq!(socket.is_ipv6(), true);
/// }
/// ```
#[unstable(feature = "sockaddr_checker", issue = "36949")]
#[stable(feature = "sockaddr_checker", since = "1.16.0")]
pub fn is_ipv6(&self) -> bool {
match *self {
SocketAddr::V4(_) => false,


@ -196,8 +196,6 @@ impl IpAddr {
/// # Examples
///
/// ```
/// #![feature(ipaddr_checker)]
///
/// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
///
/// fn main() {
@ -206,7 +204,7 @@ impl IpAddr {
/// false);
/// }
/// ```
#[unstable(feature = "ipaddr_checker", issue = "36949")]
#[stable(feature = "ipaddr_checker", since = "1.16.0")]
pub fn is_ipv4(&self) -> bool {
match *self {
IpAddr::V4(_) => true,
@ -219,8 +217,6 @@ impl IpAddr {
/// # Examples
///
/// ```
/// #![feature(ipaddr_checker)]
///
/// use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
///
/// fn main() {
@ -229,7 +225,7 @@ impl IpAddr {
/// true);
/// }
/// ```
#[unstable(feature = "ipaddr_checker", issue = "36949")]
#[stable(feature = "ipaddr_checker", since = "1.16.0")]
pub fn is_ipv6(&self) -> bool {
match *self {
IpAddr::V4(_) => false,


@ -99,17 +99,17 @@ impl ExitStatusExt for process::ExitStatus {
}
/// Windows-specific extensions to the `std::process::Command` builder
#[unstable(feature = "windows_process_extensions", issue = "37827")]
#[stable(feature = "windows_process_extensions", since = "1.16.0")]
pub trait CommandExt {
/// Sets the [process creation flags][1] to be passed to `CreateProcess`.
///
/// These will always be ORed with `CREATE_UNICODE_ENVIRONMENT`.
/// [1]: https://msdn.microsoft.com/en-us/library/windows/desktop/ms684863(v=vs.85).aspx
#[unstable(feature = "windows_process_extensions", issue = "37827")]
#[stable(feature = "windows_process_extensions", since = "1.16.0")]
fn creation_flags(&mut self, flags: u32) -> &mut process::Command;
}
#[unstable(feature = "windows_process_extensions", issue = "37827")]
#[stable(feature = "windows_process_extensions", since = "1.16.0")]
impl CommandExt for process::Command {
fn creation_flags(&mut self, flags: u32) -> &mut process::Command {
self.as_inner_mut().creation_flags(flags);


@ -391,7 +391,7 @@ impl Builder {
/// [`JoinHandle`]: ../../std/thread/struct.JoinHandle.html
/// [`join`]: ../../std/thread/struct.JoinHandle.html#method.join
/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
/// [`panic!`]: ../../std/macro.panic.html
/// [`panic`]: ../../std/macro.panic.html
/// [`Builder::spawn`]: ../../std/thread/struct.Builder.html#method.spawn
///
/// # Examples
@ -974,7 +974,7 @@ impl<T> JoinHandle<T> {
/// to [`panic`].
///
/// [`Err`]: ../../std/result/enum.Result.html#variant.Err
/// [`panic!`]: ../../std/macro.panic.html
/// [`panic`]: ../../std/macro.panic.html
///
/// # Examples
///


@ -154,14 +154,12 @@ impl Duration {
/// Basic usage:
///
/// ```
/// #![feature(duration_checked_ops)]
///
/// use std::time::Duration;
///
/// assert_eq!(Duration::new(0, 0).checked_add(Duration::new(0, 1)), Some(Duration::new(0, 1)));
/// assert_eq!(Duration::new(1, 0).checked_add(Duration::new(std::u64::MAX, 0)), None);
/// ```
#[unstable(feature = "duration_checked_ops", issue = "35774")]
#[stable(feature = "duration_checked_ops", since = "1.16.0")]
#[inline]
pub fn checked_add(self, rhs: Duration) -> Option<Duration> {
if let Some(mut secs) = self.secs.checked_add(rhs.secs) {
@ -194,14 +192,12 @@ impl Duration {
/// Basic usage:
///
/// ```
/// #![feature(duration_checked_ops)]
///
/// use std::time::Duration;
///
/// assert_eq!(Duration::new(0, 1).checked_sub(Duration::new(0, 0)), Some(Duration::new(0, 1)));
/// assert_eq!(Duration::new(0, 0).checked_sub(Duration::new(0, 1)), None);
/// ```
#[unstable(feature = "duration_checked_ops", issue = "35774")]
#[stable(feature = "duration_checked_ops", since = "1.16.0")]
#[inline]
pub fn checked_sub(self, rhs: Duration) -> Option<Duration> {
if let Some(mut secs) = self.secs.checked_sub(rhs.secs) {
@ -232,14 +228,12 @@ impl Duration {
/// Basic usage:
///
/// ```
/// #![feature(duration_checked_ops)]
///
/// use std::time::Duration;
///
/// assert_eq!(Duration::new(0, 500_000_001).checked_mul(2), Some(Duration::new(1, 2)));
/// assert_eq!(Duration::new(std::u64::MAX - 1, 0).checked_mul(2), None);
/// ```
#[unstable(feature = "duration_checked_ops", issue = "35774")]
#[stable(feature = "duration_checked_ops", since = "1.16.0")]
#[inline]
pub fn checked_mul(self, rhs: u32) -> Option<Duration> {
// Multiply nanoseconds as u64, because it cannot overflow that way.
@ -269,15 +263,13 @@ impl Duration {
/// Basic usage:
///
/// ```
/// #![feature(duration_checked_ops)]
///
/// use std::time::Duration;
///
/// assert_eq!(Duration::new(2, 0).checked_div(2), Some(Duration::new(1, 0)));
/// assert_eq!(Duration::new(1, 0).checked_div(2), Some(Duration::new(0, 500_000_000)));
/// assert_eq!(Duration::new(2, 0).checked_div(0), None);
/// ```
#[unstable(feature = "duration_checked_ops", issue = "35774")]
#[stable(feature = "duration_checked_ops", since = "1.16.0")]
#[inline]
pub fn checked_div(self, rhs: u32) -> Option<Duration> {
if rhs != 0 {


@ -80,6 +80,28 @@ impl Lit {
}
}
fn ident_can_begin_expr(ident: ast::Ident) -> bool {
let ident_token: Token = Ident(ident);
!ident_token.is_any_keyword() ||
ident_token.is_path_segment_keyword() ||
[
keywords::Box.name(),
keywords::Break.name(),
keywords::Continue.name(),
keywords::False.name(),
keywords::For.name(),
keywords::If.name(),
keywords::Loop.name(),
keywords::Match.name(),
keywords::Move.name(),
keywords::Return.name(),
keywords::True.name(),
keywords::Unsafe.name(),
keywords::While.name(),
].contains(&ident.name)
}
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug)]
pub enum Token {
/* Expression-operator symbols. */
@ -163,7 +185,7 @@ impl Token {
pub fn can_begin_expr(&self) -> bool {
match *self {
OpenDelim(..) => true,
Ident(..) => true,
Ident(ident) => ident_can_begin_expr(ident),
Literal(..) => true,
Not => true,
BinOp(Minus) => true,


@ -269,7 +269,7 @@ LARGE_INTEGER increment_all_parts(LARGE_INTEGER li) {
return li;
}
#if !(defined(WIN32) || defined(_WIN32) || defined(__WIN32)) && defined(__amd64__)
#if __SIZEOF_INT128__ == 16
unsigned __int128 identity(unsigned __int128 a) {
return a;


@ -11,6 +11,7 @@
impl Drop for u32 {} //~ ERROR E0117
//~^ NOTE impl doesn't use types inside crate
//~| NOTE the impl does not reference any types defined in this crate
//~| NOTE define and implement a trait or new type instead
fn main() {
}


@ -16,6 +16,7 @@ impl Copy for Foo { }
//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
//~| NOTE impl doesn't use types inside crate
//~| NOTE the impl does not reference any types defined in this crate
//~| NOTE define and implement a trait or new type instead
#[derive(Copy, Clone)]
struct Bar;


@ -0,0 +1,20 @@
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
let v = vec![1, 2, 3, 4, 5, 6, 7, 8, 9];
let vr = v.iter().filter(|x| {
x % 2 == 0
//~^ ERROR binary operation `%` cannot be applied to type `&&{integer}`
//~| NOTE this is a reference of type that `%` can be applied to
//~| NOTE an implementation of `std::ops::Rem` might be missing for `&&{integer}`
});
println!("{:?}", vr);
}


@ -0,0 +1,30 @@
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
pub fn main() {
return;
return ();
return as ();
return return as ();
return return return;
return if true {
()
} else {
()
};
loop {
return break as ();
}
return enum; //~ ERROR expected one of `.`, `;`, `?`, `}`, or an operator, found `enum`
}


@ -37,6 +37,7 @@ impl Copy for (MyType, MyType) {}
//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
//~| NOTE impl doesn't use types inside crate
//~| NOTE the impl does not reference any types defined in this crate
//~| NOTE define and implement a trait or new type instead
impl Copy for &'static NotSync {}
//~^ ERROR the trait `Copy` may not be implemented for this type
@ -46,8 +47,9 @@ impl Copy for [MyType] {}
//~^ ERROR the trait `Copy` may not be implemented for this type
//~| NOTE type is not a structure or enumeration
//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
//~| NOTE impl doesn't use types inside crate
//~| NOTE the impl does not reference any types defined in this crate
//~| NOTE define and implement a trait or new type instead
//~| NOTE impl doesn't use types inside crate
impl Copy for &'static [NotSync] {}
//~^ ERROR the trait `Copy` may not be implemented for this type
@ -55,6 +57,7 @@ impl Copy for &'static [NotSync] {}
//~| ERROR only traits defined in the current crate can be implemented for arbitrary types
//~| NOTE impl doesn't use types inside crate
//~| NOTE the impl does not reference any types defined in this crate
//~| NOTE define and implement a trait or new type instead
fn main() {
}


@ -1,4 +1,4 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
@ -8,12 +8,11 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
enum Void {}
fn main() {
let x: Result<u32, &'static Void> = Ok(23);
let _ = match x { //~ ERROR non-exhaustive
Ok(n) => n,
};
let a: &String = &"1".to_owned();
let b: &str = &"2";
let c = a + b;
//~^ ERROR binary operation `+` cannot be applied to type `&std::string::String`
//~| NOTE an implementation of `std::ops::Add` might be missing for `&std::string::String`
println!("{:?}", c);
}


@ -0,0 +1,50 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(slice_patterns)]
enum Void {}
fn main() {
let x: Result<u32, &'static Void> = Ok(23);
let _ = match x { //~ ERROR non-exhaustive
Ok(n) => n,
};
let x: &Void = unsafe { std::mem::uninitialized() };
let _ = match x {};
//~^ ERROR non-exhaustive
let x: (Void,) = unsafe { std::mem::uninitialized() };
let _ = match x {};
//~^ ERROR non-exhaustive
let x: [Void; 1] = unsafe { std::mem::uninitialized() };
let _ = match x {};
//~^ ERROR non-exhaustive
let x: &[Void] = unsafe { std::mem::uninitialized() };
let _ = match x { //~ ERROR non-exhaustive
&[] => (),
};
let x: Void = unsafe { std::mem::uninitialized() };
let _ = match x {}; // okay
let x: Result<u32, Void> = Ok(23);
let _ = match x { //~ ERROR non-exhaustive
Ok(x) => x,
};
let x: Result<u32, Void> = Ok(23);
let Ok(x) = x;
//~^ ERROR refutable
}


@ -10,8 +10,6 @@
// aux-build:fat_drop.rs
#![feature(drop_in_place)]
extern crate fat_drop;
fn main() {


@ -0,0 +1,8 @@
[package]
name = "build-manifest"
version = "0.1.0"
authors = ["Alex Crichton <alex@alexcrichton.com>"]
[dependencies]
toml = "0.1"
rustc-serialize = "0.3"


@ -0,0 +1,404 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
extern crate toml;
extern crate rustc_serialize;
use std::collections::HashMap;
use std::env;
use std::fs::File;
use std::io::{self, Read, Write};
use std::path::{PathBuf, Path};
use std::process::{Command, Stdio};
static HOSTS: &'static [&'static str] = &[
"aarch64-unknown-linux-gnu",
"arm-unknown-linux-gnueabi",
"arm-unknown-linux-gnueabihf",
"armv7-unknown-linux-gnueabihf",
"i686-apple-darwin",
"i686-pc-windows-gnu",
"i686-pc-windows-msvc",
"i686-unknown-linux-gnu",
"mips-unknown-linux-gnu",
"mips64-unknown-linux-gnuabi64",
"mips64el-unknown-linux-gnuabi64",
"mipsel-unknown-linux-gnu",
"powerpc-unknown-linux-gnu",
"powerpc64-unknown-linux-gnu",
"powerpc64le-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-gnu",
"x86_64-pc-windows-msvc",
"x86_64-unknown-freebsd",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-netbsd",
];
static TARGETS: &'static [&'static str] = &[
"aarch64-apple-ios",
"aarch64-linux-android",
"aarch64-unknown-linux-gnu",
"arm-linux-androideabi",
"arm-unknown-linux-gnueabi",
"arm-unknown-linux-gnueabihf",
"arm-unknown-linux-musleabi",
"arm-unknown-linux-musleabihf",
"armv7-apple-ios",
"armv7-linux-androideabi",
"armv7-unknown-linux-gnueabihf",
"armv7-unknown-linux-musleabihf",
"armv7s-apple-ios",
"asmjs-unknown-emscripten",
"i386-apple-ios",
"i586-pc-windows-msvc",
"i586-unknown-linux-gnu",
"i686-apple-darwin",
"i686-linux-android",
"i686-pc-windows-gnu",
"i686-pc-windows-msvc",
"i686-unknown-freebsd",
"i686-unknown-linux-gnu",
"i686-unknown-linux-musl",
"mips-unknown-linux-gnu",
"mips-unknown-linux-musl",
"mips64-unknown-linux-gnuabi64",
"mips64el-unknown-linux-gnuabi64",
"mipsel-unknown-linux-gnu",
"mipsel-unknown-linux-musl",
"powerpc-unknown-linux-gnu",
"powerpc64-unknown-linux-gnu",
"powerpc64le-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"wasm32-unknown-emscripten",
"x86_64-apple-darwin",
"x86_64-apple-ios",
"x86_64-pc-windows-gnu",
"x86_64-pc-windows-msvc",
"x86_64-rumprun-netbsd",
"x86_64-unknown-freebsd",
"x86_64-unknown-linux-gnu",
"x86_64-unknown-linux-musl",
"x86_64-unknown-netbsd",
];
static MINGW: &'static [&'static str] = &[
"i686-pc-windows-gnu",
"x86_64-pc-windows-gnu",
];
#[derive(RustcEncodable)]
struct Manifest {
manifest_version: String,
date: String,
pkg: HashMap<String, Package>,
}
#[derive(RustcEncodable)]
struct Package {
version: String,
target: HashMap<String, Target>,
}
#[derive(RustcEncodable)]
struct Target {
available: bool,
url: Option<String>,
hash: Option<String>,
components: Option<Vec<Component>>,
extensions: Option<Vec<Component>>,
}
#[derive(RustcEncodable)]
struct Component {
pkg: String,
target: String,
}
macro_rules! t {
($e:expr) => (match $e {
Ok(e) => e,
Err(e) => panic!("{} failed with {}", stringify!($e), e),
})
}
struct Builder {
channel: String,
input: PathBuf,
output: PathBuf,
gpg_passphrase: String,
digests: HashMap<String, String>,
s3_address: String,
date: String,
rust_version: String,
cargo_version: String,
}
fn main() {
let mut args = env::args().skip(1);
let input = PathBuf::from(args.next().unwrap());
let output = PathBuf::from(args.next().unwrap());
let date = args.next().unwrap();
let channel = args.next().unwrap();
let s3_address = args.next().unwrap();
let mut passphrase = String::new();
t!(io::stdin().read_to_string(&mut passphrase));
Builder {
channel: channel,
input: input,
output: output,
gpg_passphrase: passphrase,
digests: HashMap::new(),
s3_address: s3_address,
date: date,
rust_version: String::new(),
cargo_version: String::new(),
}.build();
}
impl Builder {
fn build(&mut self) {
self.rust_version = self.version("rust", "x86_64-unknown-linux-gnu");
self.cargo_version = self.version("cargo", "x86_64-unknown-linux-gnu");
self.digest_and_sign();
let manifest = self.build_manifest();
let manifest = toml::encode(&manifest).to_string();
let filename = format!("channel-rust-{}.toml", self.channel);
self.write_manifest(&manifest, &filename);
if self.channel != "beta" && self.channel != "nightly" {
self.write_manifest(&manifest, "channel-rust-stable.toml");
}
}
fn digest_and_sign(&mut self) {
for file in t!(self.input.read_dir()).map(|e| t!(e).path()) {
let filename = file.file_name().unwrap().to_str().unwrap();
let digest = self.hash(&file);
self.sign(&file);
assert!(self.digests.insert(filename.to_string(), digest).is_none());
}
}
fn build_manifest(&mut self) -> Manifest {
let mut manifest = Manifest {
manifest_version: "2".to_string(),
date: self.date.to_string(),
pkg: HashMap::new(),
};
self.package("rustc", &mut manifest.pkg, HOSTS);
self.package("cargo", &mut manifest.pkg, HOSTS);
self.package("rust-mingw", &mut manifest.pkg, MINGW);
self.package("rust-std", &mut manifest.pkg, TARGETS);
self.package("rust-docs", &mut manifest.pkg, TARGETS);
self.package("rust-src", &mut manifest.pkg, &["*"]);
let mut pkg = Package {
version: self.cached_version("rust").to_string(),
target: HashMap::new(),
};
for host in HOSTS {
let filename = self.filename("rust", host);
let digest = match self.digests.remove(&filename) {
Some(digest) => digest,
None => {
pkg.target.insert(host.to_string(), Target {
available: false,
url: None,
hash: None,
components: None,
extensions: None,
});
continue
}
};
let mut components = Vec::new();
let mut extensions = Vec::new();
// rustc/rust-std/cargo are all required, and so is rust-mingw if it's
// available for the target.
components.extend(vec![
Component { pkg: "rustc".to_string(), target: host.to_string() },
Component { pkg: "rust-std".to_string(), target: host.to_string() },
Component { pkg: "cargo".to_string(), target: host.to_string() },
]);
if host.contains("pc-windows-gnu") {
components.push(Component {
pkg: "rust-mingw".to_string(),
target: host.to_string(),
});
}
// Docs, other standard libraries, and the source package are all
// optional.
extensions.push(Component {
pkg: "rust-docs".to_string(),
target: host.to_string(),
});
for target in TARGETS {
if target != host {
extensions.push(Component {
pkg: "rust-std".to_string(),
target: target.to_string(),
});
}
}
extensions.push(Component {
pkg: "rust-src".to_string(),
target: "*".to_string(),
});
pkg.target.insert(host.to_string(), Target {
available: true,
url: Some(self.url("rust", host)),
hash: Some(to_hex(digest.as_ref())),
components: Some(components),
extensions: Some(extensions),
});
}
manifest.pkg.insert("rust".to_string(), pkg);
return manifest
}
fn package(&mut self,
pkgname: &str,
dst: &mut HashMap<String, Package>,
targets: &[&str]) {
let targets = targets.iter().map(|name| {
let filename = self.filename(pkgname, name);
let digest = match self.digests.remove(&filename) {
Some(digest) => digest,
None => {
return (name.to_string(), Target {
available: false,
url: None,
hash: None,
components: None,
extensions: None,
})
}
};
(name.to_string(), Target {
available: true,
url: Some(self.url(pkgname, name)),
hash: Some(digest),
components: None,
extensions: None,
})
}).collect();
dst.insert(pkgname.to_string(), Package {
version: self.cached_version(pkgname).to_string(),
target: targets,
});
}
fn url(&self, component: &str, target: &str) -> String {
format!("{}/{}/{}",
self.s3_address,
self.date,
self.filename(component, target))
}
fn filename(&self, component: &str, target: &str) -> String {
if component == "rust-src" {
format!("rust-src-{}.tar.gz", self.channel)
} else {
format!("{}-{}-{}.tar.gz", component, self.channel, target)
}
}
fn cached_version(&self, component: &str) -> &str {
if component == "cargo" {
&self.cargo_version
} else {
&self.rust_version
}
}
fn version(&self, component: &str, target: &str) -> String {
let mut cmd = Command::new("tar");
let filename = self.filename(component, target);
cmd.arg("xf")
.arg(self.input.join(&filename))
.arg(format!("{}/version", filename.replace(".tar.gz", "")))
.arg("-O");
let version = t!(cmd.output());
if !version.status.success() {
panic!("failed to learn version:\n\n{:?}\n\n{}\n\n{}",
cmd,
String::from_utf8_lossy(&version.stdout),
String::from_utf8_lossy(&version.stderr));
}
String::from_utf8_lossy(&version.stdout).trim().to_string()
}
fn hash(&self, path: &Path) -> String {
let sha = t!(Command::new("shasum")
.arg("-a").arg("256")
.arg(path)
.output());
assert!(sha.status.success());
let filename = path.file_name().unwrap().to_str().unwrap();
let sha256 = self.output.join(format!("{}.sha256", filename));
t!(t!(File::create(&sha256)).write_all(&sha.stdout));
let stdout = String::from_utf8_lossy(&sha.stdout);
stdout.split_whitespace().next().unwrap().to_string()
}
fn sign(&self, path: &Path) {
let filename = path.file_name().unwrap().to_str().unwrap();
let asc = self.output.join(format!("{}.asc", filename));
println!("signing: {:?}", path);
let mut cmd = Command::new("gpg");
cmd.arg("--no-tty")
.arg("--yes")
.arg("--passphrase-fd").arg("0")
.arg("--armor")
.arg("--output").arg(&asc)
.arg("--detach-sign").arg(path)
.stdin(Stdio::piped());
let mut child = t!(cmd.spawn());
t!(child.stdin.take().unwrap().write_all(self.gpg_passphrase.as_bytes()));
assert!(t!(child.wait()).success());
}
fn write_manifest(&self, manifest: &str, name: &str) {
let dst = self.output.join(name);
t!(t!(File::create(&dst)).write_all(manifest.as_bytes()));
self.hash(&dst);
self.sign(&dst);
}
}
fn to_hex(digest: &[u8]) -> String {
let mut ret = String::new();
for byte in digest {
ret.push(hex((byte & 0xf0) >> 4));
ret.push(hex(byte & 0xf));
}
return ret;
fn hex(b: u8) -> char {
match b {
0...9 => (b'0' + b) as char,
_ => (b'a' + b - 10) as char,
}
}
}