
Auto merge of #49939 - kennytm:rollup, r=kennytm

Rollup of 14 pull requests

Successful merges: #49908, #49876, #49916, #49951, #49465, #49922, #49866, #49915, #49886, #49913, #49852, #49958, #49871, #49864

Failed merges:
Committed by bors on 2018-04-14 13:11:24 +00:00 (commit 21dae950be)
42 changed files with 369 additions and 89 deletions

View file

@ -176,6 +176,8 @@ matrix:
if: branch = auto
- env: IMAGE=x86_64-gnu-distcheck
if: branch = auto
- env: IMAGE=mingw-check
if: type = pull_request OR branch = auto
- stage: publish toolstate
if: branch = master AND type = push

View file

@ -247,9 +247,6 @@ fn main() {
// When running miri tests, we need to generate MIR for all libraries
if env::var("TEST_MIRI").ok().map_or(false, |val| val == "true") {
cmd.arg("-Zalways-encode-mir");
if stage != "0" {
cmd.arg("-Zmiri");
}
cmd.arg("-Zmir-emit-validate=1");
}

View file

@ -514,7 +514,7 @@ impl Step for Test {
fn should_run(run: ShouldRun) -> ShouldRun {
let builder = run.builder;
run.krate("test").default_condition(builder.config.compiler_docs)
run.krate("test").default_condition(builder.build.config.docs)
}
fn make_run(run: RunConfig) {
@ -557,6 +557,9 @@ impl Step for Test {
let mut cargo = builder.cargo(compiler, Mode::Libtest, target, "doc");
compile::test_cargo(build, &compiler, target, &mut cargo);
cargo.arg("--no-deps").arg("-p").arg("test");
build.run(&mut cargo);
build.cp_r(&my_out, &out);
}

View file

@ -459,7 +459,7 @@ impl Step for RustdocTheme {
}
fn run(self, builder: &Builder) {
let rustdoc = builder.rustdoc(self.compiler.host);
let rustdoc = builder.out.join("bootstrap/debug/rustdoc");
let mut cmd = builder.tool_cmd(Tool::RustdocTheme);
cmd.arg(rustdoc.to_str().unwrap())
.arg(builder.src.join("src/librustdoc/html/static/themes").to_str().unwrap())
@ -875,7 +875,7 @@ impl Step for Compiletest {
if build.config.rust_debuginfo_tests {
flags.push("-g".to_string());
}
flags.push("-Zmiri -Zunstable-options".to_string());
flags.push("-Zunstable-options".to_string());
flags.push(build.config.cmd.rustc_args().join(" "));
if let Some(linker) = build.linker(target) {

View file

@ -564,7 +564,8 @@ tool_extended!((self, builder),
target: self.target,
extra_features: Vec::new(),
});
if clippy.is_some() {
let channel = &builder.config.channel;
if clippy.is_some() && channel != "stable" && channel != "beta" {
self.extra_features.push("clippy".to_owned());
}
builder.ensure(native::Openssl {

View file

@ -0,0 +1,22 @@
FROM ubuntu:16.04
RUN apt-get update && apt-get install -y --no-install-recommends \
g++ \
make \
file \
curl \
ca-certificates \
python2.7 \
git \
cmake \
sudo \
gdb \
xz-utils \
libssl-dev \
pkg-config \
mingw-w64
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
ENV SCRIPT python2.7 ../x.py check --target=i686-pc-windows-gnu --host=i686-pc-windows-gnu

View file

@ -55,7 +55,7 @@ fn main() {
```
And that's it! The `#[global_allocator]` attribute is applied to a `static`
which implements the `Alloc` trait in the `std::heap` module. Note, though,
which implements the `Alloc` trait in the `std::alloc` module. Note, though,
that the implementation is defined for `&MyAllocator`, not just `MyAllocator`.
You may wish, however, to also provide `Alloc for MyAllocator` for other use
cases.
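For readers skimming the hunk, a minimal end-to-end sketch of the attribute described above. It is written against the `GlobalAlloc` trait that was later stabilized in `std::alloc` rather than the unstable `Alloc` trait this paragraph refers to, and `MyAllocator` here is an illustrative name that simply forwards to the system allocator:

```rust
use std::alloc::{GlobalAlloc, Layout, System};

// A trivial allocator that delegates every request to the system allocator.
struct MyAllocator;

unsafe impl GlobalAlloc for MyAllocator {
    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
        System.alloc(layout)
    }

    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
        System.dealloc(ptr, layout)
    }
}

// The attribute is applied to a `static` of the allocator type.
#[global_allocator]
static GLOBAL: MyAllocator = MyAllocator;

fn main() {
    // All heap allocations in the program now go through `GLOBAL`.
    let v = vec![1, 2, 3];
    assert_eq!(v.len(), 3);
}
```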

View file

@ -18,7 +18,7 @@ derives have spans that point to the fields, rather than the
sample usage: src/etc/generate-deriving-span-tests.py
"""
import sys, os, datetime, stat
import sys, os, datetime, stat, re
TEST_DIR = os.path.abspath(
os.path.join(os.path.dirname(__file__), '../test/compile-fail'))
@ -87,16 +87,25 @@ def create_test_case(type, trait, super_traits, error_count):
def write_file(name, string):
test_file = os.path.join(TEST_DIR, 'derives-span-%s.rs' % name)
with open(test_file) as f:
old_str = f.read()
old_str_ignoring_date = re.sub(r'^// Copyright \d+',
'// Copyright {year}'.format(year = YEAR), old_str)
if old_str_ignoring_date == string:
# if all we're doing is updating the copyright year, ignore it
return 0
# set write permission if file exists, so it can be changed
if os.path.exists(test_file):
os.chmod(test_file, stat.S_IWUSR)
with open(test_file, 'wt') as f:
with open(test_file, 'w') as f:
f.write(string)
# mark file read-only
os.chmod(test_file, stat.S_IRUSR|stat.S_IRGRP|stat.S_IROTH)
return 1
ENUM = 1
@ -120,11 +129,15 @@ for (trait, supers, errs) in [('Clone', [], 1),
('Hash', [], 1)]:
traits[trait] = (ALL, supers, errs)
files = 0
for (trait, (types, super_traits, error_count)) in traits.items():
mk = lambda ty: create_test_case(ty, trait, super_traits, error_count)
if types & ENUM:
write_file(trait + '-enum', mk(ENUM_TUPLE))
write_file(trait + '-enum-struct-variant', mk(ENUM_STRUCT))
files += write_file(trait + '-enum', mk(ENUM_TUPLE))
files += write_file(trait + '-enum-struct-variant', mk(ENUM_STRUCT))
if types & STRUCT:
write_file(trait + '-struct', mk(STRUCT_FIELDS))
write_file(trait + '-tuple-struct', mk(STRUCT_TUPLE))
files += write_file(trait + '-struct', mk(STRUCT_FIELDS))
files += write_file(trait + '-tuple-struct', mk(STRUCT_TUPLE))
print('Generated {files} deriving span test{}.'.format('s' if files != 1 else '', files = files))

View file

@ -55,8 +55,6 @@
#![stable(feature = "rust1", since = "1.0.0")]
use raw_vec::RawVec;
use core::any::Any;
use core::borrow;
use core::cmp::Ordering;
@ -68,6 +66,8 @@ use core::mem::{self, Pin};
use core::ops::{CoerceUnsized, Deref, DerefMut, Generator, GeneratorState};
use core::ptr::{self, NonNull, Unique};
use core::convert::From;
use raw_vec::RawVec;
use str::from_boxed_utf8_unchecked;
/// A pointer type for heap allocation.

View file

@ -8,15 +8,16 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use alloc::{Alloc, Layout, Global};
use core::cmp;
use core::mem;
use core::ops::Drop;
use core::ptr::{self, NonNull, Unique};
use core::slice;
use super::boxed::Box;
use super::allocator::CollectionAllocErr;
use super::allocator::CollectionAllocErr::*;
use alloc::{Alloc, Layout, Global};
use alloc::CollectionAllocErr;
use alloc::CollectionAllocErr::*;
use boxed::Box;
/// A low-level utility for more ergonomically allocating, reallocating, and deallocating
/// a buffer of memory on the heap without having to worry about all the corner cases

View file

@ -46,12 +46,12 @@ use core::mem;
use core::ptr;
use core::iter::FusedIterator;
use vec_deque::VecDeque;
use borrow::{Borrow, ToOwned};
use boxed::Box;
use slice::{SliceConcatExt, SliceIndex};
use string::String;
use vec::Vec;
use slice::{SliceConcatExt, SliceIndex};
use boxed::Box;
use vec_deque::VecDeque;
#[stable(feature = "rust1", since = "1.0.0")]
pub use core::str::{FromStr, Utf8Error};

View file

@ -66,11 +66,11 @@ use core::ptr;
use core::str::pattern::Pattern;
use core::str::lossy;
use alloc::CollectionAllocErr;
use borrow::{Cow, ToOwned};
use boxed::Box;
use str::{self, from_boxed_utf8_unchecked, FromStr, Utf8Error, Chars};
use vec::Vec;
use boxed::Box;
use super::allocator::CollectionAllocErr;
/// A UTF-8 encoded, growable string.
///

View file

@ -82,11 +82,11 @@ use core::ptr;
use core::ptr::NonNull;
use core::slice;
use alloc::CollectionAllocErr;
use borrow::ToOwned;
use borrow::Cow;
use boxed::Box;
use raw_vec::RawVec;
use super::allocator::CollectionAllocErr;
/// A contiguous growable array type, written `Vec<T>` but pronounced 'vector'.
///

View file

@ -30,10 +30,9 @@ use core::slice;
use core::hash::{Hash, Hasher};
use core::cmp;
use alloc::CollectionAllocErr;
use raw_vec::RawVec;
use super::allocator::CollectionAllocErr;
use super::vec::Vec;
use vec::Vec;
const INITIAL_CAPACITY: usize = 7; // 2^3 - 1
const MINIMUM_CAPACITY: usize = 1; // 2 - 1

View file

@ -15,6 +15,7 @@
use convert::TryFrom;
use fmt;
use intrinsics;
use mem;
#[allow(deprecated)] use nonzero::NonZero;
use ops;
use str::FromStr;
@ -1868,6 +1869,50 @@ $EndFeature, "
#[inline]
pub fn is_negative(self) -> bool { self < 0 }
}
/// Return the memory representation of this integer as a byte array.
///
/// The target platform's native endianness is used.
/// Portable code likely wants to use this after [`to_be`] or [`to_le`].
///
/// [`to_be`]: #method.to_be
/// [`to_le`]: #method.to_le
///
/// # Examples
///
/// ```
/// #![feature(int_to_from_bytes)]
///
/// let bytes = i32::min_value().to_be().to_bytes();
/// assert_eq!(bytes, [0x80, 0, 0, 0]);
/// ```
#[unstable(feature = "int_to_from_bytes", issue = "49792")]
#[inline]
pub fn to_bytes(self) -> [u8; mem::size_of::<Self>()] {
unsafe { mem::transmute(self) }
}
/// Create an integer value from its memory representation as a byte array.
///
/// The target platform's native endianness is used.
/// Portable code likely wants to use [`from_be`] or [`from_le`] after this.
///
/// [`from_be`]: #method.from_be
/// [`from_le`]: #method.from_le
///
/// # Examples
///
/// ```
/// #![feature(int_to_from_bytes)]
///
/// let int = i32::from_be(i32::from_bytes([0x80, 0, 0, 0]));
/// assert_eq!(int, i32::min_value());
/// ```
#[unstable(feature = "int_to_from_bytes", issue = "49792")]
#[inline]
pub fn from_bytes(bytes: [u8; mem::size_of::<Self>()]) -> Self {
unsafe { mem::transmute(bytes) }
}
}
}
@ -3373,6 +3418,50 @@ $EndFeature, "
self.one_less_than_next_power_of_two().checked_add(1)
}
}
/// Return the memory representation of this integer as a byte array.
///
/// The target platform's native endianness is used.
/// Portable code likely wants to use this after [`to_be`] or [`to_le`].
///
/// [`to_be`]: #method.to_be
/// [`to_le`]: #method.to_le
///
/// # Examples
///
/// ```
/// #![feature(int_to_from_bytes)]
///
/// let bytes = 0x1234_5678_u32.to_be().to_bytes();
/// assert_eq!(bytes, [0x12, 0x34, 0x56, 0x78]);
/// ```
#[unstable(feature = "int_to_from_bytes", issue = "49792")]
#[inline]
pub fn to_bytes(self) -> [u8; mem::size_of::<Self>()] {
unsafe { mem::transmute(self) }
}
/// Create an integer value from its memory representation as a byte array.
///
/// The target platform's native endianness is used.
/// Portable code likely wants to use [`from_be`] or [`from_le`] after this.
///
/// [`from_be`]: #method.from_be
/// [`from_le`]: #method.from_le
///
/// # Examples
///
/// ```
/// #![feature(int_to_from_bytes)]
///
/// let int = u32::from_be(u32::from_bytes([0x12, 0x34, 0x56, 0x78]));
/// assert_eq!(int, 0x1234_5678_u32);
/// ```
#[unstable(feature = "int_to_from_bytes", issue = "49792")]
#[inline]
pub fn from_bytes(bytes: [u8; mem::size_of::<Self>()]) -> Self {
unsafe { mem::transmute(bytes) }
}
}
}
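As a quick sanity check of the endianness round-trip these docs recommend, here is a sketch using the names under which the same functionality was later stabilized (`to_be_bytes`/`from_be_bytes`); it mirrors the `to_be().to_bytes()` idiom in the examples above:

```rust
fn main() {
    let n: u32 = 0x1234_5678;

    // Convert to a portable big-endian byte representation.
    let bytes = n.to_be_bytes();
    assert_eq!(bytes, [0x12, 0x34, 0x56, 0x78]);

    // Reconstruct the value from those bytes.
    let back = u32::from_be_bytes(bytes);
    assert_eq!(back, n);
}
```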

View file

@ -315,7 +315,12 @@ macro_rules! bitxor_impl {
bitxor_impl! { bool usize u8 u16 u32 u64 u128 isize i8 i16 i32 i64 i128 }
/// The left shift operator `<<`.
/// The left shift operator `<<`. Note that because this trait is implemented
/// for all integer types with multiple right-hand-side types, Rust's type
/// checker has special handling for `_ << _`, setting the result type for
/// integer operations to the type of the left-hand-side operand. This means
/// that though `a << b` and `a.shl(b)` are one and the same from an evaluation
/// standpoint, they are different when it comes to type inference.
///
/// # Examples
///
@ -417,7 +422,12 @@ macro_rules! shl_impl_all {
shl_impl_all! { u8 u16 u32 u64 u128 usize i8 i16 i32 i64 isize i128 }
/// The right shift operator `>>`.
/// The right shift operator `>>`. Note that because this trait is implemented
/// for all integer types with multiple right-hand-side types, Rust's type
/// checker has special handling for `_ >> _`, setting the result type for
/// integer operations to the type of the left-hand-side operand. This means
/// that though `a >> b` and `a.shr(b)` are one and the same from an evaluation
/// standpoint, they are different when it comes to type inference.
///
/// # Examples
///
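To make the result-type rule above concrete, here is a small standalone sketch (plain Rust, not part of this diff) showing that the shift expression takes the type of its left-hand operand even when the right-hand side is a different integer type:

```rust
fn main() {
    let lhs: u8 = 0b0001;
    let rhs: u32 = 3;

    // `u8: Shl<u32>` exists, and the result type is that of the left-hand
    // operand, so `shifted` is a `u8` despite `rhs` being a `u32`.
    let shifted = lhs << rhs;
    let _type_check: u8 = shifted;
    assert_eq!(shifted, 0b1000);
}
```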

View file

@ -1425,8 +1425,8 @@ assert_eq!(foo.load(Ordering::SeqCst), 0b011110);
doc_comment! {
concat!("Fetches the value, and applies a function to it that returns an optional
new value. Returns a `Result` (`Ok(_)` if the function returned `Some(_)`, else `Err(_)`) of the
previous value.
new value. Returns a `Result` of `Ok(previous_value)` if the function returned `Some(_)`, else
`Err(previous_value)`.
Note: This may call the function multiple times if the value has been changed from other threads in
the meantime, as long as the function returns `Some(_)`, but the function will have been applied
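For context, a sketch of how the documented behaviour looks from the caller's side. The method is presumably the `fetch_update` family on the atomic integer types; the sketch uses the signature it later stabilized with (two `Ordering` arguments before the closure), which differs from the unstable form documented in this diff:

```rust
use std::sync::atomic::{AtomicUsize, Ordering};

fn main() {
    let value = AtomicUsize::new(7);

    // The closure returns Some(new) to attempt a compare-and-swap; on success
    // the call yields Ok(previous_value).
    let updated = value.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |v| Some(v + 1));
    assert_eq!(updated, Ok(7));
    assert_eq!(value.load(Ordering::SeqCst), 8);

    // If the closure declines by returning None, the call yields
    // Err(previous_value) and the stored value is left untouched.
    let unchanged = value.fetch_update(Ordering::SeqCst, Ordering::SeqCst, |_| None);
    assert_eq!(unchanged, Err(8));
    assert_eq!(value.load(Ordering::SeqCst), 8);
}
```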

View file

@ -35,6 +35,7 @@ use std::fmt;
use syntax::ast;
use errors::DiagnosticBuilder;
use syntax_pos::{self, Span};
use syntax_pos::symbol::InternedString;
use util::nodemap::FxHashMap;
use arena::DroplessArena;
@ -343,7 +344,7 @@ pub enum RegionVariableOrigin {
Coercion(Span),
// Region variables created as the values for early-bound regions
EarlyBoundRegion(Span, ast::Name),
EarlyBoundRegion(Span, InternedString),
// Region variables created for bound regions
// in a function or method that is called

View file

@ -1227,8 +1227,6 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"print some statistics about MIR"),
always_encode_mir: bool = (false, parse_bool, [TRACKED],
"encode MIR of all functions into the crate metadata"),
miri: bool = (false, parse_bool, [TRACKED],
"check the miri const evaluator against the old ctfe"),
osx_rpath_install_name: bool = (false, parse_bool, [TRACKED],
"pass `-install_name @rpath/...` to the macOS linker"),
sanitizer: Option<Sanitizer> = (None, parse_sanitizer, [TRACKED],

View file

@ -728,7 +728,7 @@ pub struct TypeParameterDef {
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub struct RegionParameterDef {
pub name: Name,
pub name: InternedString,
pub def_id: DefId,
pub index: u32,

View file

@ -58,7 +58,7 @@ pub enum BoundRegion {
///
/// The def-id is needed to distinguish free regions in
/// the event of shadowing.
BrNamed(DefId, Name),
BrNamed(DefId, InternedString),
/// Fresh bound identifiers created during GLB computations.
BrFresh(u32),
@ -1058,7 +1058,7 @@ impl<'tcx> serialize::UseSpecializedDecodable for Region<'tcx> {}
pub struct EarlyBoundRegion {
pub def_id: DefId,
pub index: u32,
pub name: Name,
pub name: InternedString,
}
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]

View file

@ -30,7 +30,7 @@ use std::usize;
use rustc_data_structures::indexed_vec::Idx;
use syntax::abi::Abi;
use syntax::ast::CRATE_NODE_ID;
use syntax::symbol::Symbol;
use syntax::symbol::{Symbol, InternedString};
use hir;
macro_rules! gen_display_debug_body {
@ -130,7 +130,7 @@ macro_rules! print {
}
struct LateBoundRegionNameCollector(FxHashSet<Symbol>);
struct LateBoundRegionNameCollector(FxHashSet<InternedString>);
impl<'tcx> ty::fold::TypeVisitor<'tcx> for LateBoundRegionNameCollector {
fn visit_region(&mut self, r: ty::Region<'tcx>) -> bool {
match *r {
@ -148,7 +148,7 @@ pub struct PrintContext {
is_debug: bool,
is_verbose: bool,
identify_regions: bool,
used_region_names: Option<FxHashSet<Symbol>>,
used_region_names: Option<FxHashSet<InternedString>>,
region_index: usize,
binder_depth: usize,
}
@ -440,12 +440,12 @@ impl PrintContext {
lifted: Option<ty::Binder<U>>) -> fmt::Result
where T: Print, U: Print + TypeFoldable<'tcx>, F: fmt::Write
{
fn name_by_region_index(index: usize) -> Symbol {
fn name_by_region_index(index: usize) -> InternedString {
match index {
0 => Symbol::intern("'r"),
1 => Symbol::intern("'s"),
i => Symbol::intern(&format!("'t{}", i-2)),
}
}.as_str()
}
// Replace any anonymous late-bound regions with named
@ -493,8 +493,7 @@ impl PrintContext {
}
};
let _ = write!(f, "{}", name);
ty::BrNamed(tcx.hir.local_def_id(CRATE_NODE_ID),
name)
ty::BrNamed(tcx.hir.local_def_id(CRATE_NODE_ID), name)
}
};
tcx.mk_region(ty::ReLateBound(ty::DebruijnIndex::new(1), br))
@ -510,7 +509,7 @@ impl PrintContext {
result
}
fn is_name_used(&self, name: &Symbol) -> bool {
fn is_name_used(&self, name: &InternedString) -> bool {
match self.used_region_names {
Some(ref names) => names.contains(name),
None => false,
@ -697,7 +696,7 @@ define_print! {
BrAnon(n) => write!(f, "BrAnon({:?})", n),
BrFresh(n) => write!(f, "BrFresh({:?})", n),
BrNamed(did, name) => {
write!(f, "BrNamed({:?}:{:?}, {:?})",
write!(f, "BrNamed({:?}:{:?}, {})",
did.krate, did.index, name)
}
BrEnv => write!(f, "BrEnv"),

View file

@ -307,7 +307,7 @@ impl<'a, 'gcx, 'tcx> Env<'a, 'gcx, 'tcx> {
}
pub fn re_early_bound(&self, index: u32, name: &'static str) -> ty::Region<'tcx> {
let name = Symbol::intern(name);
let name = Symbol::intern(name).as_str();
self.infcx.tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
def_id: self.infcx.tcx.hir.local_def_id(ast::CRATE_NODE_ID),
index,

View file

@ -100,7 +100,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
{
let tcx = self.tcx();
let lifetime_name = |def_id| {
tcx.hir.name(tcx.hir.as_local_node_id(def_id).unwrap())
tcx.hir.name(tcx.hir.as_local_node_id(def_id).unwrap()).as_str()
};
let hir_id = tcx.hir.node_to_hir_id(lifetime.id);

View file

@ -886,7 +886,7 @@ fn generics_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let early_lifetimes = early_bound_lifetimes_from_generics(tcx, ast_generics);
let regions = early_lifetimes.enumerate().map(|(i, l)| {
ty::RegionParameterDef {
name: l.lifetime.name.name(),
name: l.lifetime.name.name().as_str(),
index: own_start + i as u32,
def_id: tcx.hir.local_def_id(l.lifetime.id),
pure_wrt_drop: l.pure_wrt_drop,
@ -1427,7 +1427,7 @@ pub fn explicit_predicates_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let region = tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
def_id: tcx.hir.local_def_id(param.lifetime.id),
index,
name: param.lifetime.name.name(),
name: param.lifetime.name.name().as_str(),
}));
index += 1;

View file

@ -224,7 +224,7 @@ impl<'a, 'tcx, 'rcx> AutoTraitFinder<'a, 'tcx, 'rcx> {
let name = if p.name == "" {
hir::LifetimeName::Static
} else {
hir::LifetimeName::Name(p.name)
hir::LifetimeName::Name(Symbol::intern(&p.name))
};
hir::Lifetime {
@ -407,7 +407,7 @@ impl<'a, 'tcx, 'rcx> AutoTraitFinder<'a, 'tcx, 'rcx> {
let names_map: FxHashMap<String, Lifetime> = generics
.regions
.iter()
.map(|l| (l.name.as_str().to_string(), l.clean(self.cx)))
.map(|l| (l.name.to_string(), l.clean(self.cx)))
.collect();
let body_ids: FxHashSet<_> = infcx
@ -728,7 +728,7 @@ impl<'a, 'tcx, 'rcx> AutoTraitFinder<'a, 'tcx, 'rcx> {
fn region_name(&self, region: Region) -> Option<String> {
match region {
&ty::ReEarlyBound(r) => Some(r.name.as_str().to_string()),
&ty::ReEarlyBound(r) => Some(r.name.to_string()),
_ => None,
}
}
@ -1005,7 +1005,7 @@ impl<'a, 'tcx, 'rcx> AutoTraitFinder<'a, 'tcx, 'rcx> {
// We only care about late bound regions, as we need to add them
// to the 'for<>' section
&ty::ReLateBound(_, ty::BoundRegion::BrNamed(_, name)) => {
Some(GenericParam::Lifetime(Lifetime(name.as_str().to_string())))
Some(GenericParam::Lifetime(Lifetime(name.to_string())))
}
&ty::ReVar(_) | &ty::ReEarlyBound(_) => None,
_ => panic!("Unexpected region type {:?}", r),

View file

@ -28,7 +28,7 @@ use rustc::session::config::{OutputType, OutputTypes, Externs};
use rustc::session::search_paths::{SearchPaths, PathKind};
use rustc_metadata::dynamic_lib::DynamicLibrary;
use tempdir::TempDir;
use rustc_driver::{self, driver, Compilation};
use rustc_driver::{self, driver, target_features, Compilation};
use rustc_driver::driver::phase_2_configure_and_expand;
use rustc_metadata::cstore::CStore;
use rustc_resolve::MakeGlobMap;
@ -96,8 +96,10 @@ pub fn run(input_path: &Path,
let trans = rustc_driver::get_trans(&sess);
let cstore = CStore::new(trans.metadata_loader());
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
sess.parse_sess.config =
config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone()));
let mut cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone()));
target_features::add_configuration(&mut cfg, &sess, &*trans);
sess.parse_sess.config = cfg;
let krate = panictry!(driver::phase_1_parse_input(&driver::CompileController::basic(),
&sess,
@ -271,8 +273,11 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
let outdir = Mutex::new(TempDir::new("rustdoctest").ok().expect("rustdoc needs a tempdir"));
let libdir = sess.target_filesearch(PathKind::All).get_lib_path();
let mut control = driver::CompileController::basic();
sess.parse_sess.config =
config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone()));
let mut cfg = config::build_configuration(&sess, config::parse_cfgspecs(cfgs.clone()));
target_features::add_configuration(&mut cfg, &sess, &*trans);
sess.parse_sess.config = cfg;
let out = Some(outdir.lock().unwrap().path().to_path_buf());
if no_run {

View file

@ -527,8 +527,17 @@ impl Token {
// all span information.
//
// As a result, some AST nodes are annotated with the token
// stream they came from. Attempt to extract these lossless
// token streams before we fall back to the stringification.
// stream they came from. Here we attempt to extract these
// lossless token streams before we fall back to the
// stringification.
//
// During early phases of the compiler, though, the AST could
// get modified directly (e.g. attributes added or removed) and
// the internal cache of tokens may not be invalidated or
// updated. Consequently, if the "lossless" token stream
// disagrees with our actual stringification (which has
// historically been much more battle-tested) then we go with
// the lossy stream anyway (losing span information).
let mut tokens = None;
match nt.0 {
@ -555,13 +564,17 @@ impl Token {
_ => {}
}
tokens.unwrap_or_else(|| {
nt.1.force(|| {
// FIXME(jseyfried): Avoid this pretty-print + reparse hack
let source = pprust::token_to_string(self);
parse_stream_from_source_str(FileName::MacroExpansion, source, sess, Some(span))
})
})
let tokens_for_real = nt.1.force(|| {
// FIXME(#43081): Avoid this pretty-print + reparse hack
let source = pprust::token_to_string(self);
parse_stream_from_source_str(FileName::MacroExpansion, source, sess, Some(span))
});
if let Some(tokens) = tokens {
if tokens.eq_unspanned(&tokens_for_real) {
return tokens
}
}
return tokens_for_real
}
}

View file

@ -118,7 +118,7 @@ impl TokenTree {
(&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
(&TokenTree::Delimited(_, ref dl), &TokenTree::Delimited(_, ref dl2)) => {
dl.delim == dl2.delim &&
dl.stream().trees().zip(dl2.stream().trees()).all(|(tt, tt2)| tt.eq_unspanned(&tt2))
dl.stream().eq_unspanned(&dl2.stream())
}
(_, _) => false,
}
@ -240,12 +240,14 @@ impl TokenStream {
/// Compares two TokenStreams, checking equality without regarding span information.
pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
for (t1, t2) in self.trees().zip(other.trees()) {
let mut t1 = self.trees();
let mut t2 = other.trees();
for (t1, t2) in t1.by_ref().zip(t2.by_ref()) {
if !t1.eq_unspanned(&t2) {
return false;
}
}
true
t1.next().is_none() && t2.next().is_none()
}
/// Precondition: `self` consists of a single token tree.
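The reason the patched `eq_unspanned` drains both iterators is that `zip` stops at the shorter stream, so a pairwise check alone would report a stream equal to its own prefix. A standalone sketch of the pitfall, using plain slices in place of token streams:

```rust
// Pairwise comparison only: silently accepts a longer second sequence.
fn eq_pairwise_only(a: &[u32], b: &[u32]) -> bool {
    a.iter().zip(b.iter()).all(|(x, y)| x == y)
}

// Same shape as the fix: compare pairwise, then require both iterators
// to be exhausted.
fn eq_with_length_check(a: &[u32], b: &[u32]) -> bool {
    let mut x = a.iter();
    let mut y = b.iter();
    for (i, j) in x.by_ref().zip(y.by_ref()) {
        if i != j {
            return false;
        }
    }
    x.next().is_none() && y.next().is_none()
}

fn main() {
    assert!(eq_pairwise_only(&[1, 2], &[1, 2, 3]));       // false positive
    assert!(!eq_with_length_check(&[1, 2], &[1, 2, 3]));  // correctly unequal
}
```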

View file

@ -13,7 +13,7 @@
#![feature(global_allocator, allocator_api)]
#![crate_type = "rlib"]
use std::heap::System;
use std::alloc::System;
#[global_allocator]
static A: System = System;

View file

@ -13,7 +13,7 @@
#![feature(global_allocator, allocator_api)]
#![crate_type = "rlib"]
use std::heap::System;
use std::alloc::System;
#[global_allocator]
static A: System = System;

View file

@ -10,7 +10,7 @@
#![feature(global_allocator, allocator_api)]
use std::heap::System;
use std::alloc::System;
#[global_allocator]
static A: System = System;

View file

@ -16,7 +16,7 @@
extern crate system_allocator;
use std::heap::System;
use std::alloc::System;
#[global_allocator]
static A: System = System;

View file

@ -0,0 +1,57 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// no-prefer-dynamic
#![crate_type = "proc-macro"]
#![feature(proc_macro)]
extern crate proc_macro;
use proc_macro::*;
#[proc_macro_attribute]
pub fn assert1(_a: TokenStream, b: TokenStream) -> TokenStream {
assert_eq(b.clone(), "pub fn foo() {}".parse().unwrap());
b
}
#[proc_macro_derive(Foo, attributes(foo))]
pub fn assert2(a: TokenStream) -> TokenStream {
assert_eq(a, "pub struct MyStructc { _a: i32, }".parse().unwrap());
TokenStream::empty()
}
fn assert_eq(a: TokenStream, b: TokenStream) {
let mut a = a.into_iter();
let mut b = b.into_iter();
for (a, b) in a.by_ref().zip(&mut b) {
match (a, b) {
(TokenTree::Group(a), TokenTree::Group(b)) => {
assert_eq!(a.delimiter(), b.delimiter());
assert_eq(a.stream(), b.stream());
}
(TokenTree::Op(a), TokenTree::Op(b)) => {
assert_eq!(a.op(), b.op());
assert_eq!(a.spacing(), b.spacing());
}
(TokenTree::Literal(a), TokenTree::Literal(b)) => {
assert_eq!(a.to_string(), b.to_string());
}
(TokenTree::Term(a), TokenTree::Term(b)) => {
assert_eq!(a.to_string(), b.to_string());
}
(a, b) => panic!("{:?} != {:?}", a, b),
}
}
assert!(a.next().is_none());
assert!(b.next().is_none());
}

View file

@ -0,0 +1,37 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// aux-build:modify-ast.rs
#![feature(proc_macro)]
extern crate modify_ast;
use modify_ast::*;
#[derive(Foo)]
pub struct MyStructc {
#[cfg_attr(my_cfg, foo)]
_a: i32,
}
macro_rules! a {
($i:item) => ($i)
}
a! {
#[assert1]
pub fn foo() {}
}
fn main() {
let _a = MyStructc { _a: 0 };
foo();
}

View file

@ -10,15 +10,15 @@
#![feature(allocator_api, nonnull)]
use std::heap::{Heap, Alloc};
use std::alloc::{Alloc, Global};
fn main() {
unsafe {
let ptr = Heap.alloc_one::<i32>().unwrap_or_else(|_| {
Heap.oom()
let ptr = Global.alloc_one::<i32>().unwrap_or_else(|_| {
Global.oom()
});
*ptr.as_ptr() = 4;
assert_eq!(*ptr.as_ptr(), 4);
Heap.dealloc_one(ptr);
Global.dealloc_one(ptr);
}
}

View file

@ -13,7 +13,7 @@
#![feature(heap_api, allocator_api)]
#![crate_type = "rlib"]
use std::heap::{GlobalAlloc, System, Layout, Opaque};
use std::alloc::{GlobalAlloc, System, Layout, Opaque};
use std::sync::atomic::{AtomicUsize, Ordering};
pub struct A(pub AtomicUsize);

View file

@ -12,7 +12,7 @@
#![feature(allocator_api)]
use std::heap::{Alloc, Heap, Layout};
use std::alloc::{Alloc, Global, Layout};
use std::ptr::NonNull;
struct arena(());
@ -32,8 +32,8 @@ struct Ccx {
fn alloc<'a>(_bcx : &'a arena) -> &'a Bcx<'a> {
unsafe {
let ptr = Heap.alloc(Layout::new::<Bcx>())
.unwrap_or_else(|_| Heap.oom());
let ptr = Global.alloc(Layout::new::<Bcx>())
.unwrap_or_else(|_| Global.oom());
&*(ptr.as_ptr() as *const _)
}
}
@ -46,7 +46,7 @@ fn g(fcx : &Fcx) {
let bcx = Bcx { fcx: fcx };
let bcx2 = h(&bcx);
unsafe {
Heap.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
Global.dealloc(NonNull::new_unchecked(bcx2 as *const _ as *mut _), Layout::new::<Bcx>());
}
}

View file

@ -14,6 +14,6 @@
#![feature(allocator_api, global_allocator)]
#[global_allocator]
static A: std::heap::System = std::heap::System;
static A: std::alloc::System = std::alloc::System;
fn main() {}

View file

@ -0,0 +1,31 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// only-x86_64
// compile-flags:--test
// should-fail
// no-system-llvm
// #49723: rustdoc didn't add target features when extracting or running doctests
#![feature(doc_cfg)]
/// Foo
///
/// # Examples
///
/// ```
/// #![feature(cfg_target_feature)]
///
/// #[cfg(target_feature = "sse")]
/// assert!(false);
/// ```
#[doc(cfg(target_feature = "sse"))]
pub unsafe fn foo() {}

@ -1 +1 @@
Subproject commit b70ab13b31628e91b05961d55c07abf20ad49de6
Subproject commit 008c3690846798d678f4a0a45ee46cc9ff6dc90f

@ -1 +1 @@
Subproject commit faccf0d07cad4c84da20c5c0102a450fca9d0b6b
Subproject commit 7bda1161a37ff51f254ff0a7862abe6dc54fdb36