
Auto merge of #47748 - alexcrichton:rollup, r=alexcrichton

Rollup of 19 pull requests

- Successful merges: #47415, #47437, #47439, #47453, #47460, #47502, #47529, #47600, #47607, #47618, #47626, #47656, #47668, #47696, #47701, #47705, #47710, #47711, #47719
- Failed merges: #47455, #47521
Merged by bors on 2018-01-26 17:41:36 +00:00 (commit bacb5c58df).
118 changed files with 3683 additions and 2403 deletions.


@@ -125,11 +125,6 @@ fn main() {
         cmd.arg(format!("-Clinker={}", target_linker));
     }
-    // Pass down incremental directory, if any.
-    if let Ok(dir) = env::var("RUSTC_INCREMENTAL") {
-        cmd.arg(format!("-Zincremental={}", dir));
-    }
     let crate_name = args.windows(2)
         .find(|a| &*a[0] == "--crate-name")
         .unwrap();


@@ -602,6 +602,7 @@ class RustBuild(object):
         env["LIBRARY_PATH"] = os.path.join(self.bin_root(), "lib") + \
             (os.pathsep + env["LIBRARY_PATH"]) \
             if "LIBRARY_PATH" in env else ""
+        env["RUSTFLAGS"] = "-Cdebuginfo=2"
         env["PATH"] = os.path.join(self.bin_root(), "bin") + \
             os.pathsep + env["PATH"]
         if not os.path.isfile(self.cargo()):


@@ -26,6 +26,7 @@ use util::{exe, libdir, add_lib_path};
 use {Build, Mode};
 use cache::{INTERNER, Interned, Cache};
 use check;
+use test;
 use flags::Subcommand;
 use doc;
 use tool;
@@ -230,6 +231,7 @@ impl<'a> ShouldRun<'a> {
 #[derive(Copy, Clone, PartialEq, Eq, Debug)]
 pub enum Kind {
     Build,
+    Check,
     Test,
     Bench,
     Dist,
@@ -251,13 +253,13 @@ impl<'a> Builder<'a> {
                 tool::Compiletest, tool::RemoteTestServer, tool::RemoteTestClient,
                 tool::RustInstaller, tool::Cargo, tool::Rls, tool::Rustdoc, tool::Clippy,
                 native::Llvm, tool::Rustfmt, tool::Miri),
-            Kind::Test => describe!(check::Tidy, check::Bootstrap, check::DefaultCompiletest,
-                check::HostCompiletest, check::Crate, check::CrateLibrustc, check::Rustdoc,
-                check::Linkcheck, check::Cargotest, check::Cargo, check::Rls, check::Docs,
-                check::ErrorIndex, check::Distcheck, check::Rustfmt, check::Miri, check::Clippy,
-                check::RustdocJS),
-            Kind::Bench => describe!(check::Crate, check::CrateLibrustc),
+            Kind::Check => describe!(check::Std, check::Test, check::Rustc),
+            Kind::Test => describe!(test::Tidy, test::Bootstrap, test::DefaultCompiletest,
+                test::HostCompiletest, test::Crate, test::CrateLibrustc, test::Rustdoc,
+                test::Linkcheck, test::Cargotest, test::Cargo, test::Rls, test::Docs,
+                test::ErrorIndex, test::Distcheck, test::Rustfmt, test::Miri, test::Clippy,
+                test::RustdocJS),
+            Kind::Bench => describe!(test::Crate, test::CrateLibrustc),
             Kind::Doc => describe!(doc::UnstableBook, doc::UnstableBookGen, doc::TheBook,
                 doc::Standalone, doc::Std, doc::Test, doc::Rustc, doc::ErrorIndex, doc::Nomicon,
                 doc::Reference, doc::Rustdoc, doc::RustByExample, doc::CargoBook),
@@ -304,6 +306,7 @@ impl<'a> Builder<'a> {
     pub fn run(build: &Build) {
         let (kind, paths) = match build.config.cmd {
             Subcommand::Build { ref paths } => (Kind::Build, &paths[..]),
+            Subcommand::Check { ref paths } => (Kind::Check, &paths[..]),
             Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
             Subcommand::Test { ref paths, .. } => (Kind::Test, &paths[..]),
             Subcommand::Bench { ref paths, .. } => (Kind::Bench, &paths[..]),
@@ -493,13 +496,14 @@ impl<'a> Builder<'a> {
             cargo.env("RUSTC_CODEGEN_UNITS", n.to_string());
         }
         if let Some(host_linker) = self.build.linker(compiler.host) {
             cargo.env("RUSTC_HOST_LINKER", host_linker);
         }
         if let Some(target_linker) = self.build.linker(target) {
             cargo.env("RUSTC_TARGET_LINKER", target_linker);
         }
-        if cmd != "build" {
+        if cmd != "build" && cmd != "check" {
             cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(self.compiler(2, self.build.build)));
         }
@@ -566,8 +570,7 @@ impl<'a> Builder<'a> {
         // not guaranteeing correctness across builds if the compiler
         // is changing under your feet.`
         if self.config.incremental && compiler.stage == 0 {
-            let incr_dir = self.incremental_dir(compiler);
-            cargo.env("RUSTC_INCREMENTAL", incr_dir);
+            cargo.env("CARGO_INCREMENTAL", "1");
         }
         if let Some(ref on_fail) = self.config.on_fail {

File diff suppressed because it is too large.


@@ -108,7 +108,8 @@ impl Step for Std {
         std_cargo(build, &compiler, target, &mut cargo);
         run_cargo(build,
                   &mut cargo,
-                  &libstd_stamp(build, compiler, target));
+                  &libstd_stamp(build, compiler, target),
+                  false);
         builder.ensure(StdLink {
             compiler: builder.compiler(compiler.stage, build.build),
@@ -360,7 +361,8 @@ impl Step for Test {
         test_cargo(build, &compiler, target, &mut cargo);
         run_cargo(build,
                   &mut cargo,
-                  &libtest_stamp(build, compiler, target));
+                  &libtest_stamp(build, compiler, target),
+                  false);
         builder.ensure(TestLink {
             compiler: builder.compiler(compiler.stage, build.build),
@@ -488,7 +490,8 @@ impl Step for Rustc {
         rustc_cargo(build, target, &mut cargo);
         run_cargo(build,
                   &mut cargo,
-                  &librustc_stamp(build, compiler, target));
+                  &librustc_stamp(build, compiler, target),
+                  false);
         builder.ensure(RustcLink {
             compiler: builder.compiler(compiler.stage, build.build),
@@ -755,7 +758,7 @@ impl Step for Assemble {
 ///
 /// For a particular stage this will link the file listed in `stamp` into the
 /// `sysroot_dst` provided.
-fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
+pub fn add_to_sysroot(sysroot_dst: &Path, stamp: &Path) {
     t!(fs::create_dir_all(&sysroot_dst));
     for path in read_stamp_file(stamp) {
         copy(&path, &sysroot_dst.join(path.file_name().unwrap()));
@@ -785,7 +788,7 @@ fn stderr_isatty() -> bool {
     }
 }
-fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path) {
+pub fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path, is_check: bool) {
     // Instruct Cargo to give us json messages on stdout, critically leaving
     // stderr as piped so we can get those pretty colors.
     cargo.arg("--message-format").arg("json")
@@ -836,7 +839,8 @@ fn run_cargo(build: &Build, cargo: &mut Command, stamp: &Path) {
             // Skip files like executables
             if !filename.ends_with(".rlib") &&
                !filename.ends_with(".lib") &&
-               !is_dylib(&filename) {
+               !is_dylib(&filename) &&
+               !(is_check && filename.ends_with(".rmeta")) {
                 continue
             }
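A note on the new `is_check` parameter above: when checking, `cargo check` only produces metadata, so `.rmeta` files must be recorded in the stamp file instead of being skipped. Below is a minimal standalone sketch of that filter predicate — not rustbuild's actual code; `should_skip` is a made-up name and `is_dylib` is a simplified stand-in:

    // Illustrative only: mirrors the filename filter from run_cargo above.
    fn is_dylib(name: &str) -> bool {
        // Simplified stand-in for rustbuild's platform-aware check.
        name.ends_with(".so") || name.ends_with(".dylib") || name.ends_with(".dll")
    }

    /// Returns true if the artifact would be skipped (e.g. executables).
    fn should_skip(filename: &str, is_check: bool) -> bool {
        !filename.ends_with(".rlib") &&
        !filename.ends_with(".lib") &&
        !is_dylib(filename) &&
        !(is_check && filename.ends_with(".rmeta"))
    }

    fn main() {
        // `cargo check` produces only metadata, so .rmeta must be kept.
        assert!(!should_skip("libstd.rmeta", true));
        // In a normal build, .rmeta files are still skipped.
        assert!(should_skip("libstd.rmeta", false));
        // rlibs are always kept.
        assert!(!should_skip("libstd.rlib", false));
    }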


@@ -48,6 +48,9 @@ pub enum Subcommand {
     Build {
         paths: Vec<PathBuf>,
     },
+    Check {
+        paths: Vec<PathBuf>,
+    },
     Doc {
         paths: Vec<PathBuf>,
     },
@@ -88,6 +91,7 @@ Usage: x.py <subcommand> [options] [<paths>...]
 Subcommands:
     build       Compile either the compiler or libraries
+    check       Compile either the compiler or libraries, using cargo check
     test        Build and run some test suites
     bench       Build and run some benchmarks
     doc         Build documentation
@@ -128,6 +132,7 @@ To learn more about a subcommand, run `./x.py <subcommand> -h`");
     // there on out.
     let subcommand = args.iter().find(|&s|
         (s == "build")
+        || (s == "check")
        || (s == "test")
        || (s == "bench")
        || (s == "doc")
@@ -217,6 +222,21 @@ Arguments:
    arguments would), and then use the compiler built in stage 0 to build
    src/libtest and its dependencies.
    Once this is done, build/$ARCH/stage1 contains a usable compiler.");
+        }
+        "check" => {
+            subcommand_help.push_str("\n
+Arguments:
+    This subcommand accepts a number of paths to directories to the crates
+    and/or artifacts to compile. For example:
+        ./x.py check src/libcore
+        ./x.py check src/libcore src/libproc_macro
+    If no arguments are passed then the complete artifacts are compiled: std, test, and rustc. Note
+    also that since we use `cargo check`, by default this will automatically enable incremental
+    compilation, so there's no need to pass it separately, though it won't hurt. We also completely
+    ignore the stage passed, as there's no way to compile in non-stage 0 without actually building
+    the compiler.");
         }
         "test" => {
             subcommand_help.push_str("\n
@@ -286,6 +306,9 @@ Arguments:
         "build" => {
             Subcommand::Build { paths: paths }
         }
+        "check" => {
+            Subcommand::Check { paths: paths }
+        }
         "test" => {
             Subcommand::Test {
                 paths,


@@ -150,6 +150,7 @@ use util::{exe, libdir, OutputFolder, CiEnv};
 mod cc_detect;
 mod channel;
 mod check;
+mod test;
 mod clean;
 mod compile;
 mod metadata;
@@ -449,12 +450,6 @@ impl Build {
         out
     }
-    /// Get the directory for incremental by-products when using the
-    /// given compiler.
-    fn incremental_dir(&self, compiler: Compiler) -> PathBuf {
-        self.out.join(&*compiler.host).join(format!("stage{}-incremental", compiler.stage))
-    }
     /// Returns the root directory for all output generated in a particular
     /// stage when running with a particular host compiler.
     ///
@@ -776,7 +771,11 @@ impl Build {
     fn release(&self, num: &str) -> String {
         match &self.config.channel[..] {
             "stable" => num.to_string(),
-            "beta" => format!("{}-beta.{}", num, self.beta_prerelease_version()),
+            "beta" => if self.rust_info.is_git() {
+                format!("{}-beta.{}", num, self.beta_prerelease_version())
+            } else {
+                format!("{}-beta", num)
+            },
             "nightly" => format!("{}-nightly", num),
             _ => format!("{}-dev", num),
         }

src/bootstrap/test.rs (new file, 1542 lines)
File diff suppressed because it is too large.


@@ -1,4 +1,4 @@
-FROM ubuntu:16.04
+FROM ubuntu:18.04
 RUN apt-get update && apt-get install -y --no-install-recommends \
   clang \

@@ -1 +1 @@
-Subproject commit 0ba07e49264a54cb5bbd4856fcea083bb3fbec15
+Subproject commit 0a95675bab808c49f86208bacc89c5d9c53ac43f


@@ -638,6 +638,7 @@ define_dep_nodes!( <'tcx>
     [input] TargetFeaturesWhitelist,
     [] TargetFeaturesEnabled(DefId),
+    [] InstanceDefSizeEstimate { instance_def: InstanceDef<'tcx> },
 );
 trait DepNodeParams<'a, 'gcx: 'tcx + 'a, 'tcx: 'a> : fmt::Debug {


@@ -43,7 +43,6 @@
 use syntax::abi::Abi;
 use syntax::ast::{NodeId, CRATE_NODE_ID, Name, Attribute};
-use syntax::codemap::Spanned;
 use syntax_pos::Span;
 use hir::*;
 use hir::def::Def;
@@ -336,6 +335,9 @@ pub trait Visitor<'v> : Sized {
     fn visit_variant(&mut self, v: &'v Variant, g: &'v Generics, item_id: NodeId) {
         walk_variant(self, v, g, item_id)
     }
+    fn visit_label(&mut self, label: &'v Label) {
+        walk_label(self, label)
+    }
     fn visit_lifetime(&mut self, lifetime: &'v Lifetime) {
         walk_lifetime(self, lifetime)
     }
@@ -370,18 +372,6 @@ pub trait Visitor<'v> : Sized {
     }
 }
-pub fn walk_opt_name<'v, V: Visitor<'v>>(visitor: &mut V, span: Span, opt_name: Option<Name>) {
-    if let Some(name) = opt_name {
-        visitor.visit_name(span, name);
-    }
-}
-pub fn walk_opt_sp_name<'v, V: Visitor<'v>>(visitor: &mut V, opt_sp_name: &Option<Spanned<Name>>) {
-    if let Some(ref sp_name) = *opt_sp_name {
-        visitor.visit_name(sp_name.span, sp_name.node);
-    }
-}
 /// Walks the contents of a crate. See also `Crate::visit_all_items`.
 pub fn walk_crate<'v, V: Visitor<'v>>(visitor: &mut V, krate: &'v Crate) {
     visitor.visit_mod(&krate.module, krate.span, CRATE_NODE_ID);
@@ -420,6 +410,10 @@ pub fn walk_local<'v, V: Visitor<'v>>(visitor: &mut V, local: &'v Local) {
     walk_list!(visitor, visit_ty, &local.ty);
 }
+pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
+    visitor.visit_name(label.span, label.name);
+}
 pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
     visitor.visit_id(lifetime.id);
     match lifetime.name {
@@ -452,7 +446,9 @@ pub fn walk_item<'v, V: Visitor<'v>>(visitor: &mut V, item: &'v Item) {
     match item.node {
         ItemExternCrate(opt_name) => {
             visitor.visit_id(item.id);
-            walk_opt_name(visitor, item.span, opt_name)
+            if let Some(name) = opt_name {
+                visitor.visit_name(item.span, name);
+            }
         }
         ItemUse(ref path, _) => {
             visitor.visit_id(item.id);
@@ -993,14 +989,14 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
             visitor.visit_expr(if_block);
             walk_list!(visitor, visit_expr, optional_else);
         }
-        ExprWhile(ref subexpression, ref block, ref opt_sp_name) => {
+        ExprWhile(ref subexpression, ref block, ref opt_label) => {
+            walk_list!(visitor, visit_label, opt_label);
             visitor.visit_expr(subexpression);
             visitor.visit_block(block);
-            walk_opt_sp_name(visitor, opt_sp_name);
         }
-        ExprLoop(ref block, ref opt_sp_name, _) => {
+        ExprLoop(ref block, ref opt_label, _) => {
+            walk_list!(visitor, visit_label, opt_label);
             visitor.visit_block(block);
-            walk_opt_sp_name(visitor, opt_sp_name);
         }
         ExprMatch(ref subexpression, ref arms, _) => {
             visitor.visit_expr(subexpression);
@@ -1036,28 +1032,28 @@ pub fn walk_expr<'v, V: Visitor<'v>>(visitor: &mut V, expression: &'v Expr) {
         ExprPath(ref qpath) => {
             visitor.visit_qpath(qpath, expression.id, expression.span);
         }
-        ExprBreak(label, ref opt_expr) => {
-            label.ident.map(|ident| {
-                match label.target_id {
+        ExprBreak(ref destination, ref opt_expr) => {
+            if let Some(ref label) = destination.label {
+                visitor.visit_label(label);
+                match destination.target_id {
                     ScopeTarget::Block(node_id) |
                     ScopeTarget::Loop(LoopIdResult::Ok(node_id)) =>
                         visitor.visit_def_mention(Def::Label(node_id)),
                     ScopeTarget::Loop(LoopIdResult::Err(_)) => {},
                 };
-                visitor.visit_name(ident.span, ident.node.name);
-            });
+            }
             walk_list!(visitor, visit_expr, opt_expr);
         }
-        ExprAgain(label) => {
-            label.ident.map(|ident| {
-                match label.target_id {
+        ExprAgain(ref destination) => {
+            if let Some(ref label) = destination.label {
+                visitor.visit_label(label);
+                match destination.target_id {
                     ScopeTarget::Block(_) => bug!("can't `continue` to a non-loop block"),
                     ScopeTarget::Loop(LoopIdResult::Ok(node_id)) =>
                         visitor.visit_def_mention(Def::Label(node_id)),
                     ScopeTarget::Loop(LoopIdResult::Err(_)) => {},
                 };
-                visitor.visit_name(ident.span, ident.node.name);
-            });
+            }
         }
         ExprRet(ref optional_expression) => {
             walk_list!(visitor, visit_expr, optional_expression);
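The new `visit_label`/`walk_label` pair above follows the usual intravisit convention: the default trait method delegates to a free `walk_*` function so an overriding visitor can still recurse. A toy, self-contained mirror of that pattern — not the rustc API; the `Expr`, `Label`, and `Visitor` types here are invented for illustration:

    // Made-up AST mirroring the visit_label/walk_label pattern.
    struct Label { name: String }
    enum Expr {
        Loop(Box<Expr>, Option<Label>),
        Break(Option<Label>),
        Unit,
    }

    trait Visitor: Sized {
        // Default methods delegate to walk_* functions, like hir::intravisit.
        fn visit_label(&mut self, label: &Label) { walk_label(self, label) }
        fn visit_expr(&mut self, expr: &Expr) { walk_expr(self, expr) }
        fn visit_name(&mut self, _name: &str) {}
    }

    fn walk_label<V: Visitor>(visitor: &mut V, label: &Label) {
        visitor.visit_name(&label.name);
    }

    fn walk_expr<V: Visitor>(visitor: &mut V, expr: &Expr) {
        match expr {
            Expr::Loop(body, label) => {
                if let Some(l) = label { visitor.visit_label(l); }
                visitor.visit_expr(body);
            }
            Expr::Break(label) => {
                if let Some(l) = label { visitor.visit_label(l); }
            }
            Expr::Unit => {}
        }
    }

    // A visitor that only cares about labels overrides visit_label.
    struct LabelCollector { labels: Vec<String> }
    impl Visitor for LabelCollector {
        fn visit_label(&mut self, label: &Label) {
            self.labels.push(label.name.clone());
            walk_label(self, label); // keep walking, as the default would
        }
    }

    fn main() {
        let expr = Expr::Loop(
            Box::new(Expr::Break(Some(Label { name: "'outer".into() }))),
            Some(Label { name: "'outer".into() }),
        );
        let mut collector = LabelCollector { labels: Vec::new() };
        collector.visit_expr(&expr);
        assert_eq!(collector.labels, vec!["'outer", "'outer"]);
    }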


@@ -773,22 +773,22 @@ impl<'a> LoweringContext<'a> {
         *self.name_map.entry(ident).or_insert_with(|| Symbol::from_ident(ident))
     }
-    fn lower_opt_sp_ident(&mut self, o_id: Option<Spanned<Ident>>) -> Option<Spanned<Name>> {
-        o_id.map(|sp_ident| respan(sp_ident.span, sp_ident.node.name))
+    fn lower_label(&mut self, label: Option<Label>) -> Option<hir::Label> {
+        label.map(|label| hir::Label { name: label.ident.name, span: label.span })
     }
-    fn lower_loop_destination(&mut self, destination: Option<(NodeId, Spanned<Ident>)>)
+    fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>)
         -> hir::Destination
     {
         match destination {
-            Some((id, label_ident)) => {
+            Some((id, label)) => {
                 let target = if let Def::Label(loop_id) = self.expect_full_def(id) {
                     hir::LoopIdResult::Ok(self.lower_node_id(loop_id).node_id)
                 } else {
                     hir::LoopIdResult::Err(hir::LoopIdError::UnresolvedLabel)
                 };
                 hir::Destination {
-                    ident: Some(label_ident),
+                    label: self.lower_label(Some(label)),
                     target_id: hir::ScopeTarget::Loop(target),
                 }
             },
@@ -798,7 +798,7 @@ impl<'a> LoweringContext<'a> {
                     .map(|innermost_loop_id| *innermost_loop_id);
                 hir::Destination {
-                    ident: None,
+                    label: None,
                     target_id: hir::ScopeTarget::Loop(
                         loop_id.map(|id| Ok(self.lower_node_id(id).node_id))
                             .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope))
@@ -2751,17 +2751,17 @@ impl<'a> LoweringContext<'a> {
                 hir::ExprIf(P(self.lower_expr(cond)), P(then_expr), else_opt)
             }
-            ExprKind::While(ref cond, ref body, opt_ident) => {
+            ExprKind::While(ref cond, ref body, opt_label) => {
                 self.with_loop_scope(e.id, |this|
                     hir::ExprWhile(
                         this.with_loop_condition_scope(|this| P(this.lower_expr(cond))),
                         this.lower_block(body, false),
-                        this.lower_opt_sp_ident(opt_ident)))
+                        this.lower_label(opt_label)))
             }
-            ExprKind::Loop(ref body, opt_ident) => {
+            ExprKind::Loop(ref body, opt_label) => {
                 self.with_loop_scope(e.id, |this|
                     hir::ExprLoop(this.lower_block(body, false),
-                                  this.lower_opt_sp_ident(opt_ident),
+                                  this.lower_label(opt_label),
                                   hir::LoopSource::Loop))
             }
             ExprKind::Catch(ref body) => {
@@ -2837,8 +2837,8 @@ impl<'a> LoweringContext<'a> {
                     (&None, &Some(..), Closed) => "RangeToInclusive",
                     (&Some(..), &Some(..), Closed) => "RangeInclusive",
                     (_, &None, Closed) =>
-                        panic!(self.diagnostic().span_fatal(
-                            e.span, "inclusive range with no end")),
+                        self.diagnostic().span_fatal(
+                            e.span, "inclusive range with no end").raise(),
                 };
                 let fields =
@@ -2877,30 +2877,30 @@ impl<'a> LoweringContext<'a> {
                 hir::ExprPath(self.lower_qpath(e.id, qself, path, ParamMode::Optional,
                                                ImplTraitContext::Disallowed))
             }
-            ExprKind::Break(opt_ident, ref opt_expr) => {
-                let label_result = if self.is_in_loop_condition && opt_ident.is_none() {
+            ExprKind::Break(opt_label, ref opt_expr) => {
+                let destination = if self.is_in_loop_condition && opt_label.is_none() {
                     hir::Destination {
-                        ident: opt_ident,
+                        label: None,
                         target_id: hir::ScopeTarget::Loop(
                             Err(hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
                     }
                 } else {
-                    self.lower_loop_destination(opt_ident.map(|ident| (e.id, ident)))
+                    self.lower_loop_destination(opt_label.map(|label| (e.id, label)))
                 };
                 hir::ExprBreak(
-                    label_result,
+                    destination,
                     opt_expr.as_ref().map(|x| P(self.lower_expr(x))))
             }
-            ExprKind::Continue(opt_ident) =>
+            ExprKind::Continue(opt_label) =>
                 hir::ExprAgain(
-                    if self.is_in_loop_condition && opt_ident.is_none() {
+                    if self.is_in_loop_condition && opt_label.is_none() {
                         hir::Destination {
-                            ident: opt_ident,
+                            label: None,
                             target_id: hir::ScopeTarget::Loop(Err(
                                 hir::LoopIdError::UnlabeledCfInWhileCondition).into()),
                         }
                     } else {
-                        self.lower_loop_destination(opt_ident.map( |ident| (e.id, ident)))
+                        self.lower_loop_destination(opt_label.map(|label| (e.id, label)))
                     }),
             ExprKind::Ret(ref e) => hir::ExprRet(e.as_ref().map(|x| P(self.lower_expr(x)))),
             ExprKind::InlineAsm(ref asm) => {
@@ -3000,7 +3000,7 @@ impl<'a> LoweringContext<'a> {
         // Desugar ExprWhileLet
         // From: `[opt_ident]: while let <pat> = <sub_expr> <body>`
-        ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_ident) => {
+        ExprKind::WhileLet(ref pat, ref sub_expr, ref body, opt_label) => {
            // to:
            //
            //   [opt_ident]: loop {
@@ -3041,7 +3041,7 @@ impl<'a> LoweringContext<'a> {
            // `[opt_ident]: loop { ... }`
            let loop_block = P(self.block_expr(P(match_expr)));
-            let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
+            let loop_expr = hir::ExprLoop(loop_block, self.lower_label(opt_label),
                                          hir::LoopSource::WhileLet);
            // add attributes to the outer returned expr node
            loop_expr
@@ -3049,7 +3049,7 @@ impl<'a> LoweringContext<'a> {
        // Desugar ExprForLoop
        // From: `[opt_ident]: for <pat> in <head> <body>`
-        ExprKind::ForLoop(ref pat, ref head, ref body, opt_ident) => {
+        ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
            // to:
            //
            //   {
@@ -3150,7 +3150,7 @@ impl<'a> LoweringContext<'a> {
                None));
            // `[opt_ident]: loop { ... }`
-            let loop_expr = hir::ExprLoop(loop_block, self.lower_opt_sp_ident(opt_ident),
+            let loop_expr = hir::ExprLoop(loop_block, self.lower_label(opt_label),
                                          hir::LoopSource::ForLoop);
            let LoweredNodeId { node_id, hir_id } = self.lower_node_id(e.id);
            let loop_expr = P(hir::Expr {
@@ -3270,7 +3270,7 @@ impl<'a> LoweringContext<'a> {
                e.span,
                hir::ExprBreak(
                    hir::Destination {
-                        ident: None,
+                        label: None,
                        target_id: hir::ScopeTarget::Block(catch_node),
                    },
                    Some(from_err_expr)


@@ -34,7 +34,7 @@ use util::nodemap::{NodeMap, FxHashSet};
 use syntax_pos::{Span, DUMMY_SP};
 use syntax::codemap::{self, Spanned};
 use syntax::abi::Abi;
-use syntax::ast::{self, Ident, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
+use syntax::ast::{self, Name, NodeId, DUMMY_NODE_ID, AsmDialect};
 use syntax::ast::{Attribute, Lit, StrStyle, FloatTy, IntTy, UintTy, MetaItem};
 use syntax::ext::hygiene::SyntaxContext;
 use syntax::ptr::P;
@@ -172,6 +172,18 @@ pub const DUMMY_HIR_ID: HirId = HirId {
 pub const DUMMY_ITEM_LOCAL_ID: ItemLocalId = ItemLocalId(!0);
+#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
+pub struct Label {
+    pub name: Name,
+    pub span: Span,
+}
+impl fmt::Debug for Label {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        write!(f, "label({:?})", self.name)
+    }
+}
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
 pub struct Lifetime {
     pub id: NodeId,
@@ -1276,11 +1288,11 @@ pub enum Expr_ {
     /// A while loop, with an optional label
     ///
     /// `'label: while expr { block }`
-    ExprWhile(P<Expr>, P<Block>, Option<Spanned<Name>>),
+    ExprWhile(P<Expr>, P<Block>, Option<Label>),
     /// Conditionless loop (can be exited with break, continue, or return)
     ///
     /// `'label: loop { block }`
-    ExprLoop(P<Block>, Option<Spanned<Name>>, LoopSource),
+    ExprLoop(P<Block>, Option<Label>, LoopSource),
     /// A `match` block, with a source that indicates whether or not it is
     /// the result of a desugaring, and if so, which kind.
     ExprMatch(P<Expr>, HirVec<Arm>, MatchSource),
@@ -1459,7 +1471,7 @@ impl ScopeTarget {
 #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub struct Destination {
     // This is `Some(_)` iff there is an explicit user-specified `label
-    pub ident: Option<Spanned<Ident>>,
+    pub label: Option<Label>,
     // These errors are caught and then reported during the diagnostics pass in
     // librustc_passes/loops.rs


@@ -1337,9 +1337,9 @@ impl<'a> State<'a> {
             hir::ExprIf(ref test, ref blk, ref elseopt) => {
                 self.print_if(&test, &blk, elseopt.as_ref().map(|e| &**e))?;
             }
-            hir::ExprWhile(ref test, ref blk, opt_sp_name) => {
-                if let Some(sp_name) = opt_sp_name {
-                    self.print_name(sp_name.node)?;
+            hir::ExprWhile(ref test, ref blk, opt_label) => {
+                if let Some(label) = opt_label {
+                    self.print_name(label.name)?;
                     self.word_space(":")?;
                 }
                 self.head("while")?;
@@ -1347,9 +1347,9 @@ impl<'a> State<'a> {
                 self.s.space()?;
                 self.print_block(&blk)?;
             }
-            hir::ExprLoop(ref blk, opt_sp_name, _) => {
-                if let Some(sp_name) = opt_sp_name {
-                    self.print_name(sp_name.node)?;
+            hir::ExprLoop(ref blk, opt_label, _) => {
+                if let Some(label) = opt_label {
+                    self.print_name(label.name)?;
                     self.word_space(":")?;
                 }
                 self.head("loop")?;
@@ -1424,11 +1424,11 @@ impl<'a> State<'a> {
             hir::ExprPath(ref qpath) => {
                 self.print_qpath(qpath, true)?
             }
-            hir::ExprBreak(label, ref opt_expr) => {
+            hir::ExprBreak(destination, ref opt_expr) => {
                 self.s.word("break")?;
                 self.s.space()?;
-                if let Some(label_ident) = label.ident {
-                    self.print_name(label_ident.node.name)?;
+                if let Some(label) = destination.label {
+                    self.print_name(label.name)?;
                     self.s.space()?;
                 }
                 if let Some(ref expr) = *opt_expr {
@@ -1436,11 +1436,11 @@ impl<'a> State<'a> {
                     self.s.space()?;
                 }
             }
-            hir::ExprAgain(label) => {
+            hir::ExprAgain(destination) => {
                 self.s.word("continue")?;
                 self.s.space()?;
-                if let Some(label_ident) = label.ident {
-                    self.print_name(label_ident.node.name)?;
+                if let Some(label) = destination.label {
+                    self.print_name(label.name)?;
                     self.s.space()?
                 }
             }


@@ -148,6 +148,11 @@ impl_stable_hash_for!(enum hir::LifetimeName {
     Name(name)
 });
+impl_stable_hash_for!(struct hir::Label {
+    span,
+    name
+});
 impl_stable_hash_for!(struct hir::Lifetime {
     id,
     span,
@@ -619,7 +624,7 @@ impl_stable_hash_for!(enum hir::CaptureClause {
 impl_stable_hash_for_spanned!(usize);
 impl_stable_hash_for!(struct hir::Destination {
-    ident,
+    label,
     target_id
 });


@@ -82,7 +82,7 @@ pub type VarOrigins = IndexVec<RegionVid, RegionVariableOrigin>;
 /// Describes constraints between the region variables and other
 /// regions, as well as other conditions that must be verified, or
 /// assumptions that can be made.
-#[derive(Default)]
+#[derive(Debug, Default)]
 pub struct RegionConstraintData<'tcx> {
     /// Constraints of the form `A <= B`, where either `A` or `B` can
     /// be a region variable (or neither, as it happens).


@@ -1018,7 +1018,7 @@ fn extract_labels(ctxt: &mut LifetimeContext<'_, '_>, body: &hir::Body) {
     fn expression_label(ex: &hir::Expr) -> Option<(ast::Name, Span)> {
         match ex.node {
             hir::ExprWhile(.., Some(label)) | hir::ExprLoop(_, Some(label), _) => {
-                Some((label.node, label.span))
+                Some((label.name, label.span))
             }
             _ => None,
         }


@@ -10,7 +10,7 @@
 use syntax::ast::NodeId;
 use syntax::symbol::InternedString;
-use ty::Instance;
+use ty::{Instance, TyCtxt};
 use util::nodemap::FxHashMap;
 use rustc_data_structures::base_n;
 use rustc_data_structures::stable_hasher::{HashStable, StableHasherResult,
@@ -25,6 +25,21 @@ pub enum MonoItem<'tcx> {
     GlobalAsm(NodeId),
 }
+impl<'tcx> MonoItem<'tcx> {
+    pub fn size_estimate<'a>(&self, tcx: &TyCtxt<'a, 'tcx, 'tcx>) -> usize {
+        match *self {
+            MonoItem::Fn(instance) => {
+                // Estimate the size of a function based on how many statements
+                // it contains.
+                tcx.instance_def_size_estimate(instance.def)
+            },
+            // Conservatively estimate the size of a static declaration
+            // or assembly to be 1.
+            MonoItem::Static(_) | MonoItem::GlobalAsm(_) => 1,
+        }
+    }
+}
 impl<'tcx> HashStable<StableHashingContext<'tcx>> for MonoItem<'tcx> {
     fn hash_stable<W: StableHasherResult>(&self,
                                           hcx: &mut StableHashingContext<'tcx>,
@@ -52,6 +67,7 @@ pub struct CodegenUnit<'tcx> {
     /// as well as the crate name and disambiguator.
     name: InternedString,
     items: FxHashMap<MonoItem<'tcx>, (Linkage, Visibility)>,
+    size_estimate: Option<usize>,
 }
 #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
@@ -101,6 +117,7 @@ impl<'tcx> CodegenUnit<'tcx> {
         CodegenUnit {
             name: name,
             items: FxHashMap(),
+            size_estimate: None,
         }
     }
@@ -131,6 +148,24 @@ impl<'tcx> CodegenUnit<'tcx> {
         let hash = hash & ((1u128 << 80) - 1);
         base_n::encode(hash, base_n::CASE_INSENSITIVE)
     }
+    pub fn estimate_size<'a>(&mut self, tcx: &TyCtxt<'a, 'tcx, 'tcx>) {
+        // Estimate the size of a codegen unit as (approximately) the number of MIR
+        // statements it corresponds to.
+        self.size_estimate = Some(self.items.keys().map(|mi| mi.size_estimate(tcx)).sum());
+    }
+    pub fn size_estimate(&self) -> usize {
+        // Should only be called if `estimate_size` has previously been called.
+        self.size_estimate.expect("estimate_size must be called before getting a size_estimate")
+    }
+    pub fn modify_size_estimate(&mut self, delta: usize) {
+        assert!(self.size_estimate.is_some());
+        if let Some(size_estimate) = self.size_estimate {
+            self.size_estimate = Some(size_estimate + delta);
+        }
+    }
 }
 impl<'tcx> HashStable<StableHashingContext<'tcx>> for CodegenUnit<'tcx> {
@@ -140,6 +175,8 @@ impl<'tcx> HashStable<StableHashingContext<'tcx>> for CodegenUnit<'tcx> {
         let CodegenUnit {
             ref items,
             name,
+            // The size estimate is not relevant to the hash
+            size_estimate: _,
         } = *self;
         name.hash_stable(hcx, hasher);
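The `size_estimate` field added above is deliberately an `Option`: `size_estimate()` panics unless `estimate_size()` has run first, and `modify_size_estimate()` is intended for when items from another codegen unit are merged in. A small self-contained sketch of that contract, using a toy struct rather than the real `CodegenUnit`:

    // Toy model of the CodegenUnit size-estimate contract added above.
    struct Unit {
        item_sizes: Vec<usize>,        // stand-in for per-MonoItem estimates
        size_estimate: Option<usize>,  // None until estimate_size() runs
    }

    impl Unit {
        fn estimate_size(&mut self) {
            self.size_estimate = Some(self.item_sizes.iter().sum());
        }
        fn size_estimate(&self) -> usize {
            self.size_estimate.expect("estimate_size must be called before getting a size_estimate")
        }
        fn modify_size_estimate(&mut self, delta: usize) {
            // e.g. when another unit's items are merged into this one
            assert!(self.size_estimate.is_some());
            if let Some(size) = self.size_estimate {
                self.size_estimate = Some(size + delta);
            }
        }
    }

    fn main() {
        let mut cgu = Unit { item_sizes: vec![10, 3, 1], size_estimate: None };
        cgu.estimate_size();                 // must run before size_estimate()
        assert_eq!(cgu.size_estimate(), 14);
        cgu.modify_size_estimate(6);         // merging in ~6 more statements
        assert_eq!(cgu.size_estimate(), 20);
    }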


@@ -72,6 +72,26 @@ pub enum OptLevel {
     SizeMin, // -Oz
 }
+#[derive(Clone, Copy, PartialEq, Hash)]
+pub enum Lto {
+    /// Don't do any LTO whatsoever
+    No,
+    /// Do a full crate graph LTO. The flavor is determined by the compiler
+    /// (currently the default is "fat").
+    Yes,
+    /// Do a full crate graph LTO with ThinLTO
+    Thin,
+    /// Do a local graph LTO with ThinLTO (only relevant for multiple codegen
+    /// units).
+    ThinLocal,
+    /// Do a full crate graph LTO with "fat" LTO
+    Fat,
+}
 #[derive(Clone, Copy, PartialEq, Hash)]
 pub enum DebugInfoLevel {
     NoDebugInfo,
@@ -389,7 +409,7 @@ top_level_options!(
         // commands like `--emit llvm-ir` which they're often incompatible with
         // if we otherwise use the defaults of rustc.
         cli_forced_codegen_units: Option<usize> [UNTRACKED],
-        cli_forced_thinlto: Option<bool> [UNTRACKED],
+        cli_forced_thinlto_off: bool [UNTRACKED],
     }
 );
@@ -590,7 +610,7 @@ pub fn basic_options() -> Options {
         debug_assertions: true,
         actually_rustdoc: false,
         cli_forced_codegen_units: None,
-        cli_forced_thinlto: None,
+        cli_forced_thinlto_off: false,
     }
 }
@@ -780,11 +800,13 @@ macro_rules! options {
             Some("crate=integer");
         pub const parse_unpretty: Option<&'static str> =
             Some("`string` or `string=string`");
+        pub const parse_lto: Option<&'static str> =
+            Some("one of `thin`, `fat`, or omitted");
     }
     #[allow(dead_code)]
     mod $mod_set {
-        use super::{$struct_name, Passes, SomePasses, AllPasses, Sanitizer};
+        use super::{$struct_name, Passes, SomePasses, AllPasses, Sanitizer, Lto};
         use rustc_back::{LinkerFlavor, PanicStrategy, RelroLevel};
         use std::path::PathBuf;
@@ -978,6 +1000,16 @@ macro_rules! options {
                 _ => false,
             }
         }
+        fn parse_lto(slot: &mut Lto, v: Option<&str>) -> bool {
+            *slot = match v {
+                None => Lto::Yes,
+                Some("thin") => Lto::Thin,
+                Some("fat") => Lto::Fat,
+                Some(_) => return false,
+            };
+            true
+        }
     }
 ) }
@@ -994,7 +1026,7 @@ options! {CodegenOptions, CodegenSetter, basic_codegen_options,
         "extra arguments to append to the linker invocation (space separated)"),
     link_dead_code: bool = (false, parse_bool, [UNTRACKED],
         "don't let linker strip dead code (turning it on can be used for code coverage)"),
-    lto: bool = (false, parse_bool, [TRACKED],
+    lto: Lto = (Lto::No, parse_lto, [TRACKED],
         "perform LLVM link-time optimizations"),
     target_cpu: Option<String> = (None, parse_opt_string, [TRACKED],
         "select target processor (rustc --print target-cpus for details)"),
@@ -1135,6 +1167,8 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
         "treat all errors that occur as bugs"),
     external_macro_backtrace: bool = (false, parse_bool, [UNTRACKED],
         "show macro backtraces even for non-local macros"),
+    teach: bool = (false, parse_bool, [TRACKED],
+        "show extended diagnostic help"),
     continue_parse_after_error: bool = (false, parse_bool, [TRACKED],
         "attempt to recover from parse errors (experimental)"),
     incremental: Option<String> = (None, parse_opt_string, [UNTRACKED],
@@ -1333,7 +1367,7 @@ pub fn build_target_config(opts: &Options, sp: &Handler) -> Config {
             sp.struct_fatal(&format!("Error loading target specification: {}", e))
                 .help("Use `--print target-list` for a list of built-in targets")
                 .emit();
-            panic!(FatalError);
+            FatalError.raise();
         }
     };
@@ -1341,8 +1375,8 @@ pub fn build_target_config(opts: &Options, sp: &Handler) -> Config {
         "16" => (ast::IntTy::I16, ast::UintTy::U16),
         "32" => (ast::IntTy::I32, ast::UintTy::U32),
         "64" => (ast::IntTy::I64, ast::UintTy::U64),
-        w => panic!(sp.fatal(&format!("target specification was invalid: \
-                                       unrecognized target-pointer-width {}", w))),
+        w => sp.fatal(&format!("target specification was invalid: \
+                                unrecognized target-pointer-width {}", w)).raise(),
     };
     Config {
@@ -1632,8 +1666,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches)
     let mut debugging_opts = build_debugging_options(matches, error_format);
     if !debugging_opts.unstable_options && error_format == ErrorOutputType::Json(true) {
-        early_error(ErrorOutputType::Json(false),
-                    "--error-format=pretty-json is unstable");
+        early_error(ErrorOutputType::Json(false), "--error-format=pretty-json is unstable");
     }
     let mut output_types = BTreeMap::new();
@@ -1677,7 +1710,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches)
     let mut cg = build_codegen_options(matches, error_format);
     let mut codegen_units = cg.codegen_units;
-    let mut thinlto = None;
+    let mut disable_thinlto = false;
     // Issue #30063: if user requests llvm-related output to one
     // particular path, disable codegen-units.
@@ -1699,12 +1732,12 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches)
                 }
                 early_warn(error_format, "resetting to default -C codegen-units=1");
                 codegen_units = Some(1);
-                thinlto = Some(false);
+                disable_thinlto = true;
             }
         }
         _ => {
             codegen_units = Some(1);
-            thinlto = Some(false);
+            disable_thinlto = true;
        }
    }
@@ -1734,7 +1767,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches)
        (&None, &None) => None,
    }.map(|m| PathBuf::from(m));
-    if cg.lto && incremental.is_some() {
+    if cg.lto != Lto::No && incremental.is_some() {
        early_error(error_format, "can't perform LTO when compiling incrementally");
    }
@@ -1934,7 +1967,7 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches)
        debug_assertions,
        actually_rustdoc: false,
        cli_forced_codegen_units: codegen_units,
-        cli_forced_thinlto: thinlto,
+        cli_forced_thinlto_off: disable_thinlto,
    },
    cfg)
}
@@ -2052,7 +2085,7 @@ mod dep_tracking {
    use std::hash::Hash;
    use std::path::PathBuf;
    use std::collections::hash_map::DefaultHasher;
-    use super::{Passes, CrateType, OptLevel, DebugInfoLevel,
+    use super::{Passes, CrateType, OptLevel, DebugInfoLevel, Lto,
                OutputTypes, Externs, ErrorOutputType, Sanitizer};
    use syntax::feature_gate::UnstableFeatures;
    use rustc_back::{PanicStrategy, RelroLevel};
@@ -2107,6 +2140,7 @@ mod dep_tracking {
    impl_dep_tracking_hash_via_hash!(RelroLevel);
    impl_dep_tracking_hash_via_hash!(Passes);
    impl_dep_tracking_hash_via_hash!(OptLevel);
+    impl_dep_tracking_hash_via_hash!(Lto);
    impl_dep_tracking_hash_via_hash!(DebugInfoLevel);
    impl_dep_tracking_hash_via_hash!(UnstableFeatures);
    impl_dep_tracking_hash_via_hash!(Externs);
@@ -2180,6 +2214,7 @@ mod tests {
    use lint;
    use middle::cstore;
    use session::config::{build_configuration, build_session_options_and_crate_config};
+    use session::config::Lto;
    use session::build_session;
    use std::collections::{BTreeMap, BTreeSet};
    use std::iter::FromIterator;
@@ -2656,7 +2691,7 @@ mod tests {
    // Make sure changing a [TRACKED] option changes the hash
    opts = reference.clone();
-    opts.cg.lto = true;
+    opts.cg.lto = Lto::Fat;
    assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
    opts = reference.clone();
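To make the new `-C lto` surface above concrete, here is a standalone sketch of what `parse_lto` accepts, mirroring the function in the diff but written outside the `options!` macro (illustration only, not the actual rustc module):

    // Mirrors parse_lto from the diff above, outside the options! macro.
    #[allow(dead_code)]
    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Lto { No, Yes, Thin, ThinLocal, Fat }

    fn parse_lto(slot: &mut Lto, v: Option<&str>) -> bool {
        *slot = match v {
            None => Lto::Yes,          // bare `-C lto`
            Some("thin") => Lto::Thin, // `-C lto=thin`
            Some("fat") => Lto::Fat,   // `-C lto=fat`
            Some(_) => return false,   // anything else is rejected
        };
        true
    }

    fn main() {
        let mut lto = Lto::No;
        assert!(parse_lto(&mut lto, None) && lto == Lto::Yes);
        assert!(parse_lto(&mut lto, Some("thin")) && lto == Lto::Thin);
        assert!(parse_lto(&mut lto, Some("fat")) && lto == Lto::Fat);
        assert!(!parse_lto(&mut lto, Some("full"))); // unknown value -> parse error
    }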


@@ -250,7 +250,7 @@ impl Session {
     }
     pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
-        panic!(self.diagnostic().span_fatal(sp, msg))
+        self.diagnostic().span_fatal(sp, msg).raise()
     }
     pub fn span_fatal_with_code<S: Into<MultiSpan>>(
         &self,
@@ -258,10 +258,10 @@ impl Session {
         msg: &str,
         code: DiagnosticId,
     ) -> ! {
-        panic!(self.diagnostic().span_fatal_with_code(sp, msg, code))
+        self.diagnostic().span_fatal_with_code(sp, msg, code).raise()
     }
     pub fn fatal(&self, msg: &str) -> ! {
-        panic!(self.diagnostic().fatal(msg))
+        self.diagnostic().fatal(msg).raise()
     }
     pub fn span_err_or_warn<S: Into<MultiSpan>>(&self, is_warning: bool, sp: S, msg: &str) {
         if is_warning {
@@ -498,9 +498,65 @@ impl Session {
         self.use_mir()
     }
-    pub fn lto(&self) -> bool {
-        self.opts.cg.lto || self.target.target.options.requires_lto
+    /// Calculates the flavor of LTO to use for this compilation.
+    pub fn lto(&self) -> config::Lto {
+        // If our target has codegen requirements ignore the command line
+        if self.target.target.options.requires_lto {
+            return config::Lto::Fat
+        }
+        // If the user specified something, return that. If they only said `-C
+        // lto` and we've for whatever reason forced off ThinLTO via the CLI,
+        // then ensure we can't use a ThinLTO.
+        match self.opts.cg.lto {
+            config::Lto::No => {}
+            config::Lto::Yes if self.opts.cli_forced_thinlto_off => {
+                return config::Lto::Fat
+            }
+            other => return other,
+        }
+        // Ok at this point the target doesn't require anything and the user
+        // hasn't asked for anything. Our next decision is whether or not
+        // we enable "auto" ThinLTO where we use multiple codegen units and
+        // then do ThinLTO over those codegen units. The logic below will
+        // either return `No` or `ThinLocal`.
+        // If processing command line options determined that we're incompatible
+        // with ThinLTO (e.g. `-C lto --emit llvm-ir`) then return that option.
+        if self.opts.cli_forced_thinlto_off {
+            return config::Lto::No
+        }
+        // If `-Z thinlto` specified process that, but note that this is mostly
+        // a deprecated option now that `-C lto=thin` exists.
+        if let Some(enabled) = self.opts.debugging_opts.thinlto {
+            if enabled {
+                return config::Lto::ThinLocal
+            } else {
+                return config::Lto::No
+            }
+        }
+        // If there's only one codegen unit and LTO isn't enabled then there's
+        // no need for ThinLTO so just return false.
+        if self.codegen_units() == 1 {
+            return config::Lto::No
+        }
+        // Right now ThinLTO isn't compatible with incremental compilation.
+        if self.opts.incremental.is_some() {
+            return config::Lto::No
+        }
+        // Now we're in "defaults" territory. By default we enable ThinLTO for
+        // optimized compiles (anything greater than O0).
+        match self.opts.optimize {
+            config::OptLevel::No => config::Lto::No,
+            _ => config::Lto::ThinLocal,
+        }
     }
     /// Returns the panic strategy for this compile session. If the user explicitly selected one
     /// using '-C panic', use that, otherwise use the panic strategy defined by the target.
     pub fn panic_strategy(&self) -> PanicStrategy {
@@ -805,36 +861,8 @@ impl Session {
         16
     }
-    /// Returns whether ThinLTO is enabled for this compilation
-    pub fn thinlto(&self) -> bool {
-        // If processing command line options determined that we're incompatible
-        // with ThinLTO (e.g. `-C lto --emit llvm-ir`) then return that option.
-        if let Some(enabled) = self.opts.cli_forced_thinlto {
-            return enabled
-        }
-        // If explicitly specified, use that with the next highest priority
-        if let Some(enabled) = self.opts.debugging_opts.thinlto {
-            return enabled
-        }
-        // If there's only one codegen unit and LTO isn't enabled then there's
-        // no need for ThinLTO so just return false.
-        if self.codegen_units() == 1 && !self.lto() {
-            return false
-        }
-        // Right now ThinLTO isn't compatible with incremental compilation.
-        if self.opts.incremental.is_some() {
-            return false
-        }
-        // Now we're in "defaults" territory. By default we enable ThinLTO for
-        // optimized compiles (anything greater than O0).
-        match self.opts.optimize {
-            config::OptLevel::No => false,
-            _ => true,
-        }
+    pub fn teach(&self, code: &DiagnosticId) -> bool {
+        self.opts.debugging_opts.teach && !self.parse_sess.span_diagnostic.code_emitted(code)
     }
 }
@@ -919,7 +947,7 @@ pub fn build_session_(sopts: config::Options,
     let host = match Target::search(config::host_triple()) {
         Ok(t) => t,
         Err(e) => {
-            panic!(span_diagnostic.fatal(&format!("Error loading host specification: {}", e)));
+            span_diagnostic.fatal(&format!("Error loading host specification: {}", e)).raise();
         }
     };
     let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
@@ -945,7 +973,7 @@ pub fn build_session_(sopts: config::Options,
     let working_dir = match env::current_dir() {
         Ok(dir) => dir,
         Err(e) => {
-            panic!(p_s.span_diagnostic.fatal(&format!("Current directory is invalid: {}", e)))
+            p_s.span_diagnostic.fatal(&format!("Current directory is invalid: {}", e)).raise()
        }
    };
    let working_dir = file_path_mapping.map_prefix(working_dir);
@@ -1076,7 +1104,7 @@ pub fn early_error(output: config::ErrorOutputType, msg: &str) -> ! {
    };
    let handler = errors::Handler::with_emitter(true, false, emitter);
    handler.emit(&MultiSpan::new(), msg, errors::Level::Fatal);
-    panic!(errors::FatalError);
+    errors::FatalError.raise();
}
pub fn early_warn(output: config::ErrorOutputType, msg: &str) {
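The rewritten `Session::lto` above encodes a precedence order: target requirements first, then the explicit `-C lto` value, then the forced-off flag, then `-Z thinlto`, then the single-CGU and incremental checks, and finally the optimization level. A simplified, self-contained sketch of that decision ladder follows; the `Inputs` struct and `lto_flavor` function are hypothetical and only model the logic shown in the diff:

    #[derive(Clone, Copy, PartialEq, Debug)]
    enum Lto { No, Yes, Thin, ThinLocal, Fat }

    // Hypothetical bundle of the inputs Session::lto() consults.
    #[derive(Clone, Copy)]
    struct Inputs {
        target_requires_lto: bool,
        cg_lto: Lto,                 // -C lto[=...]
        cli_forced_thinlto_off: bool,
        z_thinlto: Option<bool>,     // -Z thinlto
        codegen_units: usize,
        incremental: bool,
        optimized: bool,             // anything above -O0
    }

    fn lto_flavor(i: &Inputs) -> Lto {
        if i.target_requires_lto { return Lto::Fat; }
        match i.cg_lto {
            Lto::No => {}
            Lto::Yes if i.cli_forced_thinlto_off => return Lto::Fat,
            other => return other,
        }
        if i.cli_forced_thinlto_off { return Lto::No; }
        if let Some(enabled) = i.z_thinlto {
            return if enabled { Lto::ThinLocal } else { Lto::No };
        }
        if i.codegen_units == 1 || i.incremental { return Lto::No; }
        if i.optimized { Lto::ThinLocal } else { Lto::No }
    }

    fn main() {
        let base = Inputs {
            target_requires_lto: false,
            cg_lto: Lto::No,
            cli_forced_thinlto_off: false,
            z_thinlto: None,
            codegen_units: 16,
            incremental: false,
            optimized: true,
        };
        // Default optimized build with many CGUs: local ThinLTO.
        assert_eq!(lto_flavor(&base), Lto::ThinLocal);
        // `-C lto` plus an emit mode that forces ThinLTO off falls back to fat LTO.
        assert_eq!(lto_flavor(&Inputs { cg_lto: Lto::Yes, cli_forced_thinlto_off: true, ..base }), Lto::Fat);
        // An explicit `-C lto=thin` wins over the defaults.
        assert_eq!(lto_flavor(&Inputs { cg_lto: Lto::Thin, ..base }), Lto::Thin);
    }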


@@ -794,48 +794,56 @@ impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
     }
     fn get_fn_like_arguments(&self, node: hir::map::Node) -> (Span, Vec<ArgKind>) {
-        if let hir::map::NodeExpr(&hir::Expr {
-            node: hir::ExprClosure(_, ref _decl, id, span, _),
-            ..
-        }) = node {
-            (self.tcx.sess.codemap().def_span(span), self.tcx.hir.body(id).arguments.iter()
-                .map(|arg| {
-                    if let hir::Pat {
-                        node: hir::PatKind::Tuple(args, _),
-                        span,
-                        ..
-                    } = arg.pat.clone().into_inner() {
-                        ArgKind::Tuple(
-                            span,
-                            args.iter().map(|pat| {
-                                let snippet = self.tcx.sess.codemap()
-                                    .span_to_snippet(pat.span).unwrap();
-                                (snippet, "_".to_owned())
-                            }).collect::<Vec<_>>(),
-                        )
-                    } else {
-                        let name = self.tcx.sess.codemap().span_to_snippet(arg.pat.span).unwrap();
-                        ArgKind::Arg(name, "_".to_owned())
-                    }
-                })
-                .collect::<Vec<ArgKind>>())
-        } else if let hir::map::NodeItem(&hir::Item {
-            span,
-            node: hir::ItemFn(ref decl, ..),
-            ..
-        }) = node {
-            (self.tcx.sess.codemap().def_span(span), decl.inputs.iter()
-                .map(|arg| match arg.clone().into_inner().node {
-                    hir::TyTup(ref tys) => ArgKind::Tuple(
-                        arg.span,
-                        tys.iter()
-                            .map(|_| ("_".to_owned(), "_".to_owned()))
-                            .collect::<Vec<_>>(),
-                    ),
-                    _ => ArgKind::Arg("_".to_owned(), "_".to_owned())
-                }).collect::<Vec<ArgKind>>())
-        } else {
-            panic!("non-FnLike node found: {:?}", node);
+        match node {
+            hir::map::NodeExpr(&hir::Expr {
+                node: hir::ExprClosure(_, ref _decl, id, span, _),
+                ..
+            }) => {
+                (self.tcx.sess.codemap().def_span(span), self.tcx.hir.body(id).arguments.iter()
+                    .map(|arg| {
+                        if let hir::Pat {
+                            node: hir::PatKind::Tuple(args, _),
+                            span,
+                            ..
+                        } = arg.pat.clone().into_inner() {
+                            ArgKind::Tuple(
+                                span,
+                                args.iter().map(|pat| {
+                                    let snippet = self.tcx.sess.codemap()
+                                        .span_to_snippet(pat.span).unwrap();
+                                    (snippet, "_".to_owned())
+                                }).collect::<Vec<_>>(),
+                            )
+                        } else {
+                            let name = self.tcx.sess.codemap()
+                                .span_to_snippet(arg.pat.span).unwrap();
+                            ArgKind::Arg(name, "_".to_owned())
+                        }
+                    })
+                    .collect::<Vec<ArgKind>>())
+            }
+            hir::map::NodeItem(&hir::Item {
+                span,
+                node: hir::ItemFn(ref decl, ..),
+                ..
+            }) |
+            hir::map::NodeImplItem(&hir::ImplItem {
+                span,
+                node: hir::ImplItemKind::Method(hir::MethodSig { ref decl, .. }, _),
+                ..
+            }) => {
+                (self.tcx.sess.codemap().def_span(span), decl.inputs.iter()
+                    .map(|arg| match arg.clone().into_inner().node {
+                        hir::TyTup(ref tys) => ArgKind::Tuple(
+                            arg.span,
+                            tys.iter()
+                                .map(|_| ("_".to_owned(), "_".to_owned()))
+                                .collect::<Vec<_>>(),
+                        ),
+                        _ => ArgKind::Arg("_".to_owned(), "_".to_owned())
+                    }).collect::<Vec<ArgKind>>())
+            }
+            _ => panic!("non-FnLike node found: {:?}", node),
         }
     }


@ -101,7 +101,7 @@ pub struct MismatchedProjectionTypes<'tcx> {
pub err: ty::error::TypeError<'tcx> pub err: ty::error::TypeError<'tcx>
} }
#[derive(PartialEq, Eq, Debug)] #[derive(PartialEq, Eq, PartialOrd, Ord, Debug)]
enum ProjectionTyCandidate<'tcx> { enum ProjectionTyCandidate<'tcx> {
// from a where-clause in the env or object type // from a where-clause in the env or object type
ParamEnv(ty::PolyProjectionPredicate<'tcx>), ParamEnv(ty::PolyProjectionPredicate<'tcx>),
@ -293,9 +293,23 @@ impl<'a, 'b, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for AssociatedTypeNormalizer<'a,
Reveal::UserFacing => ty, Reveal::UserFacing => ty,
Reveal::All => { Reveal::All => {
let recursion_limit = self.tcx().sess.recursion_limit.get();
if self.depth >= recursion_limit {
let obligation = Obligation::with_depth(
self.cause.clone(),
recursion_limit,
self.param_env,
ty,
);
self.selcx.infcx().report_overflow_error(&obligation, true);
}
let generic_ty = self.tcx().type_of(def_id); let generic_ty = self.tcx().type_of(def_id);
let concrete_ty = generic_ty.subst(self.tcx(), substs); let concrete_ty = generic_ty.subst(self.tcx(), substs);
self.fold_ty(concrete_ty) self.depth += 1;
let folded_ty = self.fold_ty(concrete_ty);
self.depth -= 1;
folded_ty
} }
} }
} }
@ -824,21 +838,12 @@ fn project_type<'cx, 'gcx, 'tcx>(
// Drop duplicates.
//
// Note: `candidates.vec` seems to be on the critical path of the
// compiler. Replacing it with a HashSet was also tried, which would
// render the following dedup unnecessary. The original comment indicated
// that it was 9% slower, but that data is now obsolete and a new
// benchmark should be performed.
candidates.vec.sort_unstable();
candidates.vec.dedup();
// Prefer where-clauses. As in select, if there are multiple // Prefer where-clauses. As in select, if there are multiple
// candidates, we prefer where-clause candidates over impls. This // candidates, we prefer where-clause candidates over impls. This
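The new dedup relies on `sort_unstable` followed by `dedup`, which only removes *adjacent* duplicates, so the sort is what makes it a full deduplication; it also requires `Ord`, which is why `ProjectionTyCandidate` and the types it mentions gain `PartialOrd`/`Ord` derives elsewhere in this commit. A small sketch contrasting the removed quadratic loop with the new approach, using plain integers as stand-ins for candidates:

fn main() {
    // Quadratic dedup, roughly what the removed loop did.
    let mut a = vec![3, 1, 3, 2, 1];
    let mut i = 0;
    while i < a.len() {
        if (0..i).any(|j| a[i] == a[j]) {
            a.swap_remove(i);
        } else {
            i += 1;
        }
    }

    // Sort + dedup, what the new code does; needs Ord but is O(n log n).
    let mut b = vec![3, 1, 3, 2, 1];
    b.sort_unstable();
    b.dedup();

    a.sort_unstable(); // the quadratic version preserves no order, so compare as sets
    assert_eq!(a, b);
    assert_eq!(b, vec![1, 2, 3]);
}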


@ -262,10 +262,11 @@ impl<'a, 'gcx, 'tcx> TyS<'tcx> {
})) }))
}, },
TyArray(ty, len) => { TyArray(ty, len) => {
if len.val.to_const_int().and_then(|i| i.to_u64()) == Some(0) { match len.val.to_const_int().and_then(|i| i.to_u64()) {
DefIdForest::empty() // If the array is definitely non-empty, it's uninhabited if
} else { // the type of its elements is uninhabited.
ty.uninhabited_from(visited, tcx) Some(n) if n != 0 => ty.uninhabited_from(visited, tcx),
_ => DefIdForest::empty()
} }
} }
TyRef(_, ref tm) => { TyRef(_, ref tm) => {
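The rewritten arm encodes the rule that `[T; 0]` is inhabited even when `T` is not, while `[T; n]` with known `n > 0` is uninhabited exactly when `T` is, and an unknown length is treated conservatively as inhabited. A toy sketch of that rule over a simplified type enum (hypothetical types, not the compiler's `TyS`):

enum Ty {
    Unit,                        // inhabited
    Never,                       // uninhabited, like `!`
    Array(Box<Ty>, Option<u64>), // element type and (possibly unknown) length
}

fn uninhabited(ty: &Ty) -> bool {
    match *ty {
        Ty::Unit => false,
        Ty::Never => true,
        Ty::Array(ref elem, len) => match len {
            // Definitely non-empty: uninhabited iff the element type is.
            Some(n) if n != 0 => uninhabited(elem),
            // Empty or unknown length: treat as inhabited.
            _ => false,
        },
    }
}

fn main() {
    assert!(!uninhabited(&Ty::Array(Box::new(Ty::Never), Some(0)))); // [!; 0] has a value
    assert!(uninhabited(&Ty::Array(Box::new(Ty::Never), Some(3))));  // [!; 3] does not
    assert!(!uninhabited(&Ty::Array(Box::new(Ty::Never), None)));
}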


@ -617,8 +617,8 @@ impl<'tcx> QueryDescription<'tcx> for queries::optimized_mir<'tcx> {
} }
fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>, fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
id: SerializedDepNodeIndex) id: SerializedDepNodeIndex)
-> Option<Self::Value> { -> Option<Self::Value> {
let mir: Option<::mir::Mir<'tcx>> = tcx.on_disk_query_result_cache let mir: Option<::mir::Mir<'tcx>> = tcx.on_disk_query_result_cache
.try_load_query_result(tcx, id); .try_load_query_result(tcx, id);
mir.map(|x| tcx.alloc_mir(x)) mir.map(|x| tcx.alloc_mir(x))
@ -637,6 +637,27 @@ impl<'tcx> QueryDescription<'tcx> for queries::target_features_whitelist<'tcx> {
} }
} }
impl<'tcx> QueryDescription<'tcx> for queries::instance_def_size_estimate<'tcx> {
fn describe(tcx: TyCtxt, def: ty::InstanceDef<'tcx>) -> String {
format!("estimating size for `{}`", tcx.item_path_str(def.def_id()))
}
}
impl<'tcx> QueryDescription<'tcx> for queries::generics_of<'tcx> {
#[inline]
fn cache_on_disk(def_id: Self::Key) -> bool {
def_id.is_local()
}
fn try_load_from_disk<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
id: SerializedDepNodeIndex)
-> Option<Self::Value> {
let generics: Option<ty::Generics> = tcx.on_disk_query_result_cache
.try_load_query_result(tcx, id);
generics.map(|x| tcx.alloc_generics(x))
}
}
macro_rules! impl_disk_cacheable_query( macro_rules! impl_disk_cacheable_query(
($query_name:ident, |$key:tt| $cond:expr) => { ($query_name:ident, |$key:tt| $cond:expr) => {
impl<'tcx> QueryDescription<'tcx> for queries::$query_name<'tcx> { impl<'tcx> QueryDescription<'tcx> for queries::$query_name<'tcx> {
@ -662,3 +683,6 @@ impl_disk_cacheable_query!(mir_const_qualif, |def_id| def_id.is_local());
impl_disk_cacheable_query!(check_match, |def_id| def_id.is_local()); impl_disk_cacheable_query!(check_match, |def_id| def_id.is_local());
impl_disk_cacheable_query!(contains_extern_indicator, |_| true); impl_disk_cacheable_query!(contains_extern_indicator, |_| true);
impl_disk_cacheable_query!(def_symbol_name, |_| true); impl_disk_cacheable_query!(def_symbol_name, |_| true);
impl_disk_cacheable_query!(type_of, |def_id| def_id.is_local());
impl_disk_cacheable_query!(predicates_of, |def_id| def_id.is_local());
impl_disk_cacheable_query!(used_trait_imports, |def_id| def_id.is_local());


@ -365,6 +365,9 @@ define_maps! { <'tcx>
target_features_whitelist_node(CrateNum) -> Rc<FxHashSet<String>>, target_features_whitelist_node(CrateNum) -> Rc<FxHashSet<String>>,
[] fn target_features_enabled: TargetFeaturesEnabled(DefId) -> Rc<Vec<String>>, [] fn target_features_enabled: TargetFeaturesEnabled(DefId) -> Rc<Vec<String>>,
// Get an estimate of the size of an InstanceDef based on its MIR for CGU partitioning.
[] fn instance_def_size_estimate: instance_def_size_estimate_dep_node(ty::InstanceDef<'tcx>)
-> usize,
} }
////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////
@ -514,3 +517,10 @@ fn substitute_normalize_and_test_predicates_node<'tcx>(key: (DefId, &'tcx Substs
fn target_features_whitelist_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> { fn target_features_whitelist_node<'tcx>(_: CrateNum) -> DepConstructor<'tcx> {
DepConstructor::TargetFeaturesWhitelist DepConstructor::TargetFeaturesWhitelist
} }
fn instance_def_size_estimate_dep_node<'tcx>(instance_def: ty::InstanceDef<'tcx>)
-> DepConstructor<'tcx> {
DepConstructor::InstanceDefSizeEstimate {
instance_def
}
}


@ -204,8 +204,12 @@ impl<'sess> OnDiskCache<'sess> {
let enc = &mut encoder; let enc = &mut encoder;
let qri = &mut query_result_index; let qri = &mut query_result_index;
// Encode TypeckTables encode_query_results::<type_of, _>(tcx, enc, qri)?;
encode_query_results::<generics_of, _>(tcx, enc, qri)?;
encode_query_results::<predicates_of, _>(tcx, enc, qri)?;
encode_query_results::<used_trait_imports, _>(tcx, enc, qri)?;
encode_query_results::<typeck_tables_of, _>(tcx, enc, qri)?; encode_query_results::<typeck_tables_of, _>(tcx, enc, qri)?;
encode_query_results::<trans_fulfill_obligation, _>(tcx, enc, qri)?;
encode_query_results::<optimized_mir, _>(tcx, enc, qri)?; encode_query_results::<optimized_mir, _>(tcx, enc, qri)?;
encode_query_results::<unsafety_check_result, _>(tcx, enc, qri)?; encode_query_results::<unsafety_check_result, _>(tcx, enc, qri)?;
encode_query_results::<borrowck, _>(tcx, enc, qri)?; encode_query_results::<borrowck, _>(tcx, enc, qri)?;
@ -215,7 +219,6 @@ impl<'sess> OnDiskCache<'sess> {
encode_query_results::<const_is_rvalue_promotable_to_static, _>(tcx, enc, qri)?; encode_query_results::<const_is_rvalue_promotable_to_static, _>(tcx, enc, qri)?;
encode_query_results::<contains_extern_indicator, _>(tcx, enc, qri)?; encode_query_results::<contains_extern_indicator, _>(tcx, enc, qri)?;
encode_query_results::<symbol_name, _>(tcx, enc, qri)?; encode_query_results::<symbol_name, _>(tcx, enc, qri)?;
encode_query_results::<trans_fulfill_obligation, _>(tcx, enc, qri)?;
encode_query_results::<check_match, _>(tcx, enc, qri)?; encode_query_results::<check_match, _>(tcx, enc, qri)?;
} }


@ -761,6 +761,7 @@ pub fn force_from_dep_node<'a, 'gcx, 'lcx>(tcx: TyCtxt<'a, 'gcx, 'lcx>,
DepKind::EraseRegionsTy | DepKind::EraseRegionsTy |
DepKind::NormalizeTy | DepKind::NormalizeTy |
DepKind::SubstituteNormalizeAndTestPredicates | DepKind::SubstituteNormalizeAndTestPredicates |
DepKind::InstanceDefSizeEstimate |
// This one should never occur in this context // This one should never occur in this context
DepKind::Null => { DepKind::Null => {
@ -982,4 +983,8 @@ impl_load_from_cache!(
ConstIsRvaluePromotableToStatic => const_is_rvalue_promotable_to_static, ConstIsRvaluePromotableToStatic => const_is_rvalue_promotable_to_static,
ContainsExternIndicator => contains_extern_indicator, ContainsExternIndicator => contains_extern_indicator,
CheckMatch => check_match, CheckMatch => check_match,
TypeOfItem => type_of,
GenericsOfItem => generics_of,
PredicatesOfItem => predicates_of,
UsedTraitImports => used_trait_imports,
); );


@ -17,7 +17,7 @@ pub use self::fold::TypeFoldable;
use hir::{map as hir_map, FreevarMap, TraitMap}; use hir::{map as hir_map, FreevarMap, TraitMap};
use hir::def::{Def, CtorKind, ExportMap}; use hir::def::{Def, CtorKind, ExportMap};
use hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE}; use hir::def_id::{CrateNum, DefId, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use hir::map::DefPathData; use hir::map::DefPathData;
use hir::svh::Svh; use hir::svh::Svh;
use ich::Fingerprint; use ich::Fingerprint;
@ -39,8 +39,8 @@ use util::nodemap::{NodeSet, DefIdMap, FxHashMap, FxHashSet};
use serialize::{self, Encodable, Encoder}; use serialize::{self, Encodable, Encoder};
use std::cell::RefCell; use std::cell::RefCell;
use std::collections::BTreeMap;
use std::cmp; use std::cmp;
use std::cmp::Ordering;
use std::fmt; use std::fmt;
use std::hash::{Hash, Hasher}; use std::hash::{Hash, Hasher};
use std::iter::FromIterator; use std::iter::FromIterator;
@ -499,6 +499,20 @@ impl<'tcx> Hash for TyS<'tcx> {
} }
} }
impl<'tcx> Ord for TyS<'tcx> {
#[inline]
fn cmp(&self, other: &TyS<'tcx>) -> Ordering {
// (self as *const _).cmp(other as *const _)
(self as *const TyS<'tcx>).cmp(&(other as *const TyS<'tcx>))
}
}
impl<'tcx> PartialOrd for TyS<'tcx> {
#[inline]
fn partial_cmp(&self, other: &TyS<'tcx>) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<'tcx> TyS<'tcx> { impl<'tcx> TyS<'tcx> {
pub fn is_primitive_ty(&self) -> bool { pub fn is_primitive_ty(&self) -> bool {
match self.sty { match self.sty {
@ -568,6 +582,19 @@ impl<T> PartialEq for Slice<T> {
} }
impl<T> Eq for Slice<T> {} impl<T> Eq for Slice<T> {}
impl<T> Ord for Slice<T> {
#[inline]
fn cmp(&self, other: &Slice<T>) -> Ordering {
(&self.0 as *const [T]).cmp(&(&other.0 as *const [T]))
}
}
impl<T> PartialOrd for Slice<T> {
#[inline]
fn partial_cmp(&self, other: &Slice<T>) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl<T> Hash for Slice<T> { impl<T> Hash for Slice<T> {
fn hash<H: Hasher>(&self, s: &mut H) { fn hash<H: Hasher>(&self, s: &mut H) {
(self.as_ptr(), self.len()).hash(s) (self.as_ptr(), self.len()).hash(s)
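Because `TyS` and `Slice` values are interned, equal values share a single allocation, so comparing addresses yields a cheap total order; that order is only stable within one compiler invocation, which is sufficient for the sort-and-dedup use above. A standalone sketch of pointer-based ordering over a toy interner (not the compiler's arena):

use std::cmp::Ordering;
use std::collections::HashMap;

// A toy interner: equal strings always come back as the same allocation.
struct Interner {
    map: HashMap<String, &'static str>,
}

impl Interner {
    fn new() -> Self {
        Interner { map: HashMap::new() }
    }
    fn intern(&mut self, s: &str) -> &'static str {
        if let Some(&interned) = self.map.get(s) {
            return interned;
        }
        let leaked: &'static str = Box::leak(s.to_owned().into_boxed_str());
        self.map.insert(s.to_owned(), leaked);
        leaked
    }
}

// Pointer-based ordering: cheap and total, but only stable within one process run.
fn cmp_interned(a: &'static str, b: &'static str) -> Ordering {
    (a.as_ptr() as usize).cmp(&(b.as_ptr() as usize))
}

fn main() {
    let mut interner = Interner::new();
    let x = interner.intern("foo");
    let y = interner.intern("foo");
    let z = interner.intern("bar");
    assert_eq!(cmp_interned(x, y), Ordering::Equal); // same allocation
    assert_ne!(cmp_interned(x, z), Ordering::Equal);
}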
@ -758,9 +785,8 @@ pub struct Generics {
pub regions: Vec<RegionParameterDef>, pub regions: Vec<RegionParameterDef>,
pub types: Vec<TypeParameterDef>, pub types: Vec<TypeParameterDef>,
/// Reverse map to each `TypeParameterDef`'s `index` field, from /// Reverse map to each `TypeParameterDef`'s `index` field
/// `def_id.index` (`def_id.krate` is the same as the item's). pub type_param_to_index: FxHashMap<DefId, u32>,
pub type_param_to_index: BTreeMap<DefIndex, u32>,
pub has_self: bool, pub has_self: bool,
pub has_late_bound_regions: Option<Span>, pub has_late_bound_regions: Option<Span>,
@ -1103,7 +1129,7 @@ pub type PolySubtypePredicate<'tcx> = ty::Binder<SubtypePredicate<'tcx>>;
/// equality between arbitrary types. Processing an instance of /// equality between arbitrary types. Processing an instance of
/// Form #2 eventually yields one of these `ProjectionPredicate` /// Form #2 eventually yields one of these `ProjectionPredicate`
/// instances to normalize the LHS. /// instances to normalize the LHS.
#[derive(Copy, Clone, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)] #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct ProjectionPredicate<'tcx> { pub struct ProjectionPredicate<'tcx> {
pub projection_ty: ProjectionTy<'tcx>, pub projection_ty: ProjectionTy<'tcx>,
pub ty: Ty<'tcx>, pub ty: Ty<'tcx>,
@ -2695,6 +2721,20 @@ fn crate_hash<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
tcx.hir.crate_hash tcx.hir.crate_hash
} }
fn instance_def_size_estimate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
instance_def: InstanceDef<'tcx>)
-> usize {
match instance_def {
InstanceDef::Item(..) |
InstanceDef::DropGlue(..) => {
let mir = tcx.instance_mir(instance_def);
mir.basic_blocks().iter().map(|bb| bb.statements.len()).sum()
},
// Estimate the size of other compiler-generated shims to be 1.
_ => 1
}
}
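The provider estimates an instance's size as the total number of MIR statements across its basic blocks, falling back to 1 for other compiler-generated shims, which keeps the estimate cheap while roughly tracking codegen cost. A simplified sketch of the same sum over a hypothetical `BasicBlock` type:

struct BasicBlock {
    statements: Vec<String>, // stand-in for MIR statements
}

// Rough size estimate: total statement count over all basic blocks.
fn size_estimate(blocks: &[BasicBlock]) -> usize {
    blocks.iter().map(|bb| bb.statements.len()).sum()
}

fn main() {
    let mir = vec![
        BasicBlock { statements: vec!["_1 = const 1".into(), "_2 = _1".into()] },
        BasicBlock { statements: vec!["return".into()] },
    ];
    assert_eq!(size_estimate(&mir), 3);
}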
pub fn provide(providers: &mut ty::maps::Providers) { pub fn provide(providers: &mut ty::maps::Providers) {
context::provide(providers); context::provide(providers);
erase_regions::provide(providers); erase_regions::provide(providers);
@ -2712,6 +2752,7 @@ pub fn provide(providers: &mut ty::maps::Providers) {
original_crate_name, original_crate_name,
crate_hash, crate_hash,
trait_impls_of: trait_def::trait_impls_of_provider, trait_impls_of: trait_def::trait_impls_of_provider,
instance_def_size_estimate,
..*providers ..*providers
}; };
} }


@ -638,7 +638,7 @@ impl<'tcx> PolyExistentialTraitRef<'tcx> {
/// erase, or otherwise "discharge" these bound regions, we change the /// erase, or otherwise "discharge" these bound regions, we change the
/// type from `Binder<T>` to just `T` (see /// type from `Binder<T>` to just `T` (see
/// e.g. `liberate_late_bound_regions`). /// e.g. `liberate_late_bound_regions`).
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Binder<T>(pub T); pub struct Binder<T>(pub T);
impl<T> Binder<T> { impl<T> Binder<T> {
@ -738,7 +738,7 @@ impl<T> Binder<T> {
/// Represents the projection of an associated type. In explicit UFCS /// Represents the projection of an associated type. In explicit UFCS
/// form this would be written `<T as Trait<..>>::N`. /// form this would be written `<T as Trait<..>>::N`.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)] #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct ProjectionTy<'tcx> { pub struct ProjectionTy<'tcx> {
/// The parameters of the associated item. /// The parameters of the associated item.
pub substs: &'tcx Substs<'tcx>, pub substs: &'tcx Substs<'tcx>,


@ -29,7 +29,7 @@ use std::mem;
/// To reduce memory usage, a `Kind` is a interned pointer, /// To reduce memory usage, a `Kind` is a interned pointer,
/// with the lowest 2 bits being reserved for a tag to /// with the lowest 2 bits being reserved for a tag to
/// indicate the type (`Ty` or `Region`) it points to. /// indicate the type (`Ty` or `Region`) it points to.
#[derive(Copy, Clone, PartialEq, Eq, Hash)] #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Kind<'tcx> { pub struct Kind<'tcx> {
ptr: NonZero<usize>, ptr: NonZero<usize>,
marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>)> marker: PhantomData<(Ty<'tcx>, ty::Region<'tcx>)>


@ -320,8 +320,8 @@ pub struct TargetOptions {
/// Relocation model to use in object file. Corresponds to `llc /// Relocation model to use in object file. Corresponds to `llc
/// -relocation-model=$relocation_model`. Defaults to "pic". /// -relocation-model=$relocation_model`. Defaults to "pic".
pub relocation_model: String, pub relocation_model: String,
/// Code model to use. Corresponds to `llc -code-model=$code_model`. Defaults to "default". /// Code model to use. Corresponds to `llc -code-model=$code_model`.
pub code_model: String, pub code_model: Option<String>,
/// TLS model to use. Options are "global-dynamic" (default), "local-dynamic", "initial-exec" /// TLS model to use. Options are "global-dynamic" (default), "local-dynamic", "initial-exec"
/// and "local-exec". This is similar to the -ftls-model option in GCC/Clang. /// and "local-exec". This is similar to the -ftls-model option in GCC/Clang.
pub tls_model: String, pub tls_model: String,
@ -483,7 +483,7 @@ impl Default for TargetOptions {
only_cdylib: false, only_cdylib: false,
executables: false, executables: false,
relocation_model: "pic".to_string(), relocation_model: "pic".to_string(),
code_model: "default".to_string(), code_model: None,
tls_model: "global-dynamic".to_string(), tls_model: "global-dynamic".to_string(),
disable_redzone: false, disable_redzone: false,
eliminate_frame_pointer: true, eliminate_frame_pointer: true,
@ -736,7 +736,7 @@ impl Target {
key!(only_cdylib, bool); key!(only_cdylib, bool);
key!(executables, bool); key!(executables, bool);
key!(relocation_model); key!(relocation_model);
key!(code_model); key!(code_model, optional);
key!(tls_model); key!(tls_model);
key!(disable_redzone, bool); key!(disable_redzone, bool);
key!(eliminate_frame_pointer, bool); key!(eliminate_frame_pointer, bool);


@ -87,11 +87,11 @@ use std::env;
use std::ffi::OsString; use std::ffi::OsString;
use std::io::{self, Read, Write}; use std::io::{self, Read, Write};
use std::iter::repeat; use std::iter::repeat;
use std::panic;
use std::path::PathBuf; use std::path::PathBuf;
use std::process::{self, Command, Stdio}; use std::process::{self, Command, Stdio};
use std::rc::Rc; use std::rc::Rc;
use std::str; use std::str;
use std::sync::{Arc, Mutex};
use std::thread; use std::thread;
use syntax::ast; use syntax::ast;
@ -168,7 +168,7 @@ pub fn run<F>(run_compiler: F) -> isize
handler.emit(&MultiSpan::new(), handler.emit(&MultiSpan::new(),
"aborting due to previous error(s)", "aborting due to previous error(s)",
errors::Level::Fatal); errors::Level::Fatal);
exit_on_err(); panic::resume_unwind(Box::new(errors::FatalErrorMarker));
} }
} }
} }
@ -1228,27 +1228,16 @@ pub fn in_rustc_thread<F, R>(f: F) -> Result<R, Box<Any + Send>>
/// The diagnostic emitter yielded to the procedure should be used for reporting /// The diagnostic emitter yielded to the procedure should be used for reporting
/// errors of the compiler. /// errors of the compiler.
pub fn monitor<F: FnOnce() + Send + 'static>(f: F) { pub fn monitor<F: FnOnce() + Send + 'static>(f: F) {
struct Sink(Arc<Mutex<Vec<u8>>>);
impl Write for Sink {
fn write(&mut self, data: &[u8]) -> io::Result<usize> {
Write::write(&mut *self.0.lock().unwrap(), data)
}
fn flush(&mut self) -> io::Result<()> {
Ok(())
}
}
let data = Arc::new(Mutex::new(Vec::new()));
let err = Sink(data.clone());
let result = in_rustc_thread(move || { let result = in_rustc_thread(move || {
io::set_panic(Some(box err));
f() f()
}); });
if let Err(value) = result { if let Err(value) = result {
// Thread panicked without emitting a fatal diagnostic // Thread panicked without emitting a fatal diagnostic
if !value.is::<errors::FatalError>() { if !value.is::<errors::FatalErrorMarker>() {
// Emit a newline
eprintln!("");
let emitter = let emitter =
Box::new(errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto, Box::new(errors::emitter::EmitterWriter::stderr(errors::ColorConfig::Auto,
None, None,
@ -1273,22 +1262,12 @@ pub fn monitor<F: FnOnce() + Send + 'static>(f: F) {
&note, &note,
errors::Level::Note); errors::Level::Note);
} }
eprintln!("{}", str::from_utf8(&data.lock().unwrap()).unwrap());
} }
exit_on_err(); panic::resume_unwind(Box::new(errors::FatalErrorMarker));
} }
} }
fn exit_on_err() -> ! {
// Panic so the process returns a failure code, but don't pollute the
// output with some unnecessary panic messages, we've already
// printed everything that we needed to.
io::set_panic(Some(box io::sink()));
panic!();
}
#[cfg(stage0)] #[cfg(stage0)]
pub fn diagnostics_registry() -> errors::registry::Registry { pub fn diagnostics_registry() -> errors::registry::Registry {
use errors::registry::Registry; use errors::registry::Registry;


@ -27,7 +27,7 @@ pub struct Diagnostic {
pub suggestions: Vec<CodeSuggestion>, pub suggestions: Vec<CodeSuggestion>,
} }
#[derive(Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)] #[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum DiagnosticId { pub enum DiagnosticId {
Error(String), Error(String),
Lint(String), Lint(String),
@ -281,6 +281,10 @@ impl Diagnostic {
self self
} }
pub fn get_code(&self) -> Option<DiagnosticId> {
self.code.clone()
}
pub fn message(&self) -> String { pub fn message(&self) -> String {
self.message.iter().map(|i| i.0.to_owned()).collect::<String>() self.message.iter().map(|i| i.0.to_owned()).collect::<String>()
} }


@ -19,6 +19,7 @@
#![cfg_attr(unix, feature(libc))] #![cfg_attr(unix, feature(libc))]
#![feature(conservative_impl_trait)] #![feature(conservative_impl_trait)]
#![feature(i128_type)] #![feature(i128_type)]
#![feature(optin_builtin_traits)]
extern crate term; extern crate term;
#[cfg(unix)] #[cfg(unix)]
@ -44,6 +45,7 @@ use std::rc::Rc;
use std::{error, fmt}; use std::{error, fmt};
use std::sync::atomic::AtomicUsize; use std::sync::atomic::AtomicUsize;
use std::sync::atomic::Ordering::SeqCst; use std::sync::atomic::Ordering::SeqCst;
use std::panic;
mod diagnostic; mod diagnostic;
mod diagnostic_builder; mod diagnostic_builder;
@ -201,6 +203,18 @@ impl CodeSuggestion {
#[must_use] #[must_use]
pub struct FatalError; pub struct FatalError;
pub struct FatalErrorMarker;
// Don't implement Send on FatalError. This makes it impossible to panic!(FatalError).
// We don't want to invoke the panic handler and print a backtrace for fatal errors.
impl !Send for FatalError {}
impl FatalError {
pub fn raise(self) -> ! {
panic::resume_unwind(Box::new(FatalErrorMarker))
}
}
impl fmt::Display for FatalError { impl fmt::Display for FatalError {
fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> { fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
write!(f, "parser fatal error") write!(f, "parser fatal error")
@ -245,6 +259,11 @@ pub struct Handler {
delayed_span_bug: RefCell<Option<Diagnostic>>, delayed_span_bug: RefCell<Option<Diagnostic>>,
tracked_diagnostics: RefCell<Option<Vec<Diagnostic>>>, tracked_diagnostics: RefCell<Option<Vec<Diagnostic>>>,
// This set contains the `DiagnosticId` of all emitted diagnostics to avoid
// emitting the same diagnostic with extended help (`--teach`) twice, which
// would be unnecessary repetition.
tracked_diagnostic_codes: RefCell<FxHashSet<DiagnosticId>>,
// This set contains a hash of every diagnostic that has been emitted by // This set contains a hash of every diagnostic that has been emitted by
// this handler. These hashes is used to avoid emitting the same error // this handler. These hashes is used to avoid emitting the same error
// twice. // twice.
@ -303,6 +322,7 @@ impl Handler {
continue_after_error: Cell::new(true), continue_after_error: Cell::new(true),
delayed_span_bug: RefCell::new(None), delayed_span_bug: RefCell::new(None),
tracked_diagnostics: RefCell::new(None), tracked_diagnostics: RefCell::new(None),
tracked_diagnostic_codes: RefCell::new(FxHashSet()),
emitted_diagnostics: RefCell::new(FxHashSet()), emitted_diagnostics: RefCell::new(FxHashSet()),
} }
} }
@ -539,7 +559,7 @@ impl Handler {
} }
} }
panic!(self.fatal(&s)); self.fatal(&s).raise();
} }
pub fn emit(&self, msp: &MultiSpan, msg: &str, lvl: Level) { pub fn emit(&self, msp: &MultiSpan, msg: &str, lvl: Level) {
if lvl == Warning && !self.flags.can_emit_warnings { if lvl == Warning && !self.flags.can_emit_warnings {
@ -575,6 +595,14 @@ impl Handler {
(ret, diagnostics) (ret, diagnostics)
} }
/// `true` if a diagnostic with this code has already been emitted in this handler.
///
/// Used to suppress emitting the same error multiple times with extended explanation when
/// calling `-Zteach`.
pub fn code_emitted(&self, code: &DiagnosticId) -> bool {
self.tracked_diagnostic_codes.borrow().contains(code)
}
fn emit_db(&self, db: &DiagnosticBuilder) { fn emit_db(&self, db: &DiagnosticBuilder) {
let diagnostic = &**db; let diagnostic = &**db;
@ -582,6 +610,10 @@ impl Handler {
list.push(diagnostic.clone()); list.push(diagnostic.clone());
} }
if let Some(ref code) = diagnostic.code {
self.tracked_diagnostic_codes.borrow_mut().insert(code.clone());
}
let diagnostic_hash = { let diagnostic_hash = {
use std::hash::Hash; use std::hash::Hash;
let mut hasher = StableHasher::new(); let mut hasher = StableHasher::new();
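`tracked_diagnostic_codes` is simply a set of every `DiagnosticId` emitted so far, and `code_emitted` consults it so the extended explanation is printed only the first time a given code appears. A reduced sketch of that bookkeeping with plain `String` codes and a hypothetical handler type:

use std::collections::HashSet;

struct Handler {
    emitted_codes: HashSet<String>,
}

impl Handler {
    fn new() -> Self {
        Handler { emitted_codes: HashSet::new() }
    }

    // Emit a diagnostic, attaching the long-form explanation only the first
    // time this code is seen (mirrors the --teach / -Zteach behaviour).
    fn emit(&mut self, code: &str, msg: &str, explanation: &str) {
        let first_time = self.emitted_codes.insert(code.to_owned());
        eprintln!("error[{}]: {}", code, msg);
        if first_time {
            eprintln!("{}", explanation);
        }
    }
}

fn main() {
    let mut handler = Handler::new();
    handler.emit("E0308", "mismatched types", "Expected one type but found another...");
    handler.emit("E0308", "mismatched types", "Expected one type but found another...");
    // The explanation is printed only once for E0308.
}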


@ -299,12 +299,11 @@ pub enum RelocMode {
#[repr(C)] #[repr(C)]
pub enum CodeModel { pub enum CodeModel {
Other, Other,
Default,
JITDefault,
Small, Small,
Kernel, Kernel,
Medium, Medium,
Large, Large,
None,
} }
/// LLVMRustDiagnosticKind /// LLVMRustDiagnosticKind
@ -331,7 +330,6 @@ pub enum DiagnosticKind {
pub enum ArchiveKind { pub enum ArchiveKind {
Other, Other,
K_GNU, K_GNU,
K_MIPS64,
K_BSD, K_BSD,
K_COFF, K_COFF,
} }
@ -498,6 +496,10 @@ pub mod debuginfo {
const FlagStaticMember = (1 << 12); const FlagStaticMember = (1 << 12);
const FlagLValueReference = (1 << 13); const FlagLValueReference = (1 << 13);
const FlagRValueReference = (1 << 14); const FlagRValueReference = (1 << 14);
const FlagExternalTypeRef = (1 << 15);
const FlagIntroducedVirtual = (1 << 18);
const FlagBitField = (1 << 19);
const FlagNoReturn = (1 << 20);
const FlagMainSubprogram = (1 << 21); const FlagMainSubprogram = (1 << 21);
} }
} }


@ -105,7 +105,6 @@ impl FromStr for ArchiveKind {
fn from_str(s: &str) -> Result<Self, Self::Err> { fn from_str(s: &str) -> Result<Self, Self::Err> {
match s { match s {
"gnu" => Ok(ArchiveKind::K_GNU), "gnu" => Ok(ArchiveKind::K_GNU),
"mips64" => Ok(ArchiveKind::K_MIPS64),
"bsd" => Ok(ArchiveKind::K_BSD), "bsd" => Ok(ArchiveKind::K_BSD),
"coff" => Ok(ArchiveKind::K_COFF), "coff" => Ok(ArchiveKind::K_COFF),
_ => Err(()), _ => Err(()),


@ -681,6 +681,8 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
let data = self.infcx.take_and_reset_region_constraints(); let data = self.infcx.take_and_reset_region_constraints();
if !data.is_empty() { if !data.is_empty() {
debug!("fully_perform_op: constraints generated at {:?} are {:#?}",
locations, data);
self.constraints self.constraints
.outlives_sets .outlives_sets
.push(OutlivesSet { locations, data }); .push(OutlivesSet { locations, data });
@ -1539,6 +1541,7 @@ impl<'a, 'gcx, 'tcx> TypeChecker<'a, 'gcx, 'tcx> {
where where
T: fmt::Debug + TypeFoldable<'tcx>, T: fmt::Debug + TypeFoldable<'tcx>,
{ {
debug!("normalize(value={:?}, location={:?})", value, location);
self.fully_perform_op(location.at_self(), |this| { self.fully_perform_op(location.at_self(), |this| {
let mut selcx = traits::SelectionContext::new(this.infcx); let mut selcx = traits::SelectionContext::new(this.infcx);
let cause = this.misc(this.last_span); let cause = this.misc(this.last_span);


@ -115,6 +115,7 @@ use syntax::ast::NodeId;
use syntax::symbol::{Symbol, InternedString}; use syntax::symbol::{Symbol, InternedString};
use rustc::mir::mono::MonoItem; use rustc::mir::mono::MonoItem;
use monomorphize::item::{MonoItemExt, InstantiationMode}; use monomorphize::item::{MonoItemExt, InstantiationMode};
use core::usize;
pub use rustc::mir::mono::CodegenUnit; pub use rustc::mir::mono::CodegenUnit;
@ -224,6 +225,8 @@ pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let mut initial_partitioning = place_root_translation_items(tcx, let mut initial_partitioning = place_root_translation_items(tcx,
trans_items); trans_items);
initial_partitioning.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(&tcx));
debug_dump(tcx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter()); debug_dump(tcx, "INITIAL PARTITIONING:", initial_partitioning.codegen_units.iter());
// If the partitioning should produce a fixed count of codegen units, merge // If the partitioning should produce a fixed count of codegen units, merge
@ -241,6 +244,8 @@ pub fn partition<'a, 'tcx, I>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let mut post_inlining = place_inlined_translation_items(initial_partitioning, let mut post_inlining = place_inlined_translation_items(initial_partitioning,
inlining_map); inlining_map);
post_inlining.codegen_units.iter_mut().for_each(|cgu| cgu.estimate_size(&tcx));
debug_dump(tcx, "POST INLINING:", post_inlining.codegen_units.iter()); debug_dump(tcx, "POST INLINING:", post_inlining.codegen_units.iter());
// Next we try to make as many symbols "internal" as possible, so LLVM has // Next we try to make as many symbols "internal" as possible, so LLVM has
@ -422,14 +427,13 @@ fn merge_codegen_units<'tcx>(initial_partitioning: &mut PreInliningPartitioning<
codegen_units.sort_by_key(|cgu| cgu.name().clone()); codegen_units.sort_by_key(|cgu| cgu.name().clone());
// Merge the two smallest codegen units until the target size is reached. // Merge the two smallest codegen units until the target size is reached.
// Note that "size" is estimated here rather inaccurately as the number of
// translation items in a given unit. This could be improved on.
while codegen_units.len() > target_cgu_count { while codegen_units.len() > target_cgu_count {
// Sort small cgus to the back // Sort small cgus to the back
codegen_units.sort_by_key(|cgu| -(cgu.items().len() as i64)); codegen_units.sort_by_key(|cgu| usize::MAX - cgu.size_estimate());
let mut smallest = codegen_units.pop().unwrap(); let mut smallest = codegen_units.pop().unwrap();
let second_smallest = codegen_units.last_mut().unwrap(); let second_smallest = codegen_units.last_mut().unwrap();
second_smallest.modify_size_estimate(smallest.size_estimate());
for (k, v) in smallest.items_mut().drain() { for (k, v) in smallest.items_mut().drain() {
second_smallest.items_mut().insert(k, v); second_smallest.items_mut().insert(k, v);
} }
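The merge loop now sorts by the tracked size estimate (descending via `usize::MAX - size`, which puts the smallest unit at the back), pops the smallest, folds its items into the next smallest, and adds the estimates together. A small sketch of the same strategy over a hypothetical `Cgu` struct:

#[derive(Debug)]
struct Cgu {
    name: String,
    size: usize, // stand-in for the statement-count estimate
}

fn merge_until(mut cgus: Vec<Cgu>, target: usize) -> Vec<Cgu> {
    while cgus.len() > target {
        // Sort descending by size so the smallest unit ends up at the back.
        cgus.sort_by_key(|cgu| usize::MAX - cgu.size);
        let smallest = cgus.pop().unwrap();
        let second_smallest = cgus.last_mut().unwrap();
        second_smallest.size += smallest.size; // keep the estimate up to date
        second_smallest.name.push_str(&format!("+{}", smallest.name));
    }
    cgus
}

fn main() {
    let cgus = vec![
        Cgu { name: "a".into(), size: 10 },
        Cgu { name: "b".into(), size: 1 },
        Cgu { name: "c".into(), size: 2 },
    ];
    let merged = merge_until(cgus, 2);
    assert_eq!(merged.len(), 2);
    // The two smallest (b and c) were merged; their sizes add up.
    assert!(merged.iter().any(|cgu| cgu.size == 3));
}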


@ -141,14 +141,6 @@ impl<'a> AstValidator<'a> {
impl<'a> Visitor<'a> for AstValidator<'a> { impl<'a> Visitor<'a> for AstValidator<'a> {
fn visit_expr(&mut self, expr: &'a Expr) { fn visit_expr(&mut self, expr: &'a Expr) {
match expr.node { match expr.node {
ExprKind::While(.., Some(ident)) |
ExprKind::Loop(_, Some(ident)) |
ExprKind::WhileLet(.., Some(ident)) |
ExprKind::ForLoop(.., Some(ident)) |
ExprKind::Break(Some(ident), _) |
ExprKind::Continue(Some(ident)) => {
self.check_label(ident.node, ident.span);
}
ExprKind::InlineAsm(..) if !self.session.target.target.options.allow_asm => { ExprKind::InlineAsm(..) if !self.session.target.target.options.allow_asm => {
span_err!(self.session, expr.span, E0472, "asm! is unsupported on this target"); span_err!(self.session, expr.span, E0472, "asm! is unsupported on this target");
} }
@ -211,6 +203,11 @@ impl<'a> Visitor<'a> for AstValidator<'a> {
visit::walk_use_tree(self, use_tree, id); visit::walk_use_tree(self, use_tree, id);
} }
fn visit_label(&mut self, label: &'a Label) {
self.check_label(label.ident, label.span);
visit::walk_label(self, label);
}
fn visit_lifetime(&mut self, lifetime: &'a Lifetime) { fn visit_lifetime(&mut self, lifetime: &'a Lifetime) {
self.check_lifetime(lifetime); self.check_lifetime(lifetime);
visit::walk_lifetime(self, lifetime); visit::walk_lifetime(self, lifetime);


@ -55,7 +55,7 @@ use syntax::attr;
use syntax::ast::{Arm, BindingMode, Block, Crate, Expr, ExprKind}; use syntax::ast::{Arm, BindingMode, Block, Crate, Expr, ExprKind};
use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, GenericParam, Generics}; use syntax::ast::{FnDecl, ForeignItem, ForeignItemKind, GenericParam, Generics};
use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind}; use syntax::ast::{Item, ItemKind, ImplItem, ImplItemKind};
use syntax::ast::{Local, Mutability, Pat, PatKind, Path}; use syntax::ast::{Label, Local, Mutability, Pat, PatKind, Path};
use syntax::ast::{QSelf, TraitItemKind, TraitRef, Ty, TyKind}; use syntax::ast::{QSelf, TraitItemKind, TraitRef, Ty, TyKind};
use syntax::feature_gate::{feature_err, emit_feature_err, GateIssue}; use syntax::feature_gate::{feature_err, emit_feature_err, GateIssue};
use syntax::parse::token; use syntax::parse::token;
@ -2045,7 +2045,7 @@ impl<'a> Resolver<'a> {
segments: vec![], segments: vec![],
span: use_tree.span, span: use_tree.span,
}; };
self.resolve_use_tree(item, use_tree, &path); self.resolve_use_tree(item.id, use_tree, &path);
} }
ItemKind::ExternCrate(_) | ItemKind::MacroDef(..) | ItemKind::GlobalAsm(_) => { ItemKind::ExternCrate(_) | ItemKind::MacroDef(..) | ItemKind::GlobalAsm(_) => {
@ -2056,7 +2056,7 @@ impl<'a> Resolver<'a> {
} }
} }
fn resolve_use_tree(&mut self, item: &Item, use_tree: &ast::UseTree, prefix: &Path) { fn resolve_use_tree(&mut self, id: NodeId, use_tree: &ast::UseTree, prefix: &Path) {
match use_tree.kind { match use_tree.kind {
ast::UseTreeKind::Nested(ref items) => { ast::UseTreeKind::Nested(ref items) => {
let path = Path { let path = Path {
@ -2070,10 +2070,10 @@ impl<'a> Resolver<'a> {
if items.len() == 0 { if items.len() == 0 {
// Resolve prefix of an import with empty braces (issue #28388). // Resolve prefix of an import with empty braces (issue #28388).
self.smart_resolve_path(item.id, None, &path, PathSource::ImportPrefix); self.smart_resolve_path(id, None, &path, PathSource::ImportPrefix);
} else { } else {
for &(ref tree, _) in items { for &(ref tree, nested_id) in items {
self.resolve_use_tree(item, tree, &path); self.resolve_use_tree(nested_id, tree, &path);
} }
} }
} }
@ -3415,13 +3415,13 @@ impl<'a> Resolver<'a> {
} }
} }
fn with_resolved_label<F>(&mut self, label: Option<SpannedIdent>, id: NodeId, f: F) fn with_resolved_label<F>(&mut self, label: Option<Label>, id: NodeId, f: F)
where F: FnOnce(&mut Resolver) where F: FnOnce(&mut Resolver)
{ {
if let Some(label) = label { if let Some(label) = label {
let def = Def::Label(id); let def = Def::Label(id);
self.with_label_rib(|this| { self.with_label_rib(|this| {
this.label_ribs.last_mut().unwrap().bindings.insert(label.node, def); this.label_ribs.last_mut().unwrap().bindings.insert(label.ident, def);
f(this); f(this);
}); });
} else { } else {
@ -3429,7 +3429,7 @@ impl<'a> Resolver<'a> {
} }
} }
fn resolve_labeled_block(&mut self, label: Option<SpannedIdent>, id: NodeId, block: &Block) { fn resolve_labeled_block(&mut self, label: Option<Label>, id: NodeId, block: &Block) {
self.with_resolved_label(label, id, |this| this.visit_block(block)); self.with_resolved_label(label, id, |this| this.visit_block(block));
} }
@ -3452,19 +3452,19 @@ impl<'a> Resolver<'a> {
} }
ExprKind::Break(Some(label), _) | ExprKind::Continue(Some(label)) => { ExprKind::Break(Some(label), _) | ExprKind::Continue(Some(label)) => {
match self.search_label(label.node, |rib, id| rib.bindings.get(&id).cloned()) { match self.search_label(label.ident, |rib, id| rib.bindings.get(&id).cloned()) {
None => { None => {
// Search again for close matches... // Search again for close matches...
// Picks the first label that is "close enough", which is not necessarily // Picks the first label that is "close enough", which is not necessarily
// the closest match // the closest match
let close_match = self.search_label(label.node, |rib, ident| { let close_match = self.search_label(label.ident, |rib, ident| {
let names = rib.bindings.iter().map(|(id, _)| &id.name); let names = rib.bindings.iter().map(|(id, _)| &id.name);
find_best_match_for_name(names, &*ident.name.as_str(), None) find_best_match_for_name(names, &*ident.name.as_str(), None)
}); });
self.record_def(expr.id, err_path_resolution()); self.record_def(expr.id, err_path_resolution());
resolve_error(self, resolve_error(self,
label.span, label.span,
ResolutionError::UndeclaredLabel(&label.node.name.as_str(), ResolutionError::UndeclaredLabel(&label.ident.name.as_str(),
close_match)); close_match));
} }
Some(def @ Def::Label(_)) => { Some(def @ Def::Label(_)) => {


@ -871,6 +871,31 @@ impl<'a, 'tcx> FnType<'tcx> {
match arg.layout.abi { match arg.layout.abi {
layout::Abi::Aggregate { .. } => {} layout::Abi::Aggregate { .. } => {}
// This is a fun case! The gist of what this is doing is
// that we want callers and callees to always agree on the
// ABI of how they pass SIMD arguments. If we were to *not*
// make these arguments indirect then they'd be immediates
// in LLVM, which means that they'd use whatever the
// appropriate ABI is for the callee and the caller. That
// means, for example, if the caller doesn't have AVX
// enabled but the callee does, then passing an AVX argument
// across this boundary would cause corrupt data to show up.
//
// This problem is fixed by unconditionally passing SIMD
// arguments through memory between callers and callees
// which should get them all to agree on ABI regardless of
// target feature sets. Some more information about this
// issue can be found in #44367.
//
// Note that the platform intrinsic ABI is exempt here as
// that's how we connect up to LLVM and it's unstable
// anyway, we control all calls to it in libstd.
layout::Abi::Vector { .. } if abi != Abi::PlatformIntrinsic => {
arg.make_indirect();
return
}
_ => return _ => return
} }
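The point of the comment is that a SIMD value passed in registers is sensitive to which target features the caller and callee have enabled, so such arguments are forced through memory unless the call uses the platform-intrinsic ABI. A toy sketch of just that classification decision, with hypothetical enums rather than the real `FnType` machinery:

#[derive(PartialEq)]
enum Abi {
    Rust,
    PlatformIntrinsic,
}

enum LayoutAbi {
    Aggregate,
    Vector, // SIMD payload
    Scalar,
}

enum PassMode {
    Direct,   // pass in registers / immediates
    Indirect, // pass a pointer to a stack copy
}

// Mirror of the decision in the hunk above: SIMD arguments become indirect
// unless the function uses the platform-intrinsic ABI, so caller and callee
// agree on the calling convention regardless of enabled target features.
fn classify(abi: &Abi, layout: &LayoutAbi) -> PassMode {
    match *layout {
        LayoutAbi::Vector if *abi != Abi::PlatformIntrinsic => PassMode::Indirect,
        _ => PassMode::Direct,
    }
}

fn main() {
    assert!(matches!(classify(&Abi::Rust, &LayoutAbi::Vector), PassMode::Indirect));
    assert!(matches!(classify(&Abi::PlatformIntrinsic, &LayoutAbi::Vector), PassMode::Direct));
    assert!(matches!(classify(&Abi::Rust, &LayoutAbi::Scalar), PassMode::Direct));
    assert!(matches!(classify(&Abi::Rust, &LayoutAbi::Aggregate), PassMode::Direct));
}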


@ -16,7 +16,7 @@ use super::rpath::RPathConfig;
use super::rpath; use super::rpath;
use metadata::METADATA_FILENAME; use metadata::METADATA_FILENAME;
use rustc::session::config::{self, NoDebugInfo, OutputFilenames, OutputType, PrintRequest}; use rustc::session::config::{self, NoDebugInfo, OutputFilenames, OutputType, PrintRequest};
use rustc::session::config::RUST_CGU_EXT; use rustc::session::config::{RUST_CGU_EXT, Lto};
use rustc::session::filesearch; use rustc::session::filesearch;
use rustc::session::search_paths::PathKind; use rustc::session::search_paths::PathKind;
use rustc::session::Session; use rustc::session::Session;
@ -503,7 +503,8 @@ fn link_staticlib(sess: &Session,
}); });
ab.add_rlib(path, ab.add_rlib(path,
&name.as_str(), &name.as_str(),
sess.lto() && !ignored_for_lto(sess, &trans.crate_info, cnum), is_full_lto_enabled(sess) &&
!ignored_for_lto(sess, &trans.crate_info, cnum),
skip_object_files).unwrap(); skip_object_files).unwrap();
all_native_libs.extend(trans.crate_info.native_libraries[&cnum].iter().cloned()); all_native_libs.extend(trans.crate_info.native_libraries[&cnum].iter().cloned());
@ -1211,7 +1212,8 @@ fn add_upstream_rust_crates(cmd: &mut Linker,
lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib) lib.kind == NativeLibraryKind::NativeStatic && !relevant_lib(sess, lib)
}); });
if (!sess.lto() || ignored_for_lto(sess, &trans.crate_info, cnum)) && if (!is_full_lto_enabled(sess) ||
ignored_for_lto(sess, &trans.crate_info, cnum)) &&
crate_type != config::CrateTypeDylib && crate_type != config::CrateTypeDylib &&
!skip_native { !skip_native {
cmd.link_rlib(&fix_windows_verbatim_for_gcc(cratepath)); cmd.link_rlib(&fix_windows_verbatim_for_gcc(cratepath));
@ -1264,7 +1266,7 @@ fn add_upstream_rust_crates(cmd: &mut Linker,
// file, then we don't need the object file as it's part of the // file, then we don't need the object file as it's part of the
// LTO module. Note that `#![no_builtins]` is excluded from LTO, // LTO module. Note that `#![no_builtins]` is excluded from LTO,
// though, so we let that object file slide. // though, so we let that object file slide.
let skip_because_lto = sess.lto() && let skip_because_lto = is_full_lto_enabled(sess) &&
is_rust_object && is_rust_object &&
(sess.target.target.options.no_builtins || (sess.target.target.options.no_builtins ||
!trans.crate_info.is_no_builtins.contains(&cnum)); !trans.crate_info.is_no_builtins.contains(&cnum));
@ -1301,7 +1303,7 @@ fn add_upstream_rust_crates(cmd: &mut Linker,
fn add_dynamic_crate(cmd: &mut Linker, sess: &Session, cratepath: &Path) { fn add_dynamic_crate(cmd: &mut Linker, sess: &Session, cratepath: &Path) {
// If we're performing LTO, then it should have been previously required // If we're performing LTO, then it should have been previously required
// that all upstream rust dependencies were available in an rlib format. // that all upstream rust dependencies were available in an rlib format.
assert!(!sess.lto()); assert!(!is_full_lto_enabled(sess));
// Just need to tell the linker about where the library lives and // Just need to tell the linker about where the library lives and
// what its name is // what its name is
@ -1409,3 +1411,13 @@ fn link_binaryen(sess: &Session,
e)); e));
} }
} }
fn is_full_lto_enabled(sess: &Session) -> bool {
match sess.lto() {
Lto::Yes |
Lto::Thin |
Lto::Fat => true,
Lto::No |
Lto::ThinLocal => false,
}
}


@ -18,7 +18,7 @@ use llvm::{ModuleRef, TargetMachineRef, True, False};
use llvm; use llvm;
use rustc::hir::def_id::LOCAL_CRATE; use rustc::hir::def_id::LOCAL_CRATE;
use rustc::middle::exported_symbols::SymbolExportLevel; use rustc::middle::exported_symbols::SymbolExportLevel;
use rustc::session::config; use rustc::session::config::{self, Lto};
use rustc::util::common::time; use rustc::util::common::time;
use time_graph::Timeline; use time_graph::Timeline;
use {ModuleTranslation, ModuleLlvm, ModuleKind, ModuleSource}; use {ModuleTranslation, ModuleLlvm, ModuleKind, ModuleSource};
@ -95,25 +95,22 @@ impl LtoModuleTranslation {
} }
} }
pub enum LTOMode {
WholeCrateGraph,
JustThisCrate,
}
pub(crate) fn run(cgcx: &CodegenContext, pub(crate) fn run(cgcx: &CodegenContext,
modules: Vec<ModuleTranslation>, modules: Vec<ModuleTranslation>,
mode: LTOMode, timeline: &mut Timeline)
timeline: &mut Timeline)
-> Result<Vec<LtoModuleTranslation>, FatalError> -> Result<Vec<LtoModuleTranslation>, FatalError>
{ {
let diag_handler = cgcx.create_diag_handler(); let diag_handler = cgcx.create_diag_handler();
let export_threshold = match cgcx.lto {
    // We're just doing LTO for our one crate
    Lto::ThinLocal => SymbolExportLevel::Rust,

    // We're doing LTO for the entire crate graph
    Lto::Yes | Lto::Fat | Lto::Thin => {
        symbol_export::crates_export_threshold(&cgcx.crate_types)
    }

    Lto::No => panic!("didn't request LTO but we're doing LTO"),
};
let symbol_filter = &|&(ref name, _, level): &(String, _, SymbolExportLevel)| { let symbol_filter = &|&(ref name, _, level): &(String, _, SymbolExportLevel)| {
@ -140,7 +137,7 @@ pub(crate) fn run(cgcx: &CodegenContext,
// We save off all the bytecode and LLVM module ids for later processing // We save off all the bytecode and LLVM module ids for later processing
// with either fat or thin LTO // with either fat or thin LTO
let mut upstream_modules = Vec::new(); let mut upstream_modules = Vec::new();
if let LTOMode::WholeCrateGraph = mode { if cgcx.lto != Lto::ThinLocal {
if cgcx.opts.cg.prefer_dynamic { if cgcx.opts.cg.prefer_dynamic {
diag_handler.struct_err("cannot prefer dynamic linking when performing LTO") diag_handler.struct_err("cannot prefer dynamic linking when performing LTO")
.note("only 'staticlib', 'bin', and 'cdylib' outputs are \ .note("only 'staticlib', 'bin', and 'cdylib' outputs are \
@ -186,13 +183,16 @@ pub(crate) fn run(cgcx: &CodegenContext,
} }
let arr = symbol_white_list.iter().map(|c| c.as_ptr()).collect::<Vec<_>>(); let arr = symbol_white_list.iter().map(|c| c.as_ptr()).collect::<Vec<_>>();
match mode { match cgcx.lto {
LTOMode::WholeCrateGraph if !cgcx.thinlto => { Lto::Yes | // `-C lto` == fat LTO by default
Lto::Fat => {
fat_lto(cgcx, &diag_handler, modules, upstream_modules, &arr, timeline) fat_lto(cgcx, &diag_handler, modules, upstream_modules, &arr, timeline)
} }
_ => { Lto::Thin |
Lto::ThinLocal => {
thin_lto(&diag_handler, modules, upstream_modules, &arr, timeline) thin_lto(&diag_handler, modules, upstream_modules, &arr, timeline)
} }
Lto::No => unreachable!(),
} }
} }


@ -11,6 +11,7 @@
use back::bytecode::{self, RLIB_BYTECODE_EXTENSION}; use back::bytecode::{self, RLIB_BYTECODE_EXTENSION};
use back::lto::{self, ModuleBuffer, ThinBuffer}; use back::lto::{self, ModuleBuffer, ThinBuffer};
use back::link::{self, get_linker, remove}; use back::link::{self, get_linker, remove};
use back::command::Command;
use back::linker::LinkerInfo; use back::linker::LinkerInfo;
use back::symbol_export::ExportedSymbols; use back::symbol_export::ExportedSymbols;
use base; use base;
@ -18,8 +19,8 @@ use consts;
use rustc_incremental::{save_trans_partition, in_incr_comp_dir}; use rustc_incremental::{save_trans_partition, in_incr_comp_dir};
use rustc::dep_graph::{DepGraph, WorkProductFileKind}; use rustc::dep_graph::{DepGraph, WorkProductFileKind};
use rustc::middle::cstore::{LinkMeta, EncodedMetadata}; use rustc::middle::cstore::{LinkMeta, EncodedMetadata};
use rustc::session::config::{self, OutputFilenames, OutputType, OutputTypes, Passes, SomePasses, use rustc::session::config::{self, OutputFilenames, OutputType, Passes, SomePasses,
AllPasses, Sanitizer}; AllPasses, Sanitizer, Lto};
use rustc::session::Session; use rustc::session::Session;
use rustc::util::nodemap::FxHashMap; use rustc::util::nodemap::FxHashMap;
use rustc_back::LinkerFlavor; use rustc_back::LinkerFlavor;
@ -32,7 +33,7 @@ use CrateInfo;
use rustc::hir::def_id::{CrateNum, LOCAL_CRATE}; use rustc::hir::def_id::{CrateNum, LOCAL_CRATE};
use rustc::ty::TyCtxt; use rustc::ty::TyCtxt;
use rustc::util::common::{time, time_depth, set_time_depth, path2cstr, print_time_passes_entry}; use rustc::util::common::{time, time_depth, set_time_depth, path2cstr, print_time_passes_entry};
use rustc::util::fs::{link_or_copy, rename_or_copy_remove}; use rustc::util::fs::{link_or_copy};
use errors::{self, Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId}; use errors::{self, Handler, Level, DiagnosticBuilder, FatalError, DiagnosticId};
use errors::emitter::{Emitter}; use errors::emitter::{Emitter};
use syntax::attr; use syntax::attr;
@ -68,8 +69,7 @@ pub const RELOC_MODEL_ARGS : [(&'static str, llvm::RelocMode); 7] = [
("ropi-rwpi", llvm::RelocMode::ROPI_RWPI), ("ropi-rwpi", llvm::RelocMode::ROPI_RWPI),
]; ];
pub const CODE_GEN_MODEL_ARGS : [(&'static str, llvm::CodeModel); 5] = [ pub const CODE_GEN_MODEL_ARGS: &[(&str, llvm::CodeModel)] = &[
("default", llvm::CodeModel::Default),
("small", llvm::CodeModel::Small), ("small", llvm::CodeModel::Small),
("kernel", llvm::CodeModel::Kernel), ("kernel", llvm::CodeModel::Kernel),
("medium", llvm::CodeModel::Medium), ("medium", llvm::CodeModel::Medium),
@ -155,7 +155,7 @@ fn get_llvm_opt_size(optimize: config::OptLevel) -> llvm::CodeGenOptSize {
pub fn create_target_machine(sess: &Session) -> TargetMachineRef { pub fn create_target_machine(sess: &Session) -> TargetMachineRef {
target_machine_factory(sess)().unwrap_or_else(|err| { target_machine_factory(sess)().unwrap_or_else(|err| {
panic!(llvm_err(sess.diagnostic(), err)) llvm_err(sess.diagnostic(), err).raise()
}) })
} }
@ -170,20 +170,23 @@ pub fn target_machine_factory(sess: &Session)
let ffunction_sections = sess.target.target.options.function_sections; let ffunction_sections = sess.target.target.options.function_sections;
let fdata_sections = ffunction_sections; let fdata_sections = ffunction_sections;
let code_model_arg = sess.opts.cg.code_model.as_ref().or(
    sess.target.target.options.code_model.as_ref(),
);

let code_model = match code_model_arg {
    Some(s) => {
        match CODE_GEN_MODEL_ARGS.iter().find(|arg| arg.0 == s) {
            Some(x) => x.1,
            _ => {
                sess.err(&format!("{:?} is not a valid code model",
                                  code_model_arg));
                sess.abort_if_errors();
                bug!();
            }
        }
    }
    None => llvm::CodeModel::None,
};
let singlethread = sess.target.target.options.singlethread; let singlethread = sess.target.target.options.singlethread;
@ -258,6 +261,7 @@ pub struct ModuleConfig {
// make the object file bitcode. Provides easy compatibility with // make the object file bitcode. Provides easy compatibility with
// emscripten's ecc compiler, when used as the linker. // emscripten's ecc compiler, when used as the linker.
obj_is_bitcode: bool, obj_is_bitcode: bool,
no_integrated_as: bool,
} }
impl ModuleConfig { impl ModuleConfig {
@ -275,6 +279,7 @@ impl ModuleConfig {
emit_asm: false, emit_asm: false,
emit_obj: false, emit_obj: false,
obj_is_bitcode: false, obj_is_bitcode: false,
no_integrated_as: false,
no_verify: false, no_verify: false,
no_prepopulate_passes: false, no_prepopulate_passes: false,
@ -313,13 +318,18 @@ impl ModuleConfig {
} }
} }
/// Assembler name and command used by codegen when no_integrated_as is enabled
struct AssemblerCommand {
name: PathBuf,
cmd: Command,
}
/// Additional resources used by optimize_and_codegen (not module specific) /// Additional resources used by optimize_and_codegen (not module specific)
#[derive(Clone)] #[derive(Clone)]
pub struct CodegenContext { pub struct CodegenContext {
// Resouces needed when running LTO // Resouces needed when running LTO
pub time_passes: bool, pub time_passes: bool,
pub lto: bool, pub lto: Lto,
pub thinlto: bool,
pub no_landing_pads: bool, pub no_landing_pads: bool,
pub save_temps: bool, pub save_temps: bool,
pub fewer_names: bool, pub fewer_names: bool,
@ -356,6 +366,8 @@ pub struct CodegenContext {
// A reference to the TimeGraph so we can register timings. None means that // A reference to the TimeGraph so we can register timings. None means that
// measuring is disabled. // measuring is disabled.
time_graph: Option<TimeGraph>, time_graph: Option<TimeGraph>,
// The assembler command if no_integrated_as option is enabled, None otherwise
assembler_cmd: Option<Arc<AssemblerCommand>>,
} }
impl CodegenContext { impl CodegenContext {
@ -576,13 +588,8 @@ fn generate_lto_work(cgcx: &CodegenContext,
TRANS_WORK_PACKAGE_KIND, TRANS_WORK_PACKAGE_KIND,
"generate lto") "generate lto")
}).unwrap_or(Timeline::noop()); }).unwrap_or(Timeline::noop());
let lto_modules = lto::run(cgcx, modules, &mut timeline)
    .unwrap_or_else(|e| e.raise());
lto_modules.into_iter().map(|module| { lto_modules.into_iter().map(|module| {
let cost = module.cost(); let cost = module.cost();
@ -639,13 +646,17 @@ unsafe fn codegen(cgcx: &CodegenContext,
!cgcx.crate_types.contains(&config::CrateTypeRlib) && !cgcx.crate_types.contains(&config::CrateTypeRlib) &&
mtrans.kind == ModuleKind::Regular; mtrans.kind == ModuleKind::Regular;
// If we don't have the integrated assembler, then we need to emit asm
// from LLVM and use `gcc` to create the object file.
let asm_to_obj = config.emit_obj && config.no_integrated_as;
// Change what we write and cleanup based on whether obj files are // Change what we write and cleanup based on whether obj files are
// just llvm bitcode. In that case write bitcode, and possibly // just llvm bitcode. In that case write bitcode, and possibly
// delete the bitcode if it wasn't requested. Don't generate the // delete the bitcode if it wasn't requested. Don't generate the
// machine code, instead copy the .o file from the .bc // machine code, instead copy the .o file from the .bc
let write_bc = config.emit_bc || (config.obj_is_bitcode && !asm2wasm); let write_bc = config.emit_bc || (config.obj_is_bitcode && !asm2wasm);
let rm_bc = !config.emit_bc && config.obj_is_bitcode && !asm2wasm; let rm_bc = !config.emit_bc && config.obj_is_bitcode && !asm2wasm;
let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm2wasm; let write_obj = config.emit_obj && !config.obj_is_bitcode && !asm2wasm && !asm_to_obj;
let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode && !asm2wasm; let copy_bc_to_obj = config.emit_obj && config.obj_is_bitcode && !asm2wasm;
let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name); let bc_out = cgcx.output_filenames.temp_path(OutputType::Bitcode, module_name);
@ -725,13 +736,13 @@ unsafe fn codegen(cgcx: &CodegenContext,
timeline.record("ir"); timeline.record("ir");
} }
if config.emit_asm || (asm2wasm && config.emit_obj) { if config.emit_asm || (asm2wasm && config.emit_obj) || asm_to_obj {
let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name); let path = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
// We can't use the same module for asm and binary output, because that triggers // We can't use the same module for asm and binary output, because that triggers
// various errors like invalid IR or broken binaries, so we might have to clone the // various errors like invalid IR or broken binaries, so we might have to clone the
// module to produce the asm output // module to produce the asm output
let llmod = if config.emit_obj { let llmod = if config.emit_obj && !asm2wasm {
llvm::LLVMCloneModule(llmod) llvm::LLVMCloneModule(llmod)
} else { } else {
llmod llmod
@ -740,7 +751,7 @@ unsafe fn codegen(cgcx: &CodegenContext,
write_output_file(diag_handler, tm, cpm, llmod, &path, write_output_file(diag_handler, tm, cpm, llmod, &path,
llvm::FileType::AssemblyFile) llvm::FileType::AssemblyFile)
})?; })?;
if config.emit_obj { if config.emit_obj && !asm2wasm {
llvm::LLVMDisposeModule(llmod); llvm::LLVMDisposeModule(llmod);
} }
timeline.record("asm"); timeline.record("asm");
@ -760,6 +771,14 @@ unsafe fn codegen(cgcx: &CodegenContext,
llvm::FileType::ObjectFile) llvm::FileType::ObjectFile)
})?; })?;
timeline.record("obj"); timeline.record("obj");
} else if asm_to_obj {
let assembly = cgcx.output_filenames.temp_path(OutputType::Assembly, module_name);
run_assembler(cgcx, diag_handler, &assembly, &obj_out);
timeline.record("asm_to_obj");
if !config.emit_asm && !cgcx.save_temps {
drop(fs::remove_file(&assembly));
}
} }
Ok(()) Ok(())
@ -841,7 +860,6 @@ pub fn start_async_translation(tcx: TyCtxt,
total_cgus: usize) total_cgus: usize)
-> OngoingCrateTranslation { -> OngoingCrateTranslation {
let sess = tcx.sess; let sess = tcx.sess;
let crate_output = tcx.output_filenames(LOCAL_CRATE);
let crate_name = tcx.crate_name(LOCAL_CRATE); let crate_name = tcx.crate_name(LOCAL_CRATE);
let no_builtins = attr::contains_name(&tcx.hir.krate().attrs, "no_builtins"); let no_builtins = attr::contains_name(&tcx.hir.krate().attrs, "no_builtins");
let subsystem = attr::first_attr_value_str_by_name(&tcx.hir.krate().attrs, let subsystem = attr::first_attr_value_str_by_name(&tcx.hir.krate().attrs,
@ -855,19 +873,9 @@ pub fn start_async_translation(tcx: TyCtxt,
subsystem.to_string() subsystem.to_string()
}); });
let no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
(tcx.sess.target.target.options.no_integrated_as &&
(crate_output.outputs.contains_key(&OutputType::Object) ||
crate_output.outputs.contains_key(&OutputType::Exe)));
let linker_info = LinkerInfo::new(tcx); let linker_info = LinkerInfo::new(tcx);
let crate_info = CrateInfo::new(tcx); let crate_info = CrateInfo::new(tcx);
let output_types_override = if no_integrated_as {
OutputTypes::new(&[(OutputType::Assembly, None)])
} else {
sess.opts.output_types.clone()
};
// Figure out what we actually need to build. // Figure out what we actually need to build.
let mut modules_config = ModuleConfig::new(sess.opts.cg.passes.clone()); let mut modules_config = ModuleConfig::new(sess.opts.cg.passes.clone());
let mut metadata_config = ModuleConfig::new(vec![]); let mut metadata_config = ModuleConfig::new(vec![]);
@ -913,7 +921,10 @@ pub fn start_async_translation(tcx: TyCtxt,
allocator_config.emit_bc_compressed = true; allocator_config.emit_bc_compressed = true;
} }
for output_type in output_types_override.keys() { modules_config.no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
tcx.sess.target.target.options.no_integrated_as;
for output_type in sess.opts.output_types.keys() {
match *output_type { match *output_type {
OutputType::Bitcode => { modules_config.emit_bc = true; } OutputType::Bitcode => { modules_config.emit_bc = true; }
OutputType::LlvmAssembly => { modules_config.emit_ir = true; } OutputType::LlvmAssembly => { modules_config.emit_ir = true; }
@ -976,7 +987,6 @@ pub fn start_async_translation(tcx: TyCtxt,
metadata, metadata,
windows_subsystem, windows_subsystem,
linker_info, linker_info,
no_integrated_as,
crate_info, crate_info,
time_graph, time_graph,
@ -1280,28 +1290,51 @@ fn execute_work_item(cgcx: &CodegenContext,
unsafe { unsafe {
optimize(cgcx, &diag_handler, &mtrans, config, timeline)?; optimize(cgcx, &diag_handler, &mtrans, config, timeline)?;
// After we've done the initial round of optimizations we need to
// decide whether to synchronously codegen this module or ship it
// back to the coordinator thread for further LTO processing (which
// has to wait for all the initial modules to be optimized).
//
// Here we dispatch based on the `cgcx.lto` and kind of module we're
// translating...
let needs_lto = match cgcx.lto {
    Lto::No => false,

    // Here we've got a full crate graph LTO requested. We ignore
    // this, however, if the crate type is only an rlib as there's
    // no full crate graph to process, that'll happen later.
    //
    // This use case currently comes up primarily for targets that
    // require LTO so the request for LTO is always unconditionally
    // passed down to the backend, but we don't actually want to do
    // anything about it yet until we've got a final product.
    Lto::Yes | Lto::Fat | Lto::Thin => {
        cgcx.crate_types.len() != 1 ||
            cgcx.crate_types[0] != config::CrateTypeRlib
    }

    // When we're automatically doing ThinLTO for multi-codegen-unit
    // builds we don't actually want to LTO the allocator modules if
    // it shows up. This is due to various linker shenanigans that
    // we'll encounter later.
    //
    // Additionally here's where we also factor in the current LLVM
    // version. If it doesn't support ThinLTO we skip this.
    Lto::ThinLocal => {
        mtrans.kind != ModuleKind::Allocator &&
            llvm::LLVMRustThinLTOAvailable()
    }
};

// Metadata modules never participate in LTO regardless of the lto
// settings.
let needs_lto = needs_lto && mtrans.kind != ModuleKind::Metadata;

if needs_lto {
    Ok(WorkItemResult::NeedsLTO(mtrans))
} else {
    let module = codegen(cgcx, &diag_handler, mtrans, config, timeline)?;
    Ok(WorkItemResult::Compiled(module))
} }
} }
} }
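As a side note, the dispatch spelled out in the comments above can be summarized in a small standalone sketch (the Lto/ModuleKind enums and the boolean flags here are simplified stand-ins, not the compiler's own types):

#[derive(PartialEq)]
enum Lto { No, Yes, Fat, Thin, ThinLocal }
#[derive(PartialEq)]
enum ModuleKind { Regular, Metadata, Allocator }

fn needs_lto(lto: Lto, kind: ModuleKind, only_rlib: bool, thin_lto_available: bool) -> bool {
    let wants_lto = match lto {
        // No LTO requested: codegen the module right away.
        Lto::No => false,
        // Full crate-graph LTO: defer, unless we're only building an rlib,
        // in which case the real LTO run happens later at link time.
        Lto::Yes | Lto::Fat | Lto::Thin => !only_rlib,
        // Automatic ThinLTO: skip the allocator module and require backend support.
        Lto::ThinLocal => kind != ModuleKind::Allocator && thin_lto_available,
    };
    // Metadata modules never participate in LTO.
    wants_lto && kind != ModuleKind::Metadata
}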
@ -1377,28 +1410,25 @@ fn start_executing_work(tcx: TyCtxt,
each_linked_rlib_for_lto.push((cnum, path.to_path_buf())); each_linked_rlib_for_lto.push((cnum, path.to_path_buf()));
})); }));
let crate_types = sess.crate_types.borrow();
let only_rlib = crate_types.len() == 1 &&
crate_types[0] == config::CrateTypeRlib;
let wasm_import_memory = let wasm_import_memory =
attr::contains_name(&tcx.hir.krate().attrs, "wasm_import_memory"); attr::contains_name(&tcx.hir.krate().attrs, "wasm_import_memory");
let assembler_cmd = if modules_config.no_integrated_as {
// HACK: currently we use linker (gcc) as our assembler
let (name, mut cmd, _) = get_linker(sess);
cmd.args(&sess.target.target.options.asm_args);
Some(Arc::new(AssemblerCommand {
name,
cmd,
}))
} else {
None
};
let cgcx = CodegenContext { let cgcx = CodegenContext {
crate_types: sess.crate_types.borrow().clone(), crate_types: sess.crate_types.borrow().clone(),
each_linked_rlib_for_lto, each_linked_rlib_for_lto,
// If we're only building an rlib then allow the LTO flag to be passed
// but don't actually do anything, the full LTO will happen later
lto: sess.lto() && !only_rlib,
lto: sess.lto(),
// Enable ThinLTO if requested, but only if the target we're compiling
// for doesn't require full LTO. Some targets require one LLVM module
// (they effectively don't have a linker) so it's up to us to use LTO to
// link everything together.
thinlto: sess.thinlto() &&
!sess.target.target.options.requires_lto &&
unsafe { llvm::LLVMRustThinLTOAvailable() },
no_landing_pads: sess.no_landing_pads(), no_landing_pads: sess.no_landing_pads(),
fewer_names: sess.fewer_names(), fewer_names: sess.fewer_names(),
save_temps: sess.opts.cg.save_temps, save_temps: sess.opts.cg.save_temps,
@ -1423,6 +1453,7 @@ fn start_executing_work(tcx: TyCtxt,
binaryen_linker: tcx.sess.linker_flavor() == LinkerFlavor::Binaryen, binaryen_linker: tcx.sess.linker_flavor() == LinkerFlavor::Binaryen,
debuginfo: tcx.sess.opts.debuginfo, debuginfo: tcx.sess.opts.debuginfo,
wasm_import_memory: wasm_import_memory, wasm_import_memory: wasm_import_memory,
assembler_cmd,
}; };
// This is the "main loop" of parallel work happening for parallel codegen. // This is the "main loop" of parallel work happening for parallel codegen.
@ -1931,15 +1962,14 @@ fn spawn_work(cgcx: CodegenContext, work: WorkItem) {
}); });
} }
pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
    let (pname, mut cmd, _) = get_linker(sess);

    for arg in &sess.target.target.options.asm_args {
        cmd.arg(arg);
    }

    cmd.arg("-c").arg("-o").arg(&outputs.path(OutputType::Object))
       .arg(&outputs.temp_path(OutputType::Assembly, None));

pub fn run_assembler(cgcx: &CodegenContext, handler: &Handler, assembly: &Path, object: &Path) {
    let assembler = cgcx.assembler_cmd
        .as_ref()
        .expect("cgcx.assembler_cmd is missing?");

    let pname = &assembler.name;
    let mut cmd = assembler.cmd.clone();
    cmd.arg("-c").arg("-o").arg(object).arg(assembly);
debug!("{:?}", cmd); debug!("{:?}", cmd);
match cmd.output() { match cmd.output() {
@ -1948,18 +1978,18 @@ pub fn run_assembler(sess: &Session, outputs: &OutputFilenames) {
let mut note = prog.stderr.clone(); let mut note = prog.stderr.clone();
note.extend_from_slice(&prog.stdout); note.extend_from_slice(&prog.stdout);
sess.struct_err(&format!("linking with `{}` failed: {}", handler.struct_err(&format!("linking with `{}` failed: {}",
pname.display(), pname.display(),
prog.status)) prog.status))
.note(&format!("{:?}", &cmd)) .note(&format!("{:?}", &cmd))
.note(str::from_utf8(&note[..]).unwrap()) .note(str::from_utf8(&note[..]).unwrap())
.emit(); .emit();
sess.abort_if_errors(); handler.abort_if_errors();
} }
}, },
Err(e) => { Err(e) => {
sess.err(&format!("could not exec the linker `{}`: {}", pname.display(), e)); handler.err(&format!("could not exec the linker `{}`: {}", pname.display(), e));
sess.abort_if_errors(); handler.abort_if_errors();
} }
} }
} }
@ -2133,7 +2163,6 @@ pub struct OngoingCrateTranslation {
metadata: EncodedMetadata, metadata: EncodedMetadata,
windows_subsystem: Option<String>, windows_subsystem: Option<String>,
linker_info: LinkerInfo, linker_info: LinkerInfo,
no_integrated_as: bool,
crate_info: CrateInfo, crate_info: CrateInfo,
time_graph: Option<TimeGraph>, time_graph: Option<TimeGraph>,
coordinator_send: Sender<Box<Any + Send>>, coordinator_send: Sender<Box<Any + Send>>,
@ -2189,26 +2218,6 @@ impl OngoingCrateTranslation {
metadata_module: compiled_modules.metadata_module, metadata_module: compiled_modules.metadata_module,
}; };
if self.no_integrated_as {
run_assembler(sess, &self.output_filenames);
// HACK the linker expects the object file to be named foo.0.o but
// `run_assembler` produces an object named just foo.o. Rename it if we
// are going to build an executable
if sess.opts.output_types.contains_key(&OutputType::Exe) {
let f = self.output_filenames.path(OutputType::Object);
rename_or_copy_remove(&f,
f.with_file_name(format!("{}.0.o",
f.file_stem().unwrap().to_string_lossy()))).unwrap();
}
// Remove assembly source, unless --save-temps was specified
if !sess.opts.cg.save_temps {
fs::remove_file(&self.output_filenames
.temp_path(OutputType::Assembly, None)).unwrap();
}
}
trans trans
} }


@ -78,7 +78,7 @@ use std::ffi::CString;
use std::str; use std::str;
use std::sync::Arc; use std::sync::Arc;
use std::time::{Instant, Duration}; use std::time::{Instant, Duration};
use std::i32; use std::{i32, usize};
use std::iter; use std::iter;
use std::sync::mpsc; use std::sync::mpsc;
use syntax_pos::Span; use syntax_pos::Span;
@ -823,12 +823,10 @@ pub fn trans_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module); ongoing_translation.submit_pre_translated_module_to_llvm(tcx, metadata_module);
// We sort the codegen units by size. This way we can schedule work for LLVM // We sort the codegen units by size. This way we can schedule work for LLVM
// a bit more efficiently. Note that "size" is defined rather crudely at the // a bit more efficiently.
// moment as it is just the number of TransItems in the CGU, not taking into
// account the size of each TransItem.
let codegen_units = { let codegen_units = {
let mut codegen_units = codegen_units; let mut codegen_units = codegen_units;
codegen_units.sort_by_key(|cgu| -(cgu.items().len() as isize)); codegen_units.sort_by_key(|cgu| usize::MAX - cgu.size_estimate());
codegen_units codegen_units
}; };
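The new sort key relies on `usize::MAX - size` inverting the order; a quick standalone check of that trick (illustrative values only):

fn main() {
    let mut sizes: Vec<usize> = vec![10, 3, 7];
    // Subtracting from usize::MAX flips the ordering, so sort_by_key
    // schedules the largest codegen unit first.
    sizes.sort_by_key(|&s| usize::MAX - s);
    assert_eq!(sizes, vec![10, 7, 3]);
}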


@ -16,9 +16,11 @@ use context::CodegenCx;
use rustc::ty::layout::{self, TyLayout, Size}; use rustc::ty::layout::{self, TyLayout, Size};
#[derive(Clone, Copy, PartialEq, Debug)] /// Classification of "eightbyte" components.
// NB: the order of the variants is from general to specific,
// such that `unify(a, b)` is the "smaller" of `a` and `b`.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Class { enum Class {
None,
Int, Int,
Sse, Sse,
SseUp SseUp
@ -32,29 +34,10 @@ const LARGEST_VECTOR_SIZE: usize = 512;
const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64; const MAX_EIGHTBYTES: usize = LARGEST_VECTOR_SIZE / 64;
fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>) fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>)
-> Result<[Class; MAX_EIGHTBYTES], Memory> { -> Result<[Option<Class>; MAX_EIGHTBYTES], Memory> {
fn unify(cls: &mut [Class],
off: Size,
c: Class) {
let i = (off.bytes() / 8) as usize;
let to_write = match (cls[i], c) {
(Class::None, _) => c,
(_, Class::None) => return,
(Class::Int, _) |
(_, Class::Int) => Class::Int,
(Class::Sse, _) |
(_, Class::Sse) => Class::Sse,
(Class::SseUp, Class::SseUp) => Class::SseUp
};
cls[i] = to_write;
}
fn classify<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, fn classify<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
layout: TyLayout<'tcx>, layout: TyLayout<'tcx>,
cls: &mut [Class], cls: &mut [Option<Class>],
off: Size) off: Size)
-> Result<(), Memory> { -> Result<(), Memory> {
if !off.is_abi_aligned(layout.align) { if !off.is_abi_aligned(layout.align) {
@ -64,31 +47,20 @@ fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>)
return Ok(()); return Ok(());
} }
match layout.abi { let mut c = match layout.abi {
layout::Abi::Uninhabited => {} layout::Abi::Uninhabited => return Ok(()),
layout::Abi::Scalar(ref scalar) => { layout::Abi::Scalar(ref scalar) => {
let reg = match scalar.value { match scalar.value {
layout::Int(..) | layout::Int(..) |
layout::Pointer => Class::Int, layout::Pointer => Class::Int,
layout::F32 | layout::F32 |
layout::F64 => Class::Sse layout::F64 => Class::Sse
};
unify(cls, off, reg);
}
layout::Abi::Vector { ref element, count } => {
unify(cls, off, Class::Sse);
// everything after the first one is the upper
// half of a register.
let stride = element.value.size(cx);
for i in 1..count {
let field_off = off + stride * i;
unify(cls, field_off, Class::SseUp);
} }
} }
layout::Abi::Vector { .. } => Class::Sse,
layout::Abi::ScalarPair(..) | layout::Abi::ScalarPair(..) |
layout::Abi::Aggregate { .. } => { layout::Abi::Aggregate { .. } => {
match layout.variants { match layout.variants {
@ -97,12 +69,26 @@ fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>)
let field_off = off + layout.fields.offset(i); let field_off = off + layout.fields.offset(i);
classify(cx, layout.field(cx, i), cls, field_off)?; classify(cx, layout.field(cx, i), cls, field_off)?;
} }
return Ok(());
} }
layout::Variants::Tagged { .. } | layout::Variants::Tagged { .. } |
layout::Variants::NicheFilling { .. } => return Err(Memory), layout::Variants::NicheFilling { .. } => return Err(Memory),
} }
} }
};
// Fill in `cls` for scalars (Int/Sse) and vectors (Sse).
let first = (off.bytes() / 8) as usize;
let last = ((off.bytes() + layout.size.bytes() - 1) / 8) as usize;
for cls in &mut cls[first..=last] {
*cls = Some(cls.map_or(c, |old| old.min(c)));
// Everything after the first Sse "eightbyte"
// component is the upper half of a register.
if c == Class::Sse {
c = Class::SseUp;
}
} }
Ok(()) Ok(())
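The derived `Ord` on `Class` together with `old.min(c)` above reproduces the removed `unify` table; a standalone sketch using a stand-in enum with the same variant order:

// Stand-in enum: Int < Sse < SseUp, matching the ordering comment above.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Debug)]
enum Class { Int, Sse, SseUp }

fn merge(slot: Option<Class>, c: Class) -> Option<Class> {
    // An empty eightbyte slot takes the new class; otherwise the
    // "smaller" (more general) class wins, exactly like the old unify().
    Some(slot.map_or(c, |old| old.min(c)))
}

fn main() {
    assert_eq!(merge(None, Class::Sse), Some(Class::Sse));
    assert_eq!(merge(Some(Class::Sse), Class::Int), Some(Class::Int));
    assert_eq!(merge(Some(Class::SseUp), Class::Sse), Some(Class::Sse));
}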
@ -113,23 +99,23 @@ fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>)
return Err(Memory); return Err(Memory);
} }
let mut cls = [Class::None; MAX_EIGHTBYTES]; let mut cls = [None; MAX_EIGHTBYTES];
classify(cx, arg.layout, &mut cls, Size::from_bytes(0))?; classify(cx, arg.layout, &mut cls, Size::from_bytes(0))?;
if n > 2 { if n > 2 {
if cls[0] != Class::Sse { if cls[0] != Some(Class::Sse) {
return Err(Memory); return Err(Memory);
} }
if cls[1..n].iter().any(|&c| c != Class::SseUp) { if cls[1..n].iter().any(|&c| c != Some(Class::SseUp)) {
return Err(Memory); return Err(Memory);
} }
} else { } else {
let mut i = 0; let mut i = 0;
while i < n { while i < n {
if cls[i] == Class::SseUp { if cls[i] == Some(Class::SseUp) {
cls[i] = Class::Sse; cls[i] = Some(Class::Sse);
} else if cls[i] == Class::Sse { } else if cls[i] == Some(Class::Sse) {
i += 1; i += 1;
while i != n && cls[i] == Class::SseUp { i += 1; } while i != n && cls[i] == Some(Class::SseUp) { i += 1; }
} else { } else {
i += 1; i += 1;
} }
@ -139,14 +125,14 @@ fn classify_arg<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, arg: &ArgType<'tcx>)
Ok(cls) Ok(cls)
} }
fn reg_component(cls: &[Class], i: &mut usize, size: Size) -> Option<Reg> { fn reg_component(cls: &[Option<Class>], i: &mut usize, size: Size) -> Option<Reg> {
if *i >= cls.len() { if *i >= cls.len() {
return None; return None;
} }
match cls[*i] { match cls[*i] {
Class::None => None, None => None,
Class::Int => { Some(Class::Int) => {
*i += 1; *i += 1;
Some(match size.bytes() { Some(match size.bytes() {
1 => Reg::i8(), 1 => Reg::i8(),
@ -156,8 +142,10 @@ fn reg_component(cls: &[Class], i: &mut usize, size: Size) -> Option<Reg> {
_ => Reg::i64() _ => Reg::i64()
}) })
} }
Class::Sse => { Some(Class::Sse) => {
let vec_len = 1 + cls[*i+1..].iter().take_while(|&&c| c == Class::SseUp).count(); let vec_len = 1 + cls[*i+1..].iter()
.take_while(|&&c| c == Some(Class::SseUp))
.count();
*i += vec_len; *i += vec_len;
Some(if vec_len == 1 { Some(if vec_len == 1 {
match size.bytes() { match size.bytes() {
@ -171,20 +159,20 @@ fn reg_component(cls: &[Class], i: &mut usize, size: Size) -> Option<Reg> {
} }
}) })
} }
c => bug!("reg_component: unhandled class {:?}", c) Some(c) => bug!("reg_component: unhandled class {:?}", c)
} }
} }
fn cast_target(cls: &[Class], size: Size) -> CastTarget { fn cast_target(cls: &[Option<Class>], size: Size) -> CastTarget {
let mut i = 0; let mut i = 0;
let lo = reg_component(cls, &mut i, size).unwrap(); let lo = reg_component(cls, &mut i, size).unwrap();
let offset = Size::from_bytes(8) * (i as u64); let offset = Size::from_bytes(8) * (i as u64);
let target = if size <= offset {
    CastTarget::from(lo)
} else {
    let hi = reg_component(cls, &mut i, size - offset).unwrap();
    CastTarget::Pair(lo, hi)
};

let mut target = CastTarget::from(lo);
if size > offset {
    if let Some(hi) = reg_component(cls, &mut i, size - offset) {
        target = CastTarget::Pair(lo, hi);
    }
}
assert_eq!(reg_component(cls, &mut i, Size::from_bytes(0)), None); assert_eq!(reg_component(cls, &mut i, Size::from_bytes(0)), None);
target target
} }
@ -194,44 +182,48 @@ pub fn compute_abi_info<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>, fty: &mut FnType<'tc
let mut sse_regs = 8; // XMM0-7 let mut sse_regs = 8; // XMM0-7
let mut x86_64_ty = |arg: &mut ArgType<'tcx>, is_arg: bool| { let mut x86_64_ty = |arg: &mut ArgType<'tcx>, is_arg: bool| {
let cls = classify_arg(cx, arg); let mut cls_or_mem = classify_arg(cx, arg);
let mut needed_int = 0; let mut needed_int = 0;
let mut needed_sse = 0; let mut needed_sse = 0;
let in_mem = match cls { if is_arg {
Err(Memory) => true, if let Ok(cls) = cls_or_mem {
Ok(ref cls) if is_arg => { for &c in &cls {
for &c in cls {
match c { match c {
Class::Int => needed_int += 1, Some(Class::Int) => needed_int += 1,
Class::Sse => needed_sse += 1, Some(Class::Sse) => needed_sse += 1,
_ => {} _ => {}
} }
} }
arg.layout.is_aggregate() && if arg.layout.is_aggregate() {
(int_regs < needed_int || sse_regs < needed_sse) if int_regs < needed_int || sse_regs < needed_sse {
cls_or_mem = Err(Memory);
}
}
} }
Ok(_) => false }
};
if in_mem { match cls_or_mem {
if is_arg { Err(Memory) => {
arg.make_indirect_byval(); if is_arg {
} else { arg.make_indirect_byval();
// `sret` parameter thus one less integer register available } else {
arg.make_indirect(); // `sret` parameter thus one less integer register available
int_regs -= 1; arg.make_indirect();
int_regs -= 1;
}
} }
} else { Ok(ref cls) => {
// split into sized chunks passed individually // split into sized chunks passed individually
int_regs -= needed_int; int_regs -= needed_int;
sse_regs -= needed_sse; sse_regs -= needed_sse;
if arg.layout.is_aggregate() { if arg.layout.is_aggregate() {
let size = arg.layout.size; let size = arg.layout.size;
arg.cast_to(cast_target(cls.as_ref().unwrap(), size)) arg.cast_to(cast_target(cls, size))
} else { } else {
arg.extend_integer_width_to(32); arg.extend_integer_width_to(32);
}
} }
} }
}; };
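A minimal sketch of the register bookkeeping in the closure above, with stand-in types rather than the compiler's: once the remaining integer/SSE registers cannot cover what an aggregate needs, it falls back to memory.

#[derive(Clone, Copy, PartialEq)]
enum Class { Int, Sse }

// Returns true if the value still fits in registers (and deducts what it
// uses); false corresponds to the `Err(Memory)` fallback above.
fn take_registers(cls: &[Class], int_regs: &mut usize, sse_regs: &mut usize) -> bool {
    let needed_int = cls.iter().filter(|&&c| c == Class::Int).count();
    let needed_sse = cls.iter().filter(|&&c| c == Class::Sse).count();
    if *int_regs < needed_int || *sse_regs < needed_sse {
        return false;
    }
    *int_regs -= needed_int;
    *sse_regs -= needed_sse;
    true
}

fn main() {
    let (mut int_regs, mut sse_regs) = (6usize, 8usize); // RDI..R9 and XMM0-XMM7
    assert!(take_registers(&[Class::Int, Class::Sse], &mut int_regs, &mut sse_regs));
    assert_eq!((int_regs, sse_regs), (5, 7));
}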


@ -270,6 +270,9 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CodegenCx<'a, 'tcx>,
} }
None => {} None => {}
}; };
if sig.output().is_never() {
flags = flags | DIFlags::FlagNoReturn;
}
let fn_metadata = unsafe { let fn_metadata = unsafe {
llvm::LLVMRustDIBuilderCreateFunction( llvm::LLVMRustDIBuilderCreateFunction(


@ -487,16 +487,18 @@ fn arg_local_refs<'a, 'tcx>(bx: &Builder<'a, 'tcx>,
// The Rust ABI passes indirect variables using a pointer and a manual copy, so we // The Rust ABI passes indirect variables using a pointer and a manual copy, so we
// need to insert a deref here, but the C ABI uses a pointer and a copy using the // need to insert a deref here, but the C ABI uses a pointer and a copy using the
// byval attribute, for which LLVM does the deref itself, so we must not add it. // byval attribute, for which LLVM does the deref itself, so we must not add it.
// Starting with D31439 in LLVM 5, it *always* does the deref itself.
let mut variable_access = VariableAccess::DirectVariable { let mut variable_access = VariableAccess::DirectVariable {
alloca: place.llval alloca: place.llval
}; };
if unsafe { llvm::LLVMRustVersionMajor() < 5 } {
if let PassMode::Indirect(ref attrs) = arg.mode { if let PassMode::Indirect(ref attrs) = arg.mode {
if !attrs.contains(ArgAttribute::ByVal) { if !attrs.contains(ArgAttribute::ByVal) {
variable_access = VariableAccess::IndirectVariable { variable_access = VariableAccess::IndirectVariable {
alloca: place.llval, alloca: place.llval,
address_operations: &deref_op, address_operations: &deref_op,
}; };
}
} }
} }


@ -979,7 +979,7 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
let item_id = tcx.hir.get_parent_node(node_id); let item_id = tcx.hir.get_parent_node(node_id);
let item_def_id = tcx.hir.local_def_id(item_id); let item_def_id = tcx.hir.local_def_id(item_id);
let generics = tcx.generics_of(item_def_id); let generics = tcx.generics_of(item_def_id);
let index = generics.type_param_to_index[&tcx.hir.local_def_id(node_id).index]; let index = generics.type_param_to_index[&tcx.hir.local_def_id(node_id)];
tcx.mk_param(index, tcx.hir.name(node_id)) tcx.mk_param(index, tcx.hir.name(node_id))
} }
Def::SelfTy(_, Some(def_id)) => { Def::SelfTy(_, Some(def_id)) => {
@ -1206,22 +1206,27 @@ impl<'o, 'gcx: 'tcx, 'tcx> AstConv<'gcx, 'tcx>+'o {
let output = bare_fn_ty.output(); let output = bare_fn_ty.output();
let late_bound_in_ret = tcx.collect_referenced_late_bound_regions(&output); let late_bound_in_ret = tcx.collect_referenced_late_bound_regions(&output);
for br in late_bound_in_ret.difference(&late_bound_in_args) { for br in late_bound_in_ret.difference(&late_bound_in_args) {
let br_name = match *br {
    ty::BrNamed(_, name) => name,
    _ => {
        span_bug!(
            decl.output.span(),
            "anonymous bound region {:?} in return but not args",
            br);
    }
};
struct_span_err!(tcx.sess,
                 decl.output.span(),
                 E0581,
                 "return type references lifetime `{}`, \
                  which does not appear in the fn input types",
                 br_name)
    .emit();

let lifetime_name = match *br {
    ty::BrNamed(_, name) => format!("lifetime `{}`,", name),
    ty::BrAnon(_) | ty::BrFresh(_) | ty::BrEnv => format!("an anonymous lifetime"),
};
let mut err = struct_span_err!(tcx.sess,
                               decl.output.span(),
                               E0581,
                               "return type references {} \
                                which is not constrained by the fn input types",
                               lifetime_name);
if let ty::BrAnon(_) = *br {
    // The only way for an anonymous lifetime to wind up
    // in the return type but **also** be unconstrained is
    // if it only appears in "associated types" in the
    // input. See #47511 for an example. In this case,
    // though, we can easily give a hint that ought to be
    // relevant.
    err.note("lifetimes appearing in an associated type \
              are not considered constrained");
}
err.emit();
} }
bare_fn_ty bare_fn_ty


@ -281,10 +281,12 @@ impl<'a, 'gcx, 'tcx> CastCheck<'tcx> {
.emit(); .emit();
} }
CastError::SizedUnsizedCast => { CastError::SizedUnsizedCast => {
type_error_struct!(fcx.tcx.sess, self.span, self.expr_ty, E0607,
                   "cannot cast thin pointer `{}` to fat pointer `{}`",
                   self.expr_ty,
                   fcx.ty_to_string(self.cast_ty)).emit();

use structured_errors::{SizedUnsizedCastError, StructuredDiagnostic};
SizedUnsizedCastError::new(&fcx.tcx.sess,
                           self.span,
                           self.expr_ty,
                           fcx.ty_to_string(self.cast_ty))
    .diagnostic().emit();
} }
CastError::UnknownCastPtrKind | CastError::UnknownCastPtrKind |
CastError::UnknownExprPtrKind => { CastError::UnknownExprPtrKind => {


@ -103,6 +103,7 @@ use rustc::ty::maps::Providers;
use rustc::ty::util::{Representability, IntTypeExt}; use rustc::ty::util::{Representability, IntTypeExt};
use rustc::ty::layout::LayoutOf; use rustc::ty::layout::LayoutOf;
use errors::{DiagnosticBuilder, DiagnosticId}; use errors::{DiagnosticBuilder, DiagnosticId};
use require_c_abi_if_variadic; use require_c_abi_if_variadic;
use session::{CompileIncomplete, config, Session}; use session::{CompileIncomplete, config, Session};
use TypeAndSubsts; use TypeAndSubsts;
@ -1636,7 +1637,7 @@ impl<'a, 'gcx, 'tcx> AstConv<'gcx, 'tcx> for FnCtxt<'a, 'gcx, 'tcx> {
let item_id = tcx.hir.ty_param_owner(node_id); let item_id = tcx.hir.ty_param_owner(node_id);
let item_def_id = tcx.hir.local_def_id(item_id); let item_def_id = tcx.hir.local_def_id(item_id);
let generics = tcx.generics_of(item_def_id); let generics = tcx.generics_of(item_def_id);
let index = generics.type_param_to_index[&def_id.index]; let index = generics.type_param_to_index[&def_id];
ty::GenericPredicates { ty::GenericPredicates {
parent: None, parent: None,
predicates: self.param_env.caller_bounds.iter().filter(|predicate| { predicates: self.param_env.caller_bounds.iter().filter(|predicate| {
@ -2599,9 +2600,8 @@ impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
// arguments which we skipped above. // arguments which we skipped above.
if variadic { if variadic {
fn variadic_error<'tcx>(s: &Session, span: Span, t: Ty<'tcx>, cast_ty: &str) { fn variadic_error<'tcx>(s: &Session, span: Span, t: Ty<'tcx>, cast_ty: &str) {
type_error_struct!(s, span, t, E0617,
                   "can't pass `{}` to variadic function, cast to `{}`",
                   t, cast_ty).emit();

use structured_errors::{VariadicError, StructuredDiagnostic};
VariadicError::new(s, span, t, cast_ty).diagnostic().emit();
} }
for arg in args.iter().skip(expected_arg_count) { for arg in args.iter().skip(expected_arg_count) {


@ -40,8 +40,6 @@ use util::nodemap::FxHashMap;
use rustc_const_math::ConstInt; use rustc_const_math::ConstInt;
use std::collections::BTreeMap;
use syntax::{abi, ast}; use syntax::{abi, ast};
use syntax::codemap::Spanned; use syntax::codemap::Spanned;
use syntax::symbol::{Symbol, keywords}; use syntax::symbol::{Symbol, keywords};
@ -240,7 +238,7 @@ fn type_param_predicates<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
let param_owner = tcx.hir.ty_param_owner(param_id); let param_owner = tcx.hir.ty_param_owner(param_id);
let param_owner_def_id = tcx.hir.local_def_id(param_owner); let param_owner_def_id = tcx.hir.local_def_id(param_owner);
let generics = tcx.generics_of(param_owner_def_id); let generics = tcx.generics_of(param_owner_def_id);
let index = generics.type_param_to_index[&def_id.index]; let index = generics.type_param_to_index[&def_id];
let ty = tcx.mk_param(index, tcx.hir.ty_param_name(param_id)); let ty = tcx.mk_param(index, tcx.hir.ty_param_name(param_id));
// Don't look for bounds where the type parameter isn't in scope. // Don't look for bounds where the type parameter isn't in scope.
@ -1024,10 +1022,9 @@ fn generics_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}); });
} }
let mut type_param_to_index = BTreeMap::new(); let type_param_to_index = types.iter()
for param in &types { .map(|param| (param.def_id, param.index))
type_param_to_index.insert(param.def_id.index, param.index); .collect();
}
tcx.alloc_generics(ty::Generics { tcx.alloc_generics(ty::Generics {
parent: parent_def_id, parent: parent_def_id,


@ -123,16 +123,17 @@ use std::iter;
// registered before they are used. // registered before they are used.
mod diagnostics; mod diagnostics;
mod astconv;
mod check; mod check;
mod check_unused; mod check_unused;
mod astconv; mod coherence;
mod collect; mod collect;
mod constrained_type_params; mod constrained_type_params;
mod structured_errors;
mod impl_wf_check; mod impl_wf_check;
mod coherence; mod namespace;
mod outlives; mod outlives;
mod variance; mod variance;
mod namespace;
pub struct TypeAndSubsts<'tcx> { pub struct TypeAndSubsts<'tcx> {
substs: &'tcx Substs<'tcx>, substs: &'tcx Substs<'tcx>,


@ -0,0 +1,150 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use rustc::session::Session;
use syntax_pos::Span;
use errors::{DiagnosticId, DiagnosticBuilder};
use rustc::ty::{Ty, TypeFoldable};
pub trait StructuredDiagnostic<'tcx> {
fn session(&self) -> &Session;
fn code(&self) -> DiagnosticId;
fn common(&self) -> DiagnosticBuilder<'tcx>;
fn diagnostic(&self) -> DiagnosticBuilder<'tcx> {
let err = self.common();
if self.session().teach(&self.code()) {
self.extended(err)
} else {
self.regular(err)
}
}
fn regular(&self, err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> {
err
}
fn extended(&self, err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> {
err
}
}
pub struct VariadicError<'tcx> {
sess: &'tcx Session,
span: Span,
t: Ty<'tcx>,
cast_ty: &'tcx str,
}
impl<'tcx> VariadicError<'tcx> {
pub fn new(sess: &'tcx Session,
span: Span,
t: Ty<'tcx>,
cast_ty: &'tcx str) -> VariadicError<'tcx> {
VariadicError { sess, span, t, cast_ty }
}
}
impl<'tcx> StructuredDiagnostic<'tcx> for VariadicError<'tcx> {
fn session(&self) -> &Session { self.sess }
fn code(&self) -> DiagnosticId {
__diagnostic_used!(E0617);
DiagnosticId::Error("E0617".to_owned())
}
fn common(&self) -> DiagnosticBuilder<'tcx> {
let mut err = if self.t.references_error() {
self.sess.diagnostic().struct_dummy()
} else {
self.sess.struct_span_fatal_with_code(
self.span,
&format!("can't pass `{}` to variadic function", self.t),
self.code(),
)
};
if let Ok(snippet) = self.sess.codemap().span_to_snippet(self.span) {
err.span_suggestion(self.span,
&format!("cast the value to `{}`", self.cast_ty),
format!("{} as {}", snippet, self.cast_ty));
} else {
err.help(&format!("cast the value to `{}`", self.cast_ty));
}
err
}
fn extended(&self, mut err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> {
err.note(&format!("certain types, like `{}`, must be cast before passing them to a \
variadic function, because of arcane ABI rules dictated by the C \
standard",
self.t));
err
}
}
pub struct SizedUnsizedCastError<'tcx> {
sess: &'tcx Session,
span: Span,
expr_ty: Ty<'tcx>,
cast_ty: String,
}
impl<'tcx> SizedUnsizedCastError<'tcx> {
pub fn new(sess: &'tcx Session,
span: Span,
expr_ty: Ty<'tcx>,
cast_ty: String) -> SizedUnsizedCastError<'tcx> {
SizedUnsizedCastError { sess, span, expr_ty, cast_ty }
}
}
impl<'tcx> StructuredDiagnostic<'tcx> for SizedUnsizedCastError<'tcx> {
fn session(&self) -> &Session { self.sess }
fn code(&self) -> DiagnosticId {
__diagnostic_used!(E0607);
DiagnosticId::Error("E0607".to_owned())
}
fn common(&self) -> DiagnosticBuilder<'tcx> {
if self.expr_ty.references_error() {
self.sess.diagnostic().struct_dummy()
} else {
self.sess.struct_span_fatal_with_code(
self.span,
&format!("cannot cast thin pointer `{}` to fat pointer `{}`",
self.expr_ty,
self.cast_ty),
self.code(),
)
}
}
fn extended(&self, mut err: DiagnosticBuilder<'tcx>) -> DiagnosticBuilder<'tcx> {
err.help(
"Thin pointers are \"simple\" pointers: they are purely a reference to a
memory address.
Fat pointers are pointers referencing \"Dynamically Sized Types\" (also
called DSTs). DSTs don't have a statically known size, therefore they can
only exist behind some kind of pointers that contain additional
information. Slices and trait objects are DSTs. In the case of slices,
the additional information the fat pointer holds is their size.
To fix this error, don't try to cast directly between thin and fat
pointers.
For more information about casts, take a look at The Book:
https://doc.rust-lang.org/book/first-edition/casting-between-types.html");
err
}
}
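The thin/fat distinction the help text describes is easy to observe directly; a small standalone check (not part of the diff):

use std::mem::size_of;

fn main() {
    // A thin pointer is just an address.
    let thin = size_of::<*const u8>();
    // Fat pointers carry an extra word: a length for slices,
    // a vtable pointer for trait objects.
    assert_eq!(size_of::<*const [u8]>(), 2 * thin);
    assert_eq!(size_of::<*const dyn std::fmt::Debug>(), 2 * thin);
}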


@ -14,16 +14,12 @@
//! We walk the set of items and, for each member, generate new constraints. //! We walk the set of items and, for each member, generate new constraints.
use hir::def_id::DefId; use hir::def_id::DefId;
use rustc::dep_graph::{DepGraphSafe, DepKind, DepNodeColor};
use rustc::ich::StableHashingContext;
use rustc::ty::subst::Substs; use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt}; use rustc::ty::{self, Ty, TyCtxt};
use syntax::ast; use syntax::ast;
use rustc::hir; use rustc::hir;
use rustc::hir::itemlikevisit::ItemLikeVisitor; use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc_data_structures::stable_hasher::StableHashingContextProvider;
use super::terms::*; use super::terms::*;
use super::terms::VarianceTerm::*; use super::terms::VarianceTerm::*;
@ -132,50 +128,11 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for ConstraintContext<'a, 'tcx> {
} }
} }
impl<'a, 'tcx> StableHashingContextProvider for ConstraintContext<'a, 'tcx> {
type ContextType = StableHashingContext<'tcx>;
fn create_stable_hashing_context(&self) -> Self::ContextType {
self.terms_cx.tcx.create_stable_hashing_context()
}
}
impl<'a, 'tcx> DepGraphSafe for ConstraintContext<'a, 'tcx> {}
impl<'a, 'tcx> ConstraintContext<'a, 'tcx> { impl<'a, 'tcx> ConstraintContext<'a, 'tcx> {
fn visit_node_helper(&mut self, id: ast::NodeId) { fn visit_node_helper(&mut self, id: ast::NodeId) {
let tcx = self.terms_cx.tcx; let tcx = self.terms_cx.tcx;
let def_id = tcx.hir.local_def_id(id); let def_id = tcx.hir.local_def_id(id);
self.build_constraints_for_item(def_id);
// Encapsulate constructing the constraints into a task we can
// reference later. This can go away once the red-green
// algorithm is in place.
//
// See README.md for a detailed discussion
// on dep-graph management.
let dep_node = def_id.to_dep_node(tcx, DepKind::ItemVarianceConstraints);
if let Some(DepNodeColor::Green(_)) = tcx.dep_graph.node_color(&dep_node) {
// If the corresponding node has already been marked as green, the
// appropriate portion of the DepGraph has already been loaded from
// the previous graph, so we don't do any dep-tracking. Since we
// don't cache any values though, we still have to re-run the
// computation.
tcx.dep_graph.with_ignore(|| {
self.build_constraints_for_item(def_id);
});
} else {
tcx.dep_graph.with_task(dep_node,
self,
def_id,
visit_item_task);
}
fn visit_item_task<'a, 'tcx>(ccx: &mut ConstraintContext<'a, 'tcx>,
def_id: DefId)
{
ccx.build_constraints_for_item(def_id);
}
} }
fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> { fn tcx(&self) -> TyCtxt<'a, 'tcx, 'tcx> {


@ -12,7 +12,6 @@
//! parameters. See README.md for details. //! parameters. See README.md for details.
use arena; use arena;
use rustc::dep_graph::DepKind;
use rustc::hir; use rustc::hir;
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE}; use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::ty::{self, CrateVariancesMap, TyCtxt}; use rustc::ty::{self, CrateVariancesMap, TyCtxt};
@ -95,9 +94,6 @@ fn variances_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId)
// Everything else must be inferred. // Everything else must be inferred.
let crate_map = tcx.crate_variances(LOCAL_CRATE); let crate_map = tcx.crate_variances(LOCAL_CRATE);
let dep_node = item_def_id.to_dep_node(tcx, DepKind::ItemVarianceConstraints);
tcx.dep_graph.read(dep_node);
crate_map.variances.get(&item_def_id) crate_map.variances.get(&item_def_id)
.unwrap_or(&crate_map.empty_variance) .unwrap_or(&crate_map.empty_variance)
.clone() .clone()


@ -659,7 +659,8 @@ pub struct Attributes {
pub other_attrs: Vec<ast::Attribute>, pub other_attrs: Vec<ast::Attribute>,
pub cfg: Option<Rc<Cfg>>, pub cfg: Option<Rc<Cfg>>,
pub span: Option<syntax_pos::Span>, pub span: Option<syntax_pos::Span>,
pub links: Vec<(String, DefId)>, /// map from Rust paths to resolved defs and potential URL fragments
pub links: Vec<(String, DefId, Option<String>)>,
} }
impl Attributes { impl Attributes {
@ -820,8 +821,12 @@ impl Attributes {
/// Cache must be populated before call /// Cache must be populated before call
pub fn links(&self) -> Vec<(String, String)> { pub fn links(&self) -> Vec<(String, String)> {
use html::format::href; use html::format::href;
self.links.iter().filter_map(|&(ref s, did)| { self.links.iter().filter_map(|&(ref s, did, ref fragment)| {
if let Some((href, ..)) = href(did) { if let Some((mut href, ..)) = href(did) {
if let Some(ref fragment) = *fragment {
href.push_str("#");
href.push_str(fragment);
}
Some((s.clone(), href)) Some((s.clone(), href))
} else { } else {
None None
@ -843,10 +848,8 @@ impl AttributesExt for Attributes {
/// they exist in both namespaces (structs and modules) /// they exist in both namespaces (structs and modules)
fn value_ns_kind(def: Def, path_str: &str) -> Option<(&'static str, String)> { fn value_ns_kind(def: Def, path_str: &str) -> Option<(&'static str, String)> {
match def { match def {
// structs and mods exist in both namespaces. skip them // structs, variants, and mods exist in both namespaces. skip them
Def::StructCtor(..) | Def::Mod(..) => None, Def::StructCtor(..) | Def::Mod(..) | Def::Variant(..) | Def::VariantCtor(..) => None,
Def::Variant(..) | Def::VariantCtor(..)
=> Some(("variant", format!("{}()", path_str))),
Def::Fn(..) Def::Fn(..)
=> Some(("function", format!("{}()", path_str))), => Some(("function", format!("{}()", path_str))),
Def::Method(..) Def::Method(..)
@ -880,10 +883,10 @@ fn ambiguity_error(cx: &DocContext, attrs: &Attributes,
let sp = attrs.doc_strings.first() let sp = attrs.doc_strings.first()
.map_or(DUMMY_SP, |a| a.span()); .map_or(DUMMY_SP, |a| a.span());
cx.sess() cx.sess()
.struct_span_err(sp, .struct_span_warn(sp,
&format!("`{}` is both {} {} and {} {}", &format!("`{}` is both {} {} and {} {}",
path_str, article1, kind1, path_str, article1, kind1,
article2, kind2)) article2, kind2))
.help(&format!("try `{}` if you want to select the {}, \ .help(&format!("try `{}` if you want to select the {}, \
or `{}` if you want to \ or `{}` if you want to \
select the {}", select the {}",
@ -892,21 +895,114 @@ fn ambiguity_error(cx: &DocContext, attrs: &Attributes,
.emit(); .emit();
} }
/// Given an enum variant's def, return the def of its enum and the associated fragment
fn handle_variant(cx: &DocContext, def: Def) -> Result<(Def, Option<String>), ()> {
use rustc::ty::DefIdTree;
let parent = if let Some(parent) = cx.tcx.parent(def.def_id()) {
parent
} else {
return Err(())
};
let parent_def = Def::Enum(parent);
let variant = cx.tcx.expect_variant_def(def);
Ok((parent_def, Some(format!("{}.v", variant.name))))
}
/// Resolve a given string as a path, along with whether or not it is /// Resolve a given string as a path, along with whether or not it is
/// in the value namespace /// in the value namespace. Also returns an optional URL fragment in the case
fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<hir::Path, ()> { /// of variants and methods
fn resolve(cx: &DocContext, path_str: &str, is_val: bool) -> Result<(Def, Option<String>), ()> {
// In case we're in a module, try to resolve the relative // In case we're in a module, try to resolve the relative
// path // path
if let Some(id) = cx.mod_ids.borrow().last() { if let Some(id) = cx.mod_ids.borrow().last() {
cx.resolver.borrow_mut() let result = cx.resolver.borrow_mut()
.with_scope(*id, |resolver| { .with_scope(*id,
resolver.resolve_str_path_error(DUMMY_SP, |resolver| {
&path_str, is_val) resolver.resolve_str_path_error(DUMMY_SP,
}) &path_str, is_val)
});
if let Ok(result) = result {
// In case this is a trait item, skip the
// early return and try looking for the trait
let value = match result.def {
Def::Method(_) | Def::AssociatedConst(_) => true,
Def::AssociatedTy(_) => false,
Def::Variant(_) => return handle_variant(cx, result.def),
// not a trait item, just return what we found
_ => return Ok((result.def, None))
};
if value != is_val {
return Err(())
}
} else {
// If resolution failed, it may still be a method
// because methods are not handled by the resolver
// If so, bail when we're not looking for a value
if !is_val {
return Err(())
}
}
// Try looking for methods and associated items
let mut split = path_str.rsplitn(2, "::");
let mut item_name = if let Some(first) = split.next() {
first
} else {
return Err(())
};
let mut path = if let Some(second) = split.next() {
second
} else {
return Err(())
};
let ty = cx.resolver.borrow_mut()
.with_scope(*id,
|resolver| {
resolver.resolve_str_path_error(DUMMY_SP,
&path, false)
})?;
match ty.def {
Def::Struct(did) | Def::Union(did) | Def::Enum(did) | Def::TyAlias(did) => {
let item = cx.tcx.inherent_impls(did).iter()
.flat_map(|imp| cx.tcx.associated_items(*imp))
.find(|item| item.name == item_name);
if let Some(item) = item {
if item.kind == ty::AssociatedKind::Method && is_val {
Ok((ty.def, Some(format!("method.{}", item_name))))
} else {
Err(())
}
} else {
Err(())
}
}
Def::Trait(did) => {
let item = cx.tcx.associated_item_def_ids(did).iter()
.map(|item| cx.tcx.associated_item(*item))
.find(|item| item.name == item_name);
if let Some(item) = item {
let kind = match item.kind {
ty::AssociatedKind::Const if is_val => "associatedconstant",
ty::AssociatedKind::Type if !is_val => "associatedtype",
ty::AssociatedKind::Method if is_val => "tymethod",
_ => return Err(())
};
Ok((ty.def, Some(format!("{}.{}", kind, item_name))))
} else {
Err(())
}
}
_ => Err(())
}
} else { } else {
// FIXME(Manishearth) this branch doesn't seem to ever be hit, really Err(())
cx.resolver.borrow_mut()
.resolve_str_path_error(DUMMY_SP, &path_str, is_val)
} }
} }
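To make the method-path handling above concrete, here is how the `rsplitn` split and the `method.{name}` fragment behave on an illustrative link like `Vec::push` (the href string is made up for the example):

fn main() {
    // rsplitn(2, "::") yields the last segment first: the item name,
    // then the remaining type path.
    let mut split = "Vec::push".rsplitn(2, "::");
    let item_name = split.next().unwrap();
    let path = split.next().unwrap();
    assert_eq!((path, item_name), ("Vec", "push"));

    // The fragment returned from resolve() is later appended to the
    // resolved href, as in Attributes::links above.
    let mut href = String::from("struct.Vec.html"); // illustrative
    href.push_str("#");
    href.push_str(&format!("method.{}", item_name));
    assert_eq!(href, "struct.Vec.html#method.push");
}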
@ -955,7 +1051,7 @@ impl Clean<Attributes> for [ast::Attribute] {
if UnstableFeatures::from_environment().is_nightly_build() { if UnstableFeatures::from_environment().is_nightly_build() {
let dox = attrs.collapsed_doc_value().unwrap_or_else(String::new); let dox = attrs.collapsed_doc_value().unwrap_or_else(String::new);
for link in markdown_links(&dox, cx.render_type) { for link in markdown_links(&dox, cx.render_type) {
let def = { let (def, fragment) = {
let mut kind = PathKind::Unknown; let mut kind = PathKind::Unknown;
let path_str = if let Some(prefix) = let path_str = if let Some(prefix) =
["struct@", "enum@", "type@", ["struct@", "enum@", "type@",
@ -965,7 +1061,8 @@ impl Clean<Attributes> for [ast::Attribute] {
link.trim_left_matches(prefix) link.trim_left_matches(prefix)
} else if let Some(prefix) = } else if let Some(prefix) =
["const@", "static@", ["const@", "static@",
"value@", "function@", "mod@", "fn@", "module@"] "value@", "function@", "mod@",
"fn@", "module@", "method@"]
.iter().find(|p| link.starts_with(**p)) { .iter().find(|p| link.starts_with(**p)) {
kind = PathKind::Value; kind = PathKind::Value;
link.trim_left_matches(prefix) link.trim_left_matches(prefix)
@ -993,8 +1090,8 @@ impl Clean<Attributes> for [ast::Attribute] {
match kind { match kind {
PathKind::Value => { PathKind::Value => {
if let Ok(path) = resolve(cx, path_str, true) { if let Ok(def) = resolve(cx, path_str, true) {
path.def def
} else { } else {
// this could just be a normal link or a broken link // this could just be a normal link or a broken link
// we could potentially check if something is // we could potentially check if something is
@ -1003,8 +1100,8 @@ impl Clean<Attributes> for [ast::Attribute] {
} }
} }
PathKind::Type => { PathKind::Type => {
if let Ok(path) = resolve(cx, path_str, false) { if let Ok(def) = resolve(cx, path_str, false) {
path.def def
} else { } else {
// this could just be a normal link // this could just be a normal link
continue; continue;
@ -1013,42 +1110,42 @@ impl Clean<Attributes> for [ast::Attribute] {
PathKind::Unknown => { PathKind::Unknown => {
// try everything! // try everything!
if let Some(macro_def) = macro_resolve(cx, path_str) { if let Some(macro_def) = macro_resolve(cx, path_str) {
if let Ok(type_path) = resolve(cx, path_str, false) { if let Ok(type_def) = resolve(cx, path_str, false) {
let (type_kind, article, type_disambig) let (type_kind, article, type_disambig)
= type_ns_kind(type_path.def, path_str); = type_ns_kind(type_def.0, path_str);
ambiguity_error(cx, &attrs, path_str, ambiguity_error(cx, &attrs, path_str,
article, type_kind, &type_disambig, article, type_kind, &type_disambig,
"a", "macro", &format!("macro@{}", path_str)); "a", "macro", &format!("macro@{}", path_str));
continue; continue;
} else if let Ok(value_path) = resolve(cx, path_str, true) { } else if let Ok(value_def) = resolve(cx, path_str, true) {
let (value_kind, value_disambig) let (value_kind, value_disambig)
= value_ns_kind(value_path.def, path_str) = value_ns_kind(value_def.0, path_str)
.expect("struct and mod cases should have been \ .expect("struct and mod cases should have been \
caught in previous branch"); caught in previous branch");
ambiguity_error(cx, &attrs, path_str, ambiguity_error(cx, &attrs, path_str,
"a", value_kind, &value_disambig, "a", value_kind, &value_disambig,
"a", "macro", &format!("macro@{}", path_str)); "a", "macro", &format!("macro@{}", path_str));
} }
macro_def (macro_def, None)
} else if let Ok(type_path) = resolve(cx, path_str, false) { } else if let Ok(type_def) = resolve(cx, path_str, false) {
// It is imperative we search for not-a-value first // It is imperative we search for not-a-value first
// Otherwise we will find struct ctors for when we are looking // Otherwise we will find struct ctors for when we are looking
// for structs, and the link won't work. // for structs, and the link won't work.
// if there is something in both namespaces // if there is something in both namespaces
if let Ok(value_path) = resolve(cx, path_str, true) { if let Ok(value_def) = resolve(cx, path_str, true) {
let kind = value_ns_kind(value_path.def, path_str); let kind = value_ns_kind(value_def.0, path_str);
if let Some((value_kind, value_disambig)) = kind { if let Some((value_kind, value_disambig)) = kind {
let (type_kind, article, type_disambig) let (type_kind, article, type_disambig)
= type_ns_kind(type_path.def, path_str); = type_ns_kind(type_def.0, path_str);
ambiguity_error(cx, &attrs, path_str, ambiguity_error(cx, &attrs, path_str,
article, type_kind, &type_disambig, article, type_kind, &type_disambig,
"a", value_kind, &value_disambig); "a", value_kind, &value_disambig);
continue; continue;
} }
} }
type_path.def type_def
} else if let Ok(value_path) = resolve(cx, path_str, true) { } else if let Ok(value_def) = resolve(cx, path_str, true) {
value_path.def value_def
} else { } else {
// this could just be a normal link // this could just be a normal link
continue; continue;
@ -1056,7 +1153,7 @@ impl Clean<Attributes> for [ast::Attribute] {
} }
PathKind::Macro => { PathKind::Macro => {
if let Some(def) = macro_resolve(cx, path_str) { if let Some(def) = macro_resolve(cx, path_str) {
def (def, None)
} else { } else {
continue continue
} }
@ -1066,7 +1163,7 @@ impl Clean<Attributes> for [ast::Attribute] {
let id = register_def(cx, def); let id = register_def(cx, def);
attrs.links.push((link, id)); attrs.links.push((link, id, fragment));
} }
cx.sess().abort_if_errors(); cx.sess().abort_if_errors();


@ -1243,6 +1243,16 @@ impl DocFolder for Cache {
_ => self.stripped_mod, _ => self.stripped_mod,
}; };
// If the impl is from a masked crate or references something from a
// masked crate then remove it completely.
if let clean::ImplItem(ref i) = item.inner {
if self.masked_crates.contains(&item.def_id.krate) ||
i.trait_.def_id().map_or(false, |d| self.masked_crates.contains(&d.krate)) ||
i.for_.def_id().map_or(false, |d| self.masked_crates.contains(&d.krate)) {
return None;
}
}
// Register any generics to their corresponding string. This is used // Register any generics to their corresponding string. This is used
// when pretty-printing types. // when pretty-printing types.
if let Some(generics) = item.inner.generics() { if let Some(generics) = item.inner.generics() {
@ -1257,14 +1267,10 @@ impl DocFolder for Cache {
// Collect all the implementors of traits. // Collect all the implementors of traits.
if let clean::ImplItem(ref i) = item.inner { if let clean::ImplItem(ref i) = item.inner {
if !self.masked_crates.contains(&item.def_id.krate) { if let Some(did) = i.trait_.def_id() {
if let Some(did) = i.trait_.def_id() { self.implementors.entry(did).or_insert(vec![]).push(Impl {
if i.for_.def_id().map_or(true, |d| !self.masked_crates.contains(&d.krate)) { impl_item: item.clone(),
self.implementors.entry(did).or_insert(vec![]).push(Impl { });
impl_item: item.clone(),
});
}
}
} }
} }
@ -1427,24 +1433,20 @@ impl DocFolder for Cache {
// Note: matching twice to restrict the lifetime of the `i` borrow. // Note: matching twice to restrict the lifetime of the `i` borrow.
let mut dids = FxHashSet(); let mut dids = FxHashSet();
if let clean::Item { inner: clean::ImplItem(ref i), .. } = item { if let clean::Item { inner: clean::ImplItem(ref i), .. } = item {
let masked_trait = i.trait_.def_id().map_or(false, match i.for_ {
|d| self.masked_crates.contains(&d.krate)); clean::ResolvedPath { did, .. } |
if !masked_trait { clean::BorrowedRef {
match i.for_ { type_: box clean::ResolvedPath { did, .. }, ..
clean::ResolvedPath { did, .. } | } => {
clean::BorrowedRef { dids.insert(did);
type_: box clean::ResolvedPath { did, .. }, .. }
} => { ref t => {
dids.insert(did); let did = t.primitive_type().and_then(|t| {
} self.primitive_locations.get(&t).cloned()
ref t => { });
let did = t.primitive_type().and_then(|t| {
self.primitive_locations.get(&t).cloned()
});
if let Some(did) = did { if let Some(did) = did {
dids.insert(did); dids.insert(did);
}
} }
} }
} }


@ -33,6 +33,18 @@ use std::fmt;
use std::rc::Rc; use std::rc::Rc;
use std::u32; use std::u32;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Label {
pub ident: Ident,
pub span: Span,
}
impl fmt::Debug for Label {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "label({:?})", self.ident)
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime { pub struct Lifetime {
pub id: NodeId, pub id: NodeId,
@ -1078,23 +1090,23 @@ pub enum ExprKind {
/// A while loop, with an optional label /// A while loop, with an optional label
/// ///
/// `'label: while expr { block }` /// `'label: while expr { block }`
While(P<Expr>, P<Block>, Option<SpannedIdent>), While(P<Expr>, P<Block>, Option<Label>),
/// A while-let loop, with an optional label /// A while-let loop, with an optional label
/// ///
/// `'label: while let pat = expr { block }` /// `'label: while let pat = expr { block }`
/// ///
/// This is desugared to a combination of `loop` and `match` expressions. /// This is desugared to a combination of `loop` and `match` expressions.
WhileLet(P<Pat>, P<Expr>, P<Block>, Option<SpannedIdent>), WhileLet(P<Pat>, P<Expr>, P<Block>, Option<Label>),
/// A for loop, with an optional label /// A for loop, with an optional label
/// ///
/// `'label: for pat in expr { block }` /// `'label: for pat in expr { block }`
/// ///
/// This is desugared to a combination of `loop` and `match` expressions. /// This is desugared to a combination of `loop` and `match` expressions.
ForLoop(P<Pat>, P<Expr>, P<Block>, Option<SpannedIdent>), ForLoop(P<Pat>, P<Expr>, P<Block>, Option<Label>),
/// Conditionless loop (can be exited with break, continue, or return) /// Conditionless loop (can be exited with break, continue, or return)
/// ///
/// `'label: loop { block }` /// `'label: loop { block }`
Loop(P<Block>, Option<SpannedIdent>), Loop(P<Block>, Option<Label>),
/// A `match` block. /// A `match` block.
Match(P<Expr>, Vec<Arm>), Match(P<Expr>, Vec<Arm>),
/// A closure (for example, `move |a, b, c| a + b + c`) /// A closure (for example, `move |a, b, c| a + b + c`)
@ -1133,9 +1145,9 @@ pub enum ExprKind {
/// A referencing operation (`&a` or `&mut a`) /// A referencing operation (`&a` or `&mut a`)
AddrOf(Mutability, P<Expr>), AddrOf(Mutability, P<Expr>),
/// A `break`, with an optional label to break, and an optional expression /// A `break`, with an optional label to break, and an optional expression
Break(Option<SpannedIdent>, Option<P<Expr>>), Break(Option<Label>, Option<P<Expr>>),
/// A `continue`, with an optional label /// A `continue`, with an optional label
Continue(Option<SpannedIdent>), Continue(Option<Label>),
/// A `return`, with an optional value to be returned /// A `return`, with an optional value to be returned
Ret(Option<P<Expr>>), Ret(Option<P<Expr>>),


@ -786,7 +786,7 @@ impl<'a> ExtCtxt<'a> {
/// substitute; we never hit resolve/type-checking so the dummy /// substitute; we never hit resolve/type-checking so the dummy
/// value doesn't have to match anything) /// value doesn't have to match anything)
pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! { pub fn span_fatal<S: Into<MultiSpan>>(&self, sp: S, msg: &str) -> ! {
panic!(self.parse_sess.span_diagnostic.span_fatal(sp, msg)); self.parse_sess.span_diagnostic.span_fatal(sp, msg).raise();
} }
/// Emit `msg` attached to `sp`, without immediately stopping /// Emit `msg` attached to `sp`, without immediately stopping


@ -455,7 +455,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
suggested_limit)); suggested_limit));
err.emit(); err.emit();
self.cx.trace_macros_diag(); self.cx.trace_macros_diag();
panic!(FatalError); FatalError.raise();
} }
Some(result) Some(result)


@ -116,9 +116,10 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::T
while self.p.token != token::Eof { while self.p.token != token::Eof {
match panictry!(self.p.parse_item()) { match panictry!(self.p.parse_item()) {
Some(item) => ret.push(item), Some(item) => ret.push(item),
None => panic!(self.p.diagnostic().span_fatal(self.p.span, None => self.p.diagnostic().span_fatal(self.p.span,
&format!("expected item, found `{}`", &format!("expected item, found `{}`",
self.p.this_token_to_string()))) self.p.this_token_to_string()))
.raise()
} }
} }
Some(ret) Some(ret)


@ -573,7 +573,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
Some(i) => token::NtItem(i), Some(i) => token::NtItem(i),
None => { None => {
p.fatal("expected an item keyword").emit(); p.fatal("expected an item keyword").emit();
panic!(FatalError); FatalError.raise();
} }
}, },
"block" => token::NtBlock(panictry!(p.parse_block())), "block" => token::NtBlock(panictry!(p.parse_block())),
@ -581,7 +581,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
Some(s) => token::NtStmt(s), Some(s) => token::NtStmt(s),
None => { None => {
p.fatal("expected a statement").emit(); p.fatal("expected a statement").emit();
panic!(FatalError); FatalError.raise();
} }
}, },
"pat" => token::NtPat(panictry!(p.parse_pat())), "pat" => token::NtPat(panictry!(p.parse_pat())),
@ -597,7 +597,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
let token_str = pprust::token_to_string(&p.token); let token_str = pprust::token_to_string(&p.token);
p.fatal(&format!("expected ident, found {}", p.fatal(&format!("expected ident, found {}",
&token_str[..])).emit(); &token_str[..])).emit();
panic!(FatalError) FatalError.raise()
} }
}, },
"path" => token::NtPath(panictry!(p.parse_path_common(PathStyle::Type, false))), "path" => token::NtPath(panictry!(p.parse_path_common(PathStyle::Type, false))),


@ -222,10 +222,10 @@ pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item)
Success(m) => m, Success(m) => m,
Failure(sp, tok) => { Failure(sp, tok) => {
let s = parse_failure_msg(tok); let s = parse_failure_msg(tok);
panic!(sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s)); sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
} }
Error(sp, s) => { Error(sp, s) => {
panic!(sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s)); sess.span_diagnostic.span_fatal(sp.substitute_dummy(def.span), &s).raise();
} }
}; };


@ -1954,7 +1954,7 @@ impl FeatureChecker {
.span_note(ca_span, "`#![feature(custom_attribute)]` declared here") .span_note(ca_span, "`#![feature(custom_attribute)]` declared here")
.emit(); .emit();
panic!(FatalError); FatalError.raise();
} }
if let (Some(span), None) = (self.copy_closures, self.clone_closures) { if let (Some(span), None) = (self.copy_closures, self.clone_closures) {
@ -1963,7 +1963,7 @@ impl FeatureChecker {
.span_note(span, "`#![feature(copy_closures)]` declared here") .span_note(span, "`#![feature(copy_closures)]` declared here")
.emit(); .emit();
panic!(FatalError); FatalError.raise();
} }
} }
} }


@ -193,6 +193,10 @@ pub trait Folder : Sized {
noop_fold_macro_def(def, self) noop_fold_macro_def(def, self)
} }
+    fn fold_label(&mut self, label: Label) -> Label {
+        noop_fold_label(label, self)
+    }
+
     fn fold_lifetime(&mut self, l: Lifetime) -> Lifetime {
         noop_fold_lifetime(l, self)
     }
@@ -696,6 +700,13 @@ pub fn noop_fold_generic_params<T: Folder>(
     params.move_map(|p| fld.fold_generic_param(p))
 }
 
+pub fn noop_fold_label<T: Folder>(label: Label, fld: &mut T) -> Label {
+    Label {
+        ident: fld.fold_ident(label.ident),
+        span: fld.new_span(label.span),
+    }
+}
+
 pub fn noop_fold_lifetime<T: Folder>(l: Lifetime, fld: &mut T) -> Lifetime {
     Lifetime {
         id: fld.new_id(l.id),
@@ -1206,30 +1217,26 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span, attrs}: Expr, folder: &mu
                             folder.fold_block(tr),
                             fl.map(|x| folder.fold_expr(x)))
         }
-        ExprKind::While(cond, body, opt_ident) => {
+        ExprKind::While(cond, body, opt_label) => {
            ExprKind::While(folder.fold_expr(cond),
                            folder.fold_block(body),
-                           opt_ident.map(|label| respan(folder.new_span(label.span),
-                                                        folder.fold_ident(label.node))))
+                           opt_label.map(|label| folder.fold_label(label)))
         }
-        ExprKind::WhileLet(pat, expr, body, opt_ident) => {
+        ExprKind::WhileLet(pat, expr, body, opt_label) => {
            ExprKind::WhileLet(folder.fold_pat(pat),
                               folder.fold_expr(expr),
                               folder.fold_block(body),
-                              opt_ident.map(|label| respan(folder.new_span(label.span),
-                                                           folder.fold_ident(label.node))))
+                              opt_label.map(|label| folder.fold_label(label)))
         }
-        ExprKind::ForLoop(pat, iter, body, opt_ident) => {
+        ExprKind::ForLoop(pat, iter, body, opt_label) => {
            ExprKind::ForLoop(folder.fold_pat(pat),
                              folder.fold_expr(iter),
                              folder.fold_block(body),
-                             opt_ident.map(|label| respan(folder.new_span(label.span),
-                                                          folder.fold_ident(label.node))))
+                             opt_label.map(|label| folder.fold_label(label)))
         }
-        ExprKind::Loop(body, opt_ident) => {
+        ExprKind::Loop(body, opt_label) => {
            ExprKind::Loop(folder.fold_block(body),
-                          opt_ident.map(|label| respan(folder.new_span(label.span),
-                                                       folder.fold_ident(label.node))))
+                          opt_label.map(|label| folder.fold_label(label)))
         }
         ExprKind::Match(expr, arms) => {
             ExprKind::Match(folder.fold_expr(expr),
@@ -1278,15 +1285,13 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span, attrs}: Expr, folder: &mu
             });
             ExprKind::Path(qself, folder.fold_path(path))
         }
-        ExprKind::Break(opt_ident, opt_expr) => {
-            ExprKind::Break(opt_ident.map(|label| respan(folder.new_span(label.span),
-                                                         folder.fold_ident(label.node))),
+        ExprKind::Break(opt_label, opt_expr) => {
+            ExprKind::Break(opt_label.map(|label| folder.fold_label(label)),
                             opt_expr.map(|e| folder.fold_expr(e)))
         }
-        ExprKind::Continue(opt_ident) => ExprKind::Continue(opt_ident.map(|label|
-            respan(folder.new_span(label.span),
-                   folder.fold_ident(label.node)))
-        ),
+        ExprKind::Continue(opt_label) => {
+            ExprKind::Continue(opt_label.map(|label| folder.fold_label(label)))
+        }
         ExprKind::Ret(e) => ExprKind::Ret(e.map(|x| folder.fold_expr(x))),
         ExprKind::InlineAsm(asm) => ExprKind::InlineAsm(asm.map(|asm| {
             InlineAsm {

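The hunks above replace the `Option<Spanned<Ident>>` loop labels with a dedicated `Label` node and give `Folder` a single `fold_label` hook that every labelled construct goes through. A minimal, self-contained sketch of that shape (the types below are simplified stand-ins, not the real libsyntax definitions):

```rust
// Simplified model: a label is its own AST node instead of a Spanned<Ident>
// tucked inside each loop/break/continue variant.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Span(u32, u32);

#[derive(Clone, Copy, Debug, PartialEq)]
struct Ident(&'static str);

#[derive(Clone, Copy, Debug, PartialEq)]
struct Label {
    ident: Ident,
    span: Span,
}

// A folder needs only one hook for labels; defaults delegate to the noop walk.
trait Folder: Sized {
    fn new_span(&mut self, sp: Span) -> Span { sp }
    fn fold_ident(&mut self, i: Ident) -> Ident { i }
    fn fold_label(&mut self, label: Label) -> Label {
        noop_fold_label(label, self)
    }
}

fn noop_fold_label<T: Folder>(label: Label, fld: &mut T) -> Label {
    Label {
        ident: fld.fold_ident(label.ident),
        span: fld.new_span(label.span),
    }
}

// Loop/While/Break/Continue folding then collapses to a single map call.
fn fold_opt_label<T: Folder>(opt: Option<Label>, fld: &mut T) -> Option<Label> {
    opt.map(|label| fld.fold_label(label))
}

fn main() {
    struct Noop;
    impl Folder for Noop {}
    let l = Label { ident: Ident("'outer"), span: Span(0, 6) };
    assert_eq!(fold_opt_label(Some(l), &mut Noop), Some(l));
}
```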
View file

@@ -54,7 +54,7 @@ macro_rules! panictry {
             Ok(e) => e,
             Err(mut e) => {
                 e.emit();
-                panic!(FatalError);
+                FatalError.raise()
             }
         }
     })

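This hunk, and the lexer, parser, and proc-macro hunks that follow, swap `panic!(FatalError)` for `FatalError.raise()`, so a fatal diagnostic aborts through one dedicated diverging call instead of an ad-hoc panic at every call site. A minimal sketch of that pattern, using a stand-in `FatalError` type rather than the compiler's real one:

```rust
use std::any::Any;
use std::panic;

// Stand-in for the fatal-error marker; the real one lives in the diagnostics
// crate. `raise` diverges by unwinding with a private payload.
struct FatalError;
struct FatalErrorMarker;

impl FatalError {
    // Callers write `err.raise()` instead of `panic!(FatalError)`; the payload
    // is constructed in exactly one place.
    fn raise(self) -> ! {
        panic::resume_unwind(Box::new(FatalErrorMarker))
    }
}

fn main() {
    let result = panic::catch_unwind(|| {
        FatalError.raise();
    });
    // The driver can recognise the marker payload and exit cleanly.
    let payload: Box<dyn Any + Send> = result.unwrap_err();
    assert!(payload.downcast_ref::<FatalErrorMarker>().is_some());
}
```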
View file

@@ -265,7 +265,7 @@ fn read_block_comment(rdr: &mut StringReader,
     while level > 0 {
         debug!("=== block comment level {}", level);
         if rdr.is_eof() {
-            panic!(rdr.fatal("unterminated block comment"));
+            rdr.fatal("unterminated block comment").raise();
         }
         if rdr.ch_is('\n') {
             trim_whitespace_prefix_and_push_line(&mut lines, curr_line, col);

View file

@@ -90,7 +90,7 @@ impl<'a> StringReader<'a> {
             Ok(tok) => tok,
             Err(_) => {
                 self.emit_fatal_errors();
-                panic!(FatalError);
+                FatalError.raise();
             }
         }
     }
@@ -191,7 +191,7 @@ impl<'a> StringReader<'a> {
         let mut sr = StringReader::new_raw(sess, filemap);
         if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
-            panic!(FatalError);
+            FatalError.raise();
         }
         sr
     }
@@ -216,7 +216,7 @@ impl<'a> StringReader<'a> {
         if sr.advance_token().is_err() {
             sr.emit_fatal_errors();
-            panic!(FatalError);
+            FatalError.raise();
         }
         sr
     }
@@ -647,7 +647,7 @@ impl<'a> StringReader<'a> {
                     "unterminated block comment"
                 };
                 let last_bpos = self.pos;
-                panic!(self.fatal_span_(start_bpos, last_bpos, msg));
+                self.fatal_span_(start_bpos, last_bpos, msg).raise();
             }
             let n = self.ch.unwrap();
             match n {
@@ -808,9 +808,9 @@ impl<'a> StringReader<'a> {
         for _ in 0..n_digits {
             if self.is_eof() {
                 let last_bpos = self.pos;
-                panic!(self.fatal_span_(start_bpos,
-                                        last_bpos,
-                                        "unterminated numeric character escape"));
+                self.fatal_span_(start_bpos,
+                                 last_bpos,
+                                 "unterminated numeric character escape").raise();
             }
             if self.ch_is(delim) {
                 let last_bpos = self.pos;
@@ -1025,9 +1025,9 @@ impl<'a> StringReader<'a> {
                     }
                 },
                 None => {
-                    panic!(self.fatal_span_(start_bpos,
-                                            self.pos,
-                                            "unterminated unicode escape (found EOF)"));
+                    self.fatal_span_(start_bpos,
+                                     self.pos,
+                                     "unterminated unicode escape (found EOF)").raise();
                 }
             }
             self.bump();
@@ -1283,9 +1283,9 @@ impl<'a> StringReader<'a> {
             // lifetimes shouldn't end with a single quote
            // if we find one, then this is an invalid character literal
            if self.ch_is('\'') {
-                panic!(self.fatal_span_verbose(
-                       start_with_quote, self.next_pos,
-                       String::from("character literal may only contain one codepoint")));
+                self.fatal_span_verbose(start_with_quote, self.next_pos,
+                    String::from("character literal may only contain one codepoint"))
+                    .raise();
            }
@@ -1332,9 +1332,8 @@ impl<'a> StringReader<'a> {
                     break;
                 }
             }
-            panic!(self.fatal_span_verbose(
-                   start_with_quote, pos,
-                   String::from("character literal may only contain one codepoint")));
+            self.fatal_span_verbose(start_with_quote, pos,
+                String::from("character literal may only contain one codepoint")).raise();
         }
         let id = if valid {
@@ -1364,9 +1363,9 @@ impl<'a> StringReader<'a> {
         while !self.ch_is('"') {
             if self.is_eof() {
                 let last_bpos = self.pos;
-                panic!(self.fatal_span_(start_bpos,
-                                        last_bpos,
-                                        "unterminated double quote string"));
+                self.fatal_span_(start_bpos,
+                                 last_bpos,
+                                 "unterminated double quote string").raise();
             }
             let ch_start = self.pos;
@@ -1399,15 +1398,15 @@ impl<'a> StringReader<'a> {
         if self.is_eof() {
             let last_bpos = self.pos;
-            panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string"));
+            self.fatal_span_(start_bpos, last_bpos, "unterminated raw string").raise();
         } else if !self.ch_is('"') {
             let last_bpos = self.pos;
             let curr_char = self.ch.unwrap();
-            panic!(self.fatal_span_char(start_bpos,
-                                        last_bpos,
-                                        "found invalid character; only `#` is allowed \
-                                         in raw string delimitation",
-                                        curr_char));
+            self.fatal_span_char(start_bpos,
+                                 last_bpos,
+                                 "found invalid character; only `#` is allowed \
+                                  in raw string delimitation",
+                                 curr_char).raise();
         }
         self.bump();
         let content_start_bpos = self.pos;
@@ -1416,7 +1415,7 @@ impl<'a> StringReader<'a> {
         'outer: loop {
             if self.is_eof() {
                 let last_bpos = self.pos;
-                panic!(self.fatal_span_(start_bpos, last_bpos, "unterminated raw string"));
+                self.fatal_span_(start_bpos, last_bpos, "unterminated raw string").raise();
             }
             // if self.ch_is('"') {
             // content_end_bpos = self.pos;
@@ -1573,9 +1572,9 @@ impl<'a> StringReader<'a> {
             // character before position `start` are an
             // ascii single quote and ascii 'b'.
             let pos = self.pos;
-            panic!(self.fatal_span_verbose(start - BytePos(2),
-                                           pos,
-                                           "unterminated byte constant".to_string()));
+            self.fatal_span_verbose(start - BytePos(2),
+                                    pos,
+                                    "unterminated byte constant".to_string()).raise();
         }
         let id = if valid {
@@ -1599,7 +1598,7 @@ impl<'a> StringReader<'a> {
         while !self.ch_is('"') {
             if self.is_eof() {
                 let pos = self.pos;
-                panic!(self.fatal_span_(start, pos, "unterminated double quote byte string"));
+                self.fatal_span_(start, pos, "unterminated double quote byte string").raise();
             }
             let ch_start = self.pos;
@@ -1631,15 +1630,15 @@ impl<'a> StringReader<'a> {
         if self.is_eof() {
             let pos = self.pos;
-            panic!(self.fatal_span_(start_bpos, pos, "unterminated raw string"));
+            self.fatal_span_(start_bpos, pos, "unterminated raw string").raise();
         } else if !self.ch_is('"') {
             let pos = self.pos;
             let ch = self.ch.unwrap();
-            panic!(self.fatal_span_char(start_bpos,
-                                        pos,
-                                        "found invalid character; only `#` is allowed in raw \
-                                         string delimitation",
-                                        ch));
+            self.fatal_span_char(start_bpos,
+                                 pos,
+                                 "found invalid character; only `#` is allowed in raw \
+                                  string delimitation",
+                                 ch).raise();
         }
         self.bump();
         let content_start_bpos = self.pos;
@@ -1648,7 +1647,7 @@ impl<'a> StringReader<'a> {
             match self.ch {
                 None => {
                     let pos = self.pos;
-                    panic!(self.fatal_span_(start_bpos, pos, "unterminated raw string"))
+                    self.fatal_span_(start_bpos, pos, "unterminated raw string").raise()
                 }
                 Some('"') => {
                     content_end_bpos = self.pos;

View file

@@ -212,8 +212,8 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
         Err(e) => {
             let msg = format!("couldn't read {:?}: {}", path.display(), e);
             match spanopt {
-                Some(sp) => panic!(sess.span_diagnostic.span_fatal(sp, &msg)),
-                None => panic!(sess.span_diagnostic.fatal(&msg))
+                Some(sp) => sess.span_diagnostic.span_fatal(sp, &msg).raise(),
+                None => sess.span_diagnostic.fatal(&msg).raise()
             }
         }
     }

View file

@@ -23,7 +23,7 @@ use ast::{Field, FnDecl};
 use ast::{ForeignItem, ForeignItemKind, FunctionRetTy};
 use ast::GenericParam;
 use ast::{Ident, ImplItem, IsAuto, Item, ItemKind};
-use ast::{Lifetime, LifetimeDef, Lit, LitKind, UintTy};
+use ast::{Label, Lifetime, LifetimeDef, Lit, LitKind, UintTy};
 use ast::Local;
 use ast::MacStmtStyle;
 use ast::Mac_;
@@ -1325,15 +1325,17 @@ impl<'a> Parser<'a> {
         self.check_keyword(keywords::Extern)
     }
 
-    fn get_label(&mut self) -> ast::Ident {
-        match self.token {
+    fn eat_label(&mut self) -> Option<Label> {
+        let ident = match self.token {
             token::Lifetime(ref ident) => *ident,
             token::Interpolated(ref nt) => match nt.0 {
                 token::NtLifetime(lifetime) => lifetime.ident,
-                _ => self.bug("not a lifetime"),
+                _ => return None,
             },
-            _ => self.bug("not a lifetime"),
-        }
+            _ => return None,
+        };
+        self.bump();
+        Some(Label { ident, span: self.prev_span })
     }
 
     /// parse a TyKind::BareFn type:
@@ -2317,11 +2319,8 @@ impl<'a> Parser<'a> {
             let lo = self.prev_span;
             return self.parse_while_expr(None, lo, attrs);
         }
-        if self.token.is_lifetime() {
-            let label = Spanned { node: self.get_label(),
-                                  span: self.span };
-            let lo = self.span;
-            self.bump();
+        if let Some(label) = self.eat_label() {
+            let lo = label.span;
             self.expect(&token::Colon)?;
             if self.eat_keyword(keywords::While) {
                 return self.parse_while_expr(Some(label), lo, attrs)
@@ -2339,16 +2338,8 @@ impl<'a> Parser<'a> {
             return self.parse_loop_expr(None, lo, attrs);
         }
         if self.eat_keyword(keywords::Continue) {
-            let ex = if self.token.is_lifetime() {
-                let ex = ExprKind::Continue(Some(Spanned{
-                    node: self.get_label(),
-                    span: self.span
-                }));
-                self.bump();
-                ex
-            } else {
-                ExprKind::Continue(None)
-            };
+            let label = self.eat_label();
+            let ex = ExprKind::Continue(label);
             let hi = self.prev_span;
             return Ok(self.mk_expr(lo.to(hi), ex, attrs));
         }
@@ -2376,16 +2367,7 @@ impl<'a> Parser<'a> {
                 ex = ExprKind::Ret(None);
             }
         } else if self.eat_keyword(keywords::Break) {
-            let lt = if self.token.is_lifetime() {
-                let spanned_lt = Spanned {
-                    node: self.get_label(),
-                    span: self.span
-                };
-                self.bump();
-                Some(spanned_lt)
-            } else {
-                None
-            };
+            let label = self.eat_label();
             let e = if self.token.can_begin_expr()
                        && !(self.token == token::OpenDelim(token::Brace)
                             && self.restrictions.contains(
@@ -2394,7 +2376,7 @@ impl<'a> Parser<'a> {
             } else {
                 None
             };
-            ex = ExprKind::Break(lt, e);
+            ex = ExprKind::Break(label, e);
             hi = self.prev_span;
         } else if self.eat_keyword(keywords::Yield) {
             if self.token.can_begin_expr() {
@@ -3291,7 +3273,7 @@ impl<'a> Parser<'a> {
     }
 
     /// Parse a 'for' .. 'in' expression ('for' token already eaten)
-    pub fn parse_for_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+    pub fn parse_for_expr(&mut self, opt_label: Option<Label>,
                           span_lo: Span,
                           mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
         // Parse: `for <src_pat> in <src_expr> <src_loop_block>`
@@ -3309,25 +3291,25 @@ impl<'a> Parser<'a> {
         attrs.extend(iattrs);
         let hi = self.prev_span;
-        Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_ident), attrs))
+        Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
     }
 
     /// Parse a 'while' or 'while let' expression ('while' token already eaten)
-    pub fn parse_while_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+    pub fn parse_while_expr(&mut self, opt_label: Option<Label>,
                             span_lo: Span,
                             mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
         if self.token.is_keyword(keywords::Let) {
-            return self.parse_while_let_expr(opt_ident, span_lo, attrs);
+            return self.parse_while_let_expr(opt_label, span_lo, attrs);
         }
         let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
         let (iattrs, body) = self.parse_inner_attrs_and_block()?;
         attrs.extend(iattrs);
         let span = span_lo.to(body.span);
-        return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_ident), attrs));
+        return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs));
     }
 
     /// Parse a 'while let' expression ('while' token already eaten)
-    pub fn parse_while_let_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+    pub fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
                                 span_lo: Span,
                                 mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
         self.expect_keyword(keywords::Let)?;
@@ -3337,17 +3319,17 @@ impl<'a> Parser<'a> {
         let (iattrs, body) = self.parse_inner_attrs_and_block()?;
         attrs.extend(iattrs);
         let span = span_lo.to(body.span);
-        return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_ident), attrs));
+        return Ok(self.mk_expr(span, ExprKind::WhileLet(pat, expr, body, opt_label), attrs));
     }
 
     // parse `loop {...}`, `loop` token already eaten
-    pub fn parse_loop_expr(&mut self, opt_ident: Option<ast::SpannedIdent>,
+    pub fn parse_loop_expr(&mut self, opt_label: Option<Label>,
                            span_lo: Span,
                            mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
         let (iattrs, body) = self.parse_inner_attrs_and_block()?;
         attrs.extend(iattrs);
         let span = span_lo.to(body.span);
-        Ok(self.mk_expr(span, ExprKind::Loop(body, opt_ident), attrs))
+        Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
     }
 
     /// Parse a `do catch {...}` expression (`do catch` token already eaten)

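The parser hunks above fold the repeated "is the next token a lifetime label?" logic into a single `eat_label` helper that returns `Option<Label>` and advances only when a label is present. A toy, self-contained model of that helper over a simplified token type (the names and types here are illustrative, not the real parser's):

```rust
// Toy model: one `eat_label` consumes a lifetime token as a label, otherwise
// leaves the cursor untouched, so break/continue/loop parsing share one path.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Token {
    Lifetime(&'static str),
    Colon,
    KwLoop,
    KwBreak,
}

#[derive(Clone, Copy, Debug, PartialEq)]
struct Label {
    ident: &'static str,
}

struct Parser {
    tokens: Vec<Token>,
    pos: usize,
}

impl Parser {
    fn peek(&self) -> Option<Token> {
        self.tokens.get(self.pos).copied()
    }

    fn bump(&mut self) {
        self.pos += 1;
    }

    // Mirrors the shape of `Parser::eat_label` above.
    fn eat_label(&mut self) -> Option<Label> {
        match self.peek() {
            Some(Token::Lifetime(name)) => {
                self.bump();
                Some(Label { ident: name })
            }
            _ => None,
        }
    }
}

fn main() {
    // `'outer: loop { break 'outer }` as a flat token sequence.
    let mut p = Parser {
        tokens: vec![
            Token::Lifetime("'outer"),
            Token::Colon,
            Token::KwLoop,
            Token::KwBreak,
            Token::Lifetime("'outer"),
        ],
        pos: 0,
    };
    assert_eq!(p.eat_label(), Some(Label { ident: "'outer" }));
    assert_eq!(p.peek(), Some(Token::Colon)); // label consumed, colon is next
}
```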
View file

@@ -2104,9 +2104,9 @@ impl<'a> State<'a> {
             ast::ExprKind::IfLet(ref pat, ref expr, ref blk, ref elseopt) => {
                 self.print_if_let(pat, expr, blk, elseopt.as_ref().map(|e| &**e))?;
             }
-            ast::ExprKind::While(ref test, ref blk, opt_ident) => {
-                if let Some(ident) = opt_ident {
-                    self.print_ident(ident.node)?;
+            ast::ExprKind::While(ref test, ref blk, opt_label) => {
+                if let Some(label) = opt_label {
+                    self.print_ident(label.ident)?;
                     self.word_space(":")?;
                 }
                 self.head("while")?;
@@ -2114,9 +2114,9 @@ impl<'a> State<'a> {
                 self.s.space()?;
                 self.print_block_with_attrs(blk, attrs)?;
             }
-            ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_ident) => {
-                if let Some(ident) = opt_ident {
-                    self.print_ident(ident.node)?;
+            ast::ExprKind::WhileLet(ref pat, ref expr, ref blk, opt_label) => {
+                if let Some(label) = opt_label {
+                    self.print_ident(label.ident)?;
                     self.word_space(":")?;
                 }
                 self.head("while let")?;
@@ -2127,9 +2127,9 @@ impl<'a> State<'a> {
                 self.s.space()?;
                 self.print_block_with_attrs(blk, attrs)?;
             }
-            ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_ident) => {
-                if let Some(ident) = opt_ident {
-                    self.print_ident(ident.node)?;
+            ast::ExprKind::ForLoop(ref pat, ref iter, ref blk, opt_label) => {
+                if let Some(label) = opt_label {
+                    self.print_ident(label.ident)?;
                     self.word_space(":")?;
                 }
                 self.head("for")?;
@@ -2140,9 +2140,9 @@ impl<'a> State<'a> {
                 self.s.space()?;
                 self.print_block_with_attrs(blk, attrs)?;
             }
-            ast::ExprKind::Loop(ref blk, opt_ident) => {
-                if let Some(ident) = opt_ident {
-                    self.print_ident(ident.node)?;
+            ast::ExprKind::Loop(ref blk, opt_label) => {
+                if let Some(label) = opt_label {
+                    self.print_ident(label.ident)?;
                     self.word_space(":")?;
                 }
                 self.head("loop")?;
@@ -2238,11 +2238,11 @@ impl<'a> State<'a> {
             ast::ExprKind::Path(Some(ref qself), ref path) => {
                 self.print_qpath(path, qself, true)?
             }
-            ast::ExprKind::Break(opt_ident, ref opt_expr) => {
+            ast::ExprKind::Break(opt_label, ref opt_expr) => {
                 self.s.word("break")?;
                 self.s.space()?;
-                if let Some(ident) = opt_ident {
-                    self.print_ident(ident.node)?;
+                if let Some(label) = opt_label {
+                    self.print_ident(label.ident)?;
                     self.s.space()?;
                 }
                 if let Some(ref expr) = *opt_expr {
@@ -2250,11 +2250,11 @@ impl<'a> State<'a> {
                     self.s.space()?;
                 }
             }
-            ast::ExprKind::Continue(opt_ident) => {
+            ast::ExprKind::Continue(opt_label) => {
                 self.s.word("continue")?;
                 self.s.space()?;
-                if let Some(ident) = opt_ident {
-                    self.print_ident(ident.node)?;
+                if let Some(label) = opt_label {
+                    self.print_ident(label.ident)?;
                     self.s.space()?
                 }
             }

View file

@@ -123,7 +123,7 @@ impl<'a> fold::Folder for TestHarnessGenerator<'a> {
         match i.node {
             ast::ItemKind::Fn(_, ast::Unsafety::Unsafe, _, _, _, _) => {
                 let diag = self.cx.span_diagnostic;
-                panic!(diag.span_fatal(i.span, "unsafe functions cannot be used for tests"));
+                diag.span_fatal(i.span, "unsafe functions cannot be used for tests").raise();
             }
             _ => {
                 debug!("this is a test function");

View file

@@ -101,6 +101,9 @@ pub trait Visitor<'ast>: Sized {
     fn visit_variant(&mut self, v: &'ast Variant, g: &'ast Generics, item_id: NodeId) {
         walk_variant(self, v, g, item_id)
     }
+    fn visit_label(&mut self, label: &'ast Label) {
+        walk_label(self, label)
+    }
     fn visit_lifetime(&mut self, lifetime: &'ast Lifetime) {
         walk_lifetime(self, lifetime)
     }
@@ -163,25 +166,6 @@ macro_rules! walk_list {
     }
 }
 
-pub fn walk_opt_name<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, opt_name: Option<Name>) {
-    if let Some(name) = opt_name {
-        visitor.visit_name(span, name);
-    }
-}
-
-pub fn walk_opt_ident<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, opt_ident: Option<Ident>) {
-    if let Some(ident) = opt_ident {
-        visitor.visit_ident(span, ident);
-    }
-}
-
-pub fn walk_opt_sp_ident<'a, V: Visitor<'a>>(visitor: &mut V,
-                                             opt_sp_ident: &Option<Spanned<Ident>>) {
-    if let Some(ref sp_ident) = *opt_sp_ident {
-        visitor.visit_ident(sp_ident.span, sp_ident.node);
-    }
-}
-
 pub fn walk_ident<'a, V: Visitor<'a>>(visitor: &mut V, span: Span, ident: Ident) {
     visitor.visit_name(span, ident.name);
 }
@@ -204,6 +188,10 @@ pub fn walk_local<'a, V: Visitor<'a>>(visitor: &mut V, local: &'a Local) {
     walk_list!(visitor, visit_expr, &local.init);
 }
 
+pub fn walk_label<'a, V: Visitor<'a>>(visitor: &mut V, label: &'a Label) {
+    visitor.visit_ident(label.span, label.ident);
+}
+
 pub fn walk_lifetime<'a, V: Visitor<'a>>(visitor: &mut V, lifetime: &'a Lifetime) {
     visitor.visit_ident(lifetime.span, lifetime.ident);
 }
@@ -226,7 +214,9 @@ pub fn walk_item<'a, V: Visitor<'a>>(visitor: &mut V, item: &'a Item) {
     visitor.visit_ident(item.span, item.ident);
     match item.node {
         ItemKind::ExternCrate(opt_name) => {
-            walk_opt_name(visitor, item.span, opt_name)
+            if let Some(name) = opt_name {
+                visitor.visit_name(item.span, name);
+            }
         }
         ItemKind::Use(ref use_tree) => {
             visitor.visit_use_tree(use_tree, item.id, false)
@@ -622,7 +612,9 @@ pub fn walk_struct_def<'a, V: Visitor<'a>>(visitor: &mut V, struct_definition: &
 pub fn walk_struct_field<'a, V: Visitor<'a>>(visitor: &mut V, struct_field: &'a StructField) {
     visitor.visit_vis(&struct_field.vis);
-    walk_opt_ident(visitor, struct_field.span, struct_field.ident);
+    if let Some(ident) = struct_field.ident {
+        visitor.visit_ident(struct_field.span, ident);
+    }
     visitor.visit_ty(&struct_field.ty);
     walk_list!(visitor, visit_attribute, &struct_field.attrs);
 }
@@ -708,10 +700,10 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
             visitor.visit_block(if_block);
             walk_list!(visitor, visit_expr, optional_else);
         }
-        ExprKind::While(ref subexpression, ref block, ref opt_sp_ident) => {
+        ExprKind::While(ref subexpression, ref block, ref opt_label) => {
+            walk_list!(visitor, visit_label, opt_label);
             visitor.visit_expr(subexpression);
             visitor.visit_block(block);
-            walk_opt_sp_ident(visitor, opt_sp_ident);
         }
         ExprKind::IfLet(ref pattern, ref subexpression, ref if_block, ref optional_else) => {
             visitor.visit_pat(pattern);
@@ -719,21 +711,21 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
             visitor.visit_block(if_block);
             walk_list!(visitor, visit_expr, optional_else);
         }
-        ExprKind::WhileLet(ref pattern, ref subexpression, ref block, ref opt_sp_ident) => {
+        ExprKind::WhileLet(ref pattern, ref subexpression, ref block, ref opt_label) => {
+            walk_list!(visitor, visit_label, opt_label);
            visitor.visit_pat(pattern);
            visitor.visit_expr(subexpression);
            visitor.visit_block(block);
-            walk_opt_sp_ident(visitor, opt_sp_ident);
         }
-        ExprKind::ForLoop(ref pattern, ref subexpression, ref block, ref opt_sp_ident) => {
+        ExprKind::ForLoop(ref pattern, ref subexpression, ref block, ref opt_label) => {
+            walk_list!(visitor, visit_label, opt_label);
            visitor.visit_pat(pattern);
            visitor.visit_expr(subexpression);
            visitor.visit_block(block);
-            walk_opt_sp_ident(visitor, opt_sp_ident);
         }
-        ExprKind::Loop(ref block, ref opt_sp_ident) => {
+        ExprKind::Loop(ref block, ref opt_label) => {
+            walk_list!(visitor, visit_label, opt_label);
            visitor.visit_block(block);
-            walk_opt_sp_ident(visitor, opt_sp_ident);
         }
         ExprKind::Match(ref subexpression, ref arms) => {
             visitor.visit_expr(subexpression);
@@ -775,12 +767,12 @@ pub fn walk_expr<'a, V: Visitor<'a>>(visitor: &mut V, expression: &'a Expr) {
         }
         visitor.visit_path(path, expression.id)
     }
-        ExprKind::Break(ref opt_sp_ident, ref opt_expr) => {
-            walk_opt_sp_ident(visitor, opt_sp_ident);
+        ExprKind::Break(ref opt_label, ref opt_expr) => {
+            walk_list!(visitor, visit_label, opt_label);
             walk_list!(visitor, visit_expr, opt_expr);
         }
-        ExprKind::Continue(ref opt_sp_ident) => {
-            walk_opt_sp_ident(visitor, opt_sp_ident);
+        ExprKind::Continue(ref opt_label) => {
+            walk_list!(visitor, visit_label, opt_label);
         }
         ExprKind::Ret(ref optional_expression) => {
             walk_list!(visitor, visit_expr, optional_expression);

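The visitor hunks above route `Option<Label>` through a single `visit_label`/`walk_label` hook (driven by `walk_list!`) instead of the removed `walk_opt_sp_ident` helper. A simplified, runnable model of that dispatch, with stand-in AST types rather than the real libsyntax ones:

```rust
// Simplified model: every labelled construct hands its label to one hook,
// which custom visitors can override.
#[derive(Clone, Copy, Debug, PartialEq)]
struct Label {
    ident: &'static str,
}

enum Expr {
    Loop(Box<Expr>, Option<Label>),
    Break(Option<Label>),
}

trait Visitor: Sized {
    fn visit_label(&mut self, label: &Label) {
        // Default does nothing; overriders get every label in the tree.
        let _ = label;
    }
    fn visit_expr(&mut self, e: &Expr) {
        walk_expr(self, e)
    }
}

fn walk_expr<V: Visitor>(v: &mut V, e: &Expr) {
    match e {
        Expr::Loop(body, opt_label) => {
            if let Some(label) = opt_label {
                v.visit_label(label);
            }
            v.visit_expr(body);
        }
        Expr::Break(opt_label) => {
            if let Some(label) = opt_label {
                v.visit_label(label);
            }
        }
    }
}

// Example visitor: collect every label mentioned in an expression tree.
struct CollectLabels(Vec<&'static str>);

impl Visitor for CollectLabels {
    fn visit_label(&mut self, label: &Label) {
        self.0.push(label.ident);
    }
}

fn main() {
    let e = Expr::Loop(
        Box::new(Expr::Break(Some(Label { ident: "'outer" }))),
        Some(Label { ident: "'outer" }),
    );
    let mut c = CollectLabels(Vec::new());
    c.visit_expr(&e);
    assert_eq!(c.0, vec!["'outer", "'outer"]);
}
```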
View file

@@ -92,7 +92,7 @@ impl MultiItemModifier for ProcMacroDerive {
                 }
 
                 err.emit();
-                panic!(FatalError);
+                FatalError.raise();
             }
         };
@@ -103,13 +103,13 @@ impl MultiItemModifier for ProcMacroDerive {
             // fail if there have been errors emitted
             Ok(_) if ecx.parse_sess.span_diagnostic.err_count() > error_count_before => {
                 ecx.struct_span_fatal(span, msg).emit();
-                panic!(FatalError);
+                FatalError.raise();
             }
             Ok(new_items) => new_items.into_iter().map(Annotatable::Item).collect(),
             Err(_) => {
                 // FIXME: handle this better
                 ecx.struct_span_fatal(span, msg).emit();
-                panic!(FatalError);
+                FatalError.raise();
             }
         }
     })

View file

@@ -51,7 +51,7 @@ impl base::AttrProcMacro for AttrProcMacro {
                 }
 
                 err.emit();
-                panic!(FatalError);
+                FatalError.raise();
             }
         }
     }
@@ -86,7 +86,7 @@ impl base::ProcMacro for BangProcMacro {
                 }
 
                 err.emit();
-                panic!(FatalError);
+                FatalError.raise();
             }
         }
     }

@@ -1 +1 @@
-Subproject commit 2717444753318e461e0c3b30dacd03ffbac96903
+Subproject commit bc344d5bc23c61ff9baf82d268a0edf199933cc3

View file

@@ -42,7 +42,6 @@ struct RustArchiveIterator {
 enum class LLVMRustArchiveKind {
   Other,
   GNU,
-  MIPS64,
   BSD,
   COFF,
 };
@@ -51,8 +50,6 @@ static Archive::Kind fromRust(LLVMRustArchiveKind Kind) {
   switch (Kind) {
   case LLVMRustArchiveKind::GNU:
     return Archive::K_GNU;
-  case LLVMRustArchiveKind::MIPS64:
-    return Archive::K_MIPS64;
   case LLVMRustArchiveKind::BSD:
     return Archive::K_BSD;
   case LLVMRustArchiveKind::COFF:
@@ -235,9 +232,16 @@ LLVMRustWriteArchive(char *Dst, size_t NumMembers,
       Members.push_back(std::move(*MOrErr));
     }
   }
-  auto Pair = writeArchive(Dst, Members, WriteSymbtab, Kind, true, false);
-  if (!Pair.second)
+  auto Result = writeArchive(Dst, Members, WriteSymbtab, Kind, true, false);
+#if LLVM_VERSION_GE(6, 0)
+  if (!Result)
     return LLVMRustResult::Success;
-  LLVMRustSetLastError(Pair.second.message().c_str());
+  LLVMRustSetLastError(toString(std::move(Result)).c_str());
+#else
+  if (!Result.second)
+    return LLVMRustResult::Success;
+  LLVMRustSetLastError(Result.second.message().c_str());
+#endif
   return LLVMRustResult::Failure;
 }

View file

@@ -23,9 +23,15 @@
 #include "llvm/Support/FileSystem.h"
 #include "llvm/Support/Host.h"
 #include "llvm/Target/TargetMachine.h"
-#include "llvm/Target/TargetSubtargetInfo.h"
 #include "llvm/Transforms/IPO/PassManagerBuilder.h"
+#if LLVM_VERSION_GE(6, 0)
+#include "llvm/CodeGen/TargetSubtargetInfo.h"
+#include "llvm/IR/IntrinsicInst.h"
+#else
+#include "llvm/Target/TargetSubtargetInfo.h"
+#endif
 #if LLVM_VERSION_GE(4, 0)
 #include "llvm/Transforms/IPO/AlwaysInliner.h"
 #include "llvm/Transforms/IPO/FunctionImport.h"
@@ -210,20 +216,15 @@ extern "C" bool LLVMRustHasFeature(LLVMTargetMachineRef TM,
 enum class LLVMRustCodeModel {
   Other,
-  Default,
-  JITDefault,
   Small,
   Kernel,
   Medium,
   Large,
+  None,
 };
 
 static CodeModel::Model fromRust(LLVMRustCodeModel Model) {
   switch (Model) {
-  case LLVMRustCodeModel::Default:
-    return CodeModel::Default;
-  case LLVMRustCodeModel::JITDefault:
-    return CodeModel::JITDefault;
   case LLVMRustCodeModel::Small:
     return CodeModel::Small;
   case LLVMRustCodeModel::Kernel:
@@ -360,7 +361,6 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
     bool TrapUnreachable,
     bool Singlethread) {
 
-  auto CM = fromRust(RustCM);
   auto OptLevel = fromRust(RustOptLevel);
   auto RM = fromRust(RustReloc);
 
@@ -399,6 +399,13 @@ extern "C" LLVMTargetMachineRef LLVMRustCreateTargetMachine(
     Options.ThreadModel = ThreadModel::Single;
   }
 
+#if LLVM_VERSION_GE(6, 0)
+  Optional<CodeModel::Model> CM;
+#else
+  CodeModel::Model CM = CodeModel::Model::Default;
+#endif
+  if (RustCM != LLVMRustCodeModel::None)
+    CM = fromRust(RustCM);
   TargetMachine *TM = TheTarget->createTargetMachine(
       Trip.getTriple(), RealCPU, Feature, Options, RM, CM, OptLevel);
   return wrap(TM);

View file

@@ -315,7 +315,11 @@ extern "C" void LLVMRustRemoveFunctionAttributes(LLVMValueRef Fn,
 // enable fpmath flag UnsafeAlgebra
 extern "C" void LLVMRustSetHasUnsafeAlgebra(LLVMValueRef V) {
   if (auto I = dyn_cast<Instruction>(unwrap<Value>(V))) {
+#if LLVM_VERSION_GE(6, 0)
+    I->setFast(true);
+#else
     I->setHasUnsafeAlgebra(true);
+#endif
   }
 }
@@ -457,9 +461,13 @@ enum class LLVMRustDIFlags : uint32_t {
   FlagStaticMember = (1 << 12),
   FlagLValueReference = (1 << 13),
   FlagRValueReference = (1 << 14),
-  FlagMainSubprogram = (1 << 21),
+  FlagExternalTypeRef = (1 << 15),
+  FlagIntroducedVirtual = (1 << 18),
+  FlagBitField = (1 << 19),
+  FlagNoReturn = (1 << 20),
+  FlagMainSubprogram = (1 << 21),
   // Do not add values that are not supported by the minimum LLVM
-  // version we support!
+  // version we support! see llvm/include/llvm/IR/DebugInfoFlags.def
 };
 
 inline LLVMRustDIFlags operator&(LLVMRustDIFlags A, LLVMRustDIFlags B) {
@@ -544,7 +552,19 @@ static unsigned fromRust(LLVMRustDIFlags Flags) {
   if (isSet(Flags & LLVMRustDIFlags::FlagRValueReference)) {
     Result |= DINode::DIFlags::FlagRValueReference;
   }
+  if (isSet(Flags & LLVMRustDIFlags::FlagExternalTypeRef)) {
+    Result |= DINode::DIFlags::FlagExternalTypeRef;
+  }
+  if (isSet(Flags & LLVMRustDIFlags::FlagIntroducedVirtual)) {
+    Result |= DINode::DIFlags::FlagIntroducedVirtual;
+  }
+  if (isSet(Flags & LLVMRustDIFlags::FlagBitField)) {
+    Result |= DINode::DIFlags::FlagBitField;
+  }
 #if LLVM_RUSTLLVM || LLVM_VERSION_GE(4, 0)
+  if (isSet(Flags & LLVMRustDIFlags::FlagNoReturn)) {
+    Result |= DINode::DIFlags::FlagNoReturn;
+  }
   if (isSet(Flags & LLVMRustDIFlags::FlagMainSubprogram)) {
     Result |= DINode::DIFlags::FlagMainSubprogram;
   }

View file

@@ -1,4 +1,4 @@
 # If this file is modified, then llvm will be (optionally) cleaned and then rebuilt.
 # The actual contents of this file do not matter, but to trigger a change on the
 # build bots then the contents should be changed so git updates the mtime.
-2017-11-08
+2018-01-25

View file

@@ -0,0 +1,24 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// min-llvm-version 4.0
// compile-flags: -g -C no-prepopulate-passes
// CHECK: {{.*}}DISubprogram{{.*}}name: "foo"{{.*}}DIFlagNoReturn
fn foo() -> ! {
loop {}
}
pub fn main() {
foo();
}

View file

@@ -22,9 +22,7 @@ pub struct i8x8(u64);
 
 #[no_mangle]
 pub fn a(a: &mut i8x8, b: i8x8) -> i8x8 {
-// CHECK-LABEL: define x86_mmx @a(x86_mmx*{{.*}}, x86_mmx{{.*}})
-// CHECK: store x86_mmx %b, x86_mmx* %a
-// CHECK: ret x86_mmx %b
+// CHECK-LABEL: define void @a(x86_mmx*{{.*}}, x86_mmx*{{.*}}, x86_mmx*{{.*}})
     *a = b;
     return b
 }

View file

@@ -17,16 +17,22 @@ extern {
 fn main() {
     unsafe {
         printf(::std::ptr::null(), 0f32);
-        //~^ ERROR can't pass `f32` to variadic function, cast to `c_double` [E0617]
+        //~^ ERROR can't pass `f32` to variadic function
+        //~| HELP cast the value to `c_double`
         printf(::std::ptr::null(), 0i8);
-        //~^ ERROR can't pass `i8` to variadic function, cast to `c_int` [E0617]
+        //~^ ERROR can't pass `i8` to variadic function
+        //~| HELP cast the value to `c_int`
         printf(::std::ptr::null(), 0i16);
-        //~^ ERROR can't pass `i16` to variadic function, cast to `c_int` [E0617]
+        //~^ ERROR can't pass `i16` to variadic function
+        //~| HELP cast the value to `c_int`
        printf(::std::ptr::null(), 0u8);
-        //~^ ERROR can't pass `u8` to variadic function, cast to `c_uint` [E0617]
+        //~^ ERROR can't pass `u8` to variadic function
+        //~| HELP cast the value to `c_uint`
        printf(::std::ptr::null(), 0u16);
-        //~^ ERROR can't pass `u16` to variadic function, cast to `c_uint` [E0617]
+        //~^ ERROR can't pass `u16` to variadic function
+        //~| HELP cast the value to `c_uint`
        printf(::std::ptr::null(), printf);
-        //~^ ERROR can't pass `unsafe extern "C" fn(*const i8, ...) {printf}` to variadic function, cast to `unsafe extern "C" fn(*const i8, ...)` [E0617]
+        //~^ ERROR can't pass `unsafe extern "C" fn(*const i8, ...) {printf}` to variadic function
+        //~| HELP cast the value to `unsafe extern "C" fn(*const i8, ...)`
     }
 }

View file

@@ -23,7 +23,7 @@ struct Foo<T> {
     f: T
 }
 
-#[rustc_if_this_changed]
+#[rustc_if_this_changed(Krate)]
 type TypeAlias<T> = Foo<T>;
 
 #[rustc_then_this_would_need(ItemVariances)] //~ ERROR OK

View file

@@ -0,0 +1,27 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(never_type)]
enum Helper<T, U> {
T(T, [!; 0]),
#[allow(dead_code)]
U(U),
}
fn transmute<T, U>(t: T) -> U {
let Helper::U(u) = Helper::T(t, []);
//~^ ERROR refutable pattern in local binding: `T(_, _)` not covered
u
}
fn main() {
println!("{:?}", transmute::<&str, (*const u8, u64)>("type safety"));
}

View file

@@ -0,0 +1,39 @@
// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// Test that attempts to construct infinite types via impl trait fail
// in a graceful way.
//
// Regression test for #38064.
// error-pattern:overflow evaluating the requirement `impl Quux`
#![feature(conservative_impl_trait)]
trait Quux {}
fn foo() -> impl Quux {
struct Foo<T>(T);
impl<T> Quux for Foo<T> {}
Foo(bar())
}
fn bar() -> impl Quux {
struct Bar<T>(T);
impl<T> Quux for Bar<T> {}
Bar(foo())
}
// effectively:
// struct Foo(Bar);
// struct Bar(Foo);
// should produce an error about infinite size
fn main() { foo(); }

View file

@@ -17,6 +17,7 @@ fn bar(_: *const u8) {}
 fn main() {
     unsafe {
         foo(0, bar);
-        //~^ ERROR can't pass `fn(*const u8) {bar}` to variadic function, cast to `fn(*const u8)`
+        //~^ ERROR can't pass `fn(*const u8) {bar}` to variadic function
+        //~| HELP cast the value to `fn(*const u8)`
     }
 }

View file

@@ -9,7 +9,7 @@
 // except according to those terms.
 
 // aux-build:needs-panic-runtime.rs
-// aux-build:runtime-depending-on-panic-runtime.rs
+// aux-build:depends.rs
 // error-pattern:cannot depend on a crate that needs a panic runtime
 
-extern crate runtime_depending_on_panic_runtime;
+extern crate depends;

View file

@@ -16,11 +16,11 @@ all:
	$(RUSTC) -C extra-filename=foo dummy.rs 2>&1
	#Option taking no argument
	$(RUSTC) -C lto= dummy.rs 2>&1 | \
-		$(CGREP) 'codegen option `lto` takes no value'
+		$(CGREP) 'codegen option `lto` - one of `thin`, `fat`, or'
	$(RUSTC) -C lto=1 dummy.rs 2>&1 | \
-		$(CGREP) 'codegen option `lto` takes no value'
+		$(CGREP) 'codegen option `lto` - one of `thin`, `fat`, or'
	$(RUSTC) -C lto=foo dummy.rs 2>&1 | \
-		$(CGREP) 'codegen option `lto` takes no value'
+		$(CGREP) 'codegen option `lto` - one of `thin`, `fat`, or'
	$(RUSTC) -C lto dummy.rs

	# Should not link dead code...

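The makefile above now expects `-C lto` to accept a value such as `thin` or `fat` instead of rejecting any value, while a bare `-C lto` still compiles. A rough sketch of that kind of option parsing; the enum variants and error text below are illustrative stand-ins, not rustc's actual implementation:

```rust
// Illustrative only: model an `-C lto[=value]` option that now takes a value.
#[derive(Debug, PartialEq)]
enum Lto {
    Thin,
    Fat,
}

fn parse_lto(value: Option<&str>) -> Result<Lto, String> {
    match value {
        // Bare `-C lto` keeps meaning "enable full LTO".
        None | Some("fat") => Ok(Lto::Fat),
        Some("thin") => Ok(Lto::Thin),
        Some(other) => Err(format!(
            "codegen option `lto` - one of `thin`, `fat`, or omitted, found `{}`",
            other
        )),
    }
}

fn main() {
    assert_eq!(parse_lto(None), Ok(Lto::Fat));
    assert_eq!(parse_lto(Some("thin")), Ok(Lto::Thin));
    assert!(parse_lto(Some("1")).is_err()); // mirrors the `-C lto=1` test case
}
```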
View file

@@ -0,0 +1,7 @@
-include ../tools.mk
all:
ifeq ($(TARGET),x86_64-unknown-linux-gnu)
$(RUSTC) hello.rs -C no_integrated_as
$(call RUN,hello)
endif

View file

@@ -0,0 +1,13 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
fn main() {
println!("Hello, world!");
}

View file

@@ -14,10 +14,10 @@
 #![feature(attr_literals)]
 
 #[repr(align(16))]
-pub struct A {
-    y: i64,
-}
+pub struct A(i64);
 
 pub extern "C" fn foo(x: A) {}
 
-fn main() {}
+fn main() {
+    foo(A(0));
+}

View file

@@ -28,6 +28,9 @@ pub fn main() { }
 #[cfg(target_arch = "mips64")]
 pub fn main() { }
 
+#[cfg(target_arch = "powerpc")]
+pub fn main() { }
+
 #[cfg(target_arch = "powerpc64")]
 pub fn main() { }

View file

@@ -0,0 +1,17 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// compile-flags: -Clto=fat
// no-prefer-dynamic
fn main() {
println!("hello!");
}

View file

@@ -19,7 +19,8 @@ mod rusti {
     }
 }
 
-#[cfg(any(target_os = "cloudabi",
+#[cfg(any(target_os = "android",
+          target_os = "cloudabi",
           target_os = "dragonfly",
           target_os = "emscripten",
           target_os = "freebsd",
@@ -80,15 +81,3 @@ mod m {
         }
     }
 }
-
-#[cfg(target_os = "android")]
-mod m {
-    #[main]
-    #[cfg(any(target_arch = "arm", target_arch = "aarch64"))]
-    pub fn main() {
-        unsafe {
-            assert_eq!(::rusti::pref_align_of::<u64>(), 8);
-            assert_eq!(::rusti::min_align_of::<u64>(), 8);
-        }
-    }
-}

View file

@@ -0,0 +1,23 @@
// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-emscripten
#![feature(i128_type)]
#[repr(C)]
pub struct Foo(i128);
#[no_mangle]
pub extern "C" fn foo(x: Foo) -> Foo { x }
fn main() {
foo(Foo(1));
}

Some files were not shown because too many files have changed in this diff.