1
Fork 0

Merge branch 'master' into def_span

This commit is contained in:
Artem Chernyak 2017-04-28 22:44:34 -05:00 committed by GitHub
commit 2f73b173cb
64 changed files with 1120 additions and 954 deletions

16
src/Cargo.lock generated
View file

@ -366,14 +366,6 @@ dependencies = [
"bitflags 0.8.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "qemu-test-client"
version = "0.1.0"
[[package]]
name = "qemu-test-server"
version = "0.1.0"
[[package]]
name = "quick-error"
version = "1.1.0"
@ -403,6 +395,14 @@ name = "regex-syntax"
version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "remote-test-client"
version = "0.1.0"
[[package]]
name = "remote-test-server"
version = "0.1.0"
[[package]]
name = "rls-data"
version = "0.1.0"

View file

@ -11,8 +11,8 @@ members = [
"tools/rustbook",
"tools/tidy",
"tools/build-manifest",
"tools/qemu-test-client",
"tools/qemu-test-server",
"tools/remote-test-client",
"tools/remote-test-server",
]
# Curiously, compiletest will segfault if compiled with opt-level=3 on 64-bit

View file

@ -28,7 +28,7 @@ use {Build, Compiler, Mode};
use dist;
use util::{self, dylib_path, dylib_path_var, exe};
const ADB_TEST_DIR: &'static str = "/data/tmp";
const ADB_TEST_DIR: &'static str = "/data/tmp/work";
/// The two modes of the test runner; tests or benchmarks.
#[derive(Copy, Clone)]
@ -243,10 +243,10 @@ pub fn compiletest(build: &Build,
.arg("--llvm-cxxflags").arg("");
}
if build.qemu_rootfs(target).is_some() {
cmd.arg("--qemu-test-client")
if build.remote_tested(target) {
cmd.arg("--remote-test-client")
.arg(build.tool(&Compiler::new(0, &build.config.build),
"qemu-test-client"));
"remote-test-client"));
}
// Running a C compiler on MSVC requires a few env vars to be set, to be
@ -445,9 +445,7 @@ pub fn krate(build: &Build,
dylib_path.insert(0, build.sysroot_libdir(&compiler, target));
cargo.env(dylib_path_var(), env::join_paths(&dylib_path).unwrap());
if target.contains("android") ||
target.contains("emscripten") ||
build.qemu_rootfs(target).is_some() {
if target.contains("emscripten") || build.remote_tested(target) {
cargo.arg("--no-run");
}
@ -459,75 +457,24 @@ pub fn krate(build: &Build,
let _time = util::timeit();
if target.contains("android") {
build.run(&mut cargo);
krate_android(build, &compiler, target, mode);
} else if target.contains("emscripten") {
if target.contains("emscripten") {
build.run(&mut cargo);
krate_emscripten(build, &compiler, target, mode);
} else if build.qemu_rootfs(target).is_some() {
} else if build.remote_tested(target) {
build.run(&mut cargo);
krate_qemu(build, &compiler, target, mode);
krate_remote(build, &compiler, target, mode);
} else {
cargo.args(&build.flags.cmd.test_args());
build.run(&mut cargo);
}
}
fn krate_android(build: &Build,
compiler: &Compiler,
target: &str,
mode: Mode) {
let mut tests = Vec::new();
let out_dir = build.cargo_out(compiler, mode, target);
find_tests(&out_dir, target, &mut tests);
find_tests(&out_dir.join("deps"), target, &mut tests);
for test in tests {
build.run(Command::new("adb").arg("push").arg(&test).arg(ADB_TEST_DIR));
let test_file_name = test.file_name().unwrap().to_string_lossy();
let log = format!("{}/check-stage{}-T-{}-H-{}-{}.log",
ADB_TEST_DIR,
compiler.stage,
target,
compiler.host,
test_file_name);
let quiet = if build.config.quiet_tests { "--quiet" } else { "" };
let program = format!("(cd {dir}; \
LD_LIBRARY_PATH=./{target} ./{test} \
--logfile {log} \
{quiet} \
{args})",
dir = ADB_TEST_DIR,
target = target,
test = test_file_name,
log = log,
quiet = quiet,
args = build.flags.cmd.test_args().join(" "));
let output = output(Command::new("adb").arg("shell").arg(&program));
println!("{}", output);
t!(fs::create_dir_all(build.out.join("tmp")));
build.run(Command::new("adb")
.arg("pull")
.arg(&log)
.arg(build.out.join("tmp")));
build.run(Command::new("adb").arg("shell").arg("rm").arg(&log));
if !output.contains("result: ok") {
panic!("some tests failed");
}
}
}
fn krate_emscripten(build: &Build,
compiler: &Compiler,
target: &str,
mode: Mode) {
let mut tests = Vec::new();
let out_dir = build.cargo_out(compiler, mode, target);
find_tests(&out_dir, target, &mut tests);
find_tests(&out_dir.join("deps"), target, &mut tests);
for test in tests {
@ -543,17 +490,16 @@ fn krate_emscripten(build: &Build,
}
}
fn krate_qemu(build: &Build,
compiler: &Compiler,
target: &str,
mode: Mode) {
fn krate_remote(build: &Build,
compiler: &Compiler,
target: &str,
mode: Mode) {
let mut tests = Vec::new();
let out_dir = build.cargo_out(compiler, mode, target);
find_tests(&out_dir, target, &mut tests);
find_tests(&out_dir.join("deps"), target, &mut tests);
let tool = build.tool(&Compiler::new(0, &build.config.build),
"qemu-test-client");
"remote-test-client");
for test in tests {
let mut cmd = Command::new(&tool);
cmd.arg("run")
@ -566,7 +512,6 @@ fn krate_qemu(build: &Build,
}
}
fn find_tests(dir: &Path,
target: &str,
dst: &mut Vec<PathBuf>) {
@ -585,59 +530,28 @@ fn find_tests(dir: &Path,
}
pub fn emulator_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
if target.contains("android") {
android_copy_libs(build, compiler, target)
} else if let Some(s) = build.qemu_rootfs(target) {
qemu_copy_libs(build, compiler, target, s)
if !build.remote_tested(target) {
return
}
}
fn android_copy_libs(build: &Build, compiler: &Compiler, target: &str) {
println!("Android copy libs to emulator ({})", target);
build.run(Command::new("adb").arg("wait-for-device"));
build.run(Command::new("adb").arg("remount"));
build.run(Command::new("adb").args(&["shell", "rm", "-r", ADB_TEST_DIR]));
build.run(Command::new("adb").args(&["shell", "mkdir", ADB_TEST_DIR]));
build.run(Command::new("adb")
.arg("push")
.arg(build.src.join("src/etc/adb_run_wrapper.sh"))
.arg(ADB_TEST_DIR));
let target_dir = format!("{}/{}", ADB_TEST_DIR, target);
build.run(Command::new("adb").args(&["shell", "mkdir", &target_dir]));
for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {
let f = t!(f);
let name = f.file_name().into_string().unwrap();
if util::is_dylib(&name) {
build.run(Command::new("adb")
.arg("push")
.arg(f.path())
.arg(&target_dir));
}
}
}
fn qemu_copy_libs(build: &Build,
compiler: &Compiler,
target: &str,
rootfs: &Path) {
println!("QEMU copy libs to emulator ({})", target);
assert!(target.starts_with("arm"), "only works with arm for now");
println!("REMOTE copy libs to emulator ({})", target);
t!(fs::create_dir_all(build.out.join("tmp")));
// Copy our freshly compiled test server over to the rootfs
let server = build.cargo_out(compiler, Mode::Tool, target)
.join(exe("qemu-test-server", target));
t!(fs::copy(&server, rootfs.join("testd")));
.join(exe("remote-test-server", target));
// Spawn the emulator and wait for it to come online
let tool = build.tool(&Compiler::new(0, &build.config.build),
"qemu-test-client");
build.run(Command::new(&tool)
.arg("spawn-emulator")
.arg(rootfs)
.arg(build.out.join("tmp")));
"remote-test-client");
let mut cmd = Command::new(&tool);
cmd.arg("spawn-emulator")
.arg(target)
.arg(&server)
.arg(build.out.join("tmp"));
if let Some(rootfs) = build.qemu_rootfs(target) {
cmd.arg(rootfs);
}
build.run(&mut cmd);
// Push all our dylibs to the emulator
for f in t!(build.sysroot_libdir(compiler, target).read_dir()) {

View file

@ -945,6 +945,12 @@ impl Build {
.map(|p| &**p)
}
/// Returns whether the target will be tested using the `remote-test-client`
/// and `remote-test-server` binaries.
fn remote_tested(&self, target: &str) -> bool {
self.qemu_rootfs(target).is_some() || target.contains("android")
}
/// Returns the root of the "rootfs" image that this target will be using,
/// if one was configured.
///

View file

@ -513,15 +513,15 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
rules.test("emulator-copy-libs", "path/to/nowhere")
.dep(|s| s.name("libtest"))
.dep(move |s| {
if build.qemu_rootfs(s.target).is_some() {
s.name("tool-qemu-test-client").target(s.host).stage(0)
if build.remote_tested(s.target) {
s.name("tool-remote-test-client").target(s.host).stage(0)
} else {
Step::noop()
}
})
.dep(move |s| {
if build.qemu_rootfs(s.target).is_some() {
s.name("tool-qemu-test-server")
if build.remote_tested(s.target) {
s.name("tool-remote-test-server")
} else {
Step::noop()
}
@ -566,14 +566,14 @@ pub fn build_rules<'a>(build: &'a Build) -> Rules {
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "build-manifest"));
rules.build("tool-qemu-test-server", "src/tools/qemu-test-server")
rules.build("tool-remote-test-server", "src/tools/remote-test-server")
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "qemu-test-server"));
rules.build("tool-qemu-test-client", "src/tools/qemu-test-client")
.run(move |s| compile::tool(build, s.stage, s.target, "remote-test-server"));
rules.build("tool-remote-test-client", "src/tools/remote-test-client")
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))
.run(move |s| compile::tool(build, s.stage, s.target, "qemu-test-client"));
.run(move |s| compile::tool(build, s.stage, s.target, "remote-test-client"));
rules.build("tool-cargo", "cargo")
.dep(|s| s.name("maybe-clean-tools"))
.dep(|s| s.name("libstd-tool"))

View file

@ -2,6 +2,7 @@
- [Compiler flags](compiler-flags.md)
- [linker_flavor](compiler-flags/linker-flavor.md)
- [remap_path_prefix](compiler-flags/remap-path-prefix.md)
- [Language features](language-features.md)
- [abi_msp430_interrupt](language-features/abi-msp430-interrupt.md)
- [abi_ptx](language-features/abi-ptx.md)

View file

@ -0,0 +1,37 @@
# `remap-path-prefix`
The tracking issue for this feature is: [#41555](https://github.com/rust-lang/rust/issues/41555)
------------------------
The `-Z remap-path-prefix-from` and `-Z remap-path-prefix-to` command-line option
pair allows replacing prefixes of any file paths the compiler emits in various
places. This is useful for bringing debuginfo paths into a well-known form and
for achieving reproducible builds independent of the directory the compiler was
executed in. All paths emitted by the compiler are affected, including those in
error messages.
In order to map all paths starting with `/home/foo/my-project/src` to
`/sources/my-project`, one would invoke the compiler as follows:
```text
rustc -Zremap-path-prefix-from="/home/foo/my-project/src" -Zremap-path-prefix-to="/sources/my-project"
```
Debuginfo for code from the file `/home/foo/my-project/src/foo/mod.rs`,
for example, would then point debuggers to `/sources/my-project/foo/mod.rs`
instead of the original file.
The options can be specified multiple times when multiple prefixes should be
mapped:
```text
rustc -Zremap-path-prefix-from="/home/foo/my-project/src" \
-Zremap-path-prefix-to="/sources/my-project" \
-Zremap-path-prefix-from="/home/foo/my-project/build-dir" \
-Zremap-path-prefix-to="/stable-build-dir"
```
When the options are given multiple times, the nth `-from` will be matched up
with the nth `-to`, and they can appear anywhere on the command line. Mappings
specified later on the line take precedence over earlier ones.

View file

@ -1,35 +0,0 @@
# Copyright 2014 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.
#
# ignore-tidy-linelength
#
# usage : adb_run_wrapper [test dir - where test executables exist] [test executable]
#
TEST_PATH=$1
BIN_PATH=/system/bin
if [ -d "$TEST_PATH" ]
then
shift
RUN=$1
if [ ! -z "$RUN" ]
then
shift
# The length of binary path (i.e. ./$RUN) should be shorter than 128 characters.
cd $TEST_PATH
TEST_EXEC_ENV=22 LD_LIBRARY_PATH=$TEST_PATH PATH=$BIN_PATH:$TEST_PATH ./$RUN $@ 1>$TEST_PATH/$RUN.stdout 2>$TEST_PATH/$RUN.stderr
L_RET=$?
echo $L_RET > $TEST_PATH/$RUN.exitcode
fi
fi

View file

@ -296,7 +296,7 @@ fn main() {
syntax::errors::registry::Registry::new(&[]),
Rc::new(DummyCrateStore));
let filemap = session.parse_sess.codemap()
.new_filemap("<n/a>".to_string(), None, code);
.new_filemap("<n/a>".to_string(), code);
let mut lexer = lexer::StringReader::new(session.diagnostic(), filemap);
let cm = session.codemap();

View file

@ -475,14 +475,13 @@ impl<'a, 'tcx> DeadVisitor<'a, 'tcx> {
// This is done to handle the case where, for example, the static
// method of a private type is used, but the type itself is never
// called directly.
if let Some(impl_list) =
self.tcx.maps.inherent_impls.borrow().get(&self.tcx.hir.local_def_id(id)) {
for &impl_did in impl_list.iter() {
for &item_did in &self.tcx.associated_item_def_ids(impl_did)[..] {
if let Some(item_node_id) = self.tcx.hir.as_local_node_id(item_did) {
if self.live_symbols.contains(&item_node_id) {
return true;
}
let def_id = self.tcx.hir.local_def_id(id);
let inherent_impls = self.tcx.inherent_impls(def_id);
for &impl_did in inherent_impls.iter() {
for &item_did in &self.tcx.associated_item_def_ids(impl_did)[..] {
if let Some(item_node_id) = self.tcx.hir.as_local_node_id(item_did) {
if self.live_symbols.contains(&item_node_id) {
return true;
}
}
}

View file

@ -25,6 +25,7 @@ use lint;
use middle::cstore;
use syntax::ast::{self, IntTy, UintTy};
use syntax::codemap::FilePathMapping;
use syntax::parse::token;
use syntax::parse;
use syntax::symbol::Symbol;
@ -492,6 +493,14 @@ impl Options {
self.incremental.is_none() ||
self.cg.codegen_units == 1
}
pub fn file_path_mapping(&self) -> FilePathMapping {
FilePathMapping::new(
self.debugging_opts.remap_path_prefix_from.iter().zip(
self.debugging_opts.remap_path_prefix_to.iter()
).map(|(src, dst)| (src.clone(), dst.clone())).collect()
)
}
}
// The type of entry function, so
@ -1012,6 +1021,10 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"Set the optimization fuel quota for a crate."),
print_fuel: Option<String> = (None, parse_opt_string, [TRACKED],
"Make Rustc print the total optimization fuel used by a crate."),
remap_path_prefix_from: Vec<String> = (vec![], parse_string_push, [TRACKED],
"add a source pattern to the file path remapping config"),
remap_path_prefix_to: Vec<String> = (vec![], parse_string_push, [TRACKED],
"add a mapping target to the file path remapping config"),
}
pub fn default_lib_output() -> CrateType {
@ -1319,7 +1332,7 @@ pub fn rustc_optgroups() -> Vec<RustcOptGroup> {
// Convert strings provided as --cfg [cfgspec] into a crate_cfg
pub fn parse_cfgspecs(cfgspecs: Vec<String> ) -> ast::CrateConfig {
cfgspecs.into_iter().map(|s| {
let sess = parse::ParseSess::new();
let sess = parse::ParseSess::new(FilePathMapping::empty());
let mut parser =
parse::new_parser_from_source_str(&sess, "cfgspec".to_string(), s.to_string());
@ -1430,6 +1443,23 @@ pub fn build_session_options_and_crate_config(matches: &getopts::Matches)
output_types.insert(OutputType::Exe, None);
}
let remap_path_prefix_sources = debugging_opts.remap_path_prefix_from.len();
let remap_path_prefix_targets = debugging_opts.remap_path_prefix_from.len();
if remap_path_prefix_targets < remap_path_prefix_sources {
for source in &debugging_opts.remap_path_prefix_from[remap_path_prefix_targets..] {
early_error(error_format,
&format!("option `-Zremap-path-prefix-from='{}'` does not have \
a corresponding `-Zremap-path-prefix-to`", source))
}
} else if remap_path_prefix_targets > remap_path_prefix_sources {
for target in &debugging_opts.remap_path_prefix_to[remap_path_prefix_sources..] {
early_error(error_format,
&format!("option `-Zremap-path-prefix-to='{}'` does not have \
a corresponding `-Zremap-path-prefix-from`", target))
}
}
let mut cg = build_codegen_options(matches, error_format);
// Issue #30063: if user requests llvm-related output to one

View file

@ -74,8 +74,10 @@ pub struct Session {
// The name of the root source file of the crate, in the local file system.
// The path is always expected to be absolute. `None` means that there is no
// source file.
pub local_crate_source_file: Option<PathBuf>,
pub working_dir: PathBuf,
pub local_crate_source_file: Option<String>,
// The directory the compiler has been executed in plus a flag indicating
// if the value stored here has been affected by path remapping.
pub working_dir: (String, bool),
pub lint_store: RefCell<lint::LintStore>,
pub lints: RefCell<lint::LintTable>,
/// Set of (LintId, span, message) tuples tracking lint (sub)diagnostics
@ -553,12 +555,14 @@ pub fn build_session(sopts: config::Options,
registry: errors::registry::Registry,
cstore: Rc<CrateStore>)
-> Session {
let file_path_mapping = sopts.file_path_mapping();
build_session_with_codemap(sopts,
dep_graph,
local_crate_source_file,
registry,
cstore,
Rc::new(codemap::CodeMap::new()),
Rc::new(codemap::CodeMap::new(file_path_mapping)),
None)
}
@ -622,7 +626,7 @@ pub fn build_session_(sopts: config::Options,
Ok(t) => t,
Err(e) => {
panic!(span_diagnostic.fatal(&format!("Error loading host specification: {}", e)));
}
}
};
let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
let p_s = parse::ParseSess::with_span_handler(span_diagnostic, codemap);
@ -631,14 +635,12 @@ pub fn build_session_(sopts: config::Options,
None => Some(filesearch::get_or_default_sysroot())
};
let file_path_mapping = sopts.file_path_mapping();
// Make the path absolute, if necessary
let local_crate_source_file = local_crate_source_file.map(|path|
if path.is_absolute() {
path.clone()
} else {
env::current_dir().unwrap().join(&path)
}
);
let local_crate_source_file = local_crate_source_file.map(|path| {
file_path_mapping.map_prefix(path.to_string_lossy().into_owned()).0
});
let optimization_fuel_crate = sopts.debugging_opts.fuel.as_ref().map(|i| i.0.clone());
let optimization_fuel_limit = Cell::new(sopts.debugging_opts.fuel.as_ref()
@ -646,6 +648,9 @@ pub fn build_session_(sopts: config::Options,
let print_fuel_crate = sopts.debugging_opts.print_fuel.clone();
let print_fuel = Cell::new(0);
let working_dir = env::current_dir().unwrap().to_string_lossy().into_owned();
let working_dir = file_path_mapping.map_prefix(working_dir);
let sess = Session {
dep_graph: dep_graph.clone(),
target: target_cfg,
@ -660,7 +665,7 @@ pub fn build_session_(sopts: config::Options,
derive_registrar_fn: Cell::new(None),
default_sysroot: default_sysroot,
local_crate_source_file: local_crate_source_file,
working_dir: env::current_dir().unwrap(),
working_dir: working_dir,
lint_store: RefCell::new(lint::LintStore::new()),
lints: RefCell::new(lint::LintTable::new()),
one_time_diagnostics: RefCell::new(FxHashSet()),

View file

@ -13,16 +13,19 @@ use hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use ty::{self, Ty, TyCtxt};
use syntax::ast;
use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;
use std::cell::Cell;
thread_local! {
static FORCE_ABSOLUTE: Cell<bool> = Cell::new(false)
static FORCE_ABSOLUTE: Cell<bool> = Cell::new(false);
static FORCE_IMPL_FILENAME_LINE: Cell<bool> = Cell::new(false);
}
/// Enforces that item_path_str always returns an absolute path.
/// This is useful when building symbols that contain types,
/// where we want the crate name to be part of the symbol.
/// Enforces that item_path_str always returns an absolute path and
/// also enables "type-based" impl paths. This is used when building
/// symbols that contain types, where we want the crate name to be
/// part of the symbol.
pub fn with_forced_absolute_paths<F: FnOnce() -> R, R>(f: F) -> R {
FORCE_ABSOLUTE.with(|force| {
let old = force.get();
@ -33,6 +36,20 @@ pub fn with_forced_absolute_paths<F: FnOnce() -> R, R>(f: F) -> R {
})
}
/// Force us to name impls with just the filename/line number. We
/// normally try to use types. But at some points, notably while printing
/// cycle errors, this can result in extra or suboptimal error output,
/// so this variable disables that check.
pub fn with_forced_impl_filename_line<F: FnOnce() -> R, R>(f: F) -> R {
FORCE_IMPL_FILENAME_LINE.with(|force| {
let old = force.get();
force.set(true);
let result = f();
force.set(old);
result
})
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
/// Returns a string identifying this def-id. This string is
/// suitable for user output. It is relative to the current crate
@ -199,14 +216,16 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
{
let parent_def_id = self.parent_def_id(impl_def_id).unwrap();
let use_types = if !impl_def_id.is_local() {
// always have full types available for extern crates
true
} else {
// for local crates, check whether type info is
// available; typeck might not have completed yet
self.maps.impl_trait_ref.borrow().contains_key(&impl_def_id) &&
self.maps.type_of.borrow().contains_key(&impl_def_id)
// Always use types for non-local impls, where types are always
// available, and filename/line-number is mostly uninteresting.
let use_types = !impl_def_id.is_local() || {
// Otherwise, use filename/line-number if forced.
let force_no_types = FORCE_IMPL_FILENAME_LINE.with(|f| f.get());
!force_no_types && {
// Otherwise, use types if we can query them without inducing a cycle.
ty::queries::impl_trait_ref::try_get(self, DUMMY_SP, impl_def_id).is_ok() &&
ty::queries::type_of::try_get(self, DUMMY_SP, impl_def_id).is_ok()
}
};
if !use_types {

View file

@ -17,11 +17,13 @@ use middle::privacy::AccessLevels;
use mir;
use session::CompileResult;
use ty::{self, CrateInherentImpls, Ty, TyCtxt};
use ty::item_path;
use ty::subst::Substs;
use util::nodemap::NodeSet;
use rustc_data_structures::indexed_vec::IndexVec;
use std::cell::{RefCell, RefMut};
use std::mem;
use std::ops::Deref;
use std::rc::Rc;
use syntax_pos::{Span, DUMMY_SP};
@ -139,24 +141,36 @@ pub struct CycleError<'a, 'tcx: 'a> {
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
pub fn report_cycle(self, CycleError { span, cycle }: CycleError) {
assert!(!cycle.is_empty());
// Subtle: release the refcell lock before invoking `describe()`
// below by dropping `cycle`.
let stack = cycle.to_vec();
mem::drop(cycle);
let mut err = struct_span_err!(self.sess, span, E0391,
"unsupported cyclic reference between types/traits detected");
err.span_label(span, &format!("cyclic reference"));
assert!(!stack.is_empty());
err.span_note(cycle[0].0, &format!("the cycle begins when {}...",
cycle[0].1.describe(self)));
// Disable naming impls with types in this path, since that
// sometimes cycles itself, leading to extra cycle errors.
// (And cycle errors around impls tend to occur during the
// collect/coherence phases anyhow.)
item_path::with_forced_impl_filename_line(|| {
let mut err =
struct_span_err!(self.sess, span, E0391,
"unsupported cyclic reference between types/traits detected");
err.span_label(span, &format!("cyclic reference"));
for &(span, ref query) in &cycle[1..] {
err.span_note(span, &format!("...which then requires {}...",
query.describe(self)));
}
err.span_note(stack[0].0, &format!("the cycle begins when {}...",
stack[0].1.describe(self)));
err.note(&format!("...which then again requires {}, completing the cycle.",
cycle[0].1.describe(self)));
for &(span, ref query) in &stack[1..] {
err.span_note(span, &format!("...which then requires {}...",
query.describe(self)));
}
err.emit();
err.note(&format!("...which then again requires {}, completing the cycle.",
stack[0].1.describe(self)));
err.emit();
});
}
fn cycle_check<F, R>(self, span: Span, query: Query<'gcx>, compute: F)
@ -280,11 +294,11 @@ impl<'tcx> QueryDescription for queries::def_span<'tcx> {
macro_rules! define_maps {
(<$tcx:tt>
$($(#[$attr:meta])*
pub $name:ident: $node:ident($K:ty) -> $V:ty),*) => {
[$($pub:tt)*] $name:ident: $node:ident($K:ty) -> $V:ty),*) => {
pub struct Maps<$tcx> {
providers: IndexVec<CrateNum, Providers<$tcx>>,
query_stack: RefCell<Vec<(Span, Query<$tcx>)>>,
$($(#[$attr])* pub $name: RefCell<DepTrackingMap<queries::$name<$tcx>>>),*
$($(#[$attr])* $($pub)* $name: RefCell<DepTrackingMap<queries::$name<$tcx>>>),*
}
impl<$tcx> Maps<$tcx> {
@ -341,6 +355,11 @@ macro_rules! define_maps {
-> Result<R, CycleError<'a, $tcx>>
where F: FnOnce(&$V) -> R
{
debug!("ty::queries::{}::try_get_with(key={:?}, span={:?})",
stringify!($name),
key,
span);
if let Some(result) = tcx.maps.$name.borrow().get(&key) {
return Ok(f(result));
}
@ -447,12 +466,12 @@ macro_rules! define_maps {
// the driver creates (using several `rustc_*` crates).
define_maps! { <'tcx>
/// Records the type of every item.
pub type_of: ItemSignature(DefId) -> Ty<'tcx>,
[] type_of: ItemSignature(DefId) -> Ty<'tcx>,
/// Maps from the def-id of an item (trait/struct/enum/fn) to its
/// associated generics and predicates.
pub generics_of: ItemSignature(DefId) -> &'tcx ty::Generics,
pub predicates_of: ItemSignature(DefId) -> ty::GenericPredicates<'tcx>,
[] generics_of: ItemSignature(DefId) -> &'tcx ty::Generics,
[] predicates_of: ItemSignature(DefId) -> ty::GenericPredicates<'tcx>,
/// Maps from the def-id of a trait to the list of
/// super-predicates. This is a subset of the full list of
@ -460,39 +479,39 @@ define_maps! { <'tcx>
/// evaluate them even during type conversion, often before the
/// full predicates are available (note that supertraits have
/// additional acyclicity requirements).
pub super_predicates_of: ItemSignature(DefId) -> ty::GenericPredicates<'tcx>,
[] super_predicates_of: ItemSignature(DefId) -> ty::GenericPredicates<'tcx>,
/// To avoid cycles within the predicates of a single item we compute
/// per-type-parameter predicates for resolving `T::AssocTy`.
pub type_param_predicates: TypeParamPredicates((DefId, DefId))
[] type_param_predicates: TypeParamPredicates((DefId, DefId))
-> ty::GenericPredicates<'tcx>,
pub trait_def: ItemSignature(DefId) -> &'tcx ty::TraitDef,
pub adt_def: ItemSignature(DefId) -> &'tcx ty::AdtDef,
pub adt_destructor: AdtDestructor(DefId) -> Option<ty::Destructor>,
pub adt_sized_constraint: SizedConstraint(DefId) -> &'tcx [Ty<'tcx>],
pub adt_dtorck_constraint: DtorckConstraint(DefId) -> ty::DtorckConstraint<'tcx>,
[] trait_def: ItemSignature(DefId) -> &'tcx ty::TraitDef,
[] adt_def: ItemSignature(DefId) -> &'tcx ty::AdtDef,
[] adt_destructor: AdtDestructor(DefId) -> Option<ty::Destructor>,
[] adt_sized_constraint: SizedConstraint(DefId) -> &'tcx [Ty<'tcx>],
[] adt_dtorck_constraint: DtorckConstraint(DefId) -> ty::DtorckConstraint<'tcx>,
/// True if this is a foreign item (i.e., linked via `extern { ... }`).
pub is_foreign_item: IsForeignItem(DefId) -> bool,
[] is_foreign_item: IsForeignItem(DefId) -> bool,
/// Maps from def-id of a type or region parameter to its
/// (inferred) variance.
pub variances_of: ItemSignature(DefId) -> Rc<Vec<ty::Variance>>,
[pub] variances_of: ItemSignature(DefId) -> Rc<Vec<ty::Variance>>,
/// Maps from an impl/trait def-id to a list of the def-ids of its items
pub associated_item_def_ids: AssociatedItemDefIds(DefId) -> Rc<Vec<DefId>>,
[] associated_item_def_ids: AssociatedItemDefIds(DefId) -> Rc<Vec<DefId>>,
/// Maps from a trait item to the trait item "descriptor"
pub associated_item: AssociatedItems(DefId) -> ty::AssociatedItem,
[] associated_item: AssociatedItems(DefId) -> ty::AssociatedItem,
pub impl_trait_ref: ItemSignature(DefId) -> Option<ty::TraitRef<'tcx>>,
pub impl_polarity: ItemSignature(DefId) -> hir::ImplPolarity,
[] impl_trait_ref: ItemSignature(DefId) -> Option<ty::TraitRef<'tcx>>,
[] impl_polarity: ItemSignature(DefId) -> hir::ImplPolarity,
/// Maps a DefId of a type to a list of its inherent impls.
/// Contains implementations of methods that are inherent to a type.
/// Methods in these implementations don't need to be exported.
pub inherent_impls: InherentImpls(DefId) -> Rc<Vec<DefId>>,
[] inherent_impls: InherentImpls(DefId) -> Rc<Vec<DefId>>,
/// Maps from the def-id of a function/method or const/static
/// to its MIR. Mutation is done at an item granularity to
@ -501,60 +520,63 @@ define_maps! { <'tcx>
///
/// Note that cross-crate MIR appears to be always borrowed
/// (in the `RefCell` sense) to prevent accidental mutation.
pub mir: Mir(DefId) -> &'tcx RefCell<mir::Mir<'tcx>>,
[pub] mir: Mir(DefId) -> &'tcx RefCell<mir::Mir<'tcx>>,
/// Maps DefId's that have an associated Mir to the result
/// of the MIR qualify_consts pass. The actual meaning of
/// the value isn't known except to the pass itself.
pub mir_const_qualif: Mir(DefId) -> u8,
[] mir_const_qualif: Mir(DefId) -> u8,
/// Records the type of each closure. The def ID is the ID of the
/// expression defining the closure.
pub closure_kind: ItemSignature(DefId) -> ty::ClosureKind,
[] closure_kind: ItemSignature(DefId) -> ty::ClosureKind,
/// Records the type of each closure. The def ID is the ID of the
/// expression defining the closure.
pub closure_type: ItemSignature(DefId) -> ty::PolyFnSig<'tcx>,
[] closure_type: ItemSignature(DefId) -> ty::PolyFnSig<'tcx>,
/// Caches CoerceUnsized kinds for impls on custom types.
pub coerce_unsized_info: ItemSignature(DefId)
[] coerce_unsized_info: ItemSignature(DefId)
-> ty::adjustment::CoerceUnsizedInfo,
pub typeck_item_bodies: typeck_item_bodies_dep_node(CrateNum) -> CompileResult,
[] typeck_item_bodies: typeck_item_bodies_dep_node(CrateNum) -> CompileResult,
pub typeck_tables_of: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>,
[] typeck_tables_of: TypeckTables(DefId) -> &'tcx ty::TypeckTables<'tcx>,
pub coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
[] has_typeck_tables: TypeckTables(DefId) -> bool,
pub borrowck: BorrowCheck(DefId) -> (),
[] coherent_trait: coherent_trait_dep_node((CrateNum, DefId)) -> (),
[] borrowck: BorrowCheck(DefId) -> (),
/// Gets a complete map from all types to their inherent impls.
/// Not meant to be used directly outside of coherence.
/// (Defined only for LOCAL_CRATE)
pub crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls,
[] crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum) -> CrateInherentImpls,
/// Checks all types in the krate for overlap in their inherent impls. Reports errors.
/// Not meant to be used directly outside of coherence.
/// (Defined only for LOCAL_CRATE)
pub crate_inherent_impls_overlap_check: crate_inherent_impls_dep_node(CrateNum) -> (),
[] crate_inherent_impls_overlap_check: crate_inherent_impls_dep_node(CrateNum) -> (),
/// Results of evaluating const items or constants embedded in
/// other items (such as enum variant explicit discriminants).
pub const_eval: const_eval_dep_node((DefId, &'tcx Substs<'tcx>))
[] const_eval: const_eval_dep_node((DefId, &'tcx Substs<'tcx>))
-> const_val::EvalResult<'tcx>,
/// Performs the privacy check and computes "access levels".
pub privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc<AccessLevels>,
[] privacy_access_levels: PrivacyAccessLevels(CrateNum) -> Rc<AccessLevels>,
pub reachable_set: reachability_dep_node(CrateNum) -> Rc<NodeSet>,
[] reachable_set: reachability_dep_node(CrateNum) -> Rc<NodeSet>,
pub mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx RefCell<mir::Mir<'tcx>>,
[] mir_shims: mir_shim_dep_node(ty::InstanceDef<'tcx>) -> &'tcx RefCell<mir::Mir<'tcx>>,
pub def_symbol_name: SymbolName(DefId) -> ty::SymbolName,
pub symbol_name: symbol_name_dep_node(ty::Instance<'tcx>) -> ty::SymbolName,
[] def_symbol_name: SymbolName(DefId) -> ty::SymbolName,
[] symbol_name: symbol_name_dep_node(ty::Instance<'tcx>) -> ty::SymbolName,
pub describe_def: MetaData(DefId) -> Option<Def>,
pub def_span: MetaData(DefId) -> Span
[] describe_def: MetaData(DefId) -> Option<Def>,
[] def_span: MetaData(DefId) -> Span
}
fn coherent_trait_dep_node((_, def_id): (CrateNum, DefId)) -> DepNode<DefId> {
@ -585,4 +607,4 @@ fn typeck_item_bodies_dep_node(_: CrateNum) -> DepNode<DefId> {
fn const_eval_dep_node((def_id, _): (DefId, &Substs)) -> DepNode<DefId> {
DepNode::ConstEval(def_id)
}
}

View file

@ -2139,6 +2139,26 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
})
}
pub fn opt_associated_item(self, def_id: DefId) -> Option<AssociatedItem> {
let is_associated_item = if let Some(node_id) = self.hir.as_local_node_id(def_id) {
match self.hir.get(node_id) {
hir_map::NodeTraitItem(_) | hir_map::NodeImplItem(_) => true,
_ => false,
}
} else {
match self.describe_def(def_id).expect("no def for def-id") {
Def::AssociatedConst(_) | Def::Method(_) | Def::AssociatedTy(_) => true,
_ => false,
}
};
if is_associated_item {
Some(self.associated_item(def_id))
} else {
None
}
}
fn associated_item_from_trait_item_ref(self,
parent_def_id: DefId,
parent_vis: &hir::Visibility,
@ -2383,7 +2403,7 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
None
}
} else {
self.maps.associated_item.borrow().get(&def_id).cloned()
self.opt_associated_item(def_id)
};
match item {
@ -2404,15 +2424,13 @@ impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
if def_id.krate != LOCAL_CRATE {
return self.sess.cstore.trait_of_item(def_id);
}
match self.maps.associated_item.borrow().get(&def_id) {
Some(associated_item) => {
self.opt_associated_item(def_id)
.and_then(|associated_item| {
match associated_item.container {
TraitContainer(def_id) => Some(def_id),
ImplContainer(_) => None
}
}
None => None
}
})
}
/// Construct a parameter environment suitable for static contexts or other contexts where there
@ -2580,11 +2598,12 @@ fn associated_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId)
}
}
ref r => {
panic!("unexpected container of associated items: {:?}", r)
}
_ => { }
}
panic!("associated item not found for def_id: {:?}", def_id);
span_bug!(parent_item.span,
"unexpected parent of trait or impl item or item not found: {:?}",
parent_item.node)
}
/// Calculates the Sized-constraint.

View file

@ -206,7 +206,7 @@ pub fn run_compiler<'a>(args: &[String],
let cstore = Rc::new(CStore::new(&dep_graph));
let loader = file_loader.unwrap_or(box RealFileLoader);
let codemap = Rc::new(CodeMap::with_file_loader(loader));
let codemap = Rc::new(CodeMap::with_file_loader(loader, sopts.file_path_mapping()));
let mut sess = session::build_session_with_codemap(
sopts, &dep_graph, input_file_path, descriptions, cstore.clone(), codemap, emitter_dest,
);

View file

@ -31,7 +31,7 @@ use rustc::session::{self, config};
use std::rc::Rc;
use syntax::ast;
use syntax::abi::Abi;
use syntax::codemap::CodeMap;
use syntax::codemap::{CodeMap, FilePathMapping};
use errors;
use errors::emitter::Emitter;
use errors::{Level, DiagnosticBuilder};
@ -108,7 +108,7 @@ fn test_env<F>(source_string: &str,
&dep_graph,
None,
diagnostic_handler,
Rc::new(CodeMap::new()),
Rc::new(CodeMap::new(FilePathMapping::empty())),
cstore.clone());
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let input = config::Input::Str {

View file

@ -27,19 +27,15 @@ pub enum MethodLateContext {
PlainImpl,
}
pub fn method_context(cx: &LateContext, id: ast::NodeId, span: Span) -> MethodLateContext {
pub fn method_context(cx: &LateContext, id: ast::NodeId) -> MethodLateContext {
let def_id = cx.tcx.hir.local_def_id(id);
match cx.tcx.maps.associated_item.borrow().get(&def_id) {
None => span_bug!(span, "missing method descriptor?!"),
Some(item) => {
match item.container {
ty::TraitContainer(..) => MethodLateContext::TraitDefaultImpl,
ty::ImplContainer(cid) => {
match cx.tcx.impl_trait_ref(cid) {
Some(_) => MethodLateContext::TraitImpl,
None => MethodLateContext::PlainImpl,
}
}
let item = cx.tcx.associated_item(def_id);
match item.container {
ty::TraitContainer(..) => MethodLateContext::TraitDefaultImpl,
ty::ImplContainer(cid) => {
match cx.tcx.impl_trait_ref(cid) {
Some(_) => MethodLateContext::TraitImpl,
None => MethodLateContext::PlainImpl,
}
}
}
@ -244,7 +240,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for NonSnakeCase {
id: ast::NodeId) {
match fk {
FnKind::Method(name, ..) => {
match method_context(cx, id, span) {
match method_context(cx, id) {
MethodLateContext::PlainImpl => {
self.check_snake_case(cx, "method", &name.as_str(), Some(span))
}

View file

@ -432,7 +432,7 @@ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for MissingDoc {
fn check_impl_item(&mut self, cx: &LateContext, impl_item: &hir::ImplItem) {
// If the method is an impl for a trait, don't doc.
if method_context(cx, impl_item.id, impl_item.span) == MethodLateContext::TraitImpl {
if method_context(cx, impl_item.id) == MethodLateContext::TraitImpl {
return;
}

View file

@ -393,7 +393,7 @@ impl CrateStore for cstore::CStore {
let (name, def) = data.get_macro(id.index);
let source_name = format!("<{} macros>", name);
let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body);
let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body);
let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION };
let body = filemap_to_stream(&sess.parse_sess, filemap);

View file

@ -827,7 +827,7 @@ impl<'a, 'tcx> CrateMetadata {
EntryKind::AssociatedType(container) => {
(ty::AssociatedKind::Type, container, false)
}
_ => bug!()
_ => bug!("cannot get associated-item of `{:?}`", def_key)
};
ty::AssociatedItem {
@ -1138,7 +1138,7 @@ impl<'a, 'tcx> CrateMetadata {
// We can't reuse an existing FileMap, so allocate a new one
// containing the information we need.
let syntax_pos::FileMap { name,
abs_path,
name_was_remapped,
start_pos,
end_pos,
lines,
@ -1162,7 +1162,7 @@ impl<'a, 'tcx> CrateMetadata {
}
let local_version = local_codemap.new_imported_filemap(name,
abs_path,
name_was_remapped,
source_length,
lines,
multibyte_chars);

View file

@ -30,6 +30,7 @@ use std::hash::Hash;
use std::intrinsics;
use std::io::prelude::*;
use std::io::Cursor;
use std::path::Path;
use std::rc::Rc;
use std::u32;
use syntax::ast::{self, CRATE_NODE_ID};
@ -626,14 +627,14 @@ impl<'a, 'b: 'a, 'tcx: 'b> EntryBuilder<'a, 'b, 'tcx> {
// Encodes the inherent implementations of a structure, enumeration, or trait.
fn encode_inherent_implementations(&mut self, def_id: DefId) -> LazySeq<DefIndex> {
debug!("EntryBuilder::encode_inherent_implementations({:?})", def_id);
match self.tcx.maps.inherent_impls.borrow().get(&def_id) {
None => LazySeq::empty(),
Some(implementations) => {
self.lazy_seq(implementations.iter().map(|&def_id| {
assert!(def_id.is_local());
def_id.index
}))
}
let implementations = self.tcx.inherent_impls(def_id);
if implementations.is_empty() {
LazySeq::empty()
} else {
self.lazy_seq(implementations.iter().map(|&def_id| {
assert!(def_id.is_local());
def_id.index
}))
}
}
@ -1270,13 +1271,40 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
fn encode_codemap(&mut self) -> LazySeq<syntax_pos::FileMap> {
let codemap = self.tcx.sess.codemap();
let all_filemaps = codemap.files.borrow();
self.lazy_seq_ref(all_filemaps.iter()
let adapted = all_filemaps.iter()
.filter(|filemap| {
// No need to re-export imported filemaps, as any downstream
// crate will import them from their original source.
!filemap.is_imported()
})
.map(|filemap| &**filemap))
.map(|filemap| {
// When exporting FileMaps, we expand all paths to absolute
// paths because any relative paths are potentially relative to
// a wrong directory.
// However, if a path has been modified via
// `-Zremap-path-prefix` we assume the user has already set
// things up the way they want and don't touch the path values
// anymore.
let name = Path::new(&filemap.name);
let (ref working_dir, working_dir_was_remapped) = self.tcx.sess.working_dir;
if filemap.name_was_remapped ||
(name.is_relative() && working_dir_was_remapped) {
// This path of this FileMap has been modified by
// path-remapping, so we use it verbatim (and avoid cloning
// the whole map in the process).
filemap.clone()
} else {
let mut adapted = (**filemap).clone();
let abs_path = Path::new(working_dir).join(name)
.to_string_lossy()
.into_owned();
adapted.name = abs_path;
Rc::new(adapted)
}
})
.collect::<Vec<_>>();
self.lazy_seq_ref(adapted.iter().map(|fm| &**fm))
}
fn encode_def_path_table(&mut self) -> Lazy<DefPathTable> {

View file

@ -37,6 +37,7 @@ use rustc::ty::{self, TyCtxt, AssociatedItemContainer};
use std::collections::HashSet;
use std::collections::hash_map::DefaultHasher;
use std::hash::*;
use std::path::Path;
use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
use syntax::parse::token;
@ -114,20 +115,21 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> {
where F: FnOnce(&mut DumpVisitor<'l, 'tcx, 'll, D>)
{
let item_def_id = self.tcx.hir.local_def_id(item_id);
match self.tcx.maps.typeck_tables_of.borrow().get(&item_def_id) {
Some(tables) => {
let old_tables = self.save_ctxt.tables;
self.save_ctxt.tables = tables;
f(self);
self.save_ctxt.tables = old_tables;
}
None => f(self),
if self.tcx.has_typeck_tables(item_def_id) {
let tables = self.tcx.typeck_tables_of(item_def_id);
let old_tables = self.save_ctxt.tables;
self.save_ctxt.tables = tables;
f(self);
self.save_ctxt.tables = old_tables;
} else {
f(self)
}
}
pub fn dump_crate_info(&mut self, name: &str, krate: &ast::Crate) {
let source_file = self.tcx.sess.local_crate_source_file.as_ref();
let crate_root = source_file.map(|source_file| {
let source_file = Path::new(source_file);
match source_file.file_name() {
Some(_) => source_file.parent().unwrap().display().to_string(),
None => source_file.display().to_string(),

View file

@ -8,12 +8,12 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use super::FunctionDebugContext;
use super::{FunctionDebugContext, FunctionDebugContextData};
use super::metadata::file_metadata;
use super::utils::{DIB, span_start};
use llvm;
use llvm::debuginfo::{DIScope, DISubprogram};
use llvm::debuginfo::DIScope;
use common::CrateContext;
use rustc::mir::{Mir, VisibilityScope};
@ -53,8 +53,8 @@ pub fn create_mir_scopes(ccx: &CrateContext, mir: &Mir, debug_context: &Function
};
let mut scopes = IndexVec::from_elem(null_scope, &mir.visibility_scopes);
let fn_metadata = match *debug_context {
FunctionDebugContext::RegularContext(ref data) => data.fn_metadata,
let debug_context = match *debug_context {
FunctionDebugContext::RegularContext(ref data) => data,
FunctionDebugContext::DebugInfoDisabled |
FunctionDebugContext::FunctionWithoutDebugInfo => {
return scopes;
@ -71,7 +71,7 @@ pub fn create_mir_scopes(ccx: &CrateContext, mir: &Mir, debug_context: &Function
// Instantiate all scopes.
for idx in 0..mir.visibility_scopes.len() {
let scope = VisibilityScope::new(idx);
make_mir_scope(ccx, &mir, &has_variables, fn_metadata, scope, &mut scopes);
make_mir_scope(ccx, &mir, &has_variables, debug_context, scope, &mut scopes);
}
scopes
@ -80,7 +80,7 @@ pub fn create_mir_scopes(ccx: &CrateContext, mir: &Mir, debug_context: &Function
fn make_mir_scope(ccx: &CrateContext,
mir: &Mir,
has_variables: &BitVector,
fn_metadata: DISubprogram,
debug_context: &FunctionDebugContextData,
scope: VisibilityScope,
scopes: &mut IndexVec<VisibilityScope, MirDebugScope>) {
if scopes[scope].is_valid() {
@ -89,13 +89,13 @@ fn make_mir_scope(ccx: &CrateContext,
let scope_data = &mir.visibility_scopes[scope];
let parent_scope = if let Some(parent) = scope_data.parent_scope {
make_mir_scope(ccx, mir, has_variables, fn_metadata, parent, scopes);
make_mir_scope(ccx, mir, has_variables, debug_context, parent, scopes);
scopes[parent]
} else {
// The root is the function itself.
let loc = span_start(ccx, mir.span);
scopes[scope] = MirDebugScope {
scope_metadata: fn_metadata,
scope_metadata: debug_context.fn_metadata,
file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_pos,
};
@ -109,14 +109,17 @@ fn make_mir_scope(ccx: &CrateContext,
// However, we don't skip creating a nested scope if
// our parent is the root, because we might want to
// put arguments in the root and not have shadowing.
if parent_scope.scope_metadata != fn_metadata {
if parent_scope.scope_metadata != debug_context.fn_metadata {
scopes[scope] = parent_scope;
return;
}
}
let loc = span_start(ccx, scope_data.span);
let file_metadata = file_metadata(ccx, &loc.file.name, &loc.file.abs_path);
let file_metadata = file_metadata(ccx,
&loc.file.name,
debug_context.defining_crate);
let scope_metadata = unsafe {
llvm::LLVMRustDIBuilderCreateLexicalBlock(
DIB(ccx),

View file

@ -26,7 +26,7 @@ use llvm::debuginfo::{DIType, DIFile, DIScope, DIDescriptor,
DICompositeType, DILexicalBlock, DIFlags};
use rustc::hir::def::CtorKind;
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE};
use rustc::ty::fold::TypeVisitor;
use rustc::ty::subst::Substs;
use rustc::ty::util::TypeIdHasher;
@ -39,14 +39,12 @@ use rustc::ty::{self, AdtKind, Ty};
use rustc::ty::layout::{self, LayoutTyper};
use session::config;
use util::nodemap::FxHashMap;
use util::common::path2cstr;
use libc::{c_uint, c_longlong};
use std::ffi::CString;
use std::path::Path;
use std::ptr;
use syntax::ast;
use syntax::symbol::{Interner, InternedString};
use syntax::symbol::{Interner, InternedString, Symbol};
use syntax_pos::{self, Span};
@ -349,8 +347,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
assert!(member_descriptions.len() == member_llvm_types.len());
let loc = span_start(cx, span);
let file_metadata = file_metadata(cx, &loc.file.name, &loc.file.abs_path);
let file_metadata = unknown_file_metadata(cx);
let metadata = composite_type_metadata(cx,
slice_llvm_type,
@ -659,44 +656,51 @@ pub fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
metadata
}
pub fn file_metadata(cx: &CrateContext, path: &str, full_path: &Option<String>) -> DIFile {
// FIXME (#9639): This needs to handle non-utf8 paths
let work_dir = cx.sess().working_dir.to_str().unwrap();
let file_name =
full_path.as_ref().map(|p| p.as_str()).unwrap_or_else(|| {
if path.starts_with(work_dir) {
&path[work_dir.len() + 1..path.len()]
} else {
path
}
});
pub fn file_metadata(cx: &CrateContext,
file_name: &str,
defining_crate: CrateNum) -> DIFile {
debug!("file_metadata: file_name: {}, defining_crate: {}",
file_name,
defining_crate);
file_metadata_(cx, path, file_name, &work_dir)
let directory = if defining_crate == LOCAL_CRATE {
&cx.sess().working_dir.0[..]
} else {
// If the path comes from an upstream crate we assume it has been made
// independent of the compiler's working directory one way or another.
""
};
file_metadata_raw(cx, file_name, directory)
}
pub fn unknown_file_metadata(cx: &CrateContext) -> DIFile {
// Regular filenames should not be empty, so we abuse an empty name as the
// key for the special unknown file metadata
file_metadata_(cx, "", "<unknown>", "")
file_metadata_raw(cx, "<unknown>", "")
}
fn file_metadata_(cx: &CrateContext, key: &str, file_name: &str, work_dir: &str) -> DIFile {
if let Some(file_metadata) = debug_context(cx).created_files.borrow().get(key) {
fn file_metadata_raw(cx: &CrateContext,
file_name: &str,
directory: &str)
-> DIFile {
let key = (Symbol::intern(file_name), Symbol::intern(directory));
if let Some(file_metadata) = debug_context(cx).created_files.borrow().get(&key) {
return *file_metadata;
}
debug!("file_metadata: file_name: {}, work_dir: {}", file_name, work_dir);
debug!("file_metadata: file_name: {}, directory: {}", file_name, directory);
let file_name = CString::new(file_name).unwrap();
let work_dir = CString::new(work_dir).unwrap();
let directory = CString::new(directory).unwrap();
let file_metadata = unsafe {
llvm::LLVMRustDIBuilderCreateFile(DIB(cx), file_name.as_ptr(),
work_dir.as_ptr())
llvm::LLVMRustDIBuilderCreateFile(DIB(cx),
file_name.as_ptr(),
directory.as_ptr())
};
let mut created_files = debug_context(cx).created_files.borrow_mut();
created_files.insert(key.to_string(), file_metadata);
created_files.insert(key, file_metadata);
file_metadata
}
@ -761,25 +765,10 @@ pub fn compile_unit_metadata(scc: &SharedCrateContext,
debug_context: &CrateDebugContext,
sess: &Session)
-> DIDescriptor {
let work_dir = &sess.working_dir;
let compile_unit_name = match sess.local_crate_source_file {
None => fallback_path(scc),
Some(ref abs_path) => {
if abs_path.is_relative() {
sess.warn("debuginfo: Invalid path to crate's local root source file!");
fallback_path(scc)
} else {
match abs_path.strip_prefix(work_dir) {
Ok(ref p) if p.is_relative() => {
if p.starts_with(Path::new("./")) {
path2cstr(p)
} else {
path2cstr(&Path::new(".").join(p))
}
}
_ => fallback_path(scc)
}
}
Some(ref path) => {
CString::new(&path[..]).unwrap()
}
};
@ -789,7 +778,8 @@ pub fn compile_unit_metadata(scc: &SharedCrateContext,
(option_env!("CFG_VERSION")).expect("CFG_VERSION"));
let compile_unit_name = compile_unit_name.as_ptr();
let work_dir = path2cstr(&work_dir);
let work_dir = CString::new(&sess.working_dir.0[..]).unwrap();
let producer = CString::new(producer).unwrap();
let flags = "\0";
let split_name = "\0";
@ -1760,7 +1750,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
let (file_metadata, line_number) = if span != syntax_pos::DUMMY_SP {
let loc = span_start(cx, span);
(file_metadata(cx, &loc.file.name, &loc.file.abs_path), loc.line as c_uint)
(file_metadata(cx, &loc.file.name, LOCAL_CRATE), loc.line as c_uint)
} else {
(unknown_file_metadata(cx), UNKNOWN_LINE_NUMBER)
};
@ -1795,9 +1785,10 @@ pub fn create_global_var_metadata(cx: &CrateContext,
// Creates an "extension" of an existing DIScope into another file.
pub fn extend_scope_to_file(ccx: &CrateContext,
scope_metadata: DIScope,
file: &syntax_pos::FileMap)
file: &syntax_pos::FileMap,
defining_crate: CrateNum)
-> DILexicalBlock {
let file_metadata = file_metadata(ccx, &file.name, &file.abs_path);
let file_metadata = file_metadata(ccx, &file.name, defining_crate);
unsafe {
llvm::LLVMRustDIBuilderCreateLexicalBlockFile(
DIB(ccx),

View file

@ -23,7 +23,7 @@ use self::source_loc::InternalDebugLocation::{self, UnknownLocation};
use llvm;
use llvm::{ModuleRef, ContextRef, ValueRef};
use llvm::debuginfo::{DIFile, DIType, DIScope, DIBuilderRef, DISubprogram, DIArray, DIFlags};
use rustc::hir::def_id::DefId;
use rustc::hir::def_id::{DefId, CrateNum};
use rustc::ty::subst::Substs;
use abi::Abi;
@ -42,6 +42,7 @@ use std::ptr;
use syntax_pos::{self, Span, Pos};
use syntax::ast;
use syntax::symbol::Symbol;
use rustc::ty::layout;
pub mod gdb;
@ -67,7 +68,7 @@ const DW_TAG_arg_variable: c_uint = 0x101;
pub struct CrateDebugContext<'tcx> {
llcontext: ContextRef,
builder: DIBuilderRef,
created_files: RefCell<FxHashMap<String, DIFile>>,
created_files: RefCell<FxHashMap<(Symbol, Symbol), DIFile>>,
created_enum_disr_types: RefCell<FxHashMap<(DefId, layout::Integer), DIType>>,
type_map: RefCell<TypeMap<'tcx>>,
@ -103,7 +104,7 @@ pub enum FunctionDebugContext {
}
impl FunctionDebugContext {
fn get_ref<'a>(&'a self, span: Span) -> &'a FunctionDebugContextData {
pub fn get_ref<'a>(&'a self, span: Span) -> &'a FunctionDebugContextData {
match *self {
FunctionDebugContext::RegularContext(ref data) => data,
FunctionDebugContext::DebugInfoDisabled => {
@ -128,6 +129,7 @@ impl FunctionDebugContext {
pub struct FunctionDebugContextData {
fn_metadata: DISubprogram,
source_locations_enabled: Cell<bool>,
pub defining_crate: CrateNum,
}
pub enum VariableAccess<'a> {
@ -220,8 +222,9 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
return FunctionDebugContext::FunctionWithoutDebugInfo;
}
let def_id = instance.def_id();
let loc = span_start(cx, span);
let file_metadata = file_metadata(cx, &loc.file.name, &loc.file.abs_path);
let file_metadata = file_metadata(cx, &loc.file.name, def_id.krate);
let function_type_metadata = unsafe {
let fn_signature = get_function_signature(cx, sig);
@ -229,15 +232,15 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
};
// Find the enclosing function, in case this is a closure.
let def_key = cx.tcx().def_key(instance.def_id());
let def_key = cx.tcx().def_key(def_id);
let mut name = def_key.disambiguated_data.data.to_string();
let name_len = name.len();
let fn_def_id = cx.tcx().closure_base_def_id(instance.def_id());
let enclosing_fn_def_id = cx.tcx().closure_base_def_id(def_id);
// Get_template_parameters() will append a `<...>` clause to the function
// name if necessary.
let generics = cx.tcx().generics_of(fn_def_id);
let generics = cx.tcx().generics_of(enclosing_fn_def_id);
let substs = instance.substs.truncate_to(cx.tcx(), generics);
let template_parameters = get_template_parameters(cx,
&generics,
@ -289,6 +292,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
let fn_debug_context = FunctionDebugContextData {
fn_metadata: fn_metadata,
source_locations_enabled: Cell::new(false),
defining_crate: def_id.krate,
};
return FunctionDebugContext::RegularContext(fn_debug_context);
@ -438,8 +442,9 @@ pub fn declare_local<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
let cx = bcx.ccx;
let file = span_start(cx, span).file;
let filename = file.name.clone();
let file_metadata = file_metadata(cx, &filename[..], &file.abs_path);
let file_metadata = file_metadata(cx,
&file.name[..],
dbg_context.get_ref(span).defining_crate);
let loc = span_start(cx, span);
let type_metadata = type_metadata(cx, variable_type, span);

View file

@ -72,7 +72,7 @@ pub fn item_namespace(ccx: &CrateContext, def_id: DefId) -> DIScope {
let span = ccx.tcx().def_span(def_id);
let (file, line) = if span != DUMMY_SP {
let loc = span_start(ccx, span);
(file_metadata(ccx, &loc.file.name, &loc.file.abs_path), loc.line as c_uint)
(file_metadata(ccx, &loc.file.name, def_id.krate), loc.line as c_uint)
} else {
(unknown_file_metadata(ccx), UNKNOWN_LINE_NUMBER)
};

View file

@ -157,7 +157,11 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
if pos < self.scopes[scope_id].file_start_pos ||
pos >= self.scopes[scope_id].file_end_pos {
let cm = self.ccx.sess().codemap();
debuginfo::extend_scope_to_file(self.ccx, scope_metadata, &cm.lookup_char_pos(pos).file)
let defining_crate = self.debug_context.get_ref(DUMMY_SP).defining_crate;
debuginfo::extend_scope_to_file(self.ccx,
scope_metadata,
&cm.lookup_char_pos(pos).file,
defining_crate)
} else {
scope_metadata
}

View file

@ -637,6 +637,7 @@ pub fn provide(providers: &mut Providers) {
*providers = Providers {
typeck_item_bodies,
typeck_tables_of,
has_typeck_tables,
closure_type,
closure_kind,
adt_destructor,
@ -664,55 +665,49 @@ fn adt_destructor<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
tcx.calculate_dtor(def_id, &mut dropck::check_drop_impl)
}
fn typeck_tables_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> &'tcx ty::TypeckTables<'tcx> {
// Closures' tables come from their outermost function,
// as they are part of the same "inference environment".
let outer_def_id = tcx.closure_base_def_id(def_id);
if outer_def_id != def_id {
return tcx.typeck_tables_of(outer_def_id);
}
let id = tcx.hir.as_local_node_id(def_id).unwrap();
let span = tcx.hir.span(id);
let unsupported = || {
span_bug!(span, "can't type-check body of {:?}", def_id);
};
// Figure out what primary body this item has.
let mut fn_decl = None;
let body_id = match tcx.hir.get(id) {
/// If this def-id is a "primary tables entry", returns `Some((body_id, decl))`
/// with information about it's body-id and fn-decl (if any). Otherwise,
/// returns `None`.
///
/// If this function returns "some", then `typeck_tables(def_id)` will
/// succeed; if it returns `None`, then `typeck_tables(def_id)` may or
/// may not succeed. In some cases where this function returns `None`
/// (notably closures), `typeck_tables(def_id)` would wind up
/// redirecting to the owning function.
fn primary_body_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
id: ast::NodeId)
-> Option<(hir::BodyId, Option<&'tcx hir::FnDecl>)>
{
match tcx.hir.get(id) {
hir::map::NodeItem(item) => {
match item.node {
hir::ItemConst(_, body) |
hir::ItemStatic(_, _, body) => body,
hir::ItemFn(ref decl, .., body) => {
fn_decl = Some(decl);
body
}
_ => unsupported()
hir::ItemStatic(_, _, body) =>
Some((body, None)),
hir::ItemFn(ref decl, .., body) =>
Some((body, Some(decl))),
_ =>
None,
}
}
hir::map::NodeTraitItem(item) => {
match item.node {
hir::TraitItemKind::Const(_, Some(body)) => body,
hir::TraitItemKind::Method(ref sig,
hir::TraitMethod::Provided(body)) => {
fn_decl = Some(&sig.decl);
body
}
_ => unsupported()
hir::TraitItemKind::Const(_, Some(body)) =>
Some((body, None)),
hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) =>
Some((body, Some(&sig.decl))),
_ =>
None,
}
}
hir::map::NodeImplItem(item) => {
match item.node {
hir::ImplItemKind::Const(_, body) => body,
hir::ImplItemKind::Method(ref sig, body) => {
fn_decl = Some(&sig.decl);
body
}
_ => unsupported()
hir::ImplItemKind::Const(_, body) =>
Some((body, None)),
hir::ImplItemKind::Method(ref sig, body) =>
Some((body, Some(&sig.decl))),
_ =>
None,
}
}
hir::map::NodeExpr(expr) => {
@ -723,15 +718,47 @@ fn typeck_tables_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
// Assume that everything other than closures
// is a constant "initializer" expression.
match expr.node {
hir::ExprClosure(..) => {
// We should've bailed out above for closures.
span_bug!(expr.span, "unexpected closure")
}
_ => hir::BodyId { node_id: expr.id }
hir::ExprClosure(..) =>
None,
_ =>
Some((hir::BodyId { node_id: expr.id }, None)),
}
}
_ => unsupported()
};
_ => None,
}
}
fn has_typeck_tables<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> bool {
// Closures' tables come from their outermost function,
// as they are part of the same "inference environment".
let outer_def_id = tcx.closure_base_def_id(def_id);
if outer_def_id != def_id {
return tcx.has_typeck_tables(outer_def_id);
}
let id = tcx.hir.as_local_node_id(def_id).unwrap();
primary_body_of(tcx, id).is_some()
}
fn typeck_tables_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> &'tcx ty::TypeckTables<'tcx> {
// Closures' tables come from their outermost function,
// as they are part of the same "inference environment".
let outer_def_id = tcx.closure_base_def_id(def_id);
if outer_def_id != def_id {
return tcx.typeck_tables_of(outer_def_id);
}
let id = tcx.hir.as_local_node_id(def_id).unwrap();
let span = tcx.hir.span(id);
// Figure out what primary body this item has.
let (body_id, fn_decl) = primary_body_of(tcx, id).unwrap_or_else(|| {
span_bug!(span, "can't type-check body of {:?}", def_id);
});
let body = tcx.hir.body(body_id);
Inherited::build(tcx, id).enter(|inh| {

View file

@ -63,17 +63,11 @@ impl<'a, 'tcx, 'v> ItemLikeVisitor<'v> for CheckVisitor<'a, 'tcx> {
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
let mut used_trait_imports = DefIdSet();
for &body_id in tcx.hir.krate().bodies.keys() {
let item_id = tcx.hir.body_owner(body_id);
let item_def_id = tcx.hir.local_def_id(item_id);
// this will have been written by the main typeck pass
if let Some(tables) = tcx.maps.typeck_tables_of.borrow().get(&item_def_id) {
let imports = &tables.used_trait_imports;
debug!("GatherVisitor: item_def_id={:?} with imports {:#?}", item_def_id, imports);
used_trait_imports.extend(imports);
} else {
debug!("GatherVisitor: item_def_id={:?} with no imports", item_def_id);
}
let item_def_id = tcx.hir.body_owner_def_id(body_id);
let tables = tcx.typeck_tables_of(item_def_id);
let imports = &tables.used_trait_imports;
debug!("GatherVisitor: item_def_id={:?} with imports {:#?}", item_def_id, imports);
used_trait_imports.extend(imports);
}
let mut visitor = CheckVisitor { tcx, used_trait_imports };

View file

@ -66,11 +66,15 @@ pub fn inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
//
// [the plan]: https://github.com/rust-lang/rust-roadmap/issues/4
thread_local! {
static EMPTY_DEF_ID_VEC: Rc<Vec<DefId>> = Rc::new(vec![])
}
let result = tcx.dep_graph.with_ignore(|| {
let crate_map = tcx.crate_inherent_impls(ty_def_id.krate);
match crate_map.inherent_impls.get(&ty_def_id) {
Some(v) => v.clone(),
None => Rc::new(vec![]),
None => EMPTY_DEF_ID_VEC.with(|v| v.clone())
}
});

View file

@ -129,7 +129,7 @@ pub fn run_core(search_paths: SearchPaths,
..config::basic_options().clone()
};
let codemap = Rc::new(codemap::CodeMap::new());
let codemap = Rc::new(codemap::CodeMap::new(sessopts.file_path_mapping()));
let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto,
true,
false,

View file

@ -26,7 +26,7 @@ use std::fmt::Display;
use std::io;
use std::io::prelude::*;
use syntax::codemap::CodeMap;
use syntax::codemap::{CodeMap, FilePathMapping};
use syntax::parse::lexer::{self, TokenAndSpan};
use syntax::parse::token;
use syntax::parse;
@ -36,8 +36,8 @@ use syntax_pos::Span;
pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>,
extension: Option<&str>) -> String {
debug!("highlighting: ================\n{}\n==============", src);
let sess = parse::ParseSess::new();
let fm = sess.codemap().new_filemap("<stdin>".to_string(), None, src.to_string());
let sess = parse::ParseSess::new(FilePathMapping::empty());
let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
let mut out = Vec::new();
write_header(class, id, &mut out).unwrap();
@ -58,8 +58,8 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>
/// be inserted into an element. C.f., `render_with_highlighting` which includes
/// an enclosing `<pre>` block.
pub fn render_inner_with_highlighting(src: &str) -> io::Result<String> {
let sess = parse::ParseSess::new();
let fm = sess.codemap().new_filemap("<stdin>".to_string(), None, src.to_string());
let sess = parse::ParseSess::new(FilePathMapping::empty());
let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
let mut out = Vec::new();
let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap());

View file

@ -74,7 +74,7 @@ pub fn run(input: &str,
..config::basic_options().clone()
};
let codemap = Rc::new(CodeMap::new());
let codemap = Rc::new(CodeMap::new(sessopts.file_path_mapping()));
let handler =
errors::Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(codemap.clone()));
@ -217,7 +217,7 @@ fn runtest(test: &str, cratename: &str, cfgs: Vec<String>, libs: SearchPaths,
}
}
let data = Arc::new(Mutex::new(Vec::new()));
let codemap = Rc::new(CodeMap::new());
let codemap = Rc::new(CodeMap::new(sessopts.file_path_mapping()));
let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()),
Some(codemap.clone()));
let old = io::set_panic(Some(box Sink(data.clone())));

View file

@ -104,32 +104,42 @@ impl FileLoader for RealFileLoader {
pub struct CodeMap {
pub files: RefCell<Vec<Rc<FileMap>>>,
file_loader: Box<FileLoader>
file_loader: Box<FileLoader>,
// This is used to apply the file path remapping as specified via
// -Zremap-path-prefix to all FileMaps allocated within this CodeMap.
path_mapping: FilePathMapping,
}
impl CodeMap {
pub fn new() -> CodeMap {
pub fn new(path_mapping: FilePathMapping) -> CodeMap {
CodeMap {
files: RefCell::new(Vec::new()),
file_loader: Box::new(RealFileLoader)
file_loader: Box::new(RealFileLoader),
path_mapping: path_mapping,
}
}
pub fn with_file_loader(file_loader: Box<FileLoader>) -> CodeMap {
pub fn with_file_loader(file_loader: Box<FileLoader>,
path_mapping: FilePathMapping)
-> CodeMap {
CodeMap {
files: RefCell::new(Vec::new()),
file_loader: file_loader
file_loader: file_loader,
path_mapping: path_mapping,
}
}
pub fn path_mapping(&self) -> &FilePathMapping {
&self.path_mapping
}
pub fn file_exists(&self, path: &Path) -> bool {
self.file_loader.file_exists(path)
}
pub fn load_file(&self, path: &Path) -> io::Result<Rc<FileMap>> {
let src = self.file_loader.read_file(path)?;
let abs_path = self.file_loader.abs_path(path).map(|p| p.to_str().unwrap().to_string());
Ok(self.new_filemap(path.to_str().unwrap().to_string(), abs_path, src))
Ok(self.new_filemap(path.to_str().unwrap().to_string(), src))
}
fn next_start_pos(&self) -> usize {
@ -144,8 +154,7 @@ impl CodeMap {
/// Creates a new filemap without setting its line information. If you don't
/// intend to set the line information yourself, you should use new_filemap_and_lines.
pub fn new_filemap(&self, filename: FileName, abs_path: Option<FileName>,
mut src: String) -> Rc<FileMap> {
pub fn new_filemap(&self, filename: FileName, mut src: String) -> Rc<FileMap> {
let start_pos = self.next_start_pos();
let mut files = self.files.borrow_mut();
@ -156,9 +165,11 @@ impl CodeMap {
let end_pos = start_pos + src.len();
let (filename, was_remapped) = self.path_mapping.map_prefix(filename);
let filemap = Rc::new(FileMap {
name: filename,
abs_path: abs_path,
name_was_remapped: was_remapped,
src: Some(Rc::new(src)),
start_pos: Pos::from_usize(start_pos),
end_pos: Pos::from_usize(end_pos),
@ -172,11 +183,8 @@ impl CodeMap {
}
/// Creates a new filemap and sets its line information.
pub fn new_filemap_and_lines(&self, filename: &str, abs_path: Option<&str>,
src: &str) -> Rc<FileMap> {
let fm = self.new_filemap(filename.to_string(),
abs_path.map(|s| s.to_owned()),
src.to_owned());
pub fn new_filemap_and_lines(&self, filename: &str, src: &str) -> Rc<FileMap> {
let fm = self.new_filemap(filename.to_string(), src.to_owned());
let mut byte_pos: u32 = fm.start_pos.0;
for line in src.lines() {
// register the start of this line
@ -195,7 +203,7 @@ impl CodeMap {
/// information for things inlined from other crates.
pub fn new_imported_filemap(&self,
filename: FileName,
abs_path: Option<FileName>,
name_was_remapped: bool,
source_len: usize,
mut file_local_lines: Vec<BytePos>,
mut file_local_multibyte_chars: Vec<MultiByteChar>)
@ -216,7 +224,7 @@ impl CodeMap {
let filemap = Rc::new(FileMap {
name: filename,
abs_path: abs_path,
name_was_remapped: name_was_remapped,
src: None,
start_pos: start_pos,
end_pos: end_pos,
@ -550,6 +558,42 @@ impl CodeMapper for CodeMap {
}
}
#[derive(Clone)]
pub struct FilePathMapping {
mapping: Vec<(String, String)>,
}
impl FilePathMapping {
pub fn empty() -> FilePathMapping {
FilePathMapping {
mapping: vec![]
}
}
pub fn new(mapping: Vec<(String, String)>) -> FilePathMapping {
FilePathMapping {
mapping: mapping
}
}
/// Applies any path prefix substitution as defined by the mapping.
/// The return value is the remapped path and a boolean indicating whether
/// the path was affected by the mapping.
pub fn map_prefix(&self, path: String) -> (String, bool) {
// NOTE: We are iterating over the mapping entries from last to first
// because entries specified later on the command line should
// take precedence.
for &(ref from, ref to) in self.mapping.iter().rev() {
if path.starts_with(from) {
let mapped = path.replacen(from, to, 1);
return (mapped, true);
}
}
(path, false)
}
}
// _____________________________________________________________________________
// Tests
//
@ -561,9 +605,8 @@ mod tests {
#[test]
fn t1 () {
let cm = CodeMap::new();
let cm = CodeMap::new(FilePathMapping::empty());
let fm = cm.new_filemap("blork.rs".to_string(),
None,
"first line.\nsecond line".to_string());
fm.next_line(BytePos(0));
// Test we can get lines with partial line info.
@ -578,9 +621,8 @@ mod tests {
#[test]
#[should_panic]
fn t2 () {
let cm = CodeMap::new();
let cm = CodeMap::new(FilePathMapping::empty());
let fm = cm.new_filemap("blork.rs".to_string(),
None,
"first line.\nsecond line".to_string());
// TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0));
@ -589,15 +631,12 @@ mod tests {
}
fn init_code_map() -> CodeMap {
let cm = CodeMap::new();
let cm = CodeMap::new(FilePathMapping::empty());
let fm1 = cm.new_filemap("blork.rs".to_string(),
None,
"first line.\nsecond line".to_string());
let fm2 = cm.new_filemap("empty.rs".to_string(),
None,
"".to_string());
let fm3 = cm.new_filemap("blork2.rs".to_string(),
None,
"first line.\nsecond line".to_string());
fm1.next_line(BytePos(0));
@ -656,14 +695,12 @@ mod tests {
}
fn init_code_map_mbc() -> CodeMap {
let cm = CodeMap::new();
let cm = CodeMap::new(FilePathMapping::empty());
// € is a three byte utf8 char.
let fm1 =
cm.new_filemap("blork.rs".to_string(),
None,
"fir€st €€€€ line.\nsecond line".to_string());
let fm2 = cm.new_filemap("blork2.rs".to_string(),
None,
"first line€€.\n€ second line".to_string());
fm1.next_line(BytePos(0));
@ -728,10 +765,10 @@ mod tests {
/// lines in the middle of a file.
#[test]
fn span_to_snippet_and_lines_spanning_multiple_lines() {
let cm = CodeMap::new();
let cm = CodeMap::new(FilePathMapping::empty());
let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
let selection = " \n ~~\n~~~\n~~~~~ \n \n";
cm.new_filemap_and_lines("blork.rs", None, inputtext);
cm.new_filemap_and_lines("blork.rs", inputtext);
let span = span_from_selection(inputtext, selection);
// check that we are extracting the text we thought we were extracting
@ -770,11 +807,11 @@ mod tests {
/// Test failing to merge two spans on different lines
#[test]
fn span_merging_fail() {
let cm = CodeMap::new();
let cm = CodeMap::new(FilePathMapping::empty());
let inputtext = "bbbb BB\ncc CCC\n";
let selection1 = " ~~\n \n";
let selection2 = " \n ~~~\n";
cm.new_filemap_and_lines("blork.rs", None, inputtext);
cm.new_filemap_and_lines("blork.rs", inputtext);
let span1 = span_from_selection(inputtext, selection1);
let span2 = span_from_selection(inputtext, selection2);

View file

@ -783,7 +783,7 @@ fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
let filename = String::from("<macro expansion>");
filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text))
}
impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {

View file

@ -142,7 +142,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
// Add this input file to the code map to make it available as
// dependency information
let filename = format!("{}", file.display());
cx.codemap().new_filemap_and_lines(&filename, None, &src);
cx.codemap().new_filemap_and_lines(&filename, &src);
base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src)))
}
@ -173,7 +173,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
// Add this input file to the code map to make it available as
// dependency information, but don't enter it's contents
let filename = format!("{}", file.display());
cx.codemap().new_filemap_and_lines(&filename, None, "");
cx.codemap().new_filemap_and_lines(&filename, "");
base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Rc::new(bytes))))
}

View file

@ -19,7 +19,7 @@
// FIXME spec the JSON output properly.
use codemap::CodeMap;
use codemap::{CodeMap, FilePathMapping};
use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
use errors::registry::Registry;
use errors::{DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion, CodeMapper};
@ -48,7 +48,8 @@ impl JsonEmitter {
}
pub fn basic() -> JsonEmitter {
JsonEmitter::stderr(None, Rc::new(CodeMap::new()))
let file_path_mapping = FilePathMapping::empty();
JsonEmitter::stderr(None, Rc::new(CodeMap::new(file_path_mapping)))
}
pub fn new(dst: Box<Write + Send>,

View file

@ -348,8 +348,8 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: String, srdr: &mut R
let mut src = Vec::new();
srdr.read_to_end(&mut src).unwrap();
let src = String::from_utf8(src).unwrap();
let cm = CodeMap::new();
let filemap = cm.new_filemap(path, None, src);
let cm = CodeMap::new(sess.codemap().path_mapping().clone());
let filemap = cm.new_filemap(path, src);
let mut rdr = lexer::StringReader::new_raw(sess, filemap);
let mut comments: Vec<Comment> = Vec::new();

View file

@ -10,7 +10,7 @@
use ast::{self, Ident};
use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
use codemap::CodeMap;
use codemap::{CodeMap, FilePathMapping};
use errors::{FatalError, DiagnosticBuilder};
use parse::{token, ParseSess};
use str::char_at;
@ -563,7 +563,7 @@ impl<'a> StringReader<'a> {
// I guess this is the only way to figure out if
// we're at the beginning of the file...
let cmap = CodeMap::new();
let cmap = CodeMap::new(FilePathMapping::empty());
cmap.files.borrow_mut().push(self.filemap.clone());
let loc = cmap.lookup_char_pos_adj(self.pos);
debug!("Skipping a shebang");
@ -1718,13 +1718,13 @@ mod tests {
sess: &'a ParseSess,
teststr: String)
-> StringReader<'a> {
let fm = cm.new_filemap("zebra.rs".to_string(), None, teststr);
let fm = cm.new_filemap("zebra.rs".to_string(), teststr);
StringReader::new(sess, fm)
}
#[test]
fn t1() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut string_reader = setup(&cm,
&sh,
@ -1776,7 +1776,7 @@ mod tests {
#[test]
fn doublecolonparsing() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a b".to_string()),
vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
@ -1784,7 +1784,7 @@ mod tests {
#[test]
fn dcparsing_2() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a::b".to_string()),
vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
@ -1792,7 +1792,7 @@ mod tests {
#[test]
fn dcparsing_3() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
@ -1800,7 +1800,7 @@ mod tests {
#[test]
fn dcparsing_4() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
@ -1808,7 +1808,7 @@ mod tests {
#[test]
fn character_a() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None));
@ -1816,7 +1816,7 @@ mod tests {
#[test]
fn character_space() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern(" ")), None));
@ -1824,7 +1824,7 @@ mod tests {
#[test]
fn character_escaped() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("\\n")), None));
@ -1832,7 +1832,7 @@ mod tests {
#[test]
fn lifetime_name() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
token::Lifetime(Ident::from_str("'abc")));
@ -1840,7 +1840,7 @@ mod tests {
#[test]
fn raw_string() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
.next_token()
@ -1850,7 +1850,7 @@ mod tests {
#[test]
fn literal_suffixes() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{
@ -1894,7 +1894,7 @@ mod tests {
#[test]
fn nested_block_comments() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
match lexer.next_token().tok {
@ -1907,7 +1907,7 @@ mod tests {
#[test]
fn crlf_comments() {
let cm = Rc::new(CodeMap::new());
let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
let comment = lexer.next_token();

View file

@ -11,7 +11,7 @@
//! The main parser interface
use ast::{self, CrateConfig};
use codemap::CodeMap;
use codemap::{CodeMap, FilePathMapping};
use syntax_pos::{self, Span, FileMap, NO_EXPANSION};
use errors::{Handler, ColorConfig, DiagnosticBuilder};
use feature_gate::UnstableFeatures;
@ -53,8 +53,8 @@ pub struct ParseSess {
}
impl ParseSess {
pub fn new() -> Self {
let cm = Rc::new(CodeMap::new());
pub fn new(file_path_mapping: FilePathMapping) -> Self {
let cm = Rc::new(CodeMap::new(file_path_mapping));
let handler = Handler::with_tty_emitter(ColorConfig::Auto,
true,
false,
@ -143,13 +143,13 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa
pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-> TokenStream {
filemap_to_stream(sess, sess.codemap().new_filemap(name, None, source))
filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
}
// Create a new parser from a source string
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String)
-> Parser<'a> {
filemap_to_parser(sess, sess.codemap().new_filemap(name, None, source))
filemap_to_parser(sess, sess.codemap().new_filemap(name, source))
}
/// Create a new parser, handling errors as appropriate
@ -828,7 +828,7 @@ mod tests {
}
#[test] fn parse_ident_pat () {
let sess = ParseSess::new();
let sess = ParseSess::new(FilePathMapping::empty());
let mut parser = string_to_parser(&sess, "b".to_string());
assert!(panictry!(parser.parse_pat())
== P(ast::Pat{
@ -998,7 +998,7 @@ mod tests {
}
#[test] fn crlf_doc_comments() {
let sess = ParseSess::new();
let sess = ParseSess::new(FilePathMapping::empty());
let name = "<source>".to_string();
let source = "/// doc comment\r\nfn foo() {}".to_string();
@ -1023,7 +1023,7 @@ mod tests {
#[test]
fn ttdelim_span() {
let sess = ParseSess::new();
let sess = ParseSess::new(FilePathMapping::empty());
let expr = parse::parse_expr_from_source_str("foo".to_string(),
"foo!( fn main() { body } )".to_string(), &sess).unwrap();

View file

@ -1453,9 +1453,9 @@ impl<'a> Parser<'a> {
} else if self.eat_keyword(keywords::Impl) {
// FIXME: figure out priority of `+` in `impl Trait1 + Trait2` (#34511).
TyKind::ImplTrait(self.parse_ty_param_bounds()?)
} else if self.check(&token::Question) {
} else if self.check(&token::Question) ||
self.check_lifetime() && self.look_ahead(1, |t| t == &token::BinOp(token::Plus)){
// Bound list (trait object type)
// Bound lists starting with `'lt` are not currently supported (#40043)
TyKind::TraitObject(self.parse_ty_param_bounds_common(allow_plus)?)
} else {
let msg = format!("expected type, found {}", self.this_token_descr());

View file

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use codemap::CodeMap;
use codemap::{CodeMap, FilePathMapping};
use errors::Handler;
use errors::emitter::EmitterWriter;
use std::io;
@ -47,8 +47,8 @@ impl<T: Write> Write for Shared<T> {
fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
let output = Arc::new(Mutex::new(Vec::new()));
let code_map = Rc::new(CodeMap::new());
code_map.new_filemap_and_lines("test.rs", None, &file_text);
let code_map = Rc::new(CodeMap::new(FilePathMapping::empty()));
code_map.new_filemap_and_lines("test.rs", &file_text);
let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
let mut msp = MultiSpan::from_span(primary_span);

View file

@ -9,6 +9,7 @@
// except according to those terms.
use ast::{self, Ident};
use codemap::FilePathMapping;
use parse::{ParseSess, PResult, filemap_to_stream};
use parse::{lexer, new_parser_from_source_str};
use parse::parser::Parser;
@ -18,8 +19,8 @@ use std::iter::Peekable;
/// Map a string to tts, using a made-up filename:
pub fn string_to_stream(source_str: String) -> TokenStream {
let ps = ParseSess::new();
filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str))
let ps = ParseSess::new(FilePathMapping::empty());
filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), source_str))
}
/// Map string to parser (via tts)
@ -38,7 +39,7 @@ fn with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F) -> T
/// Parse a string, return a crate.
pub fn string_to_crate (source_str : String) -> ast::Crate {
let ps = ParseSess::new();
let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| {
p.parse_crate_mod()
})
@ -46,7 +47,7 @@ pub fn string_to_crate (source_str : String) -> ast::Crate {
/// Parse a string, return an expr
pub fn string_to_expr (source_str : String) -> P<ast::Expr> {
let ps = ParseSess::new();
let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| {
p.parse_expr()
})
@ -54,7 +55,7 @@ pub fn string_to_expr (source_str : String) -> P<ast::Expr> {
/// Parse a string, return an item
pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> {
let ps = ParseSess::new();
let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| {
p.parse_item()
})
@ -62,7 +63,7 @@ pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> {
/// Parse a string, return a stmt
pub fn string_to_stmt(source_str : String) -> Option<ast::Stmt> {
let ps = ParseSess::new();
let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| {
p.parse_stmt()
})
@ -71,7 +72,7 @@ pub fn string_to_stmt(source_str : String) -> Option<ast::Stmt> {
/// Parse a string, return a pat. Uses "irrefutable"... which doesn't
/// (currently) affect parsing.
pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
let ps = ParseSess::new();
let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| {
p.parse_pat()
})

View file

@ -369,13 +369,14 @@ pub struct MultiByteChar {
}
/// A single source in the CodeMap.
#[derive(Clone)]
pub struct FileMap {
/// The name of the file that the source came from, source that doesn't
/// originate from files has names between angle brackets by convention,
/// e.g. `<anon>`
pub name: FileName,
/// The absolute path of the file that the source came from.
pub abs_path: Option<FileName>,
/// True if the `name` field above has been modified by -Zremap-path-prefix
pub name_was_remapped: bool,
/// The complete source code
pub src: Option<Rc<String>>,
/// The start position of this source in the CodeMap
@ -392,7 +393,7 @@ impl Encodable for FileMap {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_struct("FileMap", 6, |s| {
s.emit_struct_field("name", 0, |s| self.name.encode(s))?;
s.emit_struct_field("abs_path", 1, |s| self.abs_path.encode(s))?;
s.emit_struct_field("name_was_remapped", 1, |s| self.name_was_remapped.encode(s))?;
s.emit_struct_field("start_pos", 2, |s| self.start_pos.encode(s))?;
s.emit_struct_field("end_pos", 3, |s| self.end_pos.encode(s))?;
s.emit_struct_field("lines", 4, |s| {
@ -453,8 +454,8 @@ impl Decodable for FileMap {
d.read_struct("FileMap", 6, |d| {
let name: String = d.read_struct_field("name", 0, |d| Decodable::decode(d))?;
let abs_path: Option<String> =
d.read_struct_field("abs_path", 1, |d| Decodable::decode(d))?;
let name_was_remapped: bool =
d.read_struct_field("name_was_remapped", 1, |d| Decodable::decode(d))?;
let start_pos: BytePos = d.read_struct_field("start_pos", 2, |d| Decodable::decode(d))?;
let end_pos: BytePos = d.read_struct_field("end_pos", 3, |d| Decodable::decode(d))?;
let lines: Vec<BytePos> = d.read_struct_field("lines", 4, |d| {
@ -489,7 +490,7 @@ impl Decodable for FileMap {
d.read_struct_field("multibyte_chars", 5, |d| Decodable::decode(d))?;
Ok(FileMap {
name: name,
abs_path: abs_path,
name_was_remapped: name_was_remapped,
start_pos: start_pos,
end_pos: end_pos,
src: None,

View file

@ -0,0 +1,18 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// compile-flags: -g -Zremap-path-prefix-from={{cwd}} -Zremap-path-prefix-to=/the/aux-cwd -Zremap-path-prefix-from={{src-base}}/remap_path_prefix/auxiliary -Zremap-path-prefix-to=/the/aux-src
#[inline]
pub fn some_aux_function() -> i32 {
1234
}

View file

@ -0,0 +1,31 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-windows
// ignore-tidy-linelength
// compile-flags: -g -C no-prepopulate-passes -Zremap-path-prefix-from={{cwd}} -Zremap-path-prefix-to=/the/cwd -Zremap-path-prefix-from={{src-base}} -Zremap-path-prefix-to=/the/src
// aux-build:remap_path_prefix_aux.rs
extern crate remap_path_prefix_aux;
// Here we check that the expansion of the file!() macro is mapped.
// CHECK: internal constant [34 x i8] c"/the/src/remap_path_prefix/main.rs"
pub static FILE_PATH: &'static str = file!();
fn main() {
remap_path_prefix_aux::some_aux_function();
}
// Here we check that local debuginfo is mapped correctly.
// CHECK: !DIFile(filename: "/the/src/remap_path_prefix/main.rs", directory: "/the/cwd")
// And here that debuginfo from other crates are expanded to absolute paths.
// CHECK: !DIFile(filename: "/the/aux-src/remap_path_prefix_aux.rs", directory: "")

View file

@ -16,12 +16,13 @@ extern crate syntax;
extern crate syntax_pos;
use syntax::ast;
use syntax::codemap::FilePathMapping;
use syntax::print::pprust;
use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;
fn main() {
let ps = syntax::parse::ParseSess::new();
let ps = syntax::parse::ParseSess::new(FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new(
&ps,

View file

@ -16,4 +16,6 @@ macro_rules! m {
fn main() {
m!(Copy + Send + 'static); //~ ERROR the trait `std::marker::Copy` cannot be made into an object
m!('static + Send);
m!('static +); //~ ERROR at least one non-builtin trait is required for an object type
}

View file

@ -0,0 +1,23 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A few contrived examples where lifetime should (or should not) be parsed as an object type.
// Lifetimes parsed as types are still rejected later by semantic checks.
// compile-flags: -Z continue-parse-after-error
// `'static` is a lifetime, `'static +` is a type, `'a` is a type
fn g() where
'static: 'static,
'static +: 'static + Copy,
//~^ ERROR at least one non-builtin trait is required for an object type
{}
fn main() {}

View file

@ -0,0 +1,29 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// A few contrived examples where lifetime should (or should not) be parsed as an object type.
// Lifetimes parsed as types are still rejected later by semantic checks.
// compile-flags: -Z continue-parse-after-error
struct S<'a, T>(&'a u8, T);
fn main() {
// `'static` is a lifetime argument, `'static +` is a type argument
let _: S<'static, u8>;
let _: S<'static, 'static +>;
//~^ at least one non-builtin trait is required for an object type
let _: S<'static, 'static>;
//~^ ERROR wrong number of lifetime parameters: expected 1, found 2
//~| ERROR wrong number of type arguments: expected 1, found 0
let _: S<'static +, 'static>;
//~^ ERROR lifetime parameters must be declared prior to type parameters
//~| ERROR at least one non-builtin trait is required for an object type
}

View file

@ -24,7 +24,7 @@ use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;
fn main() {
let ps = syntax::parse::ParseSess::new();
let ps = syntax::parse::ParseSess::new(codemap::FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new(
&ps,

View file

@ -17,6 +17,7 @@ extern crate syntax;
use syntax::ast::*;
use syntax::attr::*;
use syntax::ast;
use syntax::codemap::FilePathMapping;
use syntax::parse;
use syntax::parse::{ParseSess, PResult};
use syntax::parse::new_parser_from_source_str;
@ -78,7 +79,7 @@ fn str_compare<T, F: Fn(&T) -> String>(e: &str, expected: &[T], actual: &[T], f:
}
fn check_expr_attrs(es: &str, expected: &[&str]) {
let ps = ParseSess::new();
let ps = ParseSess::new(FilePathMapping::empty());
let e = expr(es, &ps).expect("parse error");
let actual = &e.attrs;
str_compare(es,
@ -88,7 +89,7 @@ fn check_expr_attrs(es: &str, expected: &[&str]) {
}
fn check_stmt_attrs(es: &str, expected: &[&str]) {
let ps = ParseSess::new();
let ps = ParseSess::new(FilePathMapping::empty());
let e = stmt(es, &ps).expect("parse error");
let actual = e.node.attrs();
str_compare(es,
@ -98,7 +99,7 @@ fn check_stmt_attrs(es: &str, expected: &[&str]) {
}
fn reject_expr_parse(es: &str) {
let ps = ParseSess::new();
let ps = ParseSess::new(FilePathMapping::empty());
match expr(es, &ps) {
Ok(_) => panic!("parser did not reject `{}`", es),
Err(mut e) => e.cancel(),
@ -106,7 +107,7 @@ fn reject_expr_parse(es: &str) {
}
fn reject_stmt_parse(es: &str) {
let ps = ParseSess::new();
let ps = ParseSess::new(FilePathMapping::empty());
match stmt(es, &ps) {
Ok(_) => panic!("parser did not reject `{}`", es),
Err(mut e) => e.cancel(),

View file

@ -15,12 +15,13 @@
extern crate syntax;
extern crate syntax_pos;
use syntax::codemap::FilePathMapping;
use syntax::print::pprust::*;
use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;
fn main() {
let ps = syntax::parse::ParseSess::new();
let ps = syntax::parse::ParseSess::new(FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new(
&ps,

View file

@ -0,0 +1,22 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::fmt::Display;
static BYTE: u8 = 33;
fn main() {
let x: &('static + Display) = &BYTE;
let y: Box<'static + Display> = Box::new(BYTE);
let xstr = format!("{}", x);
let ystr = format!("{}", y);
assert_eq!(xstr, "33");
assert_eq!(ystr, "33");
}

View file

@ -13,9 +13,11 @@
#![feature(rand)]
#![feature(const_fn)]
use std::sync::atomic::{AtomicUsize, Ordering};
use std::__rand::{thread_rng, Rng};
use std::panic;
use std::sync::atomic::{AtomicUsize, Ordering};
use std::thread;
use std::cell::Cell;
const MAX_LEN: usize = 80;
@ -76,6 +78,7 @@ fn test(input: &[DropCounter]) {
let mut panic_countdown = panic_countdown;
v.sort_by(|a, b| {
if panic_countdown == 0 {
SILENCE_PANIC.with(|s| s.set(true));
panic!();
}
panic_countdown -= 1;
@ -94,7 +97,15 @@ fn test(input: &[DropCounter]) {
}
}
thread_local!(static SILENCE_PANIC: Cell<bool> = Cell::new(false));
fn main() {
let prev = panic::take_hook();
panic::set_hook(Box::new(move |info| {
if !SILENCE_PANIC.with(|s| s.get()) {
prev(info);
}
}));
for len in (1..20).chain(70..MAX_LEN) {
// Test on a random array.
let mut rng = thread_rng();

View file

@ -185,8 +185,8 @@ pub struct Config {
// Print one character per test instead of one line
pub quiet: bool,
// where to find the qemu test client process, if we're using it
pub qemu_test_client: Option<PathBuf>,
// where to find the remote test client process, if we're using it
pub remote_test_client: Option<PathBuf>,
// Configuration for various run-make tests frobbing things like C compilers
// or querying about various LLVM component information.

View file

@ -40,23 +40,24 @@ impl EarlyProps {
None,
&mut |ln| {
props.ignore =
props.ignore || parse_name_directive(ln, "ignore-test") ||
parse_name_directive(ln, &ignore_target(config)) ||
parse_name_directive(ln, &ignore_architecture(config)) ||
parse_name_directive(ln, &ignore_stage(config)) ||
parse_name_directive(ln, &ignore_env(config)) ||
(config.mode == common::Pretty && parse_name_directive(ln, "ignore-pretty")) ||
props.ignore || config.parse_name_directive(ln, "ignore-test") ||
config.parse_name_directive(ln, &ignore_target(config)) ||
config.parse_name_directive(ln, &ignore_architecture(config)) ||
config.parse_name_directive(ln, &ignore_stage(config)) ||
config.parse_name_directive(ln, &ignore_env(config)) ||
(config.mode == common::Pretty &&
config.parse_name_directive(ln, "ignore-pretty")) ||
(config.target != config.host &&
parse_name_directive(ln, "ignore-cross-compile")) ||
config.parse_name_directive(ln, "ignore-cross-compile")) ||
ignore_gdb(config, ln) ||
ignore_lldb(config, ln) ||
ignore_llvm(config, ln);
if let Some(s) = parse_aux_build(ln) {
if let Some(s) = config.parse_aux_build(ln) {
props.aux.push(s);
}
props.should_fail = props.should_fail || parse_name_directive(ln, "should-fail");
props.should_fail = props.should_fail || config.parse_name_directive(ln, "should-fail");
});
return props;
@ -80,7 +81,7 @@ impl EarlyProps {
}
if !line.contains("ignore-gdb-version") &&
parse_name_directive(line, "ignore-gdb") {
config.parse_name_directive(line, "ignore-gdb") {
return true;
}
@ -143,7 +144,7 @@ impl EarlyProps {
return false;
}
if parse_name_directive(line, "ignore-lldb") {
if config.parse_name_directive(line, "ignore-lldb") {
return true;
}
@ -260,19 +261,23 @@ impl TestProps {
}
}
pub fn from_aux_file(&self, testfile: &Path, cfg: Option<&str>) -> Self {
pub fn from_aux_file(&self,
testfile: &Path,
cfg: Option<&str>,
config: &Config)
-> Self {
let mut props = TestProps::new();
// copy over select properties to the aux build:
props.incremental_dir = self.incremental_dir.clone();
props.load_from(testfile, cfg);
props.load_from(testfile, cfg, config);
props
}
pub fn from_file(testfile: &Path) -> Self {
pub fn from_file(testfile: &Path, config: &Config) -> Self {
let mut props = TestProps::new();
props.load_from(testfile, None);
props.load_from(testfile, None, config);
props
}
@ -280,85 +285,88 @@ impl TestProps {
/// tied to a particular revision `foo` (indicated by writing
/// `//[foo]`), then the property is ignored unless `cfg` is
/// `Some("foo")`.
pub fn load_from(&mut self, testfile: &Path, cfg: Option<&str>) {
pub fn load_from(&mut self,
testfile: &Path,
cfg: Option<&str>,
config: &Config) {
iter_header(testfile,
cfg,
&mut |ln| {
if let Some(ep) = parse_error_pattern(ln) {
if let Some(ep) = config.parse_error_pattern(ln) {
self.error_patterns.push(ep);
}
if let Some(flags) = parse_compile_flags(ln) {
if let Some(flags) = config.parse_compile_flags(ln) {
self.compile_flags.extend(flags.split_whitespace()
.map(|s| s.to_owned()));
}
if let Some(r) = parse_revisions(ln) {
if let Some(r) = config.parse_revisions(ln) {
self.revisions.extend(r);
}
if self.run_flags.is_none() {
self.run_flags = parse_run_flags(ln);
self.run_flags = config.parse_run_flags(ln);
}
if self.pp_exact.is_none() {
self.pp_exact = parse_pp_exact(ln, testfile);
self.pp_exact = config.parse_pp_exact(ln, testfile);
}
if !self.build_aux_docs {
self.build_aux_docs = parse_build_aux_docs(ln);
self.build_aux_docs = config.parse_build_aux_docs(ln);
}
if !self.force_host {
self.force_host = parse_force_host(ln);
self.force_host = config.parse_force_host(ln);
}
if !self.check_stdout {
self.check_stdout = parse_check_stdout(ln);
self.check_stdout = config.parse_check_stdout(ln);
}
if !self.no_prefer_dynamic {
self.no_prefer_dynamic = parse_no_prefer_dynamic(ln);
self.no_prefer_dynamic = config.parse_no_prefer_dynamic(ln);
}
if !self.pretty_expanded {
self.pretty_expanded = parse_pretty_expanded(ln);
self.pretty_expanded = config.parse_pretty_expanded(ln);
}
if let Some(m) = parse_pretty_mode(ln) {
if let Some(m) = config.parse_pretty_mode(ln) {
self.pretty_mode = m;
}
if !self.pretty_compare_only {
self.pretty_compare_only = parse_pretty_compare_only(ln);
self.pretty_compare_only = config.parse_pretty_compare_only(ln);
}
if let Some(ab) = parse_aux_build(ln) {
if let Some(ab) = config.parse_aux_build(ln) {
self.aux_builds.push(ab);
}
if let Some(ee) = parse_env(ln, "exec-env") {
if let Some(ee) = config.parse_env(ln, "exec-env") {
self.exec_env.push(ee);
}
if let Some(ee) = parse_env(ln, "rustc-env") {
if let Some(ee) = config.parse_env(ln, "rustc-env") {
self.rustc_env.push(ee);
}
if let Some(cl) = parse_check_line(ln) {
if let Some(cl) = config.parse_check_line(ln) {
self.check_lines.push(cl);
}
if let Some(of) = parse_forbid_output(ln) {
if let Some(of) = config.parse_forbid_output(ln) {
self.forbid_output.push(of);
}
if !self.must_compile_successfully {
self.must_compile_successfully = parse_must_compile_successfully(ln);
self.must_compile_successfully = config.parse_must_compile_successfully(ln);
}
if !self.check_test_line_numbers_match {
self.check_test_line_numbers_match = parse_check_test_line_numbers_match(ln);
self.check_test_line_numbers_match = config.parse_check_test_line_numbers_match(ln);
}
});
@ -410,117 +418,121 @@ fn iter_header(testfile: &Path, cfg: Option<&str>, it: &mut FnMut(&str)) {
return;
}
fn parse_error_pattern(line: &str) -> Option<String> {
parse_name_value_directive(line, "error-pattern")
}
impl Config {
fn parse_forbid_output(line: &str) -> Option<String> {
parse_name_value_directive(line, "forbid-output")
}
fn parse_error_pattern(&self, line: &str) -> Option<String> {
self.parse_name_value_directive(line, "error-pattern")
}
fn parse_aux_build(line: &str) -> Option<String> {
parse_name_value_directive(line, "aux-build")
}
fn parse_forbid_output(&self, line: &str) -> Option<String> {
self.parse_name_value_directive(line, "forbid-output")
}
fn parse_compile_flags(line: &str) -> Option<String> {
parse_name_value_directive(line, "compile-flags")
}
fn parse_aux_build(&self, line: &str) -> Option<String> {
self.parse_name_value_directive(line, "aux-build")
}
fn parse_revisions(line: &str) -> Option<Vec<String>> {
parse_name_value_directive(line, "revisions")
.map(|r| r.split_whitespace().map(|t| t.to_string()).collect())
}
fn parse_compile_flags(&self, line: &str) -> Option<String> {
self.parse_name_value_directive(line, "compile-flags")
}
fn parse_run_flags(line: &str) -> Option<String> {
parse_name_value_directive(line, "run-flags")
}
fn parse_revisions(&self, line: &str) -> Option<Vec<String>> {
self.parse_name_value_directive(line, "revisions")
.map(|r| r.split_whitespace().map(|t| t.to_string()).collect())
}
fn parse_check_line(line: &str) -> Option<String> {
parse_name_value_directive(line, "check")
}
fn parse_run_flags(&self, line: &str) -> Option<String> {
self.parse_name_value_directive(line, "run-flags")
}
fn parse_force_host(line: &str) -> bool {
parse_name_directive(line, "force-host")
}
fn parse_check_line(&self, line: &str) -> Option<String> {
self.parse_name_value_directive(line, "check")
}
fn parse_build_aux_docs(line: &str) -> bool {
parse_name_directive(line, "build-aux-docs")
}
fn parse_force_host(&self, line: &str) -> bool {
self.parse_name_directive(line, "force-host")
}
fn parse_check_stdout(line: &str) -> bool {
parse_name_directive(line, "check-stdout")
}
fn parse_build_aux_docs(&self, line: &str) -> bool {
self.parse_name_directive(line, "build-aux-docs")
}
fn parse_no_prefer_dynamic(line: &str) -> bool {
parse_name_directive(line, "no-prefer-dynamic")
}
fn parse_check_stdout(&self, line: &str) -> bool {
self.parse_name_directive(line, "check-stdout")
}
fn parse_pretty_expanded(line: &str) -> bool {
parse_name_directive(line, "pretty-expanded")
}
fn parse_no_prefer_dynamic(&self, line: &str) -> bool {
self.parse_name_directive(line, "no-prefer-dynamic")
}
fn parse_pretty_mode(line: &str) -> Option<String> {
parse_name_value_directive(line, "pretty-mode")
}
fn parse_pretty_expanded(&self, line: &str) -> bool {
self.parse_name_directive(line, "pretty-expanded")
}
fn parse_pretty_compare_only(line: &str) -> bool {
parse_name_directive(line, "pretty-compare-only")
}
fn parse_pretty_mode(&self, line: &str) -> Option<String> {
self.parse_name_value_directive(line, "pretty-mode")
}
fn parse_must_compile_successfully(line: &str) -> bool {
parse_name_directive(line, "must-compile-successfully")
}
fn parse_pretty_compare_only(&self, line: &str) -> bool {
self.parse_name_directive(line, "pretty-compare-only")
}
fn parse_check_test_line_numbers_match(line: &str) -> bool {
parse_name_directive(line, "check-test-line-numbers-match")
}
fn parse_must_compile_successfully(&self, line: &str) -> bool {
self.parse_name_directive(line, "must-compile-successfully")
}
fn parse_env(line: &str, name: &str) -> Option<(String, String)> {
parse_name_value_directive(line, name).map(|nv| {
// nv is either FOO or FOO=BAR
let mut strs: Vec<String> = nv.splitn(2, '=')
.map(str::to_owned)
.collect();
fn parse_check_test_line_numbers_match(&self, line: &str) -> bool {
self.parse_name_directive(line, "check-test-line-numbers-match")
}
match strs.len() {
1 => (strs.pop().unwrap(), "".to_owned()),
2 => {
let end = strs.pop().unwrap();
(strs.pop().unwrap(), end)
fn parse_env(&self, line: &str, name: &str) -> Option<(String, String)> {
self.parse_name_value_directive(line, name).map(|nv| {
// nv is either FOO or FOO=BAR
let mut strs: Vec<String> = nv.splitn(2, '=')
.map(str::to_owned)
.collect();
match strs.len() {
1 => (strs.pop().unwrap(), "".to_owned()),
2 => {
let end = strs.pop().unwrap();
(strs.pop().unwrap(), end)
}
n => panic!("Expected 1 or 2 strings, not {}", n),
}
n => panic!("Expected 1 or 2 strings, not {}", n),
}
})
}
})
}
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<PathBuf> {
if let Some(s) = parse_name_value_directive(line, "pp-exact") {
Some(PathBuf::from(&s))
} else {
if parse_name_directive(line, "pp-exact") {
testfile.file_name().map(PathBuf::from)
fn parse_pp_exact(&self, line: &str, testfile: &Path) -> Option<PathBuf> {
if let Some(s) = self.parse_name_value_directive(line, "pp-exact") {
Some(PathBuf::from(&s))
} else {
if self.parse_name_directive(line, "pp-exact") {
testfile.file_name().map(PathBuf::from)
} else {
None
}
}
}
fn parse_name_directive(&self, line: &str, directive: &str) -> bool {
// This 'no-' rule is a quick hack to allow pretty-expanded and
// no-pretty-expanded to coexist
line.contains(directive) && !line.contains(&("no-".to_owned() + directive))
}
pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option<String> {
let keycolon = format!("{}:", directive);
if let Some(colon) = line.find(&keycolon) {
let value = line[(colon + keycolon.len())..line.len()].to_owned();
debug!("{}: {}", directive, value);
Some(expand_variables(value, self))
} else {
None
}
}
}
fn parse_name_directive(line: &str, directive: &str) -> bool {
// This 'no-' rule is a quick hack to allow pretty-expanded and no-pretty-expanded to coexist
line.contains(directive) && !line.contains(&("no-".to_owned() + directive))
}
pub fn parse_name_value_directive(line: &str, directive: &str) -> Option<String> {
let keycolon = format!("{}:", directive);
if let Some(colon) = line.find(&keycolon) {
let value = line[(colon + keycolon.len())..line.len()].to_owned();
debug!("{}: {}", directive, value);
Some(value)
} else {
None
}
}
pub fn lldb_version_to_int(version_string: &str) -> isize {
let error_string = format!("Encountered LLDB version string with unexpected format: {}",
version_string);
@ -528,3 +540,24 @@ pub fn lldb_version_to_int(version_string: &str) -> isize {
let major: isize = version_string.parse().ok().expect(&error_string);
return major;
}
fn expand_variables(mut value: String, config: &Config) -> String {
const CWD: &'static str = "{{cwd}}";
const SRC_BASE: &'static str = "{{src-base}}";
const BUILD_BASE: &'static str = "{{build-base}}";
if value.contains(CWD) {
let cwd = env::current_dir().unwrap();
value = value.replace(CWD, &cwd.to_string_lossy());
}
if value.contains(SRC_BASE) {
value = value.replace(SRC_BASE, &config.src_base.to_string_lossy());
}
if value.contains(BUILD_BASE) {
value = value.replace(BUILD_BASE, &config.build_base.to_string_lossy());
}
value
}

View file

@ -106,7 +106,7 @@ pub fn parse_config(args: Vec<String> ) -> Config {
reqopt("", "llvm-components", "list of LLVM components built in", "LIST"),
reqopt("", "llvm-cxxflags", "C++ flags for LLVM", "FLAGS"),
optopt("", "nodejs", "the name of nodejs", "PATH"),
optopt("", "qemu-test-client", "path to the qemu test client", "PATH"),
optopt("", "remote-test-client", "path to the remote test client", "PATH"),
optflag("h", "help", "show this message")];
let (argv0, args_) = args.split_first().unwrap();
@ -177,9 +177,7 @@ pub fn parse_config(args: Vec<String> ) -> Config {
llvm_version: matches.opt_str("llvm-version"),
android_cross_path: opt_path(matches, "android-cross-path"),
adb_path: opt_str2(matches.opt_str("adb-path")),
adb_test_dir: format!("{}/{}",
opt_str2(matches.opt_str("adb-test-dir")),
opt_str2(matches.opt_str("target"))),
adb_test_dir: opt_str2(matches.opt_str("adb-test-dir")),
adb_device_status:
opt_str2(matches.opt_str("target")).contains("android") &&
"(none)" != opt_str2(matches.opt_str("adb-test-dir")) &&
@ -187,7 +185,7 @@ pub fn parse_config(args: Vec<String> ) -> Config {
lldb_python_dir: matches.opt_str("lldb-python-dir"),
verbose: matches.opt_present("verbose"),
quiet: matches.opt_present("quiet"),
qemu_test_client: matches.opt_str("qemu-test-client").map(PathBuf::from),
remote_test_client: matches.opt_str("remote-test-client").map(PathBuf::from),
cc: matches.opt_str("cc").unwrap(),
cxx: matches.opt_str("cxx").unwrap(),
@ -252,27 +250,14 @@ pub fn run_tests(config: &Config) {
if let DebugInfoGdb = config.mode {
println!("{} debug-info test uses tcp 5039 port.\
please reserve it", config.target);
}
// android debug-info test uses remote debugger
// so, we test 1 thread at once.
// also trying to isolate problems with adb_run_wrapper.sh ilooping
match config.mode {
// These tests don't actually run code or don't run for android, so
// we don't need to limit ourselves there
Mode::Ui |
Mode::CompileFail |
Mode::ParseFail |
Mode::RunMake |
Mode::Codegen |
Mode::CodegenUnits |
Mode::Pretty |
Mode::Rustdoc => {}
_ => {
env::set_var("RUST_TEST_THREADS", "1");
}
// android debug-info test uses remote debugger so, we test 1 thread
// at once as they're all sharing the same TCP port to communicate
// over.
//
// we should figure out how to lift this restriction! (run them all
// on different ports allocated dynamically).
env::set_var("RUST_TEST_THREADS", "1");
}
}
@ -296,9 +281,10 @@ pub fn run_tests(config: &Config) {
}
DebugInfoGdb => {
if config.qemu_test_client.is_some() {
if config.remote_test_client.is_some() &&
!config.target.contains("android"){
println!("WARNING: debuginfo tests are not available when \
testing with QEMU");
testing with remote");
return
}
}

View file

@ -16,7 +16,6 @@ use errors::{self, ErrorKind, Error};
use filetime::FileTime;
use json;
use header::TestProps;
use header;
use procsrv;
use test::TestPaths;
use uidiff;
@ -24,7 +23,6 @@ use util::logv;
use std::collections::HashSet;
use std::env;
use std::fmt;
use std::fs::{self, File, create_dir_all};
use std::io::prelude::*;
use std::io::{self, BufReader};
@ -57,7 +55,7 @@ pub fn run(config: Config, testpaths: &TestPaths) {
print!("\n\n");
}
debug!("running {:?}", testpaths.file.display());
let base_props = TestProps::from_file(&testpaths.file);
let base_props = TestProps::from_file(&testpaths.file, &config);
let base_cx = TestCx { config: &config,
props: &base_props,
@ -70,7 +68,7 @@ pub fn run(config: Config, testpaths: &TestPaths) {
} else {
for revision in &base_props.revisions {
let mut revision_props = base_props.clone();
revision_props.load_from(&testpaths.file, Some(&revision));
revision_props.load_from(&testpaths.file, Some(&revision), &config);
let rev_cx = TestCx {
config: &config,
props: &revision_props,
@ -469,7 +467,9 @@ actual:\n\
let debugger_run_result;
match &*self.config.target {
"arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android" => {
"arm-linux-androideabi" |
"armv7-linux-androideabi" |
"aarch64-linux-android" => {
cmds = cmds.replace("run", "continue");
@ -534,6 +534,7 @@ actual:\n\
exe_file.file_name().unwrap().to_str()
.unwrap());
debug!("adb arg: {}", adb_arg);
let mut process = procsrv::run_background("",
&self.config.adb_path
,
@ -590,7 +591,7 @@ actual:\n\
};
debugger_run_result = ProcRes {
status: Status::Normal(status),
status: status,
stdout: out,
stderr: err,
cmdline: cmdline
@ -841,7 +842,7 @@ actual:\n\
self.dump_output(&out, &err);
ProcRes {
status: Status::Normal(status),
status: status,
stdout: out,
stderr: err,
cmdline: format!("{:?}", cmd)
@ -867,13 +868,13 @@ actual:\n\
}
for &(ref command_directive, ref check_directive) in &directives {
header::parse_name_value_directive(
self.config.parse_name_value_directive(
&line,
&command_directive).map(|cmd| {
commands.push(cmd)
});
header::parse_name_value_directive(
self.config.parse_name_value_directive(
&line,
&check_directive).map(|cmd| {
check_lines.push(cmd)
@ -1158,7 +1159,9 @@ actual:\n\
if self.props.build_aux_docs {
for rel_ab in &self.props.aux_builds {
let aux_testpaths = self.compute_aux_test_paths(rel_ab);
let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision);
let aux_props = self.props.from_aux_file(&aux_testpaths.file,
self.revision,
self.config);
let aux_cx = TestCx {
config: self.config,
props: &aux_props,
@ -1190,25 +1193,20 @@ actual:\n\
let env = self.props.exec_env.clone();
match &*self.config.target {
"arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android" => {
self._arm_exec_compiled_test(env)
}
// This is pretty similar to below, we're transforming:
//
// program arg1 arg2
//
// into
//
// qemu-test-client run program:support-lib.so arg1 arg2
// remote-test-client run program:support-lib.so arg1 arg2
//
// The test-client program will upload `program` to the emulator
// along with all other support libraries listed (in this case
// `support-lib.so`. It will then execute the program on the
// emulator with the arguments specified (in the environment we give
// the process) and then report back the same result.
_ if self.config.qemu_test_client.is_some() => {
_ if self.config.remote_test_client.is_some() => {
let aux_dir = self.aux_output_dir_name();
let mut args = self.make_run_args();
let mut program = args.prog.clone();
@ -1224,7 +1222,7 @@ actual:\n\
}
args.args.insert(0, program);
args.args.insert(0, "run".to_string());
args.prog = self.config.qemu_test_client.clone().unwrap()
args.prog = self.config.remote_test_client.clone().unwrap()
.into_os_string().into_string().unwrap();
self.compose_and_run(args,
env,
@ -1279,7 +1277,9 @@ actual:\n\
for rel_ab in &self.props.aux_builds {
let aux_testpaths = self.compute_aux_test_paths(rel_ab);
let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision);
let aux_props = self.props.from_aux_file(&aux_testpaths.file,
self.revision,
self.config);
let mut crate_type = if aux_props.no_prefer_dynamic {
Vec::new()
} else {
@ -1324,13 +1324,6 @@ actual:\n\
aux_testpaths.file.display()),
&auxres);
}
match &*self.config.target {
"arm-linux-androideabi" | "armv7-linux-androideabi" | "aarch64-linux-android" => {
self._arm_push_aux_shared_library();
}
_ => {}
}
}
self.compose_and_run(args,
@ -1564,7 +1557,7 @@ actual:\n\
input).expect(&format!("failed to exec `{}`", prog));
self.dump_output(&out, &err);
return ProcRes {
status: Status::Normal(status),
status: status,
stdout: out,
stderr: err,
cmdline: cmdline,
@ -1698,157 +1691,6 @@ actual:\n\
println!("---------------------------------------------------");
}
fn _arm_exec_compiled_test(&self, env: Vec<(String, String)>) -> ProcRes {
let args = self.make_run_args();
let cmdline = self.make_cmdline("", &args.prog, &args.args);
// get bare program string
let mut tvec: Vec<String> = args.prog
.split('/')
.map(str::to_owned)
.collect();
let prog_short = tvec.pop().unwrap();
// copy to target
let copy_result = procsrv::run("",
&self.config.adb_path,
None,
&[
"push".to_owned(),
args.prog.clone(),
self.config.adb_test_dir.clone()
],
vec![("".to_owned(), "".to_owned())],
Some("".to_owned()))
.expect(&format!("failed to exec `{}`", self.config.adb_path));
if self.config.verbose {
println!("push ({}) {} {} {}",
self.config.target,
args.prog,
copy_result.out,
copy_result.err);
}
logv(self.config, format!("executing ({}) {}", self.config.target, cmdline));
let mut runargs = Vec::new();
// run test via adb_run_wrapper
runargs.push("shell".to_owned());
for (key, val) in env {
runargs.push(format!("{}={}", key, val));
}
runargs.push(format!("{}/../adb_run_wrapper.sh", self.config.adb_test_dir));
runargs.push(format!("{}", self.config.adb_test_dir));
runargs.push(format!("{}", prog_short));
for tv in &args.args {
runargs.push(tv.to_owned());
}
procsrv::run("",
&self.config.adb_path,
None,
&runargs,
vec![("".to_owned(), "".to_owned())], Some("".to_owned()))
.expect(&format!("failed to exec `{}`", self.config.adb_path));
// get exitcode of result
runargs = Vec::new();
runargs.push("shell".to_owned());
runargs.push("cat".to_owned());
runargs.push(format!("{}/{}.exitcode", self.config.adb_test_dir, prog_short));
let procsrv::Result{ out: exitcode_out, err: _, status: _ } =
procsrv::run("",
&self.config.adb_path,
None,
&runargs,
vec![("".to_owned(), "".to_owned())],
Some("".to_owned()))
.expect(&format!("failed to exec `{}`", self.config.adb_path));
let mut exitcode: i32 = 0;
for c in exitcode_out.chars() {
if !c.is_numeric() { break; }
exitcode = exitcode * 10 + match c {
'0' ... '9' => c as i32 - ('0' as i32),
_ => 101,
}
}
// get stdout of result
runargs = Vec::new();
runargs.push("shell".to_owned());
runargs.push("cat".to_owned());
runargs.push(format!("{}/{}.stdout", self.config.adb_test_dir, prog_short));
let procsrv::Result{ out: stdout_out, err: _, status: _ } =
procsrv::run("",
&self.config.adb_path,
None,
&runargs,
vec![("".to_owned(), "".to_owned())],
Some("".to_owned()))
.expect(&format!("failed to exec `{}`", self.config.adb_path));
// get stderr of result
runargs = Vec::new();
runargs.push("shell".to_owned());
runargs.push("cat".to_owned());
runargs.push(format!("{}/{}.stderr", self.config.adb_test_dir, prog_short));
let procsrv::Result{ out: stderr_out, err: _, status: _ } =
procsrv::run("",
&self.config.adb_path,
None,
&runargs,
vec![("".to_owned(), "".to_owned())],
Some("".to_owned()))
.expect(&format!("failed to exec `{}`", self.config.adb_path));
self.dump_output(&stdout_out, &stderr_out);
ProcRes {
status: Status::Parsed(exitcode),
stdout: stdout_out,
stderr: stderr_out,
cmdline: cmdline
}
}
fn _arm_push_aux_shared_library(&self) {
let tdir = self.aux_output_dir_name();
let dirs = fs::read_dir(&tdir).unwrap();
for file in dirs {
let file = file.unwrap().path();
if file.extension().and_then(|s| s.to_str()) == Some("so") {
// FIXME (#9639): This needs to handle non-utf8 paths
let copy_result = procsrv::run("",
&self.config.adb_path,
None,
&[
"push".to_owned(),
file.to_str()
.unwrap()
.to_owned(),
self.config.adb_test_dir.to_owned(),
],
vec![("".to_owned(),
"".to_owned())],
Some("".to_owned()))
.expect(&format!("failed to exec `{}`", self.config.adb_path));
if self.config.verbose {
println!("push ({}) {:?} {} {}",
self.config.target, file.display(),
copy_result.out, copy_result.err);
}
}
}
}
// codegen tests (using FileCheck)
fn compile_test_and_save_ir(&self) -> ProcRes {
@ -2347,7 +2189,7 @@ actual:\n\
let output = cmd.output().expect("failed to spawn `make`");
if !output.status.success() {
let res = ProcRes {
status: Status::Normal(output.status),
status: output.status,
stdout: String::from_utf8_lossy(&output.stdout).into_owned(),
stderr: String::from_utf8_lossy(&output.stderr).into_owned(),
cmdline: format!("{:?}", cmd),
@ -2594,17 +2436,12 @@ struct ProcArgs {
}
pub struct ProcRes {
status: Status,
status: ExitStatus,
stdout: String,
stderr: String,
cmdline: String,
}
enum Status {
Parsed(i32),
Normal(ExitStatus),
}
impl ProcRes {
pub fn fatal(&self, err: Option<&str>) -> ! {
if let Some(e) = err {
@ -2628,31 +2465,6 @@ impl ProcRes {
}
}
impl Status {
fn code(&self) -> Option<i32> {
match *self {
Status::Parsed(i) => Some(i),
Status::Normal(ref e) => e.code(),
}
}
fn success(&self) -> bool {
match *self {
Status::Parsed(i) => i == 0,
Status::Normal(ref e) => e.success(),
}
}
}
impl fmt::Display for Status {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
Status::Parsed(i) => write!(f, "exit code: {}", i),
Status::Normal(ref e) => e.fmt(f),
}
}
}
enum TargetLocation {
ThisFile(PathBuf),
ThisDirectory(PathBuf),

View file

@ -1,5 +1,5 @@
[package]
name = "qemu-test-client"
name = "remote-test-client"
version = "0.1.0"
authors = ["The Rust Project Developers"]

View file

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/// This is a small client program intended to pair with `qemu-test-server` in
/// This is a small client program intended to pair with `remote-test-server` in
/// this repository. This client connects to the server over TCP and is used to
/// push artifacts and run tests on the server instead of locally.
///
@ -16,11 +16,11 @@
/// well.
use std::env;
use std::fs::File;
use std::fs::{self, File};
use std::io::prelude::*;
use std::io::{self, BufWriter};
use std::net::TcpStream;
use std::path::Path;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::thread;
use std::time::Duration;
@ -37,8 +37,10 @@ fn main() {
match &args.next().unwrap()[..] {
"spawn-emulator" => {
spawn_emulator(Path::new(&args.next().unwrap()),
Path::new(&args.next().unwrap()))
spawn_emulator(&args.next().unwrap(),
Path::new(&args.next().unwrap()),
Path::new(&args.next().unwrap()),
args.next().map(|s| s.into()))
}
"push" => {
push(Path::new(&args.next().unwrap()))
@ -50,11 +52,74 @@ fn main() {
}
}
fn spawn_emulator(rootfs: &Path, tmpdir: &Path) {
fn spawn_emulator(target: &str,
server: &Path,
tmpdir: &Path,
rootfs: Option<PathBuf>) {
if target.contains("android") {
start_android_emulator(server);
} else {
let rootfs = rootfs.as_ref().expect("need rootfs on non-android");
start_qemu_emulator(rootfs, server, tmpdir);
}
// Wait for the emulator to come online
loop {
let dur = Duration::from_millis(100);
if let Ok(mut client) = TcpStream::connect("127.0.0.1:12345") {
t!(client.set_read_timeout(Some(dur)));
t!(client.set_write_timeout(Some(dur)));
if client.write_all(b"ping").is_ok() {
let mut b = [0; 4];
if client.read_exact(&mut b).is_ok() {
break
}
}
}
thread::sleep(dur);
}
}
fn start_android_emulator(server: &Path) {
println!("waiting for device to come online");
let status = Command::new("adb")
.arg("wait-for-device")
.status()
.unwrap();
assert!(status.success());
println!("pushing server");
let status = Command::new("adb")
.arg("push")
.arg(server)
.arg("/data/tmp/testd")
.status()
.unwrap();
assert!(status.success());
println!("forwarding tcp");
let status = Command::new("adb")
.arg("forward")
.arg("tcp:12345")
.arg("tcp:12345")
.status()
.unwrap();
assert!(status.success());
println!("executing server");
Command::new("adb")
.arg("shell")
.arg("/data/tmp/testd")
.spawn()
.unwrap();
}
fn start_qemu_emulator(rootfs: &Path, server: &Path, tmpdir: &Path) {
// Generate a new rootfs image now that we've updated the test server
// executable. This is the equivalent of:
//
// find $rootfs -print 0 | cpio --null -o --format=newc > rootfs.img
t!(fs::copy(server, rootfs.join("testd")));
let rootfs_img = tmpdir.join("rootfs.img");
let mut cmd = Command::new("cpio");
cmd.arg("--null")
@ -83,22 +148,6 @@ fn spawn_emulator(rootfs: &Path, tmpdir: &Path) {
.arg("-redir").arg("tcp:12345::12345");
t!(cmd.spawn());
// Wait for the emulator to come online
loop {
let dur = Duration::from_millis(100);
if let Ok(mut client) = TcpStream::connect("127.0.0.1:12345") {
t!(client.set_read_timeout(Some(dur)));
t!(client.set_write_timeout(Some(dur)));
if client.write_all(b"ping").is_ok() {
let mut b = [0; 4];
if client.read_exact(&mut b).is_ok() {
break
}
}
}
thread::sleep(dur);
}
fn add_files(w: &mut Write, root: &Path, cur: &Path) {
for entry in t!(cur.read_dir()) {
let entry = t!(entry);
@ -116,11 +165,15 @@ fn push(path: &Path) {
let client = t!(TcpStream::connect("127.0.0.1:12345"));
let mut client = BufWriter::new(client);
t!(client.write_all(b"push"));
t!(client.write_all(path.file_name().unwrap().to_str().unwrap().as_bytes()));
t!(client.write_all(&[0]));
let mut file = t!(File::open(path));
t!(io::copy(&mut file, &mut client));
send(path, &mut client);
t!(client.flush());
// Wait for an acknowledgement that all the data was received. No idea
// why this is necessary, seems like it shouldn't be!
let mut client = client.into_inner().unwrap();
let mut buf = [0; 4];
t!(client.read_exact(&mut buf));
assert_eq!(&buf, b"ack ");
println!("done pushing {:?}", path);
}
@ -137,13 +190,20 @@ fn run(files: String, args: Vec<String>) {
t!(client.write_all(&[0]));
// Send over env vars
//
// Don't send over *everything* though as some env vars are set by and used
// by the client.
for (k, v) in env::vars() {
if k != "PATH" && k != "LD_LIBRARY_PATH" {
t!(client.write_all(k.as_bytes()));
t!(client.write_all(&[0]));
t!(client.write_all(v.as_bytes()));
t!(client.write_all(&[0]));
match &k[..] {
"PATH" |
"LD_LIBRARY_PATH" |
"PWD" => continue,
_ => {}
}
t!(client.write_all(k.as_bytes()));
t!(client.write_all(&[0]));
t!(client.write_all(v.as_bytes()));
t!(client.write_all(&[0]));
}
t!(client.write_all(&[0]));
@ -151,8 +211,6 @@ fn run(files: String, args: Vec<String>) {
let mut files = files.split(':');
let exe = files.next().unwrap();
for file in files.map(Path::new) {
t!(client.write_all(file.file_name().unwrap().to_str().unwrap().as_bytes()));
t!(client.write_all(&[0]));
send(&file, &mut client);
}
t!(client.write_all(&[0]));
@ -209,6 +267,8 @@ fn run(files: String, args: Vec<String>) {
}
fn send(path: &Path, dst: &mut Write) {
t!(dst.write_all(path.file_name().unwrap().to_str().unwrap().as_bytes()));
t!(dst.write_all(&[0]));
let mut file = t!(File::open(&path));
let amt = t!(file.metadata()).len();
t!(dst.write_all(&[

View file

@ -1,5 +1,5 @@
[package]
name = "qemu-test-server"
name = "remote-test-server"
version = "0.1.0"
authors = ["The Rust Project Developers"]

View file

@ -9,8 +9,8 @@
// except according to those terms.
/// This is a small server which is intended to run inside of an emulator. This
/// server pairs with the `qemu-test-client` program in this repository. The
/// `qemu-test-client` connects to this server over a TCP socket and performs
/// server pairs with the `remote-test-client` program in this repository. The
/// `remote-test-client` connects to this server over a TCP socket and performs
/// work such as:
///
/// 1. Pushing shared libraries to the server
@ -20,17 +20,18 @@
/// themselves having support libraries. All data over the TCP sockets is in a
/// basically custom format suiting our needs.
use std::cmp;
use std::fs::{self, File, Permissions};
use std::io::prelude::*;
use std::io::{self, BufReader};
use std::net::{TcpListener, TcpStream};
use std::os::unix::prelude::*;
use std::sync::{Arc, Mutex};
use std::path::Path;
use std::path::{Path, PathBuf};
use std::process::{Command, Stdio};
use std::str;
use std::sync::atomic::{AtomicUsize, ATOMIC_USIZE_INIT, Ordering};
use std::sync::{Arc, Mutex};
use std::thread;
use std::process::{Command, Stdio};
macro_rules! t {
($e:expr) => (match $e {
@ -43,10 +44,14 @@ static TEST: AtomicUsize = ATOMIC_USIZE_INIT;
fn main() {
println!("starting test server");
let listener = t!(TcpListener::bind("10.0.2.15:12345"));
let (listener, work) = if cfg!(target_os = "android") {
(t!(TcpListener::bind("0.0.0.0:12345")), "/data/tmp/work")
} else {
(t!(TcpListener::bind("10.0.2.15:12345")), "/tmp/work")
};
println!("listening!");
let work = Path::new("/tmp/work");
let work = Path::new(work);
t!(fs::create_dir_all(work));
let lock = Arc::new(Mutex::new(()));
@ -54,7 +59,9 @@ fn main() {
for socket in listener.incoming() {
let mut socket = t!(socket);
let mut buf = [0; 4];
t!(socket.read_exact(&mut buf));
if socket.read_exact(&mut buf).is_err() {
continue
}
if &buf[..] == b"ping" {
t!(socket.write_all(b"pong"));
} else if &buf[..] == b"push" {
@ -70,14 +77,10 @@ fn main() {
fn handle_push(socket: TcpStream, work: &Path) {
let mut reader = BufReader::new(socket);
let mut filename = Vec::new();
t!(reader.read_until(0, &mut filename));
filename.pop(); // chop off the 0
let filename = t!(str::from_utf8(&filename));
recv(&work, &mut reader);
let path = work.join(filename);
t!(io::copy(&mut reader, &mut t!(File::create(&path))));
t!(fs::set_permissions(&path, Permissions::from_mode(0o755)));
let mut socket = reader.into_inner();
t!(socket.write_all(b"ack "));
}
struct RemoveOnDrop<'a> {
@ -98,19 +101,19 @@ fn handle_run(socket: TcpStream, work: &Path, lock: &Mutex<()>) {
// space.
let n = TEST.fetch_add(1, Ordering::SeqCst);
let path = work.join(format!("test{}", n));
let exe = path.join("exe");
t!(fs::create_dir(&path));
let _a = RemoveOnDrop { inner: &path };
// First up we'll get a list of arguments delimited with 0 bytes. An empty
// argument means that we're done.
let mut cmd = Command::new(&exe);
let mut args = Vec::new();
while t!(reader.read_until(0, &mut arg)) > 1 {
cmd.arg(t!(str::from_utf8(&arg[..arg.len() - 1])));
args.push(t!(str::from_utf8(&arg[..arg.len() - 1])).to_string());
arg.truncate(0);
}
// Next we'll get a bunch of env vars in pairs delimited by 0s as well
let mut env = Vec::new();
arg.truncate(0);
while t!(reader.read_until(0, &mut arg)) > 1 {
let key_len = arg.len() - 1;
@ -118,9 +121,9 @@ fn handle_run(socket: TcpStream, work: &Path, lock: &Mutex<()>) {
{
let key = &arg[..key_len];
let val = &arg[key_len + 1..][..val_len];
let key = t!(str::from_utf8(key));
let val = t!(str::from_utf8(val));
cmd.env(key, val);
let key = t!(str::from_utf8(key)).to_string();
let val = t!(str::from_utf8(val)).to_string();
env.push((key, val));
}
arg.truncate(0);
}
@ -148,23 +151,23 @@ fn handle_run(socket: TcpStream, work: &Path, lock: &Mutex<()>) {
let lock = lock.lock();
// Next there's a list of dynamic libraries preceded by their filenames.
arg.truncate(0);
while t!(reader.read_until(0, &mut arg)) > 1 {
let dst = path.join(t!(str::from_utf8(&arg[..arg.len() - 1])));
let amt = read_u32(&mut reader) as u64;
t!(io::copy(&mut reader.by_ref().take(amt),
&mut t!(File::create(&dst))));
t!(fs::set_permissions(&dst, Permissions::from_mode(0o755)));
arg.truncate(0);
while t!(reader.fill_buf())[0] != 0 {
recv(&path, &mut reader);
}
assert_eq!(t!(reader.read(&mut [0])), 1);
// Finally we'll get the binary. The other end will tell us how big the
// binary is and then we'll download it all to the exe path we calculated
// earlier.
let amt = read_u32(&mut reader) as u64;
t!(io::copy(&mut reader.by_ref().take(amt),
&mut t!(File::create(&exe))));
t!(fs::set_permissions(&exe, Permissions::from_mode(0o755)));
let exe = recv(&path, &mut reader);
let mut cmd = Command::new(&exe);
for arg in args {
cmd.arg(arg);
}
for (k, v) in env {
cmd.env(k, v);
}
// Support libraries were uploaded to `work` earlier, so make sure that's
// in `LD_LIBRARY_PATH`. Also include our own current dir which may have
@@ -202,6 +205,28 @@ fn handle_run(socket: TcpStream, work: &Path, lock: &Mutex<()>) {
]));
}
/// Receives one file from `io` and writes it into `dir`, returning the path
/// of the file created.
///
/// Wire format: a filename terminated by a 0 byte, then a u32 byte count
/// (decoded by the file-local `read_u32` helper), then exactly that many
/// bytes of file contents. The created file is marked 0o755 since it is
/// typically a test executable or support dylib.
fn recv<B: BufRead>(dir: &Path, io: &mut B) -> PathBuf {
    let mut filename = Vec::new();
    t!(io.read_until(0, &mut filename));
    // We've got some tests with *really* long names. We try to name the test
    // executable the same on the target as it is on the host to aid with
    // debugging, but the targets we're emulating are often more restrictive
    // than the hosts as well.
    //
    // To ensure we can run a maximum number of tests without modifications we
    // just arbitrarily truncate the filename to 50 bytes. That should
    // hopefully allow us to still identify what's running while staying under
    // the filesystem limits.
    //
    // `read_until` includes the 0 terminator in the buffer when it finds one;
    // drop it. Use saturating_sub so a premature EOF (empty buffer, no
    // terminator) yields an empty name instead of an integer-underflow panic
    // (or, in release builds, a wrapped length and out-of-bounds slice).
    let len = cmp::min(filename.len().saturating_sub(1), 50);
    let dst = dir.join(t!(str::from_utf8(&filename[..len])));
    let amt = read_u32(io) as u64;
    t!(io::copy(&mut io.take(amt),
                &mut t!(File::create(&dst))));
    t!(fs::set_permissions(&dst, Permissions::from_mode(0o755)));
    dst
}
fn my_copy(src: &mut Read, which: u8, dst: &Mutex<Write>) {
let mut b = [0; 1024];
loop {