1
Fork 0

Implement a file-path remapping feature in support of debuginfo and reproducible builds.

This commit is contained in:
Michael Woerister 2017-04-24 19:01:19 +02:00
parent b0a4074c5e
commit 39ffea31df
34 changed files with 464 additions and 315 deletions

View file

@ -296,7 +296,7 @@ fn main() {
syntax::errors::registry::Registry::new(&[]), syntax::errors::registry::Registry::new(&[]),
Rc::new(DummyCrateStore)); Rc::new(DummyCrateStore));
let filemap = session.parse_sess.codemap() let filemap = session.parse_sess.codemap()
.new_filemap("<n/a>".to_string(), None, code); .new_filemap("<n/a>".to_string(), code);
let mut lexer = lexer::StringReader::new(session.diagnostic(), filemap); let mut lexer = lexer::StringReader::new(session.diagnostic(), filemap);
let cm = session.codemap(); let cm = session.codemap();

View file

@ -25,6 +25,7 @@ use lint;
use middle::cstore; use middle::cstore;
use syntax::ast::{self, IntTy, UintTy}; use syntax::ast::{self, IntTy, UintTy};
use syntax::codemap::FilePathMapping;
use syntax::parse::token; use syntax::parse::token;
use syntax::parse; use syntax::parse;
use syntax::symbol::Symbol; use syntax::symbol::Symbol;
@ -492,6 +493,14 @@ impl Options {
self.incremental.is_none() || self.incremental.is_none() ||
self.cg.codegen_units == 1 self.cg.codegen_units == 1
} }
pub fn file_path_mapping(&self) -> FilePathMapping {
FilePathMapping::new(
self.debugging_opts.remap_path_prefix_from.iter().zip(
self.debugging_opts.remap_path_prefix_to.iter()
).map(|(src, dst)| (src.clone(), dst.clone())).collect()
)
}
} }
// The type of entry function, so // The type of entry function, so
@ -1012,6 +1021,10 @@ options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"Set the optimization fuel quota for a crate."), "Set the optimization fuel quota for a crate."),
print_fuel: Option<String> = (None, parse_opt_string, [TRACKED], print_fuel: Option<String> = (None, parse_opt_string, [TRACKED],
"Make Rustc print the total optimization fuel used by a crate."), "Make Rustc print the total optimization fuel used by a crate."),
remap_path_prefix_from: Vec<String> = (vec![], parse_string_push, [TRACKED],
"add a source pattern to the file path remapping config"),
remap_path_prefix_to: Vec<String> = (vec![], parse_string_push, [TRACKED],
"add a mapping target to the file path remapping config"),
} }
pub fn default_lib_output() -> CrateType { pub fn default_lib_output() -> CrateType {
@ -1319,7 +1332,7 @@ pub fn rustc_optgroups() -> Vec<RustcOptGroup> {
// Convert strings provided as --cfg [cfgspec] into a crate_cfg // Convert strings provided as --cfg [cfgspec] into a crate_cfg
pub fn parse_cfgspecs(cfgspecs: Vec<String> ) -> ast::CrateConfig { pub fn parse_cfgspecs(cfgspecs: Vec<String> ) -> ast::CrateConfig {
cfgspecs.into_iter().map(|s| { cfgspecs.into_iter().map(|s| {
let sess = parse::ParseSess::new(); let sess = parse::ParseSess::new(FilePathMapping::empty());
let mut parser = let mut parser =
parse::new_parser_from_source_str(&sess, "cfgspec".to_string(), s.to_string()); parse::new_parser_from_source_str(&sess, "cfgspec".to_string(), s.to_string());

View file

@ -74,8 +74,10 @@ pub struct Session {
// The name of the root source file of the crate, in the local file system. // The name of the root source file of the crate, in the local file system.
// The path is always expected to be absolute. `None` means that there is no // The path is always expected to be absolute. `None` means that there is no
// source file. // source file.
pub local_crate_source_file: Option<PathBuf>, pub local_crate_source_file: Option<String>,
pub working_dir: PathBuf, // The directory the compiler has been executed in plus a flag indicating
// if the value stored here has been affected by path remapping.
pub working_dir: (String, bool),
pub lint_store: RefCell<lint::LintStore>, pub lint_store: RefCell<lint::LintStore>,
pub lints: RefCell<lint::LintTable>, pub lints: RefCell<lint::LintTable>,
/// Set of (LintId, span, message) tuples tracking lint (sub)diagnostics /// Set of (LintId, span, message) tuples tracking lint (sub)diagnostics
@ -553,12 +555,14 @@ pub fn build_session(sopts: config::Options,
registry: errors::registry::Registry, registry: errors::registry::Registry,
cstore: Rc<CrateStore>) cstore: Rc<CrateStore>)
-> Session { -> Session {
let file_path_mapping = sopts.file_path_mapping();
build_session_with_codemap(sopts, build_session_with_codemap(sopts,
dep_graph, dep_graph,
local_crate_source_file, local_crate_source_file,
registry, registry,
cstore, cstore,
Rc::new(codemap::CodeMap::new()), Rc::new(codemap::CodeMap::new(file_path_mapping)),
None) None)
} }
@ -631,14 +635,12 @@ pub fn build_session_(sopts: config::Options,
None => Some(filesearch::get_or_default_sysroot()) None => Some(filesearch::get_or_default_sysroot())
}; };
let file_path_mapping = sopts.file_path_mapping();
// Make the path absolute, if necessary // Make the path absolute, if necessary
let local_crate_source_file = local_crate_source_file.map(|path| let local_crate_source_file = local_crate_source_file.map(|path| {
if path.is_absolute() { file_path_mapping.map_prefix(path.to_string_lossy().into_owned()).0
path.clone() });
} else {
env::current_dir().unwrap().join(&path)
}
);
let optimization_fuel_crate = sopts.debugging_opts.fuel.as_ref().map(|i| i.0.clone()); let optimization_fuel_crate = sopts.debugging_opts.fuel.as_ref().map(|i| i.0.clone());
let optimization_fuel_limit = Cell::new(sopts.debugging_opts.fuel.as_ref() let optimization_fuel_limit = Cell::new(sopts.debugging_opts.fuel.as_ref()
@ -646,6 +648,9 @@ pub fn build_session_(sopts: config::Options,
let print_fuel_crate = sopts.debugging_opts.print_fuel.clone(); let print_fuel_crate = sopts.debugging_opts.print_fuel.clone();
let print_fuel = Cell::new(0); let print_fuel = Cell::new(0);
let working_dir = env::current_dir().unwrap().to_string_lossy().into_owned();
let working_dir = file_path_mapping.map_prefix(working_dir);
let sess = Session { let sess = Session {
dep_graph: dep_graph.clone(), dep_graph: dep_graph.clone(),
target: target_cfg, target: target_cfg,
@ -660,7 +665,7 @@ pub fn build_session_(sopts: config::Options,
derive_registrar_fn: Cell::new(None), derive_registrar_fn: Cell::new(None),
default_sysroot: default_sysroot, default_sysroot: default_sysroot,
local_crate_source_file: local_crate_source_file, local_crate_source_file: local_crate_source_file,
working_dir: env::current_dir().unwrap(), working_dir: working_dir,
lint_store: RefCell::new(lint::LintStore::new()), lint_store: RefCell::new(lint::LintStore::new()),
lints: RefCell::new(lint::LintTable::new()), lints: RefCell::new(lint::LintTable::new()),
one_time_diagnostics: RefCell::new(FxHashSet()), one_time_diagnostics: RefCell::new(FxHashSet()),

View file

@ -206,7 +206,7 @@ pub fn run_compiler<'a>(args: &[String],
let cstore = Rc::new(CStore::new(&dep_graph)); let cstore = Rc::new(CStore::new(&dep_graph));
let loader = file_loader.unwrap_or(box RealFileLoader); let loader = file_loader.unwrap_or(box RealFileLoader);
let codemap = Rc::new(CodeMap::with_file_loader(loader)); let codemap = Rc::new(CodeMap::with_file_loader(loader, sopts.file_path_mapping()));
let mut sess = session::build_session_with_codemap( let mut sess = session::build_session_with_codemap(
sopts, &dep_graph, input_file_path, descriptions, cstore.clone(), codemap, emitter_dest, sopts, &dep_graph, input_file_path, descriptions, cstore.clone(), codemap, emitter_dest,
); );

View file

@ -31,7 +31,7 @@ use rustc::session::{self, config};
use std::rc::Rc; use std::rc::Rc;
use syntax::ast; use syntax::ast;
use syntax::abi::Abi; use syntax::abi::Abi;
use syntax::codemap::CodeMap; use syntax::codemap::{CodeMap, FilePathMapping};
use errors; use errors;
use errors::emitter::Emitter; use errors::emitter::Emitter;
use errors::{Level, DiagnosticBuilder}; use errors::{Level, DiagnosticBuilder};
@ -108,7 +108,7 @@ fn test_env<F>(source_string: &str,
&dep_graph, &dep_graph,
None, None,
diagnostic_handler, diagnostic_handler,
Rc::new(CodeMap::new()), Rc::new(CodeMap::new(FilePathMapping::empty())),
cstore.clone()); cstore.clone());
rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess)); rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
let input = config::Input::Str { let input = config::Input::Str {

View file

@ -395,7 +395,7 @@ impl CrateStore for cstore::CStore {
let (name, def) = data.get_macro(id.index); let (name, def) = data.get_macro(id.index);
let source_name = format!("<{} macros>", name); let source_name = format!("<{} macros>", name);
let filemap = sess.parse_sess.codemap().new_filemap(source_name, None, def.body); let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body);
let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION }; let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION };
let body = filemap_to_stream(&sess.parse_sess, filemap); let body = filemap_to_stream(&sess.parse_sess, filemap);

View file

@ -1134,7 +1134,7 @@ impl<'a, 'tcx> CrateMetadata {
// We can't reuse an existing FileMap, so allocate a new one // We can't reuse an existing FileMap, so allocate a new one
// containing the information we need. // containing the information we need.
let syntax_pos::FileMap { name, let syntax_pos::FileMap { name,
abs_path, name_was_remapped,
start_pos, start_pos,
end_pos, end_pos,
lines, lines,
@ -1158,7 +1158,7 @@ impl<'a, 'tcx> CrateMetadata {
} }
let local_version = local_codemap.new_imported_filemap(name, let local_version = local_codemap.new_imported_filemap(name,
abs_path, name_was_remapped,
source_length, source_length,
lines, lines,
multibyte_chars); multibyte_chars);

View file

@ -30,6 +30,7 @@ use std::hash::Hash;
use std::intrinsics; use std::intrinsics;
use std::io::prelude::*; use std::io::prelude::*;
use std::io::Cursor; use std::io::Cursor;
use std::path::Path;
use std::rc::Rc; use std::rc::Rc;
use std::u32; use std::u32;
use syntax::ast::{self, CRATE_NODE_ID}; use syntax::ast::{self, CRATE_NODE_ID};
@ -1268,13 +1269,40 @@ impl<'a, 'tcx> EncodeContext<'a, 'tcx> {
fn encode_codemap(&mut self) -> LazySeq<syntax_pos::FileMap> { fn encode_codemap(&mut self) -> LazySeq<syntax_pos::FileMap> {
let codemap = self.tcx.sess.codemap(); let codemap = self.tcx.sess.codemap();
let all_filemaps = codemap.files.borrow(); let all_filemaps = codemap.files.borrow();
self.lazy_seq_ref(all_filemaps.iter() let adapted = all_filemaps.iter()
.filter(|filemap| { .filter(|filemap| {
// No need to re-export imported filemaps, as any downstream // No need to re-export imported filemaps, as any downstream
// crate will import them from their original source. // crate will import them from their original source.
!filemap.is_imported() !filemap.is_imported()
}) })
.map(|filemap| &**filemap)) .map(|filemap| {
// When exporting FileMaps, we expand all paths to absolute
// paths because any relative paths are potentially relative to
// a wrong directory.
// However, if a path has been modified via
// `-Zremap-path-prefix` we assume the user has already set
// things up the way they want and don't touch the path values
// anymore.
let name = Path::new(&filemap.name);
let (ref working_dir, working_dir_was_remapped) = self.tcx.sess.working_dir;
if filemap.name_was_remapped ||
(name.is_relative() && working_dir_was_remapped) {
// This path of this FileMap has been modified by
// path-remapping, so we use it verbatim (and avoid cloning
// the whole map in the process).
filemap.clone()
} else {
let mut adapted = (**filemap).clone();
let abs_path = Path::new(working_dir).join(name)
.to_string_lossy()
.into_owned();
adapted.name = abs_path;
Rc::new(adapted)
}
})
.collect::<Vec<_>>();
self.lazy_seq_ref(adapted.iter().map(|fm| &**fm))
} }
fn encode_def_path_table(&mut self) -> Lazy<DefPathTable> { fn encode_def_path_table(&mut self) -> Lazy<DefPathTable> {

View file

@ -37,6 +37,7 @@ use rustc::ty::{self, TyCtxt, AssociatedItemContainer};
use std::collections::HashSet; use std::collections::HashSet;
use std::collections::hash_map::DefaultHasher; use std::collections::hash_map::DefaultHasher;
use std::hash::*; use std::hash::*;
use std::path::Path;
use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID}; use syntax::ast::{self, NodeId, PatKind, Attribute, CRATE_NODE_ID};
use syntax::parse::token; use syntax::parse::token;
@ -128,6 +129,7 @@ impl<'l, 'tcx: 'l, 'll, D: Dump + 'll> DumpVisitor<'l, 'tcx, 'll, D> {
pub fn dump_crate_info(&mut self, name: &str, krate: &ast::Crate) { pub fn dump_crate_info(&mut self, name: &str, krate: &ast::Crate) {
let source_file = self.tcx.sess.local_crate_source_file.as_ref(); let source_file = self.tcx.sess.local_crate_source_file.as_ref();
let crate_root = source_file.map(|source_file| { let crate_root = source_file.map(|source_file| {
let source_file = Path::new(source_file);
match source_file.file_name() { match source_file.file_name() {
Some(_) => source_file.parent().unwrap().display().to_string(), Some(_) => source_file.parent().unwrap().display().to_string(),
None => source_file.display().to_string(), None => source_file.display().to_string(),

View file

@ -8,12 +8,12 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use super::FunctionDebugContext; use super::{FunctionDebugContext, FunctionDebugContextData};
use super::metadata::file_metadata; use super::metadata::file_metadata;
use super::utils::{DIB, span_start}; use super::utils::{DIB, span_start};
use llvm; use llvm;
use llvm::debuginfo::{DIScope, DISubprogram}; use llvm::debuginfo::DIScope;
use common::CrateContext; use common::CrateContext;
use rustc::mir::{Mir, VisibilityScope}; use rustc::mir::{Mir, VisibilityScope};
@ -53,8 +53,8 @@ pub fn create_mir_scopes(ccx: &CrateContext, mir: &Mir, debug_context: &Function
}; };
let mut scopes = IndexVec::from_elem(null_scope, &mir.visibility_scopes); let mut scopes = IndexVec::from_elem(null_scope, &mir.visibility_scopes);
let fn_metadata = match *debug_context { let debug_context = match *debug_context {
FunctionDebugContext::RegularContext(ref data) => data.fn_metadata, FunctionDebugContext::RegularContext(ref data) => data,
FunctionDebugContext::DebugInfoDisabled | FunctionDebugContext::DebugInfoDisabled |
FunctionDebugContext::FunctionWithoutDebugInfo => { FunctionDebugContext::FunctionWithoutDebugInfo => {
return scopes; return scopes;
@ -71,7 +71,7 @@ pub fn create_mir_scopes(ccx: &CrateContext, mir: &Mir, debug_context: &Function
// Instantiate all scopes. // Instantiate all scopes.
for idx in 0..mir.visibility_scopes.len() { for idx in 0..mir.visibility_scopes.len() {
let scope = VisibilityScope::new(idx); let scope = VisibilityScope::new(idx);
make_mir_scope(ccx, &mir, &has_variables, fn_metadata, scope, &mut scopes); make_mir_scope(ccx, &mir, &has_variables, debug_context, scope, &mut scopes);
} }
scopes scopes
@ -80,7 +80,7 @@ pub fn create_mir_scopes(ccx: &CrateContext, mir: &Mir, debug_context: &Function
fn make_mir_scope(ccx: &CrateContext, fn make_mir_scope(ccx: &CrateContext,
mir: &Mir, mir: &Mir,
has_variables: &BitVector, has_variables: &BitVector,
fn_metadata: DISubprogram, debug_context: &FunctionDebugContextData,
scope: VisibilityScope, scope: VisibilityScope,
scopes: &mut IndexVec<VisibilityScope, MirDebugScope>) { scopes: &mut IndexVec<VisibilityScope, MirDebugScope>) {
if scopes[scope].is_valid() { if scopes[scope].is_valid() {
@ -89,13 +89,13 @@ fn make_mir_scope(ccx: &CrateContext,
let scope_data = &mir.visibility_scopes[scope]; let scope_data = &mir.visibility_scopes[scope];
let parent_scope = if let Some(parent) = scope_data.parent_scope { let parent_scope = if let Some(parent) = scope_data.parent_scope {
make_mir_scope(ccx, mir, has_variables, fn_metadata, parent, scopes); make_mir_scope(ccx, mir, has_variables, debug_context, parent, scopes);
scopes[parent] scopes[parent]
} else { } else {
// The root is the function itself. // The root is the function itself.
let loc = span_start(ccx, mir.span); let loc = span_start(ccx, mir.span);
scopes[scope] = MirDebugScope { scopes[scope] = MirDebugScope {
scope_metadata: fn_metadata, scope_metadata: debug_context.fn_metadata,
file_start_pos: loc.file.start_pos, file_start_pos: loc.file.start_pos,
file_end_pos: loc.file.end_pos, file_end_pos: loc.file.end_pos,
}; };
@ -109,14 +109,17 @@ fn make_mir_scope(ccx: &CrateContext,
// However, we don't skip creating a nested scope if // However, we don't skip creating a nested scope if
// our parent is the root, because we might want to // our parent is the root, because we might want to
// put arguments in the root and not have shadowing. // put arguments in the root and not have shadowing.
if parent_scope.scope_metadata != fn_metadata { if parent_scope.scope_metadata != debug_context.fn_metadata {
scopes[scope] = parent_scope; scopes[scope] = parent_scope;
return; return;
} }
} }
let loc = span_start(ccx, scope_data.span); let loc = span_start(ccx, scope_data.span);
let file_metadata = file_metadata(ccx, &loc.file.name, &loc.file.abs_path); let file_metadata = file_metadata(ccx,
&loc.file.name,
debug_context.defining_crate);
let scope_metadata = unsafe { let scope_metadata = unsafe {
llvm::LLVMRustDIBuilderCreateLexicalBlock( llvm::LLVMRustDIBuilderCreateLexicalBlock(
DIB(ccx), DIB(ccx),

View file

@ -26,7 +26,7 @@ use llvm::debuginfo::{DIType, DIFile, DIScope, DIDescriptor,
DICompositeType, DILexicalBlock, DIFlags}; DICompositeType, DILexicalBlock, DIFlags};
use rustc::hir::def::CtorKind; use rustc::hir::def::CtorKind;
use rustc::hir::def_id::{DefId, LOCAL_CRATE}; use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE};
use rustc::ty::fold::TypeVisitor; use rustc::ty::fold::TypeVisitor;
use rustc::ty::subst::Substs; use rustc::ty::subst::Substs;
use rustc::ty::util::TypeIdHasher; use rustc::ty::util::TypeIdHasher;
@ -39,14 +39,12 @@ use rustc::ty::{self, AdtKind, Ty};
use rustc::ty::layout::{self, LayoutTyper}; use rustc::ty::layout::{self, LayoutTyper};
use session::config; use session::config;
use util::nodemap::FxHashMap; use util::nodemap::FxHashMap;
use util::common::path2cstr;
use libc::{c_uint, c_longlong}; use libc::{c_uint, c_longlong};
use std::ffi::CString; use std::ffi::CString;
use std::path::Path;
use std::ptr; use std::ptr;
use syntax::ast; use syntax::ast;
use syntax::symbol::{Interner, InternedString}; use syntax::symbol::{Interner, InternedString, Symbol};
use syntax_pos::{self, Span}; use syntax_pos::{self, Span};
@ -349,8 +347,7 @@ fn vec_slice_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
assert!(member_descriptions.len() == member_llvm_types.len()); assert!(member_descriptions.len() == member_llvm_types.len());
let loc = span_start(cx, span); let file_metadata = unknown_file_metadata(cx);
let file_metadata = file_metadata(cx, &loc.file.name, &loc.file.abs_path);
let metadata = composite_type_metadata(cx, let metadata = composite_type_metadata(cx,
slice_llvm_type, slice_llvm_type,
@ -659,44 +656,51 @@ pub fn type_metadata<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
metadata metadata
} }
pub fn file_metadata(cx: &CrateContext, path: &str, full_path: &Option<String>) -> DIFile { pub fn file_metadata(cx: &CrateContext,
// FIXME (#9639): This needs to handle non-utf8 paths file_name: &str,
let work_dir = cx.sess().working_dir.to_str().unwrap(); defining_crate: CrateNum) -> DIFile {
let file_name = debug!("file_metadata: file_name: {}, defining_crate: {}",
full_path.as_ref().map(|p| p.as_str()).unwrap_or_else(|| { file_name,
if path.starts_with(work_dir) { defining_crate);
&path[work_dir.len() + 1..path.len()]
} else {
path
}
});
file_metadata_(cx, path, file_name, &work_dir) let directory = if defining_crate == LOCAL_CRATE {
&cx.sess().working_dir.0[..]
} else {
// If the path comes from an upstream crate we assume it has been made
// independent of the compiler's working directory one way or another.
""
};
file_metadata_raw(cx, file_name, directory)
} }
pub fn unknown_file_metadata(cx: &CrateContext) -> DIFile { pub fn unknown_file_metadata(cx: &CrateContext) -> DIFile {
// Regular filenames should not be empty, so we abuse an empty name as the file_metadata_raw(cx, "<unknown>", "")
// key for the special unknown file metadata
file_metadata_(cx, "", "<unknown>", "")
} }
fn file_metadata_(cx: &CrateContext, key: &str, file_name: &str, work_dir: &str) -> DIFile { fn file_metadata_raw(cx: &CrateContext,
if let Some(file_metadata) = debug_context(cx).created_files.borrow().get(key) { file_name: &str,
directory: &str)
-> DIFile {
let key = (Symbol::intern(file_name), Symbol::intern(directory));
if let Some(file_metadata) = debug_context(cx).created_files.borrow().get(&key) {
return *file_metadata; return *file_metadata;
} }
debug!("file_metadata: file_name: {}, work_dir: {}", file_name, work_dir); debug!("file_metadata: file_name: {}, directory: {}", file_name, directory);
let file_name = CString::new(file_name).unwrap(); let file_name = CString::new(file_name).unwrap();
let work_dir = CString::new(work_dir).unwrap(); let directory = CString::new(directory).unwrap();
let file_metadata = unsafe { let file_metadata = unsafe {
llvm::LLVMRustDIBuilderCreateFile(DIB(cx), file_name.as_ptr(), llvm::LLVMRustDIBuilderCreateFile(DIB(cx),
work_dir.as_ptr()) file_name.as_ptr(),
directory.as_ptr())
}; };
let mut created_files = debug_context(cx).created_files.borrow_mut(); let mut created_files = debug_context(cx).created_files.borrow_mut();
created_files.insert(key.to_string(), file_metadata); created_files.insert(key, file_metadata);
file_metadata file_metadata
} }
@ -761,25 +765,10 @@ pub fn compile_unit_metadata(scc: &SharedCrateContext,
debug_context: &CrateDebugContext, debug_context: &CrateDebugContext,
sess: &Session) sess: &Session)
-> DIDescriptor { -> DIDescriptor {
let work_dir = &sess.working_dir;
let compile_unit_name = match sess.local_crate_source_file { let compile_unit_name = match sess.local_crate_source_file {
None => fallback_path(scc), None => fallback_path(scc),
Some(ref abs_path) => { Some(ref path) => {
if abs_path.is_relative() { CString::new(&path[..]).unwrap()
sess.warn("debuginfo: Invalid path to crate's local root source file!");
fallback_path(scc)
} else {
match abs_path.strip_prefix(work_dir) {
Ok(ref p) if p.is_relative() => {
if p.starts_with(Path::new("./")) {
path2cstr(p)
} else {
path2cstr(&Path::new(".").join(p))
}
}
_ => fallback_path(scc)
}
}
} }
}; };
@ -789,7 +778,8 @@ pub fn compile_unit_metadata(scc: &SharedCrateContext,
(option_env!("CFG_VERSION")).expect("CFG_VERSION")); (option_env!("CFG_VERSION")).expect("CFG_VERSION"));
let compile_unit_name = compile_unit_name.as_ptr(); let compile_unit_name = compile_unit_name.as_ptr();
let work_dir = path2cstr(&work_dir);
let work_dir = CString::new(&sess.working_dir.0[..]).unwrap();
let producer = CString::new(producer).unwrap(); let producer = CString::new(producer).unwrap();
let flags = "\0"; let flags = "\0";
let split_name = "\0"; let split_name = "\0";
@ -1760,7 +1750,7 @@ pub fn create_global_var_metadata(cx: &CrateContext,
let (file_metadata, line_number) = if span != syntax_pos::DUMMY_SP { let (file_metadata, line_number) = if span != syntax_pos::DUMMY_SP {
let loc = span_start(cx, span); let loc = span_start(cx, span);
(file_metadata(cx, &loc.file.name, &loc.file.abs_path), loc.line as c_uint) (file_metadata(cx, &loc.file.name, LOCAL_CRATE), loc.line as c_uint)
} else { } else {
(unknown_file_metadata(cx), UNKNOWN_LINE_NUMBER) (unknown_file_metadata(cx), UNKNOWN_LINE_NUMBER)
}; };
@ -1795,9 +1785,10 @@ pub fn create_global_var_metadata(cx: &CrateContext,
// Creates an "extension" of an existing DIScope into another file. // Creates an "extension" of an existing DIScope into another file.
pub fn extend_scope_to_file(ccx: &CrateContext, pub fn extend_scope_to_file(ccx: &CrateContext,
scope_metadata: DIScope, scope_metadata: DIScope,
file: &syntax_pos::FileMap) file: &syntax_pos::FileMap,
defining_crate: CrateNum)
-> DILexicalBlock { -> DILexicalBlock {
let file_metadata = file_metadata(ccx, &file.name, &file.abs_path); let file_metadata = file_metadata(ccx, &file.name, defining_crate);
unsafe { unsafe {
llvm::LLVMRustDIBuilderCreateLexicalBlockFile( llvm::LLVMRustDIBuilderCreateLexicalBlockFile(
DIB(ccx), DIB(ccx),

View file

@ -23,7 +23,7 @@ use self::source_loc::InternalDebugLocation::{self, UnknownLocation};
use llvm; use llvm;
use llvm::{ModuleRef, ContextRef, ValueRef}; use llvm::{ModuleRef, ContextRef, ValueRef};
use llvm::debuginfo::{DIFile, DIType, DIScope, DIBuilderRef, DISubprogram, DIArray, DIFlags}; use llvm::debuginfo::{DIFile, DIType, DIScope, DIBuilderRef, DISubprogram, DIArray, DIFlags};
use rustc::hir::def_id::DefId; use rustc::hir::def_id::{DefId, CrateNum};
use rustc::ty::subst::Substs; use rustc::ty::subst::Substs;
use abi::Abi; use abi::Abi;
@ -42,6 +42,7 @@ use std::ptr;
use syntax_pos::{self, Span, Pos}; use syntax_pos::{self, Span, Pos};
use syntax::ast; use syntax::ast;
use syntax::symbol::Symbol;
use rustc::ty::layout; use rustc::ty::layout;
pub mod gdb; pub mod gdb;
@ -67,7 +68,7 @@ const DW_TAG_arg_variable: c_uint = 0x101;
pub struct CrateDebugContext<'tcx> { pub struct CrateDebugContext<'tcx> {
llcontext: ContextRef, llcontext: ContextRef,
builder: DIBuilderRef, builder: DIBuilderRef,
created_files: RefCell<FxHashMap<String, DIFile>>, created_files: RefCell<FxHashMap<(Symbol, Symbol), DIFile>>,
created_enum_disr_types: RefCell<FxHashMap<(DefId, layout::Integer), DIType>>, created_enum_disr_types: RefCell<FxHashMap<(DefId, layout::Integer), DIType>>,
type_map: RefCell<TypeMap<'tcx>>, type_map: RefCell<TypeMap<'tcx>>,
@ -103,7 +104,7 @@ pub enum FunctionDebugContext {
} }
impl FunctionDebugContext { impl FunctionDebugContext {
fn get_ref<'a>(&'a self, span: Span) -> &'a FunctionDebugContextData { pub fn get_ref<'a>(&'a self, span: Span) -> &'a FunctionDebugContextData {
match *self { match *self {
FunctionDebugContext::RegularContext(ref data) => data, FunctionDebugContext::RegularContext(ref data) => data,
FunctionDebugContext::DebugInfoDisabled => { FunctionDebugContext::DebugInfoDisabled => {
@ -128,6 +129,7 @@ impl FunctionDebugContext {
pub struct FunctionDebugContextData { pub struct FunctionDebugContextData {
fn_metadata: DISubprogram, fn_metadata: DISubprogram,
source_locations_enabled: Cell<bool>, source_locations_enabled: Cell<bool>,
pub defining_crate: CrateNum,
} }
pub enum VariableAccess<'a> { pub enum VariableAccess<'a> {
@ -220,8 +222,9 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
return FunctionDebugContext::FunctionWithoutDebugInfo; return FunctionDebugContext::FunctionWithoutDebugInfo;
} }
let def_id = instance.def_id();
let loc = span_start(cx, span); let loc = span_start(cx, span);
let file_metadata = file_metadata(cx, &loc.file.name, &loc.file.abs_path); let file_metadata = file_metadata(cx, &loc.file.name, def_id.krate);
let function_type_metadata = unsafe { let function_type_metadata = unsafe {
let fn_signature = get_function_signature(cx, sig); let fn_signature = get_function_signature(cx, sig);
@ -229,15 +232,15 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
}; };
// Find the enclosing function, in case this is a closure. // Find the enclosing function, in case this is a closure.
let def_key = cx.tcx().def_key(instance.def_id()); let def_key = cx.tcx().def_key(def_id);
let mut name = def_key.disambiguated_data.data.to_string(); let mut name = def_key.disambiguated_data.data.to_string();
let name_len = name.len(); let name_len = name.len();
let fn_def_id = cx.tcx().closure_base_def_id(instance.def_id()); let enclosing_fn_def_id = cx.tcx().closure_base_def_id(def_id);
// Get_template_parameters() will append a `<...>` clause to the function // Get_template_parameters() will append a `<...>` clause to the function
// name if necessary. // name if necessary.
let generics = cx.tcx().generics_of(fn_def_id); let generics = cx.tcx().generics_of(enclosing_fn_def_id);
let substs = instance.substs.truncate_to(cx.tcx(), generics); let substs = instance.substs.truncate_to(cx.tcx(), generics);
let template_parameters = get_template_parameters(cx, let template_parameters = get_template_parameters(cx,
&generics, &generics,
@ -289,6 +292,7 @@ pub fn create_function_debug_context<'a, 'tcx>(cx: &CrateContext<'a, 'tcx>,
let fn_debug_context = FunctionDebugContextData { let fn_debug_context = FunctionDebugContextData {
fn_metadata: fn_metadata, fn_metadata: fn_metadata,
source_locations_enabled: Cell::new(false), source_locations_enabled: Cell::new(false),
defining_crate: def_id.krate,
}; };
return FunctionDebugContext::RegularContext(fn_debug_context); return FunctionDebugContext::RegularContext(fn_debug_context);
@ -438,8 +442,9 @@ pub fn declare_local<'a, 'tcx>(bcx: &Builder<'a, 'tcx>,
let cx = bcx.ccx; let cx = bcx.ccx;
let file = span_start(cx, span).file; let file = span_start(cx, span).file;
let filename = file.name.clone(); let file_metadata = file_metadata(cx,
let file_metadata = file_metadata(cx, &filename[..], &file.abs_path); &file.name[..],
dbg_context.get_ref(span).defining_crate);
let loc = span_start(cx, span); let loc = span_start(cx, span);
let type_metadata = type_metadata(cx, variable_type, span); let type_metadata = type_metadata(cx, variable_type, span);

View file

@ -72,7 +72,7 @@ pub fn item_namespace(ccx: &CrateContext, def_id: DefId) -> DIScope {
let span = ccx.tcx().def_span(def_id); let span = ccx.tcx().def_span(def_id);
let (file, line) = if span != DUMMY_SP { let (file, line) = if span != DUMMY_SP {
let loc = span_start(ccx, span); let loc = span_start(ccx, span);
(file_metadata(ccx, &loc.file.name, &loc.file.abs_path), loc.line as c_uint) (file_metadata(ccx, &loc.file.name, def_id.krate), loc.line as c_uint)
} else { } else {
(unknown_file_metadata(ccx), UNKNOWN_LINE_NUMBER) (unknown_file_metadata(ccx), UNKNOWN_LINE_NUMBER)
}; };

View file

@ -157,7 +157,11 @@ impl<'a, 'tcx> MirContext<'a, 'tcx> {
if pos < self.scopes[scope_id].file_start_pos || if pos < self.scopes[scope_id].file_start_pos ||
pos >= self.scopes[scope_id].file_end_pos { pos >= self.scopes[scope_id].file_end_pos {
let cm = self.ccx.sess().codemap(); let cm = self.ccx.sess().codemap();
debuginfo::extend_scope_to_file(self.ccx, scope_metadata, &cm.lookup_char_pos(pos).file) let defining_crate = self.debug_context.get_ref(DUMMY_SP).defining_crate;
debuginfo::extend_scope_to_file(self.ccx,
scope_metadata,
&cm.lookup_char_pos(pos).file,
defining_crate)
} else { } else {
scope_metadata scope_metadata
} }

View file

@ -129,7 +129,7 @@ pub fn run_core(search_paths: SearchPaths,
..config::basic_options().clone() ..config::basic_options().clone()
}; };
let codemap = Rc::new(codemap::CodeMap::new()); let codemap = Rc::new(codemap::CodeMap::new(sessopts.file_path_mapping()));
let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto, let diagnostic_handler = errors::Handler::with_tty_emitter(ColorConfig::Auto,
true, true,
false, false,

View file

@ -26,7 +26,7 @@ use std::fmt::Display;
use std::io; use std::io;
use std::io::prelude::*; use std::io::prelude::*;
use syntax::codemap::CodeMap; use syntax::codemap::{CodeMap, FilePathMapping};
use syntax::parse::lexer::{self, TokenAndSpan}; use syntax::parse::lexer::{self, TokenAndSpan};
use syntax::parse::token; use syntax::parse::token;
use syntax::parse; use syntax::parse;
@ -36,8 +36,8 @@ use syntax_pos::Span;
pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>, pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>,
extension: Option<&str>) -> String { extension: Option<&str>) -> String {
debug!("highlighting: ================\n{}\n==============", src); debug!("highlighting: ================\n{}\n==============", src);
let sess = parse::ParseSess::new(); let sess = parse::ParseSess::new(FilePathMapping::empty());
let fm = sess.codemap().new_filemap("<stdin>".to_string(), None, src.to_string()); let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
let mut out = Vec::new(); let mut out = Vec::new();
write_header(class, id, &mut out).unwrap(); write_header(class, id, &mut out).unwrap();
@ -58,8 +58,8 @@ pub fn render_with_highlighting(src: &str, class: Option<&str>, id: Option<&str>
/// be inserted into an element. C.f., `render_with_highlighting` which includes /// be inserted into an element. C.f., `render_with_highlighting` which includes
/// an enclosing `<pre>` block. /// an enclosing `<pre>` block.
pub fn render_inner_with_highlighting(src: &str) -> io::Result<String> { pub fn render_inner_with_highlighting(src: &str) -> io::Result<String> {
let sess = parse::ParseSess::new(); let sess = parse::ParseSess::new(FilePathMapping::empty());
let fm = sess.codemap().new_filemap("<stdin>".to_string(), None, src.to_string()); let fm = sess.codemap().new_filemap("<stdin>".to_string(), src.to_string());
let mut out = Vec::new(); let mut out = Vec::new();
let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap()); let mut classifier = Classifier::new(lexer::StringReader::new(&sess, fm), sess.codemap());

View file

@ -74,7 +74,7 @@ pub fn run(input: &str,
..config::basic_options().clone() ..config::basic_options().clone()
}; };
let codemap = Rc::new(CodeMap::new()); let codemap = Rc::new(CodeMap::new(sessopts.file_path_mapping()));
let handler = let handler =
errors::Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(codemap.clone())); errors::Handler::with_tty_emitter(ColorConfig::Auto, true, false, Some(codemap.clone()));
@ -217,7 +217,7 @@ fn runtest(test: &str, cratename: &str, cfgs: Vec<String>, libs: SearchPaths,
} }
} }
let data = Arc::new(Mutex::new(Vec::new())); let data = Arc::new(Mutex::new(Vec::new()));
let codemap = Rc::new(CodeMap::new()); let codemap = Rc::new(CodeMap::new(sessopts.file_path_mapping()));
let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()), let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()),
Some(codemap.clone())); Some(codemap.clone()));
let old = io::set_panic(Some(box Sink(data.clone()))); let old = io::set_panic(Some(box Sink(data.clone())));

View file

@ -104,32 +104,42 @@ impl FileLoader for RealFileLoader {
pub struct CodeMap { pub struct CodeMap {
pub files: RefCell<Vec<Rc<FileMap>>>, pub files: RefCell<Vec<Rc<FileMap>>>,
file_loader: Box<FileLoader> file_loader: Box<FileLoader>,
// This is used to apply the file path remapping as specified via
// -Zremap-path-prefix to all FileMaps allocated within this CodeMap.
path_mapping: FilePathMapping,
} }
impl CodeMap { impl CodeMap {
pub fn new() -> CodeMap { pub fn new(path_mapping: FilePathMapping) -> CodeMap {
CodeMap { CodeMap {
files: RefCell::new(Vec::new()), files: RefCell::new(Vec::new()),
file_loader: Box::new(RealFileLoader) file_loader: Box::new(RealFileLoader),
path_mapping: path_mapping,
} }
} }
pub fn with_file_loader(file_loader: Box<FileLoader>) -> CodeMap { pub fn with_file_loader(file_loader: Box<FileLoader>,
path_mapping: FilePathMapping)
-> CodeMap {
CodeMap { CodeMap {
files: RefCell::new(Vec::new()), files: RefCell::new(Vec::new()),
file_loader: file_loader file_loader: file_loader,
path_mapping: path_mapping,
} }
} }
pub fn path_mapping(&self) -> &FilePathMapping {
&self.path_mapping
}
pub fn file_exists(&self, path: &Path) -> bool { pub fn file_exists(&self, path: &Path) -> bool {
self.file_loader.file_exists(path) self.file_loader.file_exists(path)
} }
pub fn load_file(&self, path: &Path) -> io::Result<Rc<FileMap>> { pub fn load_file(&self, path: &Path) -> io::Result<Rc<FileMap>> {
let src = self.file_loader.read_file(path)?; let src = self.file_loader.read_file(path)?;
let abs_path = self.file_loader.abs_path(path).map(|p| p.to_str().unwrap().to_string()); Ok(self.new_filemap(path.to_str().unwrap().to_string(), src))
Ok(self.new_filemap(path.to_str().unwrap().to_string(), abs_path, src))
} }
fn next_start_pos(&self) -> usize { fn next_start_pos(&self) -> usize {
@ -144,8 +154,7 @@ impl CodeMap {
/// Creates a new filemap without setting its line information. If you don't /// Creates a new filemap without setting its line information. If you don't
/// intend to set the line information yourself, you should use new_filemap_and_lines. /// intend to set the line information yourself, you should use new_filemap_and_lines.
pub fn new_filemap(&self, filename: FileName, abs_path: Option<FileName>, pub fn new_filemap(&self, filename: FileName, mut src: String) -> Rc<FileMap> {
mut src: String) -> Rc<FileMap> {
let start_pos = self.next_start_pos(); let start_pos = self.next_start_pos();
let mut files = self.files.borrow_mut(); let mut files = self.files.borrow_mut();
@ -156,9 +165,11 @@ impl CodeMap {
let end_pos = start_pos + src.len(); let end_pos = start_pos + src.len();
let (filename, was_remapped) = self.path_mapping.map_prefix(filename);
let filemap = Rc::new(FileMap { let filemap = Rc::new(FileMap {
name: filename, name: filename,
abs_path: abs_path, name_was_remapped: was_remapped,
src: Some(Rc::new(src)), src: Some(Rc::new(src)),
start_pos: Pos::from_usize(start_pos), start_pos: Pos::from_usize(start_pos),
end_pos: Pos::from_usize(end_pos), end_pos: Pos::from_usize(end_pos),
@ -172,11 +183,8 @@ impl CodeMap {
} }
/// Creates a new filemap and sets its line information. /// Creates a new filemap and sets its line information.
pub fn new_filemap_and_lines(&self, filename: &str, abs_path: Option<&str>, pub fn new_filemap_and_lines(&self, filename: &str, src: &str) -> Rc<FileMap> {
src: &str) -> Rc<FileMap> { let fm = self.new_filemap(filename.to_string(), src.to_owned());
let fm = self.new_filemap(filename.to_string(),
abs_path.map(|s| s.to_owned()),
src.to_owned());
let mut byte_pos: u32 = fm.start_pos.0; let mut byte_pos: u32 = fm.start_pos.0;
for line in src.lines() { for line in src.lines() {
// register the start of this line // register the start of this line
@ -195,7 +203,7 @@ impl CodeMap {
/// information for things inlined from other crates. /// information for things inlined from other crates.
pub fn new_imported_filemap(&self, pub fn new_imported_filemap(&self,
filename: FileName, filename: FileName,
abs_path: Option<FileName>, name_was_remapped: bool,
source_len: usize, source_len: usize,
mut file_local_lines: Vec<BytePos>, mut file_local_lines: Vec<BytePos>,
mut file_local_multibyte_chars: Vec<MultiByteChar>) mut file_local_multibyte_chars: Vec<MultiByteChar>)
@ -216,7 +224,7 @@ impl CodeMap {
let filemap = Rc::new(FileMap { let filemap = Rc::new(FileMap {
name: filename, name: filename,
abs_path: abs_path, name_was_remapped: name_was_remapped,
src: None, src: None,
start_pos: start_pos, start_pos: start_pos,
end_pos: end_pos, end_pos: end_pos,
@ -550,6 +558,42 @@ impl CodeMapper for CodeMap {
} }
} }
/// A prefix-substitution table for source-file paths, populated from the
/// `-Zremap-path-prefix-from`/`-to` command-line flags. Used to scrub
/// machine-specific path prefixes out of debuginfo for reproducible builds.
#[derive(Clone)]
pub struct FilePathMapping {
    // `(from, to)` prefix pairs, kept in the order they appeared on the
    // command line.
    mapping: Vec<(String, String)>,
}

impl FilePathMapping {
    /// A mapping with no entries: every path is returned unchanged.
    pub fn empty() -> FilePathMapping {
        FilePathMapping { mapping: Vec::new() }
    }

    /// Builds a mapping from explicit `(from, to)` prefix pairs.
    pub fn new(mapping: Vec<(String, String)>) -> FilePathMapping {
        FilePathMapping { mapping }
    }

    /// Applies any path prefix substitution as defined by the mapping.
    /// The return value is the remapped path and a boolean indicating whether
    /// the path was affected by the mapping.
    pub fn map_prefix(&self, path: String) -> (String, bool) {
        // Entries specified later on the command line take precedence, so
        // scan the table from last to first and stop at the first hit.
        for &(ref from, ref to) in self.mapping.iter().rev() {
            if path.starts_with(from) {
                // The match is anchored at index 0, so splicing `to` onto the
                // remainder is equivalent to replacing the first occurrence.
                return (format!("{}{}", to, &path[from.len()..]), true);
            }
        }
        (path, false)
    }
}
// _____________________________________________________________________________ // _____________________________________________________________________________
// Tests // Tests
// //
@ -561,9 +605,8 @@ mod tests {
#[test] #[test]
fn t1 () { fn t1 () {
let cm = CodeMap::new(); let cm = CodeMap::new(FilePathMapping::empty());
let fm = cm.new_filemap("blork.rs".to_string(), let fm = cm.new_filemap("blork.rs".to_string(),
None,
"first line.\nsecond line".to_string()); "first line.\nsecond line".to_string());
fm.next_line(BytePos(0)); fm.next_line(BytePos(0));
// Test we can get lines with partial line info. // Test we can get lines with partial line info.
@ -578,9 +621,8 @@ mod tests {
#[test] #[test]
#[should_panic] #[should_panic]
fn t2 () { fn t2 () {
let cm = CodeMap::new(); let cm = CodeMap::new(FilePathMapping::empty());
let fm = cm.new_filemap("blork.rs".to_string(), let fm = cm.new_filemap("blork.rs".to_string(),
None,
"first line.\nsecond line".to_string()); "first line.\nsecond line".to_string());
// TESTING *REALLY* BROKEN BEHAVIOR: // TESTING *REALLY* BROKEN BEHAVIOR:
fm.next_line(BytePos(0)); fm.next_line(BytePos(0));
@ -589,15 +631,12 @@ mod tests {
} }
fn init_code_map() -> CodeMap { fn init_code_map() -> CodeMap {
let cm = CodeMap::new(); let cm = CodeMap::new(FilePathMapping::empty());
let fm1 = cm.new_filemap("blork.rs".to_string(), let fm1 = cm.new_filemap("blork.rs".to_string(),
None,
"first line.\nsecond line".to_string()); "first line.\nsecond line".to_string());
let fm2 = cm.new_filemap("empty.rs".to_string(), let fm2 = cm.new_filemap("empty.rs".to_string(),
None,
"".to_string()); "".to_string());
let fm3 = cm.new_filemap("blork2.rs".to_string(), let fm3 = cm.new_filemap("blork2.rs".to_string(),
None,
"first line.\nsecond line".to_string()); "first line.\nsecond line".to_string());
fm1.next_line(BytePos(0)); fm1.next_line(BytePos(0));
@ -656,14 +695,12 @@ mod tests {
} }
fn init_code_map_mbc() -> CodeMap { fn init_code_map_mbc() -> CodeMap {
let cm = CodeMap::new(); let cm = CodeMap::new(FilePathMapping::empty());
// € is a three byte utf8 char. // € is a three byte utf8 char.
let fm1 = let fm1 =
cm.new_filemap("blork.rs".to_string(), cm.new_filemap("blork.rs".to_string(),
None,
"fir€st €€€€ line.\nsecond line".to_string()); "fir€st €€€€ line.\nsecond line".to_string());
let fm2 = cm.new_filemap("blork2.rs".to_string(), let fm2 = cm.new_filemap("blork2.rs".to_string(),
None,
"first line€€.\n€ second line".to_string()); "first line€€.\n€ second line".to_string());
fm1.next_line(BytePos(0)); fm1.next_line(BytePos(0));
@ -728,10 +765,10 @@ mod tests {
/// lines in the middle of a file. /// lines in the middle of a file.
#[test] #[test]
fn span_to_snippet_and_lines_spanning_multiple_lines() { fn span_to_snippet_and_lines_spanning_multiple_lines() {
let cm = CodeMap::new(); let cm = CodeMap::new(FilePathMapping::empty());
let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n"; let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
let selection = " \n ~~\n~~~\n~~~~~ \n \n"; let selection = " \n ~~\n~~~\n~~~~~ \n \n";
cm.new_filemap_and_lines("blork.rs", None, inputtext); cm.new_filemap_and_lines("blork.rs", inputtext);
let span = span_from_selection(inputtext, selection); let span = span_from_selection(inputtext, selection);
// check that we are extracting the text we thought we were extracting // check that we are extracting the text we thought we were extracting
@ -770,11 +807,11 @@ mod tests {
/// Test failing to merge two spans on different lines /// Test failing to merge two spans on different lines
#[test] #[test]
fn span_merging_fail() { fn span_merging_fail() {
let cm = CodeMap::new(); let cm = CodeMap::new(FilePathMapping::empty());
let inputtext = "bbbb BB\ncc CCC\n"; let inputtext = "bbbb BB\ncc CCC\n";
let selection1 = " ~~\n \n"; let selection1 = " ~~\n \n";
let selection2 = " \n ~~~\n"; let selection2 = " \n ~~~\n";
cm.new_filemap_and_lines("blork.rs", None, inputtext); cm.new_filemap_and_lines("blork.rs", inputtext);
let span1 = span_from_selection(inputtext, selection1); let span1 = span_from_selection(inputtext, selection1);
let span2 = span_from_selection(inputtext, selection2); let span2 = span_from_selection(inputtext, selection2);

View file

@ -783,7 +783,7 @@ fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream { fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
let filename = String::from("<macro expansion>"); let filename = String::from("<macro expansion>");
filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, None, text)) filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text))
} }
impl<'a, 'b> Folder for InvocationCollector<'a, 'b> { impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {

View file

@ -142,7 +142,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenT
// Add this input file to the code map to make it available as // Add this input file to the code map to make it available as
// dependency information // dependency information
let filename = format!("{}", file.display()); let filename = format!("{}", file.display());
cx.codemap().new_filemap_and_lines(&filename, None, &src); cx.codemap().new_filemap_and_lines(&filename, &src);
base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src))) base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src)))
} }
@ -173,7 +173,7 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
// Add this input file to the code map to make it available as // Add this input file to the code map to make it available as
// dependency information, but don't enter it's contents // dependency information, but don't enter it's contents
let filename = format!("{}", file.display()); let filename = format!("{}", file.display());
cx.codemap().new_filemap_and_lines(&filename, None, ""); cx.codemap().new_filemap_and_lines(&filename, "");
base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Rc::new(bytes)))) base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Rc::new(bytes))))
} }

View file

@ -19,7 +19,7 @@
// FIXME spec the JSON output properly. // FIXME spec the JSON output properly.
use codemap::CodeMap; use codemap::{CodeMap, FilePathMapping};
use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan}; use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
use errors::registry::Registry; use errors::registry::Registry;
use errors::{DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion, CodeMapper}; use errors::{DiagnosticBuilder, SubDiagnostic, RenderSpan, CodeSuggestion, CodeMapper};
@ -48,7 +48,8 @@ impl JsonEmitter {
} }
pub fn basic() -> JsonEmitter { pub fn basic() -> JsonEmitter {
JsonEmitter::stderr(None, Rc::new(CodeMap::new())) let file_path_mapping = FilePathMapping::empty();
JsonEmitter::stderr(None, Rc::new(CodeMap::new(file_path_mapping)))
} }
pub fn new(dst: Box<Write + Send>, pub fn new(dst: Box<Write + Send>,

View file

@ -348,8 +348,8 @@ pub fn gather_comments_and_literals(sess: &ParseSess, path: String, srdr: &mut R
let mut src = Vec::new(); let mut src = Vec::new();
srdr.read_to_end(&mut src).unwrap(); srdr.read_to_end(&mut src).unwrap();
let src = String::from_utf8(src).unwrap(); let src = String::from_utf8(src).unwrap();
let cm = CodeMap::new(); let cm = CodeMap::new(sess.codemap().path_mapping().clone());
let filemap = cm.new_filemap(path, None, src); let filemap = cm.new_filemap(path, src);
let mut rdr = lexer::StringReader::new_raw(sess, filemap); let mut rdr = lexer::StringReader::new_raw(sess, filemap);
let mut comments: Vec<Comment> = Vec::new(); let mut comments: Vec<Comment> = Vec::new();

View file

@ -10,7 +10,7 @@
use ast::{self, Ident}; use ast::{self, Ident};
use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION}; use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
use codemap::CodeMap; use codemap::{CodeMap, FilePathMapping};
use errors::{FatalError, DiagnosticBuilder}; use errors::{FatalError, DiagnosticBuilder};
use parse::{token, ParseSess}; use parse::{token, ParseSess};
use str::char_at; use str::char_at;
@ -563,7 +563,7 @@ impl<'a> StringReader<'a> {
// I guess this is the only way to figure out if // I guess this is the only way to figure out if
// we're at the beginning of the file... // we're at the beginning of the file...
let cmap = CodeMap::new(); let cmap = CodeMap::new(FilePathMapping::empty());
cmap.files.borrow_mut().push(self.filemap.clone()); cmap.files.borrow_mut().push(self.filemap.clone());
let loc = cmap.lookup_char_pos_adj(self.pos); let loc = cmap.lookup_char_pos_adj(self.pos);
debug!("Skipping a shebang"); debug!("Skipping a shebang");
@ -1718,13 +1718,13 @@ mod tests {
sess: &'a ParseSess, sess: &'a ParseSess,
teststr: String) teststr: String)
-> StringReader<'a> { -> StringReader<'a> {
let fm = cm.new_filemap("zebra.rs".to_string(), None, teststr); let fm = cm.new_filemap("zebra.rs".to_string(), teststr);
StringReader::new(sess, fm) StringReader::new(sess, fm)
} }
#[test] #[test]
fn t1() { fn t1() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
let mut string_reader = setup(&cm, let mut string_reader = setup(&cm,
&sh, &sh,
@ -1776,7 +1776,7 @@ mod tests {
#[test] #[test]
fn doublecolonparsing() { fn doublecolonparsing() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a b".to_string()), check_tokenization(setup(&cm, &sh, "a b".to_string()),
vec![mk_ident("a"), token::Whitespace, mk_ident("b")]); vec![mk_ident("a"), token::Whitespace, mk_ident("b")]);
@ -1784,7 +1784,7 @@ mod tests {
#[test] #[test]
fn dcparsing_2() { fn dcparsing_2() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a::b".to_string()), check_tokenization(setup(&cm, &sh, "a::b".to_string()),
vec![mk_ident("a"), token::ModSep, mk_ident("b")]); vec![mk_ident("a"), token::ModSep, mk_ident("b")]);
@ -1792,7 +1792,7 @@ mod tests {
#[test] #[test]
fn dcparsing_3() { fn dcparsing_3() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a ::b".to_string()), check_tokenization(setup(&cm, &sh, "a ::b".to_string()),
vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]); vec![mk_ident("a"), token::Whitespace, token::ModSep, mk_ident("b")]);
@ -1800,7 +1800,7 @@ mod tests {
#[test] #[test]
fn dcparsing_4() { fn dcparsing_4() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
check_tokenization(setup(&cm, &sh, "a:: b".to_string()), check_tokenization(setup(&cm, &sh, "a:: b".to_string()),
vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]); vec![mk_ident("a"), token::ModSep, token::Whitespace, mk_ident("b")]);
@ -1808,7 +1808,7 @@ mod tests {
#[test] #[test]
fn character_a() { fn character_a() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "'a'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("a")), None)); token::Literal(token::Char(Symbol::intern("a")), None));
@ -1816,7 +1816,7 @@ mod tests {
#[test] #[test]
fn character_space() { fn character_space() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "' '".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern(" ")), None)); token::Literal(token::Char(Symbol::intern(" ")), None));
@ -1824,7 +1824,7 @@ mod tests {
#[test] #[test]
fn character_escaped() { fn character_escaped() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "'\\n'".to_string()).next_token().tok,
token::Literal(token::Char(Symbol::intern("\\n")), None)); token::Literal(token::Char(Symbol::intern("\\n")), None));
@ -1832,7 +1832,7 @@ mod tests {
#[test] #[test]
fn lifetime_name() { fn lifetime_name() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok, assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
token::Lifetime(Ident::from_str("'abc"))); token::Lifetime(Ident::from_str("'abc")));
@ -1840,7 +1840,7 @@ mod tests {
#[test] #[test]
fn raw_string() { fn raw_string() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()) assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
.next_token() .next_token()
@ -1850,7 +1850,7 @@ mod tests {
#[test] #[test]
fn literal_suffixes() { fn literal_suffixes() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
macro_rules! test { macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{ ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
@ -1894,7 +1894,7 @@ mod tests {
#[test] #[test]
fn nested_block_comments() { fn nested_block_comments() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string()); let mut lexer = setup(&cm, &sh, "/* /* */ */'a'".to_string());
match lexer.next_token().tok { match lexer.next_token().tok {
@ -1907,7 +1907,7 @@ mod tests {
#[test] #[test]
fn crlf_comments() { fn crlf_comments() {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(FilePathMapping::empty()));
let sh = mk_sess(cm.clone()); let sh = mk_sess(cm.clone());
let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string()); let mut lexer = setup(&cm, &sh, "// test\r\n/// test\r\n".to_string());
let comment = lexer.next_token(); let comment = lexer.next_token();

View file

@ -11,7 +11,7 @@
//! The main parser interface //! The main parser interface
use ast::{self, CrateConfig}; use ast::{self, CrateConfig};
use codemap::CodeMap; use codemap::{CodeMap, FilePathMapping};
use syntax_pos::{self, Span, FileMap, NO_EXPANSION}; use syntax_pos::{self, Span, FileMap, NO_EXPANSION};
use errors::{Handler, ColorConfig, DiagnosticBuilder}; use errors::{Handler, ColorConfig, DiagnosticBuilder};
use feature_gate::UnstableFeatures; use feature_gate::UnstableFeatures;
@ -53,8 +53,8 @@ pub struct ParseSess {
} }
impl ParseSess { impl ParseSess {
pub fn new() -> Self { pub fn new(file_path_mapping: FilePathMapping) -> Self {
let cm = Rc::new(CodeMap::new()); let cm = Rc::new(CodeMap::new(file_path_mapping));
let handler = Handler::with_tty_emitter(ColorConfig::Auto, let handler = Handler::with_tty_emitter(ColorConfig::Auto,
true, true,
false, false,
@ -143,13 +143,13 @@ pub fn parse_stmt_from_source_str<'a>(name: String, source: String, sess: &'a Pa
pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess) pub fn parse_stream_from_source_str<'a>(name: String, source: String, sess: &'a ParseSess)
-> TokenStream { -> TokenStream {
filemap_to_stream(sess, sess.codemap().new_filemap(name, None, source)) filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
} }
// Create a new parser from a source string // Create a new parser from a source string
pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String) pub fn new_parser_from_source_str<'a>(sess: &'a ParseSess, name: String, source: String)
-> Parser<'a> { -> Parser<'a> {
filemap_to_parser(sess, sess.codemap().new_filemap(name, None, source)) filemap_to_parser(sess, sess.codemap().new_filemap(name, source))
} }
/// Create a new parser, handling errors as appropriate /// Create a new parser, handling errors as appropriate
@ -828,7 +828,7 @@ mod tests {
} }
#[test] fn parse_ident_pat () { #[test] fn parse_ident_pat () {
let sess = ParseSess::new(); let sess = ParseSess::new(FilePathMapping::empty());
let mut parser = string_to_parser(&sess, "b".to_string()); let mut parser = string_to_parser(&sess, "b".to_string());
assert!(panictry!(parser.parse_pat()) assert!(panictry!(parser.parse_pat())
== P(ast::Pat{ == P(ast::Pat{
@ -998,7 +998,7 @@ mod tests {
} }
#[test] fn crlf_doc_comments() { #[test] fn crlf_doc_comments() {
let sess = ParseSess::new(); let sess = ParseSess::new(FilePathMapping::empty());
let name = "<source>".to_string(); let name = "<source>".to_string();
let source = "/// doc comment\r\nfn foo() {}".to_string(); let source = "/// doc comment\r\nfn foo() {}".to_string();
@ -1023,7 +1023,7 @@ mod tests {
#[test] #[test]
fn ttdelim_span() { fn ttdelim_span() {
let sess = ParseSess::new(); let sess = ParseSess::new(FilePathMapping::empty());
let expr = parse::parse_expr_from_source_str("foo".to_string(), let expr = parse::parse_expr_from_source_str("foo".to_string(),
"foo!( fn main() { body } )".to_string(), &sess).unwrap(); "foo!( fn main() { body } )".to_string(), &sess).unwrap();

View file

@ -8,7 +8,7 @@
// option. This file may not be copied, modified, or distributed // option. This file may not be copied, modified, or distributed
// except according to those terms. // except according to those terms.
use codemap::CodeMap; use codemap::{CodeMap, FilePathMapping};
use errors::Handler; use errors::Handler;
use errors::emitter::EmitterWriter; use errors::emitter::EmitterWriter;
use std::io; use std::io;
@ -47,8 +47,8 @@ impl<T: Write> Write for Shared<T> {
fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) { fn test_harness(file_text: &str, span_labels: Vec<SpanLabel>, expected_output: &str) {
let output = Arc::new(Mutex::new(Vec::new())); let output = Arc::new(Mutex::new(Vec::new()));
let code_map = Rc::new(CodeMap::new()); let code_map = Rc::new(CodeMap::new(FilePathMapping::empty()));
code_map.new_filemap_and_lines("test.rs", None, &file_text); code_map.new_filemap_and_lines("test.rs", &file_text);
let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end); let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
let mut msp = MultiSpan::from_span(primary_span); let mut msp = MultiSpan::from_span(primary_span);

View file

@ -9,6 +9,7 @@
// except according to those terms. // except according to those terms.
use ast::{self, Ident}; use ast::{self, Ident};
use codemap::FilePathMapping;
use parse::{ParseSess, PResult, filemap_to_stream}; use parse::{ParseSess, PResult, filemap_to_stream};
use parse::{lexer, new_parser_from_source_str}; use parse::{lexer, new_parser_from_source_str};
use parse::parser::Parser; use parse::parser::Parser;
@ -18,8 +19,8 @@ use std::iter::Peekable;
/// Map a string to tts, using a made-up filename: /// Map a string to tts, using a made-up filename:
pub fn string_to_stream(source_str: String) -> TokenStream { pub fn string_to_stream(source_str: String) -> TokenStream {
let ps = ParseSess::new(); let ps = ParseSess::new(FilePathMapping::empty());
filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), None, source_str)) filemap_to_stream(&ps, ps.codemap().new_filemap("bogofile".to_string(), source_str))
} }
/// Map string to parser (via tts) /// Map string to parser (via tts)
@ -38,7 +39,7 @@ fn with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F) -> T
/// Parse a string, return a crate. /// Parse a string, return a crate.
pub fn string_to_crate (source_str : String) -> ast::Crate { pub fn string_to_crate (source_str : String) -> ast::Crate {
let ps = ParseSess::new(); let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| { with_error_checking_parse(source_str, &ps, |p| {
p.parse_crate_mod() p.parse_crate_mod()
}) })
@ -46,7 +47,7 @@ pub fn string_to_crate (source_str : String) -> ast::Crate {
/// Parse a string, return an expr /// Parse a string, return an expr
pub fn string_to_expr (source_str : String) -> P<ast::Expr> { pub fn string_to_expr (source_str : String) -> P<ast::Expr> {
let ps = ParseSess::new(); let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| { with_error_checking_parse(source_str, &ps, |p| {
p.parse_expr() p.parse_expr()
}) })
@ -54,7 +55,7 @@ pub fn string_to_expr (source_str : String) -> P<ast::Expr> {
/// Parse a string, return an item /// Parse a string, return an item
pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> { pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> {
let ps = ParseSess::new(); let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| { with_error_checking_parse(source_str, &ps, |p| {
p.parse_item() p.parse_item()
}) })
@ -62,7 +63,7 @@ pub fn string_to_item (source_str : String) -> Option<P<ast::Item>> {
/// Parse a string, return a stmt /// Parse a string, return a stmt
pub fn string_to_stmt(source_str : String) -> Option<ast::Stmt> { pub fn string_to_stmt(source_str : String) -> Option<ast::Stmt> {
let ps = ParseSess::new(); let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| { with_error_checking_parse(source_str, &ps, |p| {
p.parse_stmt() p.parse_stmt()
}) })
@ -71,7 +72,7 @@ pub fn string_to_stmt(source_str : String) -> Option<ast::Stmt> {
/// Parse a string, return a pat. Uses "irrefutable"... which doesn't /// Parse a string, return a pat. Uses "irrefutable"... which doesn't
/// (currently) affect parsing. /// (currently) affect parsing.
pub fn string_to_pat(source_str: String) -> P<ast::Pat> { pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
let ps = ParseSess::new(); let ps = ParseSess::new(FilePathMapping::empty());
with_error_checking_parse(source_str, &ps, |p| { with_error_checking_parse(source_str, &ps, |p| {
p.parse_pat() p.parse_pat()
}) })

View file

@ -369,13 +369,14 @@ pub struct MultiByteChar {
} }
/// A single source in the CodeMap. /// A single source in the CodeMap.
#[derive(Clone)]
pub struct FileMap { pub struct FileMap {
/// The name of the file that the source came from, source that doesn't /// The name of the file that the source came from, source that doesn't
/// originate from files has names between angle brackets by convention, /// originate from files has names between angle brackets by convention,
/// e.g. `<anon>` /// e.g. `<anon>`
pub name: FileName, pub name: FileName,
/// The absolute path of the file that the source came from. /// True if the `name` field above has been modified by -Zremap-path-prefix
pub abs_path: Option<FileName>, pub name_was_remapped: bool,
/// The complete source code /// The complete source code
pub src: Option<Rc<String>>, pub src: Option<Rc<String>>,
/// The start position of this source in the CodeMap /// The start position of this source in the CodeMap
@ -392,7 +393,7 @@ impl Encodable for FileMap {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> { fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_struct("FileMap", 6, |s| { s.emit_struct("FileMap", 6, |s| {
s.emit_struct_field("name", 0, |s| self.name.encode(s))?; s.emit_struct_field("name", 0, |s| self.name.encode(s))?;
s.emit_struct_field("abs_path", 1, |s| self.abs_path.encode(s))?; s.emit_struct_field("name_was_remapped", 1, |s| self.name_was_remapped.encode(s))?;
s.emit_struct_field("start_pos", 2, |s| self.start_pos.encode(s))?; s.emit_struct_field("start_pos", 2, |s| self.start_pos.encode(s))?;
s.emit_struct_field("end_pos", 3, |s| self.end_pos.encode(s))?; s.emit_struct_field("end_pos", 3, |s| self.end_pos.encode(s))?;
s.emit_struct_field("lines", 4, |s| { s.emit_struct_field("lines", 4, |s| {
@ -453,8 +454,8 @@ impl Decodable for FileMap {
d.read_struct("FileMap", 6, |d| { d.read_struct("FileMap", 6, |d| {
let name: String = d.read_struct_field("name", 0, |d| Decodable::decode(d))?; let name: String = d.read_struct_field("name", 0, |d| Decodable::decode(d))?;
let abs_path: Option<String> = let name_was_remapped: bool =
d.read_struct_field("abs_path", 1, |d| Decodable::decode(d))?; d.read_struct_field("name_was_remapped", 1, |d| Decodable::decode(d))?;
let start_pos: BytePos = d.read_struct_field("start_pos", 2, |d| Decodable::decode(d))?; let start_pos: BytePos = d.read_struct_field("start_pos", 2, |d| Decodable::decode(d))?;
let end_pos: BytePos = d.read_struct_field("end_pos", 3, |d| Decodable::decode(d))?; let end_pos: BytePos = d.read_struct_field("end_pos", 3, |d| Decodable::decode(d))?;
let lines: Vec<BytePos> = d.read_struct_field("lines", 4, |d| { let lines: Vec<BytePos> = d.read_struct_field("lines", 4, |d| {
@ -489,7 +490,7 @@ impl Decodable for FileMap {
d.read_struct_field("multibyte_chars", 5, |d| Decodable::decode(d))?; d.read_struct_field("multibyte_chars", 5, |d| Decodable::decode(d))?;
Ok(FileMap { Ok(FileMap {
name: name, name: name,
abs_path: abs_path, name_was_remapped: name_was_remapped,
start_pos: start_pos, start_pos: start_pos,
end_pos: end_pos, end_pos: end_pos,
src: None, src: None,

View file

@ -0,0 +1,19 @@
// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// ignore-tidy-linelength
// compile-flags: -g -Zremap-path-prefix-from={{cwd}} -Zremap-path-prefix-to=/the/cwd -Zremap-path-prefix-from={{src-base}} -Zremap-path-prefix-to=/the/src
// CHECK: !DIFile(filename: "/the/src/remap_path_prefix.rs", directory: "/the/cwd")
fn main() {
// We just check that the DIFile got remapped properly.
}

View file

@ -16,12 +16,13 @@ extern crate syntax;
extern crate syntax_pos; extern crate syntax_pos;
use syntax::ast; use syntax::ast;
use syntax::codemap::FilePathMapping;
use syntax::print::pprust; use syntax::print::pprust;
use syntax::symbol::Symbol; use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP; use syntax_pos::DUMMY_SP;
fn main() { fn main() {
let ps = syntax::parse::ParseSess::new(); let ps = syntax::parse::ParseSess::new(FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver; let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new( let mut cx = syntax::ext::base::ExtCtxt::new(
&ps, &ps,

View file

@ -24,7 +24,7 @@ use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP; use syntax_pos::DUMMY_SP;
fn main() { fn main() {
let ps = syntax::parse::ParseSess::new(); let ps = syntax::parse::ParseSess::new(codemap::FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver; let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new( let mut cx = syntax::ext::base::ExtCtxt::new(
&ps, &ps,

View file

@ -17,6 +17,7 @@ extern crate syntax;
use syntax::ast::*; use syntax::ast::*;
use syntax::attr::*; use syntax::attr::*;
use syntax::ast; use syntax::ast;
use syntax::codemap::FilePathMapping;
use syntax::parse; use syntax::parse;
use syntax::parse::{ParseSess, PResult}; use syntax::parse::{ParseSess, PResult};
use syntax::parse::new_parser_from_source_str; use syntax::parse::new_parser_from_source_str;
@ -78,7 +79,7 @@ fn str_compare<T, F: Fn(&T) -> String>(e: &str, expected: &[T], actual: &[T], f:
} }
fn check_expr_attrs(es: &str, expected: &[&str]) { fn check_expr_attrs(es: &str, expected: &[&str]) {
let ps = ParseSess::new(); let ps = ParseSess::new(FilePathMapping::empty());
let e = expr(es, &ps).expect("parse error"); let e = expr(es, &ps).expect("parse error");
let actual = &e.attrs; let actual = &e.attrs;
str_compare(es, str_compare(es,
@ -88,7 +89,7 @@ fn check_expr_attrs(es: &str, expected: &[&str]) {
} }
fn check_stmt_attrs(es: &str, expected: &[&str]) { fn check_stmt_attrs(es: &str, expected: &[&str]) {
let ps = ParseSess::new(); let ps = ParseSess::new(FilePathMapping::empty());
let e = stmt(es, &ps).expect("parse error"); let e = stmt(es, &ps).expect("parse error");
let actual = e.node.attrs(); let actual = e.node.attrs();
str_compare(es, str_compare(es,
@ -98,7 +99,7 @@ fn check_stmt_attrs(es: &str, expected: &[&str]) {
} }
fn reject_expr_parse(es: &str) { fn reject_expr_parse(es: &str) {
let ps = ParseSess::new(); let ps = ParseSess::new(FilePathMapping::empty());
match expr(es, &ps) { match expr(es, &ps) {
Ok(_) => panic!("parser did not reject `{}`", es), Ok(_) => panic!("parser did not reject `{}`", es),
Err(mut e) => e.cancel(), Err(mut e) => e.cancel(),
@ -106,7 +107,7 @@ fn reject_expr_parse(es: &str) {
} }
fn reject_stmt_parse(es: &str) { fn reject_stmt_parse(es: &str) {
let ps = ParseSess::new(); let ps = ParseSess::new(FilePathMapping::empty());
match stmt(es, &ps) { match stmt(es, &ps) {
Ok(_) => panic!("parser did not reject `{}`", es), Ok(_) => panic!("parser did not reject `{}`", es),
Err(mut e) => e.cancel(), Err(mut e) => e.cancel(),

View file

@ -15,12 +15,13 @@
extern crate syntax; extern crate syntax;
extern crate syntax_pos; extern crate syntax_pos;
use syntax::codemap::FilePathMapping;
use syntax::print::pprust::*; use syntax::print::pprust::*;
use syntax::symbol::Symbol; use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP; use syntax_pos::DUMMY_SP;
fn main() { fn main() {
let ps = syntax::parse::ParseSess::new(); let ps = syntax::parse::ParseSess::new(FilePathMapping::empty());
let mut resolver = syntax::ext::base::DummyResolver; let mut resolver = syntax::ext::base::DummyResolver;
let mut cx = syntax::ext::base::ExtCtxt::new( let mut cx = syntax::ext::base::ExtCtxt::new(
&ps, &ps,

View file

@ -40,23 +40,24 @@ impl EarlyProps {
None, None,
&mut |ln| { &mut |ln| {
props.ignore = props.ignore =
props.ignore || parse_name_directive(ln, "ignore-test") || props.ignore || config.parse_name_directive(ln, "ignore-test") ||
parse_name_directive(ln, &ignore_target(config)) || config.parse_name_directive(ln, &ignore_target(config)) ||
parse_name_directive(ln, &ignore_architecture(config)) || config.parse_name_directive(ln, &ignore_architecture(config)) ||
parse_name_directive(ln, &ignore_stage(config)) || config.parse_name_directive(ln, &ignore_stage(config)) ||
parse_name_directive(ln, &ignore_env(config)) || config.parse_name_directive(ln, &ignore_env(config)) ||
(config.mode == common::Pretty && parse_name_directive(ln, "ignore-pretty")) || (config.mode == common::Pretty &&
config.parse_name_directive(ln, "ignore-pretty")) ||
(config.target != config.host && (config.target != config.host &&
parse_name_directive(ln, "ignore-cross-compile")) || config.parse_name_directive(ln, "ignore-cross-compile")) ||
ignore_gdb(config, ln) || ignore_gdb(config, ln) ||
ignore_lldb(config, ln) || ignore_lldb(config, ln) ||
ignore_llvm(config, ln); ignore_llvm(config, ln);
if let Some(s) = parse_aux_build(ln) { if let Some(s) = config.parse_aux_build(ln) {
props.aux.push(s); props.aux.push(s);
} }
props.should_fail = props.should_fail || parse_name_directive(ln, "should-fail"); props.should_fail = props.should_fail || config.parse_name_directive(ln, "should-fail");
}); });
return props; return props;
@ -80,7 +81,7 @@ impl EarlyProps {
} }
if !line.contains("ignore-gdb-version") && if !line.contains("ignore-gdb-version") &&
parse_name_directive(line, "ignore-gdb") { config.parse_name_directive(line, "ignore-gdb") {
return true; return true;
} }
@ -143,7 +144,7 @@ impl EarlyProps {
return false; return false;
} }
if parse_name_directive(line, "ignore-lldb") { if config.parse_name_directive(line, "ignore-lldb") {
return true; return true;
} }
@ -260,19 +261,23 @@ impl TestProps {
} }
} }
pub fn from_aux_file(&self, testfile: &Path, cfg: Option<&str>) -> Self { pub fn from_aux_file(&self,
testfile: &Path,
cfg: Option<&str>,
config: &Config)
-> Self {
let mut props = TestProps::new(); let mut props = TestProps::new();
// copy over select properties to the aux build: // copy over select properties to the aux build:
props.incremental_dir = self.incremental_dir.clone(); props.incremental_dir = self.incremental_dir.clone();
props.load_from(testfile, cfg); props.load_from(testfile, cfg, config);
props props
} }
pub fn from_file(testfile: &Path) -> Self { pub fn from_file(testfile: &Path, config: &Config) -> Self {
let mut props = TestProps::new(); let mut props = TestProps::new();
props.load_from(testfile, None); props.load_from(testfile, None, config);
props props
} }
@ -280,85 +285,88 @@ impl TestProps {
/// tied to a particular revision `foo` (indicated by writing /// tied to a particular revision `foo` (indicated by writing
/// `//[foo]`), then the property is ignored unless `cfg` is /// `//[foo]`), then the property is ignored unless `cfg` is
/// `Some("foo")`. /// `Some("foo")`.
pub fn load_from(&mut self, testfile: &Path, cfg: Option<&str>) { pub fn load_from(&mut self,
testfile: &Path,
cfg: Option<&str>,
config: &Config) {
iter_header(testfile, iter_header(testfile,
cfg, cfg,
&mut |ln| { &mut |ln| {
if let Some(ep) = parse_error_pattern(ln) { if let Some(ep) = config.parse_error_pattern(ln) {
self.error_patterns.push(ep); self.error_patterns.push(ep);
} }
if let Some(flags) = parse_compile_flags(ln) { if let Some(flags) = config.parse_compile_flags(ln) {
self.compile_flags.extend(flags.split_whitespace() self.compile_flags.extend(flags.split_whitespace()
.map(|s| s.to_owned())); .map(|s| s.to_owned()));
} }
if let Some(r) = parse_revisions(ln) { if let Some(r) = config.parse_revisions(ln) {
self.revisions.extend(r); self.revisions.extend(r);
} }
if self.run_flags.is_none() { if self.run_flags.is_none() {
self.run_flags = parse_run_flags(ln); self.run_flags = config.parse_run_flags(ln);
} }
if self.pp_exact.is_none() { if self.pp_exact.is_none() {
self.pp_exact = parse_pp_exact(ln, testfile); self.pp_exact = config.parse_pp_exact(ln, testfile);
} }
if !self.build_aux_docs { if !self.build_aux_docs {
self.build_aux_docs = parse_build_aux_docs(ln); self.build_aux_docs = config.parse_build_aux_docs(ln);
} }
if !self.force_host { if !self.force_host {
self.force_host = parse_force_host(ln); self.force_host = config.parse_force_host(ln);
} }
if !self.check_stdout { if !self.check_stdout {
self.check_stdout = parse_check_stdout(ln); self.check_stdout = config.parse_check_stdout(ln);
} }
if !self.no_prefer_dynamic { if !self.no_prefer_dynamic {
self.no_prefer_dynamic = parse_no_prefer_dynamic(ln); self.no_prefer_dynamic = config.parse_no_prefer_dynamic(ln);
} }
if !self.pretty_expanded { if !self.pretty_expanded {
self.pretty_expanded = parse_pretty_expanded(ln); self.pretty_expanded = config.parse_pretty_expanded(ln);
} }
if let Some(m) = parse_pretty_mode(ln) { if let Some(m) = config.parse_pretty_mode(ln) {
self.pretty_mode = m; self.pretty_mode = m;
} }
if !self.pretty_compare_only { if !self.pretty_compare_only {
self.pretty_compare_only = parse_pretty_compare_only(ln); self.pretty_compare_only = config.parse_pretty_compare_only(ln);
} }
if let Some(ab) = parse_aux_build(ln) { if let Some(ab) = config.parse_aux_build(ln) {
self.aux_builds.push(ab); self.aux_builds.push(ab);
} }
if let Some(ee) = parse_env(ln, "exec-env") { if let Some(ee) = config.parse_env(ln, "exec-env") {
self.exec_env.push(ee); self.exec_env.push(ee);
} }
if let Some(ee) = parse_env(ln, "rustc-env") { if let Some(ee) = config.parse_env(ln, "rustc-env") {
self.rustc_env.push(ee); self.rustc_env.push(ee);
} }
if let Some(cl) = parse_check_line(ln) { if let Some(cl) = config.parse_check_line(ln) {
self.check_lines.push(cl); self.check_lines.push(cl);
} }
if let Some(of) = parse_forbid_output(ln) { if let Some(of) = config.parse_forbid_output(ln) {
self.forbid_output.push(of); self.forbid_output.push(of);
} }
if !self.must_compile_successfully { if !self.must_compile_successfully {
self.must_compile_successfully = parse_must_compile_successfully(ln); self.must_compile_successfully = config.parse_must_compile_successfully(ln);
} }
if !self.check_test_line_numbers_match { if !self.check_test_line_numbers_match {
self.check_test_line_numbers_match = parse_check_test_line_numbers_match(ln); self.check_test_line_numbers_match = config.parse_check_test_line_numbers_match(ln);
} }
}); });
@ -410,73 +418,75 @@ fn iter_header(testfile: &Path, cfg: Option<&str>, it: &mut FnMut(&str)) {
return; return;
} }
fn parse_error_pattern(line: &str) -> Option<String> { impl Config {
parse_name_value_directive(line, "error-pattern")
fn parse_error_pattern(&self, line: &str) -> Option<String> {
self.parse_name_value_directive(line, "error-pattern")
} }
fn parse_forbid_output(line: &str) -> Option<String> { fn parse_forbid_output(&self, line: &str) -> Option<String> {
parse_name_value_directive(line, "forbid-output") self.parse_name_value_directive(line, "forbid-output")
} }
fn parse_aux_build(line: &str) -> Option<String> { fn parse_aux_build(&self, line: &str) -> Option<String> {
parse_name_value_directive(line, "aux-build") self.parse_name_value_directive(line, "aux-build")
} }
fn parse_compile_flags(line: &str) -> Option<String> { fn parse_compile_flags(&self, line: &str) -> Option<String> {
parse_name_value_directive(line, "compile-flags") self.parse_name_value_directive(line, "compile-flags")
} }
fn parse_revisions(line: &str) -> Option<Vec<String>> { fn parse_revisions(&self, line: &str) -> Option<Vec<String>> {
parse_name_value_directive(line, "revisions") self.parse_name_value_directive(line, "revisions")
.map(|r| r.split_whitespace().map(|t| t.to_string()).collect()) .map(|r| r.split_whitespace().map(|t| t.to_string()).collect())
} }
fn parse_run_flags(line: &str) -> Option<String> { fn parse_run_flags(&self, line: &str) -> Option<String> {
parse_name_value_directive(line, "run-flags") self.parse_name_value_directive(line, "run-flags")
} }
fn parse_check_line(line: &str) -> Option<String> { fn parse_check_line(&self, line: &str) -> Option<String> {
parse_name_value_directive(line, "check") self.parse_name_value_directive(line, "check")
} }
fn parse_force_host(line: &str) -> bool { fn parse_force_host(&self, line: &str) -> bool {
parse_name_directive(line, "force-host") self.parse_name_directive(line, "force-host")
} }
fn parse_build_aux_docs(line: &str) -> bool { fn parse_build_aux_docs(&self, line: &str) -> bool {
parse_name_directive(line, "build-aux-docs") self.parse_name_directive(line, "build-aux-docs")
} }
fn parse_check_stdout(line: &str) -> bool { fn parse_check_stdout(&self, line: &str) -> bool {
parse_name_directive(line, "check-stdout") self.parse_name_directive(line, "check-stdout")
} }
fn parse_no_prefer_dynamic(line: &str) -> bool { fn parse_no_prefer_dynamic(&self, line: &str) -> bool {
parse_name_directive(line, "no-prefer-dynamic") self.parse_name_directive(line, "no-prefer-dynamic")
} }
fn parse_pretty_expanded(line: &str) -> bool { fn parse_pretty_expanded(&self, line: &str) -> bool {
parse_name_directive(line, "pretty-expanded") self.parse_name_directive(line, "pretty-expanded")
} }
fn parse_pretty_mode(line: &str) -> Option<String> { fn parse_pretty_mode(&self, line: &str) -> Option<String> {
parse_name_value_directive(line, "pretty-mode") self.parse_name_value_directive(line, "pretty-mode")
} }
fn parse_pretty_compare_only(line: &str) -> bool { fn parse_pretty_compare_only(&self, line: &str) -> bool {
parse_name_directive(line, "pretty-compare-only") self.parse_name_directive(line, "pretty-compare-only")
} }
fn parse_must_compile_successfully(line: &str) -> bool { fn parse_must_compile_successfully(&self, line: &str) -> bool {
parse_name_directive(line, "must-compile-successfully") self.parse_name_directive(line, "must-compile-successfully")
} }
fn parse_check_test_line_numbers_match(line: &str) -> bool { fn parse_check_test_line_numbers_match(&self, line: &str) -> bool {
parse_name_directive(line, "check-test-line-numbers-match") self.parse_name_directive(line, "check-test-line-numbers-match")
} }
fn parse_env(line: &str, name: &str) -> Option<(String, String)> { fn parse_env(&self, line: &str, name: &str) -> Option<(String, String)> {
parse_name_value_directive(line, name).map(|nv| { self.parse_name_value_directive(line, name).map(|nv| {
// nv is either FOO or FOO=BAR // nv is either FOO or FOO=BAR
let mut strs: Vec<String> = nv.splitn(2, '=') let mut strs: Vec<String> = nv.splitn(2, '=')
.map(str::to_owned) .map(str::to_owned)
@ -493,11 +503,11 @@ fn parse_env(line: &str, name: &str) -> Option<(String, String)> {
}) })
} }
fn parse_pp_exact(line: &str, testfile: &Path) -> Option<PathBuf> { fn parse_pp_exact(&self, line: &str, testfile: &Path) -> Option<PathBuf> {
if let Some(s) = parse_name_value_directive(line, "pp-exact") { if let Some(s) = self.parse_name_value_directive(line, "pp-exact") {
Some(PathBuf::from(&s)) Some(PathBuf::from(&s))
} else { } else {
if parse_name_directive(line, "pp-exact") { if self.parse_name_directive(line, "pp-exact") {
testfile.file_name().map(PathBuf::from) testfile.file_name().map(PathBuf::from)
} else { } else {
None None
@ -505,21 +515,23 @@ fn parse_pp_exact(line: &str, testfile: &Path) -> Option<PathBuf> {
} }
} }
fn parse_name_directive(line: &str, directive: &str) -> bool { fn parse_name_directive(&self, line: &str, directive: &str) -> bool {
// This 'no-' rule is a quick hack to allow pretty-expanded and no-pretty-expanded to coexist // This 'no-' rule is a quick hack to allow pretty-expanded and
// no-pretty-expanded to coexist
line.contains(directive) && !line.contains(&("no-".to_owned() + directive)) line.contains(directive) && !line.contains(&("no-".to_owned() + directive))
} }
pub fn parse_name_value_directive(line: &str, directive: &str) -> Option<String> { pub fn parse_name_value_directive(&self, line: &str, directive: &str) -> Option<String> {
let keycolon = format!("{}:", directive); let keycolon = format!("{}:", directive);
if let Some(colon) = line.find(&keycolon) { if let Some(colon) = line.find(&keycolon) {
let value = line[(colon + keycolon.len())..line.len()].to_owned(); let value = line[(colon + keycolon.len())..line.len()].to_owned();
debug!("{}: {}", directive, value); debug!("{}: {}", directive, value);
Some(value) Some(expand_variables(value, self))
} else { } else {
None None
} }
} }
}
pub fn lldb_version_to_int(version_string: &str) -> isize { pub fn lldb_version_to_int(version_string: &str) -> isize {
let error_string = format!("Encountered LLDB version string with unexpected format: {}", let error_string = format!("Encountered LLDB version string with unexpected format: {}",
@ -528,3 +540,24 @@ pub fn lldb_version_to_int(version_string: &str) -> isize {
let major: isize = version_string.parse().ok().expect(&error_string); let major: isize = version_string.parse().ok().expect(&error_string);
return major; return major;
} }
fn expand_variables(mut value: String, config: &Config) -> String {
const CWD: &'static str = "{{cwd}}";
const SRC_BASE: &'static str = "{{src-base}}";
const BUILD_BASE: &'static str = "{{build-base}}";
if value.contains(CWD) {
let cwd = env::current_dir().unwrap();
value = value.replace(CWD, &cwd.to_string_lossy());
}
if value.contains(SRC_BASE) {
value = value.replace(SRC_BASE, &config.src_base.to_string_lossy());
}
if value.contains(BUILD_BASE) {
value = value.replace(BUILD_BASE, &config.build_base.to_string_lossy());
}
value
}

View file

@ -16,7 +16,6 @@ use errors::{self, ErrorKind, Error};
use filetime::FileTime; use filetime::FileTime;
use json; use json;
use header::TestProps; use header::TestProps;
use header;
use procsrv; use procsrv;
use test::TestPaths; use test::TestPaths;
use uidiff; use uidiff;
@ -57,7 +56,7 @@ pub fn run(config: Config, testpaths: &TestPaths) {
print!("\n\n"); print!("\n\n");
} }
debug!("running {:?}", testpaths.file.display()); debug!("running {:?}", testpaths.file.display());
let base_props = TestProps::from_file(&testpaths.file); let base_props = TestProps::from_file(&testpaths.file, &config);
let base_cx = TestCx { config: &config, let base_cx = TestCx { config: &config,
props: &base_props, props: &base_props,
@ -70,7 +69,7 @@ pub fn run(config: Config, testpaths: &TestPaths) {
} else { } else {
for revision in &base_props.revisions { for revision in &base_props.revisions {
let mut revision_props = base_props.clone(); let mut revision_props = base_props.clone();
revision_props.load_from(&testpaths.file, Some(&revision)); revision_props.load_from(&testpaths.file, Some(&revision), &config);
let rev_cx = TestCx { let rev_cx = TestCx {
config: &config, config: &config,
props: &revision_props, props: &revision_props,
@ -867,13 +866,13 @@ actual:\n\
} }
for &(ref command_directive, ref check_directive) in &directives { for &(ref command_directive, ref check_directive) in &directives {
header::parse_name_value_directive( self.config.parse_name_value_directive(
&line, &line,
&command_directive).map(|cmd| { &command_directive).map(|cmd| {
commands.push(cmd) commands.push(cmd)
}); });
header::parse_name_value_directive( self.config.parse_name_value_directive(
&line, &line,
&check_directive).map(|cmd| { &check_directive).map(|cmd| {
check_lines.push(cmd) check_lines.push(cmd)
@ -1158,7 +1157,9 @@ actual:\n\
if self.props.build_aux_docs { if self.props.build_aux_docs {
for rel_ab in &self.props.aux_builds { for rel_ab in &self.props.aux_builds {
let aux_testpaths = self.compute_aux_test_paths(rel_ab); let aux_testpaths = self.compute_aux_test_paths(rel_ab);
let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision); let aux_props = self.props.from_aux_file(&aux_testpaths.file,
self.revision,
self.config);
let aux_cx = TestCx { let aux_cx = TestCx {
config: self.config, config: self.config,
props: &aux_props, props: &aux_props,
@ -1279,7 +1280,9 @@ actual:\n\
for rel_ab in &self.props.aux_builds { for rel_ab in &self.props.aux_builds {
let aux_testpaths = self.compute_aux_test_paths(rel_ab); let aux_testpaths = self.compute_aux_test_paths(rel_ab);
let aux_props = self.props.from_aux_file(&aux_testpaths.file, self.revision); let aux_props = self.props.from_aux_file(&aux_testpaths.file,
self.revision,
self.config);
let mut crate_type = if aux_props.no_prefer_dynamic { let mut crate_type = if aux_props.no_prefer_dynamic {
Vec::new() Vec::new()
} else { } else {