Auto merge of #54517 - mcr431:53956-panic-on-include_bytes-of-own-file, r=michaelwoerister

#53956: panic on `include_bytes!` of own file

fix #53956

When `include_bytes!` was used on a source file within the same project, the compiler would panic on subsequent compilations because `expand_include_bytes` overwrote the file's entry in the source_map with empty source. This PR changes `expand_include_bytes` to check the source_map first and reuse the already existing src, if any.
bors 2018-12-06 01:36:51 +00:00
commit 1839c144bc
21 changed files with 180 additions and 119 deletions
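The heart of the change is making source-file registration idempotent per stable id: `new_source_file` now looks up the `StableSourceFileId` first and returns the already-recorded file instead of pushing a replacement with empty contents. Below is a minimal, self-contained sketch of that idea using plain standard-library types; the `SourceMap`/`SourceFile` here are simplified stand-ins, not the real rustc types.

```rust
use std::collections::hash_map::{DefaultHasher, Entry, HashMap};
use std::hash::{Hash, Hasher};
use std::rc::Rc;

#[derive(Debug)]
struct SourceFile {
    name: String,
    src: String,
}

#[derive(Default)]
struct SourceMap {
    by_id: HashMap<u64, Rc<SourceFile>>,
}

impl SourceMap {
    // Stand-in for StableSourceFileId: here we only hash the file name.
    fn stable_id(name: &str) -> u64 {
        let mut hasher = DefaultHasher::new();
        name.hash(&mut hasher);
        hasher.finish()
    }

    // Roughly like the patched new_source_file: if a file with the same id is
    // already registered, return it unchanged instead of overwriting it.
    fn new_source_file(&mut self, name: &str, src: String) -> Rc<SourceFile> {
        match self.by_id.entry(Self::stable_id(name)) {
            Entry::Occupied(e) => e.get().clone(),
            Entry::Vacant(e) => e
                .insert(Rc::new(SourceFile { name: name.to_string(), src }))
                .clone(),
        }
    }
}

fn main() {
    let mut sm = SourceMap::default();
    // First registration records the real contents of the file.
    let first = sm.new_source_file("src/lib.rs", "fn main() {}".to_string());
    // A later include_bytes!-style registration with empty source no longer
    // clobbers the entry that diagnostics rely on.
    let second = sm.new_source_file("src/lib.rs", String::new());
    assert_eq!(first.src, second.src);
    println!("kept source: {:?}", second.src);
}
```

In the real patch the id additionally hashes the remap flag and unmapped path (`StableSourceFileId::new_from_pieces`), and the lookup goes through `source_file_by_stable_id` before a new `SourceFile` is allocated.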


@@ -417,13 +417,14 @@ impl_stable_hash_for!(enum ::syntax_pos::hygiene::CompilerDesugaringKind {
 impl_stable_hash_for!(enum ::syntax_pos::FileName {
     Real(pb),
     Macros(s),
-    QuoteExpansion,
-    Anon,
-    MacroExpansion,
-    ProcMacroSourceCode,
-    CliCrateAttr,
-    CfgSpec,
-    Custom(s)
+    QuoteExpansion(s),
+    Anon(s),
+    MacroExpansion(s),
+    ProcMacroSourceCode(s),
+    CliCrateAttr(s),
+    CfgSpec(s),
+    Custom(s),
+    DocTest(pb, line),
 });
 impl<'a> HashStable<StableHashingContext<'a>> for SourceFile {


@@ -1756,8 +1756,8 @@ pub fn parse_cfgspecs(cfgspecs: Vec<String>) -> ast::CrateConfig {
         .into_iter()
         .map(|s| {
             let sess = parse::ParseSess::new(FilePathMapping::empty());
-            let mut parser =
-                parse::new_parser_from_source_str(&sess, FileName::CfgSpec, s.to_string());
+            let filename = FileName::cfg_spec_source_code(&s);
+            let mut parser = parse::new_parser_from_source_str(&sess, filename, s.to_string());
             macro_rules! error {($reason: expr) => {
                 early_error(ErrorOutputType::default(),


@@ -594,7 +594,7 @@ fn make_input(free_matches: &[String]) -> Option<(Input, Option<PathBuf>, Option
         } else {
             None
         };
-        Some((Input::Str { name: FileName::Anon, input: src },
+        Some((Input::Str { name: FileName::anon_source_code(&src), input: src },
               None, err))
     } else {
         Some((Input::File(PathBuf::from(ifile)),


@@ -129,7 +129,7 @@ fn test_env_with_pool<F>(
     let cstore = CStore::new(::get_codegen_backend(&sess).metadata_loader());
     rustc_lint::register_builtins(&mut sess.lint_store.borrow_mut(), Some(&sess));
     let input = config::Input::Str {
-        name: FileName::Anon,
+        name: FileName::anon_source_code(&source_string),
         input: source_string.to_string(),
     };
     let krate =


@@ -1044,7 +1044,7 @@ impl EmitterWriter {
                 buffer.append(buffer_msg_line_offset,
                               &format!("{}:{}:{}",
                                        loc.file.name,
-                                       sm.doctest_offset_line(loc.line),
+                                       sm.doctest_offset_line(&loc.file.name, loc.line),
                                        loc.col.0 + 1),
                               Style::LineAndColumn);
                 for _ in 0..max_line_num_len {
@@ -1054,7 +1054,7 @@
                 buffer.prepend(0,
                                &format!("{}:{}:{}: ",
                                         loc.file.name,
-                                        sm.doctest_offset_line(loc.line),
+                                        sm.doctest_offset_line(&loc.file.name, loc.line),
                                         loc.col.0 + 1),
                                Style::LineAndColumn);
             }
@@ -1075,7 +1075,8 @@
             };
             format!("{}:{}{}",
                     annotated_file.file.name,
-                    sm.doctest_offset_line(first_line.line_index),
+                    sm.doctest_offset_line(
+                        &annotated_file.file.name, first_line.line_index),
                     col)
         } else {
             annotated_file.file.name.to_string()


@@ -130,7 +130,7 @@ pub trait SourceMapper {
     fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span>;
     fn call_span_if_macro(&self, sp: Span) -> Span;
     fn ensure_source_file_source_present(&self, source_file: Lrc<SourceFile>) -> bool;
-    fn doctest_offset_line(&self, line: usize) -> usize;
+    fn doctest_offset_line(&self, file: &FileName, line: usize) -> usize;
 }
 impl CodeSuggestion {


@@ -3008,7 +3008,7 @@ pub struct Span {
 impl Span {
     pub fn empty() -> Span {
         Span {
-            filename: FileName::Anon,
+            filename: FileName::Anon(0),
             loline: 0, locol: 0,
             hiline: 0, hicol: 0,
         }


@@ -197,8 +197,14 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
     let (test, line_offset) = make_test(test, Some(cratename), as_test_harness, opts);
     // FIXME(#44940): if doctests ever support path remapping, then this filename
     // needs to be the result of SourceMap::span_to_unmapped_path
+    let path = match filename {
+        FileName::Real(path) => path.clone(),
+        _ => PathBuf::from(r"doctest.rs"),
+    };
     let input = config::Input::Str {
-        name: filename.to_owned(),
+        name: FileName::DocTest(path, line as isize - line_offset as isize),
         input: test,
     };
     let outputs = OutputTypes::new(&[(OutputType::Exe, None)]);
@@ -252,9 +258,7 @@ fn run_test(test: &str, cratename: &str, filename: &FileName, line: usize,
     let _bomb = Bomb(data.clone(), old.unwrap_or(box io::stdout()));
     let (libdir, outdir, compile_result) = driver::spawn_thread_pool(sessopts, |sessopts| {
-        let source_map = Lrc::new(SourceMap::new_doctest(
-            sessopts.file_path_mapping(), filename.clone(), line as isize - line_offset as isize
-        ));
+        let source_map = Lrc::new(SourceMap::new(sessopts.file_path_mapping()));
         let emitter = errors::emitter::EmitterWriter::new(box Sink(data.clone()),
                                                           Some(source_map.clone()),
                                                           false,
@@ -401,7 +405,7 @@ pub fn make_test(s: &str,
     use errors::emitter::EmitterWriter;
     use errors::Handler;
-    let filename = FileName::Anon;
+    let filename = FileName::anon_source_code(s);
     let source = crates + &everything_else;
     // any errors in parsing should also appear when the doctest is compiled for real, so just
@@ -411,8 +415,6 @@ pub fn make_test(s: &str,
     let handler = Handler::with_emitter(false, false, box emitter);
     let sess = ParseSess::with_span_handler(handler, cm);
-    debug!("about to parse: \n{}", source);
     let mut found_main = false;
     let mut found_extern_crate = cratename.is_none();
@@ -487,8 +489,6 @@ pub fn make_test(s: &str,
         prog.push_str("\n}");
     }
-    info!("final test program: {}", prog);
     (prog, line_offset)
 }


@@ -803,7 +803,7 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -
     for raw_attr in attrs {
         let mut parser = parse::new_parser_from_source_str(
             parse_sess,
-            FileName::CliCrateAttr,
+            FileName::cli_crate_attr_source_code(&raw_attr),
             raw_attr.clone(),
         );


@@ -353,27 +353,27 @@ pub mod rt {
 impl<'a> ExtParseUtils for ExtCtxt<'a> {
     fn parse_item(&self, s: String) -> P<ast::Item> {
         panictry!(parse::parse_item_from_source_str(
-            FileName::QuoteExpansion,
+            FileName::quote_expansion_source_code(&s),
             s,
             self.parse_sess())).expect("parse error")
     }

     fn parse_stmt(&self, s: String) -> ast::Stmt {
         panictry!(parse::parse_stmt_from_source_str(
-            FileName::QuoteExpansion,
+            FileName::quote_expansion_source_code(&s),
             s,
             self.parse_sess())).expect("parse error")
     }

     fn parse_expr(&self, s: String) -> P<ast::Expr> {
         panictry!(parse::parse_expr_from_source_str(
-            FileName::QuoteExpansion,
+            FileName::quote_expansion_source_code(&s),
             s,
             self.parse_sess()))
     }

     fn parse_tts(&self, s: String) -> Vec<TokenTree> {
-        let source_name = FileName::QuoteExpansion;
+        let source_name = FileName::quote_expansion_source_code(&s);
         parse::parse_stream_from_source_str(source_name, s, self.parse_sess(), None)
             .into_trees().collect()
     }


@@ -182,9 +182,12 @@ pub fn expand_include_bytes(cx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::Toke
             DummyResult::expr(sp)
         }
         Ok(..) => {
-            // Add this input file to the code map to make it available as
-            // dependency information, but don't enter it's contents
-            cx.source_map().new_source_file(file.into(), String::new());
+            let src = match String::from_utf8(bytes.clone()) {
+                Ok(contents) => contents,
+                Err(..) => "".to_string()
+            };
+            cx.source_map().new_source_file(file.into(), src);
             base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes))))
         }
@@ -201,6 +204,7 @@ fn res_rel_file(cx: &mut ExtCtxt, sp: syntax_pos::Span, arg: String) -> PathBuf
     let callsite = sp.source_callsite();
     let mut path = match cx.source_map().span_to_unmapped_path(callsite) {
         FileName::Real(path) => path,
+        FileName::DocTest(path, _) => path,
         other => panic!("cannot resolve relative path in non-file source `{}`", other),
     };
     path.pop();


@@ -1900,7 +1900,7 @@ mod tests {
                      sess: &'a ParseSess,
                      teststr: String)
                      -> StringReader<'a> {
-        let sf = sm.new_source_file(PathBuf::from("zebra.rs").into(), teststr);
+        let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
         StringReader::new(sess, sf, None)
     }


@@ -974,23 +974,25 @@ mod tests {
         with_globals(|| {
             let sess = ParseSess::new(FilePathMapping::empty());
-            let name = FileName::Custom("source".to_string());
+            let name_1 = FileName::Custom("crlf_source_1".to_string());
             let source = "/// doc comment\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name.clone(), source, &sess)
+            let item = parse_item_from_source_str(name_1, source, &sess)
                 .unwrap().unwrap();
             let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
             assert_eq!(doc, "/// doc comment");

+            let name_2 = FileName::Custom("crlf_source_2".to_string());
             let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name.clone(), source, &sess)
+            let item = parse_item_from_source_str(name_2, source, &sess)
                 .unwrap().unwrap();
             let docs = item.attrs.iter().filter(|a| a.path == "doc")
                 .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
             let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
             assert_eq!(&docs[..], b);

+            let name_3 = FileName::Custom("clrf_source_3".to_string());
             let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
+            let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
             let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
             assert_eq!(doc, "/** doc comment\n * with CRLF */");
         });


@@ -545,7 +545,8 @@ impl Token {
             let tokens_for_real = nt.1.force(|| {
                 // FIXME(#43081): Avoid this pretty-print + reparse hack
                 let source = pprust::token_to_string(self);
-                parse_stream_from_source_str(FileName::MacroExpansion, source, sess, Some(span))
+                let filename = FileName::macro_expansion_source_code(&source);
+                parse_stream_from_source_str(filename, source, sess, Some(span))
             });
             // During early phases of the compiler the AST could get modified
@@ -781,10 +782,12 @@ fn prepend_attrs(sess: &ParseSess,
         assert_eq!(attr.style, ast::AttrStyle::Outer,
                    "inner attributes should prevent cached tokens from existing");
+        let source = pprust::attr_to_string(attr);
+        let macro_filename = FileName::macro_expansion_source_code(&source);
         if attr.is_sugared_doc {
             let stream = parse_stream_from_source_str(
-                FileName::MacroExpansion,
-                pprust::attr_to_string(attr),
+                macro_filename,
+                source,
                 sess,
                 Some(span),
             );
@@ -805,8 +808,8 @@ fn prepend_attrs(sess: &ParseSess,
         // should eventually be removed.
         } else {
             let stream = parse_stream_from_source_str(
-                FileName::MacroExpansion,
-                pprust::path_to_string(&attr.path),
+                macro_filename,
+                source,
                 sess,
                 Some(span),
             );


@@ -110,11 +110,19 @@ pub struct StableSourceFileId(u128);
 impl StableSourceFileId {
     pub fn new(source_file: &SourceFile) -> StableSourceFileId {
+        StableSourceFileId::new_from_pieces(&source_file.name,
+                                            source_file.name_was_remapped,
+                                            source_file.unmapped_path.as_ref())
+    }
+
+    pub fn new_from_pieces(name: &FileName,
+                           name_was_remapped: bool,
+                           unmapped_path: Option<&FileName>) -> StableSourceFileId {
         let mut hasher = StableHasher::new();
-        source_file.name.hash(&mut hasher);
-        source_file.name_was_remapped.hash(&mut hasher);
-        source_file.unmapped_path.hash(&mut hasher);
+        name.hash(&mut hasher);
+        name_was_remapped.hash(&mut hasher);
+        unmapped_path.hash(&mut hasher);
         StableSourceFileId(hasher.finish())
     }
@@ -136,9 +144,6 @@ pub struct SourceMap {
     // This is used to apply the file path remapping as specified via
     // --remap-path-prefix to all SourceFiles allocated within this SourceMap.
     path_mapping: FilePathMapping,
-    /// In case we are in a doctest, replace all file names with the PathBuf,
-    /// and add the given offsets to the line info
-    doctest_offset: Option<(FileName, isize)>,
 }
 impl SourceMap {
@@ -147,19 +152,9 @@ impl SourceMap {
             files: Default::default(),
             file_loader: Box::new(RealFileLoader),
             path_mapping,
-            doctest_offset: None,
         }
     }
-    pub fn new_doctest(path_mapping: FilePathMapping,
-                       file: FileName, line: isize) -> SourceMap {
-        SourceMap {
-            doctest_offset: Some((file, line)),
-            ..SourceMap::new(path_mapping)
-        }
-    }
     pub fn with_file_loader(file_loader: Box<dyn FileLoader + Sync + Send>,
                             path_mapping: FilePathMapping)
                             -> SourceMap {
@@ -167,7 +162,6 @@ impl SourceMap {
             files: Default::default(),
             file_loader: file_loader,
             path_mapping,
-            doctest_offset: None,
         }
     }
@@ -181,11 +175,7 @@ impl SourceMap {
     pub fn load_file(&self, path: &Path) -> io::Result<Lrc<SourceFile>> {
         let src = self.file_loader.read_file(path)?;
-        let filename = if let Some((ref name, _)) = self.doctest_offset {
-            name.clone()
-        } else {
-            path.to_owned().into()
-        };
+        let filename = path.to_owned().into();
         Ok(self.new_source_file(filename, src))
     }
@@ -208,7 +198,8 @@ impl SourceMap {
     }

     /// Creates a new source_file.
-    /// This does not ensure that only one SourceFile exists per file name.
+    /// If a file already exists in the source_map with the same id, that file is returned
+    /// unmodified
     pub fn new_source_file(&self, filename: FileName, src: String) -> Lrc<SourceFile> {
         let start_pos = self.next_start_pos();
@@ -226,21 +217,30 @@ impl SourceMap {
             },
             other => (other, false),
         };
-        let source_file = Lrc::new(SourceFile::new(
-            filename,
-            was_remapped,
-            unmapped_path,
-            src,
-            Pos::from_usize(start_pos),
-        ));
-        let mut files = self.files.borrow_mut();
-        files.source_files.push(source_file.clone());
-        files.stable_id_to_source_file.insert(StableSourceFileId::new(&source_file),
-                                              source_file.clone());
-        source_file
+        let file_id = StableSourceFileId::new_from_pieces(&filename,
+                                                          was_remapped,
+                                                          Some(&unmapped_path));
+        return match self.source_file_by_stable_id(file_id) {
+            Some(lrc_sf) => lrc_sf,
+            None => {
+                let source_file = Lrc::new(SourceFile::new(
+                    filename,
+                    was_remapped,
+                    unmapped_path,
+                    src,
+                    Pos::from_usize(start_pos),
+                ));
+                let mut files = self.files.borrow_mut();
+                files.source_files.push(source_file.clone());
+                files.stable_id_to_source_file.insert(file_id, source_file.clone());
+                source_file
+            }
+        }
     }

     /// Allocates a new SourceFile representing a source file from an external
@@ -310,15 +310,17 @@ impl SourceMap {
     }

     // If there is a doctest_offset, apply it to the line
-    pub fn doctest_offset_line(&self, mut orig: usize) -> usize {
-        if let Some((_, line)) = self.doctest_offset {
-            if line >= 0 {
-                orig = orig + line as usize;
-            } else {
-                orig = orig - (-line) as usize;
-            }
+    pub fn doctest_offset_line(&self, file: &FileName, orig: usize) -> usize {
+        return match file {
+            FileName::DocTest(_, offset) => {
+                return if *offset >= 0 {
+                    orig + *offset as usize
+                } else {
+                    orig - (-(*offset)) as usize
+                }
+            },
+            _ => orig
         }
-        orig
     }

     /// Lookup source information about a BytePos
@@ -983,8 +985,8 @@ impl SourceMapper for SourceMap {
             }
         )
     }
-    fn doctest_offset_line(&self, line: usize) -> usize {
-        self.doctest_offset_line(line)
+    fn doctest_offset_line(&self, file: &FileName, line: usize) -> usize {
+        self.doctest_offset_line(file, line)
     }
 }


@@ -402,7 +402,7 @@ impl server::TokenStream for Rustc<'_> {
     }
     fn from_str(&mut self, src: &str) -> Self::TokenStream {
         parse::parse_stream_from_source_str(
-            FileName::ProcMacroSourceCode,
+            FileName::proc_macro_source_code(src.clone()),
             src.to_string(),
             self.sess,
             Some(self.call_site),


@@ -90,19 +90,20 @@ pub enum FileName {
     /// A macro. This includes the full name of the macro, so that there are no clashes.
     Macros(String),
     /// call to `quote!`
-    QuoteExpansion,
+    QuoteExpansion(u64),
     /// Command line
-    Anon,
+    Anon(u64),
     /// Hack in src/libsyntax/parse.rs
     /// FIXME(jseyfried)
-    MacroExpansion,
-    ProcMacroSourceCode,
+    MacroExpansion(u64),
+    ProcMacroSourceCode(u64),
     /// Strings provided as --cfg [cfgspec] stored in a crate_cfg
-    CfgSpec,
+    CfgSpec(u64),
     /// Strings provided as crate attributes in the CLI
-    CliCrateAttr,
+    CliCrateAttr(u64),
     /// Custom sources for explicit parser calls from plugins and drivers
     Custom(String),
+    DocTest(PathBuf, isize),
 }

 impl std::fmt::Display for FileName {
@@ -111,13 +112,15 @@ impl std::fmt::Display for FileName {
         match *self {
             Real(ref path) => write!(fmt, "{}", path.display()),
             Macros(ref name) => write!(fmt, "<{} macros>", name),
-            QuoteExpansion => write!(fmt, "<quote expansion>"),
-            MacroExpansion => write!(fmt, "<macro expansion>"),
-            Anon => write!(fmt, "<anon>"),
-            ProcMacroSourceCode => write!(fmt, "<proc-macro source code>"),
-            CfgSpec => write!(fmt, "cfgspec"),
-            CliCrateAttr => write!(fmt, "<crate attribute>"),
+            QuoteExpansion(_) => write!(fmt, "<quote expansion>"),
+            MacroExpansion(_) => write!(fmt, "<macro expansion>"),
+            Anon(_) => write!(fmt, "<anon>"),
+            ProcMacroSourceCode(_) =>
+                write!(fmt, "<proc-macro source code>"),
+            CfgSpec(_) => write!(fmt, "<cfgspec>"),
+            CliCrateAttr(_) => write!(fmt, "<crate attribute>"),
             Custom(ref s) => write!(fmt, "<{}>", s),
+            DocTest(ref path, _) => write!(fmt, "{}", path.display()),
         }
     }
 }
@@ -135,13 +138,14 @@ impl FileName {
         match *self {
             Real(_) => true,
             Macros(_) |
-            Anon |
-            MacroExpansion |
-            ProcMacroSourceCode |
-            CfgSpec |
-            CliCrateAttr |
+            Anon(_) |
+            MacroExpansion(_) |
+            ProcMacroSourceCode(_) |
+            CfgSpec(_) |
+            CliCrateAttr(_) |
             Custom(_) |
-            QuoteExpansion => false,
+            QuoteExpansion(_) |
+            DocTest(_, _) => false,
         }
     }
@@ -149,16 +153,57 @@ impl FileName {
         use self::FileName::*;
         match *self {
             Real(_) |
-            Anon |
-            MacroExpansion |
-            ProcMacroSourceCode |
-            CfgSpec |
-            CliCrateAttr |
+            Anon(_) |
+            MacroExpansion(_) |
+            ProcMacroSourceCode(_) |
+            CfgSpec(_) |
+            CliCrateAttr(_) |
             Custom(_) |
-            QuoteExpansion => false,
+            QuoteExpansion(_) |
+            DocTest(_, _) => false,
             Macros(_) => true,
         }
     }
+
+    pub fn quote_expansion_source_code(src: &str) -> FileName {
+        let mut hasher = StableHasher::new();
+        src.hash(&mut hasher);
+        FileName::QuoteExpansion(hasher.finish())
+    }
+
+    pub fn macro_expansion_source_code(src: &str) -> FileName {
+        let mut hasher = StableHasher::new();
+        src.hash(&mut hasher);
+        FileName::MacroExpansion(hasher.finish())
+    }
+
+    pub fn anon_source_code(src: &str) -> FileName {
+        let mut hasher = StableHasher::new();
+        src.hash(&mut hasher);
+        FileName::Anon(hasher.finish())
+    }
+
+    pub fn proc_macro_source_code(src: &str) -> FileName {
+        let mut hasher = StableHasher::new();
+        src.hash(&mut hasher);
+        FileName::ProcMacroSourceCode(hasher.finish())
+    }
+
+    pub fn cfg_spec_source_code(src: &str) -> FileName {
+        let mut hasher = StableHasher::new();
+        src.hash(&mut hasher);
+        FileName::QuoteExpansion(hasher.finish())
+    }
+
+    pub fn cli_crate_attr_source_code(src: &str) -> FileName {
+        let mut hasher = StableHasher::new();
+        src.hash(&mut hasher);
+        FileName::CliCrateAttr(hasher.finish())
+    }
+
+    pub fn doc_test_source_code(path: PathBuf, line: isize) -> FileName {
+        FileName::DocTest(path, line)
+    }
 }

 /// Spans represent a region of code, used for error reporting. Positions in spans
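A side note on the hunk above: the previously unit-like `FileName` variants now carry a `u64` produced by hashing the source text (see the new `*_source_code` constructors), so two different anonymous or macro-generated sources get distinct file names, and therefore distinct stable ids under the now-deduplicating `new_source_file`. A small stand-alone illustration of that effect, using std's `DefaultHasher` rather than rustc's `StableHasher`:

```rust
use std::collections::hash_map::DefaultHasher;
use std::hash::{Hash, Hasher};

// Simplified stand-in for the hash-carrying FileName::Anon(u64) variant.
#[derive(Debug, PartialEq, Eq, Hash)]
enum FileName {
    Anon(u64),
}

// Mirrors the shape of FileName::anon_source_code: hash the source text.
fn anon_source_code(src: &str) -> FileName {
    let mut hasher = DefaultHasher::new();
    src.hash(&mut hasher);
    FileName::Anon(hasher.finish())
}

fn main() {
    // Different anonymous inputs map to different names...
    assert_ne!(anon_source_code("fn a() {}"), anon_source_code("fn b() {}"));
    // ...while the same input keeps a repeatable name within a session.
    assert_eq!(anon_source_code("fn a() {}"), anon_source_code("fn a() {}"));
    println!("{:?}", anon_source_code("fn a() {}"));
}
```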


@@ -72,7 +72,8 @@ fn compile(code: String, output: PathBuf, sysroot: PathBuf) {
     driver::spawn_thread_pool(opts, |opts| {
         let (sess, cstore, codegen_backend) = basic_sess(opts);
         let control = CompileController::basic();
-        let input = Input::Str { name: FileName::Anon, input: code };
+        let name = FileName::anon_source_code(&code);
+        let input = Input::Str { name, input: code };
         let _ = compile_input(
             codegen_backend,
             &sess,


@@ -32,7 +32,7 @@ use std::fmt;
 // Copied out of syntax::util::parser_testing
 pub fn string_to_parser<'a>(ps: &'a ParseSess, source_str: String) -> Parser<'a> {
-    new_parser_from_source_str(ps, FileName::Custom("bogofile".to_owned()), source_str)
+    new_parser_from_source_str(ps, FileName::Custom(source_str.clone()), source_str)
 }

 fn with_error_checking_parse<'a, T, F>(s: String, ps: &'a ParseSess, f: F) -> PResult<'a, T> where


@@ -44,9 +44,11 @@ use syntax::ptr::P;
 fn parse_expr(ps: &ParseSess, src: &str) -> P<Expr> {
+    let src_as_string = src.to_string();
+
     let mut p = parse::new_parser_from_source_str(ps,
-                                                  FileName::Custom("expr".to_owned()),
-                                                  src.to_owned());
+                                                  FileName::Custom(src_as_string.clone()),
+                                                  src_as_string);
     p.parse_expr().unwrap()
 }


@@ -12,7 +12,7 @@ error[E0425]: cannot find value `no` in this scope
 3 | no
   | ^^ not found in this scope
-thread '$DIR/failed-doctest-output.rs - OtherStruct (line 27)' panicked at 'couldn't compile the test', src/librustdoc/test.rs:323:13
+thread '$DIR/failed-doctest-output.rs - OtherStruct (line 27)' panicked at 'couldn't compile the test', src/librustdoc/test.rs:327:13
 note: Run with `RUST_BACKTRACE=1` for a backtrace.
 ---- $DIR/failed-doctest-output.rs - SomeStruct (line 21) stdout ----
@@ -21,7 +21,7 @@ thread '$DIR/failed-doctest-output.rs - SomeStruct (line 21)' panicked at 'test
 thread 'main' panicked at 'oh no', $DIR/failed-doctest-output.rs:3:1
 note: Run with `RUST_BACKTRACE=1` for a backtrace.
-', src/librustdoc/test.rs:358:17
+', src/librustdoc/test.rs:362:17
 failures: