Tweak macro parse errors when reaching EOF during macro call parse
- Add detail on the origin of the current parser when reaching EOF: stop saying "found `<eof>`" and point at the end of the macro call
- Handle empty `cfg_attr` attributes
- Reword the empty `derive` attribute error
parent fc45382c12
commit 24160171e4
21 changed files with 176 additions and 93 deletions
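The diff threads a new `is_subparser: Option<&'static str>` argument through `Parser::new` and `stream_to_parser`, naming the origin of a subparser's token stream ("macro arguments", "attribute", "proc-macro derive", "inline assembly") so EOF diagnostics can report "found end of macro arguments" instead of "found `<eof>`". A minimal, self-contained sketch of that idea follows; the `Token` and `Parser` types here are simplified stand-ins, not rustc's actual parser.

// Simplified stand-ins for rustc's token and parser types; only the
// error-message selection introduced by this commit is modeled.
enum Token {
    Eof,
    Other(&'static str),
}

struct Parser {
    token: Token,
    // Some("macro arguments"), Some("attribute"), ... when this parser was
    // created for a macro call's token stream; None when parsing a whole file.
    is_subparser: Option<&'static str>,
}

impl Parser {
    // Build the "expected ..., found ..." message, naming the origin of the
    // token stream instead of "<eof>" when a subparser runs out of tokens.
    fn expected_found_msg(&self, expected: &str) -> String {
        let found = match (&self.token, self.is_subparser) {
            (Token::Eof, Some(origin)) => format!("end of {}", origin),
            (Token::Eof, None) => "`<eof>`".to_string(),
            (Token::Other(t), _) => format!("`{}`", t),
        };
        format!("expected {}, found {}", expected, found)
    }
}

fn main() {
    let in_macro = Parser { token: Token::Eof, is_subparser: Some("macro arguments") };
    // Prints: expected `=`, found end of macro arguments
    println!("{}", in_macro.expected_found_msg("`=`"));

    let in_file = Parser { token: Token::Other("struct"), is_subparser: None };
    // Prints: expected expression, found `struct`
    println!("{}", in_file.expected_found_msg("expression"));
}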
@@ -627,7 +627,7 @@ mod tests {
    use super::*;

    fn same(fmt: &'static str, p: &[Piece<'static>]) {
-        let parser = Parser::new(fmt, None, vec![], false);
+        let parser = Parser::new(fmt, None, vec![], false, None);
        assert!(parser.collect::<Vec<Piece<'static>>>() == p);
    }

@@ -643,7 +643,7 @@ mod tests {
    }

    fn musterr(s: &str) {
-        let mut p = Parser::new(s, None, vec![], false);
+        let mut p = Parser::new(s, None, vec![], false, None);
        p.next();
        assert!(!p.errors.is_empty());
    }

@@ -226,12 +226,12 @@ impl<'a, 'gcx, 'tcx> OnUnimplementedFormatString {
        Ok(result)
    }

-    fn verify(&self,
-              tcx: TyCtxt<'a, 'gcx, 'tcx>,
-              trait_def_id: DefId,
-              span: Span)
-              -> Result<(), ErrorReported>
-    {
+    fn verify(
+        &self,
+        tcx: TyCtxt<'a, 'gcx, 'tcx>,
+        trait_def_id: DefId,
+        span: Span,
+    ) -> Result<(), ErrorReported> {
        let name = tcx.item_name(trait_def_id);
        let generics = tcx.generics_of(trait_def_id);
        let parser = Parser::new(&self.0, None, vec![], false);

@@ -272,12 +272,12 @@ impl<'a, 'gcx, 'tcx> OnUnimplementedFormatString {
        result
    }

-    pub fn format(&self,
-                  tcx: TyCtxt<'a, 'gcx, 'tcx>,
-                  trait_ref: ty::TraitRef<'tcx>,
-                  options: &FxHashMap<String, String>)
-                  -> String
-    {
+    pub fn format(
+        &self,
+        tcx: TyCtxt<'a, 'gcx, 'tcx>,
+        trait_ref: ty::TraitRef<'tcx>,
+        options: &FxHashMap<String, String>,
+    ) -> String {
        let name = tcx.item_name(trait_ref.def_id);
        let trait_str = tcx.def_path_str(trait_ref.def_id);
        let generics = tcx.generics_of(trait_ref.def_id);

@@ -278,7 +278,14 @@ impl Attribute {
    pub fn parse<'a, T, F>(&self, sess: &'a ParseSess, mut f: F) -> PResult<'a, T>
        where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
    {
-        let mut parser = Parser::new(sess, self.tokens.clone(), None, false, false);
+        let mut parser = Parser::new(
+            sess,
+            self.tokens.clone(),
+            None,
+            false,
+            false,
+            Some("attribute"),
+        );
        let result = f(&mut parser)?;
        if parser.token != token::Eof {
            parser.unexpected()?;

@@ -94,6 +94,17 @@ impl<'a> StripUnconfigured<'a> {
        if !attr.check_name(sym::cfg_attr) {
            return vec![attr];
        }
+        if attr.tokens.len() == 0 {
+            self.sess.span_diagnostic.struct_span_err(attr.span, "bad `cfg_attr` attribute")
+                .span_label(attr.span, "missing condition and attribute")
+                .note("`cfg_attr` must be of the form: \
+                       `#[cfg_attr(condition, attribute)]`")
+                .note("for more information, visit \
+                       <https://doc.rust-lang.org/reference/conditional-compilation.html\
+                       #the-cfg_attr-attribute>")
+                .emit();
+            return vec![];
+        }

        let (cfg_predicate, expanded_attrs) = match attr.parse(self.sess, |parser| {
            parser.expect(&token::OpenDelim(token::Paren))?;

@@ -117,7 +128,7 @@ impl<'a> StripUnconfigured<'a> {
            Ok(result) => result,
            Err(mut e) => {
                e.emit();
-                return Vec::new();
+                return vec![];
            }
        };

@@ -850,7 +850,11 @@ impl<'a> ExtCtxt<'a> {
    }

    pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
-        parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect())
+        parse::stream_to_parser(
+            self.parse_sess,
+            tts.iter().cloned().collect(),
+            Some("macro arguments"),
+        )
    }
    pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
    pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }

@@ -17,8 +17,11 @@ pub fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>) ->
            return true;
        }
        if !attr.is_meta_item_list() {
-            cx.span_err(attr.span,
-                        "attribute must be of the form `#[derive(Trait1, Trait2, ...)]`");
+            cx.struct_span_err(attr.span, "bad `derive` attribute")
+                .span_label(attr.span, "missing traits to be derived")
+                .note("`derive` must be of the form: \
+                       `#[derive(Trait1, Trait2, ...)]`")
+                .emit();
            return false;
        }

@@ -658,7 +658,14 @@ pub fn parse(
    recurse_into_modules: bool,
) -> NamedParseResult {
    // Create a parser that can be used for the "black box" parts.
-    let mut parser = Parser::new(sess, tts, directory, recurse_into_modules, true);
+    let mut parser = Parser::new(
+        sess,
+        tts,
+        directory,
+        recurse_into_modules,
+        true,
+        Some("macro arguments"),
+    );

    // A queue of possible matcher positions. We initialize it with the matcher position in which
    // the "dot" is before the first token of the first token tree in `ms`. `inner_parse_loop` then

@@ -172,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>,
                path: Cow::from(cx.current_expansion.module.directory.as_path()),
                ownership: cx.current_expansion.directory_ownership,
            };
-            let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false);
+            let mut p = Parser::new(cx.parse_sess(), tts, Some(directory), true, false, None);
            p.root_module_name = cx.current_expansion.module.mod_path.last()
                .map(|id| id.as_str().to_string());

@@ -236,7 +236,7 @@ fn maybe_source_file_to_parser(
) -> Result<Parser<'_>, Vec<Diagnostic>> {
    let end_pos = source_file.end_pos;
    let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
-    let mut parser = stream_to_parser(sess, stream);
+    let mut parser = stream_to_parser(sess, stream, None);
    parser.unclosed_delims = unclosed_delims;
    if parser.token == token::Eof && parser.span.is_dummy() {
        parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());

@@ -248,7 +248,7 @@ fn maybe_source_file_to_parser(
// must preserve old name for now, because quote! from the *existing*
// compiler expands into it
pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
-    stream_to_parser(sess, tts.into_iter().collect())
+    stream_to_parser(sess, tts.into_iter().collect(), Some("macro arguments"))
}

@@ -328,8 +328,12 @@ pub fn maybe_file_to_stream(
}

/// Given stream and the `ParseSess`, produces a parser.
-pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
-    Parser::new(sess, stream, None, true, false)
+pub fn stream_to_parser<'a>(
+    sess: &'a ParseSess,
+    stream: TokenStream,
+    is_subparser: Option<&'static str>,
+) -> Parser<'a> {
+    Parser::new(sess, stream, None, true, false, is_subparser)
}

/// Given stream, the `ParseSess` and the base directory, produces a parser.

@@ -51,7 +51,7 @@ use crate::symbol::{kw, sym, Symbol};
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
use rustc_target::spec::abi::{self, Abi};
use syntax_pos::{
-    Span, MultiSpan, BytePos, FileName,
+    BytePos, DUMMY_SP, FileName, MultiSpan, Span,
    hygiene::CompilerDesugaringKind,
};
use log::{debug, trace};

@@ -233,6 +233,8 @@ pub struct Parser<'a> {
    /// error.
    crate unclosed_delims: Vec<UnmatchedBrace>,
    last_unexpected_token_span: Option<Span>,
+    /// If `true`, this `Parser` is not parsing Rust code but rather a macro call.
+    is_subparser: Option<&'static str>,
}

impl<'a> Drop for Parser<'a> {

@@ -309,7 +311,7 @@ impl TokenCursor {
                self.frame = frame;
                continue
            } else {
-                return TokenAndSpan { tok: token::Eof, sp: syntax_pos::DUMMY_SP }
+                return TokenAndSpan { tok: token::Eof, sp: DUMMY_SP }
            };

        match self.frame.last_token {

@@ -533,17 +535,19 @@ enum TokenExpectType {
}

impl<'a> Parser<'a> {
-    pub fn new(sess: &'a ParseSess,
-               tokens: TokenStream,
-               directory: Option<Directory<'a>>,
-               recurse_into_file_modules: bool,
-               desugar_doc_comments: bool)
-               -> Self {
+    pub fn new(
+        sess: &'a ParseSess,
+        tokens: TokenStream,
+        directory: Option<Directory<'a>>,
+        recurse_into_file_modules: bool,
+        desugar_doc_comments: bool,
+        is_subparser: Option<&'static str>,
+    ) -> Self {
        let mut parser = Parser {
            sess,
            token: token::Whitespace,
-            span: syntax_pos::DUMMY_SP,
-            prev_span: syntax_pos::DUMMY_SP,
+            span: DUMMY_SP,
+            prev_span: DUMMY_SP,
            meta_var_span: None,
            prev_token_kind: PrevTokenKind::Other,
            restrictions: Restrictions::empty(),

@@ -568,6 +572,7 @@ impl<'a> Parser<'a> {
            max_angle_bracket_count: 0,
            unclosed_delims: Vec::new(),
            last_unexpected_token_span: None,
+            is_subparser,
        };

        let tok = parser.next_tok();

@@ -639,16 +644,28 @@ impl<'a> Parser<'a> {
        } else {
            let token_str = pprust::token_to_string(t);
            let this_token_str = self.this_token_descr();
-            let mut err = self.fatal(&format!("expected `{}`, found {}",
-                                              token_str,
-                                              this_token_str));
-
-            let sp = if self.token == token::Token::Eof {
-                // EOF, don't want to point at the following char, but rather the last token
-                self.prev_span
-            } else {
-                self.sess.source_map().next_point(self.prev_span)
+            let (prev_sp, sp) = match (&self.token, self.is_subparser) {
+                // Point at the end of the macro call when reaching end of macro arguments.
+                (token::Token::Eof, Some(_)) => {
+                    let sp = self.sess.source_map().next_point(self.span);
+                    (sp, sp)
+                }
+                // We don't want to point at the following span after DUMMY_SP.
+                // This happens when the parser finds an empty TokenStream.
+                _ if self.prev_span == DUMMY_SP => (self.span, self.span),
+                // EOF, don't want to point at the following char, but rather the last token.
+                (token::Token::Eof, None) => (self.prev_span, self.span),
+                _ => (self.sess.source_map().next_point(self.prev_span), self.span),
            };
+            let msg = format!(
+                "expected `{}`, found {}",
+                token_str,
+                match (&self.token, self.is_subparser) {
+                    (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
+                    _ => this_token_str,
+                },
+            );
+            let mut err = self.struct_span_err(sp, &msg);
            let label_exp = format!("expected `{}`", token_str);
            match self.recover_closing_delimiter(&[t.clone()], err) {
                Err(e) => err = e,

|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
let cm = self.sess.source_map();
|
let cm = self.sess.source_map();
|
||||||
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
|
match (cm.lookup_line(prev_sp.lo()), cm.lookup_line(sp.lo())) {
|
||||||
(Ok(ref a), Ok(ref b)) if a.line == b.line => {
|
(Ok(ref a), Ok(ref b)) if a.line == b.line => {
|
||||||
// When the spans are in the same line, it means that the only content
|
// When the spans are in the same line, it means that the only content
|
||||||
// between them is whitespace, point only at the found token.
|
// between them is whitespace, point only at the found token.
|
||||||
err.span_label(self.span, label_exp);
|
err.span_label(sp, label_exp);
|
||||||
}
|
}
|
||||||
_ => {
|
_ => {
|
||||||
err.span_label(sp, label_exp);
|
err.span_label(prev_sp, label_exp);
|
||||||
err.span_label(self.span, "unexpected token");
|
err.span_label(sp, "unexpected token");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
Err(err)
|
Err(err)
|
||||||
|
@@ -812,7 +829,7 @@ impl<'a> Parser<'a> {
                // | expected one of 8 possible tokens here
                err.span_label(self.span, label_exp);
            }
-            _ if self.prev_span == syntax_pos::DUMMY_SP => {
+            _ if self.prev_span == DUMMY_SP => {
                // Account for macro context where the previous span might not be
                // available to avoid incorrect output (#54841).
                err.span_label(self.span, "unexpected token");

|
||||||
path = self.parse_path(PathStyle::Type)?;
|
path = self.parse_path(PathStyle::Type)?;
|
||||||
path_span = path_lo.to(self.prev_span);
|
path_span = path_lo.to(self.prev_span);
|
||||||
} else {
|
} else {
|
||||||
path = ast::Path { segments: Vec::new(), span: syntax_pos::DUMMY_SP };
|
path = ast::Path { segments: Vec::new(), span: DUMMY_SP };
|
||||||
path_span = self.span.to(self.span);
|
path_span = self.span.to(self.span);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2627,16 +2644,24 @@ impl<'a> Parser<'a> {
|
||||||
}
|
}
|
||||||
Err(mut err) => {
|
Err(mut err) => {
|
||||||
self.cancel(&mut err);
|
self.cancel(&mut err);
|
||||||
let msg = format!("expected expression, found {}",
|
let (span, msg) = match (&self.token, self.is_subparser) {
|
||||||
self.this_token_descr());
|
(&token::Token::Eof, Some(origin)) => {
|
||||||
let mut err = self.fatal(&msg);
|
let sp = self.sess.source_map().next_point(self.span);
|
||||||
|
(sp, format!( "expected expression, found end of {}", origin))
|
||||||
|
}
|
||||||
|
_ => (self.span, format!(
|
||||||
|
"expected expression, found {}",
|
||||||
|
self.this_token_descr(),
|
||||||
|
)),
|
||||||
|
};
|
||||||
|
let mut err = self.struct_span_err(span, &msg);
|
||||||
let sp = self.sess.source_map().start_point(self.span);
|
let sp = self.sess.source_map().start_point(self.span);
|
||||||
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow()
|
if let Some(sp) = self.sess.ambiguous_block_expr_parse.borrow()
|
||||||
.get(&sp)
|
.get(&sp)
|
||||||
{
|
{
|
||||||
self.sess.expr_parentheses_needed(&mut err, *sp, None);
|
self.sess.expr_parentheses_needed(&mut err, *sp, None);
|
||||||
}
|
}
|
||||||
err.span_label(self.span, "expected expression");
|
err.span_label(span, "expected expression");
|
||||||
return Err(err);
|
return Err(err);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -5592,7 +5617,7 @@ impl<'a> Parser<'a> {
            where_clause: WhereClause {
                id: ast::DUMMY_NODE_ID,
                predicates: Vec::new(),
-                span: syntax_pos::DUMMY_SP,
+                span: DUMMY_SP,
            },
            span: span_lo.to(self.prev_span),
        })

@@ -5838,7 +5863,7 @@ impl<'a> Parser<'a> {
        let mut where_clause = WhereClause {
            id: ast::DUMMY_NODE_ID,
            predicates: Vec::new(),
-            span: syntax_pos::DUMMY_SP,
+            span: DUMMY_SP,
        };

        if !self.eat_keyword(kw::Where) {

@@ -7005,7 +7030,7 @@ impl<'a> Parser<'a> {
                                Ident::with_empty_ctxt(sym::warn_directory_ownership)),
                tokens: TokenStream::empty(),
                is_sugared_doc: false,
-                span: syntax_pos::DUMMY_SP,
+                span: DUMMY_SP,
            };
            attr::mark_known(&attr);
            attrs.push(attr);

@@ -7013,7 +7038,7 @@ impl<'a> Parser<'a> {
            Ok((id, ItemKind::Mod(module), Some(attrs)))
        } else {
            let placeholder = ast::Mod {
-                inner: syntax_pos::DUMMY_SP,
+                inner: DUMMY_SP,
                items: Vec::new(),
                inline: false
            };

@@ -138,7 +138,11 @@ fn parse_inline_asm<'a>(
    if p2.token != token::Eof {
        let mut extra_tts = p2.parse_all_token_trees()?;
        extra_tts.extend(tts[first_colon..].iter().cloned());
-        p = parse::stream_to_parser(cx.parse_sess, extra_tts.into_iter().collect());
+        p = parse::stream_to_parser(
+            cx.parse_sess,
+            extra_tts.into_iter().collect(),
+            Some("inline assembly"),
+        );
    }

    asm = s;

@@ -89,7 +89,7 @@ impl MultiItemModifier for ProcMacroDerive {
        let error_count_before = ecx.parse_sess.span_diagnostic.err_count();
        let msg = "proc-macro derive produced unparseable tokens";

-        let mut parser = parse::stream_to_parser(ecx.parse_sess, stream);
+        let mut parser = parse::stream_to_parser(ecx.parse_sess, stream, Some("proc-macro derive"));
        let mut items = vec![];

        loop {

@@ -12,17 +12,17 @@ error: expected expression, found keyword `struct`
LL | format!(struct);
   | ^^^^^^ expected expression

-error: expected expression, found `<eof>`
-  --> $DIR/format-parse-errors.rs:4:23
+error: expected expression, found end of macro arguments
+  --> $DIR/format-parse-errors.rs:4:24
   |
LL | format!("s", name =);
   | ^ expected expression

-error: expected `=`, found `<eof>`
-  --> $DIR/format-parse-errors.rs:5:29
+error: expected `=`, found end of macro arguments
+  --> $DIR/format-parse-errors.rs:5:32
   |
LL | format!("s", foo = foo, bar);
-   | ^^^ expected `=`
+   | ^ expected `=`

error: expected expression, found keyword `struct`
  --> $DIR/format-parse-errors.rs:6:24

@@ -1,17 +1,13 @@
-#[derive(Copy(Bad))]
-//~^ ERROR expected one of `)`, `,`, or `::`, found `(`
+#[derive(Copy(Bad))] //~ ERROR expected one of `)`, `,`, or `::`, found `(`
struct Test1;

-#[derive(Copy="bad")]
-//~^ ERROR expected one of `)`, `,`, or `::`, found `=`
+#[derive(Copy="bad")] //~ ERROR expected one of `)`, `,`, or `::`, found `=`
struct Test2;

-#[derive()]
-//~^ WARNING empty trait list
+#[derive()] //~ WARNING empty trait list
struct Test3;

-#[derive]
-//~^ ERROR attribute must be of the form
+#[derive] //~ ERROR bad `derive` attribute
struct Test4;

fn main() {}

@@ -5,22 +5,24 @@ LL | #[derive(Copy(Bad))]
   | ^ expected one of `)`, `,`, or `::` here

error: expected one of `)`, `,`, or `::`, found `=`
-  --> $DIR/malformed-derive-entry.rs:5:14
+  --> $DIR/malformed-derive-entry.rs:4:14
   |
LL | #[derive(Copy="bad")]
   | ^ expected one of `)`, `,`, or `::` here

warning: empty trait list in `derive`
-  --> $DIR/malformed-derive-entry.rs:9:1
+  --> $DIR/malformed-derive-entry.rs:7:1
   |
LL | #[derive()]
   | ^^^^^^^^^^^

-error: attribute must be of the form `#[derive(Trait1, Trait2, ...)]`
-  --> $DIR/malformed-derive-entry.rs:13:1
+error: bad `derive` attribute
+  --> $DIR/malformed-derive-entry.rs:10:1
   |
LL | #[derive]
-   | ^^^^^^^^^
+   | ^^^^^^^^^ missing traits to be derived
+   |
+   = note: `derive` must be of the form: `#[derive(Trait1, Trait2, ...)]`

error: aborting due to 3 previous errors

@@ -1,13 +1,13 @@
-#[cfg_attr] //~ ERROR expected `(`, found `<eof>`
+#[cfg_attr] //~ ERROR bad `cfg_attr` attribute
struct S1;

#[cfg_attr = ""] //~ ERROR expected `(`, found `=`
struct S2;

-#[derive] //~ ERROR attribute must be of the form
+#[derive] //~ ERROR bad `derive` attribute
struct S3;

-#[derive = ""] //~ ERROR attribute must be of the form
+#[derive = ""] //~ ERROR bad `derive` attribute
struct S4;

fn main() {}

@@ -1,25 +1,33 @@
-error: expected `(`, found `<eof>`
+error: bad `cfg_attr` attribute
+  --> $DIR/malformed-special-attrs.rs:1:1
+   |
+LL | #[cfg_attr]
+   | ^^^^^^^^^^^ missing condition and attribute
+   |
+   = note: `cfg_attr` must be of the form: `#[cfg_attr(condition, attribute)]`
+   = note: for more information, visit <https://doc.rust-lang.org/reference/conditional-compilation.html#the-cfg_attr-attribute>

error: expected `(`, found `=`
  --> $DIR/malformed-special-attrs.rs:4:12
   |
-LL | #[cfg_attr]
-   | - expected `(`
-...
LL | #[cfg_attr = ""]
-   | ^ unexpected token
+   | ^ expected `(`

-error: attribute must be of the form `#[derive(Trait1, Trait2, ...)]`
+error: bad `derive` attribute
  --> $DIR/malformed-special-attrs.rs:7:1
   |
LL | #[derive]
-   | ^^^^^^^^^
+   | ^^^^^^^^^ missing traits to be derived
+   |
+   = note: `derive` must be of the form: `#[derive(Trait1, Trait2, ...)]`

-error: attribute must be of the form `#[derive(Trait1, Trait2, ...)]`
+error: bad `derive` attribute
  --> $DIR/malformed-special-attrs.rs:10:1
   |
LL | #[derive = ""]
-   | ^^^^^^^^^^^^^^
+   | ^^^^^^^^^^^^^^ missing traits to be derived
+   |
+   = note: `derive` must be of the form: `#[derive(Trait1, Trait2, ...)]`

error: aborting due to 4 previous errors

src/test/ui/parser/bad-macro-argument.rs (new file, 4 lines)
@@ -0,0 +1,4 @@
+fn main() {
+    let message = "world";
+    println!("Hello, {}", message/); //~ ERROR expected expression
+}

src/test/ui/parser/bad-macro-argument.stderr (new file, 8 lines)
@@ -0,0 +1,8 @@
+error: expected expression, found end of macro arguments
+  --> $DIR/bad-macro-argument.rs:3:35
+   |
+LL | println!("Hello, {}", message/);
+   | ^ expected expression
+
+error: aborting due to previous error
+

@@ -9,7 +9,7 @@ use attr_stmt_expr::{duplicate, no_output};

fn main() {
    let _ = #[no_output] "Hello, world!";
-    //~^ ERROR expected expression, found `<eof>`
+    //~^ ERROR expected expression, found end of macro arguments

    let _ = #[duplicate] "Hello, world!";
    //~^ ERROR macro expansion ignores token `,` and any following

@@ -1,4 +1,4 @@
-error: expected expression, found `<eof>`
+error: expected expression, found end of macro arguments
  --> $DIR/attr-invalid-exprs.rs:11:13
   |
LL | let _ = #[no_output] "Hello, world!";