review comments
parent 24160171e4
commit 5c5fa775e5

11 changed files with 73 additions and 66 deletions

@@ -627,7 +627,7 @@ mod tests {
     use super::*;
 
     fn same(fmt: &'static str, p: &[Piece<'static>]) {
-        let parser = Parser::new(fmt, None, vec![], false, None);
+        let parser = Parser::new(fmt, None, vec![], false);
         assert!(parser.collect::<Vec<Piece<'static>>>() == p);
     }
 

@@ -643,7 +643,7 @@ mod tests {
     }
 
     fn musterr(s: &str) {
-        let mut p = Parser::new(s, None, vec![], false, None);
+        let mut p = Parser::new(s, None, vec![], false);
        p.next();
        assert!(!p.errors.is_empty());
     }

@@ -98,7 +98,7 @@ impl<'a> StripUnconfigured<'a> {
         self.sess.span_diagnostic.struct_span_err(attr.span, "bad `cfg_attr` attribute")
             .span_label(attr.span, "missing condition and attribute")
             .note("`cfg_attr` must be of the form: \
-                   `#[cfg_attr(condition, attribute)]`")
+                   `#[cfg_attr(condition, attribute, other_attribute, ...)]`")
             .note("for more information, visit \
                    <https://doc.rust-lang.org/reference/conditional-compilation.html\
                    #the-cfg_attr-attribute>")

@@ -11,7 +11,7 @@ use crate::parse::{self, parser, DirectoryOwnership};
 use crate::parse::token;
 use crate::ptr::P;
 use crate::symbol::{kw, sym, Ident, Symbol};
-use crate::ThinVec;
+use crate::{ThinVec, MACRO_ARGUMENTS};
 use crate::tokenstream::{self, TokenStream};
 
 use errors::{DiagnosticBuilder, DiagnosticId};

@@ -850,11 +850,7 @@ impl<'a> ExtCtxt<'a> {
     }
 
     pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
-        parse::stream_to_parser(
-            self.parse_sess,
-            tts.iter().cloned().collect(),
-            Some("macro arguments"),
-        )
+        parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect(), MACRO_ARGUMENTS)
     }
     pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
     pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }

@@ -664,7 +664,7 @@ pub fn parse(
         directory,
         recurse_into_modules,
         true,
-        Some("macro arguments"),
+        crate::MACRO_ARGUMENTS,
     );
 
     // A queue of possible matcher positions. We initialize it with the matcher position in which

@@ -31,6 +31,8 @@ pub use rustc_data_structures::thin_vec::ThinVec;
 use ast::AttrId;
 use syntax_pos::edition::Edition;
 
+const MACRO_ARGUMENTS: Option<&'static str> = Some("macro arguments");
+
 // A variant of 'try!' that panics on an Err. This is used as a crutch on the
 // way towards a non-panic!-prone parser. It should be used for fatal parsing
 // errors; eventually we plan to convert all code using panictry to just use

@@ -13,7 +13,7 @@ use crate::symbol::kw;
 use crate::ThinVec;
 use errors::{Applicability, DiagnosticBuilder};
 use log::debug;
-use syntax_pos::Span;
+use syntax_pos::{Span, DUMMY_SP};
 
 pub trait RecoverQPath: Sized + 'static {
     const PATH_STYLE: PathStyle = PathStyle::Expr;

@@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
 
         let mut path = ast::Path {
             segments: Vec::new(),
-            span: syntax_pos::DUMMY_SP,
+            span: DUMMY_SP,
         };
         self.parse_path_segments(&mut path.segments, T::PATH_STYLE)?;
         path.span = ty_span.to(self.prev_span);

@@ -267,6 +267,58 @@ impl<'a> Parser<'a> {
         }
     }
 
+    /// Create a `DiagnosticBuilder` for an unexpected token `t` and try to recover if it is a
+    /// closing delimiter.
+    pub fn unexpected_try_recover(
+        &mut self,
+        t: &token::Token,
+    ) -> PResult<'a, bool /* recovered */> {
+        let token_str = pprust::token_to_string(t);
+        let this_token_str = self.this_token_descr();
+        let (prev_sp, sp) = match (&self.token, self.subparser_name) {
+            // Point at the end of the macro call when reaching end of macro arguments.
+            (token::Token::Eof, Some(_)) => {
+                let sp = self.sess.source_map().next_point(self.span);
+                (sp, sp)
+            }
+            // We don't want to point at the following span after DUMMY_SP.
+            // This happens when the parser finds an empty TokenStream.
+            _ if self.prev_span == DUMMY_SP => (self.span, self.span),
+            // EOF, don't want to point at the following char, but rather the last token.
+            (token::Token::Eof, None) => (self.prev_span, self.span),
+            _ => (self.sess.source_map().next_point(self.prev_span), self.span),
+        };
+        let msg = format!(
+            "expected `{}`, found {}",
+            token_str,
+            match (&self.token, self.subparser_name) {
+                (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
+                _ => this_token_str,
+            },
+        );
+        let mut err = self.struct_span_err(sp, &msg);
+        let label_exp = format!("expected `{}`", token_str);
+        match self.recover_closing_delimiter(&[t.clone()], err) {
+            Err(e) => err = e,
+            Ok(recovered) => {
+                return Ok(recovered);
+            }
+        }
+        let cm = self.sess.source_map();
+        match (cm.lookup_line(prev_sp.lo()), cm.lookup_line(sp.lo())) {
+            (Ok(ref a), Ok(ref b)) if a.line == b.line => {
+                // When the spans are in the same line, it means that the only content
+                // between them is whitespace, point only at the found token.
+                err.span_label(sp, label_exp);
+            }
+            _ => {
+                err.span_label(prev_sp, label_exp);
+                err.span_label(sp, "unexpected token");
+            }
+        }
+        Err(err)
+    }
+
     /// Consume alternative await syntaxes like `await <expr>`, `await? <expr>`, `await(<expr>)`
     /// and `await { <expr> }`.
     crate fn parse_incorrect_await_syntax(

@@ -248,7 +248,7 @@ fn maybe_source_file_to_parser(
 // must preserve old name for now, because quote! from the *existing*
 // compiler expands into it
 pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
-    stream_to_parser(sess, tts.into_iter().collect(), Some("macro arguments"))
+    stream_to_parser(sess, tts.into_iter().collect(), crate::MACRO_ARGUMENTS)
 }
 
 

@@ -331,9 +331,9 @@ pub fn maybe_file_to_stream(
 pub fn stream_to_parser<'a>(
     sess: &'a ParseSess,
     stream: TokenStream,
-    is_subparser: Option<&'static str>,
+    subparser_name: Option<&'static str>,
 ) -> Parser<'a> {
-    Parser::new(sess, stream, None, true, false, is_subparser)
+    Parser::new(sess, stream, None, true, false, subparser_name)
 }
 
 /// Given stream, the `ParseSess` and the base directory, produces a parser.

@@ -233,8 +233,8 @@ pub struct Parser<'a> {
     /// error.
     crate unclosed_delims: Vec<UnmatchedBrace>,
     last_unexpected_token_span: Option<Span>,
-    /// If `true`, this `Parser` is not parsing Rust code but rather a macro call.
-    is_subparser: Option<&'static str>,
+    /// If present, this `Parser` is not parsing Rust code but rather a macro call.
+    crate subparser_name: Option<&'static str>,
 }
 
 impl<'a> Drop for Parser<'a> {

@@ -541,7 +541,7 @@ impl<'a> Parser<'a> {
         directory: Option<Directory<'a>>,
         recurse_into_file_modules: bool,
         desugar_doc_comments: bool,
-        is_subparser: Option<&'static str>,
+        subparser_name: Option<&'static str>,
     ) -> Self {
         let mut parser = Parser {
             sess,

@@ -572,7 +572,7 @@ impl<'a> Parser<'a> {
             max_angle_bracket_count: 0,
             unclosed_delims: Vec::new(),
             last_unexpected_token_span: None,
-            is_subparser,
+            subparser_name,
         };
 
         let tok = parser.next_tok();

@@ -642,50 +642,7 @@ impl<'a> Parser<'a> {
             self.bump();
             Ok(false)
         } else {
-            let token_str = pprust::token_to_string(t);
-            let this_token_str = self.this_token_descr();
-            let (prev_sp, sp) = match (&self.token, self.is_subparser) {
-                // Point at the end of the macro call when reaching end of macro arguments.
-                (token::Token::Eof, Some(_)) => {
-                    let sp = self.sess.source_map().next_point(self.span);
-                    (sp, sp)
-                }
-                // We don't want to point at the following span after DUMMY_SP.
-                // This happens when the parser finds an empty TokenStream.
-                _ if self.prev_span == DUMMY_SP => (self.span, self.span),
-                // EOF, don't want to point at the following char, but rather the last token.
-                (token::Token::Eof, None) => (self.prev_span, self.span),
-                _ => (self.sess.source_map().next_point(self.prev_span), self.span),
-            };
-            let msg = format!(
-                "expected `{}`, found {}",
-                token_str,
-                match (&self.token, self.is_subparser) {
-                    (token::Token::Eof, Some(origin)) => format!("end of {}", origin),
-                    _ => this_token_str,
-                },
-            );
-            let mut err = self.struct_span_err(sp, &msg);
-            let label_exp = format!("expected `{}`", token_str);
-            match self.recover_closing_delimiter(&[t.clone()], err) {
-                Err(e) => err = e,
-                Ok(recovered) => {
-                    return Ok(recovered);
-                }
-            }
-            let cm = self.sess.source_map();
-            match (cm.lookup_line(prev_sp.lo()), cm.lookup_line(sp.lo())) {
-                (Ok(ref a), Ok(ref b)) if a.line == b.line => {
-                    // When the spans are in the same line, it means that the only content
-                    // between them is whitespace, point only at the found token.
-                    err.span_label(sp, label_exp);
-                }
-                _ => {
-                    err.span_label(prev_sp, label_exp);
-                    err.span_label(sp, "unexpected token");
-                }
-            }
-            Err(err)
+            self.unexpected_try_recover(t)
         }
     } else {
         self.expect_one_of(slice::from_ref(t), &[])

@@ -2644,7 +2601,7 @@ impl<'a> Parser<'a> {
                 }
                 Err(mut err) => {
                     self.cancel(&mut err);
-                    let (span, msg) = match (&self.token, self.is_subparser) {
+                    let (span, msg) = match (&self.token, self.subparser_name) {
                         (&token::Token::Eof, Some(origin)) => {
                             let sp = self.sess.source_map().next_point(self.span);
                             (sp, format!( "expected expression, found end of {}", origin))

@@ -4,7 +4,7 @@ error: bad `cfg_attr` attribute
 LL | #[cfg_attr]
    | ^^^^^^^^^^^ missing condition and attribute
    |
-   = note: `cfg_attr` must be of the form: `#[cfg_attr(condition, attribute)]`
+   = note: `cfg_attr` must be of the form: `#[cfg_attr(condition, attribute, other_attribute, ...)]`
    = note: for more information, visit <https://doc.rust-lang.org/reference/conditional-compilation.html#the-cfg_attr-attribute>
 
 error: expected `(`, found `=`
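
As a minimal sketch of the situation these changes target (my own illustration, not a test from this commit; the macro name and the exact diagnostic wording are assumptions, and the snippet is intentionally erroneous): a fragment matcher that runs out of tokens mid-expression now gets a clearer error.

// Hypothetical reproduction: `$b:expr` starts parsing `2 +` inside the macro's
// argument TokenStream and reaches Eof while looking for the right-hand operand.
macro_rules! add {
    ($a:expr, $b:expr) => { $a + $b };
}

fn main() {
    // With the parser's `subparser_name` set via `MACRO_ARGUMENTS`, the
    // diagnostic can read along the lines of
    //     error: expected expression, found end of macro arguments
    // and point at the end of the macro call rather than at an unrelated span.
    let _sum = add!(1, 2 +);
}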