Auto merge of #57944 - estebank:unclosed-delim-the-quickening, r=oli-obk
Deduplicate mismatched delimiter errors Delay unmatched delimiter errors until after the parser has run to deduplicate them when parsing and attempt recovering intelligently. Second attempt at #54029, follow up to #53949. Fix #31528.
This commit is contained in:
commit
3315728c06
19 changed files with 332 additions and 157 deletions
|
@ -672,8 +672,8 @@ impl EmitterWriter {
|
||||||
// | | something about `foo`
|
// | | something about `foo`
|
||||||
// | something about `fn foo()`
|
// | something about `fn foo()`
|
||||||
annotations_position.sort_by(|a, b| {
|
annotations_position.sort_by(|a, b| {
|
||||||
// Decreasing order
|
// Decreasing order. When `a` and `b` are the same length, prefer `Primary`.
|
||||||
a.1.len().cmp(&b.1.len()).reverse()
|
(a.1.len(), !a.1.is_primary).cmp(&(b.1.len(), !b.1.is_primary)).reverse()
|
||||||
});
|
});
|
||||||
|
|
||||||
// Write the underlines.
|
// Write the underlines.
|
||||||
|
|
|
@ -29,6 +29,7 @@ use syntax::attr;
|
||||||
use syntax::source_map;
|
use syntax::source_map;
|
||||||
use syntax::edition::Edition;
|
use syntax::edition::Edition;
|
||||||
use syntax::parse::source_file_to_stream;
|
use syntax::parse::source_file_to_stream;
|
||||||
|
use syntax::parse::parser::emit_unclosed_delims;
|
||||||
use syntax::symbol::Symbol;
|
use syntax::symbol::Symbol;
|
||||||
use syntax_pos::{Span, NO_EXPANSION, FileName};
|
use syntax_pos::{Span, NO_EXPANSION, FileName};
|
||||||
use rustc_data_structures::bit_set::BitSet;
|
use rustc_data_structures::bit_set::BitSet;
|
||||||
|
@ -436,7 +437,8 @@ impl cstore::CStore {
|
||||||
|
|
||||||
let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body);
|
let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body);
|
||||||
let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION);
|
let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION);
|
||||||
let body = source_file_to_stream(&sess.parse_sess, source_file, None);
|
let (body, errors) = source_file_to_stream(&sess.parse_sess, source_file, None);
|
||||||
|
emit_unclosed_delims(&errors, &sess.diagnostic());
|
||||||
|
|
||||||
// Mark the attrs as used
|
// Mark the attrs as used
|
||||||
let attrs = data.get_item_attrs(id.index, sess);
|
let attrs = data.get_item_attrs(id.index, sess);
|
||||||
|
|
|
@ -33,6 +33,15 @@ impl Default for TokenAndSpan {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
#[derive(Clone, Debug)]
|
||||||
|
pub struct UnmatchedBrace {
|
||||||
|
pub expected_delim: token::DelimToken,
|
||||||
|
pub found_delim: token::DelimToken,
|
||||||
|
pub found_span: Span,
|
||||||
|
pub unclosed_span: Option<Span>,
|
||||||
|
pub candidate_span: Option<Span>,
|
||||||
|
}
|
||||||
|
|
||||||
pub struct StringReader<'a> {
|
pub struct StringReader<'a> {
|
||||||
pub sess: &'a ParseSess,
|
pub sess: &'a ParseSess,
|
||||||
/// The absolute offset within the source_map of the next character to read
|
/// The absolute offset within the source_map of the next character to read
|
||||||
|
@ -58,6 +67,7 @@ pub struct StringReader<'a> {
|
||||||
span_src_raw: Span,
|
span_src_raw: Span,
|
||||||
/// Stack of open delimiters and their spans. Used for error message.
|
/// Stack of open delimiters and their spans. Used for error message.
|
||||||
open_braces: Vec<(token::DelimToken, Span)>,
|
open_braces: Vec<(token::DelimToken, Span)>,
|
||||||
|
crate unmatched_braces: Vec<UnmatchedBrace>,
|
||||||
/// The type and spans for all braces
|
/// The type and spans for all braces
|
||||||
///
|
///
|
||||||
/// Used only for error recovery when arriving to EOF with mismatched braces.
|
/// Used only for error recovery when arriving to EOF with mismatched braces.
|
||||||
|
@ -222,6 +232,7 @@ impl<'a> StringReader<'a> {
|
||||||
span: syntax_pos::DUMMY_SP,
|
span: syntax_pos::DUMMY_SP,
|
||||||
span_src_raw: syntax_pos::DUMMY_SP,
|
span_src_raw: syntax_pos::DUMMY_SP,
|
||||||
open_braces: Vec::new(),
|
open_braces: Vec::new(),
|
||||||
|
unmatched_braces: Vec::new(),
|
||||||
matching_delim_spans: Vec::new(),
|
matching_delim_spans: Vec::new(),
|
||||||
override_span,
|
override_span,
|
||||||
last_unclosed_found_span: None,
|
last_unclosed_found_span: None,
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
use crate::print::pprust::token_to_string;
|
use crate::print::pprust::token_to_string;
|
||||||
use crate::parse::lexer::StringReader;
|
use crate::parse::lexer::{StringReader, UnmatchedBrace};
|
||||||
use crate::parse::{token, PResult};
|
use crate::parse::{token, PResult};
|
||||||
use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
|
use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
|
||||||
|
|
||||||
|
@ -101,38 +101,38 @@ impl<'a> StringReader<'a> {
|
||||||
}
|
}
|
||||||
// Incorrect delimiter.
|
// Incorrect delimiter.
|
||||||
token::CloseDelim(other) => {
|
token::CloseDelim(other) => {
|
||||||
let token_str = token_to_string(&self.token);
|
let mut unclosed_delimiter = None;
|
||||||
|
let mut candidate = None;
|
||||||
if self.last_unclosed_found_span != Some(self.span) {
|
if self.last_unclosed_found_span != Some(self.span) {
|
||||||
// do not complain about the same unclosed delimiter multiple times
|
// do not complain about the same unclosed delimiter multiple times
|
||||||
self.last_unclosed_found_span = Some(self.span);
|
self.last_unclosed_found_span = Some(self.span);
|
||||||
let msg = format!("incorrect close delimiter: `{}`", token_str);
|
|
||||||
let mut err = self.sess.span_diagnostic.struct_span_err(
|
|
||||||
self.span,
|
|
||||||
&msg,
|
|
||||||
);
|
|
||||||
err.span_label(self.span, "incorrect close delimiter");
|
|
||||||
// This is a conservative error: only report the last unclosed
|
// This is a conservative error: only report the last unclosed
|
||||||
// delimiter. The previous unclosed delimiters could actually be
|
// delimiter. The previous unclosed delimiters could actually be
|
||||||
// closed! The parser just hasn't gotten to them yet.
|
// closed! The parser just hasn't gotten to them yet.
|
||||||
if let Some(&(_, sp)) = self.open_braces.last() {
|
if let Some(&(_, sp)) = self.open_braces.last() {
|
||||||
err.span_label(sp, "un-closed delimiter");
|
unclosed_delimiter = Some(sp);
|
||||||
};
|
};
|
||||||
if let Some(current_padding) = sm.span_to_margin(self.span) {
|
if let Some(current_padding) = sm.span_to_margin(self.span) {
|
||||||
for (brace, brace_span) in &self.open_braces {
|
for (brace, brace_span) in &self.open_braces {
|
||||||
if let Some(padding) = sm.span_to_margin(*brace_span) {
|
if let Some(padding) = sm.span_to_margin(*brace_span) {
|
||||||
// high likelihood of these two corresponding
|
// high likelihood of these two corresponding
|
||||||
if current_padding == padding && brace == &other {
|
if current_padding == padding && brace == &other {
|
||||||
err.span_label(
|
candidate = Some(*brace_span);
|
||||||
*brace_span,
|
|
||||||
"close delimiter possibly meant for this",
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
err.emit();
|
let (tok, _) = self.open_braces.pop().unwrap();
|
||||||
|
self.unmatched_braces.push(UnmatchedBrace {
|
||||||
|
expected_delim: tok,
|
||||||
|
found_delim: other,
|
||||||
|
found_span: self.span,
|
||||||
|
unclosed_span: unclosed_delimiter,
|
||||||
|
candidate_span: candidate,
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
self.open_braces.pop();
|
||||||
}
|
}
|
||||||
self.open_braces.pop().unwrap();
|
|
||||||
|
|
||||||
// If the incorrect delimiter matches an earlier opening
|
// If the incorrect delimiter matches an earlier opening
|
||||||
// delimiter, then don't consume it (it can be used to
|
// delimiter, then don't consume it (it can be used to
|
||||||
|
|
|
@ -9,6 +9,7 @@ use crate::parse::parser::Parser;
|
||||||
use crate::symbol::Symbol;
|
use crate::symbol::Symbol;
|
||||||
use crate::tokenstream::{TokenStream, TokenTree};
|
use crate::tokenstream::{TokenStream, TokenTree};
|
||||||
use crate::diagnostics::plugin::ErrorMap;
|
use crate::diagnostics::plugin::ErrorMap;
|
||||||
|
use crate::print::pprust::token_to_string;
|
||||||
|
|
||||||
use rustc_data_structures::sync::{Lrc, Lock};
|
use rustc_data_structures::sync::{Lrc, Lock};
|
||||||
use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
|
use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
|
||||||
|
@ -136,15 +137,17 @@ pub fn parse_crate_attrs_from_source_str(name: FileName, source: String, sess: &
|
||||||
new_parser_from_source_str(sess, name, source).parse_inner_attributes()
|
new_parser_from_source_str(sess, name, source).parse_inner_attributes()
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_stream_from_source_str(name: FileName, source: String, sess: &ParseSess,
|
pub fn parse_stream_from_source_str(
|
||||||
override_span: Option<Span>)
|
name: FileName,
|
||||||
-> TokenStream {
|
source: String,
|
||||||
|
sess: &ParseSess,
|
||||||
|
override_span: Option<Span>,
|
||||||
|
) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
|
||||||
source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
|
source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Create a new parser from a source string
|
/// Create a new parser from a source string
|
||||||
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
|
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
|
||||||
-> Parser<'_> {
|
|
||||||
panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
|
panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -195,12 +198,14 @@ fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Pars
|
||||||
|
|
||||||
/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
|
/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
|
||||||
/// initial token stream.
|
/// initial token stream.
|
||||||
fn maybe_source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>)
|
fn maybe_source_file_to_parser(
|
||||||
-> Result<Parser<'_>, Vec<Diagnostic>>
|
sess: &ParseSess,
|
||||||
{
|
source_file: Lrc<SourceFile>,
|
||||||
|
) -> Result<Parser<'_>, Vec<Diagnostic>> {
|
||||||
let end_pos = source_file.end_pos;
|
let end_pos = source_file.end_pos;
|
||||||
let mut parser = stream_to_parser(sess, maybe_file_to_stream(sess, source_file, None)?);
|
let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
|
||||||
|
let mut parser = stream_to_parser(sess, stream);
|
||||||
|
parser.unclosed_delims = unclosed_delims;
|
||||||
if parser.token == token::Eof && parser.span.is_dummy() {
|
if parser.token == token::Eof && parser.span.is_dummy() {
|
||||||
parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
|
parser.span = Span::new(end_pos, end_pos, parser.span.ctxt());
|
||||||
}
|
}
|
||||||
|
@ -247,25 +252,44 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Given a source_file, produce a sequence of token-trees
|
/// Given a source_file, produce a sequence of token-trees
|
||||||
pub fn source_file_to_stream(sess: &ParseSess,
|
pub fn source_file_to_stream(
|
||||||
|
sess: &ParseSess,
|
||||||
source_file: Lrc<SourceFile>,
|
source_file: Lrc<SourceFile>,
|
||||||
override_span: Option<Span>) -> TokenStream {
|
override_span: Option<Span>,
|
||||||
|
) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
|
||||||
panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
|
panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
|
/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
|
||||||
/// parsing the token stream.
|
/// parsing the token stream.
|
||||||
pub fn maybe_file_to_stream(sess: &ParseSess,
|
pub fn maybe_file_to_stream(
|
||||||
|
sess: &ParseSess,
|
||||||
source_file: Lrc<SourceFile>,
|
source_file: Lrc<SourceFile>,
|
||||||
override_span: Option<Span>) -> Result<TokenStream, Vec<Diagnostic>> {
|
override_span: Option<Span>,
|
||||||
|
) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
|
||||||
let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
|
let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
|
||||||
srdr.real_token();
|
srdr.real_token();
|
||||||
|
|
||||||
match srdr.parse_all_token_trees() {
|
match srdr.parse_all_token_trees() {
|
||||||
Ok(stream) => Ok(stream),
|
Ok(stream) => Ok((stream, srdr.unmatched_braces)),
|
||||||
Err(err) => {
|
Err(err) => {
|
||||||
let mut buffer = Vec::with_capacity(1);
|
let mut buffer = Vec::with_capacity(1);
|
||||||
err.buffer(&mut buffer);
|
err.buffer(&mut buffer);
|
||||||
|
// Not using `emit_unclosed_delims` to use `db.buffer`
|
||||||
|
for unmatched in srdr.unmatched_braces {
|
||||||
|
let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
|
||||||
|
"incorrect close delimiter: `{}`",
|
||||||
|
token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
|
||||||
|
));
|
||||||
|
db.span_label(unmatched.found_span, "incorrect close delimiter");
|
||||||
|
if let Some(sp) = unmatched.candidate_span {
|
||||||
|
db.span_label(sp, "close delimiter possibly meant for this");
|
||||||
|
}
|
||||||
|
if let Some(sp) = unmatched.unclosed_span {
|
||||||
|
db.span_label(sp, "un-closed delimiter");
|
||||||
|
}
|
||||||
|
db.buffer(&mut buffer);
|
||||||
|
}
|
||||||
Err(buffer)
|
Err(buffer)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
|
@ -35,7 +35,7 @@ use crate::ext::base::DummyResult;
|
||||||
use crate::source_map::{self, SourceMap, Spanned, respan};
|
use crate::source_map::{self, SourceMap, Spanned, respan};
|
||||||
use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
|
use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
|
||||||
use crate::parse::{self, SeqSep, classify, token};
|
use crate::parse::{self, SeqSep, classify, token};
|
||||||
use crate::parse::lexer::TokenAndSpan;
|
use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
|
||||||
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
|
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
|
||||||
use crate::parse::token::DelimToken;
|
use crate::parse::token::DelimToken;
|
||||||
use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
|
use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
|
||||||
|
@ -251,6 +251,11 @@ pub struct Parser<'a> {
|
||||||
///
|
///
|
||||||
/// See the comments in the `parse_path_segment` function for more details.
|
/// See the comments in the `parse_path_segment` function for more details.
|
||||||
crate unmatched_angle_bracket_count: u32,
|
crate unmatched_angle_bracket_count: u32,
|
||||||
|
crate max_angle_bracket_count: u32,
|
||||||
|
/// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
|
||||||
|
/// it gets removed from here. Every entry left at the end gets emitted as an independent
|
||||||
|
/// error.
|
||||||
|
crate unclosed_delims: Vec<UnmatchedBrace>,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@ -575,6 +580,8 @@ impl<'a> Parser<'a> {
|
||||||
desugar_doc_comments,
|
desugar_doc_comments,
|
||||||
cfg_mods: true,
|
cfg_mods: true,
|
||||||
unmatched_angle_bracket_count: 0,
|
unmatched_angle_bracket_count: 0,
|
||||||
|
max_angle_bracket_count: 0,
|
||||||
|
unclosed_delims: Vec::new(),
|
||||||
};
|
};
|
||||||
|
|
||||||
let tok = parser.next_tok();
|
let tok = parser.next_tok();
|
||||||
|
@ -644,11 +651,11 @@ impl<'a> Parser<'a> {
|
||||||
|
|
||||||
/// Expect and consume the token t. Signal an error if
|
/// Expect and consume the token t. Signal an error if
|
||||||
/// the next token is not t.
|
/// the next token is not t.
|
||||||
pub fn expect(&mut self, t: &token::Token) -> PResult<'a, ()> {
|
pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
|
||||||
if self.expected_tokens.is_empty() {
|
if self.expected_tokens.is_empty() {
|
||||||
if self.token == *t {
|
if self.token == *t {
|
||||||
self.bump();
|
self.bump();
|
||||||
Ok(())
|
Ok(false)
|
||||||
} else {
|
} else {
|
||||||
let token_str = pprust::token_to_string(t);
|
let token_str = pprust::token_to_string(t);
|
||||||
let this_token_str = self.this_token_descr();
|
let this_token_str = self.this_token_descr();
|
||||||
|
@ -663,6 +670,12 @@ impl<'a> Parser<'a> {
|
||||||
self.sess.source_map().next_point(self.prev_span)
|
self.sess.source_map().next_point(self.prev_span)
|
||||||
};
|
};
|
||||||
let label_exp = format!("expected `{}`", token_str);
|
let label_exp = format!("expected `{}`", token_str);
|
||||||
|
match self.recover_closing_delimiter(&[t.clone()], err) {
|
||||||
|
Err(e) => err = e,
|
||||||
|
Ok(recovered) => {
|
||||||
|
return Ok(recovered);
|
||||||
|
}
|
||||||
|
}
|
||||||
let cm = self.sess.source_map();
|
let cm = self.sess.source_map();
|
||||||
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
|
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
|
||||||
(Ok(ref a), Ok(ref b)) if a.line == b.line => {
|
(Ok(ref a), Ok(ref b)) if a.line == b.line => {
|
||||||
|
@ -682,12 +695,64 @@ impl<'a> Parser<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn recover_closing_delimiter(
|
||||||
|
&mut self,
|
||||||
|
tokens: &[token::Token],
|
||||||
|
mut err: DiagnosticBuilder<'a>,
|
||||||
|
) -> PResult<'a, bool> {
|
||||||
|
let mut pos = None;
|
||||||
|
// we want to use the last closing delim that would apply
|
||||||
|
for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
|
||||||
|
if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
|
||||||
|
&& Some(self.span) > unmatched.unclosed_span
|
||||||
|
{
|
||||||
|
pos = Some(i);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
match pos {
|
||||||
|
Some(pos) => {
|
||||||
|
// Recover and assume that the detected unclosed delimiter was meant for
|
||||||
|
// this location. Emit the diagnostic and act as if the delimiter was
|
||||||
|
// present for the parser's sake.
|
||||||
|
|
||||||
|
// Don't attempt to recover from this unclosed delimiter more than once.
|
||||||
|
let unmatched = self.unclosed_delims.remove(pos);
|
||||||
|
let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
|
||||||
|
|
||||||
|
// We want to suggest the inclusion of the closing delimiter where it makes
|
||||||
|
// the most sense, which is immediately after the last token:
|
||||||
|
//
|
||||||
|
// {foo(bar {}}
|
||||||
|
// - ^
|
||||||
|
// | |
|
||||||
|
// | help: `)` may belong here (FIXME: #58270)
|
||||||
|
// |
|
||||||
|
// unclosed delimiter
|
||||||
|
if let Some(sp) = unmatched.unclosed_span {
|
||||||
|
err.span_label(sp, "unclosed delimiter");
|
||||||
|
}
|
||||||
|
err.span_suggestion_short(
|
||||||
|
self.sess.source_map().next_point(self.prev_span),
|
||||||
|
&format!("{} may belong here", delim.to_string()),
|
||||||
|
delim.to_string(),
|
||||||
|
Applicability::MaybeIncorrect,
|
||||||
|
);
|
||||||
|
err.emit();
|
||||||
|
self.expected_tokens.clear(); // reduce errors
|
||||||
|
Ok(true)
|
||||||
|
}
|
||||||
|
_ => Err(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Expect next token to be edible or inedible token. If edible,
|
/// Expect next token to be edible or inedible token. If edible,
|
||||||
/// then consume it; if inedible, then return without consuming
|
/// then consume it; if inedible, then return without consuming
|
||||||
/// anything. Signal a fatal error if next token is unexpected.
|
/// anything. Signal a fatal error if next token is unexpected.
|
||||||
pub fn expect_one_of(&mut self,
|
pub fn expect_one_of(
|
||||||
|
&mut self,
|
||||||
edible: &[token::Token],
|
edible: &[token::Token],
|
||||||
inedible: &[token::Token]) -> PResult<'a, ()>{
|
inedible: &[token::Token],
|
||||||
|
) -> PResult<'a, bool /* recovered */> {
|
||||||
fn tokens_to_string(tokens: &[TokenType]) -> String {
|
fn tokens_to_string(tokens: &[TokenType]) -> String {
|
||||||
let mut i = tokens.iter();
|
let mut i = tokens.iter();
|
||||||
// This might be a sign we need a connect method on Iterator.
|
// This might be a sign we need a connect method on Iterator.
|
||||||
|
@ -707,10 +772,10 @@ impl<'a> Parser<'a> {
|
||||||
}
|
}
|
||||||
if edible.contains(&self.token) {
|
if edible.contains(&self.token) {
|
||||||
self.bump();
|
self.bump();
|
||||||
Ok(())
|
Ok(false)
|
||||||
} else if inedible.contains(&self.token) {
|
} else if inedible.contains(&self.token) {
|
||||||
// leave it in the input
|
// leave it in the input
|
||||||
Ok(())
|
Ok(false)
|
||||||
} else {
|
} else {
|
||||||
let mut expected = edible.iter()
|
let mut expected = edible.iter()
|
||||||
.map(|x| TokenType::Token(x.clone()))
|
.map(|x| TokenType::Token(x.clone()))
|
||||||
|
@ -761,6 +826,15 @@ impl<'a> Parser<'a> {
|
||||||
} else {
|
} else {
|
||||||
label_sp
|
label_sp
|
||||||
};
|
};
|
||||||
|
match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt {
|
||||||
|
TokenType::Token(t) => Some(t.clone()),
|
||||||
|
_ => None,
|
||||||
|
}).collect::<Vec<_>>(), err) {
|
||||||
|
Err(e) => err = e,
|
||||||
|
Ok(recovered) => {
|
||||||
|
return Ok(recovered);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
let cm = self.sess.source_map();
|
let cm = self.sess.source_map();
|
||||||
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
|
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
|
||||||
|
@ -1070,6 +1144,7 @@ impl<'a> Parser<'a> {
|
||||||
if ate {
|
if ate {
|
||||||
// See doc comment for `unmatched_angle_bracket_count`.
|
// See doc comment for `unmatched_angle_bracket_count`.
|
||||||
self.unmatched_angle_bracket_count += 1;
|
self.unmatched_angle_bracket_count += 1;
|
||||||
|
self.max_angle_bracket_count += 1;
|
||||||
debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
|
debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1110,12 +1185,12 @@ impl<'a> Parser<'a> {
|
||||||
};
|
};
|
||||||
|
|
||||||
match ate {
|
match ate {
|
||||||
Some(x) => {
|
Some(_) => {
|
||||||
// See doc comment for `unmatched_angle_bracket_count`.
|
// See doc comment for `unmatched_angle_bracket_count`.
|
||||||
self.unmatched_angle_bracket_count -= 1;
|
self.unmatched_angle_bracket_count -= 1;
|
||||||
debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
|
debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
|
||||||
|
|
||||||
Ok(x)
|
Ok(())
|
||||||
},
|
},
|
||||||
None => self.unexpected(),
|
None => self.unexpected(),
|
||||||
}
|
}
|
||||||
|
@ -1144,19 +1219,22 @@ impl<'a> Parser<'a> {
|
||||||
-> PResult<'a, Vec<T>> where
|
-> PResult<'a, Vec<T>> where
|
||||||
F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
|
F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
|
||||||
{
|
{
|
||||||
let val = self.parse_seq_to_before_end(ket, sep, f)?;
|
let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
|
||||||
|
if !recovered {
|
||||||
self.bump();
|
self.bump();
|
||||||
|
}
|
||||||
Ok(val)
|
Ok(val)
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Parse a sequence, not including the closing delimiter. The function
|
/// Parse a sequence, not including the closing delimiter. The function
|
||||||
/// f must consume tokens until reaching the next separator or
|
/// f must consume tokens until reaching the next separator or
|
||||||
/// closing bracket.
|
/// closing bracket.
|
||||||
pub fn parse_seq_to_before_end<T, F>(&mut self,
|
pub fn parse_seq_to_before_end<T, F>(
|
||||||
|
&mut self,
|
||||||
ket: &token::Token,
|
ket: &token::Token,
|
||||||
sep: SeqSep,
|
sep: SeqSep,
|
||||||
f: F)
|
f: F,
|
||||||
-> PResult<'a, Vec<T>>
|
) -> PResult<'a, (Vec<T>, bool)>
|
||||||
where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
|
where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
|
||||||
{
|
{
|
||||||
self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
|
self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
|
||||||
|
@ -1168,10 +1246,11 @@ impl<'a> Parser<'a> {
|
||||||
sep: SeqSep,
|
sep: SeqSep,
|
||||||
expect: TokenExpectType,
|
expect: TokenExpectType,
|
||||||
mut f: F,
|
mut f: F,
|
||||||
) -> PResult<'a, Vec<T>>
|
) -> PResult<'a, (Vec<T>, bool /* recovered */)>
|
||||||
where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
|
where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
|
||||||
{
|
{
|
||||||
let mut first: bool = true;
|
let mut first = true;
|
||||||
|
let mut recovered = false;
|
||||||
let mut v = vec![];
|
let mut v = vec![];
|
||||||
while !kets.iter().any(|k| {
|
while !kets.iter().any(|k| {
|
||||||
match expect {
|
match expect {
|
||||||
|
@ -1187,7 +1266,13 @@ impl<'a> Parser<'a> {
|
||||||
if first {
|
if first {
|
||||||
first = false;
|
first = false;
|
||||||
} else {
|
} else {
|
||||||
if let Err(mut e) = self.expect(t) {
|
match self.expect(t) {
|
||||||
|
Ok(false) => {}
|
||||||
|
Ok(true) => {
|
||||||
|
recovered = true;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
Err(mut e) => {
|
||||||
// Attempt to keep parsing if it was a similar separator
|
// Attempt to keep parsing if it was a similar separator
|
||||||
if let Some(ref tokens) = t.similar_tokens() {
|
if let Some(ref tokens) = t.similar_tokens() {
|
||||||
if tokens.contains(&self.token) {
|
if tokens.contains(&self.token) {
|
||||||
|
@ -1209,6 +1294,7 @@ impl<'a> Parser<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
if sep.trailing_sep_allowed && kets.iter().any(|k| {
|
if sep.trailing_sep_allowed && kets.iter().any(|k| {
|
||||||
match expect {
|
match expect {
|
||||||
TokenExpectType::Expect => self.check(k),
|
TokenExpectType::Expect => self.check(k),
|
||||||
|
@ -1222,23 +1308,26 @@ impl<'a> Parser<'a> {
|
||||||
v.push(t);
|
v.push(t);
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(v)
|
Ok((v, recovered))
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Parse a sequence, including the closing delimiter. The function
|
/// Parse a sequence, including the closing delimiter. The function
|
||||||
/// f must consume tokens until reaching the next separator or
|
/// f must consume tokens until reaching the next separator or
|
||||||
/// closing bracket.
|
/// closing bracket.
|
||||||
fn parse_unspanned_seq<T, F>(&mut self,
|
fn parse_unspanned_seq<T, F>(
|
||||||
|
&mut self,
|
||||||
bra: &token::Token,
|
bra: &token::Token,
|
||||||
ket: &token::Token,
|
ket: &token::Token,
|
||||||
sep: SeqSep,
|
sep: SeqSep,
|
||||||
f: F)
|
f: F,
|
||||||
-> PResult<'a, Vec<T>> where
|
) -> PResult<'a, Vec<T>> where
|
||||||
F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
|
F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
|
||||||
{
|
{
|
||||||
self.expect(bra)?;
|
self.expect(bra)?;
|
||||||
let result = self.parse_seq_to_before_end(ket, sep, f)?;
|
let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
|
||||||
|
if !recovered {
|
||||||
self.eat(ket);
|
self.eat(ket);
|
||||||
|
}
|
||||||
Ok(result)
|
Ok(result)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -2290,7 +2379,10 @@ impl<'a> Parser<'a> {
|
||||||
// We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
|
// We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
|
||||||
// it isn't, then we reset the unmatched angle bracket count as we're about to start
|
// it isn't, then we reset the unmatched angle bracket count as we're about to start
|
||||||
// parsing a new path.
|
// parsing a new path.
|
||||||
if style == PathStyle::Expr { self.unmatched_angle_bracket_count = 0; }
|
if style == PathStyle::Expr {
|
||||||
|
self.unmatched_angle_bracket_count = 0;
|
||||||
|
self.max_angle_bracket_count = 0;
|
||||||
|
}
|
||||||
|
|
||||||
let args = if self.eat_lt() {
|
let args = if self.eat_lt() {
|
||||||
// `<'a, T, A = U>`
|
// `<'a, T, A = U>`
|
||||||
|
@ -2302,12 +2394,14 @@ impl<'a> Parser<'a> {
|
||||||
} else {
|
} else {
|
||||||
// `(T, U) -> R`
|
// `(T, U) -> R`
|
||||||
self.bump(); // `(`
|
self.bump(); // `(`
|
||||||
let inputs = self.parse_seq_to_before_tokens(
|
let (inputs, recovered) = self.parse_seq_to_before_tokens(
|
||||||
&[&token::CloseDelim(token::Paren)],
|
&[&token::CloseDelim(token::Paren)],
|
||||||
SeqSep::trailing_allowed(token::Comma),
|
SeqSep::trailing_allowed(token::Comma),
|
||||||
TokenExpectType::Expect,
|
TokenExpectType::Expect,
|
||||||
|p| p.parse_ty())?;
|
|p| p.parse_ty())?;
|
||||||
|
if !recovered {
|
||||||
self.bump(); // `)`
|
self.bump(); // `)`
|
||||||
|
}
|
||||||
let span = lo.to(self.prev_span);
|
let span = lo.to(self.prev_span);
|
||||||
let output = if self.eat(&token::RArrow) {
|
let output = if self.eat(&token::RArrow) {
|
||||||
Some(self.parse_ty_common(false, false)?)
|
Some(self.parse_ty_common(false, false)?)
|
||||||
|
@ -2513,9 +2607,13 @@ impl<'a> Parser<'a> {
|
||||||
// (e,) is a tuple with only one field, e
|
// (e,) is a tuple with only one field, e
|
||||||
let mut es = vec![];
|
let mut es = vec![];
|
||||||
let mut trailing_comma = false;
|
let mut trailing_comma = false;
|
||||||
|
let mut recovered = false;
|
||||||
while self.token != token::CloseDelim(token::Paren) {
|
while self.token != token::CloseDelim(token::Paren) {
|
||||||
es.push(self.parse_expr()?);
|
es.push(self.parse_expr()?);
|
||||||
self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?;
|
recovered = self.expect_one_of(
|
||||||
|
&[],
|
||||||
|
&[token::Comma, token::CloseDelim(token::Paren)],
|
||||||
|
)?;
|
||||||
if self.eat(&token::Comma) {
|
if self.eat(&token::Comma) {
|
||||||
trailing_comma = true;
|
trailing_comma = true;
|
||||||
} else {
|
} else {
|
||||||
|
@ -2523,7 +2621,9 @@ impl<'a> Parser<'a> {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
if !recovered {
|
||||||
self.bump();
|
self.bump();
|
||||||
|
}
|
||||||
|
|
||||||
hi = self.prev_span;
|
hi = self.prev_span;
|
||||||
ex = if es.len() == 1 && !trailing_comma {
|
ex = if es.len() == 1 && !trailing_comma {
|
||||||
|
@ -2720,6 +2820,21 @@ impl<'a> Parser<'a> {
|
||||||
hi = pth.span;
|
hi = pth.span;
|
||||||
ex = ExprKind::Path(None, pth);
|
ex = ExprKind::Path(None, pth);
|
||||||
} else {
|
} else {
|
||||||
|
if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
|
||||||
|
// Don't complain about bare semicolons after unclosed braces
|
||||||
|
// recovery in order to keep the error count down. Fixing the
|
||||||
|
// delimiters will possibly also fix the bare semicolon found in
|
||||||
|
// expression context. For example, silence the following error:
|
||||||
|
// ```
|
||||||
|
// error: expected expression, found `;`
|
||||||
|
// --> file.rs:2:13
|
||||||
|
// |
|
||||||
|
// 2 | foo(bar(;
|
||||||
|
// | ^ expected expression
|
||||||
|
// ```
|
||||||
|
self.bump();
|
||||||
|
return Ok(self.mk_expr(self.span, ExprKind::Err, ThinVec::new()));
|
||||||
|
}
|
||||||
match self.parse_literal_maybe_minus() {
|
match self.parse_literal_maybe_minus() {
|
||||||
Ok(expr) => {
|
Ok(expr) => {
|
||||||
hi = expr.span;
|
hi = expr.span;
|
||||||
|
@ -2819,7 +2934,7 @@ impl<'a> Parser<'a> {
|
||||||
|
|
||||||
match self.expect_one_of(&[token::Comma],
|
match self.expect_one_of(&[token::Comma],
|
||||||
&[token::CloseDelim(token::Brace)]) {
|
&[token::CloseDelim(token::Brace)]) {
|
||||||
Ok(()) => if let Some(f) = parsed_field.or(recovery_field) {
|
Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
|
||||||
// only include the field if there's no parse error for the field name
|
// only include the field if there's no parse error for the field name
|
||||||
fields.push(f);
|
fields.push(f);
|
||||||
}
|
}
|
||||||
|
@ -5939,7 +6054,7 @@ impl<'a> Parser<'a> {
|
||||||
|
|
||||||
let sp = self.span;
|
let sp = self.span;
|
||||||
let mut variadic = false;
|
let mut variadic = false;
|
||||||
let args: Vec<Option<Arg>> =
|
let (args, recovered): (Vec<Option<Arg>>, bool) =
|
||||||
self.parse_seq_to_before_end(
|
self.parse_seq_to_before_end(
|
||||||
&token::CloseDelim(token::Paren),
|
&token::CloseDelim(token::Paren),
|
||||||
SeqSep::trailing_allowed(token::Comma),
|
SeqSep::trailing_allowed(token::Comma),
|
||||||
|
@ -5987,7 +6102,9 @@ impl<'a> Parser<'a> {
|
||||||
}
|
}
|
||||||
)?;
|
)?;
|
||||||
|
|
||||||
|
if !recovered {
|
||||||
self.eat(&token::CloseDelim(token::Paren));
|
self.eat(&token::CloseDelim(token::Paren));
|
||||||
|
}
|
||||||
|
|
||||||
let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();
|
let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();
|
||||||
|
|
||||||
|
@ -6132,15 +6249,15 @@ impl<'a> Parser<'a> {
|
||||||
|
|
||||||
// Parse the rest of the function parameter list.
|
// Parse the rest of the function parameter list.
|
||||||
let sep = SeqSep::trailing_allowed(token::Comma);
|
let sep = SeqSep::trailing_allowed(token::Comma);
|
||||||
let fn_inputs = if let Some(self_arg) = self_arg {
|
let (fn_inputs, recovered) = if let Some(self_arg) = self_arg {
|
||||||
if self.check(&token::CloseDelim(token::Paren)) {
|
if self.check(&token::CloseDelim(token::Paren)) {
|
||||||
vec![self_arg]
|
(vec![self_arg], false)
|
||||||
} else if self.eat(&token::Comma) {
|
} else if self.eat(&token::Comma) {
|
||||||
let mut fn_inputs = vec![self_arg];
|
let mut fn_inputs = vec![self_arg];
|
||||||
fn_inputs.append(&mut self.parse_seq_to_before_end(
|
let (mut input, recovered) = self.parse_seq_to_before_end(
|
||||||
&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
|
&token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
|
||||||
);
|
fn_inputs.append(&mut input);
|
||||||
fn_inputs
|
(fn_inputs, recovered)
|
||||||
} else {
|
} else {
|
||||||
return self.unexpected();
|
return self.unexpected();
|
||||||
}
|
}
|
||||||
|
@ -6148,8 +6265,10 @@ impl<'a> Parser<'a> {
|
||||||
self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
|
self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
|
||||||
};
|
};
|
||||||
|
|
||||||
|
if !recovered {
|
||||||
// Parse closing paren and return type.
|
// Parse closing paren and return type.
|
||||||
self.expect(&token::CloseDelim(token::Paren))?;
|
self.expect(&token::CloseDelim(token::Paren))?;
|
||||||
|
}
|
||||||
Ok(P(FnDecl {
|
Ok(P(FnDecl {
|
||||||
inputs: fn_inputs,
|
inputs: fn_inputs,
|
||||||
output: self.parse_ret_ty(true)?,
|
output: self.parse_ret_ty(true)?,
|
||||||
|
@ -6169,7 +6288,7 @@ impl<'a> Parser<'a> {
|
||||||
SeqSep::trailing_allowed(token::Comma),
|
SeqSep::trailing_allowed(token::Comma),
|
||||||
TokenExpectType::NoExpect,
|
TokenExpectType::NoExpect,
|
||||||
|p| p.parse_fn_block_arg()
|
|p| p.parse_fn_block_arg()
|
||||||
)?;
|
)?.0;
|
||||||
self.expect_or()?;
|
self.expect_or()?;
|
||||||
args
|
args
|
||||||
}
|
}
|
||||||
|
@ -8168,7 +8287,7 @@ impl<'a> Parser<'a> {
|
||||||
// eat a matched-delimiter token tree:
|
// eat a matched-delimiter token tree:
|
||||||
let (delim, tts) = self.expect_delimited_token_tree()?;
|
let (delim, tts) = self.expect_delimited_token_tree()?;
|
||||||
if delim != MacDelimiter::Brace {
|
if delim != MacDelimiter::Brace {
|
||||||
self.expect(&token::Semi)?
|
self.expect(&token::Semi)?;
|
||||||
}
|
}
|
||||||
|
|
||||||
Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim })))
|
Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim })))
|
||||||
|
@ -8313,11 +8432,14 @@ impl<'a> Parser<'a> {
|
||||||
/// entry point for the parser.
|
/// entry point for the parser.
|
||||||
pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
|
pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
|
||||||
let lo = self.span;
|
let lo = self.span;
|
||||||
Ok(ast::Crate {
|
let krate = Ok(ast::Crate {
|
||||||
attrs: self.parse_inner_attributes()?,
|
attrs: self.parse_inner_attributes()?,
|
||||||
module: self.parse_mod_items(&token::Eof, lo)?,
|
module: self.parse_mod_items(&token::Eof, lo)?,
|
||||||
span: lo.to(self.span),
|
span: lo.to(self.span),
|
||||||
})
|
});
|
||||||
|
emit_unclosed_delims(&self.unclosed_delims, self.diagnostic());
|
||||||
|
self.unclosed_delims.clear();
|
||||||
|
krate
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
|
pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
|
||||||
|
@ -8346,3 +8468,20 @@ impl<'a> Parser<'a> {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn emit_unclosed_delims(unclosed_delims: &[UnmatchedBrace], handler: &errors::Handler) {
|
||||||
|
for unmatched in unclosed_delims {
|
||||||
|
let mut err = handler.struct_span_err(unmatched.found_span, &format!(
|
||||||
|
"incorrect close delimiter: `{}`",
|
||||||
|
pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
|
||||||
|
));
|
||||||
|
err.span_label(unmatched.found_span, "incorrect close delimiter");
|
||||||
|
if let Some(sp) = unmatched.candidate_span {
|
||||||
|
err.span_label(sp, "close delimiter possibly meant for this");
|
||||||
|
}
|
||||||
|
if let Some(sp) = unmatched.unclosed_span {
|
||||||
|
err.span_label(sp, "un-closed delimiter");
|
||||||
|
}
|
||||||
|
err.emit();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
|
@ -10,6 +10,7 @@ use crate::print::pprust;
|
||||||
use crate::ptr::P;
|
use crate::ptr::P;
|
||||||
use crate::symbol::keywords;
|
use crate::symbol::keywords;
|
||||||
use crate::syntax::parse::parse_stream_from_source_str;
|
use crate::syntax::parse::parse_stream_from_source_str;
|
||||||
|
use crate::syntax::parse::parser::emit_unclosed_delims;
|
||||||
use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
|
use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
|
||||||
|
|
||||||
use serialize::{Decodable, Decoder, Encodable, Encoder};
|
use serialize::{Decodable, Decoder, Encodable, Encoder};
|
||||||
|
@ -501,8 +502,8 @@ impl Token {
|
||||||
/// Enables better error recovery when the wrong token is found.
|
/// Enables better error recovery when the wrong token is found.
|
||||||
crate fn similar_tokens(&self) -> Option<Vec<Token>> {
|
crate fn similar_tokens(&self) -> Option<Vec<Token>> {
|
||||||
match *self {
|
match *self {
|
||||||
Comma => Some(vec![Dot, Lt]),
|
Comma => Some(vec![Dot, Lt, Semi]),
|
||||||
Semi => Some(vec![Colon]),
|
Semi => Some(vec![Colon, Comma]),
|
||||||
_ => None
|
_ => None
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -559,7 +560,10 @@ impl Token {
|
||||||
// FIXME(#43081): Avoid this pretty-print + reparse hack
|
// FIXME(#43081): Avoid this pretty-print + reparse hack
|
||||||
let source = pprust::token_to_string(self);
|
let source = pprust::token_to_string(self);
|
||||||
let filename = FileName::macro_expansion_source_code(&source);
|
let filename = FileName::macro_expansion_source_code(&source);
|
||||||
parse_stream_from_source_str(filename, source, sess, Some(span))
|
let (tokens, errors) = parse_stream_from_source_str(
|
||||||
|
filename, source, sess, Some(span));
|
||||||
|
emit_unclosed_delims(&errors, &sess.span_diagnostic);
|
||||||
|
tokens
|
||||||
});
|
});
|
||||||
|
|
||||||
// During early phases of the compiler the AST could get modified
|
// During early phases of the compiler the AST could get modified
|
||||||
|
@ -800,12 +804,13 @@ fn prepend_attrs(sess: &ParseSess,
|
||||||
let source = pprust::attr_to_string(attr);
|
let source = pprust::attr_to_string(attr);
|
||||||
let macro_filename = FileName::macro_expansion_source_code(&source);
|
let macro_filename = FileName::macro_expansion_source_code(&source);
|
||||||
if attr.is_sugared_doc {
|
if attr.is_sugared_doc {
|
||||||
let stream = parse_stream_from_source_str(
|
let (stream, errors) = parse_stream_from_source_str(
|
||||||
macro_filename,
|
macro_filename,
|
||||||
source,
|
source,
|
||||||
sess,
|
sess,
|
||||||
Some(span),
|
Some(span),
|
||||||
);
|
);
|
||||||
|
emit_unclosed_delims(&errors, &sess.span_diagnostic);
|
||||||
builder.push(stream);
|
builder.push(stream);
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
@ -822,12 +827,13 @@ fn prepend_attrs(sess: &ParseSess,
|
||||||
// ... and for more complicated paths, fall back to a reparse hack that
|
// ... and for more complicated paths, fall back to a reparse hack that
|
||||||
// should eventually be removed.
|
// should eventually be removed.
|
||||||
} else {
|
} else {
|
||||||
let stream = parse_stream_from_source_str(
|
let (stream, errors) = parse_stream_from_source_str(
|
||||||
macro_filename,
|
macro_filename,
|
||||||
source,
|
source,
|
||||||
sess,
|
sess,
|
||||||
Some(span),
|
Some(span),
|
||||||
);
|
);
|
||||||
|
emit_unclosed_delims(&errors, &sess.span_diagnostic);
|
||||||
brackets.push(stream);
|
brackets.push(stream);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -12,8 +12,11 @@ use std::path::PathBuf;
|
||||||
/// Map a string to tts, using a made-up filename:
|
/// Map a string to tts, using a made-up filename:
|
||||||
pub fn string_to_stream(source_str: String) -> TokenStream {
|
pub fn string_to_stream(source_str: String) -> TokenStream {
|
||||||
let ps = ParseSess::new(FilePathMapping::empty());
|
let ps = ParseSess::new(FilePathMapping::empty());
|
||||||
source_file_to_stream(&ps, ps.source_map()
|
source_file_to_stream(
|
||||||
.new_source_file(PathBuf::from("bogofile").into(), source_str), None)
|
&ps,
|
||||||
|
ps.source_map().new_source_file(PathBuf::from("bogofile").into(),
|
||||||
|
source_str,
|
||||||
|
), None).0
|
||||||
}
|
}
|
||||||
|
|
||||||
/// Map string to parser (via tts)
|
/// Map string to parser (via tts)
|
||||||
|
|
|
@ -12,6 +12,7 @@ use syntax::ast;
|
||||||
use syntax::ext::base::ExtCtxt;
|
use syntax::ext::base::ExtCtxt;
|
||||||
use syntax::parse::lexer::comments;
|
use syntax::parse::lexer::comments;
|
||||||
use syntax::parse::{self, token, ParseSess};
|
use syntax::parse::{self, token, ParseSess};
|
||||||
|
use syntax::parse::parser::emit_unclosed_delims;
|
||||||
use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
|
use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
|
||||||
use syntax_pos::hygiene::{SyntaxContext, Transparency};
|
use syntax_pos::hygiene::{SyntaxContext, Transparency};
|
||||||
use syntax_pos::symbol::{keywords, Symbol};
|
use syntax_pos::symbol::{keywords, Symbol};
|
||||||
|
@ -409,12 +410,14 @@ impl server::TokenStream for Rustc<'_> {
|
||||||
stream.is_empty()
|
stream.is_empty()
|
||||||
}
|
}
|
||||||
fn from_str(&mut self, src: &str) -> Self::TokenStream {
|
fn from_str(&mut self, src: &str) -> Self::TokenStream {
|
||||||
parse::parse_stream_from_source_str(
|
let (tokens, errors) = parse::parse_stream_from_source_str(
|
||||||
FileName::proc_macro_source_code(src.clone()),
|
FileName::proc_macro_source_code(src.clone()),
|
||||||
src.to_string(),
|
src.to_string(),
|
||||||
self.sess,
|
self.sess,
|
||||||
Some(self.call_site),
|
Some(self.call_site),
|
||||||
)
|
);
|
||||||
|
emit_unclosed_delims(&errors, &self.sess.span_diagnostic);
|
||||||
|
tokens
|
||||||
}
|
}
|
||||||
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
|
fn to_string(&mut self, stream: &Self::TokenStream) -> String {
|
||||||
stream.to_string()
|
stream.to_string()
|
||||||
|
|
|
@ -9,7 +9,7 @@ LL | x //~ error: use of moved value: `x`
|
||||||
LL | | //~^ value used here after move
|
LL | | //~^ value used here after move
|
||||||
LL | | +=
|
LL | | +=
|
||||||
LL | | x; //~ value moved here
|
LL | | x; //~ value moved here
|
||||||
| | -
|
| | ^
|
||||||
| | |
|
| | |
|
||||||
| |_____move out of `x` occurs here
|
| |_____move out of `x` occurs here
|
||||||
| borrow later used here
|
| borrow later used here
|
||||||
|
|
|
@ -90,7 +90,7 @@ LL | use issue_52891::a;
|
||||||
LL | m,
|
LL | m,
|
||||||
| ______-
|
| ______-
|
||||||
LL | | a}; //~ ERROR `a` is defined multiple times
|
LL | | a}; //~ ERROR `a` is defined multiple times
|
||||||
| | -
|
| | ^
|
||||||
| | |
|
| | |
|
||||||
| |_____`a` reimported here
|
| |_____`a` reimported here
|
||||||
| help: remove unnecessary import
|
| help: remove unnecessary import
|
||||||
|
|
|
@ -1,3 +1,9 @@
|
||||||
|
error: unexpected token: `;`
|
||||||
|
--> $DIR/parser-recovery-2.rs:12:15
|
||||||
|
|
|
||||||
|
LL | let x = y.; //~ ERROR unexpected token
|
||||||
|
| ^
|
||||||
|
|
||||||
error: incorrect close delimiter: `)`
|
error: incorrect close delimiter: `)`
|
||||||
--> $DIR/parser-recovery-2.rs:8:5
|
--> $DIR/parser-recovery-2.rs:8:5
|
||||||
|
|
|
|
||||||
|
@ -7,12 +13,6 @@ LL | let x = foo(); //~ ERROR cannot find function `foo` in this scope
|
||||||
LL | ) //~ ERROR incorrect close delimiter: `)`
|
LL | ) //~ ERROR incorrect close delimiter: `)`
|
||||||
| ^ incorrect close delimiter
|
| ^ incorrect close delimiter
|
||||||
|
|
||||||
error: unexpected token: `;`
|
|
||||||
--> $DIR/parser-recovery-2.rs:12:15
|
|
||||||
|
|
|
||||||
LL | let x = y.; //~ ERROR unexpected token
|
|
||||||
| ^
|
|
||||||
|
|
||||||
error[E0425]: cannot find function `foo` in this scope
|
error[E0425]: cannot find function `foo` in this scope
|
||||||
--> $DIR/parser-recovery-2.rs:7:17
|
--> $DIR/parser-recovery-2.rs:7:17
|
||||||
|
|
|
|
||||||
|
|
|
@ -5,7 +5,7 @@ pub fn trace_option(option: Option<isize>) {
|
||||||
option.map(|some| 42;
|
option.map(|some| 42;
|
||||||
//~^ ERROR: expected one of
|
//~^ ERROR: expected one of
|
||||||
|
|
||||||
} //~ ERROR: incorrect close delimiter
|
}
|
||||||
//~^ ERROR: expected expression, found `)`
|
//~^ ERROR: expected expression, found `)`
|
||||||
|
|
||||||
fn main() {}
|
fn main() {}
|
||||||
|
|
|
@ -1,25 +1,17 @@
|
||||||
error: incorrect close delimiter: `}`
|
|
||||||
--> $DIR/issue-10636-2.rs:8:1
|
|
||||||
|
|
|
||||||
LL | pub fn trace_option(option: Option<isize>) {
|
|
||||||
| - close delimiter possibly meant for this
|
|
||||||
LL | option.map(|some| 42;
|
|
||||||
| - un-closed delimiter
|
|
||||||
...
|
|
||||||
LL | } //~ ERROR: incorrect close delimiter
|
|
||||||
| ^ incorrect close delimiter
|
|
||||||
|
|
||||||
error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;`
|
error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;`
|
||||||
--> $DIR/issue-10636-2.rs:5:25
|
--> $DIR/issue-10636-2.rs:5:25
|
||||||
|
|
|
|
||||||
LL | option.map(|some| 42;
|
LL | option.map(|some| 42;
|
||||||
| ^ expected one of `)`, `,`, `.`, `?`, or an operator here
|
| - ^
|
||||||
|
| | |
|
||||||
|
| | help: `)` may belong here
|
||||||
|
| unclosed delimiter
|
||||||
|
|
||||||
error: expected expression, found `)`
|
error: expected expression, found `)`
|
||||||
--> $DIR/issue-10636-2.rs:8:1
|
--> $DIR/issue-10636-2.rs:8:1
|
||||||
|
|
|
|
||||||
LL | } //~ ERROR: incorrect close delimiter
|
LL | }
|
||||||
| ^ expected expression
|
| ^ expected expression
|
||||||
|
|
||||||
error: aborting due to 3 previous errors
|
error: aborting due to 2 previous errors
|
||||||
|
|
||||||
|
|
|
@ -1,3 +1,9 @@
|
||||||
|
error: unexpected close delimiter: `}`
|
||||||
|
--> $DIR/macro-mismatched-delim-paren-brace.rs:5:1
|
||||||
|
|
|
||||||
|
LL | } //~ ERROR unexpected close delimiter: `}`
|
||||||
|
| ^ unexpected close delimiter
|
||||||
|
|
||||||
error: incorrect close delimiter: `}`
|
error: incorrect close delimiter: `}`
|
||||||
--> $DIR/macro-mismatched-delim-paren-brace.rs:4:5
|
--> $DIR/macro-mismatched-delim-paren-brace.rs:4:5
|
||||||
|
|
|
|
||||||
|
@ -7,11 +13,5 @@ LL | bar, "baz", 1, 2.0
|
||||||
LL | } //~ ERROR incorrect close delimiter
|
LL | } //~ ERROR incorrect close delimiter
|
||||||
| ^ incorrect close delimiter
|
| ^ incorrect close delimiter
|
||||||
|
|
||||||
error: unexpected close delimiter: `}`
|
|
||||||
--> $DIR/macro-mismatched-delim-paren-brace.rs:5:1
|
|
||||||
|
|
|
||||||
LL | } //~ ERROR unexpected close delimiter: `}`
|
|
||||||
| ^ unexpected close delimiter
|
|
||||||
|
|
||||||
error: aborting due to 2 previous errors
|
error: aborting due to 2 previous errors
|
||||||
|
|
||||||
|
|
|
@ -17,7 +17,7 @@ pub mod raw {
|
||||||
//~| expected type `()`
|
//~| expected type `()`
|
||||||
//~| found type `std::result::Result<bool, std::io::Error>`
|
//~| found type `std::result::Result<bool, std::io::Error>`
|
||||||
//~| expected one of
|
//~| expected one of
|
||||||
} else { //~ ERROR: incorrect close delimiter: `}`
|
} else {
|
||||||
//~^ ERROR: expected one of
|
//~^ ERROR: expected one of
|
||||||
//~| unexpected token
|
//~| unexpected token
|
||||||
Ok(false);
|
Ok(false);
|
||||||
|
|
|
@ -1,19 +1,11 @@
|
||||||
error: incorrect close delimiter: `}`
|
|
||||||
--> $DIR/token-error-correct-3.rs:20:9
|
|
||||||
|
|
|
||||||
LL | if !is_directory(path.as_ref()) { //~ ERROR: cannot find function `is_directory`
|
|
||||||
| - close delimiter possibly meant for this
|
|
||||||
LL | callback(path.as_ref(); //~ ERROR expected one of
|
|
||||||
| - un-closed delimiter
|
|
||||||
...
|
|
||||||
LL | } else { //~ ERROR: incorrect close delimiter: `}`
|
|
||||||
| ^ incorrect close delimiter
|
|
||||||
|
|
||||||
error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;`
|
error: expected one of `)`, `,`, `.`, `?`, or an operator, found `;`
|
||||||
--> $DIR/token-error-correct-3.rs:14:35
|
--> $DIR/token-error-correct-3.rs:14:35
|
||||||
|
|
|
|
||||||
LL | callback(path.as_ref(); //~ ERROR expected one of
|
LL | callback(path.as_ref(); //~ ERROR expected one of
|
||||||
| ^ expected one of `)`, `,`, `.`, `?`, or an operator here
|
| - ^
|
||||||
|
| | |
|
||||||
|
| | help: `)` may belong here
|
||||||
|
| unclosed delimiter
|
||||||
|
|
||||||
error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)`
|
error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)`
|
||||||
--> $DIR/token-error-correct-3.rs:20:9
|
--> $DIR/token-error-correct-3.rs:20:9
|
||||||
|
@ -21,7 +13,7 @@ error: expected one of `.`, `;`, `?`, `}`, or an operator, found `)`
|
||||||
LL | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types
|
LL | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mismatched types
|
||||||
| - expected one of `.`, `;`, `?`, `}`, or an operator here
|
| - expected one of `.`, `;`, `?`, `}`, or an operator here
|
||||||
...
|
...
|
||||||
LL | } else { //~ ERROR: incorrect close delimiter: `}`
|
LL | } else {
|
||||||
| ^ unexpected token
|
| ^ unexpected token
|
||||||
|
|
||||||
error[E0425]: cannot find function `is_directory` in this scope
|
error[E0425]: cannot find function `is_directory` in this scope
|
||||||
|
@ -41,7 +33,7 @@ LL | fs::create_dir_all(path.as_ref()).map(|()| true) //~ ERROR: mis
|
||||||
= note: expected type `()`
|
= note: expected type `()`
|
||||||
found type `std::result::Result<bool, std::io::Error>`
|
found type `std::result::Result<bool, std::io::Error>`
|
||||||
|
|
||||||
error: aborting due to 5 previous errors
|
error: aborting due to 4 previous errors
|
||||||
|
|
||||||
Some errors occurred: E0308, E0425.
|
Some errors occurred: E0308, E0425.
|
||||||
For more information about an error, try `rustc --explain E0308`.
|
For more information about an error, try `rustc --explain E0308`.
|
||||||
|
|
|
@ -2,6 +2,8 @@
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
foo(bar(;
|
foo(bar(;
|
||||||
//~^ ERROR: expected expression, found `;`
|
//~^ ERROR cannot find function `bar` in this scope
|
||||||
}
|
}
|
||||||
//~^ ERROR: incorrect close delimiter: `}`
|
//~^ ERROR: incorrect close delimiter: `}`
|
||||||
|
|
||||||
|
fn foo(_: usize) {}
|
||||||
|
|
|
@ -5,15 +5,16 @@ LL | fn main() {
|
||||||
| - close delimiter possibly meant for this
|
| - close delimiter possibly meant for this
|
||||||
LL | foo(bar(;
|
LL | foo(bar(;
|
||||||
| - un-closed delimiter
|
| - un-closed delimiter
|
||||||
LL | //~^ ERROR: expected expression, found `;`
|
LL | //~^ ERROR cannot find function `bar` in this scope
|
||||||
LL | }
|
LL | }
|
||||||
| ^ incorrect close delimiter
|
| ^ incorrect close delimiter
|
||||||
|
|
||||||
error: expected expression, found `;`
|
error[E0425]: cannot find function `bar` in this scope
|
||||||
--> $DIR/token-error-correct.rs:4:13
|
--> $DIR/token-error-correct.rs:4:9
|
||||||
|
|
|
|
||||||
LL | foo(bar(;
|
LL | foo(bar(;
|
||||||
| ^ expected expression
|
| ^^^ not found in this scope
|
||||||
|
|
||||||
error: aborting due to 2 previous errors
|
error: aborting due to 2 previous errors
|
||||||
|
|
||||||
|
For more information about this error, try `rustc --explain E0425`.
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue