Move token tree related lexer state to a separate struct
Several of the lexer's fields were used only when tokenizing into a token tree, so move them out of the base lexer and into a dedicated `TokenTreesReader` (sketched below).
parent efa3c27f0f
commit d29f0d23c3

3 changed files with 71 additions and 48 deletions
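In outline, the change keeps only core scanning state on the lexer and moves the delimiter bookkeeping onto a wrapper that consumes it. The stand-alone sketch below illustrates that ownership shape with toy types (`Lexer`, `TreeReader`, a position-based `Token`); it is not the rustc API, whose actual signatures appear in the diff that follows.

// Stand-alone sketch of the ownership shape introduced by this commit, using
// toy types rather than the real rustc `StringReader`/`TokenTreesReader` API.

#[derive(Debug, Clone, Copy, PartialEq)]
enum Token {
    Open,
    Close,
    Ident,
    Eof,
}

// Base lexer: only core scanning state, no delimiter bookkeeping.
struct Lexer {
    tokens: Vec<Token>,
    pos: usize,
}

impl Lexer {
    fn next_token(&mut self) -> Token {
        let tok = *self.tokens.get(self.pos).unwrap_or(&Token::Eof);
        self.pos += 1;
        tok
    }

    // Mirrors `into_token_trees`: consume the lexer, let the wrapper drive it,
    // and return the parse result together with the recorded mismatches.
    fn into_trees(self) -> (Result<usize, String>, Vec<usize>) {
        let mut reader = TreeReader {
            lexer: self,
            open_braces: Vec::new(),
            unmatched_braces: Vec::new(),
        };
        let res = reader.parse_all();
        (res, reader.unmatched_braces)
    }
}

// Token-tree-only state, kept off the base lexer (as in the diff below).
struct TreeReader {
    lexer: Lexer,
    open_braces: Vec<usize>,      // positions of still-open `Open` tokens
    unmatched_braces: Vec<usize>, // positions of stray `Close` tokens
}

impl TreeReader {
    // Count balanced groups, recording mismatches instead of bailing out.
    fn parse_all(&mut self) -> Result<usize, String> {
        let mut groups = 0;
        loop {
            let pos = self.lexer.pos;
            match self.lexer.next_token() {
                Token::Open => self.open_braces.push(pos),
                Token::Close => match self.open_braces.pop() {
                    Some(_) => groups += 1,
                    None => self.unmatched_braces.push(pos),
                },
                Token::Ident => {}
                Token::Eof => break,
            }
        }
        if self.open_braces.is_empty() {
            Ok(groups)
        } else {
            Err("this file contains an un-closed delimiter".to_string())
        }
    }
}

fn main() {
    let lexer = Lexer {
        tokens: vec![Token::Open, Token::Ident, Token::Close, Token::Close],
        pos: 0,
    };
    let (result, unmatched) = lexer.into_trees();
    println!("groups: {:?}, unmatched close delimiters at: {:?}", result, unmatched);
}

Taking `self` by value (as `into_token_trees` does) means the token-tree pass cannot leave its bookkeeping behind on the lexer, and the unmatched-brace list travels back with the parse result instead of being read off the reader afterwards.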
@@ -66,15 +66,7 @@ pub struct StringReader<'a> {
     span: Span,
     /// The raw source span which *does not* take `override_span` into account
     span_src_raw: Span,
-    /// Stack of open delimiters and their spans. Used for error message.
-    open_braces: Vec<(token::DelimToken, Span)>,
-    crate unmatched_braces: Vec<UnmatchedBrace>,
-    /// The type and spans for all braces
-    ///
-    /// Used only for error recovery when arriving to EOF with mismatched braces.
-    matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
-    crate override_span: Option<Span>,
-    last_unclosed_found_span: Option<Span>,
+    override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
@@ -254,11 +246,7 @@ impl<'a> StringReader<'a> {
             token: token::Eof,
             span: syntax_pos::DUMMY_SP,
             span_src_raw: syntax_pos::DUMMY_SP,
-            open_braces: Vec::new(),
-            unmatched_braces: Vec::new(),
-            matching_delim_spans: Vec::new(),
             override_span,
-            last_unclosed_found_span: None,
         }
     }
 
@@ -1,14 +1,42 @@
+use syntax_pos::Span;
+
 use crate::print::pprust::token_to_string;
 use crate::parse::lexer::{StringReader, UnmatchedBrace};
 use crate::parse::{token, PResult};
 use crate::tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
 impl<'a> StringReader<'a> {
+    crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
+        let mut tt_reader = TokenTreesReader {
+            string_reader: self,
+            open_braces: Vec::new(),
+            unmatched_braces: Vec::new(),
+            matching_delim_spans: Vec::new(),
+            last_unclosed_found_span: None,
+        };
+        let res = tt_reader.parse_all_token_trees();
+        (res, tt_reader.unmatched_braces)
+    }
+}
+
+struct TokenTreesReader<'a> {
+    string_reader: StringReader<'a>,
+    /// Stack of open delimiters and their spans. Used for error message.
+    open_braces: Vec<(token::DelimToken, Span)>,
+    unmatched_braces: Vec<UnmatchedBrace>,
+    /// The type and spans for all braces
+    ///
+    /// Used only for error recovery when arriving to EOF with mismatched braces.
+    matching_delim_spans: Vec<(token::DelimToken, Span, Span)>,
+    last_unclosed_found_span: Option<Span>,
+}
+
+impl<'a> TokenTreesReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
-    crate fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
+    fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
         let mut tts = Vec::new();
 
-        while self.token != token::Eof {
+        while self.string_reader.token != token::Eof {
             tts.push(self.parse_token_tree()?);
         }
 
@@ -19,7 +47,7 @@ impl<'a> StringReader<'a> {
     fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
         let mut tts = vec![];
         loop {
-            if let token::CloseDelim(..) = self.token {
+            if let token::CloseDelim(..) = self.string_reader.token {
                 return TokenStream::new(tts);
             }
 
@@ -34,11 +62,12 @@ impl<'a> StringReader<'a> {
     }
 
     fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
-        let sm = self.sess.source_map();
-        match self.token {
+        let sm = self.string_reader.sess.source_map();
+        match self.string_reader.token {
             token::Eof => {
                 let msg = "this file contains an un-closed delimiter";
-                let mut err = self.sess.span_diagnostic.struct_span_err(self.span, msg);
+                let mut err = self.string_reader.sess.span_diagnostic
+                    .struct_span_err(self.span(), msg);
                 for &(_, sp) in &self.open_braces {
                     err.span_label(sp, "un-closed delimiter");
                 }
@@ -46,13 +75,12 @@ impl<'a> StringReader<'a> {
                 if let Some((delim, _)) = self.open_braces.last() {
                     if let Some((_, open_sp, close_sp)) = self.matching_delim_spans.iter()
                         .filter(|(d, open_sp, close_sp)| {
-
-                        if let Some(close_padding) = sm.span_to_margin(*close_sp) {
-                            if let Some(open_padding) = sm.span_to_margin(*open_sp) {
-                                return delim == d && close_padding != open_padding;
+                            if let Some(close_padding) = sm.span_to_margin(*close_sp) {
+                                if let Some(open_padding) = sm.span_to_margin(*open_sp) {
+                                    return delim == d && close_padding != open_padding;
+                                }
                             }
-                        }
-                        false
+                            false
                         }).next() // these are in reverse order as they get inserted on close, but
                     { // we want the last open/first close
                         err.span_label(
@@ -69,11 +97,11 @@ impl<'a> StringReader<'a> {
             },
             token::OpenDelim(delim) => {
                 // The span for beginning of the delimited section
-                let pre_span = self.span;
+                let pre_span = self.span();
 
                 // Parse the open delimiter.
-                self.open_braces.push((delim, self.span));
-                self.real_token();
+                self.open_braces.push((delim, self.span()));
+                self.string_reader.real_token();
 
                 // Parse the token trees within the delimiters.
                 // We stop at any delimiter so we can try to recover if the user
@@ -81,9 +109,9 @@ impl<'a> StringReader<'a> {
                 let tts = self.parse_token_trees_until_close_delim();
 
                 // Expand to cover the entire delimited token tree
-                let delim_span = DelimSpan::from_pair(pre_span, self.span);
+                let delim_span = DelimSpan::from_pair(pre_span, self.span());
 
-                match self.token {
+                match self.string_reader.token {
                     // Correct delimiter.
                     token::CloseDelim(d) if d == delim => {
                         let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
@@ -93,26 +121,26 @@ impl<'a> StringReader<'a> {
                             self.matching_delim_spans.clear();
                         } else {
                             self.matching_delim_spans.push(
-                                (open_brace, open_brace_span, self.span),
+                                (open_brace, open_brace_span, self.span()),
                             );
                         }
                         // Parse the close delimiter.
-                        self.real_token();
+                        self.string_reader.real_token();
                     }
                     // Incorrect delimiter.
                     token::CloseDelim(other) => {
                         let mut unclosed_delimiter = None;
                         let mut candidate = None;
-                        if self.last_unclosed_found_span != Some(self.span) {
+                        if self.last_unclosed_found_span != Some(self.span()) {
                             // do not complain about the same unclosed delimiter multiple times
-                            self.last_unclosed_found_span = Some(self.span);
+                            self.last_unclosed_found_span = Some(self.span());
                             // This is a conservative error: only report the last unclosed
                             // delimiter. The previous unclosed delimiters could actually be
                             // closed! The parser just hasn't gotten to them yet.
                             if let Some(&(_, sp)) = self.open_braces.last() {
                                 unclosed_delimiter = Some(sp);
                             };
-                            if let Some(current_padding) = sm.span_to_margin(self.span) {
+                            if let Some(current_padding) = sm.span_to_margin(self.span()) {
                                 for (brace, brace_span) in &self.open_braces {
                                     if let Some(padding) = sm.span_to_margin(*brace_span) {
                                         // high likelihood of these two corresponding
@@ -126,7 +154,7 @@ impl<'a> StringReader<'a> {
                         self.unmatched_braces.push(UnmatchedBrace {
                             expected_delim: tok,
                             found_delim: other,
-                            found_span: self.span,
+                            found_span: self.span(),
                             unclosed_span: unclosed_delimiter,
                             candidate_span: candidate,
                         });
@@ -142,7 +170,7 @@ impl<'a> StringReader<'a> {
                         // bar(baz(
                         // } // Incorrect delimiter but matches the earlier `{`
                         if !self.open_braces.iter().any(|&(b, _)| b == other) {
-                            self.real_token();
+                            self.string_reader.real_token();
                         }
                     }
                     token::Eof => {
@@ -162,22 +190,28 @@ impl<'a> StringReader<'a> {
             token::CloseDelim(_) => {
                 // An unexpected closing delimiter (i.e., there is no
                 // matching opening delimiter).
-                let token_str = token_to_string(&self.token);
+                let token_str = token_to_string(&self.string_reader.token);
                 let msg = format!("unexpected close delimiter: `{}`", token_str);
-                let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
-                err.span_label(self.span, "unexpected close delimiter");
+                let mut err = self.string_reader.sess.span_diagnostic
+                    .struct_span_err(self.span(), &msg);
+                err.span_label(self.span(), "unexpected close delimiter");
                 Err(err)
             },
             _ => {
-                let tt = TokenTree::Token(self.span, self.token.clone());
+                let tt = TokenTree::Token(self.span(), self.string_reader.token.clone());
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.
-                let raw = self.span_src_raw;
-                self.real_token();
-                let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
+                let raw = self.string_reader.span_src_raw;
+                self.string_reader.real_token();
+                let is_joint = raw.hi() == self.string_reader.span_src_raw.lo()
+                    && token::is_op(&self.string_reader.token);
                 Ok((tt, if is_joint { Joint } else { NonJoint }))
             }
         }
     }
+
+    fn span(&self) -> Span {
+        self.string_reader.span
+    }
 }
@@ -295,7 +295,7 @@ pub fn source_file_to_stream(
 }
 
 /// Given a source file, produces a sequence of token trees. Returns any buffered errors from
-/// parsing the token tream.
+/// parsing the token stream.
 pub fn maybe_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
@@ -303,14 +303,15 @@ pub fn maybe_file_to_stream(
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
     let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
     srdr.real_token();
+    let (token_trees, unmatched_braces) = srdr.into_token_trees();
 
-    match srdr.parse_all_token_trees() {
-        Ok(stream) => Ok((stream, srdr.unmatched_braces)),
+    match token_trees {
+        Ok(stream) => Ok((stream, unmatched_braces)),
         Err(err) => {
             let mut buffer = Vec::with_capacity(1);
             err.buffer(&mut buffer);
             // Not using `emit_unclosed_delims` to use `db.buffer`
-            for unmatched in srdr.unmatched_braces {
+            for unmatched in unmatched_braces {
                 let mut db = sess.span_diagnostic.struct_span_err(unmatched.found_span, &format!(
                     "incorrect close delimiter: `{}`",
                     token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),