
move span and token to tt reader

Aleksey Kladov 2019-05-13 12:06:37 +03:00
parent d29f0d23c3
commit b91e0a3786
3 changed files with 35 additions and 40 deletions
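In outline, this commit moves the current-token bookkeeping (the `token` and `span` fields, and the `span_src_raw` field) out of `StringReader` and into `TokenTreesReader`, which now keeps its own copy up to date through a small `real_token` wrapper. Below is a minimal, self-contained sketch of that pattern; the types are simplified stand-ins, not the real rustc definitions. The actual diffs follow.

#[derive(Clone, Debug, PartialEq)]
enum Token {
    Ident(String),
    Eof,
}

type Span = (usize, usize);

struct TokenAndSpan {
    tok: Token,
    sp: Span,
}

// The lexer no longer carries `token`/`span` fields of its own;
// it just hands out TokenAndSpan values.
struct StringReader {
    words: Vec<String>,
    pos: usize,
}

impl StringReader {
    fn real_token(&mut self) -> TokenAndSpan {
        let sp = (self.pos, self.pos + 1);
        let tok = match self.words.get(self.pos) {
            Some(w) => Token::Ident(w.clone()),
            None => Token::Eof,
        };
        self.pos += 1;
        TokenAndSpan { tok, sp }
    }
}

// The token-trees reader owns the "current token" state instead.
struct TokenTreesReader {
    string_reader: StringReader,
    token: Token,
    span: Span,
}

impl TokenTreesReader {
    // Thin wrapper: advance the lexer and cache the result locally,
    // so the parsing loops can read `self.token` / `self.span` directly.
    fn real_token(&mut self) {
        let t = self.string_reader.real_token();
        self.token = t.tok;
        self.span = t.sp;
    }

    // The reader primes its first token itself, so callers no longer
    // have to call `real_token()` before handing the lexer over.
    fn parse_all(&mut self) -> Vec<(Token, Span)> {
        let mut out = Vec::new();
        self.real_token();
        while self.token != Token::Eof {
            out.push((self.token.clone(), self.span));
            self.real_token();
        }
        out
    }
}

fn main() {
    let reader = StringReader { words: vec!["a".to_string(), "b".to_string()], pos: 0 };
    let mut tt_reader = TokenTreesReader {
        string_reader: reader,
        token: Token::Eof,
        span: (0, 0),
    };
    println!("{:?}", tt_reader.parse_all());
}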

@@ -62,10 +62,6 @@ pub struct StringReader<'a> {
     // cache a direct reference to the source text, so that we don't have to
     // retrieve it via `self.source_file.src.as_ref().unwrap()` all the time.
     src: Lrc<String>,
-    token: token::Token,
-    span: Span,
-    /// The raw source span which *does not* take `override_span` into account
-    span_src_raw: Span,
     override_span: Option<Span>,
 }
@@ -113,8 +109,6 @@ impl<'a> StringReader<'a> {
             sp: self.peek_span,
         };
         self.advance_token()?;
-        self.span_src_raw = self.peek_span_src_raw;
         Ok(ret_val)
     }
@@ -151,9 +145,6 @@ impl<'a> StringReader<'a> {
             }
         }
-        self.token = t.tok.clone();
-        self.span = t.sp;
         Ok(t)
     }
@@ -243,9 +234,6 @@ impl<'a> StringReader<'a> {
             peek_span_src_raw: syntax_pos::DUMMY_SP,
             src,
             fatal_errs: Vec::new(),
-            token: token::Eof,
-            span: syntax_pos::DUMMY_SP,
-            span_src_raw: syntax_pos::DUMMY_SP,
             override_span,
         }
     }

@@ -9,6 +9,8 @@ impl<'a> StringReader<'a> {
     crate fn into_token_trees(self) -> (PResult<'a, TokenStream>, Vec<UnmatchedBrace>) {
         let mut tt_reader = TokenTreesReader {
             string_reader: self,
+            token: token::Eof,
+            span: syntax_pos::DUMMY_SP,
             open_braces: Vec::new(),
             unmatched_braces: Vec::new(),
             matching_delim_spans: Vec::new(),
@@ -21,6 +23,8 @@ impl<'a> StringReader<'a> {
 struct TokenTreesReader<'a> {
     string_reader: StringReader<'a>,
+    token: token::Token,
+    span: Span,
     /// Stack of open delimiters and their spans. Used for error message.
     open_braces: Vec<(token::DelimToken, Span)>,
     unmatched_braces: Vec<UnmatchedBrace>,
@@ -36,7 +40,8 @@ impl<'a> TokenTreesReader<'a> {
     fn parse_all_token_trees(&mut self) -> PResult<'a, TokenStream> {
         let mut tts = Vec::new();
-        while self.string_reader.token != token::Eof {
+        self.real_token();
+        while self.token != token::Eof {
             tts.push(self.parse_token_tree()?);
         }
@@ -47,7 +52,7 @@ impl<'a> TokenTreesReader<'a> {
     fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
         let mut tts = vec![];
         loop {
-            if let token::CloseDelim(..) = self.string_reader.token {
+            if let token::CloseDelim(..) = self.token {
                 return TokenStream::new(tts);
             }
@@ -63,11 +68,11 @@ impl<'a> TokenTreesReader<'a> {
     fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
         let sm = self.string_reader.sess.source_map();
-        match self.string_reader.token {
+        match self.token {
             token::Eof => {
                 let msg = "this file contains an un-closed delimiter";
                 let mut err = self.string_reader.sess.span_diagnostic
-                    .struct_span_err(self.span(), msg);
+                    .struct_span_err(self.span, msg);
                 for &(_, sp) in &self.open_braces {
                     err.span_label(sp, "un-closed delimiter");
                 }
@@ -97,11 +102,11 @@ impl<'a> TokenTreesReader<'a> {
             },
             token::OpenDelim(delim) => {
                 // The span for beginning of the delimited section
-                let pre_span = self.span();
+                let pre_span = self.span;
                 // Parse the open delimiter.
-                self.open_braces.push((delim, self.span()));
-                self.string_reader.real_token();
+                self.open_braces.push((delim, self.span));
+                self.real_token();
                 // Parse the token trees within the delimiters.
                 // We stop at any delimiter so we can try to recover if the user
@@ -109,9 +114,9 @@ impl<'a> TokenTreesReader<'a> {
                 let tts = self.parse_token_trees_until_close_delim();
                 // Expand to cover the entire delimited token tree
-                let delim_span = DelimSpan::from_pair(pre_span, self.span());
-                match self.string_reader.token {
+                let delim_span = DelimSpan::from_pair(pre_span, self.span);
+                match self.token {
                     // Correct delimiter.
                     token::CloseDelim(d) if d == delim => {
                         let (open_brace, open_brace_span) = self.open_braces.pop().unwrap();
@@ -121,26 +126,26 @@ impl<'a> TokenTreesReader<'a> {
                            self.matching_delim_spans.clear();
                        } else {
                            self.matching_delim_spans.push(
-                                (open_brace, open_brace_span, self.span()),
+                                (open_brace, open_brace_span, self.span),
                            );
                        }
                        // Parse the close delimiter.
-                        self.string_reader.real_token();
+                        self.real_token();
                    }
                    // Incorrect delimiter.
                    token::CloseDelim(other) => {
                        let mut unclosed_delimiter = None;
                        let mut candidate = None;
-                        if self.last_unclosed_found_span != Some(self.span()) {
+                        if self.last_unclosed_found_span != Some(self.span) {
                            // do not complain about the same unclosed delimiter multiple times
-                            self.last_unclosed_found_span = Some(self.span());
+                            self.last_unclosed_found_span = Some(self.span);
                            // This is a conservative error: only report the last unclosed
                            // delimiter. The previous unclosed delimiters could actually be
                            // closed! The parser just hasn't gotten to them yet.
                            if let Some(&(_, sp)) = self.open_braces.last() {
                                unclosed_delimiter = Some(sp);
                            };
-                            if let Some(current_padding) = sm.span_to_margin(self.span()) {
+                            if let Some(current_padding) = sm.span_to_margin(self.span) {
                                for (brace, brace_span) in &self.open_braces {
                                    if let Some(padding) = sm.span_to_margin(*brace_span) {
                                        // high likelihood of these two corresponding
@@ -154,7 +159,7 @@ impl<'a> TokenTreesReader<'a> {
                            self.unmatched_braces.push(UnmatchedBrace {
                                expected_delim: tok,
                                found_delim: other,
-                                found_span: self.span(),
+                                found_span: self.span,
                                unclosed_span: unclosed_delimiter,
                                candidate_span: candidate,
                            });
@@ -170,7 +175,7 @@ impl<'a> TokenTreesReader<'a> {
                        //     bar(baz(
                        // }  // Incorrect delimiter but matches the earlier `{`
                        if !self.open_braces.iter().any(|&(b, _)| b == other) {
-                            self.string_reader.real_token();
+                            self.real_token();
                        }
                    }
                    token::Eof => {
@@ -190,28 +195,31 @@ impl<'a> TokenTreesReader<'a> {
             token::CloseDelim(_) => {
                 // An unexpected closing delimiter (i.e., there is no
                 // matching opening delimiter).
-                let token_str = token_to_string(&self.string_reader.token);
+                let token_str = token_to_string(&self.token);
                 let msg = format!("unexpected close delimiter: `{}`", token_str);
                 let mut err = self.string_reader.sess.span_diagnostic
-                    .struct_span_err(self.span(), &msg);
-                err.span_label(self.span(), "unexpected close delimiter");
+                    .struct_span_err(self.span, &msg);
+                err.span_label(self.span, "unexpected close delimiter");
                 Err(err)
             },
             _ => {
-                let tt = TokenTree::Token(self.span(), self.string_reader.token.clone());
+                let tt = TokenTree::Token(self.span, self.token.clone());
                 // Note that testing for joint-ness here is done via the raw
                 // source span as the joint-ness is a property of the raw source
                 // rather than wanting to take `override_span` into account.
-                let raw = self.string_reader.span_src_raw;
-                self.string_reader.real_token();
-                let is_joint = raw.hi() == self.string_reader.span_src_raw.lo()
-                    && token::is_op(&self.string_reader.token);
+                let raw = self.string_reader.peek_span_src_raw;
+                self.real_token();
+                let is_joint = raw.hi() == self.string_reader.peek_span_src_raw.lo()
+                    && token::is_op(&self.token);
                 Ok((tt, if is_joint { Joint } else { NonJoint }))
             }
         }
     }
-    fn span(&self) -> Span {
-        self.string_reader.span
+    fn real_token(&mut self) {
+        let t = self.string_reader.real_token();
+        self.token = t.tok;
+        self.span = t.sp;
     }
 }

@@ -301,8 +301,7 @@ pub fn maybe_file_to_stream(
     source_file: Lrc<SourceFile>,
     override_span: Option<Span>,
 ) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
-    let mut srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
-    srdr.real_token();
+    let srdr = lexer::StringReader::new_or_buffered_errs(sess, source_file, override_span)?;
     let (token_trees, unmatched_braces) = srdr.into_token_trees();
     match token_trees {
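Usage note on this last hunk: because `TokenTreesReader::parse_all_token_trees` now primes its own first token (the added `self.real_token();` call in the second file), the caller in `maybe_file_to_stream` no longer needs to call `srdr.real_token()` before `into_token_trees()`, and `srdr` no longer needs to be `mut`.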