1
Fork 0

parser: Remove Options from unnormalized tokens

They are always set synchronously with normalized tokens now.
This commit is contained in:
Vadim Petrochenkov 2020-02-16 23:19:51 +03:00
parent ed2fd28d38
commit 06fbb0b4fa
4 changed files with 26 additions and 43 deletions

View file

@@ -9,7 +9,7 @@ use rustc_errors::{Diagnostic, FatalError, Level, PResult};
use rustc_session::parse::ParseSess; use rustc_session::parse::ParseSess;
use rustc_span::{FileName, SourceFile, Span}; use rustc_span::{FileName, SourceFile, Span};
use syntax::ast; use syntax::ast;
use syntax::token::{self, Nonterminal}; use syntax::token::{self, Nonterminal, Token};
use syntax::tokenstream::{self, TokenStream, TokenTree}; use syntax::tokenstream::{self, TokenStream, TokenTree};
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
@@ -170,9 +170,9 @@ fn maybe_source_file_to_parser(
let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?; let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
let mut parser = stream_to_parser(sess, stream, None); let mut parser = stream_to_parser(sess, stream, None);
parser.unclosed_delims = unclosed_delims; parser.unclosed_delims = unclosed_delims;
if parser.token == token::Eof && parser.token.span.is_dummy() { if parser.token == token::Eof {
parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt()); let span = Span::new(end_pos, end_pos, parser.token.span.ctxt());
assert!(parser.unnormalized_token.is_none()); parser.set_token(Token::new(token::Eof, span));
} }
Ok(parser) Ok(parser)

View file

@@ -166,7 +166,7 @@ impl<'a> Parser<'a> {
while let Some(op) = self.check_assoc_op() { while let Some(op) = self.check_assoc_op() {
// Adjust the span for interpolated LHS to point to the `$lhs` token // Adjust the span for interpolated LHS to point to the `$lhs` token
// and not to what it refers to. // and not to what it refers to.
let lhs_span = match self.unnormalized_prev_token().kind { let lhs_span = match self.unnormalized_prev_token.kind {
TokenKind::Interpolated(..) => self.prev_span, TokenKind::Interpolated(..) => self.prev_span,
_ => lhs.span, _ => lhs.span,
}; };
@@ -527,7 +527,7 @@ impl<'a> Parser<'a> {
) -> PResult<'a, (Span, P<Expr>)> { ) -> PResult<'a, (Span, P<Expr>)> {
expr.map(|e| { expr.map(|e| {
( (
match self.unnormalized_prev_token().kind { match self.unnormalized_prev_token.kind {
TokenKind::Interpolated(..) => self.prev_span, TokenKind::Interpolated(..) => self.prev_span,
_ => e.span, _ => e.span,
}, },

View file

@@ -93,18 +93,16 @@ pub struct Parser<'a> {
/// Use span from this token if you need an isolated span. /// Use span from this token if you need an isolated span.
pub token: Token, pub token: Token,
/// The current non-normalized token if it's different from `token`. /// The current non-normalized token if it's different from `token`.
/// Preferable use is through the `unnormalized_token()` getter.
/// Use span from this token if you need to concatenate it with some neighbouring spans. /// Use span from this token if you need to concatenate it with some neighbouring spans.
pub unnormalized_token: Option<Token>, unnormalized_token: Token,
/// The previous normalized token. /// The previous normalized token.
/// Use span from this token if you need an isolated span. /// Use span from this token if you need an isolated span.
prev_token: Token, prev_token: Token,
/// The previous non-normalized token if it's different from `prev_token`. /// The previous non-normalized token if it's different from `prev_token`.
/// Preferable use is through the `unnormalized_prev_token()` getter.
/// Use span from this token if you need to concatenate it with some neighbouring spans. /// Use span from this token if you need to concatenate it with some neighbouring spans.
unnormalized_prev_token: Option<Token>, unnormalized_prev_token: Token,
/// Equivalent to `unnormalized_prev_token().span`. /// Equivalent to `unnormalized_prev_token.span`.
/// FIXME: Remove in favor of `(unnormalized_)prev_token().span`. /// FIXME: Remove in favor of `(unnormalized_)prev_token.span`.
pub prev_span: Span, pub prev_span: Span,
restrictions: Restrictions, restrictions: Restrictions,
/// Used to determine the path to externally loaded source files. /// Used to determine the path to externally loaded source files.
@@ -378,9 +376,9 @@ impl<'a> Parser<'a> {
let mut parser = Parser { let mut parser = Parser {
sess, sess,
token: Token::dummy(), token: Token::dummy(),
unnormalized_token: None, unnormalized_token: Token::dummy(),
prev_token: Token::dummy(), prev_token: Token::dummy(),
unnormalized_prev_token: None, unnormalized_prev_token: Token::dummy(),
prev_span: DUMMY_SP, prev_span: DUMMY_SP,
restrictions: Restrictions::empty(), restrictions: Restrictions::empty(),
recurse_into_file_modules, recurse_into_file_modules,
@@ -422,14 +420,6 @@ impl<'a> Parser<'a> {
parser parser
} }
fn unnormalized_token(&self) -> &Token {
self.unnormalized_token.as_ref().unwrap_or(&self.token)
}
fn unnormalized_prev_token(&self) -> &Token {
self.unnormalized_prev_token.as_ref().unwrap_or(&self.prev_token)
}
fn next_tok(&mut self, fallback_span: Span) -> Token { fn next_tok(&mut self, fallback_span: Span) -> Token {
let mut next = if self.desugar_doc_comments { let mut next = if self.desugar_doc_comments {
self.token_cursor.next_desugared() self.token_cursor.next_desugared()
@@ -899,18 +889,17 @@ impl<'a> Parser<'a> {
// Interpolated identifier (`$i: ident`) and lifetime (`$l: lifetime`) // Interpolated identifier (`$i: ident`) and lifetime (`$l: lifetime`)
// tokens are replaced with usual identifier and lifetime tokens, // tokens are replaced with usual identifier and lifetime tokens,
// so the former are never encountered during normal parsing. // so the former are never encountered during normal parsing.
fn normalize_token(token: &Token) -> Option<Token> { crate fn set_token(&mut self, token: Token) {
match &token.kind { self.unnormalized_token = token;
self.token = match &self.unnormalized_token.kind {
token::Interpolated(nt) => match **nt { token::Interpolated(nt) => match **nt {
token::NtIdent(ident, is_raw) => { token::NtIdent(ident, is_raw) => {
Some(Token::new(token::Ident(ident.name, is_raw), ident.span)) Token::new(token::Ident(ident.name, is_raw), ident.span)
} }
token::NtLifetime(ident) => { token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
Some(Token::new(token::Lifetime(ident.name), ident.span)) _ => self.unnormalized_token.clone(),
}
_ => None,
}, },
_ => None, _ => self.unnormalized_token.clone(),
} }
} }
@@ -925,13 +914,11 @@ impl<'a> Parser<'a> {
// Update the current and previous tokens. // Update the current and previous tokens.
self.prev_token = self.token.take(); self.prev_token = self.token.take();
self.unnormalized_prev_token = self.unnormalized_token.take(); self.unnormalized_prev_token = self.unnormalized_token.take();
self.token = self.next_tok(self.unnormalized_prev_token().span); let next_token = self.next_tok(self.unnormalized_prev_token.span);
if let Some(normalized_token) = Self::normalize_token(&self.token) { self.set_token(next_token);
self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
}
// Update fields derived from the previous token. // Update fields derived from the previous token.
self.prev_span = self.unnormalized_prev_token().span; self.prev_span = self.unnormalized_prev_token.span;
self.expected_tokens.clear(); self.expected_tokens.clear();
} }
@@ -945,13 +932,10 @@ impl<'a> Parser<'a> {
// Update the current and previous tokens. // Update the current and previous tokens.
self.prev_token = self.token.take(); self.prev_token = self.token.take();
self.unnormalized_prev_token = self.unnormalized_token.take(); self.unnormalized_prev_token = self.unnormalized_token.take();
self.token = Token::new(next, span); self.set_token(Token::new(next, span));
if let Some(normalized_token) = Self::normalize_token(&self.token) {
self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
}
// Update fields derived from the previous token. // Update fields derived from the previous token.
self.prev_span = self.unnormalized_prev_token().span.with_hi(span.lo()); self.prev_span = self.unnormalized_prev_token.span.with_hi(span.lo());
self.expected_tokens.clear(); self.expected_tokens.clear();
} }
@@ -1096,8 +1080,7 @@ impl<'a> Parser<'a> {
&mut self.token_cursor.frame, &mut self.token_cursor.frame,
self.token_cursor.stack.pop().unwrap(), self.token_cursor.stack.pop().unwrap(),
); );
self.token = Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close); self.set_token(Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close));
self.unnormalized_token = None;
self.bump(); self.bump();
TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream.into()) TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream.into())
} }

View file

@@ -134,7 +134,7 @@ impl<'a> Parser<'a> {
path path
}); });
let lo = self.unnormalized_token().span; let lo = self.unnormalized_token.span;
let mut segments = Vec::new(); let mut segments = Vec::new();
let mod_sep_ctxt = self.token.span.ctxt(); let mod_sep_ctxt = self.token.span.ctxt();
if self.eat(&token::ModSep) { if self.eat(&token::ModSep) {