diff --git a/src/librustc_parse/lib.rs b/src/librustc_parse/lib.rs
index 40d7a34a8b0..a0b8415b3e1 100644
--- a/src/librustc_parse/lib.rs
+++ b/src/librustc_parse/lib.rs
@@ -9,7 +9,7 @@ use rustc_errors::{Diagnostic, FatalError, Level, PResult};
 use rustc_session::parse::ParseSess;
 use rustc_span::{FileName, SourceFile, Span};
 use syntax::ast;
-use syntax::token::{self, Nonterminal};
+use syntax::token::{self, Nonterminal, Token};
 use syntax::tokenstream::{self, TokenStream, TokenTree};
 
 use std::path::{Path, PathBuf};
@@ -170,9 +170,9 @@ fn maybe_source_file_to_parser(
     let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
     let mut parser = stream_to_parser(sess, stream, None);
     parser.unclosed_delims = unclosed_delims;
-    if parser.token == token::Eof && parser.token.span.is_dummy() {
-        parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt());
-        assert!(parser.unnormalized_token.is_none());
+    if parser.token == token::Eof {
+        let span = Span::new(end_pos, end_pos, parser.token.span.ctxt());
+        parser.set_token(Token::new(token::Eof, span));
     }
 
     Ok(parser)
diff --git a/src/librustc_parse/parser/expr.rs b/src/librustc_parse/parser/expr.rs
index 51822ab2ea5..97daa91eed1 100644
--- a/src/librustc_parse/parser/expr.rs
+++ b/src/librustc_parse/parser/expr.rs
@@ -166,7 +166,7 @@ impl<'a> Parser<'a> {
         while let Some(op) = self.check_assoc_op() {
             // Adjust the span for interpolated LHS to point to the `$lhs` token
             // and not to what it refers to.
-            let lhs_span = match self.unnormalized_prev_token().kind {
+            let lhs_span = match self.unnormalized_prev_token.kind {
                 TokenKind::Interpolated(..) => self.prev_span,
                 _ => lhs.span,
             };
@@ -527,7 +527,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, (Span, P<Expr>)> {
         expr.map(|e| {
             (
-                match self.unnormalized_prev_token().kind {
+                match self.unnormalized_prev_token.kind {
                     TokenKind::Interpolated(..) => self.prev_span,
                     _ => e.span,
                 },
diff --git a/src/librustc_parse/parser/mod.rs b/src/librustc_parse/parser/mod.rs
index e04cfa37468..937e5e3cd69 100644
--- a/src/librustc_parse/parser/mod.rs
+++ b/src/librustc_parse/parser/mod.rs
@@ -93,18 +93,16 @@ pub struct Parser<'a> {
     /// Use span from this token if you need an isolated span.
     pub token: Token,
     /// The current non-normalized token if it's different from `token`.
-    /// Preferable use is through the `unnormalized_token()` getter.
     /// Use span from this token if you need to concatenate it with some neighbouring spans.
-    pub unnormalized_token: Option<Token>,
+    unnormalized_token: Token,
     /// The previous normalized token.
     /// Use span from this token if you need an isolated span.
     prev_token: Token,
     /// The previous non-normalized token if it's different from `prev_token`.
-    /// Preferable use is through the `unnormalized_prev_token()` getter.
     /// Use span from this token if you need to concatenate it with some neighbouring spans.
-    unnormalized_prev_token: Option<Token>,
-    /// Equivalent to `unnormalized_prev_token().span`.
-    /// FIXME: Remove in favor of `(unnormalized_)prev_token().span`.
+    unnormalized_prev_token: Token,
+    /// Equivalent to `unnormalized_prev_token.span`.
+    /// FIXME: Remove in favor of `(unnormalized_)prev_token.span`.
     pub prev_span: Span,
     restrictions: Restrictions,
     /// Used to determine the path to externally loaded source files.
@@ -378,9 +376,9 @@ impl<'a> Parser<'a> {
         let mut parser = Parser {
             sess,
             token: Token::dummy(),
-            unnormalized_token: None,
+            unnormalized_token: Token::dummy(),
             prev_token: Token::dummy(),
-            unnormalized_prev_token: None,
+            unnormalized_prev_token: Token::dummy(),
             prev_span: DUMMY_SP,
             restrictions: Restrictions::empty(),
             recurse_into_file_modules,
@@ -422,14 +420,6 @@ impl<'a> Parser<'a> {
         parser
     }
 
-    fn unnormalized_token(&self) -> &Token {
-        self.unnormalized_token.as_ref().unwrap_or(&self.token)
-    }
-
-    fn unnormalized_prev_token(&self) -> &Token {
-        self.unnormalized_prev_token.as_ref().unwrap_or(&self.prev_token)
-    }
-
     fn next_tok(&mut self, fallback_span: Span) -> Token {
         let mut next = if self.desugar_doc_comments {
             self.token_cursor.next_desugared()
@@ -899,18 +889,17 @@ impl<'a> Parser<'a> {
     // Interpolated identifier (`$i: ident`) and lifetime (`$l: lifetime`)
     // tokens are replaced with usual identifier and lifetime tokens,
     // so the former are never encountered during normal parsing.
-    fn normalize_token(token: &Token) -> Option<Token> {
-        match &token.kind {
+    crate fn set_token(&mut self, token: Token) {
+        self.unnormalized_token = token;
+        self.token = match &self.unnormalized_token.kind {
             token::Interpolated(nt) => match **nt {
                 token::NtIdent(ident, is_raw) => {
-                    Some(Token::new(token::Ident(ident.name, is_raw), ident.span))
+                    Token::new(token::Ident(ident.name, is_raw), ident.span)
                 }
-                token::NtLifetime(ident) => {
-                    Some(Token::new(token::Lifetime(ident.name), ident.span))
-                }
-                _ => None,
+                token::NtLifetime(ident) => Token::new(token::Lifetime(ident.name), ident.span),
+                _ => self.unnormalized_token.clone(),
             },
-            _ => None,
+            _ => self.unnormalized_token.clone(),
         }
     }
 
@@ -925,13 +914,11 @@ impl<'a> Parser<'a> {
         // Update the current and previous tokens.
         self.prev_token = self.token.take();
         self.unnormalized_prev_token = self.unnormalized_token.take();
-        self.token = self.next_tok(self.unnormalized_prev_token().span);
-        if let Some(normalized_token) = Self::normalize_token(&self.token) {
-            self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
-        }
+        let next_token = self.next_tok(self.unnormalized_prev_token.span);
+        self.set_token(next_token);
 
         // Update fields derived from the previous token.
-        self.prev_span = self.unnormalized_prev_token().span;
+        self.prev_span = self.unnormalized_prev_token.span;
         self.expected_tokens.clear();
     }
 
@@ -945,13 +932,10 @@ impl<'a> Parser<'a> {
         // Update the current and previous tokens.
        self.prev_token = self.token.take();
         self.unnormalized_prev_token = self.unnormalized_token.take();
-        self.token = Token::new(next, span);
-        if let Some(normalized_token) = Self::normalize_token(&self.token) {
-            self.unnormalized_token = Some(mem::replace(&mut self.token, normalized_token));
-        }
+        self.set_token(Token::new(next, span));
 
         // Update fields derived from the previous token.
-        self.prev_span = self.unnormalized_prev_token().span.with_hi(span.lo());
+        self.prev_span = self.unnormalized_prev_token.span.with_hi(span.lo());
         self.expected_tokens.clear();
     }
 
@@ -1096,8 +1080,7 @@ impl<'a> Parser<'a> {
                     &mut self.token_cursor.frame,
                     self.token_cursor.stack.pop().unwrap(),
                 );
-                self.token = Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close);
-                self.unnormalized_token = None;
+                self.set_token(Token::new(TokenKind::CloseDelim(frame.delim), frame.span.close));
                 self.bump();
                 TokenTree::Delimited(frame.span, frame.delim, frame.tree_cursor.stream.into())
             }
diff --git a/src/librustc_parse/parser/path.rs b/src/librustc_parse/parser/path.rs
index 761c06b70ee..18e57c6a5d4 100644
--- a/src/librustc_parse/parser/path.rs
+++ b/src/librustc_parse/parser/path.rs
@@ -134,7 +134,7 @@ impl<'a> Parser<'a> {
             path
         });
 
-        let lo = self.unnormalized_token().span;
+        let lo = self.unnormalized_token.span;
         let mut segments = Vec::new();
         let mod_sep_ctxt = self.token.span.ctxt();
         if self.eat(&token::ModSep) {
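
Note (not part of the patch): the sketch below is a minimal, self-contained illustration of the eager-normalization pattern that `set_token` adopts above. The `Token`, `TokenKind`, and `Parser` types here are simplified stand-ins invented for the example, not the real definitions from `syntax::token` or `librustc_parse`; only the shape of the logic mirrors the patch.

// Simplified stand-ins for `syntax::token::{Token, TokenKind}`, invented for
// this sketch only.
#[derive(Clone, Debug, PartialEq)]
enum TokenKind {
    Ident(String),
    Lifetime(String),
    // Interpolated fragments produced by macro expansion (`$i: ident`, `$l: lifetime`).
    InterpolatedIdent(String),
    InterpolatedLifetime(String),
    Eof,
}

#[derive(Clone, Debug, PartialEq)]
struct Token {
    kind: TokenKind,
}

struct Parser {
    /// The current normalized token (interpolated ident/lifetime unwrapped).
    token: Token,
    /// The current non-normalized token; always present, never an `Option`.
    unnormalized_token: Token,
}

impl Parser {
    /// Eagerly normalizes the incoming token and stores both forms, mirroring
    /// the `Parser::set_token` introduced by the patch.
    fn set_token(&mut self, token: Token) {
        self.unnormalized_token = token;
        self.token = match &self.unnormalized_token.kind {
            TokenKind::InterpolatedIdent(name) => Token { kind: TokenKind::Ident(name.clone()) },
            TokenKind::InterpolatedLifetime(name) => {
                Token { kind: TokenKind::Lifetime(name.clone()) }
            }
            _ => self.unnormalized_token.clone(),
        };
    }
}

fn main() {
    let dummy = Token { kind: TokenKind::Eof };
    let mut parser = Parser { token: dummy.clone(), unnormalized_token: dummy };

    parser.set_token(Token { kind: TokenKind::InterpolatedIdent("foo".to_string()) });

    // Parsing code reads the normalized form directly...
    assert_eq!(parser.token.kind, TokenKind::Ident("foo".to_string()));
    // ...while span-related code can still look at the raw interpolated token.
    assert_eq!(
        parser.unnormalized_token.kind,
        TokenKind::InterpolatedIdent("foo".to_string())
    );
}

Keeping both the raw and the normalized token around as plain fields is what replaces the old `Option<Token>` plus getter scheme, which is why the call sites in the patch can drop the `unnormalized_token()`/`unnormalized_prev_token()` calls in favor of direct field reads.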