Remove LazyTokenStream.

It's present within `Token::Interpolated` as an optimization, so that if
a nonterminal is converted to a `TokenStream` multiple times, the
first-computed value is saved and reused.
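
For reference, the pattern it implements is compute-on-first-use with a
cached result. This is a simplified sketch, distilled from the
`LazyTokenStream` code deleted below:

    // A slot holding an optional `TokenStream`, filled in on first use.
    pub struct LazyTokenStream(Lock<Option<TokenStream>>);

    impl LazyTokenStream {
        fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
            let mut opt_stream = self.0.lock();
            if opt_stream.is_none() {
                // First conversion: compute the stream and cache it.
                *opt_stream = Some(f());
            }
            // Any later conversion reuses the cached stream.
            opt_stream.clone().unwrap()
        }
    }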

But in practice it's not needed. `interpolated_to_tokenstream()` is a
cold function: it's only called a few dozen times while compiling rustc
itself, and a few hundred times across the entire `rustc-perf` suite.
Furthermore, when it is called it is almost always for the first
conversion of a given nonterminal, so the cached value is almost never
reused.

So this commit removes `LazyTokenStream`, along with the now-unnecessary
`Token::interpolated()`.

As well as being a significant simplification, the removal speeds things
up slightly, mostly because the `LazyTokenStream` instances no longer
have to be `drop`ped.
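
The change in a nutshell, assembled from the diff below:

    // Before: `Token::Interpolated` paired the nonterminal with a lazy
    // stream cache, built via the `Token::interpolated` helper and
    // accessed through the tuple's `.0` field.
    Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),
    let token = Token::interpolated(token::NtItem(item));
    match nt.0 { /* ... */ }

    // After: the variant holds the nonterminal alone; callers build the
    // `Lrc` directly and dereference it at use sites.
    Interpolated(Lrc<Nonterminal>),
    let token = Token::Interpolated(Lrc::new(token::NtItem(item)));
    match **nt { /* ... */ }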

Author: Nicholas Nethercote
Date:   2019-02-15 09:10:02 +11:00
Parent: d26bf742db
Commit: f8801f3bf6

13 changed files with 58 additions and 113 deletions


@@ -339,7 +339,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
     fn visit_token(&mut self, t: Token) {
         if let Token::Interpolated(nt) = t {
-            if let token::NtExpr(ref expr) = nt.0 {
+            if let token::NtExpr(ref expr) = *nt {
                 if let ExprKind::Mac(..) = expr.node {
                     self.visit_macro_invoc(expr.id);
                 }


@@ -1025,7 +1025,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {
     fn visit_token(&mut self, t: Token) {
         if let Token::Interpolated(nt) = t {
-            if let token::NtExpr(ref expr) = nt.0 {
+            if let token::NtExpr(ref expr) = *nt {
                 if let ast::ExprKind::Mac(..) = expr.node {
                     self.visit_invoc(expr.id);
                 }


@@ -517,7 +517,7 @@ impl MetaItem {
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
+            Some(TokenTree::Token(_, Token::Interpolated(nt))) => match *nt {
                 token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
                 token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
                 token::Nonterminal::NtPath(ref path) => path.clone(),
@@ -682,7 +682,7 @@ impl LitKind {
         match token {
             Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)),
             Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)),
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(nt) => match *nt {
                 token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
                     ExprKind::Lit(ref lit) => Some(lit.node.clone()),
                     _ => None,


@@ -266,7 +266,7 @@ impl<F> TTMacroExpander for F
 impl MutVisitor for AvoidInterpolatedIdents {
     fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
         if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
-            if let token::NtIdent(ident, is_raw) = nt.0 {
+            if let token::NtIdent(ident, is_raw) = **nt {
                 *tt = tokenstream::TokenTree::Token(ident.span,
                                                     token::Ident(ident, is_raw));
             }


@@ -25,6 +25,7 @@ use syntax_pos::{Span, DUMMY_SP, FileName};
 use syntax_pos::hygiene::ExpnFormat;
 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lrc;
 use std::fs;
 use std::io::ErrorKind;
 use std::{iter, mem};
@@ -586,14 +587,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
             AttrProcMacro(ref mac, ..) => {
                 self.gate_proc_macro_attr_item(attr.span, &item);
-                let item_tok = TokenTree::Token(DUMMY_SP, Token::interpolated(match item {
+                let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item {
                     Annotatable::Item(item) => token::NtItem(item),
                     Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                     Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
                     Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
                     Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
                     Annotatable::Expr(expr) => token::NtExpr(expr),
-                })).into();
+                }))).into();
                 let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
                 let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
                 let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,


@@ -829,7 +829,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
         },
         "block" => match *token {
             Token::OpenDelim(token::Brace) => true,
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(ref nt) => match **nt {
                 token::NtItem(_)
                 | token::NtPat(_)
                 | token::NtTy(_)
@@ -843,9 +843,9 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
         },
         "path" | "meta" => match *token {
             Token::ModSep | Token::Ident(..) => true,
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(ref nt) => match **nt {
                 token::NtPath(_) | token::NtMeta(_) => true,
-                _ => may_be_ident(&nt.0),
+                _ => may_be_ident(&nt),
             },
             _ => false,
         },
@@ -862,12 +862,12 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
             Token::ModSep | // path
             Token::Lt | // path (UFCS constant)
             Token::BinOp(token::Shl) => true, // path (double UFCS)
-            Token::Interpolated(ref nt) => may_be_ident(&nt.0),
+            Token::Interpolated(ref nt) => may_be_ident(nt),
             _ => false,
         },
         "lifetime" => match *token {
             Token::Lifetime(_) => true,
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(ref nt) => match **nt {
                 token::NtLifetime(_) | token::NtTT(_) => true,
                 _ => false,
             },


@@ -149,7 +149,8 @@ pub fn transcribe(cx: &ExtCtxt<'_>,
                 result.push(tt.clone().into());
             } else {
                 sp = sp.apply_mark(cx.current_expansion.mark);
-                let token = TokenTree::Token(sp, Token::interpolated((**nt).clone()));
+                let token =
+                    TokenTree::Token(sp, Token::Interpolated(Lrc::new((**nt).clone())));
                 result.push(token.into());
             }
         } else {


@@ -581,9 +581,8 @@ pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
         token::Ident(id, _is_raw) => vis.visit_ident(id),
         token::Lifetime(id) => vis.visit_ident(id),
         token::Interpolated(nt) => {
-            let nt = Lrc::make_mut(nt);
-            vis.visit_interpolated(&mut nt.0);
-            nt.1 = token::LazyTokenStream::new();
+            let mut nt = Lrc::make_mut(nt);
+            vis.visit_interpolated(&mut nt);
         }
         _ => {}
     }


@@ -141,7 +141,7 @@ impl<'a> Parser<'a> {
     /// The delimiters or `=` are still put into the resulting token stream.
     crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
         let meta = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
                 _ => None,
             },
@@ -227,7 +227,7 @@ impl<'a> Parser<'a> {
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         let nt_meta = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref e) => Some(e.clone()),
                 _ => None,
             },


@@ -119,7 +119,7 @@ enum BlockMode {
 macro_rules! maybe_whole_expr {
     ($p:expr) => {
         if let token::Interpolated(nt) = $p.token.clone() {
-            match nt.0 {
+            match *nt {
                 token::NtExpr(ref e) | token::NtLiteral(ref e) => {
                     $p.bump();
                     return Ok((*e).clone());
@@ -146,7 +146,7 @@ macro_rules! maybe_whole_expr {
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
         if let token::Interpolated(nt) = $p.token.clone() {
-            if let token::$constructor($x) = nt.0.clone() {
+            if let token::$constructor($x) = (*nt).clone() {
                 $p.bump();
                 return Ok($e);
             }
@@ -1570,7 +1570,7 @@ impl<'a> Parser<'a> {
                 Some(body)
             }
             token::Interpolated(ref nt) => {
-                match &nt.0 {
+                match **nt {
                     token::NtBlock(..) => {
                         *at_end = true;
                         let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;
@@ -1913,7 +1913,7 @@ impl<'a> Parser<'a> {
     fn is_named_argument(&mut self) -> bool {
         let offset = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
             }
@@ -2099,7 +2099,7 @@ impl<'a> Parser<'a> {
     /// Matches `token_lit = LIT_INTEGER | ...`.
     fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
         let out = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
                     ExprKind::Lit(ref lit) => { lit.node.clone() }
                     _ => { return self.unexpected_last(&self.token); }
@@ -2299,7 +2299,7 @@ impl<'a> Parser<'a> {
     /// attributes.
     pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
         let meta_ident = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref meta) => match meta.node {
                     ast::MetaItemKind::Word => Some(meta.ident.clone()),
                     _ => None,
@@ -3271,7 +3271,7 @@ impl<'a> Parser<'a> {
         self.meta_var_span = Some(self.span);
         // Interpolated identifier and lifetime tokens are replaced with usual identifier
         // and lifetime tokens, so the former are never encountered during normal parsing.
-        match nt.0 {
+        match **nt {
             token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
             token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
             _ => return,
@@ -3403,7 +3403,7 @@ impl<'a> Parser<'a> {
             // can't continue an expression after an ident
             token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
             token::Literal(..) | token::Pound => true,
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtIdent(..) | token::NtExpr(..) |
                 token::NtBlock(..) | token::NtPath(..) => true,
                 _ => false,


@@ -13,16 +13,15 @@ use crate::syntax::parse::parse_stream_from_source_str;
 use crate::syntax::parse::parser::emit_unclosed_delims;
 use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
-use serialize::{Decodable, Decoder, Encodable, Encoder};
 use syntax_pos::symbol::{self, Symbol};
 use syntax_pos::{self, Span, FileName};
 use log::info;
 
-use std::{cmp, fmt};
+use std::fmt;
 use std::mem;
 
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert;
-use rustc_data_structures::sync::{Lrc, Lock};
+use rustc_data_structures::sync::Lrc;
@@ -184,9 +183,8 @@ pub enum Token {
     Ident(ast::Ident, /* is_raw */ bool),
     Lifetime(ast::Ident),
-    // The `LazyTokenStream` is a pure function of the `Nonterminal`,
-    // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
-    Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),
+    Interpolated(Lrc<Nonterminal>),
 
     // Can be expanded into several tokens.
     /// A doc comment.
     DocComment(ast::Name),
@@ -209,10 +207,6 @@ pub enum Token {
 static_assert!(MEM_SIZE_OF_STATEMENT: mem::size_of::<Token>() == 16);
 
 impl Token {
-    pub fn interpolated(nt: Nonterminal) -> Token {
-        Token::Interpolated(Lrc::new((nt, LazyTokenStream::new())))
-    }
-
     /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
     pub fn from_ast_ident(ident: ast::Ident) -> Token {
         Ident(ident, ident.is_raw_guess())
@@ -244,7 +238,7 @@ impl Token {
             ModSep | // global path
             Lifetime(..) | // labeled loop
             Pound => true, // expression attributes
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtLiteral(..) |
                 NtIdent(..) |
                 NtExpr(..) |
@@ -272,7 +266,7 @@ impl Token {
             Lifetime(..) | // lifetime bound in trait object
             Lt | BinOp(Shl) | // associated path
             ModSep => true, // global path
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtIdent(..) | NtTy(..) | NtPath(..) | NtLifetime(..) => true,
                 _ => false,
             },
@@ -284,7 +278,7 @@ impl Token {
     pub fn can_begin_const_arg(&self) -> bool {
         match self {
             OpenDelim(Brace) => true,
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtExpr(..) => true,
                 NtBlock(..) => true,
                 NtLiteral(..) => true,
@@ -316,7 +310,7 @@ impl Token {
             BinOp(Minus) => true,
             Ident(ident, false) if ident.name == keywords::True.name() => true,
             Ident(ident, false) if ident.name == keywords::False.name() => true,
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtLiteral(..) => true,
                 _ => false,
             },
@@ -328,7 +322,7 @@ impl Token {
     pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
         match *self {
             Ident(ident, is_raw) => Some((ident, is_raw)),
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtIdent(ident, is_raw) => Some((ident, is_raw)),
                 _ => None,
             },
@@ -339,7 +333,7 @@ impl Token {
     pub fn lifetime(&self) -> Option<ast::Ident> {
         match *self {
             Lifetime(ident) => Some(ident),
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtLifetime(ident) => Some(ident),
                 _ => None,
             },
@@ -367,7 +361,7 @@ impl Token {
     /// Returns `true` if the token is an interpolated path.
     fn is_path(&self) -> bool {
         if let Interpolated(ref nt) = *self {
-            if let NtPath(..) = nt.0 {
+            if let NtPath(..) = **nt {
                 return true;
             }
         }
@@ -508,8 +502,8 @@ impl Token {
         }
     }
 
-    pub fn interpolated_to_tokenstream(sess: &ParseSess, nt: Lrc<(Nonterminal, LazyTokenStream)>,
-                                       span: Span) -> TokenStream {
+    pub fn interpolated_to_tokenstream(sess: &ParseSess, nt: Lrc<Nonterminal>, span: Span)
+                                       -> TokenStream {
         // An `Interpolated` token means that we have a `Nonterminal`
         // which is often a parsed AST item. At this point we now need
         // to convert the parsed AST to an actual token stream, e.g.
@@ -524,41 +518,36 @@ impl Token {
         // stream they came from. Here we attempt to extract these
         // lossless token streams before we fall back to the
         // stringification.
-        let mut tokens = None;
-        match nt.0 {
+        let tokens = match *nt {
             Nonterminal::NtItem(ref item) => {
-                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtTraitItem(ref item) => {
-                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
             }
             Nonterminal::NtImplItem(ref item) => {
-                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
            }
             Nonterminal::NtIdent(ident, is_raw) => {
                 let token = Token::Ident(ident, is_raw);
-                tokens = Some(TokenTree::Token(ident.span, token).into());
+                Some(TokenTree::Token(ident.span, token).into())
             }
             Nonterminal::NtLifetime(ident) => {
                 let token = Token::Lifetime(ident);
-                tokens = Some(TokenTree::Token(ident.span, token).into());
+                Some(TokenTree::Token(ident.span, token).into())
             }
             Nonterminal::NtTT(ref tt) => {
-                tokens = Some(tt.clone().into());
+                Some(tt.clone().into())
             }
-            _ => {}
-        }
+            _ => None,
+        };
 
-        let tokens_for_real = nt.1.force(|| {
-            // FIXME(#43081): Avoid this pretty-print + reparse hack
-            let source = pprust::nonterminal_to_string(&nt.0);
-            let filename = FileName::macro_expansion_source_code(&source);
-            let (tokens, errors) = parse_stream_from_source_str(
-                filename, source, sess, Some(span));
-            emit_unclosed_delims(&errors, &sess.span_diagnostic);
-            tokens
-        });
+        // FIXME(#43081): Avoid this pretty-print + reparse hack
+        let source = pprust::nonterminal_to_string(&nt);
+        let filename = FileName::macro_expansion_source_code(&source);
+        let (tokens_for_real, errors) =
+            parse_stream_from_source_str(filename, source, sess, Some(span));
+        emit_unclosed_delims(&errors, &sess.span_diagnostic);
 
         // During early phases of the compiler the AST could get modified
         // directly (e.g., attributes added or removed) and the internal cache
@@ -734,52 +723,6 @@ crate fn is_op(tok: &Token) -> bool {
     }
 }
 
-#[derive(Clone)]
-pub struct LazyTokenStream(Lock<Option<TokenStream>>);
-
-impl cmp::Eq for LazyTokenStream {}
-impl PartialEq for LazyTokenStream {
-    fn eq(&self, _other: &LazyTokenStream) -> bool {
-        true
-    }
-}
-
-impl fmt::Debug for LazyTokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt::Debug::fmt(&self.clone().0.into_inner(), f)
-    }
-}
-
-impl LazyTokenStream {
-    pub fn new() -> Self {
-        LazyTokenStream(Lock::new(None))
-    }
-
-    fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
-        let mut opt_stream = self.0.lock();
-        if opt_stream.is_none() {
-            *opt_stream = Some(f());
-        }
-        opt_stream.clone().unwrap()
-    }
-}
-
-impl Encodable for LazyTokenStream {
-    fn encode<S: Encoder>(&self, _: &mut S) -> Result<(), S::Error> {
-        Ok(())
-    }
-}
-
-impl Decodable for LazyTokenStream {
-    fn decode<D: Decoder>(_: &mut D) -> Result<LazyTokenStream, D::Error> {
-        Ok(LazyTokenStream::new())
-    }
-}
-
-impl ::std::hash::Hash for LazyTokenStream {
-    fn hash<H: ::std::hash::Hasher>(&self, _hasher: &mut H) {}
-}
-
 fn prepend_attrs(sess: &ParseSess,
                  attrs: &[ast::Attribute],
                  tokens: Option<&tokenstream::TokenStream>,


@@ -257,7 +257,7 @@ pub fn token_to_string(tok: &Token) -> String {
         token::Comment => "/* */".to_string(),
         token::Shebang(s) => format!("/* shebang: {}*/", s),
 
-        token::Interpolated(ref nt) => nonterminal_to_string(&nt.0),
+        token::Interpolated(ref nt) => nonterminal_to_string(nt),
     }
 }


@@ -2,6 +2,7 @@ use crate::proc_macro_impl::EXEC_STRATEGY;
 use crate::proc_macro_server;
 
 use errors::FatalError;
+use rustc_data_structures::sync::Lrc;
 use syntax::ast::{self, ItemKind, Attribute, Mac};
 use syntax::attr::{mark_used, mark_known};
 use syntax::source_map::Span;
@@ -65,7 +66,7 @@ impl MultiItemModifier for ProcMacroDerive {
         // Mark attributes as known, and used.
         MarkAttrs(&self.attrs).visit_item(&item);
 
-        let token = Token::interpolated(token::NtItem(item));
+        let token = Token::Interpolated(Lrc::new(token::NtItem(item)));
         let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
 
         let server = proc_macro_server::Rustc::new(ecx);