Rollup merge of #58476 - nnethercote:rm-LazyTokenStream, r=petrochenkov
Remove `LazyTokenStream`.

`LazyTokenStream` was added in #40939. Perhaps it was an effective optimization then, but it no longer is. This PR removes it, making the code both simpler and faster.

r? @alexcrichton
commit 585d4d29d9
16 changed files with 168 additions and 226 deletions
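
The heart of the change is the shape of `Token::Interpolated`: the old `Lrc<(Nonterminal, LazyTokenStream)>` pair carried a lazily computed, cached token stream alongside every interpolated nonterminal, while the new `Lrc<Nonterminal>` drops the cache and recomputes the stream on demand via `Nonterminal::to_tokenstream`. A minimal std-only sketch of the caching behavior being removed, with plain `Rc`/`RefCell` standing in for rustc's `Lrc`/`Lock` and a `String` standing in for the real `TokenStream`:

use std::cell::RefCell;
use std::rc::Rc;

type TokenStream = String; // stand-in for rustc's real TokenStream

// Mirrors the removed `LazyTokenStream`: a slot that is filled at most once.
#[derive(Default)]
struct LazyTokenStream(RefCell<Option<TokenStream>>);

impl LazyTokenStream {
    fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
        let mut slot = self.0.borrow_mut();
        if slot.is_none() {
            *slot = Some(f());
        }
        slot.clone().unwrap()
    }
}

fn main() {
    // Old layout: every interpolated token dragged a cache slot along.
    let old_style: Rc<(&str, LazyTokenStream)> =
        Rc::new(("an AST fragment", LazyTokenStream::default()));
    let first = old_style.1.force(|| "reparsed token stream".to_string());
    let second = old_style.1.force(|| unreachable!("cached after the first call"));
    assert_eq!(first, second);
}
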
@@ -1124,19 +1124,19 @@ impl<'a> LoweringContext<'a> {
             TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
                 span,
                 delim,
-                self.lower_token_stream(tts.into()).into(),
+                self.lower_token_stream(tts),
             ).into(),
         }
     }
 
     fn lower_token(&mut self, token: Token, span: Span) -> TokenStream {
         match token {
-            Token::Interpolated(_) => {}
-            other => return TokenTree::Token(span, other).into(),
+            Token::Interpolated(nt) => {
+                let tts = nt.to_tokenstream(&self.sess.parse_sess, span);
+                self.lower_token_stream(tts)
+            }
+            other => TokenTree::Token(span, other).into(),
         }
-
-        let tts = token.interpolated_to_tokenstream(&self.sess.parse_sess, span);
-        self.lower_token_stream(tts)
     }
 
     fn lower_arm(&mut self, arm: &Arm) -> hir::Arm {

@@ -339,7 +339,7 @@ impl<'a> visit::Visitor<'a> for DefCollector<'a> {
 
     fn visit_token(&mut self, t: Token) {
         if let Token::Interpolated(nt) = t {
-            if let token::NtExpr(ref expr) = nt.0 {
+            if let token::NtExpr(ref expr) = *nt {
                 if let ExprKind::Mac(..) = expr.node {
                     self.visit_macro_invoc(expr.id);
                 }

@@ -1025,7 +1025,7 @@ impl<'a, 'b> Visitor<'a> for BuildReducedGraphVisitor<'a, 'b> {
 
     fn visit_token(&mut self, t: Token) {
         if let Token::Interpolated(nt) = t {
-            if let token::NtExpr(ref expr) = nt.0 {
+            if let token::NtExpr(ref expr) = *nt {
                 if let ast::ExprKind::Mac(..) = expr.node {
                     self.visit_invoc(expr.id);
                 }

@@ -517,7 +517,7 @@ impl MetaItem {
                 let span = span.with_hi(segments.last().unwrap().ident.span.hi());
                 Path { span, segments }
             }
-            Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match nt.0 {
+            Some(TokenTree::Token(_, Token::Interpolated(nt))) => match *nt {
                 token::Nonterminal::NtIdent(ident, _) => Path::from_ident(ident),
                 token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
                 token::Nonterminal::NtPath(ref path) => path.clone(),

@@ -682,7 +682,7 @@ impl LitKind {
         match token {
             Token::Ident(ident, false) if ident.name == "true" => Some(LitKind::Bool(true)),
             Token::Ident(ident, false) if ident.name == "false" => Some(LitKind::Bool(false)),
-            Token::Interpolated(ref nt) => match nt.0 {
+            Token::Interpolated(nt) => match *nt {
                 token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
                     ExprKind::Lit(ref lit) => Some(lit.node.clone()),
                     _ => None,

@@ -266,7 +266,7 @@ impl<F> TTMacroExpander for F
 impl MutVisitor for AvoidInterpolatedIdents {
     fn visit_tt(&mut self, tt: &mut tokenstream::TokenTree) {
         if let tokenstream::TokenTree::Token(_, token::Interpolated(nt)) = tt {
-            if let token::NtIdent(ident, is_raw) = nt.0 {
+            if let token::NtIdent(ident, is_raw) = **nt {
                 *tt = tokenstream::TokenTree::Token(ident.span,
                                                     token::Ident(ident, is_raw));
             }

@@ -25,6 +25,7 @@ use syntax_pos::{Span, DUMMY_SP, FileName};
 use syntax_pos::hygiene::ExpnFormat;
 
 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lrc;
 use std::fs;
 use std::io::ErrorKind;
 use std::{iter, mem};

@@ -584,14 +585,14 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 }
                 AttrProcMacro(ref mac, ..) => {
                     self.gate_proc_macro_attr_item(attr.span, &item);
-                    let item_tok = TokenTree::Token(DUMMY_SP, Token::interpolated(match item {
+                    let item_tok = TokenTree::Token(DUMMY_SP, Token::Interpolated(Lrc::new(match item {
                         Annotatable::Item(item) => token::NtItem(item),
                         Annotatable::TraitItem(item) => token::NtTraitItem(item.into_inner()),
                         Annotatable::ImplItem(item) => token::NtImplItem(item.into_inner()),
                         Annotatable::ForeignItem(item) => token::NtForeignItem(item.into_inner()),
                         Annotatable::Stmt(stmt) => token::NtStmt(stmt.into_inner()),
                         Annotatable::Expr(expr) => token::NtExpr(expr),
-                    })).into();
+                    }))).into();
                     let input = self.extract_proc_macro_attr_input(attr.tokens, attr.span);
                     let tok_result = mac.expand(self.cx, attr.span, input, item_tok);
                     let res = self.parse_ast_fragment(tok_result, invoc.fragment_kind,

@@ -88,6 +88,7 @@ use smallvec::{smallvec, SmallVec};
 use syntax_pos::Span;
 
 use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::sync::Lrc;
 use std::collections::hash_map::Entry::{Occupied, Vacant};
 use std::mem;
 use std::ops::{Deref, DerefMut};

@@ -179,7 +180,7 @@ struct MatcherPos<'root, 'tt: 'root> {
     /// all bound matches from the submatcher into the shared top-level `matches` vector. If `sep`
     /// and `up` are `Some`, then `matches` is _not_ the shared top-level list. Instead, if one
     /// wants the shared `matches`, one should use `up.matches`.
-    matches: Box<[Rc<NamedMatchVec>]>,
+    matches: Box<[Lrc<NamedMatchVec>]>,
     /// The position in `matches` corresponding to the first metavar in this matcher's sequence of
     /// token trees. In other words, the first metavar in the first token of `top_elts` corresponds
     /// to `matches[match_lo]`.

@@ -218,7 +219,7 @@ struct MatcherPos<'root, 'tt: 'root> {
 impl<'root, 'tt> MatcherPos<'root, 'tt> {
     /// Adds `m` as a named match for the `idx`-th metavar.
     fn push_match(&mut self, idx: usize, m: NamedMatch) {
-        let matches = Rc::make_mut(&mut self.matches[idx]);
+        let matches = Lrc::make_mut(&mut self.matches[idx]);
         matches.push(m);
     }
 }

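The `Rc` → `Lrc` swaps in these macro-matcher hunks are mechanical: `Lrc` is the `rustc_data_structures::sync` alias for `Rc` (or `Arc` when rustc is built for parallel queries), and `Lrc::make_mut` has the same clone-on-write contract that `push_match` relies on. A small sketch with plain `Rc` from std showing that contract:

use std::rc::Rc;

fn main() {
    let shared = Rc::new(vec![1, 2]);
    let mut mine = Rc::clone(&shared);

    // Two owners: make_mut clones the Vec, so `shared` is untouched.
    Rc::make_mut(&mut mine).push(3);
    assert_eq!(*shared, vec![1, 2]);
    assert_eq!(*mine, vec![1, 2, 3]);

    // Sole owner now (for the cloned data): make_mut mutates in place.
    Rc::make_mut(&mut mine).push(4);
    assert_eq!(*mine, vec![1, 2, 3, 4]);
}
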
@@ -295,11 +296,11 @@ pub fn count_names(ms: &[TokenTree]) -> usize {
 }
 
 /// `len` `Vec`s (initially shared and empty) that will store matches of metavars.
-fn create_matches(len: usize) -> Box<[Rc<NamedMatchVec>]> {
+fn create_matches(len: usize) -> Box<[Lrc<NamedMatchVec>]> {
     if len == 0 {
         vec![]
     } else {
-        let empty_matches = Rc::new(SmallVec::new());
+        let empty_matches = Lrc::new(SmallVec::new());
         vec![empty_matches; len]
     }.into_boxed_slice()
 }

@@ -353,8 +354,8 @@ fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherP
 /// token tree it was derived from.
 #[derive(Debug, Clone)]
 pub enum NamedMatch {
-    MatchedSeq(Rc<NamedMatchVec>, DelimSpan),
-    MatchedNonterminal(Rc<Nonterminal>),
+    MatchedSeq(Lrc<NamedMatchVec>, DelimSpan),
+    MatchedNonterminal(Lrc<Nonterminal>),
 }
 
 /// Takes a sequence of token trees `ms` representing a matcher which successfully matched input

|
@ -561,7 +562,7 @@ fn inner_parse_loop<'root, 'tt>(
|
||||||
new_item.match_cur += seq.num_captures;
|
new_item.match_cur += seq.num_captures;
|
||||||
new_item.idx += 1;
|
new_item.idx += 1;
|
||||||
for idx in item.match_cur..item.match_cur + seq.num_captures {
|
for idx in item.match_cur..item.match_cur + seq.num_captures {
|
||||||
new_item.push_match(idx, MatchedSeq(Rc::new(smallvec![]), sp));
|
new_item.push_match(idx, MatchedSeq(Lrc::new(smallvec![]), sp));
|
||||||
}
|
}
|
||||||
cur_items.push(new_item);
|
cur_items.push(new_item);
|
||||||
}
|
}
|
||||||
|
@ -707,7 +708,7 @@ pub fn parse(
|
||||||
let matches = eof_items[0]
|
let matches = eof_items[0]
|
||||||
.matches
|
.matches
|
||||||
.iter_mut()
|
.iter_mut()
|
||||||
.map(|dv| Rc::make_mut(dv).pop().unwrap());
|
.map(|dv| Lrc::make_mut(dv).pop().unwrap());
|
||||||
return nameize(sess, ms, matches);
|
return nameize(sess, ms, matches);
|
||||||
} else if eof_items.len() > 1 {
|
} else if eof_items.len() > 1 {
|
||||||
return Error(
|
return Error(
|
||||||
|
@ -780,7 +781,7 @@ pub fn parse(
|
||||||
let match_cur = item.match_cur;
|
let match_cur = item.match_cur;
|
||||||
item.push_match(
|
item.push_match(
|
||||||
match_cur,
|
match_cur,
|
||||||
MatchedNonterminal(Rc::new(parse_nt(&mut parser, span, &ident.as_str()))),
|
MatchedNonterminal(Lrc::new(parse_nt(&mut parser, span, &ident.as_str()))),
|
||||||
);
|
);
|
||||||
item.idx += 1;
|
item.idx += 1;
|
||||||
item.match_cur += 1;
|
item.match_cur += 1;
|
||||||
|
@ -829,7 +830,7 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
|
||||||
},
|
},
|
||||||
"block" => match *token {
|
"block" => match *token {
|
||||||
Token::OpenDelim(token::Brace) => true,
|
Token::OpenDelim(token::Brace) => true,
|
||||||
Token::Interpolated(ref nt) => match nt.0 {
|
Token::Interpolated(ref nt) => match **nt {
|
||||||
token::NtItem(_)
|
token::NtItem(_)
|
||||||
| token::NtPat(_)
|
| token::NtPat(_)
|
||||||
| token::NtTy(_)
|
| token::NtTy(_)
|
||||||
|
@ -843,9 +844,9 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
|
||||||
},
|
},
|
||||||
"path" | "meta" => match *token {
|
"path" | "meta" => match *token {
|
||||||
Token::ModSep | Token::Ident(..) => true,
|
Token::ModSep | Token::Ident(..) => true,
|
||||||
Token::Interpolated(ref nt) => match nt.0 {
|
Token::Interpolated(ref nt) => match **nt {
|
||||||
token::NtPath(_) | token::NtMeta(_) => true,
|
token::NtPath(_) | token::NtMeta(_) => true,
|
||||||
_ => may_be_ident(&nt.0),
|
_ => may_be_ident(&nt),
|
||||||
},
|
},
|
||||||
_ => false,
|
_ => false,
|
||||||
},
|
},
|
||||||
|
@ -862,12 +863,12 @@ fn may_begin_with(name: &str, token: &Token) -> bool {
|
||||||
Token::ModSep | // path
|
Token::ModSep | // path
|
||||||
Token::Lt | // path (UFCS constant)
|
Token::Lt | // path (UFCS constant)
|
||||||
Token::BinOp(token::Shl) => true, // path (double UFCS)
|
Token::BinOp(token::Shl) => true, // path (double UFCS)
|
||||||
Token::Interpolated(ref nt) => may_be_ident(&nt.0),
|
Token::Interpolated(ref nt) => may_be_ident(nt),
|
||||||
_ => false,
|
_ => false,
|
||||||
},
|
},
|
||||||
"lifetime" => match *token {
|
"lifetime" => match *token {
|
||||||
Token::Lifetime(_) => true,
|
Token::Lifetime(_) => true,
|
||||||
Token::Interpolated(ref nt) => match nt.0 {
|
Token::Interpolated(ref nt) => match **nt {
|
||||||
token::NtLifetime(_) | token::NtTT(_) => true,
|
token::NtLifetime(_) | token::NtTT(_) => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
},
|
},
|
||||||
|
|
|
@@ -149,7 +149,7 @@ pub fn transcribe(cx: &ExtCtxt<'_>,
                     result.push(tt.clone().into());
                 } else {
                     sp = sp.apply_mark(cx.current_expansion.mark);
-                    let token = TokenTree::Token(sp, Token::interpolated((**nt).clone()));
+                    let token = TokenTree::Token(sp, Token::Interpolated(nt.clone()));
                     result.push(token.into());
                 }
             } else {

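One effect of the new representation, visible in the `transcribe` hunk above: cloning an interpolated token used to deep-clone the `Nonterminal` (`(**nt).clone()`) so that a fresh `(Nonterminal, LazyTokenStream)` pair could be built, whereas `nt.clone()` now only bumps a reference count. An illustrative std-only comparison, with plain `Rc` standing in for `Lrc`:

use std::rc::Rc;

fn main() {
    let nt = Rc::new(vec![0u8; 1024]); // stand-in for a parsed AST fragment

    let cheap = nt.clone();            // refcount bump, no data copied
    assert!(Rc::ptr_eq(&nt, &cheap));

    let expensive = Rc::new((*nt).clone()); // what the old code had to do
    assert!(!Rc::ptr_eq(&nt, &expensive));
}
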
@@ -581,9 +581,8 @@ pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) {
         token::Ident(id, _is_raw) => vis.visit_ident(id),
         token::Lifetime(id) => vis.visit_ident(id),
         token::Interpolated(nt) => {
-            let nt = Lrc::make_mut(nt);
-            vis.visit_interpolated(&mut nt.0);
-            nt.1 = token::LazyTokenStream::new();
+            let mut nt = Lrc::make_mut(nt);
+            vis.visit_interpolated(&mut nt);
         }
         _ => {}
     }

@@ -141,7 +141,7 @@ impl<'a> Parser<'a> {
     /// The delimiters or `=` are still put into the resulting token stream.
     crate fn parse_meta_item_unrestricted(&mut self) -> PResult<'a, (ast::Path, TokenStream)> {
         let meta = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
                 _ => None,
             },

@@ -227,7 +227,7 @@ impl<'a> Parser<'a> {
     /// meta_item_inner : (meta_item | UNSUFFIXED_LIT) (',' meta_item_inner)? ;
     pub fn parse_meta_item(&mut self) -> PResult<'a, ast::MetaItem> {
         let nt_meta = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref e) => Some(e.clone()),
                 _ => None,
             },

@@ -119,7 +119,7 @@ enum BlockMode {
 macro_rules! maybe_whole_expr {
     ($p:expr) => {
         if let token::Interpolated(nt) = $p.token.clone() {
-            match nt.0 {
+            match *nt {
                 token::NtExpr(ref e) | token::NtLiteral(ref e) => {
                     $p.bump();
                     return Ok((*e).clone());

@@ -146,7 +146,7 @@ macro_rules! maybe_whole_expr {
 macro_rules! maybe_whole {
     ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
         if let token::Interpolated(nt) = $p.token.clone() {
-            if let token::$constructor($x) = nt.0.clone() {
+            if let token::$constructor($x) = (*nt).clone() {
                 $p.bump();
                 return Ok($e);
             }

@@ -1570,7 +1570,7 @@ impl<'a> Parser<'a> {
                 Some(body)
             }
             token::Interpolated(ref nt) => {
-                match &nt.0 {
+                match **nt {
                     token::NtBlock(..) => {
                         *at_end = true;
                         let (inner_attrs, body) = self.parse_inner_attrs_and_block()?;

@@ -1913,7 +1913,7 @@ impl<'a> Parser<'a> {
 
     fn is_named_argument(&mut self) -> bool {
         let offset = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtPat(..) => return self.look_ahead(1, |t| t == &token::Colon),
                 _ => 0,
             }

@@ -2099,7 +2099,7 @@ impl<'a> Parser<'a> {
     /// Matches `token_lit = LIT_INTEGER | ...`.
     fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
         let out = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtExpr(ref v) | token::NtLiteral(ref v) => match v.node {
                     ExprKind::Lit(ref lit) => { lit.node.clone() }
                     _ => { return self.unexpected_last(&self.token); }

@@ -2299,7 +2299,7 @@ impl<'a> Parser<'a> {
     /// attributes.
     pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
         let meta_ident = match self.token {
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtMeta(ref meta) => match meta.node {
                     ast::MetaItemKind::Word => Some(meta.ident.clone()),
                     _ => None,

@@ -3271,7 +3271,7 @@ impl<'a> Parser<'a> {
         self.meta_var_span = Some(self.span);
         // Interpolated identifier and lifetime tokens are replaced with usual identifier
         // and lifetime tokens, so the former are never encountered during normal parsing.
-        match nt.0 {
+        match **nt {
             token::NtIdent(ident, is_raw) => (token::Ident(ident, is_raw), ident.span),
             token::NtLifetime(ident) => (token::Lifetime(ident), ident.span),
             _ => return,

@@ -3403,7 +3403,7 @@ impl<'a> Parser<'a> {
             // can't continue an expression after an ident
             token::Ident(ident, is_raw) => token::ident_can_begin_expr(ident, is_raw),
             token::Literal(..) | token::Pound => true,
-            token::Interpolated(ref nt) => match nt.0 {
+            token::Interpolated(ref nt) => match **nt {
                 token::NtIdent(..) | token::NtExpr(..) |
                 token::NtBlock(..) | token::NtPath(..) => true,
                 _ => false,

@@ -13,16 +13,15 @@ use crate::syntax::parse::parse_stream_from_source_str;
 use crate::syntax::parse::parser::emit_unclosed_delims;
 use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
 
-use serialize::{Decodable, Decoder, Encodable, Encoder};
 use syntax_pos::symbol::{self, Symbol};
 use syntax_pos::{self, Span, FileName};
 use log::info;
 
-use std::{cmp, fmt};
+use std::fmt;
 use std::mem;
 #[cfg(target_arch = "x86_64")]
 use rustc_data_structures::static_assert;
-use rustc_data_structures::sync::{Lrc, Lock};
+use rustc_data_structures::sync::Lrc;
 
 #[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
 pub enum BinOpToken {

@@ -87,7 +86,7 @@ impl Lit {
         }
     }
 
-    // See comments in `interpolated_to_tokenstream` for why we care about
+    // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
         mem::discriminant(self) == mem::discriminant(other)

@@ -184,9 +183,8 @@ pub enum Token {
     Ident(ast::Ident, /* is_raw */ bool),
     Lifetime(ast::Ident),
 
-    // The `LazyTokenStream` is a pure function of the `Nonterminal`,
-    // and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
-    Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),
+    Interpolated(Lrc<Nonterminal>),
 
     // Can be expanded into several tokens.
     /// A doc comment.
     DocComment(ast::Name),

@@ -209,10 +207,6 @@ pub enum Token {
 static_assert!(MEM_SIZE_OF_STATEMENT: mem::size_of::<Token>() == 16);
 
 impl Token {
-    pub fn interpolated(nt: Nonterminal) -> Token {
-        Token::Interpolated(Lrc::new((nt, LazyTokenStream::new())))
-    }
-
     /// Recovers a `Token` from an `ast::Ident`. This creates a raw identifier if necessary.
     pub fn from_ast_ident(ident: ast::Ident) -> Token {
         Ident(ident, ident.is_raw_guess())

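The `nt.0` → `**nt` rewrite repeated throughout these hunks follows directly from the enum change: with the tuple gone, call sites match on the `Nonterminal` through the `Lrc` itself (one deref for the `ref` binding, one for the `Lrc`) instead of projecting the first tuple field. A minimal model of the new pattern, using plain `Rc` and invented stand-in types:

use std::rc::Rc;

enum Nonterminal {
    NtExpr(String),
    NtLifetime(String),
}

enum Token {
    Interpolated(Rc<Nonterminal>),
    Dot,
}

fn describe(t: &Token) -> &'static str {
    match *t {
        // Deref once past the `ref` binding, once through the Rc: `**nt`.
        Token::Interpolated(ref nt) => match **nt {
            Nonterminal::NtExpr(_) => "an interpolated expression",
            Nonterminal::NtLifetime(_) => "an interpolated lifetime",
        },
        Token::Dot => "punctuation",
    }
}

fn main() {
    let token = Token::Interpolated(Rc::new(Nonterminal::NtExpr("1 + 1".into())));
    assert_eq!(describe(&token), "an interpolated expression");
    assert_eq!(describe(&Token::Dot), "punctuation");
}
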
@@ -244,7 +238,7 @@ impl Token {
             ModSep |       // global path
             Lifetime(..) | // labeled loop
             Pound => true, // expression attributes
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtLiteral(..) |
                 NtIdent(..)   |
                 NtExpr(..)    |

@@ -272,7 +266,7 @@ impl Token {
             Lifetime(..) |    // lifetime bound in trait object
             Lt | BinOp(Shl) | // associated path
             ModSep => true,   // global path
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtIdent(..) | NtTy(..) | NtPath(..) | NtLifetime(..) => true,
                 _ => false,
             },

@@ -284,7 +278,7 @@ impl Token {
     pub fn can_begin_const_arg(&self) -> bool {
         match self {
             OpenDelim(Brace) => true,
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtExpr(..) => true,
                 NtBlock(..) => true,
                 NtLiteral(..) => true,

@@ -316,7 +310,7 @@ impl Token {
             BinOp(Minus) => true,
             Ident(ident, false) if ident.name == keywords::True.name() => true,
             Ident(ident, false) if ident.name == keywords::False.name() => true,
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtLiteral(..) => true,
                 _ => false,
             },

@@ -328,7 +322,7 @@ impl Token {
     pub fn ident(&self) -> Option<(ast::Ident, /* is_raw */ bool)> {
         match *self {
             Ident(ident, is_raw) => Some((ident, is_raw)),
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtIdent(ident, is_raw) => Some((ident, is_raw)),
                 _ => None,
             },

@@ -339,7 +333,7 @@ impl Token {
     pub fn lifetime(&self) -> Option<ast::Ident> {
         match *self {
             Lifetime(ident) => Some(ident),
-            Interpolated(ref nt) => match nt.0 {
+            Interpolated(ref nt) => match **nt {
                 NtLifetime(ident) => Some(ident),
                 _ => None,
             },

@@ -367,7 +361,7 @@ impl Token {
     /// Returns `true` if the token is an interpolated path.
     fn is_path(&self) -> bool {
         if let Interpolated(ref nt) = *self {
-            if let NtPath(..) = nt.0 {
+            if let NtPath(..) = **nt {
                 return true;
             }
         }

@@ -508,98 +502,7 @@ impl Token {
         }
     }
 
-    pub fn interpolated_to_tokenstream(&self, sess: &ParseSess, span: Span)
-                                       -> TokenStream
-    {
-        let nt = match *self {
-            Token::Interpolated(ref nt) => nt,
-            _ => panic!("only works on interpolated tokens"),
-        };
-
-        // An `Interpolated` token means that we have a `Nonterminal`
-        // which is often a parsed AST item. At this point we now need
-        // to convert the parsed AST to an actual token stream, e.g.
-        // un-parse it basically.
-        //
-        // Unfortunately there's not really a great way to do that in a
-        // guaranteed lossless fashion right now. The fallback here is
-        // to just stringify the AST node and reparse it, but this loses
-        // all span information.
-        //
-        // As a result, some AST nodes are annotated with the token
-        // stream they came from. Here we attempt to extract these
-        // lossless token streams before we fall back to the
-        // stringification.
-        let mut tokens = None;
-
-        match nt.0 {
-            Nonterminal::NtItem(ref item) => {
-                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
-            }
-            Nonterminal::NtTraitItem(ref item) => {
-                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
-            }
-            Nonterminal::NtImplItem(ref item) => {
-                tokens = prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span);
-            }
-            Nonterminal::NtIdent(ident, is_raw) => {
-                let token = Token::Ident(ident, is_raw);
-                tokens = Some(TokenTree::Token(ident.span, token).into());
-            }
-            Nonterminal::NtLifetime(ident) => {
-                let token = Token::Lifetime(ident);
-                tokens = Some(TokenTree::Token(ident.span, token).into());
-            }
-            Nonterminal::NtTT(ref tt) => {
-                tokens = Some(tt.clone().into());
-            }
-            _ => {}
-        }
-
-        let tokens_for_real = nt.1.force(|| {
-            // FIXME(#43081): Avoid this pretty-print + reparse hack
-            let source = pprust::token_to_string(self);
-            let filename = FileName::macro_expansion_source_code(&source);
-            let (tokens, errors) = parse_stream_from_source_str(
-                filename, source, sess, Some(span));
-            emit_unclosed_delims(&errors, &sess.span_diagnostic);
-            tokens
-        });
-
-        // During early phases of the compiler the AST could get modified
-        // directly (e.g., attributes added or removed) and the internal cache
-        // of tokens my not be invalidated or updated. Consequently if the
-        // "lossless" token stream disagrees with our actual stringification
-        // (which has historically been much more battle-tested) then we go
-        // with the lossy stream anyway (losing span information).
-        //
-        // Note that the comparison isn't `==` here to avoid comparing spans,
-        // but it *also* is a "probable" equality which is a pretty weird
-        // definition. We mostly want to catch actual changes to the AST
-        // like a `#[cfg]` being processed or some weird `macro_rules!`
-        // expansion.
-        //
-        // What we *don't* want to catch is the fact that a user-defined
-        // literal like `0xf` is stringified as `15`, causing the cached token
-        // stream to not be literal `==` token-wise (ignoring spans) to the
-        // token stream we got from stringification.
-        //
-        // Instead the "probably equal" check here is "does each token
-        // recursively have the same discriminant?" We basically don't look at
-        // the token values here and assume that such fine grained token stream
-        // modifications, including adding/removing typically non-semantic
-        // tokens such as extra braces and commas, don't happen.
-        if let Some(tokens) = tokens {
-            if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
-                return tokens
-            }
-            info!("cached tokens found, but they're not \"probably equal\", \
-                   going with stringified version");
-        }
-        return tokens_for_real
-    }
-
-    // See comments in `interpolated_to_tokenstream` for why we care about
+    // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     crate fn probably_equal_for_proc_macro(&self, other: &Token) -> bool {
         if mem::discriminant(self) != mem::discriminant(other) {

@@ -731,6 +634,85 @@ impl fmt::Debug for Nonterminal {
     }
 }
 
+impl Nonterminal {
+    pub fn to_tokenstream(&self, sess: &ParseSess, span: Span) -> TokenStream {
+        // A `Nonterminal` is often a parsed AST item. At this point we now
+        // need to convert the parsed AST to an actual token stream, e.g.
+        // un-parse it basically.
+        //
+        // Unfortunately there's not really a great way to do that in a
+        // guaranteed lossless fashion right now. The fallback here is to just
+        // stringify the AST node and reparse it, but this loses all span
+        // information.
+        //
+        // As a result, some AST nodes are annotated with the token stream they
+        // came from. Here we attempt to extract these lossless token streams
+        // before we fall back to the stringification.
+        let tokens = match *self {
+            Nonterminal::NtItem(ref item) => {
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+            }
+            Nonterminal::NtTraitItem(ref item) => {
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+            }
+            Nonterminal::NtImplItem(ref item) => {
+                prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
+            }
+            Nonterminal::NtIdent(ident, is_raw) => {
+                let token = Token::Ident(ident, is_raw);
+                Some(TokenTree::Token(ident.span, token).into())
+            }
+            Nonterminal::NtLifetime(ident) => {
+                let token = Token::Lifetime(ident);
+                Some(TokenTree::Token(ident.span, token).into())
+            }
+            Nonterminal::NtTT(ref tt) => {
+                Some(tt.clone().into())
+            }
+            _ => None,
+        };
+
+        // FIXME(#43081): Avoid this pretty-print + reparse hack
+        let source = pprust::nonterminal_to_string(self);
+        let filename = FileName::macro_expansion_source_code(&source);
+        let (tokens_for_real, errors) =
+            parse_stream_from_source_str(filename, source, sess, Some(span));
+        emit_unclosed_delims(&errors, &sess.span_diagnostic);
+
+        // During early phases of the compiler the AST could get modified
+        // directly (e.g., attributes added or removed) and the internal cache
+        // of tokens my not be invalidated or updated. Consequently if the
+        // "lossless" token stream disagrees with our actual stringification
+        // (which has historically been much more battle-tested) then we go
+        // with the lossy stream anyway (losing span information).
+        //
+        // Note that the comparison isn't `==` here to avoid comparing spans,
+        // but it *also* is a "probable" equality which is a pretty weird
+        // definition. We mostly want to catch actual changes to the AST
+        // like a `#[cfg]` being processed or some weird `macro_rules!`
+        // expansion.
+        //
+        // What we *don't* want to catch is the fact that a user-defined
+        // literal like `0xf` is stringified as `15`, causing the cached token
+        // stream to not be literal `==` token-wise (ignoring spans) to the
+        // token stream we got from stringification.
+        //
+        // Instead the "probably equal" check here is "does each token
+        // recursively have the same discriminant?" We basically don't look at
+        // the token values here and assume that such fine grained token stream
+        // modifications, including adding/removing typically non-semantic
+        // tokens such as extra braces and commas, don't happen.
+        if let Some(tokens) = tokens {
+            if tokens.probably_equal_for_proc_macro(&tokens_for_real) {
+                return tokens
+            }
+            info!("cached tokens found, but they're not \"probably equal\", \
+                   going with stringified version");
+        }
+        return tokens_for_real
+    }
+}
+
 crate fn is_op(tok: &Token) -> bool {
     match *tok {
         OpenDelim(..) | CloseDelim(..) | Literal(..) | DocComment(..) |

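The "probably equal" comparison that `to_tokenstream` relies on deliberately compares token kinds rather than values, so a cached stream whose literal `0xf` was stringified back as `15` still counts as equal, while a structurally changed AST does not. A toy version of the discriminant idea (the real `probably_equal_for_proc_macro` also recurses into delimited token trees):

use std::mem;

enum Tok {
    Literal(String),
    Pound,
}

fn probably_equal(a: &Tok, b: &Tok) -> bool {
    // Same variant counts as "probably equal", regardless of the payload.
    mem::discriminant(a) == mem::discriminant(b)
}

fn main() {
    assert!(probably_equal(&Tok::Literal("0xf".into()), &Tok::Literal("15".into())));
    assert!(!probably_equal(&Tok::Literal("15".into()), &Tok::Pound));
}
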
@@ -740,52 +722,6 @@ crate fn is_op(tok: &Token) -> bool {
     }
 }
 
-#[derive(Clone)]
-pub struct LazyTokenStream(Lock<Option<TokenStream>>);
-
-impl cmp::Eq for LazyTokenStream {}
-impl PartialEq for LazyTokenStream {
-    fn eq(&self, _other: &LazyTokenStream) -> bool {
-        true
-    }
-}
-
-impl fmt::Debug for LazyTokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        fmt::Debug::fmt(&self.clone().0.into_inner(), f)
-    }
-}
-
-impl LazyTokenStream {
-    pub fn new() -> Self {
-        LazyTokenStream(Lock::new(None))
-    }
-
-    fn force<F: FnOnce() -> TokenStream>(&self, f: F) -> TokenStream {
-        let mut opt_stream = self.0.lock();
-        if opt_stream.is_none() {
-            *opt_stream = Some(f());
-        }
-        opt_stream.clone().unwrap()
-    }
-}
-
-impl Encodable for LazyTokenStream {
-    fn encode<S: Encoder>(&self, _: &mut S) -> Result<(), S::Error> {
-        Ok(())
-    }
-}
-
-impl Decodable for LazyTokenStream {
-    fn decode<D: Decoder>(_: &mut D) -> Result<LazyTokenStream, D::Error> {
-        Ok(LazyTokenStream::new())
-    }
-}
-
-impl ::std::hash::Hash for LazyTokenStream {
-    fn hash<H: ::std::hash::Hasher>(&self, _hasher: &mut H) {}
-}
-
 fn prepend_attrs(sess: &ParseSess,
                  attrs: &[ast::Attribute],
                  tokens: Option<&tokenstream::TokenStream>,

@@ -4,7 +4,7 @@ use crate::ast::{Attribute, MacDelimiter, GenericArg};
 use crate::util::parser::{self, AssocOp, Fixity};
 use crate::attr;
 use crate::source_map::{self, SourceMap, Spanned};
-use crate::parse::token::{self, BinOpToken, Token};
+use crate::parse::token::{self, BinOpToken, Nonterminal, Token};
 use crate::parse::lexer::comments;
 use crate::parse::{self, ParseSess};
 use crate::print::pp::{self, Breaks};

@@ -257,29 +257,33 @@ pub fn token_to_string(tok: &Token) -> String {
         token::Comment => "/* */".to_string(),
         token::Shebang(s) => format!("/* shebang: {}*/", s),
 
-        token::Interpolated(ref nt) => match nt.0 {
-            token::NtExpr(ref e) => expr_to_string(e),
-            token::NtMeta(ref e) => meta_item_to_string(e),
-            token::NtTy(ref e) => ty_to_string(e),
-            token::NtPath(ref e) => path_to_string(e),
-            token::NtItem(ref e) => item_to_string(e),
-            token::NtBlock(ref e) => block_to_string(e),
-            token::NtStmt(ref e) => stmt_to_string(e),
-            token::NtPat(ref e) => pat_to_string(e),
-            token::NtIdent(e, false) => ident_to_string(e),
-            token::NtIdent(e, true) => format!("r#{}", ident_to_string(e)),
-            token::NtLifetime(e) => ident_to_string(e),
-            token::NtLiteral(ref e) => expr_to_string(e),
-            token::NtTT(ref tree) => tt_to_string(tree.clone()),
-            token::NtArm(ref e) => arm_to_string(e),
-            token::NtImplItem(ref e) => impl_item_to_string(e),
-            token::NtTraitItem(ref e) => trait_item_to_string(e),
-            token::NtGenerics(ref e) => generic_params_to_string(&e.params),
-            token::NtWhereClause(ref e) => where_clause_to_string(e),
-            token::NtArg(ref e) => arg_to_string(e),
-            token::NtVis(ref e) => vis_to_string(e),
-            token::NtForeignItem(ref e) => foreign_item_to_string(e),
-        }
-    }
+        token::Interpolated(ref nt) => nonterminal_to_string(nt),
+    }
+}
+
+pub fn nonterminal_to_string(nt: &Nonterminal) -> String {
+    match *nt {
+        token::NtExpr(ref e) => expr_to_string(e),
+        token::NtMeta(ref e) => meta_item_to_string(e),
+        token::NtTy(ref e) => ty_to_string(e),
+        token::NtPath(ref e) => path_to_string(e),
+        token::NtItem(ref e) => item_to_string(e),
+        token::NtBlock(ref e) => block_to_string(e),
+        token::NtStmt(ref e) => stmt_to_string(e),
+        token::NtPat(ref e) => pat_to_string(e),
+        token::NtIdent(e, false) => ident_to_string(e),
+        token::NtIdent(e, true) => format!("r#{}", ident_to_string(e)),
+        token::NtLifetime(e) => ident_to_string(e),
+        token::NtLiteral(ref e) => expr_to_string(e),
+        token::NtTT(ref tree) => tt_to_string(tree.clone()),
+        token::NtArm(ref e) => arm_to_string(e),
+        token::NtImplItem(ref e) => impl_item_to_string(e),
+        token::NtTraitItem(ref e) => trait_item_to_string(e),
+        token::NtGenerics(ref e) => generic_params_to_string(&e.params),
+        token::NtWhereClause(ref e) => where_clause_to_string(e),
+        token::NtArg(ref e) => arg_to_string(e),
+        token::NtVis(ref e) => vis_to_string(e),
+        token::NtForeignItem(ref e) => foreign_item_to_string(e),
+    }
 }

@@ -72,7 +72,7 @@ impl TokenTree {
         }
     }
 
-    // See comments in `interpolated_to_tokenstream` for why we care about
+    // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     //
     // This is otherwise the same as `eq_unspanned`, only recursing with a

@@ -310,7 +310,7 @@ impl TokenStream {
         t1.next().is_none() && t2.next().is_none()
     }
 
-    // See comments in `interpolated_to_tokenstream` for why we care about
+    // See comments in `Nonterminal::to_tokenstream` for why we care about
     // *probably* equal here rather than actual equality
     //
    // This is otherwise the same as `eq_unspanned`, only recursing with a

@@ -2,6 +2,7 @@ use crate::proc_macro_impl::EXEC_STRATEGY;
 use crate::proc_macro_server;
 
 use errors::FatalError;
+use rustc_data_structures::sync::Lrc;
 use syntax::ast::{self, ItemKind, Attribute, Mac};
 use syntax::attr::{mark_used, mark_known};
 use syntax::source_map::Span;

|
||||||
// Mark attributes as known, and used.
|
// Mark attributes as known, and used.
|
||||||
MarkAttrs(&self.attrs).visit_item(&item);
|
MarkAttrs(&self.attrs).visit_item(&item);
|
||||||
|
|
||||||
let token = Token::interpolated(token::NtItem(item));
|
let token = Token::Interpolated(Lrc::new(token::NtItem(item)));
|
||||||
let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
|
let input = tokenstream::TokenTree::Token(DUMMY_SP, token).into();
|
||||||
|
|
||||||
let server = proc_macro_server::Rustc::new(ecx);
|
let server = proc_macro_server::Rustc::new(ecx);
|
||||||
|
|
|
@@ -178,8 +178,8 @@ impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
                 tt!(Punct::new('#', false))
             }
 
-            Interpolated(_) => {
-                let stream = token.interpolated_to_tokenstream(sess, span);
+            Interpolated(nt) => {
+                let stream = nt.to_tokenstream(sess, span);
                 TokenTree::Group(Group {
                     delimiter: Delimiter::None,
                     stream,