Introduce `TtHandle` and use it in `TokenSet`.
This removes the last use of `<mbe::TokenTree as Clone>`. It also removes two trivial methods on `Delimited`.
parent 2657d8f7b3
commit 75fd391aaa
3 changed files with 111 additions and 53 deletions
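The core of the change below is that `TokenSet` stops storing owned clones of `mbe::TokenTree`s and instead stores cheap handles that mostly borrow from the matcher. As a rough sketch of that shape (simplified stand-in types only, nothing here is the actual rustc code), a de-duplicating set can hold borrows instead of clones:

    // Simplified stand-in types, not the rustc ones: a de-duplicating set
    // that stores borrows of its elements instead of owned clones, so the
    // element type does not have to implement `Clone`.

    #[derive(Debug, PartialEq)]
    struct BigElem(Vec<String>); // deliberately not `Clone`

    #[derive(Debug, Default)]
    struct Set<'a> {
        items: Vec<&'a BigElem>, // was conceptually `Vec<BigElem>`, which forced cloning
    }

    impl<'a> Set<'a> {
        fn add_one(&mut self, item: &'a BigElem) {
            // De-duplicate by comparing the borrowed elements.
            if !self.items.contains(&item) {
                self.items.push(item);
            }
        }
    }

    fn main() {
        let a = BigElem(vec!["a".to_string()]);
        let b = BigElem(vec!["b".to_string()]);
        let mut set = Set::default();
        set.add_one(&a);
        set.add_one(&a); // de-duplicated without cloning `BigElem`
        set.add_one(&b);
        assert_eq!(set.items.len(), 2);
    }

Once the set holds borrows, its element type no longer needs to implement `Clone`, which is what allows the last use of `<mbe::TokenTree as Clone>` to be removed.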
@@ -26,18 +26,6 @@ struct Delimited {
     tts: Vec<TokenTree>,
 }
 
-impl Delimited {
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
-    fn open_tt(&self, span: DelimSpan) -> TokenTree {
-        TokenTree::token(token::OpenDelim(self.delim), span.open)
-    }
-
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
-    fn close_tt(&self, span: DelimSpan) -> TokenTree {
-        TokenTree::token(token::CloseDelim(self.delim), span.close)
-    }
-}
-
 #[derive(PartialEq, Encodable, Decodable, Debug)]
 struct SequenceRepetition {
     /// The sequence of token trees
@@ -142,10 +142,13 @@ pub(super) fn compute_locs(sess: &ParseSess, matcher: &[TokenTree]) -> Vec<Match
                 locs.push(MatcherLoc::Token { token: token.clone() });
             }
             TokenTree::Delimited(span, delimited) => {
+                let open_token = Token::new(token::OpenDelim(delimited.delim), span.open);
+                let close_token = Token::new(token::CloseDelim(delimited.delim), span.close);
+
                 locs.push(MatcherLoc::Delimited);
-                inner(sess, &[delimited.open_tt(*span)], locs, next_metavar, seq_depth);
+                locs.push(MatcherLoc::Token { token: open_token });
                 inner(sess, &delimited.tts, locs, next_metavar, seq_depth);
-                inner(sess, &[delimited.close_tt(*span)], locs, next_metavar, seq_depth);
+                locs.push(MatcherLoc::Token { token: close_token });
             }
             TokenTree::Sequence(_, seq) => {
                 // We can't determine `idx_first_after` and construct the final
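For context, this hunk stops synthesizing throwaway token trees for the delimiters (via the now-removed `Delimited::open_tt`/`close_tt` helpers) and instead builds the open and close `Token`s directly and pushes them as matcher locations. A minimal standalone sketch of that flattening idea, with simplified stand-in types rather than the rustc ones:

    // Hypothetical stand-in types; only the flattening idea mirrors the hunk above.
    #[derive(Clone, Copy, Debug, PartialEq)]
    enum Delim {
        Paren,
        Bracket,
    }

    #[derive(Clone, Debug, PartialEq)]
    enum Tok {
        Open(Delim),
        Close(Delim),
        Ident(&'static str),
    }

    #[derive(Debug)]
    enum Tree {
        Token(Tok),
        Delimited(Delim, Vec<Tree>),
    }

    // Push an explicit open token, the recursively flattened contents, then an
    // explicit close token, instead of synthesizing temporary trees.
    fn flatten(trees: &[Tree], out: &mut Vec<Tok>) {
        for tree in trees {
            match tree {
                Tree::Token(tok) => out.push(tok.clone()),
                Tree::Delimited(delim, inner) => {
                    out.push(Tok::Open(*delim));
                    flatten(inner, out);
                    out.push(Tok::Close(*delim));
                }
            }
        }
    }

    fn main() {
        let matcher = [
            Tree::Delimited(Delim::Paren, vec![Tree::Token(Tok::Ident("x"))]),
            Tree::Delimited(Delim::Bracket, vec![]),
        ];
        let mut out = Vec::new();
        flatten(&matcher, &mut out);
        assert_eq!(
            out,
            [
                Tok::Open(Delim::Paren),
                Tok::Ident("x"),
                Tok::Close(Delim::Paren),
                Tok::Open(Delim::Bracket),
                Tok::Close(Delim::Bracket),
            ]
        );
    }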
@@ -8,7 +8,7 @@ use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
 use crate::mbe::transcribe::transcribe;
 
 use rustc_ast as ast;
-use rustc_ast::token::{self, NonterminalKind, Token, TokenKind::*};
+use rustc_ast::token::{self, NonterminalKind, Token, TokenKind, TokenKind::*};
 use rustc_ast::tokenstream::{DelimSpan, TokenStream};
 use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast_pretty::pprust;
@@ -658,18 +658,18 @@ fn check_matcher(
 // that do not try to inject artificial span information. My plan is
 // to try to catch such cases ahead of time and not include them in
 // the precomputed mapping.)
-struct FirstSets {
+struct FirstSets<'tt> {
     // this maps each TokenTree::Sequence `$(tt ...) SEP OP` that is uniquely identified by its
     // span in the original matcher to the First set for the inner sequence `tt ...`.
     //
     // If two sequences have the same span in a matcher, then map that
     // span to None (invalidating the mapping here and forcing the code to
     // use a slow path).
-    first: FxHashMap<Span, Option<TokenSet>>,
+    first: FxHashMap<Span, Option<TokenSet<'tt>>>,
 }
 
-impl FirstSets {
-    fn new(tts: &[mbe::TokenTree]) -> FirstSets {
+impl<'tt> FirstSets<'tt> {
+    fn new(tts: &'tt [mbe::TokenTree]) -> FirstSets<'tt> {
         use mbe::TokenTree;
 
         let mut sets = FirstSets { first: FxHashMap::default() };
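The `'tt` lifetime added here ties `FirstSets` (and, further down, `TokenSet` and `check_matcher_core`) to the matcher slice it was built from, so the analysis can keep borrows instead of clones. A small generic sketch of this pattern, using illustrative names rather than the rustc API:

    // Illustrative names only, not the rustc API: an analysis borrows its input
    // slice for 'tt and returns a result that holds borrows with that same
    // lifetime, so nothing in the slice needs to be cloned.

    #[derive(Debug)]
    struct Analysis<'tt> {
        // Entries that point back into the input slice.
        metavars: Vec<&'tt str>,
    }

    fn analyze<'tt>(matcher: &'tt [String]) -> Analysis<'tt> {
        let mut result = Analysis { metavars: Vec::new() };
        for item in matcher {
            if item.starts_with('$') {
                result.metavars.push(item.as_str());
            }
        }
        result
    }

    fn main() {
        let matcher = vec!["$x".to_string(), "+".to_string(), "$y".to_string()];
        let analysis = analyze(&matcher);
        // `analysis` remains valid for as long as `matcher` is alive.
        assert_eq!(analysis.metavars, ["$x", "$y"]);
    }

The returned value stays usable exactly as long as the input slice is borrowed, which is the guarantee the `'tt` parameter expresses.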
@@ -679,7 +679,7 @@ impl FirstSets {
         // walks backward over `tts`, returning the FIRST for `tts`
         // and updating `sets` at the same time for all sequence
         // substructure we find within `tts`.
-        fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
+        fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
             let mut first = TokenSet::empty();
             for tt in tts.iter().rev() {
                 match *tt {
@@ -687,11 +687,14 @@ impl FirstSets {
                     | TokenTree::MetaVar(..)
                     | TokenTree::MetaVarDecl(..)
                     | TokenTree::MetaVarExpr(..) => {
-                        first.replace_with(tt.clone());
+                        first.replace_with(TtHandle::TtRef(tt));
                     }
                     TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts);
-                        first.replace_with(delimited.open_tt(span));
+                        first.replace_with(TtHandle::from_token_kind(
+                            token::OpenDelim(delimited.delim),
+                            span.open,
+                        ));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts);
@@ -715,7 +718,7 @@ impl FirstSets {
                         // token could be the separator token itself.
 
                         if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sep.clone()));
+                            first.add_one_maybe(TtHandle::from_token(sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -741,7 +744,7 @@ impl FirstSets {
 
     // walks forward over `tts` until all potential FIRST tokens are
     // identified.
-    fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
+    fn first(&self, tts: &'tt [mbe::TokenTree]) -> TokenSet<'tt> {
         use mbe::TokenTree;
 
         let mut first = TokenSet::empty();
@@ -752,11 +755,14 @@ impl FirstSets {
                 | TokenTree::MetaVar(..)
                 | TokenTree::MetaVarDecl(..)
                 | TokenTree::MetaVarExpr(..) => {
-                    first.add_one(tt.clone());
+                    first.add_one(TtHandle::TtRef(tt));
                     return first;
                 }
                 TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one(delimited.open_tt(span));
+                    first.add_one(TtHandle::from_token_kind(
+                        token::OpenDelim(delimited.delim),
+                        span.open,
+                    ));
                     return first;
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {
@@ -775,7 +781,7 @@ impl FirstSets {
                     // If the sequence contents can be empty, then the first
                     // token could be the separator token itself.
                     if let (Some(sep), true) = (&seq_rep.separator, subfirst.maybe_empty) {
-                        first.add_one_maybe(TokenTree::Token(sep.clone()));
+                        first.add_one_maybe(TtHandle::from_token(sep.clone()));
                     }
 
                     assert!(first.maybe_empty);
@@ -803,6 +809,62 @@ impl FirstSets {
     }
 }
 
+// Most `mbe::TokenTree`s are pre-existing in the matcher, but some are defined
+// implicitly, such as opening/closing delimiters and sequence repetition ops.
+// This type encapsulates both kinds. It implements `Clone` while avoiding the
+// need for `mbe::TokenTree` to implement `Clone`.
+#[derive(Debug)]
+enum TtHandle<'tt> {
+    /// This is used in most cases.
+    TtRef(&'tt mbe::TokenTree),
+
+    /// This is only used for implicit token trees. The `mbe::TokenTree` *must*
+    /// be `mbe::TokenTree::Token`. No other variants are allowed. We store an
+    /// `mbe::TokenTree` rather than a `Token` so that `get()` can return a
+    /// `&mbe::TokenTree`.
+    Token(mbe::TokenTree),
+}
+
+impl<'tt> TtHandle<'tt> {
+    fn from_token(tok: Token) -> Self {
+        TtHandle::Token(mbe::TokenTree::Token(tok))
+    }
+
+    fn from_token_kind(kind: TokenKind, span: Span) -> Self {
+        TtHandle::from_token(Token::new(kind, span))
+    }
+
+    // Get a reference to a token tree.
+    fn get(&'tt self) -> &'tt mbe::TokenTree {
+        match self {
+            TtHandle::TtRef(tt) => tt,
+            TtHandle::Token(token_tt) => &token_tt,
+        }
+    }
+}
+
+impl<'tt> PartialEq for TtHandle<'tt> {
+    fn eq(&self, other: &TtHandle<'tt>) -> bool {
+        self.get() == other.get()
+    }
+}
+
+impl<'tt> Clone for TtHandle<'tt> {
+    fn clone(&self) -> Self {
+        match self {
+            TtHandle::TtRef(tt) => TtHandle::TtRef(tt),
+
+            // This variant *must* contain a `mbe::TokenTree::Token`, and not
+            // any other variant of `mbe::TokenTree`.
+            TtHandle::Token(mbe::TokenTree::Token(tok)) => {
+                TtHandle::Token(mbe::TokenTree::Token(tok.clone()))
+            }
+
+            _ => unreachable!(),
+        }
+    }
+}
+
 // A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
 // (for macro-by-example syntactic variables). It also carries the
 // `maybe_empty` flag; that is true if and only if the matcher can
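The new `TtHandle` is a borrowed-or-owned handle: most handles borrow a token tree that already exists in the matcher, while implicitly created tokens (delimiters, separators) are stored as small owned values. A standalone sketch of the same pattern with simplified stand-in types (not `mbe::TokenTree` itself):

    // Simplified stand-in types; only the handle pattern mirrors `TtHandle`.
    #[derive(Debug, PartialEq)]
    enum Tree {
        Token(String),    // cheap owned leaf
        Group(Vec<Tree>), // potentially large; deliberately not `Clone`
    }

    #[derive(Debug)]
    enum Handle<'tt> {
        Ref(&'tt Tree), // borrows a tree that already exists
        Token(Tree),    // owns an implicit tree; only ever `Tree::Token`
    }

    impl<'tt> Handle<'tt> {
        fn get(&self) -> &Tree {
            match self {
                Handle::Ref(tt) => tt,
                Handle::Token(tt) => tt,
            }
        }
    }

    // Equality looks through the handle at the underlying tree.
    impl<'tt> PartialEq for Handle<'tt> {
        fn eq(&self, other: &Self) -> bool {
            self.get() == other.get()
        }
    }

    impl<'tt> Clone for Handle<'tt> {
        fn clone(&self) -> Self {
            match self {
                Handle::Ref(tt) => Handle::Ref(*tt),
                // Only the cheap `Token` variant is ever stored owned, so cloning
                // copies a `String`, never a whole `Group` subtree.
                Handle::Token(Tree::Token(s)) => Handle::Token(Tree::Token(s.clone())),
                Handle::Token(_) => unreachable!("owned handles hold only `Tree::Token`"),
            }
        }
    }

    fn main() {
        let group = Tree::Group(vec![Tree::Token("x".into())]);
        let borrowed = Handle::Ref(&group);
        let implicit = Handle::Token(Tree::Token("(".into()));
        assert_eq!(borrowed.clone(), borrowed);
        assert_ne!(borrowed, implicit.clone());
    }

Because only the cheap token variant is ever stored owned, cloning a handle never requires `Clone` on the large tree type; and since equality looks through the handle, de-duplication in a set of handles still behaves as if it held the trees themselves.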
@@ -814,12 +876,12 @@ impl FirstSets {
 //
 // (Notably, we must allow for *-op to occur zero times.)
 #[derive(Clone, Debug)]
-struct TokenSet {
-    tokens: Vec<mbe::TokenTree>,
+struct TokenSet<'tt> {
+    tokens: Vec<TtHandle<'tt>>,
     maybe_empty: bool,
 }
 
-impl TokenSet {
+impl<'tt> TokenSet<'tt> {
     // Returns a set for the empty sequence.
     fn empty() -> Self {
         TokenSet { tokens: Vec::new(), maybe_empty: true }
@@ -827,15 +889,15 @@ impl TokenSet {
 
     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
-    fn singleton(tok: mbe::TokenTree) -> Self {
-        TokenSet { tokens: vec![tok], maybe_empty: false }
+    fn singleton(tt: TtHandle<'tt>) -> Self {
+        TokenSet { tokens: vec![tt], maybe_empty: false }
     }
 
     // Changes self to be the set `{ tok }`.
     // Since `tok` is always present, marks self as non-empty.
-    fn replace_with(&mut self, tok: mbe::TokenTree) {
+    fn replace_with(&mut self, tt: TtHandle<'tt>) {
         self.tokens.clear();
-        self.tokens.push(tok);
+        self.tokens.push(tt);
         self.maybe_empty = false;
     }
 
@@ -848,17 +910,17 @@ impl TokenSet {
     }
 
     // Adds `tok` to the set for `self`, marking sequence as non-empy.
-    fn add_one(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
+    fn add_one(&mut self, tt: TtHandle<'tt>) {
+        if !self.tokens.contains(&tt) {
+            self.tokens.push(tt);
         }
         self.maybe_empty = false;
     }
 
     // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-    fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
-        if !self.tokens.contains(&tok) {
-            self.tokens.push(tok);
+    fn add_one_maybe(&mut self, tt: TtHandle<'tt>) {
+        if !self.tokens.contains(&tt) {
+            self.tokens.push(tt);
         }
     }
 
@@ -870,9 +932,9 @@ impl TokenSet {
     // setting of the empty flag of `self`. If `other` is guaranteed
     // non-empty, then `self` is marked non-empty.
     fn add_all(&mut self, other: &Self) {
-        for tok in &other.tokens {
-            if !self.tokens.contains(tok) {
-                self.tokens.push(tok.clone());
+        for tt in &other.tokens {
+            if !self.tokens.contains(tt) {
+                self.tokens.push(tt.clone());
             }
         }
         if !other.maybe_empty {
@@ -892,14 +954,14 @@ impl TokenSet {
 //
 // Requires that `first_sets` is pre-computed for `matcher`;
 // see `FirstSets::new`.
-fn check_matcher_core(
+fn check_matcher_core<'tt>(
     sess: &ParseSess,
     features: &Features,
     def: &ast::Item,
-    first_sets: &FirstSets,
-    matcher: &[mbe::TokenTree],
-    follow: &TokenSet,
-) -> TokenSet {
+    first_sets: &FirstSets<'tt>,
+    matcher: &'tt [mbe::TokenTree],
+    follow: &TokenSet<'tt>,
+) -> TokenSet<'tt> {
     use mbe::TokenTree;
 
     let mut last = TokenSet::empty();
|
@ -938,12 +1000,15 @@ fn check_matcher_core(
|
||||||
// followed by anything against SUFFIX.
|
// followed by anything against SUFFIX.
|
||||||
continue 'each_token;
|
continue 'each_token;
|
||||||
} else {
|
} else {
|
||||||
last.replace_with(token.clone());
|
last.replace_with(TtHandle::TtRef(token));
|
||||||
suffix_first = build_suffix_first();
|
suffix_first = build_suffix_first();
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
TokenTree::Delimited(span, ref d) => {
|
TokenTree::Delimited(span, ref d) => {
|
||||||
let my_suffix = TokenSet::singleton(d.close_tt(span));
|
let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
|
||||||
|
token::CloseDelim(d.delim),
|
||||||
|
span.close,
|
||||||
|
));
|
||||||
check_matcher_core(sess, features, def, first_sets, &d.tts, &my_suffix);
|
check_matcher_core(sess, features, def, first_sets, &d.tts, &my_suffix);
|
||||||
// don't track non NT tokens
|
// don't track non NT tokens
|
||||||
last.replace_with_irrelevant();
|
last.replace_with_irrelevant();
|
||||||
|
@ -967,7 +1032,7 @@ fn check_matcher_core(
|
||||||
let mut new;
|
let mut new;
|
||||||
let my_suffix = if let Some(sep) = &seq_rep.separator {
|
let my_suffix = if let Some(sep) = &seq_rep.separator {
|
||||||
new = suffix_first.clone();
|
new = suffix_first.clone();
|
||||||
new.add_one_maybe(TokenTree::Token(sep.clone()));
|
new.add_one_maybe(TtHandle::from_token(sep.clone()));
|
||||||
&new
|
&new
|
||||||
} else {
|
} else {
|
||||||
&suffix_first
|
&suffix_first
|
||||||
|
@ -994,9 +1059,11 @@ fn check_matcher_core(
|
||||||
|
|
||||||
// Now `last` holds the complete set of NT tokens that could
|
// Now `last` holds the complete set of NT tokens that could
|
||||||
// end the sequence before SUFFIX. Check that every one works with `suffix`.
|
// end the sequence before SUFFIX. Check that every one works with `suffix`.
|
||||||
for token in &last.tokens {
|
for tt in &last.tokens {
|
||||||
if let TokenTree::MetaVarDecl(span, name, Some(kind)) = *token {
|
if let &TokenTree::MetaVarDecl(span, name, Some(kind)) = tt.get() {
|
||||||
for next_token in &suffix_first.tokens {
|
for next_token in &suffix_first.tokens {
|
||||||
|
let next_token = next_token.get();
|
||||||
|
|
||||||
// Check if the old pat is used and the next token is `|`
|
// Check if the old pat is used and the next token is `|`
|
||||||
// to warn about incompatibility with Rust 2021.
|
// to warn about incompatibility with Rust 2021.
|
||||||
// We only emit this lint if we're parsing the original
|
// We only emit this lint if we're parsing the original
|
||||||