pull mbe token tree definition up
parent 636b3543c2
commit 9fd75f5287

7 changed files with 243 additions and 234 deletions

@@ -8,3 +8,159 @@ crate mod macro_check;
 crate mod macro_parser;
 crate mod macro_rules;
 crate mod quoted;
+
+use crate::ast;
+use crate::parse::token::{self, Token, TokenKind};
+use crate::tokenstream::{DelimSpan};
+
+use syntax_pos::{BytePos, Span};
+
+use rustc_data_structures::sync::Lrc;
+
+/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
+/// that the delimiter itself might be `NoDelim`.
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+crate struct Delimited {
+crate delim: token::DelimToken,
+crate tts: Vec<TokenTree>,
+}
+
+impl Delimited {
+/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
+crate fn open_tt(&self, span: Span) -> TokenTree {
+let open_span = if span.is_dummy() {
+span
+} else {
+span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
+};
+TokenTree::token(token::OpenDelim(self.delim), open_span)
+}
+
+/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
+crate fn close_tt(&self, span: Span) -> TokenTree {
+let close_span = if span.is_dummy() {
+span
+} else {
+span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
+};
+TokenTree::token(token::CloseDelim(self.delim), close_span)
+}
+}
+
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+crate struct SequenceRepetition {
+/// The sequence of token trees
+crate tts: Vec<TokenTree>,
+/// The optional separator
+crate separator: Option<Token>,
+/// Whether the sequence can be repeated zero (*), or one or more times (+)
+crate kleene: KleeneToken,
+/// The number of `Match`s that appear in the sequence (and subsequences)
+crate num_captures: usize,
+}
+
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
+crate struct KleeneToken {
+crate span: Span,
+crate op: KleeneOp,
+}
+
+impl KleeneToken {
+crate fn new(op: KleeneOp, span: Span) -> KleeneToken {
+KleeneToken { span, op }
+}
+}
+
+/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
+/// for token sequences.
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+crate enum KleeneOp {
+/// Kleene star (`*`) for zero or more repetitions
+ZeroOrMore,
+/// Kleene plus (`+`) for one or more repetitions
+OneOrMore,
+/// Kleene optional (`?`) for zero or one reptitions
+ZeroOrOne,
+}
+
+/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
+/// are "first-class" token trees. Useful for parsing macros.
+#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
+crate enum TokenTree {
+Token(Token),
+Delimited(DelimSpan, Lrc<Delimited>),
+/// A kleene-style repetition sequence
+Sequence(DelimSpan, Lrc<SequenceRepetition>),
+/// e.g., `$var`
+MetaVar(Span, ast::Ident),
+/// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
+MetaVarDecl(
+Span,
+ast::Ident, /* name to bind */
+ast::Ident, /* kind of nonterminal */
+),
+}
+
+impl TokenTree {
+/// Return the number of tokens in the tree.
+crate fn len(&self) -> usize {
+match *self {
+TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+token::NoDelim => delimed.tts.len(),
+_ => delimed.tts.len() + 2,
+},
+TokenTree::Sequence(_, ref seq) => seq.tts.len(),
+_ => 0,
+}
+}
+
+/// Returns `true` if the given token tree is delimited.
+crate fn is_delimited(&self) -> bool {
+match *self {
+TokenTree::Delimited(..) => true,
+_ => false,
+}
+}
+
+/// Returns `true` if the given token tree is a token of the given kind.
+crate fn is_token(&self, expected_kind: &TokenKind) -> bool {
+match self {
+TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
+_ => false,
+}
+}
+
+/// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
+crate fn get_tt(&self, index: usize) -> TokenTree {
+match (self, index) {
+(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
+delimed.tts[index].clone()
+}
+(&TokenTree::Delimited(span, ref delimed), _) => {
+if index == 0 {
+return delimed.open_tt(span.open);
+}
+if index == delimed.tts.len() + 1 {
+return delimed.close_tt(span.close);
+}
+delimed.tts[index - 1].clone()
+}
+(&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
+_ => panic!("Cannot expand a token tree"),
+}
+}
+
+/// Retrieves the `TokenTree`'s span.
+crate fn span(&self) -> Span {
+match *self {
+TokenTree::Token(Token { span, .. })
+| TokenTree::MetaVar(span, _)
+| TokenTree::MetaVarDecl(span, _, _) => span,
+TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
+}
+}
+
+crate fn token(kind: TokenKind, span: Span) -> TokenTree {
+TokenTree::Token(Token::new(kind, span))
+}
+}
@@ -106,7 +106,7 @@
 //! bound.
 use crate::ast::NodeId;
 use crate::early_buffered_lints::BufferedEarlyLintId;
-use crate::ext::mbe::quoted::{KleeneToken, TokenTree};
+use crate::ext::mbe::{KleeneToken, TokenTree};
 use crate::parse::token::TokenKind;
 use crate::parse::token::{DelimToken, Token};
 use crate::parse::ParseSess;
@@ -75,7 +75,7 @@ crate use ParseResult::*;
 use TokenTreeOrTokenTreeSlice::*;

 use crate::ast::{Ident, Name};
-use crate::ext::mbe::quoted::{self, TokenTree};
+use crate::ext::mbe::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
 use crate::parse::token::{self, DocComment, Nonterminal, Token};
@@ -195,7 +195,7 @@ struct MatcherPos<'root, 'tt> {
 // `None`.

 /// The KleeneOp of this sequence if we are in a repetition.
-seq_op: Option<quoted::KleeneOp>,
+seq_op: Option<mbe::KleeneOp>,

 /// The separator if we are in a repetition.
 sep: Option<Token>,
@@ -532,7 +532,7 @@ fn inner_parse_loop<'root, 'tt>(
 }
 // We don't need a separator. Move the "dot" back to the beginning of the matcher
 // and try to match again UNLESS we are only allowed to have _one_ repetition.
-else if item.seq_op != Some(quoted::KleeneOp::ZeroOrOne) {
+else if item.seq_op != Some(mbe::KleeneOp::ZeroOrOne) {
 item.match_cur = item.match_lo;
 item.idx = 0;
 cur_items.push(item);
@@ -555,8 +555,8 @@ fn inner_parse_loop<'root, 'tt>(
 // implicitly disallowing OneOrMore from having 0 matches here. Thus, that will
 // result in a "no rules expected token" error by virtue of this matcher not
 // working.
-if seq.kleene.op == quoted::KleeneOp::ZeroOrMore
-|| seq.kleene.op == quoted::KleeneOp::ZeroOrOne
+if seq.kleene.op == mbe::KleeneOp::ZeroOrMore
+|| seq.kleene.op == mbe::KleeneOp::ZeroOrOne
 {
 let mut new_item = item.clone();
 new_item.match_cur += seq.num_captures;
@@ -4,11 +4,11 @@ use crate::edition::Edition;
 use crate::ext::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander};
 use crate::ext::base::{SyntaxExtension, SyntaxExtensionKind};
 use crate::ext::expand::{AstFragment, AstFragmentKind};
+use crate::ext::mbe;
 use crate::ext::mbe::macro_check;
 use crate::ext::mbe::macro_parser::{parse, parse_failure_msg};
 use crate::ext::mbe::macro_parser::{Error, Failure, Success};
 use crate::ext::mbe::macro_parser::{MatchedNonterminal, MatchedSeq};
-use crate::ext::mbe::quoted;
 use crate::ext::mbe::transcribe::transcribe;
 use crate::feature_gate::Features;
 use crate::parse::parser::Parser;
@@ -135,8 +135,8 @@ struct MacroRulesMacroExpander {
 name: ast::Ident,
 span: Span,
 transparency: Transparency,
-lhses: Vec<quoted::TokenTree>,
-rhses: Vec<quoted::TokenTree>,
+lhses: Vec<mbe::TokenTree>,
+rhses: Vec<mbe::TokenTree>,
 valid: bool,
 }

@@ -169,8 +169,8 @@ fn generic_extension<'cx>(
 name: ast::Ident,
 transparency: Transparency,
 arg: TokenStream,
-lhses: &[quoted::TokenTree],
-rhses: &[quoted::TokenTree],
+lhses: &[mbe::TokenTree],
+rhses: &[mbe::TokenTree],
 ) -> Box<dyn MacResult + 'cx> {
 if cx.trace_macros() {
 trace_macros_note(cx, sp, format!("expanding `{}! {{ {} }}`", name, arg));
@@ -182,7 +182,7 @@ fn generic_extension<'cx>(
 for (i, lhs) in lhses.iter().enumerate() {
 // try each arm's matchers
 let lhs_tt = match *lhs {
-quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
+mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
 _ => cx.span_bug(sp, "malformed macro lhs"),
 };

@@ -190,7 +190,7 @@ fn generic_extension<'cx>(
 Success(named_matches) => {
 let rhs = match rhses[i] {
 // ignore delimiters
-quoted::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
+mbe::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
 _ => cx.span_bug(sp, "malformed macro rhs"),
 };
 let arm_span = rhses[i].span();
@@ -258,7 +258,7 @@ fn generic_extension<'cx>(
 for lhs in lhses {
 // try each arm's matchers
 let lhs_tt = match *lhs {
-quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
+mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
 _ => continue,
 };
 match TokenTree::parse(cx, lhs_tt, arg.clone()) {
@@ -312,32 +312,32 @@ pub fn compile_declarative_macro(
 // ...quasiquoting this would be nice.
 // These spans won't matter, anyways
 let argument_gram = vec![
-quoted::TokenTree::Sequence(
+mbe::TokenTree::Sequence(
 DelimSpan::dummy(),
-Lrc::new(quoted::SequenceRepetition {
+Lrc::new(mbe::SequenceRepetition {
 tts: vec![
-quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
-quoted::TokenTree::token(token::FatArrow, def.span),
-quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
+mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
+mbe::TokenTree::token(token::FatArrow, def.span),
+mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
 ],
 separator: Some(Token::new(
 if body.legacy { token::Semi } else { token::Comma },
 def.span,
 )),
-kleene: quoted::KleeneToken::new(quoted::KleeneOp::OneOrMore, def.span),
+kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
 num_captures: 2,
 }),
 ),
 // to phase into semicolon-termination instead of semicolon-separation
-quoted::TokenTree::Sequence(
+mbe::TokenTree::Sequence(
 DelimSpan::dummy(),
-Lrc::new(quoted::SequenceRepetition {
-tts: vec![quoted::TokenTree::token(
+Lrc::new(mbe::SequenceRepetition {
+tts: vec![mbe::TokenTree::token(
 if body.legacy { token::Semi } else { token::Comma },
 def.span,
 )],
 separator: None,
-kleene: quoted::KleeneToken::new(quoted::KleeneOp::ZeroOrMore, def.span),
+kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
 num_captures: 0,
 }),
 ),
@@ -367,7 +367,7 @@ pub fn compile_declarative_macro(
 .map(|m| {
 if let MatchedNonterminal(ref nt) = *m {
 if let NtTT(ref tt) = **nt {
-let tt = quoted::parse(
+let tt = mbe::quoted::parse(
 tt.clone().into(),
 true,
 sess,
@@ -384,7 +384,7 @@ pub fn compile_declarative_macro(
 }
 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
 })
-.collect::<Vec<quoted::TokenTree>>(),
+.collect::<Vec<mbe::TokenTree>>(),
 _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
 };

@@ -394,7 +394,7 @@ pub fn compile_declarative_macro(
 .map(|m| {
 if let MatchedNonterminal(ref nt) = *m {
 if let NtTT(ref tt) = **nt {
-return quoted::parse(
+return mbe::quoted::parse(
 tt.clone().into(),
 false,
 sess,
@@ -409,7 +409,7 @@ pub fn compile_declarative_macro(
 }
 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
 })
-.collect::<Vec<quoted::TokenTree>>(),
+.collect::<Vec<mbe::TokenTree>>(),
 _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs"),
 };

@@ -454,11 +454,11 @@ fn check_lhs_nt_follows(
 sess: &ParseSess,
 features: &Features,
 attrs: &[ast::Attribute],
-lhs: &quoted::TokenTree,
+lhs: &mbe::TokenTree,
 ) -> bool {
 // lhs is going to be like TokenTree::Delimited(...), where the
 // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
-if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
+if let mbe::TokenTree::Delimited(_, ref tts) = *lhs {
 check_matcher(sess, features, attrs, &tts.tts)
 } else {
 let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
@@ -471,8 +471,8 @@ fn check_lhs_nt_follows(

 /// Checks that the lhs contains no repetition which could match an empty token
 /// tree, because then the matcher would hang indefinitely.
-fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
-use quoted::TokenTree;
+fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
+use mbe::TokenTree;
 for tt in tts {
 match *tt {
 TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (),
@@ -486,8 +486,8 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
 && seq.tts.iter().all(|seq_tt| match *seq_tt {
 TokenTree::MetaVarDecl(_, _, id) => id.name == sym::vis,
 TokenTree::Sequence(_, ref sub_seq) => {
-sub_seq.kleene.op == quoted::KleeneOp::ZeroOrMore
-|| sub_seq.kleene.op == quoted::KleeneOp::ZeroOrOne
+sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
+|| sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
 }
 _ => false,
 })
@@ -506,9 +506,9 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
 true
 }

-fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool {
+fn check_rhs(sess: &ParseSess, rhs: &mbe::TokenTree) -> bool {
 match *rhs {
-quoted::TokenTree::Delimited(..) => return true,
+mbe::TokenTree::Delimited(..) => return true,
 _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited"),
 }
 false
@@ -518,7 +518,7 @@ fn check_matcher(
 sess: &ParseSess,
 features: &Features,
 attrs: &[ast::Attribute],
-matcher: &[quoted::TokenTree],
+matcher: &[mbe::TokenTree],
 ) -> bool {
 let first_sets = FirstSets::new(matcher);
 let empty_suffix = TokenSet::empty();
@@ -550,8 +550,8 @@ struct FirstSets {
 }

 impl FirstSets {
-fn new(tts: &[quoted::TokenTree]) -> FirstSets {
-use quoted::TokenTree;
+fn new(tts: &[mbe::TokenTree]) -> FirstSets {
+use mbe::TokenTree;

 let mut sets = FirstSets { first: FxHashMap::default() };
 build_recur(&mut sets, tts);
@@ -598,8 +598,8 @@ impl FirstSets {

 // Reverse scan: Sequence comes before `first`.
 if subfirst.maybe_empty
-|| seq_rep.kleene.op == quoted::KleeneOp::ZeroOrMore
-|| seq_rep.kleene.op == quoted::KleeneOp::ZeroOrOne
+|| seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
+|| seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
 {
 // If sequence is potentially empty, then
 // union them (preserving first emptiness).
@@ -619,8 +619,8 @@ impl FirstSets {

 // walks forward over `tts` until all potential FIRST tokens are
 // identified.
-fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
-use quoted::TokenTree;
+fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
+use mbe::TokenTree;

 let mut first = TokenSet::empty();
 for tt in tts.iter() {
@@ -656,8 +656,8 @@ impl FirstSets {
 assert!(first.maybe_empty);
 first.add_all(subfirst);
 if subfirst.maybe_empty
-|| seq_rep.kleene.op == quoted::KleeneOp::ZeroOrMore
-|| seq_rep.kleene.op == quoted::KleeneOp::ZeroOrOne
+|| seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
+|| seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
 {
 // Continue scanning for more first
 // tokens, but also make sure we
@@ -678,7 +678,7 @@ impl FirstSets {
 }
 }

-// A set of `quoted::TokenTree`s, which may include `TokenTree::Match`s
+// A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
 // (for macro-by-example syntactic variables). It also carries the
 // `maybe_empty` flag; that is true if and only if the matcher can
 // match an empty token sequence.
@@ -690,7 +690,7 @@ impl FirstSets {
 // (Notably, we must allow for *-op to occur zero times.)
 #[derive(Clone, Debug)]
 struct TokenSet {
-tokens: Vec<quoted::TokenTree>,
+tokens: Vec<mbe::TokenTree>,
 maybe_empty: bool,
 }

@@ -702,13 +702,13 @@ impl TokenSet {

 // Returns the set `{ tok }` for the single-token (and thus
 // non-empty) sequence [tok].
-fn singleton(tok: quoted::TokenTree) -> Self {
+fn singleton(tok: mbe::TokenTree) -> Self {
 TokenSet { tokens: vec![tok], maybe_empty: false }
 }

 // Changes self to be the set `{ tok }`.
 // Since `tok` is always present, marks self as non-empty.
-fn replace_with(&mut self, tok: quoted::TokenTree) {
+fn replace_with(&mut self, tok: mbe::TokenTree) {
 self.tokens.clear();
 self.tokens.push(tok);
 self.maybe_empty = false;
@@ -723,7 +723,7 @@ impl TokenSet {
 }

 // Adds `tok` to the set for `self`, marking sequence as non-empy.
-fn add_one(&mut self, tok: quoted::TokenTree) {
+fn add_one(&mut self, tok: mbe::TokenTree) {
 if !self.tokens.contains(&tok) {
 self.tokens.push(tok);
 }
@@ -731,7 +731,7 @@ impl TokenSet {
 }

 // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-fn add_one_maybe(&mut self, tok: quoted::TokenTree) {
+fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
 if !self.tokens.contains(&tok) {
 self.tokens.push(tok);
 }
@@ -772,10 +772,10 @@ fn check_matcher_core(
 features: &Features,
 attrs: &[ast::Attribute],
 first_sets: &FirstSets,
-matcher: &[quoted::TokenTree],
+matcher: &[mbe::TokenTree],
 follow: &TokenSet,
 ) -> TokenSet {
-use quoted::TokenTree;
+use mbe::TokenTree;

 let mut last = TokenSet::empty();

@@ -950,8 +950,8 @@ fn check_matcher_core(
 last
 }

-fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool {
-if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
+fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
+if let mbe::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
 frag_can_be_followed_by_any(frag_spec.name)
 } else {
 // (Non NT's can always be followed by anthing in matchers.)
@@ -997,8 +997,8 @@ enum IsInFollow {
 /// break macros that were relying on that binary operator as a
 /// separator.
 // when changing this do not forget to update doc/book/macros.md!
-fn is_in_follow(tok: &quoted::TokenTree, frag: Symbol) -> IsInFollow {
-use quoted::TokenTree;
+fn is_in_follow(tok: &mbe::TokenTree, frag: Symbol) -> IsInFollow {
+use mbe::TokenTree;

 if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
 // closing a token tree can never be matched by any fragment;
@@ -1116,10 +1116,10 @@ fn has_legal_fragment_specifier(
 sess: &ParseSess,
 features: &Features,
 attrs: &[ast::Attribute],
-tok: &quoted::TokenTree,
+tok: &mbe::TokenTree,
 ) -> Result<(), String> {
 debug!("has_legal_fragment_specifier({:?})", tok);
-if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
+if let mbe::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
 let frag_span = tok.span();
 if !is_legal_fragment_specifier(sess, features, attrs, frag_spec.name, frag_span) {
 return Err(frag_spec.to_string());
@@ -1160,13 +1160,13 @@ fn is_legal_fragment_specifier(
 }
 }

-fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
+fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
 match *tt {
-quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
-quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
-quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
+mbe::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
+mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
+mbe::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
 _ => panic!(
-"unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+"unexpected mbe::TokenTree::{{Sequence or Delimited}} \
 in follow set checker"
 ),
 }
@@ -1,166 +1,19 @@
 use crate::ast;
 use crate::ast::NodeId;
 use crate::ext::mbe::macro_parser;
+use crate::ext::mbe::{TokenTree, KleeneOp, KleeneToken, SequenceRepetition, Delimited};
 use crate::feature_gate::Features;
-use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::token::{self, Token};
 use crate::parse::ParseSess;
 use crate::print::pprust;
 use crate::symbol::kw;
-use crate::tokenstream::{self, DelimSpan};
+use crate::tokenstream;

-use syntax_pos::{edition::Edition, BytePos, Span};
+use syntax_pos::{edition::Edition, Span};

 use rustc_data_structures::sync::Lrc;
 use std::iter::Peekable;

-/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
-/// that the delimiter itself might be `NoDelim`.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-crate struct Delimited {
-crate delim: token::DelimToken,
-crate tts: Vec<TokenTree>,
-}
-
-impl Delimited {
-/// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
-crate fn open_tt(&self, span: Span) -> TokenTree {
-let open_span = if span.is_dummy() {
-span
-} else {
-span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
-};
-TokenTree::token(token::OpenDelim(self.delim), open_span)
-}
-
-/// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
-crate fn close_tt(&self, span: Span) -> TokenTree {
-let close_span = if span.is_dummy() {
-span
-} else {
-span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
-};
-TokenTree::token(token::CloseDelim(self.delim), close_span)
-}
-}
-
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-crate struct SequenceRepetition {
-/// The sequence of token trees
-crate tts: Vec<TokenTree>,
-/// The optional separator
-crate separator: Option<Token>,
-/// Whether the sequence can be repeated zero (*), or one or more times (+)
-crate kleene: KleeneToken,
-/// The number of `Match`s that appear in the sequence (and subsequences)
-crate num_captures: usize,
-}
-
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
-crate struct KleeneToken {
-crate span: Span,
-crate op: KleeneOp,
-}
-
-impl KleeneToken {
-crate fn new(op: KleeneOp, span: Span) -> KleeneToken {
-KleeneToken { span, op }
-}
-}
-
-/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
-/// for token sequences.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-crate enum KleeneOp {
-/// Kleene star (`*`) for zero or more repetitions
-ZeroOrMore,
-/// Kleene plus (`+`) for one or more repetitions
-OneOrMore,
-/// Kleene optional (`?`) for zero or one reptitions
-ZeroOrOne,
-}
-
-/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
-/// are "first-class" token trees. Useful for parsing macros.
-#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
-crate enum TokenTree {
-Token(Token),
-Delimited(DelimSpan, Lrc<Delimited>),
-/// A kleene-style repetition sequence
-Sequence(DelimSpan, Lrc<SequenceRepetition>),
-/// e.g., `$var`
-MetaVar(Span, ast::Ident),
-/// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
-MetaVarDecl(
-Span,
-ast::Ident, /* name to bind */
-ast::Ident, /* kind of nonterminal */
-),
-}
-
-impl TokenTree {
-/// Return the number of tokens in the tree.
-crate fn len(&self) -> usize {
-match *self {
-TokenTree::Delimited(_, ref delimed) => match delimed.delim {
-token::NoDelim => delimed.tts.len(),
-_ => delimed.tts.len() + 2,
-},
-TokenTree::Sequence(_, ref seq) => seq.tts.len(),
-_ => 0,
-}
-}
-
-/// Returns `true` if the given token tree is delimited.
-crate fn is_delimited(&self) -> bool {
-match *self {
-TokenTree::Delimited(..) => true,
-_ => false,
-}
-}
-
-/// Returns `true` if the given token tree is a token of the given kind.
-crate fn is_token(&self, expected_kind: &TokenKind) -> bool {
-match self {
-TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
-_ => false,
-}
-}
-
-/// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
-crate fn get_tt(&self, index: usize) -> TokenTree {
-match (self, index) {
-(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
-delimed.tts[index].clone()
-}
-(&TokenTree::Delimited(span, ref delimed), _) => {
-if index == 0 {
-return delimed.open_tt(span.open);
-}
-if index == delimed.tts.len() + 1 {
-return delimed.close_tt(span.close);
-}
-delimed.tts[index - 1].clone()
-}
-(&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
-_ => panic!("Cannot expand a token tree"),
-}
-}
-
-/// Retrieves the `TokenTree`'s span.
-crate fn span(&self) -> Span {
-match *self {
-TokenTree::Token(Token { span, .. })
-| TokenTree::MetaVar(span, _)
-| TokenTree::MetaVarDecl(span, _, _) => span,
-TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
-}
-}
-
-crate fn token(kind: TokenKind, span: Span) -> TokenTree {
-TokenTree::Token(Token::new(kind, span))
-}
-}

 /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
 /// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
 /// collection of `TokenTree` for use in parsing a macro.
@@ -1,7 +1,7 @@
 use crate::ast::{Ident, Mac};
 use crate::ext::base::ExtCtxt;
+use crate::ext::mbe;
 use crate::ext::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
-use crate::ext::mbe::quoted;
 use crate::mut_visit::{self, MutVisitor};
 use crate::parse::token::{self, NtTT, Token};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
@@ -38,22 +38,22 @@ impl Marker {

 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
 enum Frame {
-Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan },
-Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> },
+Delimited { forest: Lrc<mbe::Delimited>, idx: usize, span: DelimSpan },
+Sequence { forest: Lrc<mbe::SequenceRepetition>, idx: usize, sep: Option<Token> },
 }

 impl Frame {
 /// Construct a new frame around the delimited set of tokens.
-fn new(tts: Vec<quoted::TokenTree>) -> Frame {
-let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts });
+fn new(tts: Vec<mbe::TokenTree>) -> Frame {
+let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
 Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
 }
 }

 impl Iterator for Frame {
-type Item = quoted::TokenTree;
+type Item = mbe::TokenTree;

-fn next(&mut self) -> Option<quoted::TokenTree> {
+fn next(&mut self) -> Option<mbe::TokenTree> {
 match *self {
 Frame::Delimited { ref forest, ref mut idx, .. } => {
 *idx += 1;
@@ -90,7 +90,7 @@ impl Iterator for Frame {
 pub(super) fn transcribe(
 cx: &ExtCtxt<'_>,
 interp: &FxHashMap<Ident, NamedMatch>,
-src: Vec<quoted::TokenTree>,
+src: Vec<mbe::TokenTree>,
 transparency: Transparency,
 ) -> TokenStream {
 // Nothing for us to transcribe...
@@ -178,7 +178,7 @@ pub(super) fn transcribe(
 // We are descending into a sequence. We first make sure that the matchers in the RHS
 // and the matches in `interp` have the same shape. Otherwise, either the caller or the
 // macro writer has made a mistake.
-seq @ quoted::TokenTree::Sequence(..) => {
+seq @ mbe::TokenTree::Sequence(..) => {
 match lockstep_iter_size(&seq, interp, &repeats) {
 LockstepIterSize::Unconstrained => {
 cx.span_fatal(
@@ -199,7 +199,7 @@ pub(super) fn transcribe(
 LockstepIterSize::Constraint(len, _) => {
 // We do this to avoid an extra clone above. We know that this is a
 // sequence already.
-let (sp, seq) = if let quoted::TokenTree::Sequence(sp, seq) = seq {
+let (sp, seq) = if let mbe::TokenTree::Sequence(sp, seq) = seq {
 (sp, seq)
 } else {
 unreachable!()
@@ -207,7 +207,7 @@ pub(super) fn transcribe(

 // Is the repetition empty?
 if len == 0 {
-if seq.kleene.op == quoted::KleeneOp::OneOrMore {
+if seq.kleene.op == mbe::KleeneOp::OneOrMore {
 // FIXME: this really ought to be caught at macro definition
 // time... It happens when the Kleene operator in the matcher and
 // the body for the same meta-variable do not match.
@@ -232,7 +232,7 @@ pub(super) fn transcribe(
 }

 // Replace the meta-var with the matched token tree from the invocation.
-quoted::TokenTree::MetaVar(mut sp, mut ident) => {
+mbe::TokenTree::MetaVar(mut sp, mut ident) => {
 // Find the matched nonterminal from the macro invocation, and use it to replace
 // the meta-var.
 if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
@@ -269,7 +269,7 @@ pub(super) fn transcribe(
 // We will produce all of the results of the inside of the `Delimited` and then we will
 // jump back out of the Delimited, pop the result_stack and add the new results back to
 // the previous results (from outside the Delimited).
-quoted::TokenTree::Delimited(mut span, delimited) => {
+mbe::TokenTree::Delimited(mut span, delimited) => {
 marker.visit_delim_span(&mut span);
 stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
 result_stack.push(mem::take(&mut result));
@@ -277,14 +277,14 @@ pub(super) fn transcribe(

 // Nothing much to do here. Just push the token to the result, being careful to
 // preserve syntax context.
-quoted::TokenTree::Token(token) => {
+mbe::TokenTree::Token(token) => {
 let mut tt = TokenTree::Token(token);
 marker.visit_tt(&mut tt);
 result.push(tt.into());
 }

 // There should be no meta-var declarations in the invocation of a macro.
-quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
+mbe::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
 }
 }
 }
@@ -368,11 +368,11 @@ impl LockstepIterSize {
 /// `lookup_cur_matched` will return `None`, which is why this still works even in the presnece of
 /// multiple nested matcher sequences.
 fn lockstep_iter_size(
-tree: &quoted::TokenTree,
+tree: &mbe::TokenTree,
 interpolations: &FxHashMap<Ident, NamedMatch>,
 repeats: &[(usize, usize)],
 ) -> LockstepIterSize {
-use quoted::TokenTree;
+use mbe::TokenTree;
 match *tree {
 TokenTree::Delimited(_, ref delimed) => {
 delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
@@ -14,7 +14,7 @@
 //! ownership of the original.

 use crate::ext::base;
-use crate::ext::mbe::{macro_parser, quoted};
+use crate::ext::mbe::{self, macro_parser};
 use crate::parse::Directory;
 use crate::parse::token::{self, DelimToken, Token, TokenKind};
 use crate::print::pprust;
@@ -64,7 +64,7 @@ where

 impl TokenTree {
 /// Use this token tree as a matcher to parse given tts.
-crate fn parse(cx: &base::ExtCtxt<'_>, mtch: &[quoted::TokenTree], tts: TokenStream)
+crate fn parse(cx: &base::ExtCtxt<'_>, mtch: &[mbe::TokenTree], tts: TokenStream)
 -> macro_parser::NamedParseResult {
 // `None` is because we're not interpolating
 let directory = Directory {