pull mbe token tree definition up

parent 636b3543c2
commit 9fd75f5287

7 changed files with 243 additions and 234 deletions
@@ -8,3 +8,159 @@ crate mod macro_check;
 crate mod macro_parser;
 crate mod macro_rules;
 crate mod quoted;
+
+use crate::ast;
+use crate::parse::token::{self, Token, TokenKind};
+use crate::tokenstream::{DelimSpan};
+
+use syntax_pos::{BytePos, Span};
+
+use rustc_data_structures::sync::Lrc;
+
+/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
+/// that the delimiter itself might be `NoDelim`.
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+crate struct Delimited {
+    crate delim: token::DelimToken,
+    crate tts: Vec<TokenTree>,
+}
+
+impl Delimited {
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
+    crate fn open_tt(&self, span: Span) -> TokenTree {
+        let open_span = if span.is_dummy() {
+            span
+        } else {
+            span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
+        };
+        TokenTree::token(token::OpenDelim(self.delim), open_span)
+    }
+
+    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
+    crate fn close_tt(&self, span: Span) -> TokenTree {
+        let close_span = if span.is_dummy() {
+            span
+        } else {
+            span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
+        };
+        TokenTree::token(token::CloseDelim(self.delim), close_span)
+    }
+}
+
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+crate struct SequenceRepetition {
+    /// The sequence of token trees
+    crate tts: Vec<TokenTree>,
+    /// The optional separator
+    crate separator: Option<Token>,
+    /// Whether the sequence can be repeated zero (*), or one or more times (+)
+    crate kleene: KleeneToken,
+    /// The number of `Match`s that appear in the sequence (and subsequences)
+    crate num_captures: usize,
+}
+
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
+crate struct KleeneToken {
+    crate span: Span,
+    crate op: KleeneOp,
+}
+
+impl KleeneToken {
+    crate fn new(op: KleeneOp, span: Span) -> KleeneToken {
+        KleeneToken { span, op }
+    }
+}
+
+/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
+/// for token sequences.
+#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
+crate enum KleeneOp {
+    /// Kleene star (`*`) for zero or more repetitions
+    ZeroOrMore,
+    /// Kleene plus (`+`) for one or more repetitions
+    OneOrMore,
+    /// Kleene optional (`?`) for zero or one reptitions
+    ZeroOrOne,
+}
+
+/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
+/// are "first-class" token trees. Useful for parsing macros.
+#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
+crate enum TokenTree {
+    Token(Token),
+    Delimited(DelimSpan, Lrc<Delimited>),
+    /// A kleene-style repetition sequence
+    Sequence(DelimSpan, Lrc<SequenceRepetition>),
+    /// e.g., `$var`
+    MetaVar(Span, ast::Ident),
+    /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
+    MetaVarDecl(
+        Span,
+        ast::Ident, /* name to bind */
+        ast::Ident, /* kind of nonterminal */
+    ),
+}
+
+impl TokenTree {
+    /// Return the number of tokens in the tree.
+    crate fn len(&self) -> usize {
+        match *self {
+            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
+                token::NoDelim => delimed.tts.len(),
+                _ => delimed.tts.len() + 2,
+            },
+            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
+            _ => 0,
+        }
+    }
+
+    /// Returns `true` if the given token tree is delimited.
+    crate fn is_delimited(&self) -> bool {
+        match *self {
+            TokenTree::Delimited(..) => true,
+            _ => false,
+        }
+    }
+
+    /// Returns `true` if the given token tree is a token of the given kind.
+    crate fn is_token(&self, expected_kind: &TokenKind) -> bool {
+        match self {
+            TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
+            _ => false,
+        }
+    }
+
+    /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
+    crate fn get_tt(&self, index: usize) -> TokenTree {
+        match (self, index) {
+            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
+                delimed.tts[index].clone()
+            }
+            (&TokenTree::Delimited(span, ref delimed), _) => {
+                if index == 0 {
+                    return delimed.open_tt(span.open);
+                }
+                if index == delimed.tts.len() + 1 {
+                    return delimed.close_tt(span.close);
+                }
+                delimed.tts[index - 1].clone()
+            }
+            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
+            _ => panic!("Cannot expand a token tree"),
+        }
+    }
+
+    /// Retrieves the `TokenTree`'s span.
+    crate fn span(&self) -> Span {
+        match *self {
+            TokenTree::Token(Token { span, .. })
+            | TokenTree::MetaVar(span, _)
+            | TokenTree::MetaVarDecl(span, _, _) => span,
+            TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
+        }
+    }
+
+    crate fn token(kind: TokenKind, span: Span) -> TokenTree {
+        TokenTree::Token(Token::new(kind, span))
+    }
+}
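For orientation when reading the moved `Delimited`/`TokenTree` helpers above: `len` and `get_tt` use a fixed indexing convention, where a really-delimited tree exposes its opening delimiter at position 0, its inner trees at positions 1 through `tts.len()`, and its closing delimiter at `tts.len() + 1`, while a `NoDelim` tree indexes its contents directly. The standalone Rust sketch below is not part of this commit; `Delim`, `tree_at`, and the plain-string "tokens" are invented stand-ins used only to mirror that convention.

// Illustrative sketch only: mirrors the indexing convention of `mbe::TokenTree::len`/`get_tt`.
enum Delim {
    None,  // plays the role of `token::NoDelim`
    Paren, // any "real" delimiter, e.g. `(` ... `)`
}

struct Delimited {
    delim: Delim,
    tts: Vec<String>, // sub-token-trees, simplified to plain strings
}

// Inner trees only for `None`; otherwise the open and close delimiters
// occupy positions 0 and tts.len() + 1.
fn len(d: &Delimited) -> usize {
    match d.delim {
        Delim::None => d.tts.len(),
        _ => d.tts.len() + 2,
    }
}

// Fetch the tree at `index`, following the same convention as `get_tt`.
fn tree_at(d: &Delimited, index: usize) -> String {
    match d.delim {
        Delim::None => d.tts[index].clone(),
        _ if index == 0 => "(".to_string(),               // opening delimiter
        _ if index == d.tts.len() + 1 => ")".to_string(), // closing delimiter
        _ => d.tts[index - 1].clone(),
    }
}

fn main() {
    let parens = Delimited { delim: Delim::Paren, tts: vec!["$x".into(), ",".into(), "$y".into()] };
    assert_eq!(len(&parens), 5);
    assert_eq!(tree_at(&parens, 0), "(");
    assert_eq!(tree_at(&parens, 1), "$x");
    assert_eq!(tree_at(&parens, 4), ")");

    // With no real delimiters, indices map straight onto the inner trees.
    let bare = Delimited { delim: Delim::None, tts: vec!["$x".into()] };
    assert_eq!(len(&bare), 1);
    assert_eq!(tree_at(&bare, 0), "$x");
}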
@@ -106,7 +106,7 @@
 //! bound.
 use crate::ast::NodeId;
 use crate::early_buffered_lints::BufferedEarlyLintId;
-use crate::ext::mbe::quoted::{KleeneToken, TokenTree};
+use crate::ext::mbe::{KleeneToken, TokenTree};
 use crate::parse::token::TokenKind;
 use crate::parse::token::{DelimToken, Token};
 use crate::parse::ParseSess;
@@ -75,7 +75,7 @@ crate use ParseResult::*;
 use TokenTreeOrTokenTreeSlice::*;

 use crate::ast::{Ident, Name};
-use crate::ext::mbe::quoted::{self, TokenTree};
+use crate::ext::mbe::{self, TokenTree};
 use crate::parse::{Directory, ParseSess};
 use crate::parse::parser::{Parser, PathStyle};
 use crate::parse::token::{self, DocComment, Nonterminal, Token};
@@ -195,7 +195,7 @@ struct MatcherPos<'root, 'tt> {
     // `None`.

     /// The KleeneOp of this sequence if we are in a repetition.
-    seq_op: Option<quoted::KleeneOp>,
+    seq_op: Option<mbe::KleeneOp>,

     /// The separator if we are in a repetition.
     sep: Option<Token>,
@@ -532,7 +532,7 @@ fn inner_parse_loop<'root, 'tt>(
                 }
                 // We don't need a separator. Move the "dot" back to the beginning of the matcher
                 // and try to match again UNLESS we are only allowed to have _one_ repetition.
-                else if item.seq_op != Some(quoted::KleeneOp::ZeroOrOne) {
+                else if item.seq_op != Some(mbe::KleeneOp::ZeroOrOne) {
                     item.match_cur = item.match_lo;
                     item.idx = 0;
                     cur_items.push(item);
@@ -555,8 +555,8 @@ fn inner_parse_loop<'root, 'tt>(
                 // implicitly disallowing OneOrMore from having 0 matches here. Thus, that will
                 // result in a "no rules expected token" error by virtue of this matcher not
                 // working.
-                if seq.kleene.op == quoted::KleeneOp::ZeroOrMore
-                    || seq.kleene.op == quoted::KleeneOp::ZeroOrOne
+                if seq.kleene.op == mbe::KleeneOp::ZeroOrMore
+                    || seq.kleene.op == mbe::KleeneOp::ZeroOrOne
                 {
                     let mut new_item = item.clone();
                     new_item.match_cur += seq.num_captures;
@@ -4,11 +4,11 @@ use crate::edition::Edition;
 use crate::ext::base::{DummyResult, ExtCtxt, MacResult, TTMacroExpander};
 use crate::ext::base::{SyntaxExtension, SyntaxExtensionKind};
 use crate::ext::expand::{AstFragment, AstFragmentKind};
+use crate::ext::mbe;
 use crate::ext::mbe::macro_check;
 use crate::ext::mbe::macro_parser::{parse, parse_failure_msg};
 use crate::ext::mbe::macro_parser::{Error, Failure, Success};
 use crate::ext::mbe::macro_parser::{MatchedNonterminal, MatchedSeq};
-use crate::ext::mbe::quoted;
 use crate::ext::mbe::transcribe::transcribe;
 use crate::feature_gate::Features;
 use crate::parse::parser::Parser;
@@ -135,8 +135,8 @@ struct MacroRulesMacroExpander {
     name: ast::Ident,
     span: Span,
     transparency: Transparency,
-    lhses: Vec<quoted::TokenTree>,
-    rhses: Vec<quoted::TokenTree>,
+    lhses: Vec<mbe::TokenTree>,
+    rhses: Vec<mbe::TokenTree>,
     valid: bool,
 }

@@ -169,8 +169,8 @@ fn generic_extension<'cx>(
     name: ast::Ident,
     transparency: Transparency,
     arg: TokenStream,
-    lhses: &[quoted::TokenTree],
-    rhses: &[quoted::TokenTree],
+    lhses: &[mbe::TokenTree],
+    rhses: &[mbe::TokenTree],
 ) -> Box<dyn MacResult + 'cx> {
     if cx.trace_macros() {
         trace_macros_note(cx, sp, format!("expanding `{}! {{ {} }}`", name, arg));
@@ -182,7 +182,7 @@ fn generic_extension<'cx>(
     for (i, lhs) in lhses.iter().enumerate() {
         // try each arm's matchers
         let lhs_tt = match *lhs {
-            quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
+            mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
             _ => cx.span_bug(sp, "malformed macro lhs"),
         };

@@ -190,7 +190,7 @@ fn generic_extension<'cx>(
             Success(named_matches) => {
                 let rhs = match rhses[i] {
                     // ignore delimiters
-                    quoted::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
+                    mbe::TokenTree::Delimited(_, ref delimed) => delimed.tts.clone(),
                     _ => cx.span_bug(sp, "malformed macro rhs"),
                 };
                 let arm_span = rhses[i].span();
@@ -258,7 +258,7 @@ fn generic_extension<'cx>(
     for lhs in lhses {
         // try each arm's matchers
         let lhs_tt = match *lhs {
-            quoted::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
+            mbe::TokenTree::Delimited(_, ref delim) => &delim.tts[..],
             _ => continue,
         };
         match TokenTree::parse(cx, lhs_tt, arg.clone()) {
@@ -312,32 +312,32 @@ pub fn compile_declarative_macro(
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
     let argument_gram = vec![
-        quoted::TokenTree::Sequence(
+        mbe::TokenTree::Sequence(
             DelimSpan::dummy(),
-            Lrc::new(quoted::SequenceRepetition {
+            Lrc::new(mbe::SequenceRepetition {
                 tts: vec![
-                    quoted::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
-                    quoted::TokenTree::token(token::FatArrow, def.span),
-                    quoted::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
+                    mbe::TokenTree::MetaVarDecl(def.span, lhs_nm, tt_spec),
+                    mbe::TokenTree::token(token::FatArrow, def.span),
+                    mbe::TokenTree::MetaVarDecl(def.span, rhs_nm, tt_spec),
                 ],
                 separator: Some(Token::new(
                     if body.legacy { token::Semi } else { token::Comma },
                     def.span,
                 )),
-                kleene: quoted::KleeneToken::new(quoted::KleeneOp::OneOrMore, def.span),
+                kleene: mbe::KleeneToken::new(mbe::KleeneOp::OneOrMore, def.span),
                 num_captures: 2,
             }),
         ),
         // to phase into semicolon-termination instead of semicolon-separation
-        quoted::TokenTree::Sequence(
+        mbe::TokenTree::Sequence(
             DelimSpan::dummy(),
-            Lrc::new(quoted::SequenceRepetition {
-                tts: vec![quoted::TokenTree::token(
+            Lrc::new(mbe::SequenceRepetition {
+                tts: vec![mbe::TokenTree::token(
                     if body.legacy { token::Semi } else { token::Comma },
                     def.span,
                 )],
                 separator: None,
-                kleene: quoted::KleeneToken::new(quoted::KleeneOp::ZeroOrMore, def.span),
+                kleene: mbe::KleeneToken::new(mbe::KleeneOp::ZeroOrMore, def.span),
                 num_captures: 0,
             }),
         ),
@@ -367,7 +367,7 @@ pub fn compile_declarative_macro(
             .map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
-                        let tt = quoted::parse(
+                        let tt = mbe::quoted::parse(
                             tt.clone().into(),
                             true,
                             sess,
@@ -384,7 +384,7 @@ pub fn compile_declarative_macro(
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
             })
-            .collect::<Vec<quoted::TokenTree>>(),
+            .collect::<Vec<mbe::TokenTree>>(),
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
     };

@@ -394,7 +394,7 @@ pub fn compile_declarative_macro(
             .map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
-                        return quoted::parse(
+                        return mbe::quoted::parse(
                             tt.clone().into(),
                             false,
                             sess,
@@ -409,7 +409,7 @@ pub fn compile_declarative_macro(
                 }
                 sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs")
             })
-            .collect::<Vec<quoted::TokenTree>>(),
+            .collect::<Vec<mbe::TokenTree>>(),
         _ => sess.span_diagnostic.span_bug(def.span, "wrong-structured rhs"),
     };

@@ -454,11 +454,11 @@ fn check_lhs_nt_follows(
     sess: &ParseSess,
     features: &Features,
     attrs: &[ast::Attribute],
-    lhs: &quoted::TokenTree,
+    lhs: &mbe::TokenTree,
 ) -> bool {
     // lhs is going to be like TokenTree::Delimited(...), where the
     // entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
-    if let quoted::TokenTree::Delimited(_, ref tts) = *lhs {
+    if let mbe::TokenTree::Delimited(_, ref tts) = *lhs {
         check_matcher(sess, features, attrs, &tts.tts)
     } else {
         let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
@@ -471,8 +471,8 @@ fn check_lhs_nt_follows(

 /// Checks that the lhs contains no repetition which could match an empty token
 /// tree, because then the matcher would hang indefinitely.
-fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
-    use quoted::TokenTree;
+fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
+    use mbe::TokenTree;
     for tt in tts {
         match *tt {
             TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (),
@@ -486,8 +486,8 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
                 && seq.tts.iter().all(|seq_tt| match *seq_tt {
                     TokenTree::MetaVarDecl(_, _, id) => id.name == sym::vis,
                     TokenTree::Sequence(_, ref sub_seq) => {
-                        sub_seq.kleene.op == quoted::KleeneOp::ZeroOrMore
-                            || sub_seq.kleene.op == quoted::KleeneOp::ZeroOrOne
+                        sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
+                            || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
                     }
                     _ => false,
                 })
@@ -506,9 +506,9 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
     true
 }

-fn check_rhs(sess: &ParseSess, rhs: &quoted::TokenTree) -> bool {
+fn check_rhs(sess: &ParseSess, rhs: &mbe::TokenTree) -> bool {
     match *rhs {
-        quoted::TokenTree::Delimited(..) => return true,
+        mbe::TokenTree::Delimited(..) => return true,
         _ => sess.span_diagnostic.span_err(rhs.span(), "macro rhs must be delimited"),
     }
     false
@@ -518,7 +518,7 @@ fn check_matcher(
     sess: &ParseSess,
     features: &Features,
     attrs: &[ast::Attribute],
-    matcher: &[quoted::TokenTree],
+    matcher: &[mbe::TokenTree],
 ) -> bool {
     let first_sets = FirstSets::new(matcher);
     let empty_suffix = TokenSet::empty();
@@ -550,8 +550,8 @@ struct FirstSets {
 }

 impl FirstSets {
-    fn new(tts: &[quoted::TokenTree]) -> FirstSets {
-        use quoted::TokenTree;
+    fn new(tts: &[mbe::TokenTree]) -> FirstSets {
+        use mbe::TokenTree;

         let mut sets = FirstSets { first: FxHashMap::default() };
         build_recur(&mut sets, tts);
@@ -598,8 +598,8 @@ impl FirstSets {

                         // Reverse scan: Sequence comes before `first`.
                         if subfirst.maybe_empty
-                            || seq_rep.kleene.op == quoted::KleeneOp::ZeroOrMore
-                            || seq_rep.kleene.op == quoted::KleeneOp::ZeroOrOne
+                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
+                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                         {
                             // If sequence is potentially empty, then
                             // union them (preserving first emptiness).
@@ -619,8 +619,8 @@ impl FirstSets {

     // walks forward over `tts` until all potential FIRST tokens are
     // identified.
-    fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
-        use quoted::TokenTree;
+    fn first(&self, tts: &[mbe::TokenTree]) -> TokenSet {
+        use mbe::TokenTree;

         let mut first = TokenSet::empty();
         for tt in tts.iter() {
@@ -656,8 +656,8 @@ impl FirstSets {
                         assert!(first.maybe_empty);
                         first.add_all(subfirst);
                         if subfirst.maybe_empty
-                            || seq_rep.kleene.op == quoted::KleeneOp::ZeroOrMore
-                            || seq_rep.kleene.op == quoted::KleeneOp::ZeroOrOne
+                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrMore
+                            || seq_rep.kleene.op == mbe::KleeneOp::ZeroOrOne
                         {
                             // Continue scanning for more first
                             // tokens, but also make sure we
@@ -678,7 +678,7 @@ impl FirstSets {
     }
 }

-// A set of `quoted::TokenTree`s, which may include `TokenTree::Match`s
+// A set of `mbe::TokenTree`s, which may include `TokenTree::Match`s
 // (for macro-by-example syntactic variables). It also carries the
 // `maybe_empty` flag; that is true if and only if the matcher can
 // match an empty token sequence.
@@ -690,7 +690,7 @@ impl FirstSets {
 // (Notably, we must allow for *-op to occur zero times.)
 #[derive(Clone, Debug)]
 struct TokenSet {
-    tokens: Vec<quoted::TokenTree>,
+    tokens: Vec<mbe::TokenTree>,
     maybe_empty: bool,
 }

@@ -702,13 +702,13 @@ impl TokenSet {

     // Returns the set `{ tok }` for the single-token (and thus
     // non-empty) sequence [tok].
-    fn singleton(tok: quoted::TokenTree) -> Self {
+    fn singleton(tok: mbe::TokenTree) -> Self {
         TokenSet { tokens: vec![tok], maybe_empty: false }
     }

     // Changes self to be the set `{ tok }`.
     // Since `tok` is always present, marks self as non-empty.
-    fn replace_with(&mut self, tok: quoted::TokenTree) {
+    fn replace_with(&mut self, tok: mbe::TokenTree) {
         self.tokens.clear();
         self.tokens.push(tok);
         self.maybe_empty = false;
@@ -723,7 +723,7 @@ impl TokenSet {
     }

     // Adds `tok` to the set for `self`, marking sequence as non-empy.
-    fn add_one(&mut self, tok: quoted::TokenTree) {
+    fn add_one(&mut self, tok: mbe::TokenTree) {
         if !self.tokens.contains(&tok) {
             self.tokens.push(tok);
         }
@@ -731,7 +731,7 @@ impl TokenSet {
     }

     // Adds `tok` to the set for `self`. (Leaves `maybe_empty` flag alone.)
-    fn add_one_maybe(&mut self, tok: quoted::TokenTree) {
+    fn add_one_maybe(&mut self, tok: mbe::TokenTree) {
         if !self.tokens.contains(&tok) {
             self.tokens.push(tok);
         }
@@ -772,10 +772,10 @@ fn check_matcher_core(
     features: &Features,
     attrs: &[ast::Attribute],
     first_sets: &FirstSets,
-    matcher: &[quoted::TokenTree],
+    matcher: &[mbe::TokenTree],
     follow: &TokenSet,
 ) -> TokenSet {
-    use quoted::TokenTree;
+    use mbe::TokenTree;

     let mut last = TokenSet::empty();

@@ -950,8 +950,8 @@ fn check_matcher_core(
     last
 }

-fn token_can_be_followed_by_any(tok: &quoted::TokenTree) -> bool {
-    if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
+fn token_can_be_followed_by_any(tok: &mbe::TokenTree) -> bool {
+    if let mbe::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
         frag_can_be_followed_by_any(frag_spec.name)
     } else {
         // (Non NT's can always be followed by anthing in matchers.)
@@ -997,8 +997,8 @@ enum IsInFollow {
 /// break macros that were relying on that binary operator as a
 /// separator.
 // when changing this do not forget to update doc/book/macros.md!
-fn is_in_follow(tok: &quoted::TokenTree, frag: Symbol) -> IsInFollow {
-    use quoted::TokenTree;
+fn is_in_follow(tok: &mbe::TokenTree, frag: Symbol) -> IsInFollow {
+    use mbe::TokenTree;

     if let TokenTree::Token(Token { kind: token::CloseDelim(_), .. }) = *tok {
         // closing a token tree can never be matched by any fragment;
@@ -1116,10 +1116,10 @@ fn has_legal_fragment_specifier(
     sess: &ParseSess,
     features: &Features,
     attrs: &[ast::Attribute],
-    tok: &quoted::TokenTree,
+    tok: &mbe::TokenTree,
 ) -> Result<(), String> {
     debug!("has_legal_fragment_specifier({:?})", tok);
-    if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
+    if let mbe::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
         let frag_span = tok.span();
         if !is_legal_fragment_specifier(sess, features, attrs, frag_spec.name, frag_span) {
             return Err(frag_spec.to_string());
@@ -1160,13 +1160,13 @@ fn is_legal_fragment_specifier(
     }
 }

-fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
+fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
     match *tt {
-        quoted::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
-        quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
-        quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
+        mbe::TokenTree::Token(ref token) => crate::print::pprust::token_to_string(&token),
+        mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
+        mbe::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!(
-            "unexpected quoted::TokenTree::{{Sequence or Delimited}} \
+            "unexpected mbe::TokenTree::{{Sequence or Delimited}} \
             in follow set checker"
         ),
     }
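One piece of background for the `check_lhs_no_empty_seq` fragments above: a repetition whose Kleene operator is `*` or `?` may legally repeat zero times, so a matcher built only out of such pieces can match the empty token stream, which is exactly what the check rejects (otherwise the parser could loop forever on it). The standalone sketch below is illustrative only and not part of this commit; `Matcher` and `can_match_empty` are invented names, not rustc types.

// Illustrative sketch only: a simplified version of the "can this repetition
// match nothing?" reasoning used by `check_lhs_no_empty_seq`.
#[derive(Clone, Copy, PartialEq)]
enum KleeneOp {
    ZeroOrMore, // `*`
    OneOrMore,  // `+`
    ZeroOrOne,  // `?`
}

enum Matcher {
    Token(char),                      // a literal token: never empty
    Sequence(KleeneOp, Vec<Matcher>), // `$( ... )op`
}

// A repetition can match the empty token stream if its operator allows zero
// repetitions, or if every element of its body can itself match empty.
fn can_match_empty(m: &Matcher) -> bool {
    match m {
        Matcher::Token(_) => false,
        Matcher::Sequence(op, body) => {
            *op == KleeneOp::ZeroOrMore
                || *op == KleeneOp::ZeroOrOne
                || body.iter().all(can_match_empty)
        }
    }
}

fn main() {
    // `$( a )?` can match nothing at all.
    assert!(can_match_empty(&Matcher::Sequence(KleeneOp::ZeroOrOne, vec![Matcher::Token('a')])));

    // `$( $( a )* )+` needs one outer repetition, but the inner `*` may be
    // empty, so the whole matcher can still match nothing.
    let inner = Matcher::Sequence(KleeneOp::ZeroOrMore, vec![Matcher::Token('a')]);
    let outer = Matcher::Sequence(KleeneOp::OneOrMore, vec![inner]);
    assert!(can_match_empty(&outer));
}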
@@ -1,166 +1,19 @@
 use crate::ast;
 use crate::ast::NodeId;
 use crate::ext::mbe::macro_parser;
+use crate::ext::mbe::{TokenTree, KleeneOp, KleeneToken, SequenceRepetition, Delimited};
 use crate::feature_gate::Features;
-use crate::parse::token::{self, Token, TokenKind};
+use crate::parse::token::{self, Token};
 use crate::parse::ParseSess;
 use crate::print::pprust;
 use crate::symbol::kw;
-use crate::tokenstream::{self, DelimSpan};
+use crate::tokenstream;

-use syntax_pos::{edition::Edition, BytePos, Span};
+use syntax_pos::{edition::Edition, Span};

 use rustc_data_structures::sync::Lrc;
 use std::iter::Peekable;

-/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
-/// that the delimiter itself might be `NoDelim`.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-crate struct Delimited {
-    crate delim: token::DelimToken,
-    crate tts: Vec<TokenTree>,
-}
-
-impl Delimited {
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
-    crate fn open_tt(&self, span: Span) -> TokenTree {
-        let open_span = if span.is_dummy() {
-            span
-        } else {
-            span.with_hi(span.lo() + BytePos(self.delim.len() as u32))
-        };
-        TokenTree::token(token::OpenDelim(self.delim), open_span)
-    }
-
-    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
-    crate fn close_tt(&self, span: Span) -> TokenTree {
-        let close_span = if span.is_dummy() {
-            span
-        } else {
-            span.with_lo(span.hi() - BytePos(self.delim.len() as u32))
-        };
-        TokenTree::token(token::CloseDelim(self.delim), close_span)
-    }
-}
-
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug)]
-crate struct SequenceRepetition {
-    /// The sequence of token trees
-    crate tts: Vec<TokenTree>,
-    /// The optional separator
-    crate separator: Option<Token>,
-    /// Whether the sequence can be repeated zero (*), or one or more times (+)
-    crate kleene: KleeneToken,
-    /// The number of `Match`s that appear in the sequence (and subsequences)
-    crate num_captures: usize,
-}
-
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
-crate struct KleeneToken {
-    crate span: Span,
-    crate op: KleeneOp,
-}
-
-impl KleeneToken {
-    crate fn new(op: KleeneOp, span: Span) -> KleeneToken {
-        KleeneToken { span, op }
-    }
-}
-
-/// A Kleene-style [repetition operator](http://en.wikipedia.org/wiki/Kleene_star)
-/// for token sequences.
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-crate enum KleeneOp {
-    /// Kleene star (`*`) for zero or more repetitions
-    ZeroOrMore,
-    /// Kleene plus (`+`) for one or more repetitions
-    OneOrMore,
-    /// Kleene optional (`?`) for zero or one reptitions
-    ZeroOrOne,
-}
-
-/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
-/// are "first-class" token trees. Useful for parsing macros.
-#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
-crate enum TokenTree {
-    Token(Token),
-    Delimited(DelimSpan, Lrc<Delimited>),
-    /// A kleene-style repetition sequence
-    Sequence(DelimSpan, Lrc<SequenceRepetition>),
-    /// e.g., `$var`
-    MetaVar(Span, ast::Ident),
-    /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
-    MetaVarDecl(
-        Span,
-        ast::Ident, /* name to bind */
-        ast::Ident, /* kind of nonterminal */
-    ),
-}
-
-impl TokenTree {
-    /// Return the number of tokens in the tree.
-    crate fn len(&self) -> usize {
-        match *self {
-            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
-                token::NoDelim => delimed.tts.len(),
-                _ => delimed.tts.len() + 2,
-            },
-            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
-            _ => 0,
-        }
-    }
-
-    /// Returns `true` if the given token tree is delimited.
-    crate fn is_delimited(&self) -> bool {
-        match *self {
-            TokenTree::Delimited(..) => true,
-            _ => false,
-        }
-    }
-
-    /// Returns `true` if the given token tree is a token of the given kind.
-    crate fn is_token(&self, expected_kind: &TokenKind) -> bool {
-        match self {
-            TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
-            _ => false,
-        }
-    }
-
-    /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
-    crate fn get_tt(&self, index: usize) -> TokenTree {
-        match (self, index) {
-            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
-                delimed.tts[index].clone()
-            }
-            (&TokenTree::Delimited(span, ref delimed), _) => {
-                if index == 0 {
-                    return delimed.open_tt(span.open);
-                }
-                if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt(span.close);
-                }
-                delimed.tts[index - 1].clone()
-            }
-            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
-            _ => panic!("Cannot expand a token tree"),
-        }
-    }
-
-    /// Retrieves the `TokenTree`'s span.
-    crate fn span(&self) -> Span {
-        match *self {
-            TokenTree::Token(Token { span, .. })
-            | TokenTree::MetaVar(span, _)
-            | TokenTree::MetaVarDecl(span, _, _) => span,
-            TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
-        }
-    }
-
-    crate fn token(kind: TokenKind, span: Span) -> TokenTree {
-        TokenTree::Token(Token::new(kind, span))
-    }
-}
-
 /// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
 /// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
 /// collection of `TokenTree` for use in parsing a macro.
@@ -1,7 +1,7 @@
 use crate::ast::{Ident, Mac};
 use crate::ext::base::ExtCtxt;
+use crate::ext::mbe;
 use crate::ext::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
-use crate::ext::mbe::quoted;
 use crate::mut_visit::{self, MutVisitor};
 use crate::parse::token::{self, NtTT, Token};
 use crate::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
@@ -38,22 +38,22 @@ impl Marker {

 /// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
 enum Frame {
-    Delimited { forest: Lrc<quoted::Delimited>, idx: usize, span: DelimSpan },
-    Sequence { forest: Lrc<quoted::SequenceRepetition>, idx: usize, sep: Option<Token> },
+    Delimited { forest: Lrc<mbe::Delimited>, idx: usize, span: DelimSpan },
+    Sequence { forest: Lrc<mbe::SequenceRepetition>, idx: usize, sep: Option<Token> },
 }

 impl Frame {
     /// Construct a new frame around the delimited set of tokens.
-    fn new(tts: Vec<quoted::TokenTree>) -> Frame {
-        let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts });
+    fn new(tts: Vec<mbe::TokenTree>) -> Frame {
+        let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
         Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
     }
 }

 impl Iterator for Frame {
-    type Item = quoted::TokenTree;
+    type Item = mbe::TokenTree;

-    fn next(&mut self) -> Option<quoted::TokenTree> {
+    fn next(&mut self) -> Option<mbe::TokenTree> {
         match *self {
             Frame::Delimited { ref forest, ref mut idx, .. } => {
                 *idx += 1;
@@ -90,7 +90,7 @@ impl Iterator for Frame {
 pub(super) fn transcribe(
     cx: &ExtCtxt<'_>,
     interp: &FxHashMap<Ident, NamedMatch>,
-    src: Vec<quoted::TokenTree>,
+    src: Vec<mbe::TokenTree>,
     transparency: Transparency,
 ) -> TokenStream {
     // Nothing for us to transcribe...
@@ -178,7 +178,7 @@ pub(super) fn transcribe(
             // We are descending into a sequence. We first make sure that the matchers in the RHS
             // and the matches in `interp` have the same shape. Otherwise, either the caller or the
             // macro writer has made a mistake.
-            seq @ quoted::TokenTree::Sequence(..) => {
+            seq @ mbe::TokenTree::Sequence(..) => {
                 match lockstep_iter_size(&seq, interp, &repeats) {
                     LockstepIterSize::Unconstrained => {
                         cx.span_fatal(
@@ -199,7 +199,7 @@ pub(super) fn transcribe(
                     LockstepIterSize::Constraint(len, _) => {
                         // We do this to avoid an extra clone above. We know that this is a
                         // sequence already.
-                        let (sp, seq) = if let quoted::TokenTree::Sequence(sp, seq) = seq {
+                        let (sp, seq) = if let mbe::TokenTree::Sequence(sp, seq) = seq {
                             (sp, seq)
                         } else {
                             unreachable!()
@@ -207,7 +207,7 @@ pub(super) fn transcribe(

                         // Is the repetition empty?
                         if len == 0 {
-                            if seq.kleene.op == quoted::KleeneOp::OneOrMore {
+                            if seq.kleene.op == mbe::KleeneOp::OneOrMore {
                                 // FIXME: this really ought to be caught at macro definition
                                 // time... It happens when the Kleene operator in the matcher and
                                 // the body for the same meta-variable do not match.
@@ -232,7 +232,7 @@ pub(super) fn transcribe(
             }

             // Replace the meta-var with the matched token tree from the invocation.
-            quoted::TokenTree::MetaVar(mut sp, mut ident) => {
+            mbe::TokenTree::MetaVar(mut sp, mut ident) => {
                 // Find the matched nonterminal from the macro invocation, and use it to replace
                 // the meta-var.
                 if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
@@ -269,7 +269,7 @@ pub(super) fn transcribe(
             // We will produce all of the results of the inside of the `Delimited` and then we will
             // jump back out of the Delimited, pop the result_stack and add the new results back to
             // the previous results (from outside the Delimited).
-            quoted::TokenTree::Delimited(mut span, delimited) => {
+            mbe::TokenTree::Delimited(mut span, delimited) => {
                 marker.visit_delim_span(&mut span);
                 stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
                 result_stack.push(mem::take(&mut result));
@@ -277,14 +277,14 @@ pub(super) fn transcribe(

             // Nothing much to do here. Just push the token to the result, being careful to
             // preserve syntax context.
-            quoted::TokenTree::Token(token) => {
+            mbe::TokenTree::Token(token) => {
                 let mut tt = TokenTree::Token(token);
                 marker.visit_tt(&mut tt);
                 result.push(tt.into());
             }

             // There should be no meta-var declarations in the invocation of a macro.
-            quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
+            mbe::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
         }
     }
 }
@@ -368,11 +368,11 @@ impl LockstepIterSize {
 /// `lookup_cur_matched` will return `None`, which is why this still works even in the presnece of
 /// multiple nested matcher sequences.
 fn lockstep_iter_size(
-    tree: &quoted::TokenTree,
+    tree: &mbe::TokenTree,
     interpolations: &FxHashMap<Ident, NamedMatch>,
     repeats: &[(usize, usize)],
 ) -> LockstepIterSize {
-    use quoted::TokenTree;
+    use mbe::TokenTree;
     match *tree {
         TokenTree::Delimited(_, ref delimed) => {
             delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
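The transcriber comments above ("we first make sure that the matchers in the RHS and the matches in `interp` have the same shape") amount to folding a repetition-count constraint over every meta-variable mentioned inside a `$( ... )` group: all of them must have matched the same number of times, otherwise the expansion is rejected. A simplified standalone sketch of that fold follows; it is not part of this commit, and `SizeConstraint`/`combine` are invented stand-ins for the real `LockstepIterSize` machinery.

// Illustrative sketch only: folding a repetition-count constraint, in the
// spirit of `lockstep_iter_size` above.
#[derive(Debug, PartialEq)]
enum SizeConstraint {
    Unconstrained,  // no meta-variable seen yet
    Exactly(usize), // every meta-variable seen so far repeated this many times
    Contradiction,  // two meta-variables disagree on the repetition count
}

fn combine(acc: SizeConstraint, n: usize) -> SizeConstraint {
    match acc {
        SizeConstraint::Unconstrained => SizeConstraint::Exactly(n),
        SizeConstraint::Exactly(m) => {
            if m == n { SizeConstraint::Exactly(m) } else { SizeConstraint::Contradiction }
        }
        SizeConstraint::Contradiction => SizeConstraint::Contradiction,
    }
}

fn main() {
    // `$( $x $y )*` where `$x` and `$y` both matched 3 times: expand 3 times.
    let ok = [3usize, 3].iter().fold(SizeConstraint::Unconstrained, |acc, &n| combine(acc, n));
    assert_eq!(ok, SizeConstraint::Exactly(3));

    // `$x` matched 3 times but `$y` only 2: the shapes disagree, so reject.
    let bad = [3usize, 2].iter().fold(SizeConstraint::Unconstrained, |acc, &n| combine(acc, n));
    assert_eq!(bad, SizeConstraint::Contradiction);
}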
@@ -14,7 +14,7 @@
 //! ownership of the original.

 use crate::ext::base;
-use crate::ext::mbe::{macro_parser, quoted};
+use crate::ext::mbe::{self, macro_parser};
 use crate::parse::Directory;
 use crate::parse::token::{self, DelimToken, Token, TokenKind};
 use crate::print::pprust;
@@ -64,7 +64,7 @@ where

 impl TokenTree {
     /// Use this token tree as a matcher to parse given tts.
-    crate fn parse(cx: &base::ExtCtxt<'_>, mtch: &[quoted::TokenTree], tts: TokenStream)
+    crate fn parse(cx: &base::ExtCtxt<'_>, mtch: &[mbe::TokenTree], tts: TokenStream)
                    -> macro_parser::NamedParseResult {
         // `None` is because we're not interpolating
         let directory = Directory {