Call compute_locs once per rule.
Currently `compute_locs` is called in `parse_tt` every time a match rule is invoked. This commit moves the call into `compile_declarative_macro`, so it happens just once per match rule, which is a performance win. The commit also moves `compute_locs` out of `TtParser`, because there is no longer any reason for it to live there.
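The shape of the change, as a minimal self-contained sketch: the types and names below are illustrative stand-ins for rustc's `TokenTree`/`MatcherLoc` (not the actual compiler API); the real code is in the diff below.

// Illustrative stand-ins, not rustc's types.
#[derive(Clone)]
struct TokenTree(String);
struct MatcherLoc(String);

// Analogous to `compute_locs`: a fairly cheap, but not free, conversion.
fn compute_locs(matcher: &[TokenTree]) -> Vec<MatcherLoc> {
    matcher.iter().map(|tt| MatcherLoc(tt.0.clone())).collect()
}

// Before: convert the matcher on every macro invocation.
fn parse_per_call(matcher: &[TokenTree], _input: &str) {
    let _locs = compute_locs(matcher); // repeated work on every call
}

// After: convert once when the rule is compiled, then reuse the result.
struct CompiledRule {
    locs: Vec<MatcherLoc>,
}

fn compile_rule(matcher: &[TokenTree]) -> CompiledRule {
    CompiledRule { locs: compute_locs(matcher) } // once per rule
}

fn parse_precompiled(rule: &CompiledRule, _input: &str) {
    let _ = &rule.locs; // matching walks the precomputed locs directly
}

fn main() {
    let matcher = vec![TokenTree("$e:expr".into())];
    parse_per_call(&matcher, "foo!(1)"); // before: convert, then match

    let rule = compile_rule(&matcher); // after: convert once...
    for call in ["foo!(1)", "foo!(2)"] {
        parse_precompiled(&rule, call); // ...and reuse it for every call
    }
}

The win comes from paying the conversion cost once per rule at definition time instead of on every expansion.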
This commit is contained in:
parent 7300bd6a38
commit 238d9076fc

2 changed files with 123 additions and 114 deletions
@@ -106,7 +106,7 @@ rustc_data_structures::static_assert_size!(NamedMatchVec, 48);
 ///
 /// This means a matcher can be represented by `&[MatcherLoc]`, and traversal mostly involves
 /// simply incrementing the current matcher position index by one.
-enum MatcherLoc {
+pub(super) enum MatcherLoc {
     Token {
         token: Token,
     },
@@ -138,6 +138,78 @@ enum MatcherLoc {
     Eof,
 }
 
+pub(super) fn compute_locs(sess: &ParseSess, matcher: &[TokenTree]) -> Vec<MatcherLoc> {
+    fn inner(
+        sess: &ParseSess,
+        tts: &[TokenTree],
+        locs: &mut Vec<MatcherLoc>,
+        next_metavar: &mut usize,
+        seq_depth: usize,
+    ) {
+        for tt in tts {
+            match tt {
+                TokenTree::Token(token) => {
+                    locs.push(MatcherLoc::Token { token: token.clone() });
+                }
+                TokenTree::Delimited(_, delimited) => {
+                    locs.push(MatcherLoc::Delimited);
+                    inner(sess, &delimited.all_tts, locs, next_metavar, seq_depth);
+                }
+                TokenTree::Sequence(_, seq) => {
+                    // We can't determine `idx_first_after` and construct the final
+                    // `MatcherLoc::Sequence` until after `inner()` is called and the sequence end
+                    // pieces are processed. So we push a dummy value (`Eof` is cheapest to
+                    // construct) now, and overwrite it with the proper value below.
+                    let dummy = MatcherLoc::Eof;
+                    locs.push(dummy);
+
+                    let next_metavar_orig = *next_metavar;
+                    let op = seq.kleene.op;
+                    let idx_first = locs.len();
+                    let idx_seq = idx_first - 1;
+                    inner(sess, &seq.tts, locs, next_metavar, seq_depth + 1);
+
+                    if let Some(separator) = &seq.separator {
+                        locs.push(MatcherLoc::SequenceSep { separator: separator.clone() });
+                        locs.push(MatcherLoc::SequenceKleeneOpAfterSep { idx_first });
+                    } else {
+                        locs.push(MatcherLoc::SequenceKleeneOpNoSep { op, idx_first });
+                    }
+
+                    // Overwrite the dummy value pushed above with the proper value.
+                    locs[idx_seq] = MatcherLoc::Sequence {
+                        op,
+                        num_metavar_decls: seq.num_captures,
+                        idx_first_after: locs.len(),
+                        next_metavar: next_metavar_orig,
+                        seq_depth,
+                    };
+                }
+                &TokenTree::MetaVarDecl(span, bind, kind) => {
+                    locs.push(MatcherLoc::MetaVarDecl {
+                        span,
+                        bind,
+                        kind,
+                        next_metavar: *next_metavar,
+                        seq_depth,
+                    });
+                    *next_metavar += 1;
+                }
+                TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
+            }
+        }
+    }
+
+    let mut locs = vec![];
+    let mut next_metavar = 0;
+    inner(sess, matcher, &mut locs, &mut next_metavar, /* seq_depth */ 0);
+
+    // A final entry is needed for eof.
+    locs.push(MatcherLoc::Eof);
+
+    locs
+}
+
 /// A single matcher position, representing the state of matching.
 struct MatcherPos {
     /// The index into `TtParser::locs`, which represents the "dot".
@@ -301,9 +373,6 @@ fn token_name_eq(t1: &Token, t2: &Token) -> bool {
 pub struct TtParser {
     macro_name: Ident,
 
-    /// The matcher of the current rule.
-    locs: Vec<MatcherLoc>,
-
     /// The set of current mps to be processed. This should be empty by the end of a successful
     /// execution of `parse_tt_inner`.
     cur_mps: Vec<MatcherPos>,
@@ -324,7 +393,6 @@ impl TtParser {
     pub(super) fn new(macro_name: Ident) -> TtParser {
         TtParser {
             macro_name,
-            locs: vec![],
             cur_mps: vec![],
             next_mps: vec![],
             bb_mps: vec![],
@@ -332,84 +400,6 @@ impl TtParser {
         }
     }
 
-    /// Convert a `&[TokenTree]` to a `&[MatcherLoc]`. Note: this conversion happens every time the
-    /// macro is called, which may be many times if there are many call sites or if it is
-    /// recursive. This conversion is fairly cheap and the representation is sufficiently better
-    /// for matching than `&[TokenTree]` that it's a clear performance win even with the overhead.
-    /// But it might be possible to move the conversion outwards so it only occurs once per macro.
-    fn compute_locs(&mut self, sess: &ParseSess, matcher: &[TokenTree]) -> usize {
-        fn inner(
-            sess: &ParseSess,
-            tts: &[TokenTree],
-            locs: &mut Vec<MatcherLoc>,
-            next_metavar: &mut usize,
-            seq_depth: usize,
-        ) {
-            for tt in tts {
-                match tt {
-                    TokenTree::Token(token) => {
-                        locs.push(MatcherLoc::Token { token: token.clone() });
-                    }
-                    TokenTree::Delimited(_, delimited) => {
-                        locs.push(MatcherLoc::Delimited);
-                        inner(sess, &delimited.all_tts, locs, next_metavar, seq_depth);
-                    }
-                    TokenTree::Sequence(_, seq) => {
-                        // We can't determine `idx_first_after` and construct the final
-                        // `MatcherLoc::Sequence` until after `inner()` is called and the sequence
-                        // end pieces are processed. So we push a dummy value (`Eof` is cheapest to
-                        // construct) now, and overwrite it with the proper value below.
-                        let dummy = MatcherLoc::Eof;
-                        locs.push(dummy);
-
-                        let next_metavar_orig = *next_metavar;
-                        let op = seq.kleene.op;
-                        let idx_first = locs.len();
-                        let idx_seq = idx_first - 1;
-                        inner(sess, &seq.tts, locs, next_metavar, seq_depth + 1);
-
-                        if let Some(separator) = &seq.separator {
-                            locs.push(MatcherLoc::SequenceSep { separator: separator.clone() });
-                            locs.push(MatcherLoc::SequenceKleeneOpAfterSep { idx_first });
-                        } else {
-                            locs.push(MatcherLoc::SequenceKleeneOpNoSep { op, idx_first });
-                        }
-
-                        // Overwrite the dummy value pushed above with the proper value.
-                        locs[idx_seq] = MatcherLoc::Sequence {
-                            op,
-                            num_metavar_decls: seq.num_captures,
-                            idx_first_after: locs.len(),
-                            next_metavar: next_metavar_orig,
-                            seq_depth,
-                        };
-                    }
-                    &TokenTree::MetaVarDecl(span, bind, kind) => {
-                        locs.push(MatcherLoc::MetaVarDecl {
-                            span,
-                            bind,
-                            kind,
-                            next_metavar: *next_metavar,
-                            seq_depth,
-                        });
-                        *next_metavar += 1;
-                    }
-                    TokenTree::MetaVar(..) | TokenTree::MetaVarExpr(..) => unreachable!(),
-                }
-            }
-        }
-
-        self.locs.clear();
-        let mut next_metavar = 0;
-        inner(sess, matcher, &mut self.locs, &mut next_metavar, /* seq_depth */ 0);
-
-        // A final entry is needed for eof.
-        self.locs.push(MatcherLoc::Eof);
-
-        // This is the number of metavar decls.
-        next_metavar
-    }
-
     /// Process the matcher positions of `cur_mps` until it is empty. In the process, this will
     /// produce more mps in `next_mps` and `bb_mps`.
     ///
@@ -420,7 +410,7 @@ impl TtParser {
     fn parse_tt_inner(
         &mut self,
         sess: &ParseSess,
-        num_metavar_decls: usize,
+        matcher: &[MatcherLoc],
         token: &Token,
     ) -> Option<NamedParseResult> {
         // Matcher positions that would be valid if the macro invocation was over now. Only
@@ -428,7 +418,7 @@ impl TtParser {
         let mut eof_mps = EofMatcherPositions::None;
 
         while let Some(mut mp) = self.cur_mps.pop() {
-            match &self.locs[mp.idx] {
+            match &matcher[mp.idx] {
                 MatcherLoc::Token { token: t } => {
                     // If it's a doc comment, we just ignore it and move on to the next tt in the
                     // matcher. This is a bug, but #95267 showed that existing programs rely on
@@ -536,7 +526,7 @@ impl TtParser {
                 }
                 MatcherLoc::Eof => {
                     // We are past the matcher's end, and not in a sequence. Try to end things.
-                    debug_assert_eq!(mp.idx, self.locs.len() - 1);
+                    debug_assert_eq!(mp.idx, matcher.len() - 1);
                     if *token == token::Eof {
                         eof_mps = match eof_mps {
                             EofMatcherPositions::None => EofMatcherPositions::One(mp),
@@ -554,11 +544,10 @@ impl TtParser {
         if *token == token::Eof {
             Some(match eof_mps {
                 EofMatcherPositions::One(mut eof_mp) => {
-                    assert_eq!(eof_mp.matches.len(), num_metavar_decls);
                     // Need to take ownership of the matches from within the `Lrc`.
                     Lrc::make_mut(&mut eof_mp.matches);
                     let matches = Lrc::try_unwrap(eof_mp.matches).unwrap().into_iter();
-                    self.nameize(sess, matches)
+                    self.nameize(sess, matcher, matches)
                 }
                 EofMatcherPositions::Multiple => {
                     Error(token.span, "ambiguity: multiple successful parses".to_string())
@@ -580,10 +569,8 @@ impl TtParser {
     pub(super) fn parse_tt(
         &mut self,
         parser: &mut Cow<'_, Parser<'_>>,
-        matcher: &[TokenTree],
+        matcher: &[MatcherLoc],
     ) -> NamedParseResult {
-        let num_metavar_decls = self.compute_locs(parser.sess, matcher);
-
         // A queue of possible matcher positions. We initialize it with the matcher position in
         // which the "dot" is before the first token of the first token tree in `matcher`.
         // `parse_tt_inner` then processes all of these possible matcher positions and produces
@@ -598,7 +585,7 @@ impl TtParser {
 
             // Process `cur_mps` until either we have finished the input or we need to get some
             // parsing from the black-box parser done.
-            if let Some(res) = self.parse_tt_inner(&parser.sess, num_metavar_decls, &parser.token) {
+            if let Some(res) = self.parse_tt_inner(&parser.sess, matcher, &parser.token) {
                 return res;
             }
 
@@ -626,7 +613,7 @@ impl TtParser {
                 (0, 1) => {
                     // We need to call the black-box parser to get some nonterminal.
                     let mut mp = self.bb_mps.pop().unwrap();
-                    let loc = &self.locs[mp.idx];
+                    let loc = &matcher[mp.idx];
                     if let &MatcherLoc::MetaVarDecl {
                         span,
                         kind: Some(kind),
@@ -664,7 +651,7 @@ impl TtParser {
 
                 (_, _) => {
                     // Too many possibilities!
-                    return self.ambiguity_error(parser.token.span);
+                    return self.ambiguity_error(matcher, parser.token.span);
                 }
             }
 
@@ -672,11 +659,15 @@ impl TtParser {
         }
     }
 
-    fn ambiguity_error(&self, token_span: rustc_span::Span) -> NamedParseResult {
+    fn ambiguity_error(
+        &self,
+        matcher: &[MatcherLoc],
+        token_span: rustc_span::Span,
+    ) -> NamedParseResult {
         let nts = self
             .bb_mps
             .iter()
-            .map(|mp| match &self.locs[mp.idx] {
+            .map(|mp| match &matcher[mp.idx] {
                 MatcherLoc::MetaVarDecl { bind, kind: Some(kind), .. } => {
                     format!("{} ('{}')", kind, bind)
                 }
@@ -702,12 +693,13 @@ impl TtParser {
     fn nameize<I: Iterator<Item = NamedMatch>>(
         &self,
         sess: &ParseSess,
+        matcher: &[MatcherLoc],
         mut res: I,
     ) -> NamedParseResult {
         // Make that each metavar has _exactly one_ binding. If so, insert the binding into the
         // `NamedParseResult`. Otherwise, it's an error.
         let mut ret_val = FxHashMap::default();
-        for loc in self.locs.iter() {
+        for loc in matcher {
             if let &MatcherLoc::MetaVarDecl { span, bind, kind, .. } = loc {
                 if kind.is_some() {
                     match ret_val.entry(MacroRulesNormalizedIdent::new(bind)) {
@@ -4,7 +4,7 @@ use crate::expand::{ensure_complete_parse, parse_ast_fragment, AstFragment, AstF
 use crate::mbe;
 use crate::mbe::macro_check;
 use crate::mbe::macro_parser::{Error, ErrorReported, Failure, Success, TtParser};
-use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree};
+use crate::mbe::macro_parser::{MatchedSeq, MatchedTokenTree, MatcherLoc};
 use crate::mbe::transcribe::transcribe;
 
 use rustc_ast as ast;
@@ -159,7 +159,7 @@ struct MacroRulesMacroExpander {
     name: Ident,
     span: Span,
     transparency: Transparency,
-    lhses: Vec<mbe::TokenTree>,
+    lhses: Vec<Vec<MatcherLoc>>,
     rhses: Vec<mbe::TokenTree>,
     valid: bool,
     is_local: bool,
@@ -210,7 +210,7 @@ fn generic_extension<'cx, 'tt>(
     name: Ident,
     transparency: Transparency,
     arg: TokenStream,
-    lhses: &'tt [mbe::TokenTree],
+    lhses: &'tt [Vec<MatcherLoc>],
     rhses: &'tt [mbe::TokenTree],
     is_local: bool,
 ) -> Box<dyn MacResult + 'cx> {
@@ -245,14 +245,6 @@ fn generic_extension<'cx, 'tt>(
     // this situation.)
     let parser = parser_from_cx(sess, arg.clone());
 
-    // A matcher is always delimited, but the delimiters are ignored.
-    let delimited_inner_tts = |tt: &'tt mbe::TokenTree| -> &'tt [mbe::TokenTree] {
-        match tt {
-            mbe::TokenTree::Delimited(_, delimited) => delimited.inner_tts(),
-            _ => cx.span_bug(sp, "malformed macro lhs"),
-        }
-    };
-
     // Try each arm's matchers.
     let mut tt_parser = TtParser::new(name);
     for (i, lhs) in lhses.iter().enumerate() {
@@ -262,13 +254,19 @@ fn generic_extension<'cx, 'tt>(
         // are not recorded. On the first `Success(..)`ful matcher, the spans are merged.
         let mut gated_spans_snapshot = mem::take(&mut *sess.gated_spans.spans.borrow_mut());
 
-        match tt_parser.parse_tt(&mut Cow::Borrowed(&parser), delimited_inner_tts(lhs)) {
+        match tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs) {
             Success(named_matches) => {
                 // The matcher was `Success(..)`ful.
                 // Merge the gated spans from parsing the matcher with the pre-existing ones.
                 sess.gated_spans.merge(gated_spans_snapshot);
 
-                let rhs = delimited_inner_tts(&rhses[i]).to_vec().clone();
+                // Ignore the delimiters on the RHS.
+                let rhs = match &rhses[i] {
+                    mbe::TokenTree::Delimited(_, delimited) => {
+                        delimited.inner_tts().to_vec().clone()
+                    }
+                    _ => cx.span_bug(sp, "malformed macro rhs"),
+                };
                 let arm_span = rhses[i].span();
 
                 let rhs_spans = rhs.iter().map(|t| t.span()).collect::<Vec<_>>();
@@ -346,10 +344,8 @@ fn generic_extension<'cx, 'tt>(
     // Check whether there's a missing comma in this macro call, like `println!("{}" a);`
     if let Some((arg, comma_span)) = arg.add_comma() {
         for lhs in lhses {
-            if let Success(_) = tt_parser.parse_tt(
-                &mut Cow::Borrowed(&parser_from_cx(sess, arg.clone())),
-                delimited_inner_tts(lhs),
-            ) {
+            let parser = parser_from_cx(sess, arg.clone());
+            if let Success(_) = tt_parser.parse_tt(&mut Cow::Borrowed(&parser), lhs) {
                 if comma_span.is_dummy() {
                     err.note("you might be missing a comma");
                 } else {
@@ -440,6 +436,8 @@ pub fn compile_declarative_macro(
             }),
         ),
     ];
+    // Convert it into `MatcherLoc` form.
+    let argument_gram = mbe::macro_parser::compute_locs(&sess.parse_sess, &argument_gram);
 
     let parser = Parser::new(&sess.parse_sess, body, true, rustc_parse::MACRO_ARGUMENTS);
     let mut tt_parser = TtParser::new(def.ident);
@@ -536,6 +534,25 @@ pub fn compile_declarative_macro(
         None => {}
     }
 
+    // Convert the lhses into `MatcherLoc` form, which is better for doing the
+    // actual matching. Unless the matcher is invalid.
+    let lhses = if valid {
+        lhses
+            .iter()
+            .map(|lhs| {
+                // Ignore the delimiters around the matcher.
+                match lhs {
+                    mbe::TokenTree::Delimited(_, delimited) => {
+                        mbe::macro_parser::compute_locs(&sess.parse_sess, delimited.inner_tts())
+                    }
+                    _ => sess.parse_sess.span_diagnostic.span_bug(def.span, "malformed macro lhs"),
+                }
+            })
+            .collect()
+    } else {
+        vec![]
+    };
+
     mk_syn_ext(Box::new(MacroRulesMacroExpander {
         name: def.ident,
         span: def.span,