Implement edition-based macro pat feature

parent 2987785df3
commit 40bf3c0f09

12 changed files with 151 additions and 76 deletions

@@ -77,9 +77,9 @@ use TokenTreeOrTokenTreeSlice::*;
 use crate::mbe::{self, TokenTree};
 
 use rustc_ast::token::{self, DocComment, Nonterminal, Token};
-use rustc_parse::parser::{OrPatNonterminalMode, Parser};
+use rustc_parse::parser::Parser;
 use rustc_session::parse::ParseSess;
-use rustc_span::{edition::Edition, symbol::MacroRulesNormalizedIdent};
+use rustc_span::symbol::MacroRulesNormalizedIdent;
 
 use smallvec::{smallvec, SmallVec};
 

@@ -419,18 +419,6 @@ fn token_name_eq(t1: &Token, t2: &Token) -> bool {
     }
 }
 
-/// In edition 2015/18, `:pat` can only match `pat<no_top_alt>` because otherwise, we have
-/// breakage. As of edition 2021, `:pat` matches `top_pat`.
-///
-/// See <https://github.com/rust-lang/rust/issues/54883> for more info.
-fn or_pat_mode(edition: Edition) -> OrPatNonterminalMode {
-    match edition {
-        Edition::Edition2015 | Edition::Edition2018 => OrPatNonterminalMode::NoTopAlt,
-        // FIXME(mark-i-m): uncomment this when edition 2021 machinery is added.
-        // Edition::Edition2021 => OrPatNonterminalMode::TopPat,
-    }
-}
-
 /// Process the matcher positions of `cur_items` until it is empty. In the process, this will
 /// produce more items in `next_items`, `eof_items`, and `bb_items`.
 ///
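
The doc comment removed here still explains the motivation for the whole series: on editions 2015 and 2018, `:pat` deliberately refuses top-level or-patterns so that `|` can keep acting as a separator after a pattern fragment (see rust-lang/rust#54883, cited above). A small self-contained illustration of that concern, assuming the crate is compiled as edition 2018; `matches_either!` is a made-up macro, not part of this patch:

// Illustration only (not from the patch): existing macros rely on `|` being a
// separator that *follows* a `:pat` fragment, so `$a` below matches `Some(0)`
// and stops at the `|` instead of swallowing a whole or-pattern.
macro_rules! matches_either {
    ($x:expr, $a:pat | $b:pat) => {
        matches!($x, $a) || matches!($x, $b)
    };
}

fn main() {
    // Two separate fragments, not one `Some(0) | None` or-pattern.
    assert!(matches_either!(None::<i32>, Some(0) | None));
}
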
@@ -578,14 +566,13 @@ fn inner_parse_loop<'root, 'tt>(
 
                 // We need to match a metavar with a valid ident... call out to the black-box
                 // parser by adding an item to `bb_items`.
-                TokenTree::MetaVarDecl(span, _, Some(kind)) => {
+                TokenTree::MetaVarDecl(_, _, Some(kind)) => {
                     // Built-in nonterminals never start with these tokens, so we can eliminate
                     // them from consideration.
-                    //
-                    // We use the span of the metavariable declaration to determine any
-                    // edition-specific matching behavior for non-terminals.
-                    if Parser::nonterminal_may_begin_with(kind, token, or_pat_mode(span.edition()))
-                    {
+                    if Parser::nonterminal_may_begin_with(kind, token) {
                         bb_items.push(item);
                     }
                 }

@@ -749,8 +736,7 @@ pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> Na
                 let match_cur = item.match_cur;
-                // We use the span of the metavariable declaration to determine any
-                // edition-specific matching behavior for non-terminals.
-                let nt = match parser.to_mut().parse_nonterminal(kind, or_pat_mode(span.edition()))
-                {
+                let nt = match parser.to_mut().parse_nonterminal(kind) {
                     Err(mut err) => {
                         err.span_label(
                             span,
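
Taken together, the macro_parser.rs hunks drop the edition-derived `OrPatNonterminalMode` argument from the black-box parser calls; the edition-specific behaviour is instead carried by the nonterminal kind itself (the `Pat2018`/`Pat2021` kinds matched on further down). A standalone sketch of that shift, using made-up stand-ins rather than rustc's real types:

// Model only: the kind chosen at *declaration* time already encodes the
// edition-specific behaviour, so matching needs no extra "mode" argument.
#[derive(Clone, Copy, PartialEq)]
enum NonterminalKind {
    Ident,
    Pat2018, // stand-in for the 2015/2018-style pattern fragment
    Pat2021, // stand-in for the 2021-style pattern fragment
}

#[derive(Clone, Copy, PartialEq)]
enum Token {
    Ident,
    OpenParen,
    Or, // `|`
}

// Analogue of `Parser::nonterminal_may_begin_with(kind, token)` after this
// commit: one `kind` argument, no separate or-pattern mode.
fn nonterminal_may_begin_with(kind: NonterminalKind, token: Token) -> bool {
    match kind {
        NonterminalKind::Ident => token == Token::Ident,
        // No top-level or-patterns: a leading `|` cannot start the fragment.
        NonterminalKind::Pat2018 => matches!(token, Token::Ident | Token::OpenParen),
        // Top-level or-patterns allowed, so a leading `|` is acceptable.
        NonterminalKind::Pat2021 => matches!(token, Token::Ident | Token::OpenParen | Token::Or),
    }
}

fn main() {
    assert!(!nonterminal_may_begin_with(NonterminalKind::Pat2018, Token::Or));
    assert!(nonterminal_may_begin_with(NonterminalKind::Pat2021, Token::Or));
}
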
@@ -476,10 +476,15 @@ pub fn compile_declarative_macro(
             .map(|m| {
                 if let MatchedNonterminal(ref nt) = *m {
                     if let NtTT(ref tt) = **nt {
-                        let tt =
-                            mbe::quoted::parse(tt.clone().into(), true, &sess.parse_sess, def.id)
-                                .pop()
-                                .unwrap();
+                        let tt = mbe::quoted::parse(
+                            tt.clone().into(),
+                            true,
+                            &sess.parse_sess,
+                            def.id,
+                            features,
+                        )
+                        .pop()
+                        .unwrap();
                         valid &= check_lhs_nt_follows(&sess.parse_sess, features, &def.attrs, &tt);
                         return tt;
                     }

@@ -501,6 +506,7 @@ pub fn compile_declarative_macro(
                 false,
                 &sess.parse_sess,
                 def.id,
+                features,
             )
             .pop()
             .unwrap();

@@ -1090,7 +1096,7 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
                 _ => IsInFollow::No(TOKENS),
             }
         }
-        NonterminalKind::Pat => {
+        NonterminalKind::Pat2018 { .. } | NonterminalKind::Pat2021 { .. } => {
             const TOKENS: &[&str] = &["`=>`", "`,`", "`=`", "`|`", "`if`", "`in`"];
             match tok {
                 TokenTree::Token(token) => match token.kind {
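
Both new pattern kinds keep the follow set that the old `NonterminalKind::Pat` arm used, so `|` may still follow a pattern fragment in a matcher. A sketch of how the gated specifiers are meant to be written, assuming a nightly toolchain from around this commit with the `edition_macro_pats` gate and the `pat2018`/`pat2021` names exactly as introduced here (they are not stable syntax):

#![feature(edition_macro_pats)]

macro_rules! demo {
    // `|` is in FOLLOW(pat), so it may still separate two pattern fragments.
    ($a:pat2018 | $b:pat2018) => { "two 2018-style fragments" };
    // A 2021-style fragment is intended to match a top-level or-pattern whole.
    ($p:pat2021) => { "one 2021-style fragment" };
}

fn main() {
    // Matches the first rule: `$a` = `Some(1)`, `$b` = `None`.
    println!("{}", demo!(Some(1) | None));
}
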
@@ -5,8 +5,9 @@ use rustc_ast::token::{self, Token};
 use rustc_ast::tokenstream;
 use rustc_ast::{NodeId, DUMMY_NODE_ID};
 use rustc_ast_pretty::pprust;
-use rustc_session::parse::ParseSess;
-use rustc_span::symbol::{kw, Ident};
+use rustc_feature::Features;
+use rustc_session::parse::{feature_err, ParseSess};
+use rustc_span::symbol::{kw, sym, Ident};
 
 use rustc_span::Span;
 
@@ -29,10 +30,8 @@ const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
 /// `ident` are "matchers". They are not present in the body of a macro rule -- just in the
 /// pattern, so we pass a parameter to indicate whether to expect them or not.
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
-/// - `features`, `attrs`: language feature flags and attributes so that we know whether to use
-///   unstable features or not.
-/// - `edition`: which edition are we in.
-/// - `macro_node_id`: the NodeId of the macro we are parsing.
+/// - `node_id`: the NodeId of the macro we are parsing.
+/// - `features`: language features so we can do feature gating.
 ///
 /// # Returns
 ///

@@ -42,6 +41,7 @@ pub(super) fn parse(
     expect_matchers: bool,
     sess: &ParseSess,
     node_id: NodeId,
+    features: &Features,
 ) -> Vec<TokenTree> {
     // Will contain the final collection of `self::TokenTree`
     let mut result = Vec::new();

@@ -52,7 +52,7 @@ pub(super) fn parse(
     while let Some(tree) = trees.next() {
         // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
         // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
-        let tree = parse_tree(tree, &mut trees, expect_matchers, sess, node_id);
+        let tree = parse_tree(tree, &mut trees, expect_matchers, sess, node_id, features);
         match tree {
             TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {

@@ -61,18 +61,39 @@ pub(super) fn parse(
                 Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
                     Some((frag, _)) => {
                         let span = token.span.with_lo(start_sp.lo());
-                        let kind = token::NonterminalKind::from_symbol(frag.name)
-                            .unwrap_or_else(|| {
-                                let msg = format!(
-                                    "invalid fragment specifier `{}`",
-                                    frag.name
-                                );
-                                sess.span_diagnostic
-                                    .struct_span_err(span, &msg)
-                                    .help(VALID_FRAGMENT_NAMES_MSG)
-                                    .emit();
-                                token::NonterminalKind::Ident
-                            });
+
+                        match frag.name {
+                            sym::pat2018 | sym::pat2021 => {
+                                if !features.edition_macro_pats {
+                                    feature_err(
+                                        sess,
+                                        sym::edition_macro_pats,
+                                        frag.span,
+                                        "`pat2018` and `pat2021` are unstable.",
+                                    )
+                                    .emit();
+                                }
+                            }
+                            _ => {}
+                        }
+
+                        let kind =
+                            token::NonterminalKind::from_symbol(frag.name, || {
+                                span.edition()
+                            })
+                            .unwrap_or_else(
+                                || {
+                                    let msg = format!(
+                                        "invalid fragment specifier `{}`",
+                                        frag.name
+                                    );
+                                    sess.span_diagnostic
+                                        .struct_span_err(span, &msg)
+                                        .help(VALID_FRAGMENT_NAMES_MSG)
+                                        .emit();
+                                    token::NonterminalKind::Ident
+                                },
+                            );
                         result.push(TokenTree::MetaVarDecl(span, ident, Some(kind)));
                         continue;
                     }
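
Two things change in this hunk: the explicit `pat2018`/`pat2021` specifiers are gated behind the `edition_macro_pats` feature, and `NonterminalKind::from_symbol` now takes a closure yielding the declaration span's edition, so a bare `pat` can resolve differently per edition. A standalone sketch of that resolution logic with made-up types; note the real code emits a diagnostic and continues rather than returning an error:

#[derive(Clone, Copy, Debug, PartialEq)]
enum Edition { E2015, E2018, E2021 }

#[derive(Clone, Copy, Debug, PartialEq)]
enum NonterminalKind { Ident, Pat2018, Pat2021 }

struct Features { edition_macro_pats: bool }

// Analogue of `from_symbol(frag.name, || span.edition())`: the edition is
// supplied lazily and only consulted for names that need it.
fn from_symbol(name: &str, edition: impl FnOnce() -> Edition) -> Option<NonterminalKind> {
    match name {
        "ident" => Some(NonterminalKind::Ident),
        "pat" => Some(match edition() {
            Edition::E2015 | Edition::E2018 => NonterminalKind::Pat2018,
            Edition::E2021 => NonterminalKind::Pat2021,
        }),
        "pat2018" => Some(NonterminalKind::Pat2018),
        "pat2021" => Some(NonterminalKind::Pat2021),
        _ => None,
    }
}

fn resolve(name: &str, edition: Edition, features: &Features) -> Result<NonterminalKind, String> {
    // Mirror of the `feature_err` gate: the explicit names are unstable.
    if matches!(name, "pat2018" | "pat2021") && !features.edition_macro_pats {
        return Err(format!("`{}` requires the `edition_macro_pats` feature", name));
    }
    from_symbol(name, || edition).ok_or_else(|| format!("invalid fragment specifier `{}`", name))
}

fn main() {
    let on = Features { edition_macro_pats: true };
    assert_eq!(resolve("pat", Edition::E2018, &on), Ok(NonterminalKind::Pat2018));
    assert_eq!(resolve("pat", Edition::E2021, &on), Ok(NonterminalKind::Pat2021));
    assert!(resolve("pat2021", Edition::E2021, &Features { edition_macro_pats: false }).is_err());
}
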
@@ -110,14 +131,14 @@
 /// converting `tree`
 /// - `expect_matchers`: same as for `parse` (see above).
 /// - `sess`: the parsing session. Any errors will be emitted to this session.
-/// - `features`, `attrs`: language feature flags and attributes so that we know whether to use
-///   unstable features or not.
+/// - `features`: language features so we can do feature gating.
 fn parse_tree(
     tree: tokenstream::TokenTree,
     outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
     expect_matchers: bool,
     sess: &ParseSess,
     node_id: NodeId,
+    features: &Features,
 ) -> TokenTree {
     // Depending on what `tree` is, we could be parsing different parts of a macro
     match tree {

@@ -145,7 +166,7 @@ fn parse_tree(
                 sess.span_diagnostic.span_err(span.entire(), &msg);
             }
             // Parse the contents of the sequence itself
-            let sequence = parse(tts, expect_matchers, sess, node_id);
+            let sequence = parse(tts, expect_matchers, sess, node_id, features);
             // Get the Kleene operator and optional separator
             let (separator, kleene) =
                 parse_sep_and_kleene_op(&mut trees, span.entire(), sess);

@@ -196,7 +217,10 @@ fn parse_tree(
         // descend into the delimited set and further parse it.
         tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
             span,
-            Lrc::new(Delimited { delim, tts: parse(tts, expect_matchers, sess, node_id) }),
+            Lrc::new(Delimited {
+                delim,
+                tts: parse(tts, expect_matchers, sess, node_id, features),
+            }),
         ),
     }
 }
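
The remaining hunks are the mechanical part: the `features` reference is threaded from `compile_declarative_macro` through `parse` and `parse_tree` into every recursive call (sequences and delimited groups), so the gate check can run wherever a fragment specifier is encountered. A minimal sketch of that threading pattern over a toy token tree; all names here are illustrative, not rustc's:

// Threading a read-only `Features` reference through a recursive tree parser,
// as the quoted.rs hunks do for sequences and delimited groups.
#[derive(Debug)]
enum RawTree {
    Leaf(String),
    Group(Vec<RawTree>),
}

#[derive(Debug)]
enum TokenTree {
    Token(String),
    Delimited(Vec<TokenTree>),
}

struct Features {
    edition_macro_pats: bool,
}

fn parse(trees: Vec<RawTree>, features: &Features) -> Vec<TokenTree> {
    trees.into_iter().map(|tree| parse_tree(tree, features)).collect()
}

fn parse_tree(tree: RawTree, features: &Features) -> TokenTree {
    match tree {
        // Leaves are where per-fragment checks (like the gate above) would run.
        RawTree::Leaf(s) => {
            if (s == "pat2018" || s == "pat2021") && !features.edition_macro_pats {
                eprintln!("`{}` is feature gated", s);
            }
            TokenTree::Token(s)
        }
        // Groups recurse, passing the same `features` borrow straight through,
        // mirroring the `Delimited { delim, tts: parse(.., features) }` change.
        RawTree::Group(inner) => TokenTree::Delimited(parse(inner, features)),
    }
}

fn main() {
    let features = Features { edition_macro_pats: false };
    let input = vec![RawTree::Group(vec![RawTree::Leaf("pat2021".to_string())])];
    println!("{:?}", parse(input, &features));
}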