Rollup merge of #125530 - SparrowLii:expand2, r=petrochenkov
cleanup dependence of `ExtCtxt` in `transcribe` during macro expansion

Part of #125356. We can remove `transcribe`'s dependence on `ExtCtxt` to facilitate subsequent work (such as moving macro expansion into the incremental compilation system).

r? ```@petrochenkov```

Thanks for reviewing!
commit f50b4f5034
2 changed files with 31 additions and 32 deletions
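At a glance, the shape of the cleanup: `transcribe` used to take the whole `ExtCtxt` and pull a diagnostics handle and the current expansion id out of it; after this change it takes only the narrow pieces it actually uses (a `&ParseSess` and a `LocalExpnId`), and the caller extracts them. Below is a minimal, self-contained sketch of that pattern; every name in it (`ExpansionCtxt`, `Diagnostics`, `ExpansionId`, `transcribe_old`, `transcribe_new`) is an illustrative stand-in, not one of rustc's real types.

```rust
// Hypothetical sketch of the pattern in this PR: instead of handing the
// callee the whole expansion context, pass only the pieces it actually uses.

#[derive(Clone, Copy, Debug)]
struct ExpansionId(u32);

struct Diagnostics;

impl Diagnostics {
    fn error(&self, msg: &str) -> String {
        format!("error: {msg}")
    }
}

// The broad context object the function used to depend on.
struct ExpansionCtxt {
    diagnostics: Diagnostics,
    current_expansion: ExpansionId,
}

// Before: the callee needs the whole context, even though it only touches
// two fields of it.
fn transcribe_old(ecx: &ExpansionCtxt, input: &str) -> Result<String, String> {
    if input.is_empty() {
        return Err(ecx.diagnostics.error("nothing to transcribe"));
    }
    Ok(format!("{input} @ {:?}", ecx.current_expansion))
}

// After: the callee asks only for what it uses; behaviour is unchanged.
fn transcribe_new(
    diagnostics: &Diagnostics,
    expand_id: ExpansionId,
    input: &str,
) -> Result<String, String> {
    if input.is_empty() {
        return Err(diagnostics.error("nothing to transcribe"));
    }
    Ok(format!("{input} @ {expand_id:?}"))
}

fn main() {
    let ecx = ExpansionCtxt { diagnostics: Diagnostics, current_expansion: ExpansionId(0) };

    // Old call site: the whole context goes in.
    let old = transcribe_old(&ecx, "$x + 1");

    // New call site: the caller extracts the pieces itself, mirroring the
    // `let id = cx.current_expansion.id;` line added in `expand_macro` below.
    let id = ecx.current_expansion;
    let new = transcribe_new(&ecx.diagnostics, id, "$x + 1");

    assert_eq!(old, new);
    println!("{new:?}");
}
```

The real change, shown in the diff below, follows the same shape: compare the added `let id = cx.current_expansion.id;` line in `expand_macro` with the new `transcribe(psess, ..., id)` call.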
compiler/rustc_expand/src/mbe/macro_rules.rs
@@ -223,7 +223,8 @@ fn expand_macro<'cx>(
             let arm_span = rhses[i].span();
 
             // rhs has holes ( `$id` and `$(...)` that need filled)
-            let tts = match transcribe(cx, &named_matches, rhs, rhs_span, transparency) {
+            let id = cx.current_expansion.id;
+            let tts = match transcribe(psess, &named_matches, rhs, rhs_span, transparency, id) {
                 Ok(tts) => tts,
                 Err(err) => {
                     let guar = err.emit();
compiler/rustc_expand/src/mbe/transcribe.rs
@@ -1,4 +1,3 @@
-use crate::base::ExtCtxt;
 use crate::errors::{
     CountRepetitionMisplaced, MetaVarExprUnrecognizedVar, MetaVarsDifSeqMatchers, MustRepeatOnce,
     NoSyntaxVarsExprRepeat, VarStillRepeating,
@@ -9,12 +8,13 @@ use rustc_ast::mut_visit::{self, MutVisitor};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{DelimSpacing, DelimSpan, Spacing, TokenStream, TokenTree};
 use rustc_data_structures::fx::FxHashMap;
-use rustc_errors::{pluralize, Diag, PResult};
+use rustc_errors::{pluralize, Diag, DiagCtxt, PResult};
 use rustc_parse::parser::ParseNtResult;
 use rustc_span::hygiene::{LocalExpnId, Transparency};
 use rustc_span::symbol::{sym, Ident, MacroRulesNormalizedIdent};
 use rustc_span::{with_metavar_spans, Span, SyntaxContext};
 
+use rustc_session::parse::ParseSess;
 use smallvec::{smallvec, SmallVec};
 use std::mem;
 
@@ -99,11 +99,12 @@ impl<'a> Iterator for Frame<'a> {
 ///
 /// Along the way, we do some additional error checking.
 pub(super) fn transcribe<'a>(
-    cx: &ExtCtxt<'a>,
+    psess: &'a ParseSess,
     interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
     src: &mbe::Delimited,
     src_span: DelimSpan,
     transparency: Transparency,
+    expand_id: LocalExpnId,
 ) -> PResult<'a, TokenStream> {
     // Nothing for us to transcribe...
     if src.tts.is_empty() {
@@ -137,8 +138,9 @@ pub(super) fn transcribe<'a>(
     // again, and we are done transcribing.
     let mut result: Vec<TokenTree> = Vec::new();
     let mut result_stack = Vec::new();
-    let mut marker = Marker(cx.current_expansion.id, transparency, Default::default());
+    let mut marker = Marker(expand_id, transparency, Default::default());
 
+    let dcx = &psess.dcx;
     loop {
         // Look at the last frame on the stack.
         // If it still has a TokenTree we have not looked at yet, use that tree.
@@ -201,9 +203,7 @@ pub(super) fn transcribe<'a>(
             seq @ mbe::TokenTree::Sequence(_, seq_rep) => {
                 match lockstep_iter_size(seq, interp, &repeats) {
                     LockstepIterSize::Unconstrained => {
-                        return Err(cx
-                            .dcx()
-                            .create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
+                        return Err(dcx.create_err(NoSyntaxVarsExprRepeat { span: seq.span() }));
                     }
 
                     LockstepIterSize::Contradiction(msg) => {
@@ -211,9 +211,9 @@ pub(super) fn transcribe<'a>(
                         // happens when two meta-variables are used in the same repetition in a
                         // sequence, but they come from different sequence matchers and repeat
                         // different amounts.
-                        return Err(cx
-                            .dcx()
-                            .create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg }));
+                        return Err(
+                            dcx.create_err(MetaVarsDifSeqMatchers { span: seq.span(), msg })
+                        );
                     }
 
                     LockstepIterSize::Constraint(len, _) => {
@@ -227,9 +227,7 @@ pub(super) fn transcribe<'a>(
                                 // FIXME: this really ought to be caught at macro definition
                                 // time... It happens when the Kleene operator in the matcher and
                                 // the body for the same meta-variable do not match.
-                                return Err(cx
-                                    .dcx()
-                                    .create_err(MustRepeatOnce { span: sp.entire() }));
+                                return Err(dcx.create_err(MustRepeatOnce { span: sp.entire() }));
                             }
                         } else {
                             // 0 is the initial counter (we have done 0 repetitions so far). `len`
@@ -274,7 +272,7 @@ pub(super) fn transcribe<'a>(
                         MatchedSingle(ParseNtResult::Tt(tt)) => {
                             // `tt`s are emitted into the output stream directly as "raw tokens",
                             // without wrapping them into groups.
-                            maybe_use_metavar_location(cx, &stack, sp, tt, &mut marker)
+                            maybe_use_metavar_location(psess, &stack, sp, tt, &mut marker)
                         }
                         MatchedSingle(ParseNtResult::Ident(ident, is_raw)) => {
                             marker.visit_span(&mut sp);
@@ -295,7 +293,7 @@ pub(super) fn transcribe<'a>(
                         }
                         MatchedSeq(..) => {
                             // We were unable to descend far enough. This is an error.
-                            return Err(cx.dcx().create_err(VarStillRepeating { span: sp, ident }));
+                            return Err(dcx.create_err(VarStillRepeating { span: sp, ident }));
                         }
                     };
                     result.push(tt)
@@ -314,7 +312,7 @@ pub(super) fn transcribe<'a>(
 
             // Replace meta-variable expressions with the result of their expansion.
             mbe::TokenTree::MetaVarExpr(sp, expr) => {
-                transcribe_metavar_expr(cx, expr, interp, &mut marker, &repeats, &mut result, sp)?;
+                transcribe_metavar_expr(dcx, expr, interp, &mut marker, &repeats, &mut result, sp)?;
             }
 
             // If we are entering a new delimiter, we push its contents to the `stack` to be
@@ -374,7 +372,7 @@ pub(super) fn transcribe<'a>(
 /// combine with each other and not with tokens outside of the sequence.
 /// - The metavariable span comes from a different crate, then we prefer the more local span.
 fn maybe_use_metavar_location(
-    cx: &ExtCtxt<'_>,
+    psess: &ParseSess,
     stack: &[Frame<'_>],
     mut metavar_span: Span,
     orig_tt: &TokenTree,
@@ -412,7 +410,7 @@ fn maybe_use_metavar_location(
                 && insert(mspans, dspan.entire(), metavar_span)
         }),
     };
-    if no_collision || cx.source_map().is_imported(metavar_span) {
+    if no_collision || psess.source_map().is_imported(metavar_span) {
         return orig_tt.clone();
     }
 
@@ -573,7 +571,7 @@ fn lockstep_iter_size(
 /// * `[ $( ${count(foo, 1)} ),* ]` will return an error because `${count(foo, 1)}` is
 /// declared inside a single repetition and the index `1` implies two nested repetitions.
 fn count_repetitions<'a>(
-    cx: &ExtCtxt<'a>,
+    dcx: &'a DiagCtxt,
     depth_user: usize,
     mut matched: &NamedMatch,
     repeats: &[(usize, usize)],
@@ -610,7 +608,7 @@ fn count_repetitions<'a>(
         .and_then(|el| el.checked_sub(repeats.len()))
         .unwrap_or_default();
     if depth_user > depth_max {
-        return Err(out_of_bounds_err(cx, depth_max + 1, sp.entire(), "count"));
+        return Err(out_of_bounds_err(dcx, depth_max + 1, sp.entire(), "count"));
     }
 
     // `repeats` records all of the nested levels at which we are currently
@@ -626,7 +624,7 @@ fn count_repetitions<'a>(
     }
 
     if let MatchedSingle(_) = matched {
-        return Err(cx.dcx().create_err(CountRepetitionMisplaced { span: sp.entire() }));
+        return Err(dcx.create_err(CountRepetitionMisplaced { span: sp.entire() }));
     }
 
     count(depth_user, depth_max, matched)
@@ -634,7 +632,7 @@ fn count_repetitions<'a>(
 
 /// Returns a `NamedMatch` item declared on the LHS given an arbitrary [Ident]
 fn matched_from_ident<'ctx, 'interp, 'rslt>(
-    cx: &ExtCtxt<'ctx>,
+    dcx: &'ctx DiagCtxt,
     ident: Ident,
     interp: &'interp FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
 ) -> PResult<'ctx, &'rslt NamedMatch>
@@ -643,12 +641,12 @@ where
 {
     let span = ident.span;
     let key = MacroRulesNormalizedIdent::new(ident);
-    interp.get(&key).ok_or_else(|| cx.dcx().create_err(MetaVarExprUnrecognizedVar { span, key }))
+    interp.get(&key).ok_or_else(|| dcx.create_err(MetaVarExprUnrecognizedVar { span, key }))
 }
 
 /// Used by meta-variable expressions when an user input is out of the actual declared bounds. For
 /// example, index(999999) in an repetition of only three elements.
-fn out_of_bounds_err<'a>(cx: &ExtCtxt<'a>, max: usize, span: Span, ty: &str) -> Diag<'a> {
+fn out_of_bounds_err<'a>(dcx: &'a DiagCtxt, max: usize, span: Span, ty: &str) -> Diag<'a> {
     let msg = if max == 0 {
         format!(
             "meta-variable expression `{ty}` with depth parameter \
@@ -660,11 +658,11 @@ fn out_of_bounds_err<'a>(cx: &ExtCtxt<'a>, max: usize, span: Span, ty: &str) ->
              must be less than {max}"
         )
     };
-    cx.dcx().struct_span_err(span, msg)
+    dcx.struct_span_err(span, msg)
 }
 
 fn transcribe_metavar_expr<'a>(
-    cx: &ExtCtxt<'a>,
+    dcx: &'a DiagCtxt,
     expr: &MetaVarExpr,
     interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
     marker: &mut Marker,
@@ -679,8 +677,8 @@ fn transcribe_metavar_expr<'a>(
     };
     match *expr {
         MetaVarExpr::Count(original_ident, depth) => {
-            let matched = matched_from_ident(cx, original_ident, interp)?;
-            let count = count_repetitions(cx, depth, matched, repeats, sp)?;
+            let matched = matched_from_ident(dcx, original_ident, interp)?;
+            let count = count_repetitions(dcx, depth, matched, repeats, sp)?;
             let tt = TokenTree::token_alone(
                 TokenKind::lit(token::Integer, sym::integer(count), None),
                 visited_span(),
@@ -689,7 +687,7 @@ fn transcribe_metavar_expr<'a>(
         }
         MetaVarExpr::Ignore(original_ident) => {
             // Used to ensure that `original_ident` is present in the LHS
-            let _ = matched_from_ident(cx, original_ident, interp)?;
+            let _ = matched_from_ident(dcx, original_ident, interp)?;
         }
         MetaVarExpr::Index(depth) => match repeats.iter().nth_back(depth) {
             Some((index, _)) => {
@@ -698,7 +696,7 @@ fn transcribe_metavar_expr<'a>(
                     visited_span(),
                 ));
             }
-            None => return Err(out_of_bounds_err(cx, repeats.len(), sp.entire(), "index")),
+            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "index")),
         },
         MetaVarExpr::Len(depth) => match repeats.iter().nth_back(depth) {
             Some((_, length)) => {
@@ -707,7 +705,7 @@ fn transcribe_metavar_expr<'a>(
                     visited_span(),
                 ));
             }
-            None => return Err(out_of_bounds_err(cx, repeats.len(), sp.entire(), "len")),
+            None => return Err(out_of_bounds_err(dcx, repeats.len(), sp.entire(), "len")),
         },
     }
     Ok(())
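The commit message points at moving macro expansion into the incremental compilation system as the longer-term goal. As a rough illustration of why explicit inputs matter for that (this is not rustc's query system): once everything a function depends on is passed in as plain values, those values can serve directly as a cache key. The toy memoizer below is made up for the example; `expand`, `Memo`, and the key shape are illustrative only.

```rust
use std::collections::HashMap;

// A stand-in for an expansion-like computation whose inputs are all explicit.
fn expand(rule: &str, expand_id: u32) -> String {
    format!("{rule}#{expand_id}")
}

// A toy cache keyed on exactly the inputs `expand` consumes.
struct Memo {
    cache: HashMap<(String, u32), String>,
}

impl Memo {
    fn new() -> Self {
        Memo { cache: HashMap::new() }
    }

    // Because nothing is smuggled in through a broad context object, the key
    // fully determines the result and cached entries never go stale.
    fn expand(&mut self, rule: &str, expand_id: u32) -> String {
        self.cache
            .entry((rule.to_string(), expand_id))
            .or_insert_with(|| expand(rule, expand_id))
            .clone()
    }
}

fn main() {
    let mut memo = Memo::new();
    assert_eq!(memo.expand("$x + 1", 0), "$x + 1#0");
    // The second call is served from the cache.
    assert_eq!(memo.expand("$x + 1", 0), "$x + 1#0");
}
```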