1
Fork 0

rustc_expand: remove ref patterns

This commit is contained in:
Maybe Waffle 2022-12-23 17:34:23 +00:00
parent fc6cda8603
commit bddbf38af2
8 changed files with 75 additions and 82 deletions

View file

@@ -63,21 +63,21 @@ pub enum Annotatable { impl Annotatable { impl Annotatable {
impl Annotatable { impl Annotatable {
pub fn span(&self) -> Span { pub fn span(&self) -> Span {
match *self { match self {
Annotatable::Item(ref item) => item.span, Annotatable::Item(item) => item.span,
Annotatable::TraitItem(ref trait_item) => trait_item.span, Annotatable::TraitItem(trait_item) => trait_item.span,
Annotatable::ImplItem(ref impl_item) => impl_item.span, Annotatable::ImplItem(impl_item) => impl_item.span,
Annotatable::ForeignItem(ref foreign_item) => foreign_item.span, Annotatable::ForeignItem(foreign_item) => foreign_item.span,
Annotatable::Stmt(ref stmt) => stmt.span, Annotatable::Stmt(stmt) => stmt.span,
Annotatable::Expr(ref expr) => expr.span, Annotatable::Expr(expr) => expr.span,
Annotatable::Arm(ref arm) => arm.span, Annotatable::Arm(arm) => arm.span,
Annotatable::ExprField(ref field) => field.span, Annotatable::ExprField(field) => field.span,
Annotatable::PatField(ref fp) => fp.pat.span, Annotatable::PatField(fp) => fp.pat.span,
Annotatable::GenericParam(ref gp) => gp.ident.span, Annotatable::GenericParam(gp) => gp.ident.span,
Annotatable::Param(ref p) => p.span, Annotatable::Param(p) => p.span,
Annotatable::FieldDef(ref sf) => sf.span, Annotatable::FieldDef(sf) => sf.span,
Annotatable::Variant(ref v) => v.span, Annotatable::Variant(v) => v.span,
Annotatable::Crate(ref c) => c.spans.inner_span, Annotatable::Crate(c) => c.spans.inner_span,
} }
} }

View file

@@ -298,7 +298,7 @@ impl<'a> StripUnconfigured<'a> {
Some(AttrTokenTree::Delimited(sp, delim, inner)) Some(AttrTokenTree::Delimited(sp, delim, inner))
.into_iter() .into_iter()
} }
AttrTokenTree::Token(ref token, _) if let TokenKind::Interpolated(ref nt) = token.kind => { AttrTokenTree::Token(ref token, _) if let TokenKind::Interpolated(nt) = &token.kind => {
panic!( panic!(
"Nonterminal should have been flattened at {:?}: {:?}", "Nonterminal should have been flattened at {:?}: {:?}",
token.span, nt token.span, nt

View file

@@ -144,12 +144,12 @@ macro_rules! ast_fragments {
} }
pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) { pub fn visit_with<'a, V: Visitor<'a>>(&'a self, visitor: &mut V) {
match *self { match self {
AstFragment::OptExpr(Some(ref expr)) => visitor.visit_expr(expr), AstFragment::OptExpr(Some(expr)) => visitor.visit_expr(expr),
AstFragment::OptExpr(None) => {} AstFragment::OptExpr(None) => {}
AstFragment::MethodReceiverExpr(ref expr) => visitor.visit_method_receiver_expr(expr), AstFragment::MethodReceiverExpr(expr) => visitor.visit_method_receiver_expr(expr),
$($(AstFragment::$Kind(ref ast) => visitor.$visit_ast(ast),)?)* $($(AstFragment::$Kind(ast) => visitor.$visit_ast(ast),)?)*
$($(AstFragment::$Kind(ref ast) => for ast_elt in &ast[..] { $($(AstFragment::$Kind(ast) => for ast_elt in &ast[..] {
visitor.$visit_ast_elt(ast_elt, $($args)*); visitor.$visit_ast_elt(ast_elt, $($args)*);
})?)* })?)*
} }
@@ -592,7 +592,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
let expn_id = invoc.expansion_data.id; let expn_id = invoc.expansion_data.id;
let parent_def = self.cx.resolver.invocation_parent(expn_id); let parent_def = self.cx.resolver.invocation_parent(expn_id);
let span = match &mut invoc.kind { let span = match &mut invoc.kind {
InvocationKind::Bang { ref mut span, .. } => span, InvocationKind::Bang { span, .. } => span,
InvocationKind::Attr { attr, .. } => &mut attr.span, InvocationKind::Attr { attr, .. } => &mut attr.span,
InvocationKind::Derive { path, .. } => &mut path.span, InvocationKind::Derive { path, .. } => &mut path.span,
}; };
@@ -945,8 +945,8 @@ pub fn ensure_complete_parse<'a>(
let def_site_span = parser.token.span.with_ctxt(SyntaxContext::root()); let def_site_span = parser.token.span.with_ctxt(SyntaxContext::root());
let semi_span = parser.sess.source_map().next_point(span); let semi_span = parser.sess.source_map().next_point(span);
let add_semicolon = match parser.sess.source_map().span_to_snippet(semi_span) { let add_semicolon = match &parser.sess.source_map().span_to_snippet(semi_span) {
Ok(ref snippet) if &snippet[..] != ";" && kind_name == "expression" => { Ok(snippet) if &snippet[..] != ";" && kind_name == "expression" => {
Some(span.shrink_to_hi()) Some(span.shrink_to_hi())
} }
_ => None, _ => None,

View file

@@ -151,9 +151,9 @@ impl<'a, T> Iterator for &'a Stack<'a, T> {
// Iterates from top to bottom of the stack. // Iterates from top to bottom of the stack.
fn next(&mut self) -> Option<&'a T> { fn next(&mut self) -> Option<&'a T> {
match *self { match self {
Stack::Empty => None, Stack::Empty => None,
Stack::Push { ref top, ref prev } => { Stack::Push { top, prev } => {
*self = prev; *self = prev;
Some(top) Some(top)
} }
@@ -437,8 +437,8 @@ fn check_nested_occurrences(
// We check that the meta-variable is correctly used. // We check that the meta-variable is correctly used.
check_occurrences(sess, node_id, tt, macros, binders, ops, valid); check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
} }
(NestedMacroState::MacroRulesNotName, &TokenTree::Delimited(_, ref del)) (NestedMacroState::MacroRulesNotName, TokenTree::Delimited(_, del))
| (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del)) | (NestedMacroState::MacroName, TokenTree::Delimited(_, del))
if del.delim == Delimiter::Brace => if del.delim == Delimiter::Brace =>
{ {
let macro_rules = state == NestedMacroState::MacroRulesNotName; let macro_rules = state == NestedMacroState::MacroRulesNotName;
@@ -497,7 +497,7 @@ fn check_nested_occurrences(
valid, valid,
); );
} }
(_, ref tt) => { (_, tt) => {
state = NestedMacroState::Empty; state = NestedMacroState::Empty;
check_occurrences(sess, node_id, tt, macros, binders, ops, valid); check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
} }

View file

@@ -486,11 +486,11 @@ pub fn compile_declarative_macro(
let mut valid = true; let mut valid = true;
// Extract the arguments: // Extract the arguments:
let lhses = match argument_map[&MacroRulesNormalizedIdent::new(lhs_nm)] { let lhses = match &argument_map[&MacroRulesNormalizedIdent::new(lhs_nm)] {
MatchedSeq(ref s) => s MatchedSeq(s) => s
.iter() .iter()
.map(|m| { .map(|m| {
if let MatchedTokenTree(ref tt) = *m { if let MatchedTokenTree(tt) = m {
let tt = mbe::quoted::parse( let tt = mbe::quoted::parse(
TokenStream::new(vec![tt.clone()]), TokenStream::new(vec![tt.clone()]),
true, true,
@@ -510,11 +510,11 @@ pub fn compile_declarative_macro(
_ => sess.parse_sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"), _ => sess.parse_sess.span_diagnostic.span_bug(def.span, "wrong-structured lhs"),
}; };
let rhses = match argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] { let rhses = match &argument_map[&MacroRulesNormalizedIdent::new(rhs_nm)] {
MatchedSeq(ref s) => s MatchedSeq(s) => s
.iter() .iter()
.map(|m| { .map(|m| {
if let MatchedTokenTree(ref tt) = *m { if let MatchedTokenTree(tt) = m {
return mbe::quoted::parse( return mbe::quoted::parse(
TokenStream::new(vec![tt.clone()]), TokenStream::new(vec![tt.clone()]),
false, false,
@@ -624,21 +624,21 @@ fn check_lhs_nt_follows(sess: &ParseSess, def: &ast::Item, lhs: &mbe::TokenTree)
fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool { fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
use mbe::TokenTree; use mbe::TokenTree;
for tt in tts { for tt in tts {
match *tt { match tt {
TokenTree::Token(..) TokenTree::Token(..)
| TokenTree::MetaVar(..) | TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..) | TokenTree::MetaVarDecl(..)
| TokenTree::MetaVarExpr(..) => (), | TokenTree::MetaVarExpr(..) => (),
TokenTree::Delimited(_, ref del) => { TokenTree::Delimited(_, del) => {
if !check_lhs_no_empty_seq(sess, &del.tts) { if !check_lhs_no_empty_seq(sess, &del.tts) {
return false; return false;
} }
} }
TokenTree::Sequence(span, ref seq) => { TokenTree::Sequence(span, seq) => {
if seq.separator.is_none() if seq.separator.is_none()
&& seq.tts.iter().all(|seq_tt| match *seq_tt { && seq.tts.iter().all(|seq_tt| match seq_tt {
TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)) => true, TokenTree::MetaVarDecl(_, _, Some(NonterminalKind::Vis)) => true,
TokenTree::Sequence(_, ref sub_seq) => { TokenTree::Sequence(_, sub_seq) => {
sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
|| sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne || sub_seq.kleene.op == mbe::KleeneOp::ZeroOrOne
} }
@@ -736,21 +736,21 @@ impl<'tt> FirstSets<'tt> {
fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> { fn build_recur<'tt>(sets: &mut FirstSets<'tt>, tts: &'tt [TokenTree]) -> TokenSet<'tt> {
let mut first = TokenSet::empty(); let mut first = TokenSet::empty();
for tt in tts.iter().rev() { for tt in tts.iter().rev() {
match *tt { match tt {
TokenTree::Token(..) TokenTree::Token(..)
| TokenTree::MetaVar(..) | TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..) | TokenTree::MetaVarDecl(..)
| TokenTree::MetaVarExpr(..) => { | TokenTree::MetaVarExpr(..) => {
first.replace_with(TtHandle::TtRef(tt)); first.replace_with(TtHandle::TtRef(tt));
} }
TokenTree::Delimited(span, ref delimited) => { TokenTree::Delimited(span, delimited) => {
build_recur(sets, &delimited.tts); build_recur(sets, &delimited.tts);
first.replace_with(TtHandle::from_token_kind( first.replace_with(TtHandle::from_token_kind(
token::OpenDelim(delimited.delim), token::OpenDelim(delimited.delim),
span.open, span.open,
)); ));
} }
TokenTree::Sequence(sp, ref seq_rep) => { TokenTree::Sequence(sp, seq_rep) => {
let subfirst = build_recur(sets, &seq_rep.tts); let subfirst = build_recur(sets, &seq_rep.tts);
match sets.first.entry(sp.entire()) { match sets.first.entry(sp.entire()) {
@@ -804,7 +804,7 @@ impl<'tt> FirstSets<'tt> {
let mut first = TokenSet::empty(); let mut first = TokenSet::empty();
for tt in tts.iter() { for tt in tts.iter() {
assert!(first.maybe_empty); assert!(first.maybe_empty);
match *tt { match tt {
TokenTree::Token(..) TokenTree::Token(..)
| TokenTree::MetaVar(..) | TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..) | TokenTree::MetaVarDecl(..)
@@ -812,14 +812,14 @@ impl<'tt> FirstSets<'tt> {
first.add_one(TtHandle::TtRef(tt)); first.add_one(TtHandle::TtRef(tt));
return first; return first;
} }
TokenTree::Delimited(span, ref delimited) => { TokenTree::Delimited(span, delimited) => {
first.add_one(TtHandle::from_token_kind( first.add_one(TtHandle::from_token_kind(
token::OpenDelim(delimited.delim), token::OpenDelim(delimited.delim),
span.open, span.open,
)); ));
return first; return first;
} }
TokenTree::Sequence(sp, ref seq_rep) => { TokenTree::Sequence(sp, seq_rep) => {
let subfirst_owned; let subfirst_owned;
let subfirst = match self.first.get(&sp.entire()) { let subfirst = match self.first.get(&sp.entire()) {
Some(Some(subfirst)) => subfirst, Some(Some(subfirst)) => subfirst,
@@ -1041,7 +1041,7 @@ fn check_matcher_core<'tt>(
// First, update `last` so that it corresponds to the set // First, update `last` so that it corresponds to the set
// of NT tokens that might end the sequence `... token`. // of NT tokens that might end the sequence `... token`.
match *token { match token {
TokenTree::Token(..) TokenTree::Token(..)
| TokenTree::MetaVar(..) | TokenTree::MetaVar(..)
| TokenTree::MetaVarDecl(..) | TokenTree::MetaVarDecl(..)
@@ -1057,7 +1057,7 @@ fn check_matcher_core<'tt>(
suffix_first = build_suffix_first(); suffix_first = build_suffix_first();
} }
} }
TokenTree::Delimited(span, ref d) => { TokenTree::Delimited(span, d) => {
let my_suffix = TokenSet::singleton(TtHandle::from_token_kind( let my_suffix = TokenSet::singleton(TtHandle::from_token_kind(
token::CloseDelim(d.delim), token::CloseDelim(d.delim),
span.close, span.close,
@@ -1070,7 +1070,7 @@ fn check_matcher_core<'tt>(
// against SUFFIX // against SUFFIX
continue 'each_token; continue 'each_token;
} }
TokenTree::Sequence(_, ref seq_rep) => { TokenTree::Sequence(_, seq_rep) => {
suffix_first = build_suffix_first(); suffix_first = build_suffix_first();
// The trick here: when we check the interior, we want // The trick here: when we check the interior, we want
// to include the separator (if any) as a potential // to include the separator (if any) as a potential
@@ -1372,8 +1372,8 @@ fn is_in_follow(tok: &mbe::TokenTree, kind: NonterminalKind) -> IsInFollow {
} }
fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String { fn quoted_tt_to_string(tt: &mbe::TokenTree) -> String {
match *tt { match tt {
mbe::TokenTree::Token(ref token) => pprust::token_to_string(&token).into(), mbe::TokenTree::Token(token) => pprust::token_to_string(&token).into(),
mbe::TokenTree::MetaVar(_, name) => format!("${}", name), mbe::TokenTree::MetaVar(_, name) => format!("${}", name),
mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${}:{}", name, kind), mbe::TokenTree::MetaVarDecl(_, name, Some(kind)) => format!("${}:{}", name, kind),
mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${}:", name), mbe::TokenTree::MetaVarDecl(_, name, None) => format!("${}:", name),

View file

@@ -47,8 +47,7 @@ impl<'a> Iterator for Frame<'a> {
fn next(&mut self) -> Option<&'a mbe::TokenTree> { fn next(&mut self) -> Option<&'a mbe::TokenTree> {
match self { match self {
Frame::Delimited { tts, ref mut idx, .. } Frame::Delimited { tts, idx, .. } | Frame::Sequence { tts, idx, .. } => {
| Frame::Sequence { tts, ref mut idx, .. } => {
let res = tts.get(*idx); let res = tts.get(*idx);
*idx += 1; *idx += 1;
res res
@@ -220,13 +219,13 @@ pub(super) fn transcribe<'a>(
let ident = MacroRulesNormalizedIdent::new(original_ident); let ident = MacroRulesNormalizedIdent::new(original_ident);
if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) { if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
match cur_matched { match cur_matched {
MatchedTokenTree(ref tt) => { MatchedTokenTree(tt) => {
// `tt`s are emitted into the output stream directly as "raw tokens", // `tt`s are emitted into the output stream directly as "raw tokens",
// without wrapping them into groups. // without wrapping them into groups.
let token = tt.clone(); let token = tt.clone();
result.push(token); result.push(token);
} }
MatchedNonterminal(ref nt) => { MatchedNonterminal(nt) => {
// Other variables are emitted into the output stream as groups with // Other variables are emitted into the output stream as groups with
// `Delimiter::Invisible` to maintain parsing priorities. // `Delimiter::Invisible` to maintain parsing priorities.
// `Interpolated` is currently used for such groups in rustc parser. // `Interpolated` is currently used for such groups in rustc parser.
@@ -299,12 +298,11 @@ fn lookup_cur_matched<'a>(
interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>, interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
repeats: &[(usize, usize)], repeats: &[(usize, usize)],
) -> Option<&'a NamedMatch> { ) -> Option<&'a NamedMatch> {
interpolations.get(&ident).map(|matched| { interpolations.get(&ident).map(|mut matched| {
let mut matched = matched;
for &(idx, _) in repeats { for &(idx, _) in repeats {
match matched { match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => break, MatchedTokenTree(_) | MatchedNonterminal(_) => break,
MatchedSeq(ref ads) => matched = ads.get(idx).unwrap(), MatchedSeq(ads) => matched = ads.get(idx).unwrap(),
} }
} }
@@ -339,7 +337,7 @@ impl LockstepIterSize {
match self { match self {
LockstepIterSize::Unconstrained => other, LockstepIterSize::Unconstrained => other,
LockstepIterSize::Contradiction(_) => self, LockstepIterSize::Contradiction(_) => self,
LockstepIterSize::Constraint(l_len, ref l_id) => match other { LockstepIterSize::Constraint(l_len, l_id) => match other {
LockstepIterSize::Unconstrained => self, LockstepIterSize::Unconstrained => self,
LockstepIterSize::Contradiction(_) => other, LockstepIterSize::Contradiction(_) => other,
LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self, LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
@@ -378,33 +376,33 @@ fn lockstep_iter_size(
repeats: &[(usize, usize)], repeats: &[(usize, usize)],
) -> LockstepIterSize { ) -> LockstepIterSize {
use mbe::TokenTree; use mbe::TokenTree;
match *tree { match tree {
TokenTree::Delimited(_, ref delimited) => { TokenTree::Delimited(_, delimited) => {
delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| { delimited.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
size.with(lockstep_iter_size(tt, interpolations, repeats)) size.with(lockstep_iter_size(tt, interpolations, repeats))
}) })
} }
TokenTree::Sequence(_, ref seq) => { TokenTree::Sequence(_, seq) => {
seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| { seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
size.with(lockstep_iter_size(tt, interpolations, repeats)) size.with(lockstep_iter_size(tt, interpolations, repeats))
}) })
} }
TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => { TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
let name = MacroRulesNormalizedIdent::new(name); let name = MacroRulesNormalizedIdent::new(*name);
match lookup_cur_matched(name, interpolations, repeats) { match lookup_cur_matched(name, interpolations, repeats) {
Some(matched) => match matched { Some(matched) => match matched {
MatchedTokenTree(_) | MatchedNonterminal(_) => LockstepIterSize::Unconstrained, MatchedTokenTree(_) | MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
MatchedSeq(ref ads) => LockstepIterSize::Constraint(ads.len(), name), MatchedSeq(ads) => LockstepIterSize::Constraint(ads.len(), name),
}, },
_ => LockstepIterSize::Unconstrained, _ => LockstepIterSize::Unconstrained,
} }
} }
TokenTree::MetaVarExpr(_, ref expr) => { TokenTree::MetaVarExpr(_, expr) => {
let default_rslt = LockstepIterSize::Unconstrained; let default_rslt = LockstepIterSize::Unconstrained;
let Some(ident) = expr.ident() else { return default_rslt; }; let Some(ident) = expr.ident() else { return default_rslt; };
let name = MacroRulesNormalizedIdent::new(ident); let name = MacroRulesNormalizedIdent::new(ident);
match lookup_cur_matched(name, interpolations, repeats) { match lookup_cur_matched(name, interpolations, repeats) {
Some(MatchedSeq(ref ads)) => { Some(MatchedSeq(ads)) => {
default_rslt.with(LockstepIterSize::Constraint(ads.len(), name)) default_rslt.with(LockstepIterSize::Constraint(ads.len(), name))
} }
_ => default_rslt, _ => default_rslt,
@@ -449,7 +447,7 @@ fn count_repetitions<'a>(
Some(_) => Err(out_of_bounds_err(cx, declared_lhs_depth, sp.entire(), "count")), Some(_) => Err(out_of_bounds_err(cx, declared_lhs_depth, sp.entire(), "count")),
} }
} }
MatchedSeq(ref named_matches) => { MatchedSeq(named_matches) => {
let new_declared_lhs_depth = declared_lhs_depth + 1; let new_declared_lhs_depth = declared_lhs_depth + 1;
match depth_opt { match depth_opt {
None => named_matches None => named_matches
@@ -472,7 +470,7 @@ fn count_repetitions<'a>(
// before we start counting. `matched` contains the various levels of the // before we start counting. `matched` contains the various levels of the
// tree as we descend, and its final value is the subtree we are currently at. // tree as we descend, and its final value is the subtree we are currently at.
for &(idx, _) in repeats { for &(idx, _) in repeats {
if let MatchedSeq(ref ads) = matched { if let MatchedSeq(ads) = matched {
matched = &ads[idx]; matched = &ads[idx];
} }
} }

View file

@@ -176,9 +176,9 @@ fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
} }
impl<'a> visit::Visitor<'a> for PatIdentVisitor { impl<'a> visit::Visitor<'a> for PatIdentVisitor {
fn visit_pat(&mut self, p: &'a ast::Pat) { fn visit_pat(&mut self, p: &'a ast::Pat) {
match p.kind { match &p.kind {
PatKind::Ident(_, ref ident, _) => { PatKind::Ident(_, ident, _) => {
self.spans.push(ident.span.clone()); self.spans.push(ident.span);
} }
_ => { _ => {
visit::walk_pat(self, p); visit::walk_pat(self, p);
@@ -290,10 +290,8 @@ fn ttdelim_span() {
) )
.unwrap(); .unwrap();
let tts: Vec<_> = match expr.kind { let ast::ExprKind::MacCall(mac) = &expr.kind else { panic!("not a macro") };
ast::ExprKind::MacCall(ref mac) => mac.args.tokens.clone().into_trees().collect(), let tts: Vec<_> = mac.args.tokens.clone().into_trees().collect();
_ => panic!("not a macro"),
};
let span = tts.iter().rev().next().unwrap().span(); let span = tts.iter().rev().next().unwrap().span();
@@ -318,11 +316,8 @@ fn out_of_line_mod() {
.unwrap() .unwrap()
.unwrap(); .unwrap();
if let ast::ItemKind::Mod(_, ref mod_kind) = item.kind { let ast::ItemKind::Mod(_, mod_kind) = &item.kind else { panic!() };
assert!(matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2)); assert!(matches!(mod_kind, ast::ModKind::Loaded(items, ..) if items.len() == 2));
} else {
panic!();
}
}); });
} }

View file

@@ -597,8 +597,8 @@ impl server::SourceFile for Rustc<'_, '_> {
} }
fn path(&mut self, file: &Self::SourceFile) -> String { fn path(&mut self, file: &Self::SourceFile) -> String {
match file.name { match &file.name {
FileName::Real(ref name) => name FileName::Real(name) => name
.local_path() .local_path()
.expect("attempting to get a file path in an imported file in `proc_macro::SourceFile::path`") .expect("attempting to get a file path in an imported file in `proc_macro::SourceFile::path`")
.to_str() .to_str()