Add check for ui_testing via promoting parameters from ParseSess to Session

George-lewis 2024-01-10 00:37:30 -05:00
parent b55faad314
commit 36a69e9d39
44 changed files with 188 additions and 233 deletions
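
The change is mechanical: helpers that previously took `&ParseSess` now take the whole `&Session` and reach parse-level state through it (`sess.dcx()`, `sess.parse_sess.buffer_lint_with_diagnostic(...)`), so call sites pass `&sess` instead of `&sess.parse_sess`. Per the commit title, this also lets such helpers consult session-level options like ui_testing. Below is a minimal, self-contained sketch of that pattern; `DiagCtxt`, the `dcx()` accessor, the `ui_testing` field, and the `check_rhs_*` helpers are simplified stand-ins for illustration, not the real rustc definitions.

// Simplified stand-ins for illustration only; the real rustc `Session` and
// `ParseSess` carry far more state and live in `rustc_session`.
struct DiagCtxt;

impl DiagCtxt {
    fn span_err(&self, msg: &str) {
        eprintln!("error: {msg}");
    }
}

struct ParseSess {
    dcx: DiagCtxt,
}

struct Session {
    parse_sess: ParseSess,
    // Session-level option that a `ParseSess`-only helper could not see;
    // the field name is hypothetical, standing in for options like `-Z ui-testing`.
    ui_testing: bool,
}

impl Session {
    // Mirrors the `sess.dcx()` accessor used throughout the diff.
    fn dcx(&self) -> &DiagCtxt {
        &self.parse_sess.dcx
    }
}

// Before: the helper could only see the parse session.
fn check_rhs_old(sess: &ParseSess) -> bool {
    sess.dcx.span_err("macro rhs must be delimited");
    false
}

// After: the helper takes the whole session, reaches diagnostics via `dcx()`,
// and could now also consult session-level options such as `ui_testing`.
fn check_rhs_new(sess: &Session) -> bool {
    let _ = sess.ui_testing;
    sess.dcx().span_err("macro rhs must be delimited");
    false
}

fn main() {
    let sess = Session { parse_sess: ParseSess { dcx: DiagCtxt }, ui_testing: false };
    check_rhs_old(&sess.parse_sess); // old call shape: pass `&sess.parse_sess`
    check_rhs_new(&sess); // new call shape: pass `&sess`
}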

View file

@@ -1150,7 +1150,7 @@ impl<'a> ExtCtxt<'a> {
///
/// This unifies the logic used for resolving `include_X!`.
pub fn resolve_path(
- parse_sess: &ParseSess,
+ parse_sess: &Session,
path: impl Into<PathBuf>,
span: Span,
) -> PResult<'_, PathBuf> {
@@ -1166,7 +1166,7 @@ pub fn resolve_path(
.expect("attempting to resolve a file path in an external file"),
FileName::DocTest(path, _) => path,
other => {
- return Err(parse_sess.dcx.create_err(errors::ResolveRelativePath {
+ return Err(parse_sess.dcx().create_err(errors::ResolveRelativePath {
span,
path: parse_sess.source_map().filename_for_diagnostics(&other).to_string(),
}));
@@ -1390,7 +1390,7 @@ pub fn parse_macro_name_and_helper_attrs(
/// asserts in old versions of those crates and their wide use in the ecosystem.
/// See issue #73345 for more details.
/// FIXME(#73933): Remove this eventually.
- fn pretty_printing_compatibility_hack(item: &Item, sess: &ParseSess) -> bool {
+ fn pretty_printing_compatibility_hack(item: &Item, sess: &Session) -> bool {
let name = item.ident.name;
if name == sym::ProceduralMasqueradeDummyType {
if let ast::ItemKind::Enum(enum_def, _) = &item.kind {
@@ -1418,7 +1418,7 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &ParseSess) -> bool {
};
if crate_matches {
- sess.buffer_lint_with_diagnostic(
+ sess.parse_sess.buffer_lint_with_diagnostic(
PROC_MACRO_BACK_COMPAT,
item.ident.span,
ast::CRATE_NODE_ID,
@@ -1439,7 +1439,7 @@ fn pretty_printing_compatibility_hack(item: &Item, sess: &ParseSess) -> bool {
false
}
- pub(crate) fn ann_pretty_printing_compatibility_hack(ann: &Annotatable, sess: &ParseSess) -> bool {
+ pub(crate) fn ann_pretty_printing_compatibility_hack(ann: &Annotatable, sess: &Session) -> bool {
let item = match ann {
Annotatable::Item(item) => item,
Annotatable::Stmt(stmt) => match &stmt.kind {
@@ -1451,7 +1451,7 @@ pub(crate) fn ann_pretty_printing_compatibility_hack(ann: &Annotatable, sess: &P
pretty_printing_compatibility_hack(item, sess)
}
- pub(crate) fn nt_pretty_printing_compatibility_hack(nt: &Nonterminal, sess: &ParseSess) -> bool {
+ pub(crate) fn nt_pretty_printing_compatibility_hack(nt: &Nonterminal, sess: &Session) -> bool {
let item = match nt {
Nonterminal::NtItem(item) => item,
Nonterminal::NtStmt(stmt) => match &stmt.kind {

View file

@@ -256,12 +256,7 @@ impl<'a> StripUnconfigured<'a> {
);
}
- if !attr::cfg_matches(
- &cfg_predicate,
- &self.sess.parse_sess,
- self.lint_node_id,
- self.features,
- ) {
+ if !attr::cfg_matches(&cfg_predicate, &self.sess, self.lint_node_id, self.features) {
return vec![];
}
@@ -369,12 +364,7 @@ impl<'a> StripUnconfigured<'a> {
};
(
parse_cfg(&meta_item, self.sess).map_or(true, |meta_item| {
- attr::cfg_matches(
- meta_item,
- &self.sess.parse_sess,
- self.lint_node_id,
- self.features,
- )
+ attr::cfg_matches(meta_item, &self.sess, self.lint_node_id, self.features)
}),
Some(meta_item),
)
@@ -385,7 +375,7 @@ impl<'a> StripUnconfigured<'a> {
pub(crate) fn maybe_emit_expr_attr_err(&self, attr: &Attribute) {
if self.features.is_some_and(|features| !features.stmt_expr_attributes) {
let mut err = feature_err(
- &self.sess.parse_sess,
+ &self.sess,
sym::stmt_expr_attributes,
attr.span,
"attributes on expressions are experimental",

View file

@@ -30,8 +30,8 @@ use rustc_parse::parser::{
use rustc_parse::validate_attr;
use rustc_session::lint::builtin::{UNUSED_ATTRIBUTES, UNUSED_DOC_COMMENTS};
use rustc_session::lint::BuiltinLintDiagnostics;
- use rustc_session::parse::{feature_err, ParseSess};
- use rustc_session::Limit;
+ use rustc_session::parse::feature_err;
+ use rustc_session::{Limit, Session};
use rustc_span::symbol::{sym, Ident};
use rustc_span::{FileName, LocalExpnId, Span};
@@ -800,7 +800,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
return;
}
feature_err(
- &self.cx.sess.parse_sess,
+ &self.cx.sess,
sym::proc_macro_hygiene,
span,
format!("custom attributes cannot be applied to {kind}"),
@@ -810,7 +810,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
fn gate_proc_macro_input(&self, annotatable: &Annotatable) {
struct GateProcMacroInput<'a> {
- parse_sess: &'a ParseSess,
+ sess: &'a Session,
}
impl<'ast, 'a> Visitor<'ast> for GateProcMacroInput<'a> {
@@ -820,7 +820,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
if !matches!(mod_kind, ModKind::Loaded(_, Inline::Yes, _)) =>
{
feature_err(
- self.parse_sess,
+ self.sess,
sym::proc_macro_hygiene,
item.span,
"non-inline modules in proc macro input are unstable",
@@ -835,8 +835,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
}
if !self.cx.ecfg.features.proc_macro_hygiene {
- annotatable
- .visit_with(&mut GateProcMacroInput { parse_sess: &self.cx.sess.parse_sess });
+ annotatable.visit_with(&mut GateProcMacroInput { sess: &self.cx.sess });
}
}

View file

@@ -477,14 +477,14 @@ pub fn compile_declarative_macro(
let tt = mbe::quoted::parse(
&TokenStream::new(vec![tt.clone()]),
true,
- &sess.parse_sess,
+ sess,
def.id,
features,
edition,
)
.pop()
.unwrap();
- valid &= check_lhs_nt_follows(&sess.parse_sess, def, &tt);
+ valid &= check_lhs_nt_follows(sess, def, &tt);
return tt;
}
sess.dcx().span_bug(def.span, "wrong-structured lhs")
@@ -501,7 +501,7 @@ pub fn compile_declarative_macro(
return mbe::quoted::parse(
&TokenStream::new(vec![tt.clone()]),
false,
- &sess.parse_sess,
+ sess,
def.id,
features,
edition,
@@ -516,12 +516,12 @@ pub fn compile_declarative_macro(
};
for rhs in &rhses {
- valid &= check_rhs(&sess.parse_sess, rhs);
+ valid &= check_rhs(sess, rhs);
}
// don't abort iteration early, so that errors for multiple lhses can be reported
for lhs in &lhses {
- valid &= check_lhs_no_empty_seq(&sess.parse_sess, slice::from_ref(lhs));
+ valid &= check_lhs_no_empty_seq(sess, slice::from_ref(lhs));
}
valid &= macro_check::check_meta_variables(&sess.parse_sess, def.id, def.span, &lhses, &rhses);
@@ -588,21 +588,21 @@ pub fn compile_declarative_macro(
(mk_syn_ext(expander), rule_spans)
}
- fn check_lhs_nt_follows(sess: &ParseSess, def: &ast::Item, lhs: &mbe::TokenTree) -> bool {
+ fn check_lhs_nt_follows(sess: &Session, def: &ast::Item, lhs: &mbe::TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
if let mbe::TokenTree::Delimited(.., delimited) = lhs {
check_matcher(sess, def, &delimited.tts)
} else {
let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
- sess.dcx.span_err(lhs.span(), msg);
+ sess.dcx().span_err(lhs.span(), msg);
false
}
// we don't abort on errors on rejection, the driver will do that for us
// after parsing/expansion. we can report every error in every macro this way.
}
- fn is_empty_token_tree(sess: &ParseSess, seq: &mbe::SequenceRepetition) -> bool {
+ fn is_empty_token_tree(sess: &Session, seq: &mbe::SequenceRepetition) -> bool {
if seq.separator.is_some() {
false
} else {
@@ -621,7 +621,7 @@ fn is_empty_token_tree(sess: &ParseSess, seq: &mbe::SequenceRepetition) -> bool
iter.next();
}
let span = t.span.to(now.span);
- sess.dcx.span_note(span, "doc comments are ignored in matcher position");
+ sess.dcx().span_note(span, "doc comments are ignored in matcher position");
}
mbe::TokenTree::Sequence(_, sub_seq)
if (sub_seq.kleene.op == mbe::KleeneOp::ZeroOrMore
@@ -635,7 +635,7 @@ fn is_empty_token_tree(sess: &ParseSess, seq: &mbe::SequenceRepetition) -> bool
/// Checks that the lhs contains no repetition which could match an empty token
/// tree, because then the matcher would hang indefinitely.
- fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
+ fn check_lhs_no_empty_seq(sess: &Session, tts: &[mbe::TokenTree]) -> bool {
use mbe::TokenTree;
for tt in tts {
match tt {
@@ -651,7 +651,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
TokenTree::Sequence(span, seq) => {
if is_empty_token_tree(sess, seq) {
let sp = span.entire();
- sess.dcx.span_err(sp, "repetition matches empty token tree");
+ sess.dcx().span_err(sp, "repetition matches empty token tree");
return false;
}
if !check_lhs_no_empty_seq(sess, &seq.tts) {
@@ -664,22 +664,22 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[mbe::TokenTree]) -> bool {
true
}
- fn check_rhs(sess: &ParseSess, rhs: &mbe::TokenTree) -> bool {
+ fn check_rhs(sess: &Session, rhs: &mbe::TokenTree) -> bool {
match *rhs {
mbe::TokenTree::Delimited(..) => return true,
_ => {
- sess.dcx.span_err(rhs.span(), "macro rhs must be delimited");
+ sess.dcx().span_err(rhs.span(), "macro rhs must be delimited");
}
}
false
}
- fn check_matcher(sess: &ParseSess, def: &ast::Item, matcher: &[mbe::TokenTree]) -> bool {
+ fn check_matcher(sess: &Session, def: &ast::Item, matcher: &[mbe::TokenTree]) -> bool {
let first_sets = FirstSets::new(matcher);
let empty_suffix = TokenSet::empty();
- let err = sess.dcx.err_count();
+ let err = sess.dcx().err_count();
check_matcher_core(sess, def, &first_sets, matcher, &empty_suffix);
- err == sess.dcx.err_count()
+ err == sess.dcx().err_count()
}
fn has_compile_error_macro(rhs: &mbe::TokenTree) -> bool {
@@ -1014,7 +1014,7 @@ impl<'tt> TokenSet<'tt> {
// Requires that `first_sets` is pre-computed for `matcher`;
// see `FirstSets::new`.
fn check_matcher_core<'tt>(
- sess: &ParseSess,
+ sess: &Session,
def: &ast::Item,
first_sets: &FirstSets<'tt>,
matcher: &'tt [mbe::TokenTree],
@@ -1139,7 +1139,7 @@ fn check_matcher_core<'tt>(
name,
Some(NonterminalKind::PatParam { inferred: false }),
));
- sess.buffer_lint_with_diagnostic(
+ sess.parse_sess.buffer_lint_with_diagnostic(
RUST_2021_INCOMPATIBLE_OR_PATTERNS,
span,
ast::CRATE_NODE_ID,
@@ -1158,7 +1158,7 @@ fn check_matcher_core<'tt>(
};
let sp = next_token.span();
- let mut err = sess.dcx.struct_span_err(
+ let mut err = sess.dcx().struct_span_err(
sp,
format!(
"`${name}:{frag}` {may_be} followed by `{next}`, which \
@@ -1172,7 +1172,7 @@ fn check_matcher_core<'tt>(
err.span_label(sp, format!("not allowed after `{kind}` fragments"));
if kind == NonterminalKind::PatWithOr
- && sess.edition.at_least_rust_2021()
+ && sess.parse_sess.edition.at_least_rust_2021()
&& next_token.is_token(&BinOp(token::BinOpToken::Or))
{
let suggestion = quoted_tt_to_string(&TokenTree::MetaVarDecl(

View file

@@ -5,7 +5,8 @@ use rustc_ast::token::{self, Delimiter, Token};
use rustc_ast::{tokenstream, NodeId};
use rustc_ast_pretty::pprust;
use rustc_feature::Features;
- use rustc_session::parse::{feature_err, ParseSess};
+ use rustc_session::parse::feature_err;
+ use rustc_session::Session;
use rustc_span::symbol::{kw, sym, Ident};
use rustc_span::edition::Edition;
@@ -38,7 +39,7 @@ const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
pub(super) fn parse(
input: &tokenstream::TokenStream,
parsing_patterns: bool,
- sess: &ParseSess,
+ sess: &Session,
node_id: NodeId,
features: &Features,
edition: Edition,
@@ -84,7 +85,7 @@ pub(super) fn parse(
"invalid fragment specifier `{}`",
frag.name
);
- sess.dcx
+ sess.dcx()
.struct_span_err(span, msg)
.with_help(VALID_FRAGMENT_NAMES_MSG)
.emit();
@@ -113,7 +114,7 @@ pub(super) fn parse(
}
/// Asks for the `macro_metavar_expr` feature if it is not already declared
- fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &ParseSess, span: Span) {
+ fn maybe_emit_macro_metavar_expr_feature(features: &Features, sess: &Session, span: Span) {
if !features.macro_metavar_expr {
let msg = "meta-variable expressions are unstable";
feature_err(sess, sym::macro_metavar_expr, span, msg).emit();
@@ -138,7 +139,7 @@ fn parse_tree<'a>(
tree: &'a tokenstream::TokenTree,
outer_trees: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
parsing_patterns: bool,
- sess: &ParseSess,
+ sess: &Session,
node_id: NodeId,
features: &Features,
edition: Edition,
@@ -174,7 +175,8 @@ fn parse_tree<'a>(
// The delimiter is `{`. This indicates the beginning
// of a meta-variable expression (e.g. `${count(ident)}`).
// Try to parse the meta-variable expression.
- match MetaVarExpr::parse(tts, delim_span.entire(), sess) {
+ match MetaVarExpr::parse(tts, delim_span.entire(), &sess.parse_sess)
+ {
Err(err) => {
err.emit();
// Returns early the same read `$` to avoid spanning
@@ -195,7 +197,7 @@ fn parse_tree<'a>(
_ => {
let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
let msg = format!("expected `(` or `{{`, found `{tok}`");
- sess.dcx.span_err(delim_span.entire(), msg);
+ sess.dcx().span_err(delim_span.entire(), msg);
}
}
}
@@ -244,7 +246,7 @@ fn parse_tree<'a>(
Some(tokenstream::TokenTree::Token(token, _)) => {
let msg =
format!("expected identifier, found `{}`", pprust::token_to_string(token),);
- sess.dcx.span_err(token.span, msg);
+ sess.dcx().span_err(token.span, msg);
TokenTree::MetaVar(token.span, Ident::empty())
}
@@ -313,7 +315,7 @@ fn parse_kleene_op<'a>(
fn parse_sep_and_kleene_op<'a>(
input: &mut impl Iterator<Item = &'a tokenstream::TokenTree>,
span: Span,
- sess: &ParseSess,
+ sess: &Session,
) -> (Option<Token>, KleeneToken) {
// We basically look at two token trees here, denoted as #1 and #2 below
let span = match parse_kleene_op(input, span) {
@@ -325,7 +327,7 @@ fn parse_sep_and_kleene_op<'a>(
// #2 is the `?` Kleene op, which does not take a separator (error)
Ok(Ok((KleeneOp::ZeroOrOne, span))) => {
// Error!
- sess.dcx.span_err(
+ sess.dcx().span_err(
token.span,
"the `?` macro repetition operator does not take a separator",
);
@@ -346,7 +348,7 @@ fn parse_sep_and_kleene_op<'a>(
};
// If we ever get to this point, we have experienced an "unexpected token" error
- sess.dcx.span_err(span, "expected one of: `*`, `+`, or `?`");
+ sess.dcx().span_err(span, "expected one of: `*`, `+`, or `?`");
// Return a dummy
(None, KleeneToken::new(KleeneOp::ZeroOrMore, span))
@@ -355,9 +357,10 @@ fn parse_sep_and_kleene_op<'a>(
// `$$` or a meta-variable is the lhs of a macro but shouldn't.
//
// For example, `macro_rules! foo { ( ${length()} ) => {} }`
- fn span_dollar_dollar_or_metavar_in_the_lhs_err(sess: &ParseSess, token: &Token) {
- sess.dcx.span_err(token.span, format!("unexpected token: {}", pprust::token_to_string(token)));
- sess.dcx.span_note(
+ fn span_dollar_dollar_or_metavar_in_the_lhs_err(sess: &Session, token: &Token) {
+ sess.dcx()
+ .span_err(token.span, format!("unexpected token: {}", pprust::token_to_string(token)));
+ sess.dcx().span_note(
token.span,
"`$$` and meta-variable expressions are not allowed inside macro parameter definitions",
);

View file

@@ -119,7 +119,7 @@ impl MultiItemModifier for DeriveProcMacro {
// We need special handling for statement items
// (e.g. `fn foo() { #[derive(Debug)] struct Bar; }`)
let is_stmt = matches!(item, Annotatable::Stmt(..));
- let hack = crate::base::ann_pretty_printing_compatibility_hack(&item, &ecx.sess.parse_sess);
+ let hack = crate::base::ann_pretty_printing_compatibility_hack(&item, &ecx.sess);
let input = if hack {
let nt = match item {
Annotatable::Item(item) => token::NtItem(item),

View file

@@ -258,7 +258,7 @@ impl FromInternal<(TokenStream, &mut Rustc<'_, '_>)> for Vec<TokenTree<TokenStre
// represented as a delimited group.
// FIXME: It needs to be removed, but there are some
// compatibility issues (see #73345).
- if crate::base::nt_pretty_printing_compatibility_hack(&nt.0, rustc.sess()) {
+ if crate::base::nt_pretty_printing_compatibility_hack(&nt.0, rustc.ecx.sess) {
trees.extend(Self::from_internal((stream, rustc)));
} else {
trees.push(TokenTree::Group(Group {