Introduce new parsing infrastructure and types for parsed attributes
fixup docs in parser
parent 115b3b03b0
commit dbd3b7928e
30 changed files with 1417 additions and 282 deletions

Cargo.lock (15 changed lines)
|
@ -3207,6 +3207,7 @@ dependencies = [
|
|||
"rustc_abi",
|
||||
"rustc_ast",
|
||||
"rustc_ast_pretty",
|
||||
"rustc_attr_parsing",
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_feature",
|
||||
|
@ -3215,6 +3216,7 @@ dependencies = [
|
|||
"rustc_index",
|
||||
"rustc_macros",
|
||||
"rustc_middle",
|
||||
"rustc_parse",
|
||||
"rustc_session",
|
||||
"rustc_span",
|
||||
"rustc_target",
|
||||
|
@ -3261,16 +3263,11 @@ version = "0.0.0"
|
|||
dependencies = [
|
||||
"rustc_abi",
|
||||
"rustc_ast",
|
||||
"rustc_ast_pretty",
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_feature",
|
||||
"rustc_fluent_macro",
|
||||
"rustc_lexer",
|
||||
"rustc_macros",
|
||||
"rustc_serialize",
|
||||
"rustc_session",
|
||||
"rustc_span",
|
||||
"thin-vec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -3285,11 +3282,13 @@ dependencies = [
|
|||
"rustc_errors",
|
||||
"rustc_feature",
|
||||
"rustc_fluent_macro",
|
||||
"rustc_hir",
|
||||
"rustc_lexer",
|
||||
"rustc_macros",
|
||||
"rustc_serialize",
|
||||
"rustc_session",
|
||||
"rustc_span",
|
||||
"thin-vec",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
|
@ -3690,6 +3689,7 @@ dependencies = [
|
|||
"rustc_abi",
|
||||
"rustc_arena",
|
||||
"rustc_ast",
|
||||
"rustc_attr_data_structures",
|
||||
"rustc_data_structures",
|
||||
"rustc_hashes",
|
||||
"rustc_index",
|
||||
|
@ -3737,6 +3737,7 @@ dependencies = [
|
|||
"rustc_abi",
|
||||
"rustc_ast",
|
||||
"rustc_ast_pretty",
|
||||
"rustc_attr_parsing",
|
||||
"rustc_hir",
|
||||
"rustc_span",
|
||||
]
|
||||
|
@ -4244,6 +4245,7 @@ name = "rustc_query_impl"
|
|||
version = "0.0.0"
|
||||
dependencies = [
|
||||
"measureme",
|
||||
"rustc_attr_data_structures",
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_hashes",
|
||||
|
@ -4266,6 +4268,7 @@ dependencies = [
|
|||
"rustc-rayon-core",
|
||||
"rustc_abi",
|
||||
"rustc_ast",
|
||||
"rustc_attr_data_structures",
|
||||
"rustc_data_structures",
|
||||
"rustc_errors",
|
||||
"rustc_feature",
|
||||
|
|
|
@ -11,6 +11,7 @@ doctest = false
|
|||
rustc_abi = { path = "../rustc_abi" }
|
||||
rustc_ast = { path = "../rustc_ast" }
|
||||
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
|
||||
rustc_attr_parsing = { path = "../rustc_attr_parsing" }
|
||||
rustc_data_structures = { path = "../rustc_data_structures" }
|
||||
rustc_errors = { path = "../rustc_errors" }
|
||||
rustc_feature = { path = "../rustc_feature" }
|
||||
|
@ -19,6 +20,7 @@ rustc_hir = { path = "../rustc_hir" }
|
|||
rustc_index = { path = "../rustc_index" }
|
||||
rustc_macros = { path = "../rustc_macros" }
|
||||
rustc_middle = { path = "../rustc_middle" }
|
||||
rustc_parse = { path = "../rustc_parse" }
|
||||
rustc_session = { path = "../rustc_session" }
|
||||
rustc_span = { path = "../rustc_span" }
|
||||
rustc_target = { path = "../rustc_target" }
|
||||
|
|
|
@ -108,7 +108,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
};
|
||||
let span = self.lower_span(l.span);
|
||||
let source = hir::LocalSource::Normal;
|
||||
self.lower_attrs(hir_id, &l.attrs);
|
||||
self.lower_attrs(hir_id, &l.attrs, l.span);
|
||||
self.arena.alloc(hir::LetStmt { hir_id, ty, pat, init, els, span, source })
|
||||
}
|
||||
|
||||
|
|
|
@ -77,9 +77,8 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
self.attrs.insert(
|
||||
ex.hir_id.local_id,
|
||||
&*self.arena.alloc_from_iter(
|
||||
e.attrs
|
||||
.iter()
|
||||
.map(|a| self.lower_attr(a))
|
||||
self.lower_attrs_vec(&e.attrs, e.span)
|
||||
.into_iter()
|
||||
.chain(old_attrs.iter().cloned()),
|
||||
),
|
||||
);
|
||||
|
@ -98,7 +97,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
}
|
||||
|
||||
let expr_hir_id = self.lower_node_id(e.id);
|
||||
self.lower_attrs(expr_hir_id, &e.attrs);
|
||||
self.lower_attrs(expr_hir_id, &e.attrs, e.span);
|
||||
|
||||
let kind = match &e.kind {
|
||||
ExprKind::Array(exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
|
||||
|
@ -670,7 +669,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
let guard = arm.guard.as_ref().map(|cond| self.lower_expr(cond));
|
||||
let hir_id = self.next_id();
|
||||
let span = self.lower_span(arm.span);
|
||||
self.lower_attrs(hir_id, &arm.attrs);
|
||||
self.lower_attrs(hir_id, &arm.attrs, arm.span);
|
||||
let is_never_pattern = pat.is_never_pattern();
|
||||
let body = if let Some(body) = &arm.body
|
||||
&& !is_never_pattern
|
||||
|
@ -839,6 +838,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
style: AttrStyle::Outer,
|
||||
span: unstable_span,
|
||||
}],
|
||||
span,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
@ -1673,7 +1673,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> {
|
||||
let hir_id = self.lower_node_id(f.id);
|
||||
self.lower_attrs(hir_id, &f.attrs);
|
||||
self.lower_attrs(hir_id, &f.attrs, f.span);
|
||||
hir::ExprField {
|
||||
hir_id,
|
||||
ident: self.lower_ident(f.ident),
|
||||
|
@ -1936,7 +1936,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
//
|
||||
// Also, add the attributes to the outer returned expr node.
|
||||
let expr = self.expr_drop_temps_mut(for_span, match_expr);
|
||||
self.lower_attrs(expr.hir_id, &e.attrs);
|
||||
self.lower_attrs(expr.hir_id, &e.attrs, e.span);
|
||||
expr
|
||||
}
|
||||
|
||||
|
@ -1993,7 +1993,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
let val_ident = Ident::with_dummy_span(sym::val);
|
||||
let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
|
||||
let val_expr = self.expr_ident(span, val_ident, val_pat_nid);
|
||||
self.lower_attrs(val_expr.hir_id, &attrs);
|
||||
self.lower_attrs(val_expr.hir_id, &attrs, span);
|
||||
let continue_pat = self.pat_cf_continue(unstable_span, val_pat);
|
||||
self.arm(continue_pat, val_expr)
|
||||
};
|
||||
|
@ -2024,7 +2024,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
let ret_expr = self.checked_return(Some(from_residual_expr));
|
||||
self.arena.alloc(self.expr(try_span, ret_expr))
|
||||
};
|
||||
self.lower_attrs(ret_expr.hir_id, &attrs);
|
||||
self.lower_attrs(ret_expr.hir_id, &attrs, ret_expr.span);
|
||||
|
||||
let break_pat = self.pat_cf_break(try_span, residual_local);
|
||||
self.arm(break_pat, ret_expr)
|
||||
|
|
|
@ -11,7 +11,7 @@ use rustc_index::{IndexSlice, IndexVec};
|
|||
use rustc_middle::span_bug;
|
||||
use rustc_middle::ty::{ResolverAstLowering, TyCtxt};
|
||||
use rustc_span::edit_distance::find_best_match_for_name;
|
||||
use rustc_span::{DesugaringKind, Ident, Span, Symbol, kw, sym};
|
||||
use rustc_span::{DUMMY_SP, DesugaringKind, Ident, Span, Symbol, kw, sym};
|
||||
use smallvec::{SmallVec, smallvec};
|
||||
use thin_vec::ThinVec;
|
||||
use tracing::instrument;
|
||||
|
@ -93,7 +93,8 @@ impl<'a, 'hir> ItemLowerer<'a, 'hir> {
|
|||
debug_assert_eq!(self.resolver.node_id_to_def_id[&CRATE_NODE_ID], CRATE_DEF_ID);
|
||||
self.with_lctx(CRATE_NODE_ID, |lctx| {
|
||||
let module = lctx.lower_mod(&c.items, &c.spans);
|
||||
lctx.lower_attrs(hir::CRATE_HIR_ID, &c.attrs);
|
||||
// FIXME(jdonszelmann): is dummy span ever a problem here?
|
||||
lctx.lower_attrs(hir::CRATE_HIR_ID, &c.attrs, DUMMY_SP);
|
||||
hir::OwnerNode::Crate(module)
|
||||
})
|
||||
}
|
||||
|
@ -157,7 +158,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
let mut ident = i.ident;
|
||||
let vis_span = self.lower_span(i.vis.span);
|
||||
let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id);
|
||||
let attrs = self.lower_attrs(hir_id, &i.attrs);
|
||||
let attrs = self.lower_attrs(hir_id, &i.attrs, i.span);
|
||||
let kind = self.lower_item_kind(i.span, i.id, hir_id, &mut ident, attrs, vis_span, &i.kind);
|
||||
let item = hir::Item {
|
||||
owner_id: hir_id.expect_owner(),
|
||||
|
@ -620,7 +621,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
fn lower_foreign_item(&mut self, i: &ForeignItem) -> &'hir hir::ForeignItem<'hir> {
|
||||
let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id);
|
||||
let owner_id = hir_id.expect_owner();
|
||||
let attrs = self.lower_attrs(hir_id, &i.attrs);
|
||||
let attrs = self.lower_attrs(hir_id, &i.attrs, i.span);
|
||||
let item = hir::ForeignItem {
|
||||
owner_id,
|
||||
ident: self.lower_ident(i.ident),
|
||||
|
@ -678,7 +679,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
fn lower_variant(&mut self, v: &Variant) -> hir::Variant<'hir> {
|
||||
let hir_id = self.lower_node_id(v.id);
|
||||
self.lower_attrs(hir_id, &v.attrs);
|
||||
self.lower_attrs(hir_id, &v.attrs, v.span);
|
||||
hir::Variant {
|
||||
hir_id,
|
||||
def_id: self.local_def_id(v.id),
|
||||
|
@ -740,7 +741,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
) -> hir::FieldDef<'hir> {
|
||||
let ty = self.lower_ty(&f.ty, ImplTraitContext::Disallowed(ImplTraitPosition::FieldTy));
|
||||
let hir_id = self.lower_node_id(f.id);
|
||||
self.lower_attrs(hir_id, &f.attrs);
|
||||
self.lower_attrs(hir_id, &f.attrs, f.span);
|
||||
hir::FieldDef {
|
||||
span: self.lower_span(f.span),
|
||||
hir_id,
|
||||
|
@ -759,7 +760,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
fn lower_trait_item(&mut self, i: &AssocItem) -> &'hir hir::TraitItem<'hir> {
|
||||
let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id);
|
||||
let attrs = self.lower_attrs(hir_id, &i.attrs);
|
||||
let attrs = self.lower_attrs(hir_id, &i.attrs, i.span);
|
||||
let trait_item_def_id = hir_id.expect_owner();
|
||||
|
||||
let (generics, kind, has_default) = match &i.kind {
|
||||
|
@ -895,7 +896,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
let has_value = true;
|
||||
let (defaultness, _) = self.lower_defaultness(i.kind.defaultness(), has_value);
|
||||
let hir_id = hir::HirId::make_owner(self.current_hir_id_owner.def_id);
|
||||
let attrs = self.lower_attrs(hir_id, &i.attrs);
|
||||
let attrs = self.lower_attrs(hir_id, &i.attrs, i.span);
|
||||
|
||||
let (generics, kind) = match &i.kind {
|
||||
AssocItemKind::Const(box ConstItem { generics, ty, expr, .. }) => self.lower_generics(
|
||||
|
@ -1056,7 +1057,7 @@ impl<'hir> LoweringContext<'_, 'hir> {
|
|||
|
||||
fn lower_param(&mut self, param: &Param) -> hir::Param<'hir> {
|
||||
let hir_id = self.lower_node_id(param.id);
|
||||
self.lower_attrs(hir_id, ¶m.attrs);
|
||||
self.lower_attrs(hir_id, ¶m.attrs, param.span);
|
||||
hir::Param {
|
||||
hir_id,
|
||||
pat: self.lower_pat(¶m.pat),
|
||||
|
|
|
@ -45,6 +45,7 @@ use std::sync::Arc;
|
|||
|
||||
use rustc_ast::node_id::NodeMap;
|
||||
use rustc_ast::{self as ast, *};
|
||||
use rustc_attr_parsing::{AttributeParser, OmitDoc};
|
||||
use rustc_data_structures::fingerprint::Fingerprint;
|
||||
use rustc_data_structures::sorted_map::SortedMap;
|
||||
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
|
||||
|
@ -60,7 +61,8 @@ use rustc_macros::extension;
|
|||
use rustc_middle::span_bug;
|
||||
use rustc_middle::ty::{ResolverAstLowering, TyCtxt};
|
||||
use rustc_session::parse::{add_feature_diagnostics, feature_err};
|
||||
use rustc_span::{DUMMY_SP, DesugaringKind, Ident, Span, Symbol, kw, sym};
|
||||
use rustc_span::symbol::{Ident, Symbol, kw, sym};
|
||||
use rustc_span::{DUMMY_SP, DesugaringKind, Span};
|
||||
use smallvec::{SmallVec, smallvec};
|
||||
use thin_vec::ThinVec;
|
||||
use tracing::{debug, instrument, trace};
|
||||
|
@ -137,10 +139,13 @@ struct LoweringContext<'a, 'hir> {
|
|||
allow_async_iterator: Arc<[Symbol]>,
|
||||
allow_for_await: Arc<[Symbol]>,
|
||||
allow_async_fn_traits: Arc<[Symbol]>,
|
||||
|
||||
attribute_parser: AttributeParser<'hir>,
|
||||
}
|
||||
|
||||
impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
||||
fn new(tcx: TyCtxt<'hir>, resolver: &'a mut ResolverAstLowering) -> Self {
|
||||
let registered_tools = tcx.registered_tools(()).iter().map(|x| x.name).collect();
|
||||
Self {
|
||||
// Pseudo-globals.
|
||||
tcx,
|
||||
|
@ -181,6 +186,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
// FIXME(gen_blocks): how does `closure_track_caller`/`async_fn_track_caller`
|
||||
// interact with `gen`/`async gen` blocks
|
||||
allow_async_iterator: [sym::gen_future, sym::async_iterator].into(),
|
||||
|
||||
attribute_parser: AttributeParser::new(tcx.sess, tcx.features(), registered_tools),
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -216,7 +223,6 @@ impl ResolverAstLowering {
|
|||
None
|
||||
}
|
||||
|
||||
/// Obtains resolution for a `NodeId` with a single resolution.
|
||||
fn get_partial_res(&self, id: NodeId) -> Option<PartialRes> {
|
||||
self.partial_res_map.get(&id).copied()
|
||||
}
|
||||
|
@ -855,45 +861,27 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
ret
|
||||
}
|
||||
|
||||
fn lower_attrs(&mut self, id: HirId, attrs: &[Attribute]) -> &'hir [hir::Attribute] {
|
||||
fn lower_attrs(
|
||||
&mut self,
|
||||
id: HirId,
|
||||
attrs: &[Attribute],
|
||||
target_span: Span,
|
||||
) -> &'hir [hir::Attribute] {
|
||||
if attrs.is_empty() {
|
||||
&[]
|
||||
} else {
|
||||
let lowered_attrs = self.lower_attrs_vec(attrs, target_span);
|
||||
|
||||
debug_assert_eq!(id.owner, self.current_hir_id_owner);
|
||||
let ret = self.arena.alloc_from_iter(attrs.iter().map(|a| self.lower_attr(a)));
|
||||
let ret = self.arena.alloc_from_iter(lowered_attrs);
|
||||
debug_assert!(!ret.is_empty());
|
||||
self.attrs.insert(id.local_id, ret);
|
||||
ret
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_attr(&self, attr: &Attribute) -> hir::Attribute {
|
||||
// Note that we explicitly do not walk the path. Since we don't really
|
||||
// lower attributes (we use the AST version) there is nowhere to keep
|
||||
// the `HirId`s. We don't actually need HIR version of attributes anyway.
|
||||
// Tokens are also not needed after macro expansion and parsing.
|
||||
let kind = match attr.kind {
|
||||
AttrKind::Normal(ref normal) => hir::AttrKind::Normal(Box::new(hir::AttrItem {
|
||||
unsafety: self.lower_safety(normal.item.unsafety, hir::Safety::Safe),
|
||||
path: hir::AttrPath {
|
||||
segments: normal
|
||||
.item
|
||||
.path
|
||||
.segments
|
||||
.iter()
|
||||
.map(|i| i.ident)
|
||||
.collect::<Vec<_>>()
|
||||
.into_boxed_slice(),
|
||||
span: normal.item.path.span,
|
||||
},
|
||||
args: self.lower_attr_args(&normal.item.args),
|
||||
})),
|
||||
AttrKind::DocComment(comment_kind, data) => {
|
||||
hir::AttrKind::DocComment(comment_kind, data)
|
||||
}
|
||||
};
|
||||
|
||||
hir::Attribute { kind, id: attr.id, style: attr.style, span: self.lower_span(attr.span) }
|
||||
fn lower_attrs_vec(&self, attrs: &[Attribute], target_span: Span) -> Vec<hir::Attribute> {
|
||||
self.attribute_parser.parse_attribute_list(attrs, target_span, OmitDoc::Lower)
|
||||
}
|
||||
|
||||
fn alias_attrs(&mut self, id: HirId, target_id: HirId) {
|
||||
|
@ -905,34 +893,6 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
}
|
||||
}
|
||||
|
||||
fn lower_attr_args(&self, args: &AttrArgs) -> hir::AttrArgs {
|
||||
match args {
|
||||
AttrArgs::Empty => hir::AttrArgs::Empty,
|
||||
AttrArgs::Delimited(args) => hir::AttrArgs::Delimited(self.lower_delim_args(args)),
|
||||
// This is an inert key-value attribute - it will never be visible to macros
|
||||
// after it gets lowered to HIR. Therefore, we can extract literals to handle
|
||||
// nonterminals in `#[doc]` (e.g. `#[doc = $e]`).
|
||||
&AttrArgs::Eq { eq_span, ref expr } => {
|
||||
// In valid code the value always ends up as a single literal. Otherwise, a dummy
|
||||
// literal suffices because the error is handled elsewhere.
|
||||
let lit = if let ExprKind::Lit(token_lit) = expr.kind
|
||||
&& let Ok(lit) = MetaItemLit::from_token_lit(token_lit, expr.span)
|
||||
{
|
||||
lit
|
||||
} else {
|
||||
let guar = self.dcx().has_errors().unwrap();
|
||||
MetaItemLit {
|
||||
symbol: kw::Empty,
|
||||
suffix: None,
|
||||
kind: LitKind::Err(guar),
|
||||
span: DUMMY_SP,
|
||||
}
|
||||
};
|
||||
hir::AttrArgs::Eq { eq_span, expr: lit }
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn lower_delim_args(&self, args: &DelimArgs) -> DelimArgs {
|
||||
DelimArgs { dspan: args.dspan, delim: args.delim, tokens: args.tokens.flattened() }
|
||||
}
|
||||
|
@ -1845,7 +1805,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
let (name, kind) = self.lower_generic_param_kind(param, source);
|
||||
|
||||
let hir_id = self.lower_node_id(param.id);
|
||||
self.lower_attrs(hir_id, ¶m.attrs);
|
||||
self.lower_attrs(hir_id, ¶m.attrs, param.span());
|
||||
hir::GenericParam {
|
||||
hir_id,
|
||||
def_id: self.local_def_id(param.id),
|
||||
|
|
|
@ -93,7 +93,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
|
|||
|
||||
let fs = self.arena.alloc_from_iter(fields.iter().map(|f| {
|
||||
let hir_id = self.lower_node_id(f.id);
|
||||
self.lower_attrs(hir_id, &f.attrs);
|
||||
self.lower_attrs(hir_id, &f.attrs, f.span);
|
||||
|
||||
hir::PatField {
|
||||
hir_id,
|
||||
|
|
|
@ -7,14 +7,9 @@ edition = "2024"
|
|||
# tidy-alphabetical-start
|
||||
rustc_abi = {path = "../rustc_abi"}
|
||||
rustc_ast = {path = "../rustc_ast"}
|
||||
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
|
||||
rustc_data_structures = {path = "../rustc_data_structures"}
|
||||
rustc_errors = { path = "../rustc_errors" }
|
||||
rustc_feature = { path = "../rustc_feature" }
|
||||
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
|
||||
rustc_lexer = { path = "../rustc_lexer" }
|
||||
rustc_macros = {path = "../rustc_macros"}
|
||||
rustc_serialize = {path = "../rustc_serialize"}
|
||||
rustc_session = { path = "../rustc_session" }
|
||||
rustc_span = {path = "../rustc_span"}
|
||||
thin-vec = "0.2.12"
|
||||
# tidy-alphabetical-end
|
||||
|
|
|
@ -1,5 +1,6 @@
|
|||
use rustc_abi::Align;
|
||||
use rustc_ast as ast;
|
||||
use rustc_ast::token::CommentKind;
|
||||
use rustc_ast::{self as ast, AttrStyle};
|
||||
use rustc_macros::{Decodable, Encodable, HashStable_Generic};
|
||||
use rustc_span::{Span, Symbol};
|
||||
|
||||
|
@ -54,7 +55,7 @@ impl OptimizeAttr {
|
|||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Encodable, Decodable)]
|
||||
#[derive(Clone, Debug, Encodable, Decodable, HashStable_Generic)]
|
||||
pub enum DiagnosticAttribute {
|
||||
// tidy-alphabetical-start
|
||||
DoNotRecommend,
|
||||
|
@ -62,7 +63,7 @@ pub enum DiagnosticAttribute {
|
|||
// tidy-alphabetical-end
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Debug, Encodable, Decodable, Copy, Clone)]
|
||||
#[derive(PartialEq, Debug, Encodable, Decodable, Copy, Clone, HashStable_Generic)]
|
||||
pub enum ReprAttr {
|
||||
ReprInt(IntType),
|
||||
ReprRust,
|
||||
|
@ -80,7 +81,7 @@ pub enum TransparencyError {
|
|||
}
|
||||
|
||||
#[derive(Eq, PartialEq, Debug, Copy, Clone)]
|
||||
#[derive(Encodable, Decodable)]
|
||||
#[derive(Encodable, Decodable, HashStable_Generic)]
|
||||
pub enum IntType {
|
||||
SignedInt(ast::IntTy),
|
||||
UnsignedInt(ast::UintTy),
|
||||
|
@ -132,3 +133,27 @@ impl Deprecation {
|
|||
matches!(self.since, DeprecatedSince::RustcVersion(_))
|
||||
}
|
||||
}
|
||||
|
||||
/// Attributes represent parsed, *built in*, inert attributes. That is,
/// attributes that are never actually expanded.
/// For more information on this, see the module docs on the rustc_attr_parsing crate.
/// They are instead used as markers, to guide the compilation process in various ways
/// in almost every stage of the compiler.
/// These are kept around past the AST, into the HIR and further on.
///
/// The word "parsed" could be a little misleading here, because the parser already parses
/// attributes early on. However, the result, an [`ast::Attribute`],
/// is only parsed at a high level, still containing a token stream in many cases. That is
/// because the structure of the contents varies from attribute to attribute.
/// A *parsed* attribute means that each attribute is processed individually into a
/// final structure, so that on-site (the place where the attribute is used, think of
/// the place where `must_use` is checked) little to no extra parsing or validation needs to
/// happen.
///
/// For more docs, look in [`rustc_attr`](https://doc.rust-lang.org/stable/nightly-rustc/rustc_attr/index.html)
// FIXME(jdonszelmann): rename to AttributeKind once hir::AttributeKind is dissolved
#[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable)]
pub enum AttributeKind {
    // tidy-alphabetical-start
    DocComment { style: AttrStyle, kind: CommentKind, span: Span, comment: Symbol },
    // tidy-alphabetical-end
}

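To make the "parsed once, checked cheaply on-site" idea concrete, here is a minimal illustrative sketch (not part of this commit; the `Repr` variant and the helper functions are hypothetical):

// Hypothetical: a structured `Repr(ThinVec<ReprAttr>)` variant would let a
// later compilation stage pattern-match directly, with no token re-parsing:
//
//     match attr {
//         AttributeKind::Repr(hints) => check_repr_hints(hints),
//         AttributeKind::DocComment { comment, .. } => collect_doc(comment),
//         _ => {}
//     }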
|
|
|
@ -11,6 +11,10 @@ mod stability;
|
|||
mod version;
|
||||
|
||||
pub use attributes::*;
|
||||
pub(crate) use rustc_session::HashStableContext;
|
||||
pub use stability::*;
|
||||
pub use version::*;
|
||||
|
||||
/// Requirements for a `StableHashingContext` to be used in this crate.
|
||||
/// This is a hack to allow using the `HashStable_Generic` derive macro
|
||||
/// instead of implementing everything in `rustc_middle`.
|
||||
pub trait HashStableContext: rustc_ast::HashStableContext + rustc_abi::HashStableContext {}
|
||||
|
|
|
@ -13,9 +13,11 @@ rustc_data_structures = { path = "../rustc_data_structures" }
|
|||
rustc_errors = { path = "../rustc_errors" }
|
||||
rustc_feature = { path = "../rustc_feature" }
|
||||
rustc_fluent_macro = { path = "../rustc_fluent_macro" }
|
||||
rustc_hir = { path = "../rustc_hir" }
|
||||
rustc_lexer = { path = "../rustc_lexer" }
|
||||
rustc_macros = { path = "../rustc_macros" }
|
||||
rustc_serialize = { path = "../rustc_serialize" }
|
||||
rustc_session = { path = "../rustc_session" }
|
||||
rustc_span = { path = "../rustc_span" }
|
||||
thin-vec = "0.2.12"
|
||||
# tidy-alphabetical-end
|
||||
|
|
|
@ -88,6 +88,8 @@ attr_parsing_multiple_stability_levels =
|
|||
attr_parsing_non_ident_feature =
|
||||
'feature' is not an identifier
|
||||
|
||||
attr_parsing_repr_ident =
|
||||
meta item in `repr` must be an identifier
|
||||
attr_parsing_rustc_allowed_unstable_pairing =
|
||||
`rustc_allowed_through_unstable_modules` attribute must be paired with a `stable` attribute
|
||||
|
||||
|
@ -107,6 +109,10 @@ attr_parsing_unknown_meta_item =
|
|||
attr_parsing_unknown_version_literal =
|
||||
unknown version literal format, assuming it refers to a future version
|
||||
|
||||
attr_parsing_unrecognized_repr_hint =
|
||||
unrecognized representation hint
|
||||
.help = valid reprs are `Rust` (default), `C`, `align`, `packed`, `transparent`, `simd`, `i8`, `u8`, `i16`, `u16`, `i32`, `u32`, `i64`, `u64`, `i128`, `u128`, `isize`, `usize`
|
||||
|
||||
attr_parsing_unstable_cfg_target_compact =
|
||||
compact `cfg(target(..))` is experimental and subject to change
|
||||
|
||||
|
|
|
@ -1,4 +1,6 @@
|
|||
//! You can find more docs on what groups are on [`AttributeParser`] itself.
|
||||
//! This module defines traits for attribute parsers, little state machines that recognize and parse
|
||||
//! attributes out of a longer list of attributes. The main trait is called [`AttributeParser`].
|
||||
//! You can find more docs about [`AttributeParser`]s on the trait itself.
|
||||
//! However, for many types of attributes, implementing [`AttributeParser`] is not necessary.
|
||||
//! It allows for a lot of flexibility you might not want.
|
||||
//!
|
||||
|
@ -10,7 +12,16 @@
|
|||
//! - [`CombineAttributeParser`]: makes it easy to implement an attribute which should combine the
|
||||
//!   contents of attributes, if an attribute appears multiple times in a list
|
||||
//!
|
||||
//! Attributes should be added to [`ATTRIBUTE_GROUP_MAPPING`](crate::context::ATTRIBUTE_GROUP_MAPPING) to be parsed.
|
||||
//! Attributes should be added to [`ATTRIBUTE_MAPPING`](crate::context::ATTRIBUTE_MAPPING) to be parsed.
|
||||
|
||||
use std::marker::PhantomData;
|
||||
|
||||
use rustc_attr_data_structures::AttributeKind;
|
||||
use rustc_span::Span;
|
||||
use thin_vec::ThinVec;
|
||||
|
||||
use crate::context::{AcceptContext, FinalizeContext};
|
||||
use crate::parser::ArgParser;
|
||||
|
||||
pub(crate) mod allow_unstable;
|
||||
pub(crate) mod cfg;
|
||||
|
@ -28,3 +39,122 @@ pub use deprecation::*;
|
|||
pub use repr::*;
|
||||
pub use stability::*;
|
||||
pub use transparency::*;
|
||||
|
||||
type AcceptFn<T> = fn(&mut T, &AcceptContext<'_>, &ArgParser<'_>);
type AcceptMapping<T> = &'static [(&'static [rustc_span::Symbol], AcceptFn<T>)];

/// An [`AttributeParser`] is a type which searches for syntactic attributes.
///
/// Parsers are often tiny state machines that get to see all syntactical attributes on an item.
/// [`Default::default`] creates a fresh instance that sits in its initial state, which usually
/// means that the attribute it is looking for has not yet been seen.
///
/// Then, it defines which paths this group will accept in [`AttributeParser::ATTRIBUTES`].
/// These are listed as pairs of symbols and function pointers. The function pointer will
/// be called when that attribute is found on an item, and it can update the state of the little
/// state machine.
///
/// Finally, after all attributes on an item have been seen, and possibly accepted,
/// the [`finalize`](AttributeParser::finalize) function of every attribute parser is called. Each can then report
/// whether it has seen the attribute it has been looking for.
///
/// The state machine is automatically reset to parse attributes on the next item.
pub(crate) trait AttributeParser: Default + 'static {
    /// The symbols for the attributes that this parser is interested in.
    ///
    /// If an attribute has one of these symbols, the corresponding `accept` function will be called on it.
    const ATTRIBUTES: AcceptMapping<Self>;

    /// After the parser has been given a chance to accept the attributes on an item,
    /// this is where it can produce the resulting attribute, if any.
    fn finalize(self, cx: &FinalizeContext<'_>) -> Option<AttributeKind>;
}

/// Alternative to [`AttributeParser`] that automatically handles state management.
/// A slightly simpler and more restricted way to convert attributes.
/// Assumes that an attribute can only appear a single time on an item,
/// and emits an error when it sees more.
///
/// [`Single<T> where T: SingleAttributeParser`](Single) implements [`AttributeParser`].
///
/// [`SingleAttributeParser`] can only convert attributes one-to-one, and cannot combine multiple
/// attributes together, as is necessary for `#[stable()]` and `#[unstable()]` for example.
pub(crate) trait SingleAttributeParser: 'static {
    const PATH: &'static [rustc_span::Symbol];

    /// Called when a duplicate attribute is found.
    ///
    /// `first_span` is the span of the first occurrence of this attribute.
    // FIXME(jdonszelmann): default error
    fn on_duplicate(cx: &AcceptContext<'_>, first_span: Span);

    /// Converts a single syntactical attribute to a single semantic attribute, or [`AttributeKind`].
    fn convert(cx: &AcceptContext<'_>, args: &ArgParser<'_>) -> Option<AttributeKind>;
}

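A minimal illustrative sketch of implementing this trait (not part of this commit): the attribute name, the `sym::my_marker` symbol, and the `AttributeKind::MyMarker` variant are all hypothetical, but the trait surface matches the definitions above.

// Hypothetical parser for a `#[my_marker]` attribute that takes no arguments.
struct MyMarkerParser;

impl SingleAttributeParser for MyMarkerParser {
    const PATH: &'static [rustc_span::Symbol] = &[sym::my_marker]; // hypothetical symbol

    fn on_duplicate(cx: &AcceptContext<'_>, first_span: Span) {
        // A real implementation would emit a "duplicate attribute" diagnostic
        // here, pointing at `first_span`.
        let _ = (cx, first_span);
    }

    fn convert(cx: &AcceptContext<'_>, args: &ArgParser<'_>) -> Option<AttributeKind> {
        // `#[my_marker]` takes no arguments; anything else is rejected here.
        if !args.no_args() {
            return None;
        }
        Some(AttributeKind::MyMarker(cx.attr_span)) // hypothetical variant
    }
}

// Registering `Single<MyMarkerParser>` (see `Single` below) in the attribute
// mapping would then make it run as a full `AttributeParser`.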
pub(crate) struct Single<T: SingleAttributeParser>(PhantomData<T>, Option<(AttributeKind, Span)>);
|
||||
|
||||
impl<T: SingleAttributeParser> Default for Single<T> {
|
||||
fn default() -> Self {
|
||||
Self(Default::default(), Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: SingleAttributeParser> AttributeParser for Single<T> {
|
||||
const ATTRIBUTES: AcceptMapping<Self> = &[(T::PATH, |group: &mut Single<T>, cx, args| {
|
||||
if let Some((_, s)) = group.1 {
|
||||
T::on_duplicate(cx, s);
|
||||
return;
|
||||
}
|
||||
|
||||
if let Some(pa) = T::convert(cx, args) {
|
||||
group.1 = Some((pa, cx.attr_span));
|
||||
}
|
||||
})];
|
||||
|
||||
fn finalize(self, _cx: &FinalizeContext<'_>) -> Option<AttributeKind> {
|
||||
Some(self.1?.0)
|
||||
}
|
||||
}
|
||||
|
||||
type ConvertFn<E> = fn(ThinVec<E>) -> AttributeKind;

/// Alternative to [`AttributeParser`] that automatically handles state management.
/// If multiple attributes appear on an element, combines the values of each into a
/// [`ThinVec`].
/// [`Combine<T> where T: CombineAttributeParser`](Combine) implements [`AttributeParser`].
///
/// [`CombineAttributeParser`] can only convert a single kind of attribute, and cannot combine
/// multiple different attributes together, as is necessary for `#[stable()]` and `#[unstable()]` for example.
pub(crate) trait CombineAttributeParser: 'static {
    const PATH: &'static [rustc_span::Symbol];

    type Item;
    const CONVERT: ConvertFn<Self::Item>;

    /// Converts a single syntactical attribute to a number of elements of the semantic attribute, or [`AttributeKind`].
    fn extend<'a>(
        cx: &'a AcceptContext<'a>,
        args: &'a ArgParser<'a>,
    ) -> impl IntoIterator<Item = Self::Item> + 'a;
}

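Another illustrative sketch (not part of this commit): a combining parser for a hypothetical `#[my_allowed = "name"]` attribute; the symbol and the `AttributeKind::MyAllowed` variant are assumptions.

// Hypothetical combining parser: each `#[my_allowed = "name"]` contributes one
// `Symbol`, and all occurrences on an item are merged into one ThinVec.
struct MyAllowedParser;

impl CombineAttributeParser for MyAllowedParser {
    const PATH: &'static [rustc_span::Symbol] = &[sym::my_allowed]; // hypothetical symbol
    type Item = Symbol;
    const CONVERT: ConvertFn<Self::Item> = AttributeKind::MyAllowed; // hypothetical variant

    fn extend<'a>(
        _cx: &'a AcceptContext<'a>,
        args: &'a ArgParser<'a>,
    ) -> impl IntoIterator<Item = Self::Item> + 'a {
        // Only the `name = "value"` form is handled in this sketch; a real
        // parser would also accept the list form and emit diagnostics.
        args.name_value().and_then(|nv| nv.value_as_str())
    }
}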
pub(crate) struct Combine<T: CombineAttributeParser>(
|
||||
PhantomData<T>,
|
||||
ThinVec<<T as CombineAttributeParser>::Item>,
|
||||
);
|
||||
|
||||
impl<T: CombineAttributeParser> Default for Combine<T> {
|
||||
fn default() -> Self {
|
||||
Self(Default::default(), Default::default())
|
||||
}
|
||||
}
|
||||
|
||||
impl<T: CombineAttributeParser> AttributeParser for Combine<T> {
|
||||
const ATTRIBUTES: AcceptMapping<Self> =
|
||||
&[(T::PATH, |group: &mut Combine<T>, cx, args| group.1.extend(T::extend(cx, args)))];
|
||||
|
||||
fn finalize(self, _cx: &FinalizeContext<'_>) -> Option<AttributeKind> {
|
||||
if self.1.is_empty() { None } else { Some(T::CONVERT(self.1)) }
|
||||
}
|
||||
}
|
||||
|
|
compiler/rustc_attr_parsing/src/context.rs (new file, 315 lines)
|
@ -0,0 +1,315 @@
|
|||
use std::collections::BTreeMap;
|
||||
use std::ops::Deref;
|
||||
use std::sync::LazyLock;
|
||||
|
||||
use rustc_ast::{self as ast, DelimArgs};
|
||||
use rustc_attr_data_structures::AttributeKind;
|
||||
use rustc_errors::{DiagCtxtHandle, Diagnostic};
|
||||
use rustc_feature::Features;
|
||||
use rustc_hir::{AttrArgs, AttrItem, AttrPath, Attribute, HashIgnoredAttrId};
|
||||
use rustc_session::Session;
|
||||
use rustc_span::symbol::kw;
|
||||
use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol, sym};
|
||||
|
||||
use crate::attributes::AttributeParser as _;
|
||||
use crate::parser::{ArgParser, MetaItemParser};
|
||||
|
||||
macro_rules! attribute_groups {
|
||||
(
|
||||
pub(crate) static $name: ident = [$($names: ty),* $(,)?];
|
||||
) => {
|
||||
pub(crate) static $name: LazyLock<(
|
||||
BTreeMap<&'static [Symbol], Vec<Box<dyn Fn(&AcceptContext<'_>, &ArgParser<'_>) + Send + Sync>>>,
|
||||
Vec<Box<dyn Send + Sync + Fn(&FinalizeContext<'_>) -> Option<AttributeKind>>>
|
||||
)> = LazyLock::new(|| {
|
||||
let mut accepts = BTreeMap::<_, Vec<Box<dyn Fn(&AcceptContext<'_>, &ArgParser<'_>) + Send + Sync>>>::new();
|
||||
let mut finalizes = Vec::<Box<dyn Send + Sync + Fn(&FinalizeContext<'_>) -> Option<AttributeKind>>>::new();
|
||||
$(
|
||||
{
|
||||
thread_local! {
|
||||
static STATE_OBJECT: RefCell<$names> = RefCell::new(<$names>::default());
|
||||
};
|
||||
|
||||
for (k, v) in <$names>::ATTRIBUTES {
|
||||
accepts.entry(*k).or_default().push(Box::new(|cx, args| {
|
||||
STATE_OBJECT.with_borrow_mut(|s| {
|
||||
v(s, cx, args)
|
||||
})
|
||||
}));
|
||||
}
|
||||
|
||||
finalizes.push(Box::new(|cx| {
|
||||
let state = STATE_OBJECT.take();
|
||||
state.finalize(cx)
|
||||
}));
|
||||
}
|
||||
)*
|
||||
|
||||
(accepts, finalizes)
|
||||
});
|
||||
};
|
||||
}
|
||||
|
||||
attribute_groups!(
|
||||
pub(crate) static ATTRIBUTE_MAPPING = [
|
||||
];
|
||||
);
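The mapping is empty in this commit; once parser types exist they would be registered by listing their types in the macro invocation, roughly like this (hypothetical):

// attribute_groups!(
//     pub(crate) static ATTRIBUTE_MAPPING = [
//         Single<MyMarkerParser>,
//         Combine<MyAllowedParser>,
//     ];
// );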
|
||||
|
||||
/// Context given to every attribute parser when accepting
|
||||
///
|
||||
/// Gives [`AttributeParser`]s enough information to create errors, for example.
|
||||
pub(crate) struct AcceptContext<'a> {
|
||||
pub(crate) group_cx: &'a FinalizeContext<'a>,
|
||||
/// The span of the attribute currently being parsed
|
||||
pub(crate) attr_span: Span,
|
||||
}
|
||||
|
||||
impl<'a> AcceptContext<'a> {
|
||||
pub(crate) fn emit_err(&self, diag: impl Diagnostic<'a>) -> ErrorGuaranteed {
|
||||
if self.limit_diagnostics {
|
||||
self.dcx().create_err(diag).delay_as_bug()
|
||||
} else {
|
||||
self.dcx().emit_err(diag)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> Deref for AcceptContext<'a> {
|
||||
type Target = FinalizeContext<'a>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.group_cx
|
||||
}
|
||||
}
|
||||
|
||||
/// Context given to every attribute parser during finalization.
|
||||
///
|
||||
/// Gives [`AttributeParser`](crate::attributes::AttributeParser)s enough information to create errors, for example.
|
||||
pub(crate) struct FinalizeContext<'a> {
|
||||
/// The parse context, gives access to the session and the
|
||||
/// diagnostics context.
|
||||
pub(crate) cx: &'a AttributeParser<'a>,
|
||||
/// The span of the syntactical component this attribute was applied to
|
||||
pub(crate) target_span: Span,
|
||||
}
|
||||
|
||||
impl<'a> Deref for FinalizeContext<'a> {
|
||||
type Target = AttributeParser<'a>;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.cx
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Clone, Copy, Debug)]
|
||||
pub enum OmitDoc {
|
||||
Lower,
|
||||
Skip,
|
||||
}
|
||||
|
||||
/// Context created once, for example as part of the ast lowering
|
||||
/// context, through which all attributes can be lowered.
|
||||
pub struct AttributeParser<'sess> {
|
||||
#[expect(dead_code)] // FIXME(jdonszelmann): needed later to verify we parsed all attributes
|
||||
tools: Vec<Symbol>,
|
||||
sess: &'sess Session,
|
||||
features: Option<&'sess Features>,
|
||||
|
||||
/// If set, *only* parse attributes with this symbol.
///
/// Used in cases where we want to use the lowering infrastructure
/// to parse just a single attribute.
parse_only: Option<Symbol>,

/// Can be used to instruct parsers to reduce the number of diagnostics they emit.
/// Useful when using `parse_limited` and you know the attr will be reparsed later.
pub(crate) limit_diagnostics: bool,
|
||||
}
|
||||
|
||||
impl<'sess> AttributeParser<'sess> {
|
||||
/// This method allows you to parse attributes *before* you have access to features or tools.
/// One example where this is necessary is parsing the `feature` attributes themselves.
///
/// Try to use this as little as possible. Attributes *should* be lowered during `rustc_ast_lowering`.
/// Some attributes require access to features to parse, which would crash if you tried to do so
/// through [`parse_limited`](Self::parse_limited).
///
/// To make sure use is limited, supply a `Symbol` you'd like to parse. Only attributes with
/// that symbol are picked out of the list of attributes and parsed. Those are returned.
|
||||
pub fn parse_limited(
|
||||
sess: &'sess Session,
|
||||
attrs: &[ast::Attribute],
|
||||
sym: Symbol,
|
||||
target_span: Span,
|
||||
limit_diagnostics: bool,
|
||||
) -> Option<Attribute> {
|
||||
let mut parsed = Self {
|
||||
sess,
|
||||
features: None,
|
||||
tools: Vec::new(),
|
||||
parse_only: Some(sym),
|
||||
limit_diagnostics,
|
||||
}
|
||||
.parse_attribute_list(attrs, target_span, OmitDoc::Skip);
|
||||
|
||||
assert!(parsed.len() <= 1);
|
||||
|
||||
parsed.pop()
|
||||
}
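An illustrative call (not from this commit), assuming a `sess`, an attribute slice `attrs`, and a span `sp` are available:

// Parse only `crate_name`-style attributes out of `attrs`, before features
// and tools are known; all other attributes are skipped.
let parsed: Option<Attribute> =
    AttributeParser::parse_limited(sess, attrs, sym::crate_name, sp, false);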
|
||||
|
||||
pub fn new(sess: &'sess Session, features: &'sess Features, tools: Vec<Symbol>) -> Self {
|
||||
Self { sess, features: Some(features), tools, parse_only: None, limit_diagnostics: false }
|
||||
}
|
||||
|
||||
pub(crate) fn sess(&self) -> &'sess Session {
|
||||
self.sess
|
||||
}
|
||||
|
||||
pub(crate) fn features(&self) -> &'sess Features {
|
||||
self.features.expect("features not available at this point in the compiler")
|
||||
}
|
||||
|
||||
pub(crate) fn dcx(&self) -> DiagCtxtHandle<'sess> {
|
||||
self.sess.dcx()
|
||||
}
|
||||
|
||||
/// Parse a list of attributes.
|
||||
///
|
||||
/// `target_span` is the span of the thing this list of attributes is applied to,
|
||||
/// and when `omit_doc` is [`OmitDoc::Skip`], doc attributes are filtered out.
|
||||
pub fn parse_attribute_list<'a>(
|
||||
&'a self,
|
||||
attrs: &'a [ast::Attribute],
|
||||
target_span: Span,
|
||||
omit_doc: OmitDoc,
|
||||
) -> Vec<Attribute> {
|
||||
let mut attributes = Vec::new();
|
||||
|
||||
let group_cx = FinalizeContext { cx: self, target_span };
|
||||
|
||||
for attr in attrs {
|
||||
// if we're only looking for a single attribute,
|
||||
// skip all the ones we don't care about
|
||||
if let Some(expected) = self.parse_only {
|
||||
if attr.name_or_empty() != expected {
|
||||
continue;
|
||||
}
|
||||
}
|
||||
|
||||
// Sometimes, for example for `#![doc = include_str!("readme.md")]`,
// `doc` still contains a non-literal. You might say: when we're lowering
// attributes, that's already expanded, right? But no: sometimes, when parsing
// attributes on macros, we already use the lowering logic and these are still
// there. So, when `omit_doc` is set we *also* want to ignore these.
|
||||
if omit_doc == OmitDoc::Skip && attr.name_or_empty() == sym::doc {
|
||||
continue;
|
||||
}
|
||||
|
||||
match &attr.kind {
|
||||
ast::AttrKind::DocComment(comment_kind, symbol) => {
|
||||
if omit_doc == OmitDoc::Skip {
|
||||
continue;
|
||||
}
|
||||
|
||||
attributes.push(Attribute::Parsed(AttributeKind::DocComment {
|
||||
style: attr.style,
|
||||
kind: *comment_kind,
|
||||
span: attr.span,
|
||||
comment: *symbol,
|
||||
}))
|
||||
}
|
||||
// // FIXME: make doc attributes go through a proper attribute parser
|
||||
// ast::AttrKind::Normal(n) if n.name_or_empty() == sym::doc => {
|
||||
// let p = GenericMetaItemParser::from_attr(&n, self.dcx());
|
||||
//
|
||||
// attributes.push(Attribute::Parsed(AttributeKind::DocComment {
|
||||
// style: attr.style,
|
||||
// kind: CommentKind::Line,
|
||||
// span: attr.span,
|
||||
// comment: p.args().name_value(),
|
||||
// }))
|
||||
// }
|
||||
ast::AttrKind::Normal(n) => {
|
||||
let parser = MetaItemParser::from_attr(n, self.dcx());
|
||||
let (path, args) = parser.deconstruct();
|
||||
let parts = path.segments().map(|i| i.name).collect::<Vec<_>>();
|
||||
|
||||
if let Some(accepts) = ATTRIBUTE_MAPPING.0.get(parts.as_slice()) {
|
||||
for f in accepts {
|
||||
let cx = AcceptContext { group_cx: &group_cx, attr_span: attr.span };
|
||||
|
||||
f(&cx, &args)
|
||||
}
|
||||
} else {
|
||||
// if we're here, we must be compiling a tool attribute... Or someone forgot to
// parse their fancy new attribute. Let's warn them in any case. If you are that
// person, and you really think your attribute should remain unparsed, carefully read the
// documentation in this module and if you still think so you can add an exception
// to this assertion.
|
||||
|
||||
// FIXME(jdonszelmann): convert other attributes, and check with this that
|
||||
// we caught em all
|
||||
// const FIXME_TEMPORARY_ATTR_ALLOWLIST: &[Symbol] = &[sym::cfg];
|
||||
// assert!(
|
||||
// self.tools.contains(&parts[0]) || true,
|
||||
// // || FIXME_TEMPORARY_ATTR_ALLOWLIST.contains(&parts[0]),
|
||||
// "attribute {path} wasn't parsed and isn't a know tool attribute",
|
||||
// );
|
||||
|
||||
attributes.push(Attribute::Unparsed(Box::new(AttrItem {
|
||||
path: AttrPath::from_ast(&n.item.path),
|
||||
args: self.lower_attr_args(&n.item.args),
|
||||
id: HashIgnoredAttrId { attr_id: attr.id },
|
||||
style: attr.style,
|
||||
span: attr.span,
|
||||
})));
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
let mut parsed_attributes = Vec::new();
|
||||
for f in &ATTRIBUTE_MAPPING.1 {
|
||||
if let Some(attr) = f(&group_cx) {
|
||||
parsed_attributes.push(Attribute::Parsed(attr));
|
||||
}
|
||||
}
|
||||
|
||||
attributes.extend(parsed_attributes);
|
||||
|
||||
attributes
|
||||
}
|
||||
|
||||
fn lower_attr_args(&self, args: &ast::AttrArgs) -> AttrArgs {
|
||||
match args {
|
||||
ast::AttrArgs::Empty => AttrArgs::Empty,
|
||||
ast::AttrArgs::Delimited(args) => AttrArgs::Delimited(DelimArgs {
|
||||
dspan: args.dspan,
|
||||
delim: args.delim,
|
||||
tokens: args.tokens.flattened(),
|
||||
}),
|
||||
// This is an inert key-value attribute - it will never be visible to macros
|
||||
// after it gets lowered to HIR. Therefore, we can extract literals to handle
|
||||
// nonterminals in `#[doc]` (e.g. `#[doc = $e]`).
|
||||
ast::AttrArgs::Eq { eq_span, expr } => {
|
||||
// In valid code the value always ends up as a single literal. Otherwise, a dummy
|
||||
// literal suffices because the error is handled elsewhere.
|
||||
let lit = if let ast::ExprKind::Lit(token_lit) = expr.kind
|
||||
&& let Ok(lit) = ast::MetaItemLit::from_token_lit(token_lit, expr.span)
|
||||
{
|
||||
lit
|
||||
} else {
|
||||
let guar = self.dcx().has_errors().unwrap();
|
||||
ast::MetaItemLit {
|
||||
symbol: kw::Empty,
|
||||
suffix: None,
|
||||
kind: ast::LitKind::Err(guar),
|
||||
span: DUMMY_SP,
|
||||
}
|
||||
};
|
||||
AttrArgs::Eq { eq_span: *eq_span, expr: lit }
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
|
@ -10,11 +10,49 @@
//! These were then parsed or validated or both in places distributed all over the compiler.
//! This was a mess...
//!
//! Attributes are markers on items. Most are actually attribute-like proc-macros, and are expanded
//! but some remain as the so-called built-in attributes. These are not macros at all, and really
//! are just markers to guide the compilation process. An example is `#[inline(...)]` which changes
//! how code for functions is generated. Built-in attributes aren't macros because there's no rust
//! syntax they could expand to.
//! Attributes are markers on items.
//! Many of them are actually attribute-like proc-macros, and are expanded to some other Rust syntax.
//! This could either be a user-provided proc macro, or something compiler-provided.
//! `derive` is an example of one that the compiler provides.
//! These are built-in, but they have a valid expansion to Rust tokens and are thus called "active".
//! I personally like calling these *active* compiler-provided attributes, built-in *macros*,
//! because they still expand, and this helps to differentiate them from built-in *attributes*.
//! However, I'll be the first to admit that the naming here can be confusing.
//!
//! The alternative to active attributes is inert attributes.
//! These can occur in user code (proc-macro helper attributes).
//! But what's important is that many built-in attributes are inert like this.
//! There is nothing they expand to during the macro expansion process,
//! sometimes because they literally cannot expand to something that is valid Rust.
//! They are really just markers to guide the compilation process.
//! An example is `#[inline(...)]` which changes how code for functions is generated.
//!
//! ```text
//!                       Active                 Inert
//!              ┌──────────────────────┬──────────────────────┐
//!              │      (mostly in)     │   these are parsed   │
//!              │ rustc_builtin_macros │        here!         │
//!              │                      │                      │
//!              │                      │                      │
//!              │    #[derive(...)]    │      #[stable()]     │
//!     Built-in │       #[cfg()]       │      #[inline()]     │
//!              │    #[cfg_attr()]     │      #[repr()]       │
//!              │                      │                      │
//!              │                      │                      │
//!              │                      │                      │
//!              ├──────────────────────┼──────────────────────┤
//!              │                      │                      │
//!              │                      │                      │
//!              │                      │        `b` in        │
//!              │                      │ #[proc_macro_derive( │
//! User created │ #[proc_macro_attr()] │    a,                │
//!              │                      │    attributes(b)     │
//!              │                      │ ]                    │
//!              │                      │                      │
//!              │                      │                      │
//!              │                      │                      │
//!              └──────────────────────┴──────────────────────┘
//! ```
//!
//! In this crate, syntactical attributes (sequences of tokens that look like
//! `#[something(something else)]`) are parsed into more semantic attributes, markers on items.

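As a concrete (illustrative) example of the syntactic-to-semantic step described above, assuming structured repr variants arrive in later commits:

// Syntactic attribute, as the parser sees it: a path `repr` plus a token
// stream `(C, align(8))` that has not been interpreted yet.
//     #[repr(C, align(8))]
//
// Semantic attribute, as this crate would eventually produce it: structured,
// validated data that later passes can simply match on, e.g. something like
//     AttributeKind::Repr(vec![ReprC, ReprAlign(/* 8-byte Align */)])
// (illustrative only; the `Repr` variant is not added in this commit).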
@ -46,9 +84,12 @@
|
|||
// tidy-alphabetical-end
|
||||
|
||||
mod attributes;
|
||||
mod context;
|
||||
pub mod parser;
|
||||
mod session_diagnostics;
|
||||
|
||||
pub use attributes::*;
|
||||
pub use context::{AttributeParser, OmitDoc};
|
||||
pub use rustc_attr_data_structures::*;
|
||||
pub use util::{find_crate_name, is_builtin_attr, parse_version};
|
||||
|
||||
|
|
compiler/rustc_attr_parsing/src/parser.rs (new file, 625 lines)
|
@ -0,0 +1,625 @@
|
|||
//! This is in essence an (improved) duplicate of `rustc_ast/attr/mod.rs`.
|
||||
//! That module is intended to be deleted in its entirety.
|
||||
//!
|
||||
//! FIXME(jdonszelmann): delete `rustc_ast/attr/mod.rs`
|
||||
|
||||
use std::fmt::{Debug, Display};
|
||||
use std::iter::Peekable;
|
||||
|
||||
use rustc_ast::token::{self, Delimiter, Token};
|
||||
use rustc_ast::tokenstream::{TokenStreamIter, TokenTree};
|
||||
use rustc_ast::{AttrArgs, DelimArgs, Expr, ExprKind, LitKind, MetaItemLit, NormalAttr, Path};
|
||||
use rustc_ast_pretty::pprust;
|
||||
use rustc_errors::DiagCtxtHandle;
|
||||
use rustc_hir::{self as hir, AttrPath};
|
||||
use rustc_span::symbol::{Ident, kw};
|
||||
use rustc_span::{DUMMY_SP, ErrorGuaranteed, Span, Symbol};
|
||||
|
||||
pub struct SegmentIterator<'a> {
|
||||
offset: usize,
|
||||
path: &'a PathParser<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Iterator for SegmentIterator<'a> {
|
||||
type Item = &'a Ident;
|
||||
|
||||
fn next(&mut self) -> Option<Self::Item> {
|
||||
if self.offset >= self.path.len() {
|
||||
return None;
|
||||
}
|
||||
|
||||
let res = match self.path {
|
||||
PathParser::Ast(ast_path) => &ast_path.segments[self.offset].ident,
|
||||
PathParser::Attr(attr_path) => &attr_path.segments[self.offset],
|
||||
};
|
||||
|
||||
self.offset += 1;
|
||||
Some(res)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
pub enum PathParser<'a> {
|
||||
Ast(&'a Path),
|
||||
Attr(AttrPath),
|
||||
}
|
||||
|
||||
impl<'a> PathParser<'a> {
|
||||
pub fn get_attribute_path(&self) -> hir::AttrPath {
|
||||
AttrPath {
|
||||
segments: self.segments().copied().collect::<Vec<_>>().into_boxed_slice(),
|
||||
span: self.span(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn segments(&'a self) -> impl Iterator<Item = &'a Ident> {
|
||||
SegmentIterator { offset: 0, path: self }
|
||||
}
|
||||
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
PathParser::Ast(path) => path.span,
|
||||
PathParser::Attr(attr_path) => attr_path.span,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn len(&self) -> usize {
|
||||
match self {
|
||||
PathParser::Ast(path) => path.segments.len(),
|
||||
PathParser::Attr(attr_path) => attr_path.segments.len(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn segments_is(&self, segments: &[Symbol]) -> bool {
|
||||
self.len() == segments.len() && self.segments().zip(segments).all(|(a, b)| a.name == *b)
|
||||
}
|
||||
|
||||
pub fn word(&self) -> Option<Ident> {
|
||||
(self.len() == 1).then(|| **self.segments().next().as_ref().unwrap())
|
||||
}
|
||||
|
||||
pub fn word_or_empty(&self) -> Ident {
|
||||
self.word().unwrap_or_else(Ident::empty)
|
||||
}
|
||||
|
||||
/// Asserts that this MetaItem is some specific word.
|
||||
///
|
||||
/// See [`word`](Self::word) for examples of what a word is.
|
||||
pub fn word_is(&self, sym: Symbol) -> bool {
|
||||
self.word().map(|i| i.name == sym).unwrap_or(false)
|
||||
}
|
||||
}
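For instance (illustrative, not part of this commit), given a `PathParser` named `path`:

// `#[rustfmt::skip]` can be recognized by its full path, while single-segment
// attributes such as `#[inline]` can be recognized as a word:
let is_rustfmt_skip = path.segments_is(&[sym::rustfmt, sym::skip]);
let is_inline = path.word_is(sym::inline);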
|
||||
|
||||
impl Display for PathParser<'_> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
match self {
|
||||
PathParser::Ast(path) => write!(f, "{}", pprust::path_to_string(path)),
|
||||
PathParser::Attr(attr_path) => write!(f, "{attr_path}"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug)]
|
||||
#[must_use]
|
||||
pub enum ArgParser<'a> {
|
||||
NoArgs,
|
||||
List(MetaItemListParser<'a>),
|
||||
NameValue(NameValueParser),
|
||||
}
|
||||
|
||||
impl<'a> ArgParser<'a> {
|
||||
pub fn span(&self) -> Option<Span> {
|
||||
match self {
|
||||
Self::NoArgs => None,
|
||||
Self::List(l) => Some(l.span),
|
||||
Self::NameValue(n) => Some(n.value_span.with_lo(n.eq_span.lo())),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_attr_args(value: &'a AttrArgs, dcx: DiagCtxtHandle<'a>) -> Self {
|
||||
match value {
|
||||
AttrArgs::Empty => Self::NoArgs,
|
||||
AttrArgs::Delimited(args) if args.delim == Delimiter::Parenthesis => {
|
||||
Self::List(MetaItemListParser::new(args, dcx))
|
||||
}
|
||||
AttrArgs::Delimited(args) => {
|
||||
Self::List(MetaItemListParser { sub_parsers: vec![], span: args.dspan.entire() })
|
||||
}
|
||||
AttrArgs::Eq { eq_span, expr } => Self::NameValue(NameValueParser {
|
||||
eq_span: *eq_span,
|
||||
value: expr_to_lit(dcx, &expr),
|
||||
value_span: expr.span,
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
/// Asserts that this MetaItem is a list
|
||||
///
|
||||
/// Some examples:
|
||||
///
|
||||
/// - `#[allow(clippy::complexity)]`: `(clippy::complexity)` is a list
|
||||
/// - `#[rustfmt::skip::macros(target_macro_name)]`: `(target_macro_name)` is a list
|
||||
pub fn list(&self) -> Option<&MetaItemListParser<'a>> {
|
||||
match self {
|
||||
Self::List(l) => Some(l),
|
||||
Self::NameValue(_) | Self::NoArgs => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Asserts that this MetaItem is a name-value pair.
|
||||
///
|
||||
/// Some examples:
|
||||
///
|
||||
/// - `#[clippy::cyclomatic_complexity = "100"]`: `clippy::cyclomatic_complexity = "100"` is a name value pair,
|
||||
/// where the name is a path (`clippy::cyclomatic_complexity`). You already checked the path
|
||||
/// to get an `ArgParser`, so this method will effectively only assert that the `= "100"` is
|
||||
/// there
|
||||
/// - `#[doc = "hello"]`: `doc = "hello` is also a name value pair
|
||||
pub fn name_value(&self) -> Option<&NameValueParser> {
|
||||
match self {
|
||||
Self::NameValue(n) => Some(n),
|
||||
Self::List(_) | Self::NoArgs => None,
|
||||
}
|
||||
}
|
||||
|
||||
/// Asserts that there are no arguments
|
||||
pub fn no_args(&self) -> bool {
|
||||
matches!(self, Self::NoArgs)
|
||||
}
|
||||
}
|
||||
|
||||
/// Inside lists, values could be either literals, or more deeply nested meta items.
|
||||
/// This enum represents that.
|
||||
///
|
||||
/// Choose which one you want using the provided methods.
|
||||
#[derive(Debug, Clone)]
|
||||
pub enum MetaItemOrLitParser<'a> {
|
||||
MetaItemParser(MetaItemParser<'a>),
|
||||
Lit(MetaItemLit),
|
||||
Err(Span, ErrorGuaranteed),
|
||||
}
|
||||
|
||||
impl<'a> MetaItemOrLitParser<'a> {
|
||||
pub fn span(&self) -> Span {
|
||||
match self {
|
||||
MetaItemOrLitParser::MetaItemParser(generic_meta_item_parser) => {
|
||||
generic_meta_item_parser.span()
|
||||
}
|
||||
MetaItemOrLitParser::Lit(meta_item_lit) => meta_item_lit.span,
|
||||
MetaItemOrLitParser::Err(span, _) => *span,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn lit(&self) -> Option<&MetaItemLit> {
|
||||
match self {
|
||||
MetaItemOrLitParser::Lit(meta_item_lit) => Some(meta_item_lit),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn meta_item(&self) -> Option<&MetaItemParser<'a>> {
|
||||
match self {
|
||||
MetaItemOrLitParser::MetaItemParser(parser) => Some(parser),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Utility that deconstructs a MetaItem into usable parts.
|
||||
///
|
||||
/// MetaItems are syntactically extremely flexible, but specific attributes want to parse
|
||||
/// them in custom, more restricted ways. This can be done using this struct.
|
||||
///
|
||||
/// MetaItems consist of some path, and some args. The args could be empty. In other words:
|
||||
///
|
||||
/// - `name` -> args are empty
|
||||
/// - `name(...)` -> args are a [`list`](ArgParser::list), which is the bit between the parentheses
|
||||
/// - `name = value` -> arg is [`name_value`](ArgParser::name_value), where the argument is the
|
||||
/// `= value` part
|
||||
///
|
||||
/// The syntax of MetaItems can be found at <https://doc.rust-lang.org/reference/attributes.html>
|
||||
#[derive(Clone)]
|
||||
pub struct MetaItemParser<'a> {
|
||||
path: PathParser<'a>,
|
||||
args: ArgParser<'a>,
|
||||
}
|
||||
|
||||
impl<'a> Debug for MetaItemParser<'a> {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_struct("MetaItemParser")
|
||||
.field("path", &self.path)
|
||||
.field("args", &self.args)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> MetaItemParser<'a> {
|
||||
/// Create a new parser from a [`NormalAttr`], which is stored inside of any
|
||||
/// [`ast::Attribute`](rustc_ast::Attribute)
|
||||
pub fn from_attr(attr: &'a NormalAttr, dcx: DiagCtxtHandle<'a>) -> Self {
|
||||
Self {
|
||||
path: PathParser::Ast(&attr.item.path),
|
||||
args: ArgParser::from_attr_args(&attr.item.args, dcx),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a> MetaItemParser<'a> {
|
||||
pub fn span(&self) -> Span {
|
||||
if let Some(other) = self.args.span() {
|
||||
self.path.span().with_hi(other.hi())
|
||||
} else {
|
||||
self.path.span()
|
||||
}
|
||||
}
|
||||
|
||||
/// Gets just the path, without the args.
|
||||
pub fn path_without_args(&self) -> PathParser<'a> {
|
||||
self.path.clone()
|
||||
}
|
||||
|
||||
/// Gets just the args parser, without caring about the path.
|
||||
pub fn args(&self) -> &ArgParser<'a> {
|
||||
&self.args
|
||||
}
|
||||
|
||||
pub fn deconstruct(&self) -> (PathParser<'a>, &ArgParser<'a>) {
|
||||
(self.path_without_args(), self.args())
|
||||
}
|
||||
|
||||
/// Asserts that this MetaItem starts with a path. Some examples:
|
||||
///
|
||||
/// - `#[rustfmt::skip]`: `rustfmt::skip` is a path
|
||||
/// - `#[allow(clippy::complexity)]`: `clippy::complexity` is a path
|
||||
/// - `#[inline]`: `inline` is a single segment path
|
||||
pub fn path(&self) -> (PathParser<'a>, &ArgParser<'a>) {
|
||||
self.deconstruct()
|
||||
}
|
||||
|
||||
/// Asserts that this MetaItem starts with a word, or single segment path.
|
||||
/// Doesn't return the args parser.
|
||||
///
|
||||
/// For examples, see [`Self::word`].
|
||||
pub fn word_without_args(&self) -> Option<Ident> {
|
||||
Some(self.word()?.0)
|
||||
}
|
||||
|
||||
/// Like [`word`](Self::word), but returns an empty symbol instead of None
|
||||
pub fn word_or_empty_without_args(&self) -> Ident {
|
||||
self.word_or_empty().0
|
||||
}
|
||||
|
||||
/// Asserts that this MetaItem starts with a word, or single segment path.
|
||||
///
|
||||
/// Some examples:
|
||||
/// - `#[inline]`: `inline` is a word
|
||||
/// - `#[rustfmt::skip]`: `rustfmt::skip` is a path,
|
||||
/// and not a word and should instead be parsed using [`path`](Self::path)
|
||||
pub fn word(&self) -> Option<(Ident, &ArgParser<'a>)> {
|
||||
let (path, args) = self.deconstruct();
|
||||
Some((path.word()?, args))
|
||||
}
|
||||
|
||||
/// Like [`word`](Self::word), but returns an empty symbol instead of None
|
||||
pub fn word_or_empty(&self) -> (Ident, &ArgParser<'a>) {
|
||||
let (path, args) = self.deconstruct();
|
||||
(path.word().unwrap_or(Ident::empty()), args)
|
||||
}
|
||||
|
||||
/// Asserts that this MetaItem starts with some specific word.
|
||||
///
|
||||
/// See [`word`](Self::word) for examples of what a word is.
|
||||
pub fn word_is(&self, sym: Symbol) -> Option<&ArgParser<'a>> {
|
||||
self.path_without_args().word_is(sym).then(|| self.args())
|
||||
}
|
||||
|
||||
/// Asserts that this MetaItem starts with some specific path.
///
/// See [`path`](Self::path) for examples of what a path is.
|
||||
pub fn path_is(&self, segments: &[Symbol]) -> Option<&ArgParser<'a>> {
|
||||
self.path_without_args().segments_is(segments).then(|| self.args())
|
||||
}
|
||||
}
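A short illustrative usage sketch (not part of this commit), assuming a `NormalAttr` named `normal_attr` and a `DiagCtxtHandle` named `dcx` are in scope:

// Pull the string value out of `#[doc = "..."]` with the parser API above.
let parser = MetaItemParser::from_attr(normal_attr, dcx);
let (path, args) = parser.deconstruct();
if path.word_is(sym::doc) {
    if let Some(name_value) = args.name_value() {
        // `value_as_str` returns `Some(Symbol)` only for string literals.
        let _doc_text = name_value.value_as_str();
    }
}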

#[derive(Clone)]
pub struct NameValueParser {
    pub eq_span: Span,
    value: MetaItemLit,
    pub value_span: Span,
}

impl Debug for NameValueParser {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("NameValueParser")
            .field("eq_span", &self.eq_span)
            .field("value", &self.value)
            .field("value_span", &self.value_span)
            .finish()
    }
}

impl NameValueParser {
    pub fn value_as_lit(&self) -> &MetaItemLit {
        &self.value
    }

    pub fn value_as_str(&self) -> Option<Symbol> {
        self.value_as_lit().kind.str()
    }
}
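A small illustrative helper (a sketch, not part of the commit) showing the intended split between `value_as_str` and `value_as_lit` for a `#[name = "value"]` attribute:

// Sketch only: `nv` is the NameValueParser produced by the parser below.
fn describe_value(nv: &NameValueParser) -> String {
    match nv.value_as_str() {
        Some(s) => format!("string value: {s}"),
        // Non-string literals (integers, bools, ...) remain reachable
        // through the underlying `MetaItemLit`.
        None => format!("non-string literal: {:?}", nv.value_as_lit().kind),
    }
}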

fn expr_to_lit(dcx: DiagCtxtHandle<'_>, expr: &Expr) -> MetaItemLit {
    // In valid code the value always ends up as a single literal. Otherwise, a dummy
    // literal suffices because the error is handled elsewhere.
    if let ExprKind::Lit(token_lit) = expr.kind
        && let Ok(lit) = MetaItemLit::from_token_lit(token_lit, expr.span)
    {
        lit
    } else {
        let guar = dcx.has_errors().unwrap();
        MetaItemLit { symbol: kw::Empty, suffix: None, kind: LitKind::Err(guar), span: DUMMY_SP }
    }
}

struct MetaItemListParserContext<'a> {
    // the tokens inside the delimiters, so `#[some::attr(a b c)]` would have `a b c` inside
    inside_delimiters: Peekable<TokenStreamIter<'a>>,
    dcx: DiagCtxtHandle<'a>,
}

impl<'a> MetaItemListParserContext<'a> {
    fn done(&mut self) -> bool {
        self.inside_delimiters.peek().is_none()
    }

    fn next_path(&mut self) -> Option<AttrPath> {
        // FIXME: Share code with `parse_path`.
        let tt = self.inside_delimiters.next().map(|tt| TokenTree::uninterpolate(tt));

        match tt.as_deref()? {
            &TokenTree::Token(
                Token { kind: ref kind @ (token::Ident(..) | token::PathSep), span },
                _,
            ) => {
                // here we have either an ident or pathsep `::`.

                let mut segments = if let &token::Ident(name, _) = kind {
                    // when we look ahead and see another pathsep, more path segments are coming
                    if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
                        self.inside_delimiters.peek()
                    {
                        self.inside_delimiters.next();
                        vec![Ident::new(name, span)]
                    } else {
                        // else we have a single identifier path, that's all
                        return Some(AttrPath {
                            segments: vec![Ident::new(name, span)].into_boxed_slice(),
                            span,
                        });
                    }
                } else {
                    // if `::` is all we get, we just got a path root
                    vec![Ident::new(kw::PathRoot, span)]
                };

                // one segment accepted. accept n more
                loop {
                    // another ident?
                    if let Some(&TokenTree::Token(Token { kind: token::Ident(name, _), span }, _)) =
                        self.inside_delimiters
                            .next()
                            .map(|tt| TokenTree::uninterpolate(tt))
                            .as_deref()
                    {
                        segments.push(Ident::new(name, span));
                    } else {
                        return None;
                    }
                    // stop unless we see another `::`
                    if let Some(TokenTree::Token(Token { kind: token::PathSep, .. }, _)) =
                        self.inside_delimiters.peek()
                    {
                        self.inside_delimiters.next();
                    } else {
                        break;
                    }
                }
                let span = span.with_hi(segments.last().unwrap().span.hi());
                Some(AttrPath { segments: segments.into_boxed_slice(), span })
            }
            TokenTree::Token(
                Token { kind: token::OpenDelim(_) | token::CloseDelim(_), .. },
                _,
            ) => None,
            _ => {
                // malformed attributes can get here. We can't crash, but somewhere else should've
                // already warned for this.
                None
            }
        }
    }

    fn value(&mut self) -> Option<MetaItemLit> {
        match self.inside_delimiters.next() {
            Some(TokenTree::Delimited(.., Delimiter::Invisible(_), inner_tokens)) => {
                MetaItemListParserContext {
                    inside_delimiters: inner_tokens.iter().peekable(),
                    dcx: self.dcx,
                }
                .value()
            }
            Some(TokenTree::Token(token, _)) => MetaItemLit::from_token(token),
            _ => None,
        }
    }

    /// Parses one element on the inside of a list attribute like `#[my_attr( <insides> )]`.
    ///
    /// Parses a path followed by either:
    /// 1. nothing (a word attr)
    /// 2. a parenthesized list
    /// 3. an equals sign and a literal (name-value)
    ///
    /// Can also parse *just* a literal. This is for cases like `#[my_attr("literal")]`
    /// where no path is given before the literal.
    ///
    /// There are also some exceptions for interpolated attributes, which are already pre-processed.
    fn next(&mut self) -> Option<MetaItemOrLitParser<'a>> {
        // a list element is either a literal
        if let Some(TokenTree::Token(token, _)) = self.inside_delimiters.peek()
            && let Some(lit) = MetaItemLit::from_token(token)
        {
            self.inside_delimiters.next();
            return Some(MetaItemOrLitParser::Lit(lit));
        }

        // or a path.
        let path =
            if let Some(TokenTree::Token(Token { kind: token::Interpolated(nt), span, .. }, _)) =
                self.inside_delimiters.peek()
            {
                match &**nt {
                    // or maybe a full nt meta including the path, in which case we return immediately
                    token::Nonterminal::NtMeta(item) => {
                        self.inside_delimiters.next();

                        return Some(MetaItemOrLitParser::MetaItemParser(MetaItemParser {
                            path: PathParser::Ast(&item.path),
                            args: ArgParser::from_attr_args(&item.args, self.dcx),
                        }));
                    }
                    // an already interpolated path from a macro expansion is a path, no need to parse
                    // one from tokens
                    token::Nonterminal::NtPath(path) => {
                        self.inside_delimiters.next();

                        AttrPath::from_ast(path)
                    }
                    _ => {
                        self.inside_delimiters.next();
                        // we get here if an expr ended up in an attribute and
                        // expansion did not turn it into a literal. Say, `#[repr(align(macro!()))]`
                        // where the macro didn't expand to a literal. An error has already been
                        // emitted for this at this point, and we continue, which makes this path
                        // reachable...
                        let e = self.dcx.span_delayed_bug(
                            *span,
                            "expr in place where literal is expected (builtin attr parsing)",
                        );

                        return Some(MetaItemOrLitParser::Err(*span, e));
                    }
                }
            } else {
                self.next_path()?
            };

        // Paths can be followed by:
        // - `(more meta items)` (another list)
        // - `= lit` (a name-value)
        // - nothing
        Some(MetaItemOrLitParser::MetaItemParser(match self.inside_delimiters.peek() {
            Some(TokenTree::Delimited(dspan, _, Delimiter::Parenthesis, inner_tokens)) => {
                self.inside_delimiters.next();

                MetaItemParser {
                    path: PathParser::Attr(path),
                    args: ArgParser::List(MetaItemListParser::new_tts(
                        inner_tokens.iter(),
                        dspan.entire(),
                        self.dcx,
                    )),
                }
            }
            Some(TokenTree::Delimited(_, ..)) => {
                self.inside_delimiters.next();
                // self.dcx.span_delayed_bug(span.entire(), "wrong delimiters");
                return None;
            }
            Some(TokenTree::Token(Token { kind: token::Eq, span }, _)) => {
                self.inside_delimiters.next();
                let value = self.value()?;
                MetaItemParser {
                    path: PathParser::Attr(path),
                    args: ArgParser::NameValue(NameValueParser {
                        eq_span: *span,
                        value_span: value.span,
                        value,
                    }),
                }
            }
            _ => MetaItemParser { path: PathParser::Attr(path), args: ArgParser::NoArgs },
        }))
    }

    fn parse(mut self, span: Span) -> MetaItemListParser<'a> {
        let mut sub_parsers = Vec::new();

        while !self.done() {
            let Some(n) = self.next() else {
                continue;
            };
            sub_parsers.push(n);

            match self.inside_delimiters.peek() {
                None | Some(TokenTree::Token(Token { kind: token::Comma, .. }, _)) => {
                    self.inside_delimiters.next();
                }
                Some(_) => {}
            }
        }

        MetaItemListParser { sub_parsers, span }
    }
}
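To make the shapes handled by `next` concrete, here is a small compilable snippet using real, stable attributes for the three path-based forms; the bare-literal form (for example the string in a hypothetical `#[my_attr("lit")]`) only occurs inside list attributes and has no stable stand-alone equivalent:

// Real attributes exercising the three path-based shapes parsed by `next`:
#[inline]                  // 1. just a path (a "word")
#[allow(dead_code)]        // 2. a path followed by a parenthesized list
#[doc = "element shapes"]  // 3. a path followed by `=` and a literal (name-value)
fn shapes() {}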

#[derive(Debug, Clone)]
pub struct MetaItemListParser<'a> {
    sub_parsers: Vec<MetaItemOrLitParser<'a>>,
    pub span: Span,
}

impl<'a> MetaItemListParser<'a> {
    fn new(delim: &'a DelimArgs, dcx: DiagCtxtHandle<'a>) -> MetaItemListParser<'a> {
        MetaItemListParser::new_tts(delim.tokens.iter(), delim.dspan.entire(), dcx)
    }

    fn new_tts(tts: TokenStreamIter<'a>, span: Span, dcx: DiagCtxtHandle<'a>) -> Self {
        MetaItemListParserContext { inside_delimiters: tts.peekable(), dcx }.parse(span)
    }

    /// Lets you pick and choose what to parse each element in the list as.
    pub fn mixed<'s>(&'s self) -> impl Iterator<Item = &'s MetaItemOrLitParser<'a>> + 's {
        self.sub_parsers.iter()
    }

    pub fn len(&self) -> usize {
        self.sub_parsers.len()
    }

    pub fn is_empty(&self) -> bool {
        self.len() == 0
    }

    /// Asserts that every item in the list is a meta item starting with a word.
    ///
    /// See [`MetaItemParser::word`] for examples of words.
    pub fn all_word_list<'s>(&'s self) -> Option<Vec<(Ident, &'s ArgParser<'a>)>> {
        self.mixed().map(|i| i.meta_item()?.word()).collect()
    }

    /// Asserts that every item in the list is a meta item starting with a full path.
    ///
    /// See [`MetaItemParser::path`] for examples of paths.
    pub fn all_path_list<'s>(&'s self) -> Option<Vec<(PathParser<'a>, &'s ArgParser<'a>)>> {
        self.mixed().map(|i| Some(i.meta_item()?.path())).collect()
    }

    /// Returns `Some` if the list contains only a single element.
    ///
    /// Inside the `Some` is the parser for this single element.
    pub fn single(&self) -> Option<&MetaItemOrLitParser<'a>> {
        let mut iter = self.mixed();
        iter.next().filter(|_| iter.next().is_none())
    }
}
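A hedged usage sketch (not part of the diff) of walking a parsed list with `mixed`, assuming `MetaItemOrLitParser` has exactly the `Lit`, `MetaItemParser`, and `Err` variants constructed above:

// Sketch only: describe each element of something like `#[my_attr(word, key = "v", "lit")]`.
fn describe_list(list: &MetaItemListParser<'_>) -> Vec<String> {
    list.mixed()
        .map(|element| match element {
            MetaItemOrLitParser::Lit(lit) => format!("bare literal: {:?}", lit.kind),
            MetaItemOrLitParser::MetaItemParser(meta) => match meta.word_without_args() {
                Some(ident) => format!("meta item starting with the word `{ident}`"),
                None => "meta item with a multi-segment path".to_string(),
            },
            MetaItemOrLitParser::Err(span, _guar) => format!("unparsable element at {span:?}"),
        })
        .collect()
}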

@@ -872,7 +872,7 @@ fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option<AutoDiffAttrs> {
        _ => {
            //FIXME(ZuseZ4): Once we fixed our parser, we should also prohibit the two-attribute
            //branch above.
            span_bug!(attrs[1].span, "cg_ssa: rustc_autodiff should only exist once per source");
            span_bug!(attrs[1].span(), "cg_ssa: rustc_autodiff should only exist once per source");
        }
    };

@@ -884,12 +884,12 @@ fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option<AutoDiffAttrs> {
    }

    let [mode, input_activities @ .., ret_activity] = &list[..] else {
        span_bug!(attr.span, "rustc_autodiff attribute must contain mode and activities");
        span_bug!(attr.span(), "rustc_autodiff attribute must contain mode and activities");
    };
    let mode = if let MetaItemInner::MetaItem(MetaItem { path: p1, .. }) = mode {
        p1.segments.first().unwrap().ident
    } else {
        span_bug!(attr.span, "rustc_autodiff attribute must contain mode");
        span_bug!(attr.span(), "rustc_autodiff attribute must contain mode");
    };

    // parse mode

@@ -905,7 +905,7 @@ fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option<AutoDiffAttrs> {
    let ret_symbol = if let MetaItemInner::MetaItem(MetaItem { path: p1, .. }) = ret_activity {
        p1.segments.first().unwrap().ident
    } else {
        span_bug!(attr.span, "rustc_autodiff attribute must contain the return activity");
        span_bug!(attr.span(), "rustc_autodiff attribute must contain the return activity");
    };

    // Then parse it into an actual DiffActivity

@@ -940,11 +940,11 @@ fn autodiff_attrs(tcx: TyCtxt<'_>, id: DefId) -> Option<AutoDiffAttrs> {

    for &input in &arg_activities {
        if !valid_input_activity(mode, input) {
            span_bug!(attr.span, "Invalid input activity {} for {} mode", input, mode);
            span_bug!(attr.span(), "Invalid input activity {} for {} mode", input, mode);
        }
    }
    if !valid_ret_activity(mode, ret_activity) {
        span_bug!(attr.span, "Invalid return activity {} for {} mode", ret_activity, mode);
        span_bug!(attr.span(), "Invalid return activity {} for {} mode", ret_activity, mode);
    }

    Some(AutoDiffAttrs { mode, ret_activity, input_activity: arg_activities })

@@ -9,6 +9,7 @@ odht = { version = "0.3.1", features = ["nightly"] }
rustc_abi = { path = "../rustc_abi" }
rustc_arena = { path = "../rustc_arena" }
rustc_ast = { path = "../rustc_ast" }
rustc_attr_data_structures = { path = "../rustc_attr_data_structures" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_hashes = { path = "../rustc_hashes" }
rustc_index = { path = "../rustc_index" }

@@ -1,18 +1,20 @@
// ignore-tidy-filelength
use std::fmt;

use rustc_abi::ExternAbi;
// ignore-tidy-filelength
use rustc_ast::attr::AttributeExt;
use rustc_ast::token::CommentKind;
use rustc_ast::util::parser::{AssocOp, ExprPrecedence};
use rustc_ast::{
    self as ast, AttrId, AttrStyle, DelimArgs, FloatTy, InlineAsmOptions, InlineAsmTemplatePiece,
    IntTy, Label, LitIntType, LitKind, MetaItemInner, MetaItemLit, TraitObjectSyntax, UintTy,
    self as ast, FloatTy, InlineAsmOptions, InlineAsmTemplatePiece, IntTy, Label, LitIntType,
    LitKind, TraitObjectSyntax, UintTy, UnsafeBinderCastKind,
};
pub use rustc_ast::{
    BinOp, BinOpKind, BindingMode, BorrowKind, BoundConstness, BoundPolarity, ByRef, CaptureBy,
    ImplPolarity, IsAuto, Movability, Mutability, UnOp, UnsafeBinderCastKind,
    AttrId, AttrStyle, BinOp, BinOpKind, BindingMode, BorrowKind, BoundConstness, BoundPolarity,
    ByRef, CaptureBy, DelimArgs, ImplPolarity, IsAuto, MetaItemInner, MetaItemLit, Movability,
    Mutability, UnOp,
};
use rustc_attr_data_structures::AttributeKind;
use rustc_data_structures::fingerprint::Fingerprint;
use rustc_data_structures::sorted_map::SortedMap;
use rustc_data_structures::tagged_ptr::TaggedRef;

@@ -1009,59 +1011,82 @@ pub enum AttrArgs {
    },
}

#[derive(Clone, Debug, Encodable, Decodable)]
pub enum AttrKind {
    /// A normal attribute.
    Normal(Box<AttrItem>),

    /// A doc comment (e.g. `/// ...`, `//! ...`, `/** ... */`, `/*! ... */`).
    /// Doc attributes (e.g. `#[doc="..."]`) are represented with the `Normal`
    /// variant (which is much less compact and thus more expensive).
    DocComment(CommentKind, Symbol),
}

#[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable)]
pub struct AttrPath {
    pub segments: Box<[Ident]>,
    pub span: Span,
}

impl AttrPath {
    pub fn from_ast(path: &ast::Path) -> Self {
        AttrPath {
            segments: path.segments.iter().map(|i| i.ident).collect::<Vec<_>>().into_boxed_slice(),
            span: path.span,
        }
    }
}

impl fmt::Display for AttrPath {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.segments.iter().map(|i| i.to_string()).collect::<Vec<_>>().join("::"))
    }
}

#[derive(Clone, Debug, HashStable_Generic, Encodable, Decodable)]
pub struct AttrItem {
    pub unsafety: Safety,
    // Not lowered to hir::Path because we have no NodeId to resolve to.
    pub path: AttrPath,
    pub args: AttrArgs,
}

#[derive(Clone, Debug, Encodable, Decodable)]
pub struct Attribute {
    pub kind: AttrKind,
    pub id: AttrId,
    pub id: HashIgnoredAttrId,
    /// Denotes if the attribute decorates the following construct (outer)
    /// or the construct this attribute is contained within (inner).
    pub style: AttrStyle,
    /// Span of the entire attribute
    pub span: Span,
}

/// The derived implementation of [`HashStable_Generic`] on [`Attribute`]s shouldn't hash
/// [`AttrId`]s. By wrapping them in this, we make sure we never do.
#[derive(Copy, Debug, Encodable, Decodable, Clone)]
pub struct HashIgnoredAttrId {
    pub attr_id: AttrId,
}

#[derive(Clone, Debug, Encodable, Decodable, HashStable_Generic)]
pub enum Attribute {
    /// A parsed built-in attribute.
    ///
    /// Each attribute has a span connected to it. However, you must be somewhat careful using it.
    /// That's because sometimes we merge multiple attributes together, like when an item has
    /// multiple `repr` attributes. In this case the span might not be very useful.
    Parsed(AttributeKind),

    /// An attribute that could not be parsed into a structured form and is kept in its
    /// token-like representation. This is the case for custom tool attributes.
    Unparsed(Box<AttrItem>),
}
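As a sketch of how downstream code might branch on the new two-variant `Attribute` (illustrative only; it assumes `AttributeKind::DocComment` carries the `comment` field that the pretty-printer change later in this commit destructures, and that `AttributeKind` implements `Debug`, as implied by the derive on `Attribute`):

// Sketch only: summarize an attribute under the new representation.
fn attr_summary(attr: &Attribute) -> String {
    match attr {
        // Built-in attributes the new infrastructure already understands.
        Attribute::Parsed(AttributeKind::DocComment { comment, .. }) => {
            format!("doc comment ({} bytes)", comment.as_str().len())
        }
        Attribute::Parsed(other) => format!("other parsed attribute: {other:?}"),
        // Everything else keeps its token-like `AttrItem` form.
        Attribute::Unparsed(item) => format!("unparsed attribute `{}`", item.path),
    }
}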

impl Attribute {
    pub fn get_normal_item(&self) -> &AttrItem {
        match &self.kind {
            AttrKind::Normal(normal) => &normal,
            AttrKind::DocComment(..) => panic!("unexpected doc comment"),
        match &self {
            Attribute::Unparsed(normal) => &normal,
            _ => panic!("unexpected parsed attribute"),
        }
    }

    pub fn unwrap_normal_item(self) -> AttrItem {
        match self.kind {
            AttrKind::Normal(normal) => *normal,
            AttrKind::DocComment(..) => panic!("unexpected doc comment"),
        match self {
            Attribute::Unparsed(normal) => *normal,
            _ => panic!("unexpected parsed attribute"),
        }
    }

    pub fn value_lit(&self) -> Option<&MetaItemLit> {
        match &self.kind {
            AttrKind::Normal(box AttrItem { args: AttrArgs::Eq { expr, .. }, .. }) => Some(expr),
        match &self {
            Attribute::Unparsed(n) => match n.as_ref() {
                AttrItem { args: AttrArgs::Eq { eq_span: _, expr }, .. } => Some(expr),
                _ => None,
            },
            _ => None,
        }
    }

@@ -1069,15 +1094,21 @@ impl Attribute {

impl AttributeExt for Attribute {
    fn id(&self) -> AttrId {
        self.id
        match &self {
            Attribute::Unparsed(u) => u.id.attr_id,
            _ => panic!(),
        }
    }

    fn meta_item_list(&self) -> Option<ThinVec<ast::MetaItemInner>> {
        match &self.kind {
            AttrKind::Normal(box AttrItem { args: AttrArgs::Delimited(d), .. }) => {
        match &self {
            Attribute::Unparsed(n) => match n.as_ref() {
                AttrItem { args: AttrArgs::Delimited(d), .. } => {
                    ast::MetaItemKind::list_from_tokens(d.tokens.clone())
                }
                _ => None,
            },
            _ => None,
        }
    }

@@ -1091,51 +1122,68 @@ impl AttributeExt for Attribute {

    /// For a single-segment attribute, returns its name; otherwise, returns `None`.
    fn ident(&self) -> Option<Ident> {
        match &self.kind {
            AttrKind::Normal(box AttrItem {
                path: AttrPath { segments: box [ident], .. }, ..
            }) => Some(*ident),
        match &self {
            Attribute::Unparsed(n) => {
                if let [ident] = n.path.segments.as_ref() {
                    Some(*ident)
                } else {
                    None
                }
            }
            _ => None,
        }
    }

    fn path_matches(&self, name: &[Symbol]) -> bool {
        match &self.kind {
            AttrKind::Normal(n) => n.path.segments.iter().map(|segment| &segment.name).eq(name),
            AttrKind::DocComment(..) => false,
        match &self {
            Attribute::Unparsed(n) => {
                n.path.segments.len() == name.len()
                    && n.path.segments.iter().zip(name).all(|(s, n)| s.name == *n)
            }
            _ => false,
        }
    }

    fn is_doc_comment(&self) -> bool {
        matches!(self.kind, AttrKind::DocComment(..))
        matches!(self, Attribute::Parsed(AttributeKind::DocComment { .. }))
    }

    fn span(&self) -> Span {
        self.span
        match &self {
            Attribute::Unparsed(u) => u.span,
            a => panic!("can't get the span of an arbitrary parsed attribute: {a:?}"),
        }
    }

    fn is_word(&self) -> bool {
        matches!(self.kind, AttrKind::Normal(box AttrItem { args: AttrArgs::Empty, .. }))
        match &self {
            Attribute::Unparsed(n) => {
                matches!(n.args, AttrArgs::Empty)
            }
            _ => false,
        }
    }

    fn ident_path(&self) -> Option<SmallVec<[Ident; 1]>> {
        match &self.kind {
            AttrKind::Normal(n) => Some(n.path.segments.iter().copied().collect()),
            AttrKind::DocComment(..) => None,
        match &self {
            Attribute::Unparsed(n) => Some(n.path.segments.iter().copied().collect()),
            _ => None,
        }
    }

    fn doc_str(&self) -> Option<Symbol> {
        match &self.kind {
            AttrKind::DocComment(.., data) => Some(*data),
            AttrKind::Normal(_) if self.has_name(sym::doc) => self.value_str(),
        match &self {
            Attribute::Parsed(AttributeKind::DocComment { comment, .. }) => Some(*comment),
            Attribute::Unparsed(_) if self.has_name(sym::doc) => self.value_str(),
            _ => None,
        }
    }
    fn doc_str_and_comment_kind(&self) -> Option<(Symbol, CommentKind)> {
        match &self.kind {
            AttrKind::DocComment(kind, data) => Some((*data, *kind)),
            AttrKind::Normal(_) if self.name_or_empty() == sym::doc => {
        match &self {
            Attribute::Parsed(AttributeKind::DocComment { kind, comment, .. }) => {
                Some((*comment, *kind))
            }
            Attribute::Unparsed(_) if self.name_or_empty() == sym::doc => {
                self.value_str().map(|s| (s, CommentKind::Line))
            }
            _ => None,

@@ -1143,7 +1191,10 @@ impl AttributeExt for Attribute {
    }

    fn style(&self) -> AttrStyle {
        self.style
        match &self {
            Attribute::Unparsed(u) => u.style,
            _ => panic!(),
        }
    }
}
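Because `span()` and `style()` above panic for `Attribute::Parsed`, callers that can encounter parsed attributes may want a fallible accessor along these lines (a sketch under the assumption that the unparsed `AttrItem` carries the `span` field read by `span()`):

// Sketch only: a span accessor that never panics on parsed attributes.
fn try_span(attr: &Attribute) -> Option<Span> {
    match attr {
        Attribute::Unparsed(item) => Some(item.span),
        // Parsed attributes may result from merging several source attributes,
        // so a single span is not always meaningful.
        Attribute::Parsed(_) => None,
    }
}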

@@ -1,17 +1,17 @@
use rustc_data_structures::stable_hasher::{HashStable, StableHasher, ToStableHashKey};
use rustc_span::def_id::DefPathHash;

use crate::HashIgnoredAttrId;
use crate::hir::{
    Attribute, AttributeMap, BodyId, Crate, ForeignItemId, ImplItemId, ItemId, OwnerNodes,
    TraitItemId,
    AttributeMap, BodyId, Crate, ForeignItemId, ImplItemId, ItemId, OwnerNodes, TraitItemId,
};
use crate::hir_id::{HirId, ItemLocalId};

/// Requirements for a `StableHashingContext` to be used in this crate.
/// This is a hack to allow using the `HashStable_Generic` derive macro
/// instead of implementing everything in `rustc_middle`.
pub trait HashStableContext: rustc_ast::HashStableContext + rustc_abi::HashStableContext {
    fn hash_attr(&mut self, _: &Attribute, hasher: &mut StableHasher);
pub trait HashStableContext: rustc_attr_data_structures::HashStableContext + rustc_ast::HashStableContext + rustc_abi::HashStableContext {
    fn hash_attr_id(&mut self, id: &HashIgnoredAttrId, hasher: &mut StableHasher);
}

impl<HirCtx: crate::HashStableContext> ToStableHashKey<HirCtx> for HirId {

@@ -114,8 +114,8 @@ impl<HirCtx: crate::HashStableContext> HashStable<HirCtx> for Crate<'_> {
    }
}

impl<HirCtx: crate::HashStableContext> HashStable<HirCtx> for Attribute {
impl<HirCtx: crate::HashStableContext> HashStable<HirCtx> for HashIgnoredAttrId {
    fn hash_stable(&self, hcx: &mut HirCtx, hasher: &mut StableHasher) {
        hcx.hash_attr(self, hasher)
        hcx.hash_attr_id(self, hasher)
    }
}

@@ -111,14 +111,14 @@ pub(crate) fn vtables<'tcx>(tcx: TyCtxt<'tcx>) {
        let trait_ref = tcx.impl_trait_ref(def_id).unwrap().instantiate_identity();
        if trait_ref.has_non_region_param() {
            tcx.dcx().span_err(
                attr.span,
                attr.span(),
                "`rustc_dump_vtable` must be applied to non-generic impl",
            );
            continue;
        }
        if !tcx.is_dyn_compatible(trait_ref.def_id) {
            tcx.dcx().span_err(
                attr.span,
                attr.span(),
                "`rustc_dump_vtable` must be applied to dyn-compatible trait",
            );
            continue;

@@ -127,7 +127,7 @@ pub(crate) fn vtables<'tcx>(tcx: TyCtxt<'tcx>) {
            .try_normalize_erasing_regions(ty::TypingEnv::fully_monomorphized(), trait_ref)
        else {
            tcx.dcx().span_err(
                attr.span,
                attr.span(),
                "`rustc_dump_vtable` applied to impl header that cannot be normalized",
            );
            continue;

@@ -138,7 +138,7 @@ pub(crate) fn vtables<'tcx>(tcx: TyCtxt<'tcx>) {
        let ty = tcx.type_of(def_id).instantiate_identity();
        if ty.has_non_region_param() {
            tcx.dcx().span_err(
                attr.span,
                attr.span(),
                "`rustc_dump_vtable` must be applied to non-generic type",
            );
            continue;

@@ -147,13 +147,14 @@ pub(crate) fn vtables<'tcx>(tcx: TyCtxt<'tcx>) {
            tcx.try_normalize_erasing_regions(ty::TypingEnv::fully_monomorphized(), ty)
        else {
            tcx.dcx().span_err(
                attr.span,
                attr.span(),
                "`rustc_dump_vtable` applied to type alias that cannot be normalized",
            );
            continue;
        };
        let ty::Dynamic(data, _, _) = *ty.kind() else {
            tcx.dcx().span_err(attr.span, "`rustc_dump_vtable` to type alias of dyn type");
            tcx.dcx()
                .span_err(attr.span(), "`rustc_dump_vtable` to type alias of dyn type");
            continue;
        };
        if let Some(principal) = data.principal() {

@@ -166,7 +167,7 @@ pub(crate) fn vtables<'tcx>(tcx: TyCtxt<'tcx>) {
        }
        _ => {
            tcx.dcx().span_err(
                attr.span,
                attr.span(),
                "`rustc_dump_vtable` only applies to impl, or type alias of dyn type",
            );
            continue;

@@ -8,6 +8,7 @@ edition = "2024"
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
rustc_ast_pretty = { path = "../rustc_ast_pretty" }
rustc_attr_parsing = { path = "../rustc_attr_parsing" }
rustc_hir = { path = "../rustc_hir" }
rustc_span = { path = "../rustc_span" }
# tidy-alphabetical-end

@@ -11,11 +11,12 @@ use std::vec;

use rustc_abi::ExternAbi;
use rustc_ast::util::parser::{self, AssocOp, ExprPrecedence, Fixity};
use rustc_ast::{DUMMY_NODE_ID, DelimArgs};
use rustc_ast::{AttrStyle, DUMMY_NODE_ID, DelimArgs};
use rustc_ast_pretty::pp::Breaks::{Consistent, Inconsistent};
use rustc_ast_pretty::pp::{self, Breaks};
use rustc_ast_pretty::pprust::state::MacHeader;
use rustc_ast_pretty::pprust::{Comments, PrintState};
use rustc_attr_parsing::AttributeKind;
use rustc_hir::{
    BindingMode, ByRef, ConstArgKind, GenericArg, GenericBound, GenericParam, GenericParamKind,
    HirId, ImplicitSelfKind, LifetimeParamKind, Node, PatKind, PreciseCapturingArg, RangeEnd, Term,

@@ -80,65 +81,43 @@ impl<'a> State<'a> {
        (self.attrs)(id)
    }

    fn print_inner_attributes(&mut self, attrs: &[hir::Attribute]) -> bool {
        self.print_either_attributes(attrs, ast::AttrStyle::Inner, false, true)
    fn print_attrs_as_inner(&mut self, attrs: &[hir::Attribute]) {
        self.print_either_attributes(attrs, ast::AttrStyle::Inner)
    }

    fn print_outer_attributes(&mut self, attrs: &[hir::Attribute]) -> bool {
        self.print_either_attributes(attrs, ast::AttrStyle::Outer, false, true)
    fn print_attrs_as_outer(&mut self, attrs: &[hir::Attribute]) {
        self.print_either_attributes(attrs, ast::AttrStyle::Outer)
    }

    fn print_either_attributes(&mut self, attrs: &[hir::Attribute], style: ast::AttrStyle) {
        if attrs.is_empty() {
            return;
        }

    fn print_either_attributes(
        &mut self,
        attrs: &[hir::Attribute],
        kind: ast::AttrStyle,
        is_inline: bool,
        trailing_hardbreak: bool,
    ) -> bool {
        let mut printed = false;
        for attr in attrs {
            if attr.style == kind {
                self.print_attribute_inline(attr, is_inline);
                if is_inline {
                    self.nbsp();
                self.print_attribute_inline(attr, style);
                }
                printed = true;
            }
        }
        if printed && trailing_hardbreak && !is_inline {
            self.hardbreak_if_not_bol();
        }
        printed
    }

    fn print_attribute_inline(&mut self, attr: &hir::Attribute, is_inline: bool) {
        if !is_inline {
            self.hardbreak_if_not_bol();
        }
        self.maybe_print_comment(attr.span.lo());
        match &attr.kind {
            hir::AttrKind::Normal(normal) => {
                match attr.style {
    fn print_attribute_inline(&mut self, attr: &hir::Attribute, style: AttrStyle) {
        match &attr {
            hir::Attribute::Unparsed(unparsed) => {
                self.maybe_print_comment(unparsed.span.lo());
                match style {
                    ast::AttrStyle::Inner => self.word("#!["),
                    ast::AttrStyle::Outer => self.word("#["),
                }
                if normal.unsafety == hir::Safety::Unsafe {
                    self.word("unsafe(");
                }
                self.print_attr_item(&normal, attr.span);
                if normal.unsafety == hir::Safety::Unsafe {
                    self.word(")");
                }
                self.print_attr_item(&unparsed, unparsed.span);
                self.word("]");
            }
            hir::AttrKind::DocComment(comment_kind, data) => {
            hir::Attribute::Parsed(AttributeKind::DocComment { style, kind, comment, .. }) => {
                self.word(rustc_ast_pretty::pprust::state::doc_comment_to_string(
                    *comment_kind,
                    attr.style,
                    *data,
                    *kind, *style, *comment,
                ));
                self.hardbreak()
            }
            _ => unimplemented!("pretty print parsed attributes"),
        }
    }

@@ -162,7 +141,7 @@ impl<'a> State<'a> {
        false,
        None,
        *delim,
        tokens,
        &tokens,
        true,
        span,
    ),

@@ -307,7 +286,7 @@ where
}

pub fn attribute_to_string(ann: &dyn PpAnn, attr: &hir::Attribute) -> String {
    to_string(ann, |s| s.print_attribute_inline(attr, false))
    to_string(ann, |s| s.print_attribute_inline(attr, AttrStyle::Outer))
}

pub fn ty_to_string(ann: &dyn PpAnn, ty: &hir::Ty<'_>) -> String {

@@ -370,7 +349,7 @@ impl<'a> State<'a> {
    }

    fn print_mod(&mut self, _mod: &hir::Mod<'_>, attrs: &[hir::Attribute]) {
        self.print_inner_attributes(attrs);
        self.print_attrs_as_inner(attrs);
        for &item_id in _mod.item_ids {
            self.ann.nested(self, Nested::Item(item_id));
        }

@@ -487,7 +466,7 @@ impl<'a> State<'a> {
    fn print_foreign_item(&mut self, item: &hir::ForeignItem<'_>) {
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(item.span.lo());
        self.print_outer_attributes(self.attrs(item.hir_id()));
        self.print_attrs_as_outer(self.attrs(item.hir_id()));
        match item.kind {
            hir::ForeignItemKind::Fn(sig, arg_names, generics) => {
                self.head("");

@@ -591,7 +570,7 @@ impl<'a> State<'a> {
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(item.span.lo());
        let attrs = self.attrs(item.hir_id());
        self.print_outer_attributes(attrs);
        self.print_attrs_as_outer(attrs);
        self.ann.pre(self, AnnNode::Item(item));
        match item.kind {
            hir::ItemKind::ExternCrate(orig_name) => {

@@ -687,7 +666,7 @@ impl<'a> State<'a> {
        self.head("extern");
        self.word_nbsp(abi.to_string());
        self.bopen();
        self.print_inner_attributes(self.attrs(item.hir_id()));
        self.print_attrs_as_inner(self.attrs(item.hir_id()));
        for item in items {
            self.ann.nested(self, Nested::ForeignItem(item.id));
        }

@@ -755,7 +734,7 @@ impl<'a> State<'a> {

        self.space();
        self.bopen();
        self.print_inner_attributes(attrs);
        self.print_attrs_as_inner(attrs);
        for impl_item in items {
            self.ann.nested(self, Nested::ImplItem(impl_item.id));
        }

@@ -847,7 +826,7 @@ impl<'a> State<'a> {
        for v in variants {
            self.space_if_not_bol();
            self.maybe_print_comment(v.span.lo());
            self.print_outer_attributes(self.attrs(v.hir_id));
            self.print_attrs_as_outer(self.attrs(v.hir_id));
            self.ibox(INDENT_UNIT);
            self.print_variant(v);
            self.word(",");

@@ -880,7 +859,7 @@ impl<'a> State<'a> {
        self.popen();
        self.commasep(Inconsistent, struct_def.fields(), |s, field| {
            s.maybe_print_comment(field.span.lo());
            s.print_outer_attributes(s.attrs(field.hir_id));
            s.print_attrs_as_outer(s.attrs(field.hir_id));
            s.print_type(field.ty);
        });
        self.pclose();

@@ -907,7 +886,7 @@ impl<'a> State<'a> {
        for field in fields {
            self.hardbreak_if_not_bol();
            self.maybe_print_comment(field.span.lo());
            self.print_outer_attributes(self.attrs(field.hir_id));
            self.print_attrs_as_outer(self.attrs(field.hir_id));
            self.print_ident(field.ident);
            self.word_nbsp(":");
            self.print_type(field.ty);

@@ -943,7 +922,7 @@ impl<'a> State<'a> {
        self.ann.pre(self, AnnNode::SubItem(ti.hir_id()));
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(ti.span.lo());
        self.print_outer_attributes(self.attrs(ti.hir_id()));
        self.print_attrs_as_outer(self.attrs(ti.hir_id()));
        match ti.kind {
            hir::TraitItemKind::Const(ty, default) => {
                self.print_associated_const(ti.ident, ti.generics, ty, default);

@@ -971,7 +950,7 @@ impl<'a> State<'a> {
        self.ann.pre(self, AnnNode::SubItem(ii.hir_id()));
        self.hardbreak_if_not_bol();
        self.maybe_print_comment(ii.span.lo());
        self.print_outer_attributes(self.attrs(ii.hir_id()));
        self.print_attrs_as_outer(self.attrs(ii.hir_id()));

        match ii.kind {
            hir::ImplItemKind::Const(ty, expr) => {

@@ -1074,7 +1053,7 @@ impl<'a> State<'a> {
        self.ann.pre(self, AnnNode::Block(blk));
        self.bopen();

        self.print_inner_attributes(attrs);
        self.print_attrs_as_inner(attrs);

        for st in blk.stmts {
            self.print_stmt(st);

@@ -1264,7 +1243,7 @@ impl<'a> State<'a> {
            self.space();
        }
        self.cbox(INDENT_UNIT);
        self.print_outer_attributes(self.attrs(field.hir_id));
        self.print_attrs_as_outer(self.attrs(field.hir_id));
        if !field.is_shorthand {
            self.print_ident(field.ident);
            self.word_space(":");

@@ -1461,7 +1440,7 @@ impl<'a> State<'a> {

    fn print_expr(&mut self, expr: &hir::Expr<'_>) {
        self.maybe_print_comment(expr.span.lo());
        self.print_outer_attributes(self.attrs(expr.hir_id));
        self.print_attrs_as_outer(self.attrs(expr.hir_id));
        self.ibox(INDENT_UNIT);
        self.ann.pre(self, AnnNode::Expr(expr));
        match expr.kind {

@@ -1677,8 +1656,8 @@ impl<'a> State<'a> {
            }
            hir::ExprKind::UnsafeBinderCast(kind, expr, ty) => {
                match kind {
                    hir::UnsafeBinderCastKind::Wrap => self.word("wrap_binder!("),
                    hir::UnsafeBinderCastKind::Unwrap => self.word("unwrap_binder!("),
                    ast::UnsafeBinderCastKind::Wrap => self.word("wrap_binder!("),
                    ast::UnsafeBinderCastKind::Unwrap => self.word("unwrap_binder!("),
                }
                self.print_expr(expr);
                if let Some(ty) = ty {

@@ -2073,7 +2052,7 @@ impl<'a> State<'a> {
            self.space();
        }
        self.cbox(INDENT_UNIT);
        self.print_outer_attributes(self.attrs(field.hir_id));
        self.print_attrs_as_outer(self.attrs(field.hir_id));
        if !field.is_shorthand {
            self.print_ident(field.ident);
            self.word_nbsp(":");

@@ -2083,7 +2062,7 @@ impl<'a> State<'a> {
    }

    fn print_param(&mut self, arg: &hir::Param<'_>) {
        self.print_outer_attributes(self.attrs(arg.hir_id));
        self.print_attrs_as_outer(self.attrs(arg.hir_id));
        self.print_pat(arg.pat);
    }

@@ -2118,7 +2097,7 @@ impl<'a> State<'a> {
        self.cbox(INDENT_UNIT);
        self.ann.pre(self, AnnNode::Arm(arm));
        self.ibox(0);
        self.print_outer_attributes(self.attrs(arm.hir_id));
        self.print_attrs_as_outer(self.attrs(arm.hir_id));
        self.print_pat(arm.pat);
        self.space();
        if let Some(ref g) = arm.guard {

@@ -1,7 +1,7 @@
use rustc_abi::ExternAbi;
use rustc_hir::def::{DefKind, Res};
use rustc_hir::intravisit::FnKind;
use rustc_hir::{AttrArgs, AttrItem, AttrKind, GenericParamKind, PatExprKind, PatKind};
use rustc_hir::{AttrArgs, AttrItem, Attribute, GenericParamKind, PatExprKind, PatKind};
use rustc_middle::ty;
use rustc_session::config::CrateType;
use rustc_session::{declare_lint, declare_lint_pass};

@@ -343,7 +343,7 @@ impl<'tcx> LateLintPass<'tcx> for NonSnakeCase {
        } else {
            ast::attr::find_by_name(cx.tcx.hir().attrs(hir::CRATE_HIR_ID), sym::crate_name)
                .and_then(|attr| {
                    if let AttrKind::Normal(n) = &attr.kind
                    if let Attribute::Unparsed(n) = attr
                        && let AttrItem { args: AttrArgs::Eq { eq_span: _, expr: lit }, .. } =
                            n.as_ref()
                        && let ast::LitKind::Str(name, ..) = lit.kind

@@ -15,8 +15,8 @@ use rustc_feature::{AttributeDuplicates, AttributeType, BUILTIN_ATTRIBUTE_MAP, B
use rustc_hir::def_id::LocalModDefId;
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{
    self as hir, self, AssocItemKind, AttrKind, Attribute, CRATE_HIR_ID, CRATE_OWNER_ID, FnSig,
    ForeignItem, HirId, Item, ItemKind, MethodKind, Safety, Target, TraitItem,
    self as hir, self, AssocItemKind, Attribute, CRATE_HIR_ID, CRATE_OWNER_ID, FnSig, ForeignItem,
    HirId, Item, ItemKind, MethodKind, Safety, Target, TraitItem,
};
use rustc_macros::LintDiagnostic;
use rustc_middle::hir::nested_filter;

@@ -1471,7 +1471,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
        self.tcx.emit_node_span_lint(
            UNUSED_ATTRIBUTES,
            hir_id,
            attr.span,
            attr.span(),
            errors::MustUseNoEffect { article, target },
        );
    }

@@ -1880,7 +1880,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
        feature_err(
            &self.tcx.sess,
            sym::fn_align,
            attr.span,
            attr.span(),
            fluent::passes_repr_align_function,
        )
        .emit();

@@ -1888,7 +1888,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
        _ => {
            self.dcx().emit_err(
                errors::AttrApplication::StructEnumFunctionMethodUnion {
                    hint_span: attr.span,
                    hint_span: attr.span(),
                    span,
                },
            );

@@ -2380,17 +2380,17 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
        })
        {
            if hir_id != CRATE_HIR_ID {
                match attr.style {
                match attr.style() {
                    ast::AttrStyle::Outer => self.tcx.emit_node_span_lint(
                        UNUSED_ATTRIBUTES,
                        hir_id,
                        attr.span,
                        attr.span(),
                        errors::OuterCrateLevelAttr,
                    ),
                    ast::AttrStyle::Inner => self.tcx.emit_node_span_lint(
                        UNUSED_ATTRIBUTES,
                        hir_id,
                        attr.span,
                        attr.span(),
                        errors::InnerCrateLevelAttr,
                    ),
                };

@@ -2417,7 +2417,7 @@ impl<'tcx> CheckAttrVisitor<'tcx> {
            UNUSED_ATTRIBUTES,
            hir_id,
            attr.span(),
            errors::Unused { attr_span: attr.span, note },
            errors::Unused { attr_span: attr.span(), note },
        );
    }

@@ -2760,23 +2760,23 @@ fn check_invalid_crate_level_attr(tcx: TyCtxt<'_>, attrs: &[Attribute]) {
            kind: item.kind.descr(),
        });
        let err = tcx.dcx().create_err(errors::InvalidAttrAtCrateLevel {
            span: attr.span,
            span: attr.span(),
            sugg_span: tcx
                .sess
                .source_map()
                .span_to_snippet(attr.span)
                .span_to_snippet(attr.span())
                .ok()
                .filter(|src| src.starts_with("#!["))
                .map(|_| {
                    attr.span
                        .with_lo(attr.span.lo() + BytePos(1))
                        .with_hi(attr.span.lo() + BytePos(2))
                    attr.span()
                        .with_lo(attr.span().lo() + BytePos(1))
                        .with_hi(attr.span().lo() + BytePos(2))
                }),
            name: *attr_to_check,
            item,
        });

        if let AttrKind::Normal(ref p) = attr.kind {
        if let Attribute::Unparsed(p) = attr {
            tcx.dcx().try_steal_replace_and_emit_err(
                p.path.span,
                StashKey::UndeterminedMacroResolution,

@@ -6,6 +6,7 @@ edition = "2024"
[dependencies]
# tidy-alphabetical-start
measureme = "11"
rustc_attr_data_structures = { path = "../rustc_attr_data_structures" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_hashes = { path = "../rustc_hashes" }

@@ -9,6 +9,7 @@ parking_lot = "0.12"
rustc-rayon-core = { version = "0.5.0" }
rustc_abi = { path = "../rustc_abi" }
rustc_ast = { path = "../rustc_ast" }
rustc_attr_data_structures = { path = "../rustc_attr_data_structures" }
rustc_data_structures = { path = "../rustc_data_structures" }
rustc_errors = { path = "../rustc_errors" }
rustc_feature = { path = "../rustc_feature" }

@@ -129,3 +129,4 @@ impl<'a> rustc_span::HashStableContext for StableHashingContext<'a> {
}

impl<'a> rustc_session::HashStableContext for StableHashingContext<'a> {}
impl<'a> rustc_attr_data_structures::HashStableContext for StableHashingContext<'a> {}

@@ -2,7 +2,7 @@
//! from various crates in no particular order.

use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_hir as hir;
use rustc_hir::{self as hir, HashIgnoredAttrId};
use rustc_span::SourceFile;
use smallvec::SmallVec;

@@ -23,6 +23,7 @@ impl<'a> HashStable<StableHashingContext<'a>> for [hir::Attribute] {
            .iter()
            .filter(|attr| {
                !attr.is_doc_comment()
                    // FIXME(jdonszelmann) have a better way to handle ignored attrs
                    && !attr.ident().is_some_and(|ident| hcx.is_ignored_attr(ident.name))
            })
            .collect();

@@ -35,19 +36,8 @@ impl<'a> HashStable<StableHashingContext<'a>> for [hir::Attribute] {
}

impl<'ctx> rustc_hir::HashStableContext for StableHashingContext<'ctx> {
    fn hash_attr(&mut self, attr: &hir::Attribute, hasher: &mut StableHasher) {
        // Make sure that these have been filtered out.
        debug_assert!(!attr.ident().is_some_and(|ident| self.is_ignored_attr(ident.name)));
        debug_assert!(!attr.is_doc_comment());

        let hir::Attribute { kind, id: _, style, span } = attr;
        if let hir::AttrKind::Normal(item) = kind {
            item.hash_stable(self, hasher);
            style.hash_stable(self, hasher);
            span.hash_stable(self, hasher);
        } else {
            unreachable!();
        }
    fn hash_attr_id(&mut self, _id: &HashIgnoredAttrId, _hasher: &mut StableHasher) {
        /* we don't hash HashIgnoredAttrId, we ignore them */
    }
}

@@ -6,7 +6,7 @@ use rustc_data_structures::fx::FxHashMap;
use rustc_errors::codes::*;
use rustc_errors::{ErrorGuaranteed, struct_span_code_err};
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_hir::{AttrArgs, AttrKind, Attribute};
use rustc_hir::{AttrArgs, Attribute};
use rustc_macros::LintDiagnostic;
use rustc_middle::bug;
use rustc_middle::ty::print::PrintTraitRefExt as _;

@@ -666,8 +666,8 @@ impl<'tcx> OnUnimplementedDirective {
            Ok(None)
        }
    } else if is_diagnostic_namespace_variant {
        match &attr.kind {
            AttrKind::Normal(p) if !matches!(p.args, AttrArgs::Empty) => {
        match attr {
            Attribute::Unparsed(p) if !matches!(p.args, AttrArgs::Empty) => {
                if let Some(item_def_id) = item_def_id.as_local() {
                    tcx.emit_node_span_lint(
                        UNKNOWN_OR_MALFORMED_DIAGNOSTIC_ATTRIBUTES,