Auto merge of #77255 - Aaron1011:feature/collect-attr-tokens, r=petrochenkov
Unconditionally capture tokens for attributes. This allows us to avoid synthesizing tokens in `prepend_attr`, since we have the original tokens available. We still need to synthesize tokens when expanding `cfg_attr`, but this is an unavoidable consequence of the syntax of `cfg_attr`: the user does not supply the `#` and `[]` tokens that a `cfg_attr` expands to.

This PR is based on https://github.com/rust-lang/rust/pull/77250; the changes here expose a bug in the current `collect_tokens` implementation, which is fixed by that PR's rewrite.
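For context, a minimal illustration of the `cfg_attr` point (example code, not part of this diff; the feature name is made up):

    // What the user writes:
    #[cfg_attr(feature = "debug-impls", derive(Debug))]
    struct Foo;

    // When the predicate holds, expansion produces the equivalent of:
    //
    //     #[derive(Debug)]
    //     struct Foo;
    //
    // The `derive(Debug)` tokens exist in the source and can be captured
    // verbatim, but the wrapping `#` and `[]` of the expanded attribute
    // never appear in the source and must be synthesized.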
commit ffa2e7ae8f

19 changed files with 251 additions and 138 deletions
@@ -2423,6 +2423,7 @@ pub struct Attribute {
     /// or the construct this attribute is contained within (inner).
     pub style: AttrStyle,
     pub span: Span,
+    pub tokens: Option<LazyTokenStream>,
 }
 
 #[derive(Clone, Encodable, Decodable, Debug)]
@@ -325,7 +325,7 @@ pub fn mk_attr(style: AttrStyle, path: Path, args: MacArgs, span: Span) -> Attri
 }
 
 pub fn mk_attr_from_item(style: AttrStyle, item: AttrItem, span: Span) -> Attribute {
-    Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span }
+    Attribute { kind: AttrKind::Normal(item), id: mk_attr_id(), style, span, tokens: None }
 }
 
 /// Returns an inner attribute with the given value and span.
@@ -344,7 +344,13 @@ pub fn mk_doc_comment(
     data: Symbol,
     span: Span,
 ) -> Attribute {
-    Attribute { kind: AttrKind::DocComment(comment_kind, data), id: mk_attr_id(), style, span }
+    Attribute {
+        kind: AttrKind::DocComment(comment_kind, data),
+        id: mk_attr_id(),
+        style,
+        span,
+        tokens: None,
+    }
 }
 
 pub fn list_contains_name(items: &[NestedMetaItem], name: Symbol) -> bool {
@@ -577,7 +577,7 @@ pub fn noop_visit_local<T: MutVisitor>(local: &mut P<Local>, vis: &mut T) {
 }
 
 pub fn noop_visit_attribute<T: MutVisitor>(attr: &mut Attribute, vis: &mut T) {
-    let Attribute { kind, id: _, style: _, span } = attr;
+    let Attribute { kind, id: _, style: _, span, tokens: _ } = attr;
     match kind {
         AttrKind::Normal(AttrItem { path, args, tokens: _ }) => {
             vis.visit_path(path);
@@ -210,9 +210,9 @@ impl<'hir> LoweringContext<'_, 'hir> {
                 ex.span = e.span;
             }
             // Merge attributes into the inner expression.
-            let mut attrs = e.attrs.clone();
+            let mut attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
             attrs.extend::<Vec<_>>(ex.attrs.into());
-            ex.attrs = attrs;
+            ex.attrs = attrs.into();
             return ex;
         }
 
@@ -1471,13 +1471,15 @@ impl<'hir> LoweringContext<'_, 'hir> {
             hir::MatchSource::ForLoopDesugar,
         ));
 
+        let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
+
         // This is effectively `{ let _result = ...; _result }`.
         // The construct was introduced in #21984 and is necessary to make sure that
         // temporaries in the `head` expression are dropped and do not leak to the
         // surrounding scope of the `match` since the `match` is not a terminating scope.
         //
         // Also, add the attributes to the outer returned expr node.
-        self.expr_drop_temps_mut(desugared_span, match_expr, e.attrs.clone())
+        self.expr_drop_temps_mut(desugared_span, match_expr, attrs.into())
     }
 
     /// Desugar `ExprKind::Try` from: `<expr>?` into:
@@ -972,7 +972,8 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
             AttrKind::DocComment(comment_kind, data) => AttrKind::DocComment(comment_kind, data),
         };
 
-        Attribute { kind, id: attr.id, style: attr.style, span: attr.span }
+        // Tokens aren't needed after macro expansion and parsing
+        Attribute { kind, id: attr.id, style: attr.style, span: attr.span, tokens: None }
     }
 
     fn lower_mac_args(&mut self, args: &MacArgs) -> MacArgs {
@@ -1713,7 +1714,7 @@ impl<'a, 'hir> LoweringContext<'a, 'hir> {
                     pat: self.lower_pat(&l.pat),
                     init,
                     span: l.span,
-                    attrs: l.attrs.clone(),
+                    attrs: l.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
                     source: hir::LocalSource::Normal,
                 },
                 ids,
@@ -15,7 +15,7 @@ pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -
     );
 
     let start_span = parser.token.span;
-    let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item() {
+    let AttrItem { path, args, tokens: _ } = match parser.parse_attr_item(false) {
         Ok(ai) => ai,
         Err(mut err) => {
             err.emit();
@@ -3,10 +3,13 @@
 use rustc_ast::attr::HasAttrs;
 use rustc_ast::mut_visit::*;
 use rustc_ast::ptr::P;
+use rustc_ast::token::{DelimToken, Token, TokenKind};
+use rustc_ast::tokenstream::{DelimSpan, LazyTokenStreamInner, Spacing, TokenStream, TokenTree};
 use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem};
 use rustc_attr as attr;
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::map_in_place::MapInPlace;
+use rustc_data_structures::sync::Lrc;
 use rustc_errors::{error_code, struct_span_err, Applicability, Handler};
 use rustc_feature::{Feature, Features, State as FeatureState};
 use rustc_feature::{
@@ -289,7 +292,37 @@ impl<'a> StripUnconfigured<'a> {
         expanded_attrs
             .into_iter()
             .flat_map(|(item, span)| {
-                let attr = attr::mk_attr_from_item(attr.style, item, span);
+                let orig_tokens =
+                    attr.tokens.as_ref().unwrap_or_else(|| panic!("Missing tokens for {:?}", attr));
+
+                // We are taking an attribute of the form `#[cfg_attr(pred, attr)]`
+                // and producing an attribute of the form `#[attr]`. We
+                // have captured tokens for `attr` itself, but we need to
+                // synthesize tokens for the wrapper `#` and `[]`, which
+                // we do below.
+
+                // Use the `#` in `#[cfg_attr(pred, attr)]` as the `#` token
+                // for `attr` when we expand it to `#[attr]`
+                let pound_token = orig_tokens.into_token_stream().trees().next().unwrap();
+                if !matches!(pound_token, TokenTree::Token(Token { kind: TokenKind::Pound, .. })) {
+                    panic!("Bad tokens for attribute {:?}", attr);
+                }
+                // We don't really have a good span to use for the synthesized `[]`
+                // in `#[attr]`, so just use the span of the `#` token.
+                let bracket_group = TokenTree::Delimited(
+                    DelimSpan::from_single(pound_token.span()),
+                    DelimToken::Bracket,
+                    item.tokens
+                        .clone()
+                        .unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
+                        .into_token_stream(),
+                );
+
+                let mut attr = attr::mk_attr_from_item(attr.style, item, span);
+                attr.tokens = Some(Lrc::new(LazyTokenStreamInner::Ready(TokenStream::new(vec![
+                    (pound_token, Spacing::Alone),
+                    (bracket_group, Spacing::Alone),
+                ]))));
                 self.process_cfg_attr(attr)
             })
             .collect()
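The net effect of the hunk above is a two-tree stream: the `#` reused from the original `#[cfg_attr(...)]`, followed by a synthesized bracket group wrapping the captured tokens of the inner attribute. A rough sketch of that shape, using made-up stand-in types rather than rustc's real `TokenTree`:

    // Sketch only: `Tree` is a hypothetical stand-in for rustc's token trees.
    #[derive(Debug)]
    enum Tree {
        Pound,                // reused from the original `#` of `#[cfg_attr(...)]`
        Bracketed(Vec<Tree>), // synthesized `[ ... ]`, spanned at the `#`
        Ident(&'static str),
        Parens(Vec<Tree>),
    }

    // `#[cfg_attr(pred, allow(dead_code))]` expands to a stream equivalent
    // to `# [ allow (dead_code) ]`:
    fn expanded_attr() -> Vec<Tree> {
        vec![
            Tree::Pound,
            Tree::Bracketed(vec![
                Tree::Ident("allow"),
                Tree::Parens(vec![Tree::Ident("dead_code")]),
            ]),
        ]
    }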
@@ -1785,6 +1785,7 @@ impl<'a, 'b> MutVisitor for InvocationCollector<'a, 'b> {
                     span: at.span,
                     id: at.id,
                     style: at.style,
+                    tokens: None,
                 };
             } else {
                 noop_visit_attribute(at, self)
@@ -2,8 +2,9 @@ use crate::interface::{Compiler, Result};
 use crate::proc_macro_decls;
 use crate::util;
 
-use rustc_ast::mut_visit::MutVisitor;
-use rustc_ast::{self as ast, visit};
+use rustc_ast::mut_visit::{self, MutVisitor};
+use rustc_ast::ptr::P;
+use rustc_ast::{self as ast, token, visit};
 use rustc_codegen_ssa::back::link::emit_metadata;
 use rustc_codegen_ssa::traits::CodegenBackend;
 use rustc_data_structures::sync::{par_iter, Lrc, OnceCell, ParallelIterator, WorkerLocal};
@@ -36,6 +37,7 @@ use rustc_span::symbol::Symbol;
 use rustc_span::{FileName, RealFileName};
 use rustc_trait_selection::traits;
 use rustc_typeck as typeck;
+use smallvec::SmallVec;
 use tracing::{info, warn};
 
 use rustc_serialize::json;
@@ -50,6 +52,64 @@ use std::path::PathBuf;
 use std::rc::Rc;
 use std::{env, fs, iter, mem};
 
+/// Removes all `LazyTokenStream`s from an AST struct.
+/// Normally, this is done during AST lowering. However,
+/// printing the AST JSON requires us to serialize
+/// the entire AST, and we don't want to serialize
+/// a `LazyTokenStream`.
+struct TokenStripper;
+impl mut_visit::MutVisitor for TokenStripper {
+    fn flat_map_item(&mut self, mut i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
+        i.tokens = None;
+        mut_visit::noop_flat_map_item(i, self)
+    }
+    fn visit_block(&mut self, b: &mut P<ast::Block>) {
+        b.tokens = None;
+        mut_visit::noop_visit_block(b, self);
+    }
+    fn flat_map_stmt(&mut self, mut stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
+        stmt.tokens = None;
+        mut_visit::noop_flat_map_stmt(stmt, self)
+    }
+    fn visit_pat(&mut self, p: &mut P<ast::Pat>) {
+        p.tokens = None;
+        mut_visit::noop_visit_pat(p, self);
+    }
+    fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
+        ty.tokens = None;
+        mut_visit::noop_visit_ty(ty, self);
+    }
+    fn visit_attribute(&mut self, attr: &mut ast::Attribute) {
+        attr.tokens = None;
+        if let ast::AttrKind::Normal(ast::AttrItem { tokens, .. }) = &mut attr.kind {
+            *tokens = None;
+        }
+        mut_visit::noop_visit_attribute(attr, self);
+    }
+
+    fn visit_interpolated(&mut self, nt: &mut token::Nonterminal) {
+        if let token::Nonterminal::NtMeta(meta) = nt {
+            meta.tokens = None;
+        }
+        // Handles all of the other cases
+        mut_visit::noop_visit_interpolated(nt, self);
+    }
+
+    fn visit_path(&mut self, p: &mut ast::Path) {
+        p.tokens = None;
+        mut_visit::noop_visit_path(p, self);
+    }
+    fn visit_vis(&mut self, vis: &mut ast::Visibility) {
+        vis.tokens = None;
+        mut_visit::noop_visit_vis(vis, self);
+    }
+    fn visit_expr(&mut self, e: &mut P<ast::Expr>) {
+        e.tokens = None;
+        mut_visit::noop_visit_expr(e, self);
+    }
+    fn visit_mac(&mut self, _mac: &mut ast::MacCall) {}
+}
+
 pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
     let krate = sess.time("parse_crate", || match input {
         Input::File(file) => parse_crate_from_file(file, &sess.parse_sess),
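The `TokenStripper` visitor exists only because serializing the AST to JSON would otherwise drag every cached `LazyTokenStream` along with it. The strip-before-serialize pattern in miniature (self-contained sketch with made-up types, not rustc's visitor):

    // A toy AST node that caches tokens alongside the real data.
    struct Node {
        kind: String,
        tokens: Option<Vec<String>>, // cache we don't want serialized
        children: Vec<Node>,
    }

    // Walk the tree and drop every cache before printing/serializing.
    fn strip_tokens(node: &mut Node) {
        node.tokens = None;
        for child in &mut node.children {
            strip_tokens(child);
        }
    }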
@@ -59,6 +119,10 @@ pub fn parse<'a>(sess: &'a Session, input: &Input) -> PResult<'a, ast::Crate> {
     })?;
 
     if sess.opts.debugging_opts.ast_json_noexpand {
+        // Set any `tokens` fields to `None` before
+        // we display the AST.
+        let mut krate = krate.clone();
+        TokenStripper.visit_crate(&mut krate);
         println!("{}", json::as_json(&krate));
     }
 
@@ -379,6 +443,10 @@ fn configure_and_expand_inner<'a>(
     }
 
     if sess.opts.debugging_opts.ast_json {
+        // Set any `tokens` fields to `None` before
+        // we display the AST.
+        let mut krate = krate.clone();
+        TokenStripper.visit_crate(&mut krate);
         println!("{}", json::as_json(&krate));
     }
 
@@ -40,11 +40,12 @@ impl<'ctx> rustc_ast::HashStableContext for StableHashingContext<'ctx> {
         debug_assert!(!attr.ident().map_or(false, |ident| self.is_ignored_attr(ident.name)));
         debug_assert!(!attr.is_doc_comment());
 
-        let ast::Attribute { kind, id: _, style, span } = attr;
+        let ast::Attribute { kind, id: _, style, span, tokens } = attr;
         if let ast::AttrKind::Normal(item) = kind {
             item.hash_stable(self, hasher);
             style.hash_stable(self, hasher);
             span.hash_stable(self, hasher);
+            tokens.as_ref().expect_none("Tokens should have been removed during lowering!");
         } else {
             unreachable!();
         }
@@ -252,9 +252,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
     let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream());
 
     let tokens = match *nt {
-        Nonterminal::NtItem(ref item) => {
-            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-        }
+        Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
         Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()),
         Nonterminal::NtStmt(ref stmt) => {
             // FIXME: We currently only collect tokens for `:stmt`
@@ -279,7 +277,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> Toke
             if expr.tokens.is_none() {
                 debug!("missing tokens for expr {:?}", expr);
             }
-            prepend_attrs(sess, &expr.attrs, expr.tokens.as_ref(), span)
+            prepend_attrs(&expr.attrs, expr.tokens.as_ref())
         }
     };
 
@@ -603,10 +601,8 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
 }
 
 fn prepend_attrs(
-    sess: &ParseSess,
     attrs: &[ast::Attribute],
     tokens: Option<&tokenstream::LazyTokenStream>,
-    span: rustc_span::Span,
 ) -> Option<tokenstream::TokenStream> {
     let tokens = tokens?.clone().into_token_stream();
     if attrs.is_empty() {
@@ -619,47 +615,12 @@ fn prepend_attrs(
             ast::AttrStyle::Outer,
             "inner attributes should prevent cached tokens from existing"
         );
-        let source = pprust::attribute_to_string(attr);
-        let macro_filename = FileName::macro_expansion_source_code(&source);
-
-        let item = match attr.kind {
-            ast::AttrKind::Normal(ref item) => item,
-            ast::AttrKind::DocComment(..) => {
-                let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-                builder.push(stream);
-                continue;
-            }
-        };
-
-        // synthesize # [ $path $tokens ] manually here
-        let mut brackets = tokenstream::TokenStreamBuilder::new();
-
-        // For simple paths, push the identifier directly
-        if item.path.segments.len() == 1 && item.path.segments[0].args.is_none() {
-            let ident = item.path.segments[0].ident;
-            let token = token::Ident(ident.name, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::token(token, ident.span));
-
-        // ... and for more complicated paths, fall back to a reparse hack that
-        // should eventually be removed.
-        } else {
-            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-            brackets.push(stream);
-        }
-
-        brackets.push(item.args.outer_tokens());
-
-        // The span we list here for `#` and for `[ ... ]` are both wrong in
-        // that it encompasses more than each token, but it hopefully is "good
-        // enough" for now at least.
-        builder.push(tokenstream::TokenTree::token(token::Pound, attr.span));
-        let delim_span = tokenstream::DelimSpan::from_single(attr.span);
-        builder.push(tokenstream::TokenTree::Delimited(
-            delim_span,
-            token::DelimToken::Bracket,
-            brackets.build(),
-        ));
+        builder.push(
+            attr.tokens
+                .clone()
+                .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
+                .into_token_stream(),
+        );
     }
     builder.push(tokens.clone());
     Some(builder.build())
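This is the payoff of unconditional capture: because every parsed attribute now carries its own tokens, `prepend_attrs` can splice them in directly instead of pretty-printing the attribute and re-parsing it (the reparse hack deleted above). A minimal sketch of the new shape, with `Vec<String>` standing in for rustc's token streams:

    // Sketch only: `Vec<String>` stands in for `TokenStream`.
    fn prepend_attrs(attrs: &[Vec<String>], tokens: Option<Vec<String>>) -> Option<Vec<String>> {
        let tokens = tokens?;
        let mut out = Vec::new();
        for attr_tokens in attrs {
            // Tokens captured when the attribute was parsed; no re-parse needed.
            out.extend(attr_tokens.iter().cloned());
        }
        out.extend(tokens);
        Some(out)
    }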
@@ -30,41 +30,53 @@ impl<'a> Parser<'a> {
         let mut just_parsed_doc_comment = false;
         loop {
             debug!("parse_outer_attributes: self.token={:?}", self.token);
-            if self.check(&token::Pound) {
-                let inner_error_reason = if just_parsed_doc_comment {
-                    "an inner attribute is not permitted following an outer doc comment"
-                } else if !attrs.is_empty() {
-                    "an inner attribute is not permitted following an outer attribute"
-                } else {
-                    DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
-                };
-                let inner_parse_policy = InnerAttrPolicy::Forbidden {
-                    reason: inner_error_reason,
-                    saw_doc_comment: just_parsed_doc_comment,
-                    prev_attr_sp: attrs.last().map(|a| a.span),
-                };
-                let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
-                attrs.push(attr);
-                just_parsed_doc_comment = false;
-            } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
-                let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span);
-                if attr.style != ast::AttrStyle::Outer {
-                    self.sess
-                        .span_diagnostic
-                        .struct_span_err_with_code(
-                            self.token.span,
-                            "expected outer doc comment",
-                            error_code!(E0753),
-                        )
-                        .note(
-                            "inner doc comments like this (starting with \
-                            `//!` or `/*!`) can only appear before items",
-                        )
-                        .emit();
-                }
-                attrs.push(attr);
-                self.bump();
-                just_parsed_doc_comment = true;
+            let (attr, tokens) = if self.check(&token::Pound) {
+                self.collect_tokens(|this| {
+                    let inner_error_reason = if just_parsed_doc_comment {
+                        "an inner attribute is not permitted following an outer doc comment"
+                    } else if !attrs.is_empty() {
+                        "an inner attribute is not permitted following an outer attribute"
+                    } else {
+                        DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
+                    };
+                    let inner_parse_policy = InnerAttrPolicy::Forbidden {
+                        reason: inner_error_reason,
+                        saw_doc_comment: just_parsed_doc_comment,
+                        prev_attr_sp: attrs.last().map(|a| a.span),
+                    };
+                    let attr = this.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
+                    just_parsed_doc_comment = false;
+                    Ok(Some(attr))
+                })?
+            } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
+                self.collect_tokens(|this| {
+                    let attr =
+                        attr::mk_doc_comment(comment_kind, attr_style, data, this.token.span);
+                    if attr.style != ast::AttrStyle::Outer {
+                        this.sess
+                            .span_diagnostic
+                            .struct_span_err_with_code(
+                                this.token.span,
+                                "expected outer doc comment",
+                                error_code!(E0753),
+                            )
+                            .note(
+                                "inner doc comments like this (starting with \
+                                `//!` or `/*!`) can only appear before items",
+                            )
+                            .emit();
+                    }
+                    this.bump();
+                    just_parsed_doc_comment = true;
+                    Ok(Some(attr))
+                })?
+            } else {
+                (None, None)
+            };
+
+            if let Some(mut attr) = attr {
+                attr.tokens = tokens;
+                attrs.push(attr);
             } else {
                 break;
             }
@@ -99,7 +111,7 @@ impl<'a> Parser<'a> {
             if self.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
 
         self.expect(&token::OpenDelim(token::Bracket))?;
-        let item = self.parse_attr_item()?;
+        let item = self.parse_attr_item(false)?;
         self.expect(&token::CloseDelim(token::Bracket))?;
         let attr_sp = lo.to(self.prev_token.span);
 
@@ -148,7 +160,7 @@ impl<'a> Parser<'a> {
     /// PATH
     /// PATH `=` UNSUFFIXED_LIT
     /// The delimiters or `=` are still put into the resulting token stream.
-    pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> {
+    pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
         let item = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
@@ -160,9 +172,18 @@ impl<'a> Parser<'a> {
             self.bump();
             item
         } else {
-            let path = self.parse_path(PathStyle::Mod)?;
-            let args = self.parse_attr_args()?;
-            ast::AttrItem { path, args, tokens: None }
+            let do_parse = |this: &mut Self| {
+                let path = this.parse_path(PathStyle::Mod)?;
+                let args = this.parse_attr_args()?;
+                Ok(ast::AttrItem { path, args, tokens: None })
+            };
+            if capture_tokens {
+                let (mut item, tokens) = self.collect_tokens(do_parse)?;
+                item.tokens = tokens;
+                item
+            } else {
+                do_parse(self)?
+            }
         })
     }
 
@@ -175,19 +196,31 @@ impl<'a> Parser<'a> {
         let mut attrs: Vec<ast::Attribute> = vec![];
         loop {
             // Only try to parse if it is an inner attribute (has `!`).
-            if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
-                let attr = self.parse_attribute(true)?;
-                assert_eq!(attr.style, ast::AttrStyle::Inner);
-                attrs.push(attr);
-            } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
-                // We need to get the position of this token before we bump.
-                let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span);
-                if attr.style == ast::AttrStyle::Inner {
-                    attrs.push(attr);
-                    self.bump();
-                } else {
-                    break;
-                }
+            let (attr, tokens) =
+                if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
+                    self.collect_tokens(|this| {
+                        let attr = this.parse_attribute(true)?;
+                        assert_eq!(attr.style, ast::AttrStyle::Inner);
+                        Ok(Some(attr))
+                    })?
+                } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
+                    self.collect_tokens(|this| {
+                        // We need to get the position of this token before we bump.
+                        let attr =
+                            attr::mk_doc_comment(comment_kind, attr_style, data, this.token.span);
+                        if attr.style == ast::AttrStyle::Inner {
+                            this.bump();
+                            Ok(Some(attr))
+                        } else {
+                            Ok(None)
+                        }
+                    })?
+                } else {
+                    (None, None)
+                };
+            if let Some(mut attr) = attr {
+                attr.tokens = tokens;
+                attrs.push(attr);
             } else {
                 break;
             }
@@ -220,7 +253,7 @@ impl<'a> Parser<'a> {
         let mut expanded_attrs = Vec::with_capacity(1);
         while self.token.kind != token::Eof {
             let lo = self.token.span;
-            let item = self.parse_attr_item()?;
+            let item = self.parse_attr_item(true)?;
             expanded_attrs.push((item, lo.to(self.prev_token.span)));
             if !self.eat(&token::Comma) {
                 break;
@@ -1116,7 +1116,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Expr>> {
         if needs_tokens {
             let (mut expr, tokens) = self.collect_tokens(f)?;
-            expr.tokens = Some(tokens);
+            expr.tokens = tokens;
             Ok(expr)
         } else {
             f(self)
@@ -151,7 +151,7 @@ impl<'a> Parser<'a> {
         if let Some(tokens) = tokens {
             if let Some(item) = &mut item {
                 if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
-                    item.tokens = Some(tokens);
+                    item.tokens = tokens;
                 }
             }
         }
@@ -1178,8 +1178,9 @@ impl<'a> Parser<'a> {
 
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
-    /// into a `TokenStream`, and returned along with the result
-    /// of the callback.
+    /// into a `LazyTokenStream`, and returned along with the result
+    /// of the callback. The returned `LazyTokenStream` will be `None`
+    /// if no tokens were captured.
     ///
     /// Note: If your callback consumes an opening delimiter
     /// (including the case where you call `collect_tokens`
@@ -1195,7 +1196,7 @@ impl<'a> Parser<'a> {
     pub fn collect_tokens<R>(
         &mut self,
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
-    ) -> PResult<'a, (R, LazyTokenStream)> {
+    ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
         let mut cursor_snapshot = self.token_cursor.clone();
 
@@ -1205,6 +1206,11 @@ impl<'a> Parser<'a> {
         let num_calls = new_calls - cursor_snapshot.num_next_calls;
         let desugar_doc_comments = self.desugar_doc_comments;
 
+        // We didn't capture any tokens
+        if num_calls == 0 {
+            return Ok((ret, None));
+        }
+
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
         // by the callback. This allows us to avoid producing a `TokenStream`
@@ -1233,7 +1239,7 @@ impl<'a> Parser<'a> {
         };
         let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
 
-        Ok((ret, stream))
+        Ok((ret, Some(stream)))
     }
 
     /// `::{` or `::*`
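The two hunks above change `collect_tokens` to return `Option<LazyTokenStream>`: it snapshots the token cursor, runs the callback, and if the callback consumed nothing it returns `None`; otherwise it returns a lazy value that can replay the consumed range on demand. A self-contained sketch of that idea, with strings standing in for tokens (not rustc's actual types):

    // Sketch only: a deferred token buffer rebuilt from a cursor snapshot.
    struct LazyTokens(Box<dyn FnOnce() -> Vec<String>>);

    fn collect_tokens<R>(
        tokens: &[String],
        pos: &mut usize,
        f: impl FnOnce(&[String], &mut usize) -> R,
    ) -> (R, Option<LazyTokens>) {
        let start = *pos; // cursor snapshot before running the callback
        let ret = f(tokens, pos);
        let num_calls = *pos - start;
        if num_calls == 0 {
            return (ret, None); // we didn't capture any tokens
        }
        let captured: Vec<String> = tokens[start..start + num_calls].to_vec();
        (ret, Some(LazyTokens(Box::new(move || captured))))
    }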
@@ -103,7 +103,7 @@ impl<'a> Parser<'a> {
                 // If we captured tokens during parsing (due to outer attributes),
                 // use those.
                 if item.tokens.is_none() {
-                    item.tokens = Some(tokens);
+                    item.tokens = tokens;
                 }
                 token::NtItem(item)
             }
@@ -115,7 +115,7 @@ impl<'a> Parser<'a> {
                 let (mut block, tokens) = self.collect_tokens(|this| this.parse_block())?;
                 // We have eaten an NtBlock, which could already have tokens
                 if block.tokens.is_none() {
-                    block.tokens = Some(tokens);
+                    block.tokens = tokens;
                 }
                 token::NtBlock(block)
             }
@@ -124,7 +124,7 @@ impl<'a> Parser<'a> {
                 match stmt {
                     Some(mut s) => {
                         if s.tokens.is_none() {
-                            s.tokens = Some(tokens);
+                            s.tokens = tokens;
                         }
                         token::NtStmt(s)
                     }
@@ -137,7 +137,7 @@ impl<'a> Parser<'a> {
                 let (mut pat, tokens) = self.collect_tokens(|this| this.parse_pat(None))?;
                 // We have eaten an NtPat, which could already have tokens
                 if pat.tokens.is_none() {
-                    pat.tokens = Some(tokens);
+                    pat.tokens = tokens;
                 }
                 token::NtPat(pat)
             }
@@ -146,7 +146,7 @@ impl<'a> Parser<'a> {
                 // If we captured tokens during parsing (due to outer attributes),
                 // use those.
                 if expr.tokens.is_none() {
-                    expr.tokens = Some(tokens);
+                    expr.tokens = tokens;
                 }
                 token::NtExpr(expr)
             }
@@ -155,7 +155,7 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_literal_maybe_minus())?;
                 // We have eaten a nonterminal, which could already have tokens
                 if lit.tokens.is_none() {
-                    lit.tokens = Some(tokens);
+                    lit.tokens = tokens;
                 }
                 token::NtLiteral(lit)
             }
@@ -163,7 +163,7 @@ impl<'a> Parser<'a> {
                 let (mut ty, tokens) = self.collect_tokens(|this| this.parse_ty())?;
                 // We have eaten an NtTy, which could already have tokens
                 if ty.tokens.is_none() {
-                    ty.tokens = Some(tokens);
+                    ty.tokens = tokens;
                 }
                 token::NtTy(ty)
             }
@@ -183,15 +183,15 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_path(PathStyle::Type))?;
                 // We have eaten an NtPath, which could already have tokens
                 if path.tokens.is_none() {
-                    path.tokens = Some(tokens);
+                    path.tokens = tokens;
                 }
                 token::NtPath(path)
             }
             NonterminalKind::Meta => {
-                let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item())?;
+                let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item(false))?;
                 // We may have eaten a nonterminal, which could already have tokens
                 if attr.tokens.is_none() {
-                    attr.tokens = Some(tokens);
+                    attr.tokens = tokens;
                 }
                 token::NtMeta(P(attr))
             }
@@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?;
                 // We may have eaten an `NtVis`, which could already have tokens
                 if vis.tokens.is_none() {
-                    vis.tokens = Some(tokens);
+                    vis.tokens = tokens;
                 }
                 token::NtVis(vis)
             }
@@ -1 +1 @@
-{"module":{"inner":{"lo":0,"hi":0},"unsafety":"No","items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"tokens":null}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]}
+{"module":{"inner":{"lo":0,"hi":0},"unsafety":"No","items":[{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"tokens":null}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0},"tokens":null}],"span":{"lo":0,"hi":0},"proc_macros":[]}
@@ -1 +1 @@
-{"module":{"inner":{"lo":0,"hi":0},"unsafety":"No","items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0}}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"tokens":null}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0}}],"span":{"lo":0,"hi":0},"proc_macros":[]}
+{"module":{"inner":{"lo":0,"hi":0},"unsafety":"No","items":[{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"prelude_import","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0},"tokens":null}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"","span":{"lo":0,"hi":0}},"kind":{"variant":"Use","fields":[{"prefix":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"{{root}}","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"std","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"prelude","span":{"lo":0,"hi":0}},"id":0,"args":null},{"ident":{"name":"v1","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"kind":"Glob","span":{"lo":0,"hi":0}}]},"tokens":null},{"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"macro_use","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":"Empty","tokens":null}]},"id":null,"style":"Outer","span":{"lo":0,"hi":0},"tokens":null}],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"std","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null},{"attrs":[],"id":0,"span":{"lo":0,"hi":0},"vis":{"kind":"Inherited","span":{"lo":0,"hi":0},"tokens":null},"ident":{"name":"core","span":{"lo":0,"hi":0}},"kind":{"variant":"ExternCrate","fields":[null]},"tokens":null}],"inline":true},"attrs":[{"kind":{"variant":"Normal","fields":[{"path":{"span":{"lo":0,"hi":0},"segments":[{"ident":{"name":"crate_type","span":{"lo":0,"hi":0}},"id":0,"args":null}],"tokens":null},"args":{"variant":"Eq","fields":[{"lo":0,"hi":0},{"0":[[{"variant":"Token","fields":[{"kind":{"variant":"Literal","fields":[{"kind":"Str","symbol":"lib","suffix":null}]},"span":{"lo":0,"hi":0}}]},"Alone"]]}]},"tokens":null}]},"id":null,"style":"Inner","span":{"lo":0,"hi":0},"tokens":null}],"span":{"lo":0,"hi":0},"proc_macros":[]}
@@ -26,7 +26,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
     Punct {
         ch: '#',
         spacing: Alone,
-        span: $DIR/issue-75930-derive-cfg.rs:17:24: 17:40 (#0),
+        span: $DIR/issue-75930-derive-cfg.rs:17:1: 17:2 (#0),
     },
     Group {
         delimiter: Bracket,
@@ -46,12 +46,12 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
                 span: $DIR/issue-75930-derive-cfg.rs:17:29: 17:40 (#0),
             },
         ],
-        span: $DIR/issue-75930-derive-cfg.rs:17:24: 17:40 (#0),
+        span: $DIR/issue-75930-derive-cfg.rs:17:1: 17:2 (#0),
     },
     Punct {
         ch: '#',
         spacing: Alone,
-        span: $DIR/issue-75930-derive-cfg.rs:19:1: 19:17 (#0),
+        span: $DIR/issue-75930-derive-cfg.rs:19:1: 19:2 (#0),
     },
     Group {
         delimiter: Bracket,
@@ -71,12 +71,12 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
                 span: $DIR/issue-75930-derive-cfg.rs:19:9: 19:16 (#0),
             },
         ],
-        span: $DIR/issue-75930-derive-cfg.rs:19:1: 19:17 (#0),
+        span: $DIR/issue-75930-derive-cfg.rs:19:2: 19:17 (#0),
     },
     Punct {
         ch: '#',
         spacing: Alone,
-        span: $DIR/issue-75930-derive-cfg.rs:20:1: 20:19 (#0),
+        span: $DIR/issue-75930-derive-cfg.rs:20:1: 20:2 (#0),
     },
     Group {
         delimiter: Bracket,
@@ -96,12 +96,12 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
                 span: $DIR/issue-75930-derive-cfg.rs:20:15: 20:18 (#0),
             },
         ],
-        span: $DIR/issue-75930-derive-cfg.rs:20:1: 20:19 (#0),
+        span: $DIR/issue-75930-derive-cfg.rs:20:2: 20:19 (#0),
     },
     Punct {
         ch: '#',
         spacing: Alone,
-        span: $DIR/issue-75930-derive-cfg.rs:16:1: 16:19 (#0),
+        span: $DIR/issue-75930-derive-cfg.rs:16:1: 16:2 (#0),
    },
     Group {
         delimiter: Bracket,
@@ -121,7 +121,7 @@ PRINT-ATTR INPUT (DEBUG): TokenStream [
                 span: $DIR/issue-75930-derive-cfg.rs:16:15: 16:18 (#0),
             },
         ],
-        span: $DIR/issue-75930-derive-cfg.rs:16:1: 16:19 (#0),
+        span: $DIR/issue-75930-derive-cfg.rs:16:2: 16:19 (#0),
     },
     Ident {
         ident: "struct",