Auto merge of #77255 - Aaron1011:feature/collect-attr-tokens, r=petrochenkov
Unconditionally capture tokens for attributes. This allows us to avoid synthesizing tokens in `prepend_attrs`, since we have the original tokens available. We still need to synthesize tokens when expanding `cfg_attr`, but this is an unavoidable consequence of the syntax of `cfg_attr`: the user does not write the `#` and `[]` tokens that a `cfg_attr` expands to. This is based on PR https://github.com/rust-lang/rust/pull/77250 - the changes here expose a bug in the current `collect_tokens` implementation, which is fixed by the rewrite in that PR.
Commit ffa2e7ae8f
19 changed files with 251 additions and 138 deletions
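
To illustrate the `cfg_attr` caveat, a minimal sketch (not part of this diff; it uses the built-in `debug_assertions` cfg purely for illustration):

    // What the user writes; the parser can capture these tokens verbatim.
    #[cfg_attr(debug_assertions, derive(Debug))]
    struct Foo;

    fn main() {
        // When `debug_assertions` is set, the attribute expands to `#[derive(Debug)]`.
        // The `#`, `[`, and `]` of that expansion never appear in the user's
        // source, so those tokens still have to be synthesized, not replayed.
        #[cfg(debug_assertions)]
        println!("{:?}", Foo);
    }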
@@ -252,9 +252,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream {
+    let convert_tokens = |tokens: Option<LazyTokenStream>| tokens.map(|t| t.into_token_stream());
+
     let tokens = match *nt {
-        Nonterminal::NtItem(ref item) => {
-            prepend_attrs(sess, &item.attrs, item.tokens.as_ref(), span)
-        }
+        Nonterminal::NtItem(ref item) => prepend_attrs(&item.attrs, item.tokens.as_ref()),
+        Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.clone()),
         Nonterminal::NtStmt(ref stmt) => {
             // FIXME: We currently only collect tokens for `:stmt`
@@ -279,7 +277,7 @@ pub fn nt_to_tokenstream(nt: &Nonterminal, sess: &ParseSess, span: Span) -> TokenStream {
             if expr.tokens.is_none() {
                 debug!("missing tokens for expr {:?}", expr);
             }
-            prepend_attrs(sess, &expr.attrs, expr.tokens.as_ref(), span)
+            prepend_attrs(&expr.attrs, expr.tokens.as_ref())
         }
     };
@@ -603,10 +601,8 @@ fn token_probably_equal_for_proc_macro(first: &Token, other: &Token) -> bool {
 }
 
 fn prepend_attrs(
-    sess: &ParseSess,
     attrs: &[ast::Attribute],
     tokens: Option<&tokenstream::LazyTokenStream>,
-    span: rustc_span::Span,
 ) -> Option<tokenstream::TokenStream> {
+    let tokens = tokens?.clone().into_token_stream();
     if attrs.is_empty() {
@@ -619,47 +615,12 @@ fn prepend_attrs(
             ast::AttrStyle::Outer,
             "inner attributes should prevent cached tokens from existing"
         );
-
-        let source = pprust::attribute_to_string(attr);
-        let macro_filename = FileName::macro_expansion_source_code(&source);
-
-        let item = match attr.kind {
-            ast::AttrKind::Normal(ref item) => item,
-            ast::AttrKind::DocComment(..) => {
-                let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-                builder.push(stream);
-                continue;
-            }
-        };
-
-        // synthesize # [ $path $tokens ] manually here
-        let mut brackets = tokenstream::TokenStreamBuilder::new();
-
-        // For simple paths, push the identifier directly
-        if item.path.segments.len() == 1 && item.path.segments[0].args.is_none() {
-            let ident = item.path.segments[0].ident;
-            let token = token::Ident(ident.name, ident.as_str().starts_with("r#"));
-            brackets.push(tokenstream::TokenTree::token(token, ident.span));
-
-        // ... and for more complicated paths, fall back to a reparse hack that
-        // should eventually be removed.
-        } else {
-            let stream = parse_stream_from_source_str(macro_filename, source, sess, Some(span));
-            brackets.push(stream);
-        }
-
-        brackets.push(item.args.outer_tokens());
-
-        // The span we list here for `#` and for `[ ... ]` are both wrong in
-        // that it encompasses more than each token, but it hopefully is "good
-        // enough" for now at least.
-        builder.push(tokenstream::TokenTree::token(token::Pound, attr.span));
-        let delim_span = tokenstream::DelimSpan::from_single(attr.span);
-        builder.push(tokenstream::TokenTree::Delimited(
-            delim_span,
-            token::DelimToken::Bracket,
-            brackets.build(),
-        ));
+        builder.push(
+            attr.tokens
+                .clone()
+                .unwrap_or_else(|| panic!("Attribute {:?} is missing tokens!", attr))
+                .into_token_stream(),
+        );
     }
     builder.push(tokens.clone());
     Some(builder.build())
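
For intuition, the rewritten `prepend_attrs` reduces to token concatenation: replay each attribute's captured tokens, then the item's. A minimal sketch of that idea outside rustc, assuming the proc-macro2 and quote crates as stand-ins (rustc's `TokenStreamBuilder`, `LazyTokenStream`, and `attr.tokens` are internal):

    use proc_macro2::TokenStream;
    use quote::quote;

    // Replay the attributes' original tokens in front of the item's tokens,
    // instead of pretty-printing the AST to a string and re-parsing it.
    fn prepend_attrs(attr_tokens: &[TokenStream], item_tokens: TokenStream) -> TokenStream {
        let mut out = TokenStream::new();
        for attr in attr_tokens {
            out.extend(attr.clone());
        }
        out.extend(item_tokens);
        out
    }

    fn main() {
        let attr = quote! { #[inline] };
        let item = quote! { fn foo() {} };
        println!("{}", prepend_attrs(&[attr], item));
    }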
@@ -30,41 +30,53 @@ impl<'a> Parser<'a> {
         let mut just_parsed_doc_comment = false;
         loop {
             debug!("parse_outer_attributes: self.token={:?}", self.token);
-            if self.check(&token::Pound) {
-                let inner_error_reason = if just_parsed_doc_comment {
-                    "an inner attribute is not permitted following an outer doc comment"
-                } else if !attrs.is_empty() {
-                    "an inner attribute is not permitted following an outer attribute"
-                } else {
-                    DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
-                };
-                let inner_parse_policy = InnerAttrPolicy::Forbidden {
-                    reason: inner_error_reason,
-                    saw_doc_comment: just_parsed_doc_comment,
-                    prev_attr_sp: attrs.last().map(|a| a.span),
-                };
-                let attr = self.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
-                attrs.push(attr);
-                just_parsed_doc_comment = false;
+            let (attr, tokens) = if self.check(&token::Pound) {
+                self.collect_tokens(|this| {
+                    let inner_error_reason = if just_parsed_doc_comment {
+                        "an inner attribute is not permitted following an outer doc comment"
+                    } else if !attrs.is_empty() {
+                        "an inner attribute is not permitted following an outer attribute"
+                    } else {
+                        DEFAULT_UNEXPECTED_INNER_ATTR_ERR_MSG
+                    };
+                    let inner_parse_policy = InnerAttrPolicy::Forbidden {
+                        reason: inner_error_reason,
+                        saw_doc_comment: just_parsed_doc_comment,
+                        prev_attr_sp: attrs.last().map(|a| a.span),
+                    };
+                    let attr = this.parse_attribute_with_inner_parse_policy(inner_parse_policy)?;
+                    just_parsed_doc_comment = false;
+                    Ok(Some(attr))
+                })?
             } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
-                let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span);
-                if attr.style != ast::AttrStyle::Outer {
-                    self.sess
-                        .span_diagnostic
-                        .struct_span_err_with_code(
-                            self.token.span,
-                            "expected outer doc comment",
-                            error_code!(E0753),
-                        )
-                        .note(
-                            "inner doc comments like this (starting with \
-                             `//!` or `/*!`) can only appear before items",
-                        )
-                        .emit();
-                }
+                self.collect_tokens(|this| {
+                    let attr =
+                        attr::mk_doc_comment(comment_kind, attr_style, data, this.token.span);
+                    if attr.style != ast::AttrStyle::Outer {
+                        this.sess
+                            .span_diagnostic
+                            .struct_span_err_with_code(
+                                this.token.span,
+                                "expected outer doc comment",
+                                error_code!(E0753),
+                            )
+                            .note(
+                                "inner doc comments like this (starting with \
+                                 `//!` or `/*!`) can only appear before items",
+                            )
+                            .emit();
+                    }
+                    this.bump();
+                    just_parsed_doc_comment = true;
+                    Ok(Some(attr))
+                })?
+            } else {
+                (None, None)
+            };
+
+            if let Some(mut attr) = attr {
+                attr.tokens = tokens;
                 attrs.push(attr);
-                self.bump();
-                just_parsed_doc_comment = true;
             } else {
                 break;
             }
@@ -99,7 +111,7 @@ impl<'a> Parser<'a> {
                 if self.eat(&token::Not) { ast::AttrStyle::Inner } else { ast::AttrStyle::Outer };
 
             self.expect(&token::OpenDelim(token::Bracket))?;
-            let item = self.parse_attr_item()?;
+            let item = self.parse_attr_item(false)?;
             self.expect(&token::CloseDelim(token::Bracket))?;
             let attr_sp = lo.to(self.prev_token.span);
@@ -148,7 +160,7 @@ impl<'a> Parser<'a> {
     /// PATH
     /// PATH `=` UNSUFFIXED_LIT
     /// The delimiters or `=` are still put into the resulting token stream.
-    pub fn parse_attr_item(&mut self) -> PResult<'a, ast::AttrItem> {
+    pub fn parse_attr_item(&mut self, capture_tokens: bool) -> PResult<'a, ast::AttrItem> {
         let item = match self.token.kind {
             token::Interpolated(ref nt) => match **nt {
                 Nonterminal::NtMeta(ref item) => Some(item.clone().into_inner()),
@@ -160,9 +172,18 @@ impl<'a> Parser<'a> {
             self.bump();
             item
         } else {
-            let path = self.parse_path(PathStyle::Mod)?;
-            let args = self.parse_attr_args()?;
-            ast::AttrItem { path, args, tokens: None }
+            let do_parse = |this: &mut Self| {
+                let path = this.parse_path(PathStyle::Mod)?;
+                let args = this.parse_attr_args()?;
+                Ok(ast::AttrItem { path, args, tokens: None })
+            };
+            if capture_tokens {
+                let (mut item, tokens) = self.collect_tokens(do_parse)?;
+                item.tokens = tokens;
+                item
+            } else {
+                do_parse(self)?
+            }
         })
     }
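
The `capture_tokens: bool` flag threaded through `parse_attr_item` has a simple shape: run one parsing closure either under token collection or directly. A self-contained sketch with invented stand-in types (`Tokens`, `Item`, and this `collect` are not rustc APIs):

    type Tokens = Vec<String>;

    struct Item {
        tokens: Option<Tokens>,
    }

    // Stand-in for Parser::collect_tokens: run `f` and pretend we recorded tokens.
    fn collect<R>(f: impl FnOnce() -> R) -> (R, Option<Tokens>) {
        (f(), Some(vec!["captured".to_string()]))
    }

    fn parse_attr_item(capture_tokens: bool) -> Item {
        let do_parse = || Item { tokens: None };
        if capture_tokens {
            // `cfg_attr` expansion needs the attribute's tokens, so capture them.
            let (mut item, tokens) = collect(do_parse);
            item.tokens = tokens;
            item
        } else {
            // Ordinary attributes are captured by the caller, so skip the bookkeeping.
            do_parse()
        }
    }

    fn main() {
        assert!(parse_attr_item(true).tokens.is_some());
        assert!(parse_attr_item(false).tokens.is_none());
    }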
@@ -175,19 +196,31 @@ impl<'a> Parser<'a> {
         let mut attrs: Vec<ast::Attribute> = vec![];
         loop {
             // Only try to parse if it is an inner attribute (has `!`).
-            if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
-                let attr = self.parse_attribute(true)?;
-                assert_eq!(attr.style, ast::AttrStyle::Inner);
-                attrs.push(attr);
-            } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
-                // We need to get the position of this token before we bump.
-                let attr = attr::mk_doc_comment(comment_kind, attr_style, data, self.token.span);
-                if attr.style == ast::AttrStyle::Inner {
-                    attrs.push(attr);
-                    self.bump();
-                } else {
-                    break;
-                }
+            let (attr, tokens) =
+                if self.check(&token::Pound) && self.look_ahead(1, |t| t == &token::Not) {
+                    self.collect_tokens(|this| {
+                        let attr = this.parse_attribute(true)?;
+                        assert_eq!(attr.style, ast::AttrStyle::Inner);
+                        Ok(Some(attr))
+                    })?
+                } else if let token::DocComment(comment_kind, attr_style, data) = self.token.kind {
+                    self.collect_tokens(|this| {
+                        // We need to get the position of this token before we bump.
+                        let attr =
+                            attr::mk_doc_comment(comment_kind, attr_style, data, this.token.span);
+                        if attr.style == ast::AttrStyle::Inner {
+                            this.bump();
+                            Ok(Some(attr))
+                        } else {
+                            Ok(None)
+                        }
+                    })?
+                } else {
+                    (None, None)
+                };
+            if let Some(mut attr) = attr {
+                attr.tokens = tokens;
+                attrs.push(attr);
             } else {
                 break;
             }
@@ -220,7 +253,7 @@ impl<'a> Parser<'a> {
         let mut expanded_attrs = Vec::with_capacity(1);
         while self.token.kind != token::Eof {
             let lo = self.token.span;
-            let item = self.parse_attr_item()?;
+            let item = self.parse_attr_item(true)?;
             expanded_attrs.push((item, lo.to(self.prev_token.span)));
             if !self.eat(&token::Comma) {
                 break;
@@ -1116,7 +1116,7 @@ impl<'a> Parser<'a> {
     ) -> PResult<'a, P<Expr>> {
         if needs_tokens {
             let (mut expr, tokens) = self.collect_tokens(f)?;
-            expr.tokens = Some(tokens);
+            expr.tokens = tokens;
             Ok(expr)
         } else {
             f(self)
@@ -151,7 +151,7 @@ impl<'a> Parser<'a> {
         if let Some(tokens) = tokens {
             if let Some(item) = &mut item {
                 if !item.attrs.iter().any(|attr| attr.style == AttrStyle::Inner) {
-                    item.tokens = Some(tokens);
+                    item.tokens = tokens;
                 }
             }
         }
@@ -1178,8 +1178,9 @@ impl<'a> Parser<'a> {
 
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
-    /// into a `TokenStream`, and returned along with the result
-    /// of the callback.
+    /// into a `LazyTokenStream`, and returned along with the result
+    /// of the callback. The returned `LazyTokenStream` will be `None`
+    /// if no tokens were captured.
     ///
     /// Note: If your callback consumes an opening delimiter
     /// (including the case where you call `collect_tokens`
@@ -1195,7 +1196,7 @@ impl<'a> Parser<'a> {
     pub fn collect_tokens<R>(
         &mut self,
         f: impl FnOnce(&mut Self) -> PResult<'a, R>,
-    ) -> PResult<'a, (R, LazyTokenStream)> {
+    ) -> PResult<'a, (R, Option<LazyTokenStream>)> {
         let start_token = (self.token.clone(), self.token_spacing);
         let mut cursor_snapshot = self.token_cursor.clone();
@@ -1205,6 +1206,11 @@ impl<'a> Parser<'a> {
         let num_calls = new_calls - cursor_snapshot.num_next_calls;
         let desugar_doc_comments = self.desugar_doc_comments;
 
+        // We didn't capture any tokens
+        if num_calls == 0 {
+            return Ok((ret, None));
+        }
+
         // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
         // and `num_calls`, we can reconstruct the `TokenStream` seen
         // by the callback. This allows us to avoid producing a `TokenStream`
@@ -1233,7 +1239,7 @@ impl<'a> Parser<'a> {
         };
         let stream = LazyTokenStream::new(LazyTokenStreamInner::Lazy(Box::new(lazy_cb)));
 
-        Ok((ret, stream))
+        Ok((ret, Some(stream)))
     }
 
     /// `::{` or `::*`
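
The control flow above - snapshot the cursor, run the callback, return `None` when nothing was consumed - can be modeled in miniature. A toy sketch with invented types (`Tok` and this `Parser` are not rustc's; the real implementation builds a `LazyTokenStream` lazily rather than copying tokens eagerly):

    #[derive(Clone, Debug)]
    enum Tok {
        Pound,
        OpenBracket,
        Ident(&'static str),
        CloseBracket,
    }

    struct Parser {
        tokens: Vec<Tok>,
        pos: usize,
    }

    impl Parser {
        fn bump(&mut self) -> Option<Tok> {
            let t = self.tokens.get(self.pos).cloned();
            if t.is_some() {
                self.pos += 1;
            }
            t
        }

        // Run `f` and return its result together with the tokens it consumed,
        // or `None` if it consumed nothing (the `num_calls == 0` case above).
        fn collect_tokens<R>(&mut self, f: impl FnOnce(&mut Self) -> R) -> (R, Option<Vec<Tok>>) {
            let start = self.pos;
            let ret = f(self);
            if self.pos == start {
                (ret, None)
            } else {
                (ret, Some(self.tokens[start..self.pos].to_vec()))
            }
        }
    }

    fn main() {
        let mut p = Parser {
            tokens: vec![Tok::Pound, Tok::OpenBracket, Tok::Ident("inline"), Tok::CloseBracket],
            pos: 0,
        };
        // "Parse an attribute" by consuming every token, keeping the exact
        // tokens alongside the parsed result.
        let ((), tokens) = p.collect_tokens(|this| while this.bump().is_some() {});
        println!("captured: {:?}", tokens);
    }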
@@ -103,7 +103,7 @@ impl<'a> Parser<'a> {
                 // If we captured tokens during parsing (due to outer attributes),
                 // use those.
                 if item.tokens.is_none() {
-                    item.tokens = Some(tokens);
+                    item.tokens = tokens;
                 }
                 token::NtItem(item)
             }
@@ -115,7 +115,7 @@ impl<'a> Parser<'a> {
                 let (mut block, tokens) = self.collect_tokens(|this| this.parse_block())?;
                 // We may have eaten an `NtBlock`, which could already have tokens
                 if block.tokens.is_none() {
-                    block.tokens = Some(tokens);
+                    block.tokens = tokens;
                 }
                 token::NtBlock(block)
             }
@@ -124,7 +124,7 @@ impl<'a> Parser<'a> {
                 match stmt {
                     Some(mut s) => {
                         if s.tokens.is_none() {
-                            s.tokens = Some(tokens);
+                            s.tokens = tokens;
                         }
                         token::NtStmt(s)
                     }
@@ -137,7 +137,7 @@ impl<'a> Parser<'a> {
                 let (mut pat, tokens) = self.collect_tokens(|this| this.parse_pat(None))?;
                 // We may have eaten an `NtPat`, which could already have tokens
                 if pat.tokens.is_none() {
-                    pat.tokens = Some(tokens);
+                    pat.tokens = tokens;
                 }
                 token::NtPat(pat)
             }
@@ -146,7 +146,7 @@ impl<'a> Parser<'a> {
                 // If we captured tokens during parsing (due to outer attributes),
                 // use those.
                 if expr.tokens.is_none() {
-                    expr.tokens = Some(tokens);
+                    expr.tokens = tokens;
                 }
                 token::NtExpr(expr)
             }
@@ -155,7 +155,7 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_literal_maybe_minus())?;
                 // We may have eaten a nonterminal, which could already have tokens
                 if lit.tokens.is_none() {
-                    lit.tokens = Some(tokens);
+                    lit.tokens = tokens;
                 }
                 token::NtLiteral(lit)
             }
@@ -163,7 +163,7 @@ impl<'a> Parser<'a> {
                 let (mut ty, tokens) = self.collect_tokens(|this| this.parse_ty())?;
                 // We may have eaten an `NtTy`, which could already have tokens
                 if ty.tokens.is_none() {
-                    ty.tokens = Some(tokens);
+                    ty.tokens = tokens;
                 }
                 token::NtTy(ty)
             }
@@ -183,15 +183,15 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_path(PathStyle::Type))?;
                 // We may have eaten an `NtPath`, which could already have tokens
                 if path.tokens.is_none() {
-                    path.tokens = Some(tokens);
+                    path.tokens = tokens;
                 }
                 token::NtPath(path)
             }
             NonterminalKind::Meta => {
-                let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item())?;
+                let (mut attr, tokens) = self.collect_tokens(|this| this.parse_attr_item(false))?;
                 // We may have eaten a nonterminal, which could already have tokens
                 if attr.tokens.is_none() {
-                    attr.tokens = Some(tokens);
+                    attr.tokens = tokens;
                 }
                 token::NtMeta(P(attr))
             }
@@ -201,7 +201,7 @@ impl<'a> Parser<'a> {
                     self.collect_tokens(|this| this.parse_visibility(FollowedByType::Yes))?;
                 // We may have eaten an `NtVis`, which could already have tokens
                 if vis.tokens.is_none() {
-                    vis.tokens = Some(tokens);
+                    vis.tokens = tokens;
                 }
                 token::NtVis(vis)
             }
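
Every `parse_nonterminal` arm above repeats the same attach-if-absent step. Expressed as a generic helper (hypothetical; rustc writes it inline per nonterminal kind):

    fn set_tokens_if_absent<T>(slot: &mut Option<T>, tokens: Option<T>) {
        // An already-eaten nonterminal may carry its own tokens; keep those.
        if slot.is_none() {
            *slot = tokens;
        }
    }

    fn main() {
        let mut existing = Some(vec![1, 2]);
        set_tokens_if_absent(&mut existing, Some(vec![3]));
        assert_eq!(existing, Some(vec![1, 2])); // original tokens preserved
    }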