1
Fork 0

Rollup merge of #128483 - nnethercote:still-more-cfg-cleanups, r=petrochenkov

Still more `cfg` cleanups

Found while looking closely at `cfg`/`cfg_attr` processing code.

r? `@petrochenkov`
This commit is contained in:
Matthias Krüger 2024-08-03 11:17:44 +02:00 committed by GitHub
commit dee57ce043
No known key found for this signature in database
GPG key ID: B5690EEEBB952194
7 changed files with 169 additions and 153 deletions

View file

@ -202,7 +202,7 @@ impl CfgEval<'_> {
} }
// Now that we have our re-parsed `AttrTokenStream`, recursively configuring // Now that we have our re-parsed `AttrTokenStream`, recursively configuring
// our attribute target will correctly the tokens as well. // our attribute target will correctly configure the tokens as well.
flat_map_annotatable(self, annotatable) flat_map_annotatable(self, annotatable)
} }
} }

View file

@ -8,7 +8,7 @@ use rustc_span::{sym, BytePos, Span};
use thin_vec::ThinVec; use thin_vec::ThinVec;
use tracing::debug; use tracing::debug;
use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, PathStyle}; use super::{AttrWrapper, Capturing, FnParseMode, ForceCollect, Parser, ParserRange, PathStyle};
use crate::{errors, fluent_generated as fluent, maybe_whole}; use crate::{errors, fluent_generated as fluent, maybe_whole};
// Public for rustfmt usage // Public for rustfmt usage
@ -313,8 +313,8 @@ impl<'a> Parser<'a> {
// inner attribute, for possible later processing in a `LazyAttrTokenStream`. // inner attribute, for possible later processing in a `LazyAttrTokenStream`.
if let Capturing::Yes = self.capture_state.capturing { if let Capturing::Yes = self.capture_state.capturing {
let end_pos = self.num_bump_calls; let end_pos = self.num_bump_calls;
let range = start_pos..end_pos; let parser_range = ParserRange(start_pos..end_pos);
self.capture_state.inner_attr_ranges.insert(attr.id, range); self.capture_state.inner_attr_parser_ranges.insert(attr.id, parser_range);
} }
attrs.push(attr); attrs.push(attr);
} else { } else {

View file

@ -10,7 +10,10 @@ use rustc_errors::PResult;
use rustc_session::parse::ParseSess; use rustc_session::parse::ParseSess;
use rustc_span::{sym, Span, DUMMY_SP}; use rustc_span::{sym, Span, DUMMY_SP};
use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor}; use super::{
Capturing, FlatToken, ForceCollect, NodeRange, NodeReplacement, Parser, ParserRange,
TokenCursor,
};
/// A wrapper type to ensure that the parser handles outer attributes correctly. /// A wrapper type to ensure that the parser handles outer attributes correctly.
/// When we parse outer attributes, we need to ensure that we capture tokens /// When we parse outer attributes, we need to ensure that we capture tokens
@ -28,8 +31,8 @@ use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCurso
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct AttrWrapper { pub struct AttrWrapper {
attrs: AttrVec, attrs: AttrVec,
// The start of the outer attributes in the token cursor. // The start of the outer attributes in the parser's token stream.
// This allows us to create a `ReplaceRange` for the entire attribute // This lets us create a `NodeReplacement` for the entire attribute
// target, including outer attributes. // target, including outer attributes.
start_pos: u32, start_pos: u32,
} }
@ -53,10 +56,9 @@ impl AttrWrapper {
/// Prepend `self.attrs` to `attrs`. /// Prepend `self.attrs` to `attrs`.
// FIXME: require passing an NT to prevent misuse of this method // FIXME: require passing an NT to prevent misuse of this method
pub(crate) fn prepend_to_nt_inner(self, attrs: &mut AttrVec) { pub(crate) fn prepend_to_nt_inner(mut self, attrs: &mut AttrVec) {
let mut self_attrs = self.attrs; mem::swap(attrs, &mut self.attrs);
mem::swap(attrs, &mut self_attrs); attrs.extend(self.attrs);
attrs.extend(self_attrs);
} }
pub fn is_empty(&self) -> bool { pub fn is_empty(&self) -> bool {
@ -89,7 +91,7 @@ struct LazyAttrTokenStreamImpl {
cursor_snapshot: TokenCursor, cursor_snapshot: TokenCursor,
num_calls: u32, num_calls: u32,
break_last_token: bool, break_last_token: bool,
replace_ranges: Box<[ReplaceRange]>, node_replacements: Box<[NodeReplacement]>,
} }
impl ToAttrTokenStream for LazyAttrTokenStreamImpl { impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
@ -104,21 +106,24 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
.chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next()))) .chain(iter::repeat_with(|| FlatToken::Token(cursor_snapshot.next())))
.take(self.num_calls as usize); .take(self.num_calls as usize);
if self.replace_ranges.is_empty() { if self.node_replacements.is_empty() {
make_attr_token_stream(tokens, self.break_last_token) make_attr_token_stream(tokens, self.break_last_token)
} else { } else {
let mut tokens: Vec<_> = tokens.collect(); let mut tokens: Vec<_> = tokens.collect();
let mut replace_ranges = self.replace_ranges.to_vec(); let mut node_replacements = self.node_replacements.to_vec();
replace_ranges.sort_by_key(|(range, _)| range.start); node_replacements.sort_by_key(|(range, _)| range.0.start);
#[cfg(debug_assertions)] #[cfg(debug_assertions)]
for [(range, tokens), (next_range, next_tokens)] in replace_ranges.array_windows() { for [(node_range, tokens), (next_node_range, next_tokens)] in
node_replacements.array_windows()
{
assert!( assert!(
range.end <= next_range.start || range.end >= next_range.end, node_range.0.end <= next_node_range.0.start
"Replace ranges should either be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})", || node_range.0.end >= next_node_range.0.end,
range, "Node ranges should be disjoint or nested: ({:?}, {:?}) ({:?}, {:?})",
node_range,
tokens, tokens,
next_range, next_node_range,
next_tokens, next_tokens,
); );
} }
@ -136,20 +141,23 @@ impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
// start position, we ensure that any (outer) replace range which // start position, we ensure that any (outer) replace range which
// encloses another (inner) replace range will fully overwrite the // encloses another (inner) replace range will fully overwrite the
// inner range's replacement. // inner range's replacement.
for (range, target) in replace_ranges.into_iter().rev() { for (node_range, target) in node_replacements.into_iter().rev() {
assert!(!range.is_empty(), "Cannot replace an empty range: {range:?}"); assert!(
!node_range.0.is_empty(),
"Cannot replace an empty node range: {:?}",
node_range.0
);
// Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus // Replace the tokens in range with zero or one `FlatToken::AttrsTarget`s, plus
// enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the // enough `FlatToken::Empty`s to fill up the rest of the range. This keeps the
// total length of `tokens` constant throughout the replacement process, allowing // total length of `tokens` constant throughout the replacement process, allowing
// us to use all of the `ReplaceRanges` entries without adjusting indices. // us to do all replacements without adjusting indices.
let target_len = target.is_some() as usize; let target_len = target.is_some() as usize;
tokens.splice( tokens.splice(
(range.start as usize)..(range.end as usize), (node_range.0.start as usize)..(node_range.0.end as usize),
target target.into_iter().map(|target| FlatToken::AttrsTarget(target)).chain(
.into_iter() iter::repeat(FlatToken::Empty).take(node_range.0.len() - target_len),
.map(|target| FlatToken::AttrsTarget(target)) ),
.chain(iter::repeat(FlatToken::Empty).take(range.len() - target_len)),
); );
} }
make_attr_token_stream(tokens.into_iter(), self.break_last_token) make_attr_token_stream(tokens.into_iter(), self.break_last_token)
@ -216,7 +224,7 @@ impl<'a> Parser<'a> {
let cursor_snapshot = self.token_cursor.clone(); let cursor_snapshot = self.token_cursor.clone();
let start_pos = self.num_bump_calls; let start_pos = self.num_bump_calls;
let has_outer_attrs = !attrs.attrs.is_empty(); let has_outer_attrs = !attrs.attrs.is_empty();
let replace_ranges_start = self.capture_state.replace_ranges.len(); let parser_replacements_start = self.capture_state.parser_replacements.len();
// We set and restore `Capturing::Yes` on either side of the call to // We set and restore `Capturing::Yes` on either side of the call to
// `f`, so we can distinguish the outermost call to // `f`, so we can distinguish the outermost call to
@ -271,7 +279,7 @@ impl<'a> Parser<'a> {
return Ok(ret); return Ok(ret);
} }
let replace_ranges_end = self.capture_state.replace_ranges.len(); let parser_replacements_end = self.capture_state.parser_replacements.len();
assert!( assert!(
!(self.break_last_token && capture_trailing), !(self.break_last_token && capture_trailing),
@ -288,15 +296,16 @@ impl<'a> Parser<'a> {
let num_calls = end_pos - start_pos; let num_calls = end_pos - start_pos;
// Take the captured ranges for any inner attributes that we parsed in // Take the captured `ParserRange`s for any inner attributes that we parsed in
// `Parser::parse_inner_attributes`, and pair them in a `ReplaceRange` // `Parser::parse_inner_attributes`, and pair them in a `ParserReplacement` with `None`,
// with `None`, which means the relevant tokens will be removed. (More // which means the relevant tokens will be removed. (More details below.)
// details below.) let mut inner_attr_parser_replacements = Vec::new();
let mut inner_attr_replace_ranges = Vec::new();
for attr in ret.attrs() { for attr in ret.attrs() {
if attr.style == ast::AttrStyle::Inner { if attr.style == ast::AttrStyle::Inner {
if let Some(attr_range) = self.capture_state.inner_attr_ranges.remove(&attr.id) { if let Some(inner_attr_parser_range) =
inner_attr_replace_ranges.push((attr_range, None)); self.capture_state.inner_attr_parser_ranges.remove(&attr.id)
{
inner_attr_parser_replacements.push((inner_attr_parser_range, None));
} else { } else {
self.dcx().span_delayed_bug(attr.span, "Missing token range for attribute"); self.dcx().span_delayed_bug(attr.span, "Missing token range for attribute");
} }
@ -305,18 +314,21 @@ impl<'a> Parser<'a> {
// This is hot enough for `deep-vector` that checking the conditions for an empty iterator // This is hot enough for `deep-vector` that checking the conditions for an empty iterator
// is measurably faster than actually executing the iterator. // is measurably faster than actually executing the iterator.
let replace_ranges: Box<[ReplaceRange]> = let node_replacements: Box<[_]> = if parser_replacements_start == parser_replacements_end
if replace_ranges_start == replace_ranges_end && inner_attr_replace_ranges.is_empty() { && inner_attr_parser_replacements.is_empty()
{
Box::new([]) Box::new([])
} else { } else {
// Grab any replace ranges that occur *inside* the current AST node. We will // Grab any replace ranges that occur *inside* the current AST node. Convert them
// perform the actual replacement only when we convert the `LazyAttrTokenStream` to // from `ParserRange` form to `NodeRange` form. We will perform the actual
// an `AttrTokenStream`. // replacement only when we convert the `LazyAttrTokenStream` to an
self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end] // `AttrTokenStream`.
self.capture_state.parser_replacements
[parser_replacements_start..parser_replacements_end]
.iter() .iter()
.cloned() .cloned()
.chain(inner_attr_replace_ranges.iter().cloned()) .chain(inner_attr_parser_replacements.iter().cloned())
.map(|(range, data)| ((range.start - start_pos)..(range.end - start_pos), data)) .map(|(parser_range, data)| (NodeRange::new(parser_range, start_pos), data))
.collect() .collect()
}; };
@ -324,18 +336,19 @@ impl<'a> Parser<'a> {
// //
// When parsing `g`: // When parsing `g`:
// - `start_pos..end_pos` is `12..33` (`fn g { ... }`, excluding the outer attr). // - `start_pos..end_pos` is `12..33` (`fn g { ... }`, excluding the outer attr).
// - `inner_attr_replace_ranges` has one entry (`5..15`, when counting from `fn`), to // - `inner_attr_parser_replacements` has one entry (`ParserRange(17..27)`), to
// delete the inner attr's tokens. // delete the inner attr's tokens.
// - This entry is put into the lazy tokens for `g`, i.e. deleting the inner attr from // - This entry is converted to `NodeRange(5..15)` (relative to the `fn`) and put into
// those tokens (if they get evaluated). // the lazy tokens for `g`, i.e. deleting the inner attr from those tokens (if they get
// evaluated).
// - Those lazy tokens are also put into an `AttrsTarget` that is appended to `self`'s // - Those lazy tokens are also put into an `AttrsTarget` that is appended to `self`'s
// replace ranges at the bottom of this function, for processing when parsing `m`. // replace ranges at the bottom of this function, for processing when parsing `m`.
// - `replace_ranges_start..replace_ranges_end` is empty. // - `parser_replacements_start..parser_replacements_end` is empty.
// //
// When parsing `m`: // When parsing `m`:
// - `start_pos..end_pos` is `0..34` (`mod m`, excluding the `#[cfg_eval]` attribute). // - `start_pos..end_pos` is `0..34` (`mod m`, excluding the `#[cfg_eval]` attribute).
// - `inner_attr_replace_ranges` is empty. // - `inner_attr_parser_replacements` is empty.
// - `replace_range_start..replace_ranges_end` has one entry. // - `parser_replacements_start..parser_replacements_end` has one entry.
// - One `AttrsTarget` (added below when parsing `g`) to replace all of `g` (`3..33`, // - One `AttrsTarget` (added below when parsing `g`) to replace all of `g` (`3..33`,
// including its outer attribute), with: // including its outer attribute), with:
// - `attrs`: includes the outer and the inner attr. // - `attrs`: includes the outer and the inner attr.
@ -346,7 +359,7 @@ impl<'a> Parser<'a> {
num_calls, num_calls,
cursor_snapshot, cursor_snapshot,
break_last_token: self.break_last_token, break_last_token: self.break_last_token,
replace_ranges, node_replacements,
}); });
// If we support tokens and don't already have them, store the newly captured tokens. // If we support tokens and don't already have them, store the newly captured tokens.
@ -367,7 +380,7 @@ impl<'a> Parser<'a> {
// What is the status here when parsing the example code at the top of this method? // What is the status here when parsing the example code at the top of this method?
// //
// When parsing `g`, we add one entry: // When parsing `g`, we add one entry:
// - The `start_pos..end_pos` (`3..33`) entry has a new `AttrsTarget` with: // - The pushed entry (`ParserRange(3..33)`) has a new `AttrsTarget` with:
// - `attrs`: includes the outer and the inner attr. // - `attrs`: includes the outer and the inner attr.
// - `tokens`: lazy tokens for `g` (with its inner attr deleted). // - `tokens`: lazy tokens for `g` (with its inner attr deleted).
// //
@ -378,12 +391,14 @@ impl<'a> Parser<'a> {
// cfg-expand this AST node. // cfg-expand this AST node.
let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos }; let start_pos = if has_outer_attrs { attrs.start_pos } else { start_pos };
let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens }; let target = AttrsTarget { attrs: ret.attrs().iter().cloned().collect(), tokens };
self.capture_state.replace_ranges.push((start_pos..end_pos, Some(target))); self.capture_state
.parser_replacements
.push((ParserRange(start_pos..end_pos), Some(target)));
} else if matches!(self.capture_state.capturing, Capturing::No) { } else if matches!(self.capture_state.capturing, Capturing::No) {
// Only clear the ranges once we've finished capturing entirely, i.e. we've finished // Only clear the ranges once we've finished capturing entirely, i.e. we've finished
// the outermost call to this method. // the outermost call to this method.
self.capture_state.replace_ranges.clear(); self.capture_state.parser_replacements.clear();
self.capture_state.inner_attr_ranges.clear(); self.capture_state.inner_attr_parser_ranges.clear();
} }
Ok(ret) Ok(ret)
} }

View file

@ -40,14 +40,6 @@ use super::{
}; };
use crate::{errors, maybe_recover_from_interpolated_ty_qpath}; use crate::{errors, maybe_recover_from_interpolated_ty_qpath};
#[derive(Debug)]
pub(super) enum LhsExpr {
// Already parsed just the outer attributes.
Unparsed { attrs: AttrWrapper },
// Already parsed the expression.
Parsed { expr: P<Expr>, starts_statement: bool },
}
#[derive(Debug)] #[derive(Debug)]
enum DestructuredFloat { enum DestructuredFloat {
/// 1e2 /// 1e2
@ -113,30 +105,31 @@ impl<'a> Parser<'a> {
r: Restrictions, r: Restrictions,
attrs: AttrWrapper, attrs: AttrWrapper,
) -> PResult<'a, P<Expr>> { ) -> PResult<'a, P<Expr>> {
self.with_res(r, |this| this.parse_expr_assoc_with(0, LhsExpr::Unparsed { attrs })) self.with_res(r, |this| this.parse_expr_assoc_with(0, attrs))
} }
/// Parses an associative expression with operators of at least `min_prec` precedence. /// Parses an associative expression with operators of at least `min_prec` precedence.
pub(super) fn parse_expr_assoc_with( pub(super) fn parse_expr_assoc_with(
&mut self, &mut self,
min_prec: usize, min_prec: usize,
lhs: LhsExpr, attrs: AttrWrapper,
) -> PResult<'a, P<Expr>> { ) -> PResult<'a, P<Expr>> {
let mut starts_stmt = false; let lhs = if self.token.is_range_separator() {
let mut lhs = match lhs {
LhsExpr::Parsed { expr, starts_statement } => {
starts_stmt = starts_statement;
expr
}
LhsExpr::Unparsed { attrs } => {
if self.token.is_range_separator() {
return self.parse_expr_prefix_range(attrs); return self.parse_expr_prefix_range(attrs);
} else { } else {
self.parse_expr_prefix(attrs)? self.parse_expr_prefix(attrs)?
}
}
}; };
self.parse_expr_assoc_rest_with(min_prec, false, lhs)
}
/// Parses the rest of an associative expression (i.e. the part after the lhs) with operators
/// of at least `min_prec` precedence.
pub(super) fn parse_expr_assoc_rest_with(
&mut self,
min_prec: usize,
starts_stmt: bool,
mut lhs: P<Expr>,
) -> PResult<'a, P<Expr>> {
if !self.should_continue_as_assoc_expr(&lhs) { if !self.should_continue_as_assoc_expr(&lhs) {
return Ok(lhs); return Ok(lhs);
} }
@ -272,7 +265,7 @@ impl<'a> Parser<'a> {
}; };
let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| { let rhs = self.with_res(restrictions - Restrictions::STMT_EXPR, |this| {
let attrs = this.parse_outer_attributes()?; let attrs = this.parse_outer_attributes()?;
this.parse_expr_assoc_with(prec + prec_adjustment, LhsExpr::Unparsed { attrs }) this.parse_expr_assoc_with(prec + prec_adjustment, attrs)
})?; })?;
let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span); let span = self.mk_expr_sp(&lhs, lhs_span, rhs.span);
@ -447,7 +440,7 @@ impl<'a> Parser<'a> {
let maybe_lt = self.token.clone(); let maybe_lt = self.token.clone();
let attrs = self.parse_outer_attributes()?; let attrs = self.parse_outer_attributes()?;
Some( Some(
self.parse_expr_assoc_with(prec + 1, LhsExpr::Unparsed { attrs }) self.parse_expr_assoc_with(prec + 1, attrs)
.map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?, .map_err(|err| self.maybe_err_dotdotlt_syntax(maybe_lt, err))?,
) )
} else { } else {
@ -504,10 +497,7 @@ impl<'a> Parser<'a> {
let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() { let (span, opt_end) = if this.is_at_start_of_range_notation_rhs() {
// RHS must be parsed with more associativity than the dots. // RHS must be parsed with more associativity than the dots.
let attrs = this.parse_outer_attributes()?; let attrs = this.parse_outer_attributes()?;
this.parse_expr_assoc_with( this.parse_expr_assoc_with(op.unwrap().precedence() + 1, attrs)
op.unwrap().precedence() + 1,
LhsExpr::Unparsed { attrs },
)
.map(|x| (lo.to(x.span), Some(x))) .map(|x| (lo.to(x.span), Some(x)))
.map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))? .map_err(|err| this.maybe_err_dotdotlt_syntax(maybe_lt, err))?
} else { } else {
@ -889,7 +879,7 @@ impl<'a> Parser<'a> {
mut e: P<Expr>, mut e: P<Expr>,
lo: Span, lo: Span,
) -> PResult<'a, P<Expr>> { ) -> PResult<'a, P<Expr>> {
let res = ensure_sufficient_stack(|| { let mut res = ensure_sufficient_stack(|| {
loop { loop {
let has_question = let has_question =
if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) { if self.prev_token.kind == TokenKind::Ident(kw::Return, IdentIsRaw::No) {
@ -936,17 +926,13 @@ impl<'a> Parser<'a> {
// Stitch the list of outer attributes onto the return value. A little // Stitch the list of outer attributes onto the return value. A little
// bit ugly, but the best way given the current code structure. // bit ugly, but the best way given the current code structure.
if attrs.is_empty() { if !attrs.is_empty()
res && let Ok(expr) = &mut res
} else { {
res.map(|expr| { mem::swap(&mut expr.attrs, &mut attrs);
expr.map(|mut expr| { expr.attrs.extend(attrs)
attrs.extend(expr.attrs);
expr.attrs = attrs;
expr
})
})
} }
res
} }
pub(super) fn parse_dot_suffix_expr( pub(super) fn parse_dot_suffix_expr(
@ -2647,10 +2633,7 @@ impl<'a> Parser<'a> {
self.expect(&token::Eq)?; self.expect(&token::Eq)?;
} }
let attrs = self.parse_outer_attributes()?; let attrs = self.parse_outer_attributes()?;
let expr = self.parse_expr_assoc_with( let expr = self.parse_expr_assoc_with(1 + prec_let_scrutinee_needs_par(), attrs)?;
1 + prec_let_scrutinee_needs_par(),
LhsExpr::Unparsed { attrs },
)?;
let span = lo.to(expr.span); let span = lo.to(expr.span);
Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered))) Ok(self.mk_expr(span, ExprKind::Let(pat, expr, span, recovered)))
} }

View file

@ -192,24 +192,54 @@ struct ClosureSpans {
body: Span, body: Span,
} }
/// Indicates a range of tokens that should be replaced by /// A token range within a `Parser`'s full token stream.
/// the tokens in the provided `AttrsTarget`. This is used in two #[derive(Clone, Debug)]
/// places during token collection: struct ParserRange(Range<u32>);
/// A token range within an individual AST node's (lazy) token stream, i.e.
/// relative to that node's first token. Distinct from `ParserRange` so the two
/// kinds of range can't be mixed up.
#[derive(Clone, Debug)]
struct NodeRange(Range<u32>);
/// Indicates a range of tokens that should be replaced by an `AttrsTarget`
/// (replacement) or be replaced by nothing (deletion). This is used in two
/// places during token collection.
/// ///
/// 1. During the parsing of an AST node that may have a `#[derive]` /// 1. Replacement. During the parsing of an AST node that may have a
/// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]` /// `#[derive]` attribute, when we parse a nested AST node that has `#[cfg]`
/// In this case, we use a `ReplaceRange` to replace the entire inner AST node /// or `#[cfg_attr]`, we replace the entire inner AST node with
/// with `FlatToken::AttrsTarget`, allowing us to perform eager cfg-expansion /// `FlatToken::AttrsTarget`. This lets us perform eager cfg-expansion on an
/// on an `AttrTokenStream`. /// `AttrTokenStream`.
/// ///
/// 2. When we parse an inner attribute while collecting tokens. We /// 2. Deletion. We delete inner attributes from all collected token streams,
/// remove inner attributes from the token stream entirely, and /// and instead track them through the `attrs` field on the AST node. This
/// instead track them through the `attrs` field on the AST node. /// lets us manipulate them similarly to outer attributes. When we create a
/// This allows us to easily manipulate them (for example, removing /// `TokenStream`, the inner attributes are inserted into the proper place
/// the first macro inner attribute to invoke a proc-macro). /// in the token stream.
/// When create a `TokenStream`, the inner attributes get inserted ///
/// into the proper place in the token stream. /// Each replacement starts off in `ParserReplacement` form but is converted to
type ReplaceRange = (Range<u32>, Option<AttrsTarget>); /// `NodeReplacement` form when it is attached to a single AST node, via
/// `LazyAttrTokenStreamImpl`.
type ParserReplacement = (ParserRange, Option<AttrsTarget>);
/// See the comment on `ParserReplacement`.
type NodeReplacement = (NodeRange, Option<AttrsTarget>);
impl NodeRange {
// Converts a range within a parser's tokens to a range within a
// node's tokens beginning at `start_pos`.
//
// For example, imagine a parser with 50 tokens in its token stream, a
// function that spans `ParserRange(20..40)` and an inner attribute within
// that function that spans `ParserRange(30..35)`. We would find the inner
// attribute's range within the function's tokens by subtracting 20, which
// is the position of the function's start token. This gives
// `NodeRange(10..15)`.
fn new(ParserRange(parser_range): ParserRange, start_pos: u32) -> NodeRange {
NodeRange((parser_range.start - start_pos)..(parser_range.end - start_pos))
}
}
/// Controls how we capture tokens. Capturing can be expensive, /// Controls how we capture tokens. Capturing can be expensive,
/// so we try to avoid performing capturing in cases where /// so we try to avoid performing capturing in cases where
@ -226,8 +256,8 @@ enum Capturing {
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
struct CaptureState { struct CaptureState {
capturing: Capturing, capturing: Capturing,
replace_ranges: Vec<ReplaceRange>, parser_replacements: Vec<ParserReplacement>,
inner_attr_ranges: FxHashMap<AttrId, Range<u32>>, inner_attr_parser_ranges: FxHashMap<AttrId, ParserRange>,
} }
/// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that /// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
@ -417,8 +447,8 @@ impl<'a> Parser<'a> {
subparser_name, subparser_name,
capture_state: CaptureState { capture_state: CaptureState {
capturing: Capturing::No, capturing: Capturing::No,
replace_ranges: Vec::new(), parser_replacements: Vec::new(),
inner_attr_ranges: Default::default(), inner_attr_parser_ranges: Default::default(),
}, },
current_closure: None, current_closure: None,
recovery: Recovery::Allowed, recovery: Recovery::Allowed,

View file

@ -25,7 +25,7 @@ use crate::errors::{
UnexpectedParenInRangePat, UnexpectedParenInRangePatSugg, UnexpectedParenInRangePat, UnexpectedParenInRangePatSugg,
UnexpectedVertVertBeforeFunctionParam, UnexpectedVertVertInPattern, WrapInParens, UnexpectedVertVertBeforeFunctionParam, UnexpectedVertVertInPattern, WrapInParens,
}; };
use crate::parser::expr::{could_be_unclosed_char_literal, LhsExpr}; use crate::parser::expr::could_be_unclosed_char_literal;
use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole}; use crate::{maybe_recover_from_interpolated_ty_qpath, maybe_whole};
#[derive(PartialEq, Copy, Clone)] #[derive(PartialEq, Copy, Clone)]
@ -403,8 +403,9 @@ impl<'a> Parser<'a> {
// Parse an associative expression such as `+ expr`, `% expr`, ... // Parse an associative expression such as `+ expr`, `% expr`, ...
// Assignements, ranges and `|` are disabled by [`Restrictions::IS_PAT`]. // Assignements, ranges and `|` are disabled by [`Restrictions::IS_PAT`].
let lhs = LhsExpr::Parsed { expr, starts_statement: false }; if let Ok(expr) =
if let Ok(expr) = snapshot.parse_expr_assoc_with(0, lhs).map_err(|err| err.cancel()) { snapshot.parse_expr_assoc_rest_with(0, false, expr).map_err(|err| err.cancel())
{
// We got a valid expression. // We got a valid expression.
self.restore_snapshot(snapshot); self.restore_snapshot(snapshot);
self.restrictions.remove(Restrictions::IS_PAT); self.restrictions.remove(Restrictions::IS_PAT);

View file

@ -17,7 +17,6 @@ use thin_vec::{thin_vec, ThinVec};
use super::attr::InnerAttrForbiddenReason; use super::attr::InnerAttrForbiddenReason;
use super::diagnostics::AttemptLocalParseRecovery; use super::diagnostics::AttemptLocalParseRecovery;
use super::expr::LhsExpr;
use super::pat::{PatternLocation, RecoverComma}; use super::pat::{PatternLocation, RecoverComma};
use super::path::PathStyle; use super::path::PathStyle;
use super::{ use super::{
@ -66,7 +65,12 @@ impl<'a> Parser<'a> {
} }
Ok(Some(if self.token.is_keyword(kw::Let) { Ok(Some(if self.token.is_keyword(kw::Let) {
self.parse_local_mk(lo, attrs, capture_semi, force_collect)? self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
this.expect_keyword(kw::Let)?;
let local = this.parse_local(attrs)?;
let trailing = capture_semi && this.token.kind == token::Semi;
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), trailing))
})?
} else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() { } else if self.is_kw_followed_by_ident(kw::Mut) && self.may_recover() {
self.recover_stmt_local_after_let( self.recover_stmt_local_after_let(
lo, lo,
@ -112,13 +116,12 @@ impl<'a> Parser<'a> {
} }
} }
} else if let Some(item) = self.parse_item_common( } else if let Some(item) = self.parse_item_common(
attrs.clone(), attrs.clone(), // FIXME: unwanted clone of attrs
false, false,
true, true,
FnParseMode { req_name: |_| true, req_body: true }, FnParseMode { req_name: |_| true, req_body: true },
force_collect, force_collect,
)? { )? {
// FIXME: Bad copy of attrs
self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item))) self.mk_stmt(lo.to(item.span), StmtKind::Item(P(item)))
} else if self.eat(&token::Semi) { } else if self.eat(&token::Semi) {
// Do not attempt to parse an expression if we're done here. // Do not attempt to parse an expression if we're done here.
@ -173,7 +176,7 @@ impl<'a> Parser<'a> {
// Perform this outside of the `collect_tokens_trailing_token` closure, // Perform this outside of the `collect_tokens_trailing_token` closure,
// since our outer attributes do not apply to this part of the expression // since our outer attributes do not apply to this part of the expression
let expr = self.with_res(Restrictions::STMT_EXPR, |this| { let expr = self.with_res(Restrictions::STMT_EXPR, |this| {
this.parse_expr_assoc_with(0, LhsExpr::Parsed { expr, starts_statement: true }) this.parse_expr_assoc_rest_with(0, true, expr)
})?; })?;
Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr))) Ok(self.mk_stmt(lo.to(self.prev_token.span), StmtKind::Expr(expr)))
} else { } else {
@ -206,8 +209,7 @@ impl<'a> Parser<'a> {
let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac)); let e = self.mk_expr(lo.to(hi), ExprKind::MacCall(mac));
let e = self.maybe_recover_from_bad_qpath(e)?; let e = self.maybe_recover_from_bad_qpath(e)?;
let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?; let e = self.parse_expr_dot_or_call_with(attrs, e, lo)?;
let e = self let e = self.parse_expr_assoc_rest_with(0, false, e)?;
.parse_expr_assoc_with(0, LhsExpr::Parsed { expr: e, starts_statement: false })?;
StmtKind::Expr(e) StmtKind::Expr(e)
}; };
Ok(self.mk_stmt(lo.to(hi), kind)) Ok(self.mk_stmt(lo.to(hi), kind))
@ -247,21 +249,6 @@ impl<'a> Parser<'a> {
Ok(stmt) Ok(stmt)
} }
fn parse_local_mk(
&mut self,
lo: Span,
attrs: AttrWrapper,
capture_semi: bool,
force_collect: ForceCollect,
) -> PResult<'a, Stmt> {
self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
this.expect_keyword(kw::Let)?;
let local = this.parse_local(attrs)?;
let trailing = capture_semi && this.token.kind == token::Semi;
Ok((this.mk_stmt(lo.to(this.prev_token.span), StmtKind::Let(local)), trailing))
})
}
/// Parses a local variable declaration. /// Parses a local variable declaration.
fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> { fn parse_local(&mut self, attrs: AttrVec) -> PResult<'a, P<Local>> {
let lo = self.prev_token.span; let lo = self.prev_token.span;