Address review comments

parent 0b411f56e1
commit 3321d70161

6 changed files with 201 additions and 179 deletions
compiler/rustc_parse/src/parser/attr.rs

@@ -74,7 +74,7 @@ impl<'a> Parser<'a> {
                 break;
             }
         }
-        Ok(AttrWrapper { attrs })
+        Ok(AttrWrapper::new(attrs))
     }

     /// Matches `attribute = # ! [ meta_item ]`.
compiler/rustc_parse/src/parser/attr_wrapper.rs (new file, 185 lines)
@@ -0,0 +1,185 @@
+use super::attr;
+use super::{ForceCollect, Parser, TokenCursor, TrailingToken};
+use rustc_ast::token::{self, Token, TokenKind};
+use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
+use rustc_ast::tokenstream::{DelimSpan, LazyTokenStream, Spacing};
+use rustc_ast::HasTokens;
+use rustc_ast::{self as ast};
+use rustc_errors::PResult;
+use rustc_span::{Span, DUMMY_SP};
+
+/// A wrapper type to ensure that the parser handles outer attributes correctly.
+/// When we parse outer attributes, we need to ensure that we capture tokens
+/// for the attribute target. This allows us to perform cfg-expansion on
+/// a token stream before we invoke a derive proc-macro.
+///
+/// This wrapper prevents direct access to the underlying `Vec<ast::Attribute>`.
+/// Parsing code can only get access to the underlying attributes
+/// by passing an `AttrWrapper` to `collect_tokens_trailing_token`.
+/// This makes it difficult to accidentally construct an AST node
+/// (which stores a `Vec<ast::Attribute>`) without first collecting tokens.
+///
+/// This struct has its own module, to ensure that the parser code
+/// cannot directly access the `attrs` field.
+#[derive(Debug, Clone)]
+pub struct AttrWrapper {
+    attrs: Vec<ast::Attribute>,
+}
+
+impl AttrWrapper {
+    pub fn empty() -> AttrWrapper {
+        AttrWrapper { attrs: vec![] }
+    }
+    pub fn new(attrs: Vec<ast::Attribute>) -> AttrWrapper {
+        AttrWrapper { attrs }
+    }
+    // FIXME: Delay span bug here?
+    pub(crate) fn take_for_recovery(self) -> Vec<ast::Attribute> {
+        self.attrs
+    }
+    pub fn is_empty(&self) -> bool {
+        self.attrs.is_empty()
+    }
+}
+
+impl<'a> Parser<'a> {
+    /// Records all tokens consumed by the provided callback,
+    /// including the current token. These tokens are collected
+    /// into a `LazyTokenStream`, and returned along with the result
+    /// of the callback.
+    ///
+    /// Note: If your callback consumes an opening delimiter
+    /// (including the case where you call `collect_tokens`
+    /// when the current token is an opening delimiter),
+    /// you must also consume the corresponding closing delimiter.
+    ///
+    /// That is, you can consume
+    /// `something ([{ }])` or `([{}])`, but not `([{}]`
+    ///
+    /// This restriction shouldn't be an issue in practice,
+    /// since this function is used to record the tokens for
+    /// a parsed AST item, which always has matching delimiters.
+    pub fn collect_tokens_trailing_token<R: HasTokens>(
+        &mut self,
+        attrs: AttrWrapper,
+        force_collect: ForceCollect,
+        f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, (R, TrailingToken)>,
+    ) -> PResult<'a, R> {
+        if matches!(force_collect, ForceCollect::No) && !attr::maybe_needs_tokens(&attrs.attrs) {
+            return Ok(f(self, attrs.attrs)?.0);
+        }
+        let start_token = (self.token.clone(), self.token_spacing);
+        let cursor_snapshot = self.token_cursor.clone();
+
+        let (mut ret, trailing_token) = f(self, attrs.attrs)?;
+
+        // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
+        // and `num_calls`, we can reconstruct the `TokenStream` seen
+        // by the callback. This allows us to avoid producing a `TokenStream`
+        // if it is never needed - for example, a captured `macro_rules!`
+        // argument that is never passed to a proc macro.
+        // In practice, token stream creation happens rarely compared to
+        // calls to `collect_tokens` (see some statistics in #78736),
+        // so we are doing as little up-front work as possible.
+        //
+        // This also makes `Parser` very cheap to clone, since
+        // there is no intermediate collection buffer to clone.
+        #[derive(Clone)]
+        struct LazyTokenStreamImpl {
+            start_token: (Token, Spacing),
+            cursor_snapshot: TokenCursor,
+            num_calls: usize,
+            desugar_doc_comments: bool,
+            append_unglued_token: Option<TreeAndSpacing>,
+        }
+        impl CreateTokenStream for LazyTokenStreamImpl {
+            fn create_token_stream(&self) -> TokenStream {
+                // The token produced by the final call to `next` or `next_desugared`
+                // was not actually consumed by the callback. The combination
+                // of chaining the initial token and using `take` produces the desired
+                // result - we produce an empty `TokenStream` if no calls were made,
+                // and omit the final token otherwise.
+                let mut cursor_snapshot = self.cursor_snapshot.clone();
+                let tokens = std::iter::once(self.start_token.clone())
+                    .chain((0..self.num_calls).map(|_| {
+                        if self.desugar_doc_comments {
+                            cursor_snapshot.next_desugared()
+                        } else {
+                            cursor_snapshot.next()
+                        }
+                    }))
+                    .take(self.num_calls);
+
+                make_token_stream(tokens, self.append_unglued_token.clone())
+            }
+        }
+
+        let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
+        match trailing_token {
+            TrailingToken::None => {}
+            TrailingToken::Semi => {
+                assert_eq!(self.token.kind, token::Semi);
+                num_calls += 1;
+            }
+            TrailingToken::MaybeComma => {
+                if self.token.kind == token::Comma {
+                    num_calls += 1;
+                }
+            }
+        }
+
+        let lazy_impl = LazyTokenStreamImpl {
+            start_token,
+            num_calls,
+            cursor_snapshot,
+            desugar_doc_comments: self.desugar_doc_comments,
+            append_unglued_token: self.token_cursor.append_unglued_token.clone(),
+        };
+        ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
+        Ok(ret)
+    }
+}
+
+/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
+/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
+/// of open and close delims.
+fn make_token_stream(
+    tokens: impl Iterator<Item = (Token, Spacing)>,
+    append_unglued_token: Option<TreeAndSpacing>,
+) -> TokenStream {
+    #[derive(Debug)]
+    struct FrameData {
+        open: Span,
+        inner: Vec<(TokenTree, Spacing)>,
+    }
+    let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }];
+    for (token, spacing) in tokens {
+        match token {
+            Token { kind: TokenKind::OpenDelim(_), span } => {
+                stack.push(FrameData { open: span, inner: vec![] });
+            }
+            Token { kind: TokenKind::CloseDelim(delim), span } => {
+                let frame_data = stack.pop().expect("Token stack was empty!");
+                let dspan = DelimSpan::from_pair(frame_data.open, span);
+                let stream = TokenStream::new(frame_data.inner);
+                let delimited = TokenTree::Delimited(dspan, delim, stream);
+                stack
+                    .last_mut()
+                    .unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!"))
+                    .inner
+                    .push((delimited, Spacing::Alone));
+            }
+            token => {
+                stack
+                    .last_mut()
+                    .expect("Bottom token frame is missing!")
+                    .inner
+                    .push((TokenTree::Token(token), spacing));
+            }
+        }
+    }
+    let mut final_buf = stack.pop().expect("Missing final buf!");
+    final_buf.inner.extend(append_unglued_token);
+    assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
+    TokenStream::new(final_buf.inner)
+}
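For orientation, here is a minimal sketch (not part of the commit) of how a parsing function is expected to thread an `AttrWrapper` through `collect_tokens_trailing_token`. The names `parse_foo`, `parse_foo_inner`, and `Foo` are hypothetical; only `AttrWrapper`, `ForceCollect`, `TrailingToken`, and `collect_tokens_trailing_token` come from the file above.

    // Hypothetical caller, for illustration only.
    impl<'a> Parser<'a> {
        fn parse_foo(
            &mut self,
            attrs: AttrWrapper,          // opaque: the raw attrs are hidden here
            force_collect: ForceCollect,
        ) -> PResult<'a, Foo> {
            self.collect_tokens_trailing_token(attrs, force_collect, |this, attrs| {
                // Only inside the callback does the raw `Vec<ast::Attribute>`
                // become visible, after token collection has been arranged.
                let foo = this.parse_foo_inner(attrs)?;
                // Report whether a trailing token (e.g. a `;`) should be captured.
                Ok((foo, TrailingToken::None))
            })
        }
    }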
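The stack discipline in `make_token_stream` can also be exercised in isolation. Below is a self-contained toy, using plain `char` tokens in place of rustc's `Token`/`TokenStream` types, that mirrors the same scheme: push a frame at each open delimiter, pop it at the matching close delimiter, and fold the popped frame into its parent as a single delimited tree. `Tree` and `build_tree` are made-up stand-ins.

    #[derive(Debug)]
    enum Tree {
        Token(char),
        Delimited(Vec<Tree>),
    }

    fn build_tree(tokens: impl Iterator<Item = char>) -> Vec<Tree> {
        // The bottom frame collects the top-level trees.
        let mut stack: Vec<Vec<Tree>> = vec![Vec::new()];
        for tok in tokens {
            match tok {
                '(' => stack.push(Vec::new()), // open delimiter: new frame
                ')' => {
                    // Close delimiter: fold the frame into its parent.
                    let inner = stack.pop().expect("unbalanced close delimiter");
                    stack.last_mut().expect("bottom frame missing").push(Tree::Delimited(inner));
                }
                other => stack.last_mut().expect("bottom frame missing").push(Tree::Token(other)),
            }
        }
        assert_eq!(stack.len(), 1, "unclosed open delimiter");
        stack.pop().unwrap()
    }

    fn main() {
        // "a(bc)d" => [Token('a'), Delimited([Token('b'), Token('c')]), Token('d')]
        println!("{:?}", build_tree("a(bc)d".chars()));
    }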
compiler/rustc_parse/src/parser/expr.rs

@@ -1,9 +1,7 @@
 use super::pat::{GateOr, RecoverComma, PARAM_EXPECTED};
 use super::ty::{AllowPlus, RecoverQPath, RecoverReturnSign};
-use super::{
-    AttrWrapper, BlockMode, ForceCollect, Parser, PathStyle, Restrictions, TokenType, TrailingToken,
-};
-use super::{SemiColonMode, SeqSep, TokenExpectType};
+use super::{AttrWrapper, BlockMode, ForceCollect, Parser, PathStyle, Restrictions, TokenType};
+use super::{SemiColonMode, SeqSep, TokenExpectType, TrailingToken};
 use crate::maybe_recover_from_interpolated_ty_qpath;

 use rustc_ast::ptr::P;
@@ -461,16 +459,11 @@ impl<'a> Parser<'a> {
             _ => RangeLimits::Closed,
         };
         let op = AssocOp::from_token(&self.token);
+        // FIXME: `parse_prefix_range_expr` is called when the current
+        // token is `DotDot`, `DotDotDot`, or `DotDotEq`. If we haven't already
+        // parsed attributes, then trying to parse them here will always fail.
+        // We should figure out how we want attributes on range expressions to work.
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
-        // RESOLVED: It looks like we only have non-empty attributes here when
-        // this is used as a statement:
-        // `#[my_attr] 25..;`
-        // We should still investigate `parse_or_use_outer_attributes`, since we haven't
-        // yet eaten the '..'
-        //
-        // FIXME - does this code ever have attributes? `let a = #[attr] ..` doesn't even parse
-        // // We try to parse attributes *before* bumping the token, so this can only
-        // ever succeed if the `attrs` parameter is `Some`
         self.collect_tokens_for_expr(attrs, |this, attrs| {
             let lo = this.token.span;
             this.bump();
@@ -518,8 +511,6 @@ impl<'a> Parser<'a> {
                 make_it!(this, attrs, |this, _| this.parse_box_expr(lo))
             }
             token::Ident(..) if this.is_mistaken_not_ident_negation() => {
-                // FIXME - what is our policy for handling tokens during recovery?
-                // Should we ever invoke a proc-macro with these tokens?
                 make_it!(this, attrs, |this, _| this.recover_not_expr(lo))
             }
             _ => return this.parse_dot_or_call_expr(Some(attrs.into())),
compiler/rustc_parse/src/parser/mod.rs

@@ -1,4 +1,5 @@
 pub mod attr;
+mod attr_wrapper;
 mod diagnostics;
 mod expr;
 mod generics;
@@ -10,14 +11,15 @@ mod stmt;
 mod ty;

 use crate::lexer::UnmatchedBrace;
+pub use attr_wrapper::AttrWrapper;
 pub use diagnostics::AttemptLocalParseRecovery;
 use diagnostics::Error;
 pub use path::PathStyle;

 use rustc_ast::ptr::P;
 use rustc_ast::token::{self, DelimToken, Token, TokenKind};
-use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
-use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
+use rustc_ast::tokenstream::{self, DelimSpan, Spacing};
+use rustc_ast::tokenstream::{TokenStream, TokenTree, TreeAndSpacing};
 use rustc_ast::DUMMY_NODE_ID;
 use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, HasTokens};
 use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit, Unsafe};
@@ -69,21 +71,6 @@ pub enum TrailingToken {
     MaybeComma,
 }

-#[derive(Debug, Clone)]
-pub struct AttrWrapper {
-    attrs: Vec<ast::Attribute>,
-}
-
-impl AttrWrapper {
-    // FIXME: Delay span bug here?
-    fn take_for_recovery(self) -> Vec<ast::Attribute> {
-        self.attrs
-    }
-    fn is_empty(&self) -> bool {
-        self.attrs.is_empty()
-    }
-}
-
 /// Like `maybe_whole_expr`, but for things other than expressions.
 #[macro_export]
 macro_rules! maybe_whole {
@@ -999,7 +986,7 @@ impl<'a> Parser<'a> {
         }

         // Collect tokens because they are used during lowering to HIR.
-        let expr = self.collect_tokens(|this| this.parse_expr())?;
+        let expr = self.collect_tokens_no_attrs(|this| this.parse_expr())?;
         let span = expr.span;

         match &expr.kind {
@@ -1251,108 +1238,12 @@ impl<'a> Parser<'a> {
         // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
         // `ForceCollect::Yes`
         self.collect_tokens_trailing_token(
-            AttrWrapper { attrs: Vec::new() },
+            AttrWrapper::empty(),
             ForceCollect::Yes,
             |this, _attrs| Ok((f(this)?, TrailingToken::None)),
         )
     }
-
-    /// Records all tokens consumed by the provided callback,
-    /// including the current token. These tokens are collected
-    /// into a `LazyTokenStream`, and returned along with the result
-    /// of the callback.
-    ///
-    /// Note: If your callback consumes an opening delimiter
-    /// (including the case where you call `collect_tokens`
-    /// when the current token is an opening delimiter),
-    /// you must also consume the corresponding closing delimiter.
-    ///
-    /// That is, you can consume
-    /// `something ([{ }])` or `([{}])`, but not `([{}]`
-    ///
-    /// This restriction shouldn't be an issue in practice,
-    /// since this function is used to record the tokens for
-    /// a parsed AST item, which always has matching delimiters.
-    pub fn collect_tokens_trailing_token<R: HasTokens>(
-        &mut self,
-        attrs: AttrWrapper,
-        force_collect: ForceCollect,
-        f: impl FnOnce(&mut Self, Vec<ast::Attribute>) -> PResult<'a, (R, TrailingToken)>,
-    ) -> PResult<'a, R> {
-        if matches!(force_collect, ForceCollect::No) && !attr::maybe_needs_tokens(&attrs.attrs) {
-            return Ok(f(self, attrs.attrs)?.0);
-        }
-        let start_token = (self.token.clone(), self.token_spacing);
-        let cursor_snapshot = self.token_cursor.clone();
-
-        let (mut ret, trailing_token) = f(self, attrs.attrs)?;
-
-        // Produces a `TokenStream` on-demand. Using `cursor_snapshot`
-        // and `num_calls`, we can reconstruct the `TokenStream` seen
-        // by the callback. This allows us to avoid producing a `TokenStream`
-        // if it is never needed - for example, a captured `macro_rules!`
-        // argument that is never passed to a proc macro.
-        // In practice, token stream creation happens rarely compared to
-        // calls to `collect_tokens` (see some statistics in #78736),
-        // so we are doing as little up-front work as possible.
-        //
-        // This also makes `Parser` very cheap to clone, since
-        // there is no intermediate collection buffer to clone.
-        #[derive(Clone)]
-        struct LazyTokenStreamImpl {
-            start_token: (Token, Spacing),
-            cursor_snapshot: TokenCursor,
-            num_calls: usize,
-            desugar_doc_comments: bool,
-            append_unglued_token: Option<TreeAndSpacing>,
-        }
-        impl CreateTokenStream for LazyTokenStreamImpl {
-            fn create_token_stream(&self) -> TokenStream {
-                // The token produced by the final call to `next` or `next_desugared`
-                // was not actually consumed by the callback. The combination
-                // of chaining the initial token and using `take` produces the desired
-                // result - we produce an empty `TokenStream` if no calls were made,
-                // and omit the final token otherwise.
-                let mut cursor_snapshot = self.cursor_snapshot.clone();
-                let tokens = std::iter::once(self.start_token.clone())
-                    .chain((0..self.num_calls).map(|_| {
-                        if self.desugar_doc_comments {
-                            cursor_snapshot.next_desugared()
-                        } else {
-                            cursor_snapshot.next()
-                        }
-                    }))
-                    .take(self.num_calls);
-
-                make_token_stream(tokens, self.append_unglued_token.clone())
-            }
-        }
-
-        let mut num_calls = self.token_cursor.num_next_calls - cursor_snapshot.num_next_calls;
-        match trailing_token {
-            TrailingToken::None => {}
-            TrailingToken::Semi => {
-                assert_eq!(self.token.kind, token::Semi);
-                num_calls += 1;
-            }
-            TrailingToken::MaybeComma => {
-                if self.token.kind == token::Comma {
-                    num_calls += 1;
-                }
-            }
-        }
-
-        let lazy_impl = LazyTokenStreamImpl {
-            start_token,
-            num_calls,
-            cursor_snapshot,
-            desugar_doc_comments: self.desugar_doc_comments,
-            append_unglued_token: self.token_cursor.append_unglued_token.clone(),
-        };
-        ret.finalize_tokens(LazyTokenStream::new(lazy_impl));
-        Ok(ret)
-    }

     /// `::{` or `::*`
     fn is_import_coupler(&mut self) -> bool {
         self.check(&token::ModSep)
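The `LazyTokenStreamImpl` machinery removed above (it now lives in `attr_wrapper.rs`) is built around a snapshot-and-replay idea: record where collection started and how many tokens the callback pulled, and only re-walk that range if the stream is actually requested. A standalone toy with made-up types (`LazyStream` stands in for `LazyTokenStreamImpl`, a `Vec<char>` for the `TokenCursor` snapshot), to illustrate just that scheme:

    #[derive(Clone)]
    struct LazyStream {
        source: Vec<char>, // stands in for the cheap `TokenCursor` snapshot
        start: usize,      // position when collection began
        num_calls: usize,  // tokens consumed by the parsing callback
    }

    impl LazyStream {
        // Materialize the consumed range on demand; nothing is buffered eagerly.
        fn create(&self) -> String {
            self.source[self.start..self.start + self.num_calls].iter().collect()
        }
    }

    fn main() {
        let lazy = LazyStream { source: "fn main() {}".chars().collect(), start: 0, num_calls: 2 };
        assert_eq!(lazy.create(), "fn"); // replays only the two consumed tokens
        println!("{}", lazy.create());
    }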
@@ -1399,47 +1290,3 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &Pa
         }
     }
 }
-
-/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
-/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
-/// of open and close delims.
-fn make_token_stream(
-    tokens: impl Iterator<Item = (Token, Spacing)>,
-    append_unglued_token: Option<TreeAndSpacing>,
-) -> TokenStream {
-    #[derive(Debug)]
-    struct FrameData {
-        open: Span,
-        inner: Vec<(TokenTree, Spacing)>,
-    }
-    let mut stack = vec![FrameData { open: DUMMY_SP, inner: vec![] }];
-    for (token, spacing) in tokens {
-        match token {
-            Token { kind: TokenKind::OpenDelim(_), span } => {
-                stack.push(FrameData { open: span, inner: vec![] });
-            }
-            Token { kind: TokenKind::CloseDelim(delim), span } => {
-                let frame_data = stack.pop().expect("Token stack was empty!");
-                let dspan = DelimSpan::from_pair(frame_data.open, span);
-                let stream = TokenStream::new(frame_data.inner);
-                let delimited = TokenTree::Delimited(dspan, delim, stream);
-                stack
-                    .last_mut()
-                    .unwrap_or_else(|| panic!("Bottom token frame is missing for tokens!"))
-                    .inner
-                    .push((delimited, Spacing::Alone));
-            }
-            token => {
-                stack
-                    .last_mut()
-                    .expect("Bottom token frame is missing!")
-                    .inner
-                    .push((TokenTree::Token(token), spacing));
-            }
-        }
-    }
-    let mut final_buf = stack.pop().expect("Missing final buf!");
-    final_buf.inner.extend(append_unglued_token);
-    assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
-    TokenStream::new(final_buf.inner)
-}
compiler/rustc_parse/src/parser/nonterminal.rs

@@ -108,7 +108,7 @@ impl<'a> Parser<'a> {
                 }
             },
             NonterminalKind::Block => {
-                // While an block *expression* may have attributes (e.g. `#[my_attr] { ... }`),
+                // While a block *expression* may have attributes (e.g. `#[my_attr] { ... }`),
                 // the ':block' matcher does not support them
                 token::NtBlock(self.collect_tokens_no_attrs(|this| this.parse_block())?)
             }
compiler/rustc_parse/src/parser/stmt.rs

@@ -3,9 +3,8 @@ use super::diagnostics::{AttemptLocalParseRecovery, Error};
 use super::expr::LhsExpr;
 use super::pat::{GateOr, RecoverComma};
 use super::path::PathStyle;
-use super::{
-    AttrWrapper, BlockMode, ForceCollect, Parser, Restrictions, SemiColonMode, TrailingToken,
-};
+use super::TrailingToken;
+use super::{AttrWrapper, BlockMode, ForceCollect, Parser, Restrictions, SemiColonMode};
 use crate::maybe_whole;

 use rustc_ast as ast;