Rename `{Create,Lazy}TokenStream` as `{To,Lazy}AttrTokenStream`.
`To` is better than `Create` for indicating that this is a non-consuming conversion, rather than creating something out of nothing. `Attr` is added because the current names make them sound like they relate to `TokenStream`, when they really relate to `AttrTokenStream`.
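The `to_` prefix here matches the standard Rust naming convention for conversions: `as_` for cheap borrowed views, `to_` for potentially expensive conversions that leave the receiver intact, and `into_` for conversions that consume it. A minimal sketch of that distinction, using made-up types unrelated to this commit:

```rust
struct Meters(f64);
struct Feet(f64);

impl Meters {
    // `to_*`: borrows the receiver and computes a new value; this is the
    // shape of the `to_attr_token_stream` method introduced by this commit.
    fn to_feet(&self) -> Feet {
        Feet(self.0 * 3.28084)
    }

    // `into_*`: consumes the receiver.
    fn into_feet(self) -> Feet {
        Feet(self.0 * 3.28084)
    }
}

fn main() {
    let m = Meters(2.0);
    let f = m.to_feet(); // `m` is still usable here
    let g = m.into_feet(); // `m` is moved and gone
    println!("{} {}", f.0, g.0);
}
```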
parent f6c9e1df59
commit d2df07c425
9 changed files with 84 additions and 81 deletions
@@ -24,7 +24,7 @@ pub use UnsafeSource::*;
 
 use crate::ptr::P;
 use crate::token::{self, CommentKind, Delimiter};
-use crate::tokenstream::{DelimSpan, LazyTokenStream, TokenStream};
+use crate::tokenstream::{DelimSpan, LazyAttrTokenStream, TokenStream};
 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
 use rustc_data_structures::stack::ensure_sufficient_stack;
 use rustc_data_structures::sync::Lrc;

@@ -92,7 +92,7 @@ pub struct Path {
     /// The segments in the path: the things separated by `::`.
     /// Global paths begin with `kw::PathRoot`.
     pub segments: Vec<PathSegment>,
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
 }
 
 impl PartialEq<Symbol> for Path {

@@ -564,7 +564,7 @@ pub struct Block {
     /// Distinguishes between `unsafe { ... }` and `{ ... }`.
     pub rules: BlockCheckMode,
     pub span: Span,
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
     /// The following *isn't* a parse error, but will cause multiple errors in following stages.
     /// ```compile_fail
     /// let x = {

@@ -583,7 +583,7 @@ pub struct Pat {
     pub id: NodeId,
     pub kind: PatKind,
     pub span: Span,
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
 }
 
 impl Pat {

@@ -967,8 +967,8 @@ impl Stmt {
     /// a trailing semicolon.
     ///
     /// This only modifies the parsed AST struct, not the attached
-    /// `LazyTokenStream`. The parser is responsible for calling
-    /// `CreateTokenStream::add_trailing_semi` when there is actually
+    /// `LazyAttrTokenStream`. The parser is responsible for calling
+    /// `ToAttrTokenStream::add_trailing_semi` when there is actually
    /// a semicolon in the tokenstream.
     pub fn add_trailing_semicolon(mut self) -> Self {
         self.kind = match self.kind {

@@ -1014,7 +1014,7 @@ pub struct MacCallStmt {
     pub mac: P<MacCall>,
     pub style: MacStmtStyle,
     pub attrs: AttrVec,
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
 }
 
 #[derive(Clone, Copy, PartialEq, Encodable, Decodable, Debug)]

@@ -1039,7 +1039,7 @@ pub struct Local {
     pub kind: LocalKind,
     pub span: Span,
     pub attrs: AttrVec,
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
 }
 
 #[derive(Clone, Encodable, Decodable, Debug)]

@@ -1138,7 +1138,7 @@ pub struct Expr {
     pub kind: ExprKind,
     pub span: Span,
     pub attrs: AttrVec,
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
 }
 
 impl Expr {

@@ -1997,7 +1997,7 @@ pub struct Ty {
     pub id: NodeId,
     pub kind: TyKind,
     pub span: Span,
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
 }
 
 impl Clone for Ty {

@@ -2562,7 +2562,7 @@ impl<D: Decoder> Decodable<D> for AttrId {
 pub struct AttrItem {
     pub path: Path,
     pub args: MacArgs,
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
 }
 
 /// A list of attributes.

@@ -2582,7 +2582,7 @@ pub struct Attribute {
 #[derive(Clone, Encodable, Decodable, Debug)]
 pub struct NormalAttr {
     pub item: AttrItem,
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
 }
 
 #[derive(Clone, Encodable, Decodable, Debug)]

@@ -2633,7 +2633,7 @@ impl PolyTraitRef {
 pub struct Visibility {
     pub kind: VisibilityKind,
     pub span: Span,
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
 }
 
 #[derive(Clone, Encodable, Decodable, Debug)]

@@ -2719,7 +2719,7 @@ pub struct Item<K = ItemKind> {
     ///
     /// Note that the tokens here do not include the outer attributes, but will
     /// include inner attributes.
-    pub tokens: Option<LazyTokenStream>,
+    pub tokens: Option<LazyAttrTokenStream>,
 }
 
 impl Item {

@@ -4,7 +4,7 @@
 
 use crate::ptr::P;
 use crate::token::Nonterminal;
-use crate::tokenstream::LazyTokenStream;
+use crate::tokenstream::LazyAttrTokenStream;
 use crate::{Arm, Crate, ExprField, FieldDef, GenericParam, Param, PatField, Variant};
 use crate::{AssocItem, Expr, ForeignItem, Item, NodeId};
 use crate::{AttrItem, AttrKind, Block, Pat, Path, Ty, Visibility};

@@ -124,18 +124,18 @@ impl HasSpan for AttrItem {
 
 /// A trait for AST nodes having (or not having) collected tokens.
 pub trait HasTokens {
-    fn tokens(&self) -> Option<&LazyTokenStream>;
-    fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>>;
+    fn tokens(&self) -> Option<&LazyAttrTokenStream>;
+    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>>;
 }
 
 macro_rules! impl_has_tokens {
     ($($T:ty),+ $(,)?) => {
         $(
             impl HasTokens for $T {
-                fn tokens(&self) -> Option<&LazyTokenStream> {
+                fn tokens(&self) -> Option<&LazyAttrTokenStream> {
                     self.tokens.as_ref()
                 }
-                fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
+                fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
                     Some(&mut self.tokens)
                 }
             }

@@ -147,10 +147,10 @@ macro_rules! impl_has_tokens_none {
     ($($T:ty),+ $(,)?) => {
         $(
             impl HasTokens for $T {
-                fn tokens(&self) -> Option<&LazyTokenStream> {
+                fn tokens(&self) -> Option<&LazyAttrTokenStream> {
                     None
                 }
-                fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
+                fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
                     None
                 }
             }

@@ -162,25 +162,25 @@ impl_has_tokens!(AssocItem, AttrItem, Block, Expr, ForeignItem, Item, Pat, Path,
 impl_has_tokens_none!(Arm, ExprField, FieldDef, GenericParam, Param, PatField, Variant);
 
 impl<T: AstDeref<Target: HasTokens>> HasTokens for T {
-    fn tokens(&self) -> Option<&LazyTokenStream> {
+    fn tokens(&self) -> Option<&LazyAttrTokenStream> {
         self.ast_deref().tokens()
     }
-    fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
+    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
         self.ast_deref_mut().tokens_mut()
     }
 }
 
 impl<T: HasTokens> HasTokens for Option<T> {
-    fn tokens(&self) -> Option<&LazyTokenStream> {
+    fn tokens(&self) -> Option<&LazyAttrTokenStream> {
         self.as_ref().and_then(|inner| inner.tokens())
     }
-    fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
+    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
         self.as_mut().and_then(|inner| inner.tokens_mut())
     }
 }
 
 impl HasTokens for StmtKind {
-    fn tokens(&self) -> Option<&LazyTokenStream> {
+    fn tokens(&self) -> Option<&LazyAttrTokenStream> {
         match self {
             StmtKind::Local(local) => local.tokens.as_ref(),
             StmtKind::Item(item) => item.tokens(),

@@ -189,7 +189,7 @@ impl HasTokens for StmtKind {
             StmtKind::MacCall(mac) => mac.tokens.as_ref(),
         }
     }
-    fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
+    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
         match self {
             StmtKind::Local(local) => Some(&mut local.tokens),
             StmtKind::Item(item) => item.tokens_mut(),

@@ -201,16 +201,16 @@ impl HasTokens for StmtKind {
 }
 
 impl HasTokens for Stmt {
-    fn tokens(&self) -> Option<&LazyTokenStream> {
+    fn tokens(&self) -> Option<&LazyAttrTokenStream> {
         self.kind.tokens()
     }
-    fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
+    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
         self.kind.tokens_mut()
     }
 }
 
 impl HasTokens for Attribute {
-    fn tokens(&self) -> Option<&LazyTokenStream> {
+    fn tokens(&self) -> Option<&LazyAttrTokenStream> {
         match &self.kind {
             AttrKind::Normal(normal) => normal.tokens.as_ref(),
             kind @ AttrKind::DocComment(..) => {

@@ -218,7 +218,7 @@ impl HasTokens for Attribute {
             }
         }
     }
-    fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
+    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
         Some(match &mut self.kind {
             AttrKind::Normal(normal) => &mut normal.tokens,
             kind @ AttrKind::DocComment(..) => {

@@ -229,7 +229,7 @@ impl HasTokens for Attribute {
 }
 
 impl HasTokens for Nonterminal {
-    fn tokens(&self) -> Option<&LazyTokenStream> {
+    fn tokens(&self) -> Option<&LazyAttrTokenStream> {
         match self {
             Nonterminal::NtItem(item) => item.tokens(),
             Nonterminal::NtStmt(stmt) => stmt.tokens(),

@@ -243,7 +243,7 @@ impl HasTokens for Nonterminal {
             Nonterminal::NtIdent(..) | Nonterminal::NtLifetime(..) => None,
         }
     }
-    fn tokens_mut(&mut self) -> Option<&mut Option<LazyTokenStream>> {
+    fn tokens_mut(&mut self) -> Option<&mut Option<LazyAttrTokenStream>> {
         match self {
             Nonterminal::NtItem(item) => item.tokens_mut(),
             Nonterminal::NtStmt(stmt) => stmt.tokens_mut(),

@@ -8,7 +8,7 @@ use crate::ast::{Path, PathSegment};
 use crate::ptr::P;
 use crate::token::{self, CommentKind, Delimiter, Token};
 use crate::tokenstream::{DelimSpan, Spacing, TokenTree};
-use crate::tokenstream::{LazyTokenStream, TokenStream};
+use crate::tokenstream::{LazyAttrTokenStream, TokenStream};
 use crate::util::comments;
 
 use rustc_index::bit_set::GrowableBitSet;

@@ -301,7 +301,7 @@ impl Attribute {
                 .tokens
                 .as_ref()
                 .unwrap_or_else(|| panic!("attribute is missing tokens: {:?}", self))
-                .create_token_stream()
+                .to_attr_token_stream()
                 .to_tokenstream(),
             AttrKind::DocComment(comment_kind, data) => TokenStream::new(vec![TokenTree::Token(
                 Token::new(token::DocComment(comment_kind, self.style, data), self.span),

@@ -353,7 +353,7 @@ pub fn mk_attr(style: AttrStyle, path: Path, args: MacArgs, span: Span) -> Attri
 
 pub fn mk_attr_from_item(
     item: AttrItem,
-    tokens: Option<LazyTokenStream>,
+    tokens: Option<LazyAttrTokenStream>,
     style: AttrStyle,
     span: Span,
 ) -> Attribute {

@@ -697,17 +697,20 @@ pub fn visit_attr_tts<T: MutVisitor>(AttrTokenStream(tts): &mut AttrTokenStream,
     }
 }
 
-pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(lazy_tts: Option<&mut LazyTokenStream>, vis: &mut T) {
+pub fn visit_lazy_tts_opt_mut<T: MutVisitor>(
+    lazy_tts: Option<&mut LazyAttrTokenStream>,
+    vis: &mut T,
+) {
     if T::VISIT_TOKENS {
         if let Some(lazy_tts) = lazy_tts {
-            let mut tts = lazy_tts.create_token_stream();
+            let mut tts = lazy_tts.to_attr_token_stream();
             visit_attr_tts(&mut tts, vis);
-            *lazy_tts = LazyTokenStream::new(tts);
+            *lazy_tts = LazyAttrTokenStream::new(tts);
         }
     }
 }
 
-pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyTokenStream>, vis: &mut T) {
+pub fn visit_lazy_tts<T: MutVisitor>(lazy_tts: &mut Option<LazyAttrTokenStream>, vis: &mut T) {
     visit_lazy_tts_opt_mut(lazy_tts.as_mut(), vis);
 }
 

@@ -121,12 +121,12 @@ where
     }
 }
 
-pub trait CreateTokenStream: sync::Send + sync::Sync {
-    fn create_token_stream(&self) -> AttrTokenStream;
+pub trait ToAttrTokenStream: sync::Send + sync::Sync {
+    fn to_attr_token_stream(&self) -> AttrTokenStream;
 }
 
-impl CreateTokenStream for AttrTokenStream {
-    fn create_token_stream(&self) -> AttrTokenStream {
+impl ToAttrTokenStream for AttrTokenStream {
+    fn to_attr_token_stream(&self) -> AttrTokenStream {
         self.clone()
     }
 }

@@ -135,40 +135,40 @@ impl CreateTokenStream for AttrTokenStream {
 /// of an actual `TokenStream` until it is needed.
 /// `Box` is here only to reduce the structure size.
 #[derive(Clone)]
-pub struct LazyTokenStream(Lrc<Box<dyn CreateTokenStream>>);
+pub struct LazyAttrTokenStream(Lrc<Box<dyn ToAttrTokenStream>>);
 
-impl LazyTokenStream {
-    pub fn new(inner: impl CreateTokenStream + 'static) -> LazyTokenStream {
-        LazyTokenStream(Lrc::new(Box::new(inner)))
+impl LazyAttrTokenStream {
+    pub fn new(inner: impl ToAttrTokenStream + 'static) -> LazyAttrTokenStream {
+        LazyAttrTokenStream(Lrc::new(Box::new(inner)))
     }
 
-    pub fn create_token_stream(&self) -> AttrTokenStream {
-        self.0.create_token_stream()
+    pub fn to_attr_token_stream(&self) -> AttrTokenStream {
+        self.0.to_attr_token_stream()
     }
 }
 
-impl fmt::Debug for LazyTokenStream {
+impl fmt::Debug for LazyAttrTokenStream {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        write!(f, "LazyTokenStream({:?})", self.create_token_stream())
+        write!(f, "LazyAttrTokenStream({:?})", self.to_attr_token_stream())
     }
 }
 
-impl<S: Encoder> Encodable<S> for LazyTokenStream {
+impl<S: Encoder> Encodable<S> for LazyAttrTokenStream {
     fn encode(&self, s: &mut S) {
         // Used by AST json printing.
-        Encodable::encode(&self.create_token_stream(), s);
+        Encodable::encode(&self.to_attr_token_stream(), s);
     }
 }
 
-impl<D: Decoder> Decodable<D> for LazyTokenStream {
+impl<D: Decoder> Decodable<D> for LazyAttrTokenStream {
     fn decode(_d: &mut D) -> Self {
-        panic!("Attempted to decode LazyTokenStream");
+        panic!("Attempted to decode LazyAttrTokenStream");
     }
 }
 
-impl<CTX> HashStable<CTX> for LazyTokenStream {
+impl<CTX> HashStable<CTX> for LazyAttrTokenStream {
     fn hash_stable(&self, _hcx: &mut CTX, _hasher: &mut StableHasher) {
-        panic!("Attempted to compute stable hash for LazyTokenStream");
+        panic!("Attempted to compute stable hash for LazyAttrTokenStream");
     }
 }
 

@@ -224,7 +224,7 @@ impl AttrTokenStream {
 
                 let mut target_tokens: Vec<_> = data
                     .tokens
-                    .create_token_stream()
+                    .to_attr_token_stream()
                     .to_tokenstream()
                     .0
                     .iter()

@@ -296,7 +296,7 @@ pub struct AttributesData {
     pub attrs: AttrVec,
     /// The underlying tokens for the attribute target that `attrs`
     /// are applied to
-    pub tokens: LazyTokenStream,
+    pub tokens: LazyAttrTokenStream,
 }
 
 /// A `TokenStream` is an abstract sequence of tokens, organized into [`TokenTree`]s.

@@ -431,7 +431,7 @@ impl TokenStream {
         };
         let attrs = node.attrs();
         let attr_stream = if attrs.is_empty() {
-            tokens.create_token_stream()
+            tokens.to_attr_token_stream()
         } else {
             let attr_data =
                 AttributesData { attrs: attrs.iter().cloned().collect(), tokens: tokens.clone() };

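The two hunks above contain the heart of the rename: a trait for anything that can produce an `AttrTokenStream` on demand, and a cheaply clonable handle that defers that production behind a shared trait object. The following is a self-contained sketch of the same pattern, not the actual rustc code: `Arc` stands in for `Lrc`, a plain `Vec<String>` stands in for the real token stream, and `Arc<dyn _>` replaces the `Lrc<Box<dyn _>>` above (whose extra `Box` is only a size optimization).

```rust
use std::sync::Arc;

// Stand-in for rustc's `AttrTokenStream`: an eagerly materialized stream.
#[derive(Clone, Debug)]
struct AttrTokenStream(Vec<String>);

// Anything that can produce an `AttrTokenStream` on demand. The `to_`
// prefix signals a non-consuming conversion, as the commit message argues.
trait ToAttrTokenStream: Send + Sync {
    fn to_attr_token_stream(&self) -> AttrTokenStream;
}

// An already-computed stream produces itself by cloning, mirroring the
// impl in the diff above.
impl ToAttrTokenStream for AttrTokenStream {
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        self.clone()
    }
}

// A cheaply clonable lazy handle: nothing is materialized until
// `to_attr_token_stream` is called.
#[derive(Clone)]
struct LazyAttrTokenStream(Arc<dyn ToAttrTokenStream>);

impl LazyAttrTokenStream {
    fn new(inner: impl ToAttrTokenStream + 'static) -> Self {
        LazyAttrTokenStream(Arc::new(inner))
    }
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        self.0.to_attr_token_stream()
    }
}

fn main() {
    let eager = AttrTokenStream(vec!["#".into(), "[".into(), "test".into(), "]".into()]);
    let lazy = LazyAttrTokenStream::new(eager);
    // Cloning the handle is cheap; the stream is produced only on demand.
    let lazy2 = lazy.clone();
    println!("{:?}", lazy2.to_attr_token_stream());
}
```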
@@ -4,7 +4,7 @@ use rustc_ast::ptr::P;
 use rustc_ast::token::{Delimiter, Token, TokenKind};
 use rustc_ast::tokenstream::{AttrTokenStream, AttrTokenTree};
 use rustc_ast::tokenstream::{DelimSpan, Spacing};
-use rustc_ast::tokenstream::{LazyTokenStream, TokenTree};
+use rustc_ast::tokenstream::{LazyAttrTokenStream, TokenTree};
 use rustc_ast::NodeId;
 use rustc_ast::{self as ast, AttrStyle, Attribute, HasAttrs, HasTokens, MetaItem};
 use rustc_attr as attr;

@@ -259,8 +259,8 @@ impl<'a> StripUnconfigured<'a> {
     fn try_configure_tokens<T: HasTokens>(&self, node: &mut T) {
         if self.config_tokens {
             if let Some(Some(tokens)) = node.tokens_mut() {
-                let attr_stream = tokens.create_token_stream();
-                *tokens = LazyTokenStream::new(self.configure_tokens(&attr_stream));
+                let attr_stream = tokens.to_attr_token_stream();
+                *tokens = LazyAttrTokenStream::new(self.configure_tokens(&attr_stream));
             }
         }
     }

@@ -295,8 +295,8 @@ impl<'a> StripUnconfigured<'a> {
                 data.attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr));
 
                 if self.in_cfg(&data.attrs) {
-                    data.tokens = LazyTokenStream::new(
-                        self.configure_tokens(&data.tokens.create_token_stream()),
+                    data.tokens = LazyAttrTokenStream::new(
+                        self.configure_tokens(&data.tokens.to_attr_token_stream()),
                     );
                     Some(AttrTokenTree::Attributes(data)).into_iter()
                 } else {

@@ -420,10 +420,10 @@ impl<'a> StripUnconfigured<'a> {
             item.tokens
                 .as_ref()
                 .unwrap_or_else(|| panic!("Missing tokens for {:?}", item))
-                .create_token_stream(),
+                .to_attr_token_stream(),
         );
         trees.push(bracket_group);
-        let tokens = Some(LazyTokenStream::new(AttrTokenStream::new(trees)));
+        let tokens = Some(LazyAttrTokenStream::new(AttrTokenStream::new(trees)));
         let attr = attr::mk_attr_from_item(item, tokens, attr.style, item_span);
         if attr.has_name(sym::crate_type) {
             self.sess.parse_sess.buffer_lint(

@@ -301,7 +301,7 @@ impl<'a> Parser<'a> {
         if let Some(attr) = attr {
             let end_pos: u32 = self.token_cursor.num_next_calls.try_into().unwrap();
             // If we are currently capturing tokens, mark the location of this inner attribute.
-            // If capturing ends up creating a `LazyTokenStream`, we will include
+            // If capturing ends up creating a `LazyAttrTokenStream`, we will include
             // this replace range with it, removing the inner attribute from the final
             // `AttrTokenStream`. Inner attributes are stored in the parsed AST note.
             // During macro expansion, they are selectively inserted back into the

@@ -1,7 +1,7 @@
 use super::{Capturing, FlatToken, ForceCollect, Parser, ReplaceRange, TokenCursor, TrailingToken};
 use rustc_ast::token::{self, Delimiter, Token, TokenKind};
-use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, CreateTokenStream};
-use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyTokenStream, Spacing};
+use rustc_ast::tokenstream::{AttrTokenStream, AttributesData, ToAttrTokenStream};
+use rustc_ast::tokenstream::{AttrTokenTree, DelimSpan, LazyAttrTokenStream, Spacing};
 use rustc_ast::{self as ast};
 use rustc_ast::{AttrVec, Attribute, HasAttrs, HasTokens};
 use rustc_errors::PResult;

@@ -88,7 +88,7 @@ fn has_cfg_or_cfg_attr(attrs: &[Attribute]) -> bool {
 // This also makes `Parser` very cheap to clone, since
 // there is no intermediate collection buffer to clone.
 #[derive(Clone)]
-struct LazyTokenStreamImpl {
+struct LazyAttrTokenStreamImpl {
     start_token: (Token, Spacing),
     cursor_snapshot: TokenCursor,
     num_calls: usize,

@@ -97,10 +97,10 @@ struct LazyTokenStreamImpl {
 }
 
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
-rustc_data_structures::static_assert_size!(LazyTokenStreamImpl, 144);
+rustc_data_structures::static_assert_size!(LazyAttrTokenStreamImpl, 144);
 
-impl CreateTokenStream for LazyTokenStreamImpl {
-    fn create_token_stream(&self) -> AttrTokenStream {
+impl ToAttrTokenStream for LazyAttrTokenStreamImpl {
+    fn to_attr_token_stream(&self) -> AttrTokenStream {
         // The token produced by the final call to `{,inlined_}next` was not
         // actually consumed by the callback. The combination of chaining the
         // initial token and using `take` produces the desired result - we

@@ -179,7 +179,7 @@ impl CreateTokenStream for LazyTokenStreamImpl {
 impl<'a> Parser<'a> {
     /// Records all tokens consumed by the provided callback,
     /// including the current token. These tokens are collected
-    /// into a `LazyTokenStream`, and returned along with the result
+    /// into a `LazyAttrTokenStream`, and returned along with the result
     /// of the callback.
     ///
     /// Note: If your callback consumes an opening delimiter

@@ -297,7 +297,7 @@ impl<'a> Parser<'a> {
 
         // If we 'broke' the last token (e.g. breaking a '>>' token to two '>' tokens),
         // then extend the range of captured tokens to include it, since the parser
-        // was not actually bumped past it. When the `LazyTokenStream` gets converted
+        // was not actually bumped past it. When the `LazyAttrTokenStream` gets converted
        // into an `AttrTokenStream`, we will create the proper token.
         if self.token_cursor.break_last_token {
             assert_eq!(

@@ -316,7 +316,7 @@ impl<'a> Parser<'a> {
             Box::new([])
         } else {
             // Grab any replace ranges that occur *inside* the current AST node.
-            // We will perform the actual replacement when we convert the `LazyTokenStream`
+            // We will perform the actual replacement when we convert the `LazyAttrTokenStream`
             // to an `AttrTokenStream`.
             let start_calls: u32 = cursor_snapshot_next_calls.try_into().unwrap();
             self.capture_state.replace_ranges[replace_ranges_start..replace_ranges_end]

@@ -329,7 +329,7 @@ impl<'a> Parser<'a> {
                 .collect()
         };
 
-        let tokens = LazyTokenStream::new(LazyTokenStreamImpl {
+        let tokens = LazyAttrTokenStream::new(LazyAttrTokenStreamImpl {
            start_token,
             num_calls,
             cursor_snapshot,

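`LazyAttrTokenStreamImpl` above shows the genuinely lazy use of the trait: rather than cloning a finished stream, it stores a cursor snapshot and a call count and replays the captured tokens only when asked. A sketch of that flavor, reusing the `AttrTokenStream` and `ToAttrTokenStream` stand-ins from the earlier example; the field names echo the struct above, but the replay logic here is invented for illustration:

```rust
// A genuinely lazy implementor: materializes the captured tokens on demand
// from saved state instead of cloning a finished stream.
struct ReplaySnapshot {
    snapshot: Vec<String>, // stands in for `cursor_snapshot: TokenCursor`
    num_calls: usize,      // how many captured tokens to replay
}

impl ToAttrTokenStream for ReplaySnapshot {
    fn to_attr_token_stream(&self) -> AttrTokenStream {
        // Nothing is computed until some consumer actually needs the stream.
        AttrTokenStream(self.snapshot.iter().take(self.num_calls).cloned().collect())
    }
}
```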
@@ -237,7 +237,7 @@ struct TokenCursor {
     // the trailing `>>` token. The `break_last_token`
     // field is used to track this token - it gets
     // appended to the captured stream when
-    // we evaluate a `LazyTokenStream`
+    // we evaluate a `LazyAttrTokenStream`.
     break_last_token: bool,
 }
 

@@ -1465,7 +1465,7 @@ pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &Pa
 }
 
 /// A helper struct used when building an `AttrTokenStream` from
-/// a `LazyTokenStream`. Both delimiter and non-delimited tokens
+/// a `LazyAttrTokenStream`. Both delimiter and non-delimited tokens
 /// are stored as `FlatToken::Token`. A vector of `FlatToken`s
 /// is then 'parsed' to build up an `AttrTokenStream` with nested
 /// `AttrTokenTree::Delimited` tokens.