Auto merge of #98186 - mystor:tokenstream_as_vec_tt, r=eddyb
Batch proc_macro RPC for TokenStream iteration and combination operations.

This is the first part of #86822, split off as requested in https://github.com/rust-lang/rust/pull/86822#pullrequestreview-1008655452. It reduces the number of RPC calls required for common operations such as iterating over and concatenating TokenStreams.
Commit 0182fd99af
6 changed files with 304 additions and 174 deletions
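
To make the batching concrete before the diff, here is a minimal, hypothetical sketch of the idea (the `Server` type and its `push`/`concat_streams` methods below are illustrative stand-ins, not the real proc_macro bridge API): rather than paying one cross-thread round trip per pushed stream or per iterated token tree, the client buffers items locally and ships them in a single call.

// Hypothetical stand-in for the bridge: each method call models one
// client<->server round trip. Illustration only, not the real API.
struct Server {
    round_trips: usize,
}

impl Server {
    // Old shape: one round trip per pushed stream.
    fn push(&mut self, acc: &mut Vec<String>, stream: String) {
        self.round_trips += 1;
        acc.push(stream);
    }

    // New shape: an optional base plus a whole batch in a single round trip.
    fn concat_streams(&mut self, base: Option<Vec<String>>, batch: Vec<String>) -> Vec<String> {
        self.round_trips += 1;
        let mut out = base.unwrap_or_default();
        out.extend(batch);
        out
    }
}

fn main() {
    let streams = vec!["a".to_string(), "b".to_string(), "c".to_string()];

    // Before: N round trips for N streams.
    let mut old = Server { round_trips: 0 };
    let mut acc = Vec::new();
    for s in streams.clone() {
        old.push(&mut acc, s);
    }
    assert_eq!(old.round_trips, 3);

    // After: buffer locally, then make one batched call.
    let mut new = Server { round_trips: 0 };
    let combined = new.concat_streams(None, streams);
    assert_eq!(new.round_trips, 1);
    assert_eq!(combined, acc);
}

In the actual change below, this role is played by the new `concat_trees`, `concat_streams`, and `into_trees` methods on the bridge's `TokenStream` API, with `ConcatTreesHelper` and `ConcatStreamsHelper` doing the client-side buffering in `proc_macro`.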
@@ -277,12 +277,6 @@ impl ToInternal<rustc_errors::Level> for Level {
pub struct FreeFunctions;

#[derive(Clone)]
pub struct TokenStreamIter {
cursor: tokenstream::Cursor,
stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
}

#[derive(Clone)]
pub struct Group {
delimiter: Delimiter,
@@ -382,8 +376,6 @@ impl<'a, 'b> Rustc<'a, 'b> {
impl server::Types for Rustc<'_, '_> {
type FreeFunctions = FreeFunctions;
type TokenStream = TokenStream;
type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
type TokenStreamIter = TokenStreamIter;
type Group = Group;
type Punct = Punct;
type Ident = Ident;
@@ -408,9 +400,6 @@ impl server::FreeFunctions for Rustc<'_, '_> {
}

impl server::TokenStream for Rustc<'_, '_> {
fn new(&mut self) -> Self::TokenStream {
TokenStream::default()
}
fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
stream.is_empty()
}
@@ -481,53 +470,75 @@ impl server::TokenStream for Rustc<'_, '_> {
) -> Self::TokenStream {
tree.to_internal()
}
fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
TokenStreamIter { cursor: stream.into_trees(), stack: vec![] }
fn concat_trees(
&mut self,
base: Option<Self::TokenStream>,
trees: Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
) -> Self::TokenStream {
let mut builder = tokenstream::TokenStreamBuilder::new();
if let Some(base) = base {
builder.push(base);
}
}

impl server::TokenStreamBuilder for Rustc<'_, '_> {
fn new(&mut self) -> Self::TokenStreamBuilder {
tokenstream::TokenStreamBuilder::new()
for tree in trees {
builder.push(tree.to_internal());
}
fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
builder.push(stream);
}
fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
builder.build()
}
}

impl server::TokenStreamIter for Rustc<'_, '_> {
fn next(
fn concat_streams(
&mut self,
iter: &mut Self::TokenStreamIter,
) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
base: Option<Self::TokenStream>,
streams: Vec<Self::TokenStream>,
) -> Self::TokenStream {
let mut builder = tokenstream::TokenStreamBuilder::new();
if let Some(base) = base {
builder.push(base);
}
for stream in streams {
builder.push(stream);
}
builder.build()
}
fn into_trees(
&mut self,
stream: Self::TokenStream,
) -> Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
// FIXME: This is a raw port of the previous approach (which had a
// `TokenStreamIter` server-side object with a single `next` method),
// and can probably be optimized (for bulk conversion).
let mut cursor = stream.into_trees();
let mut stack = Vec::new();
let mut tts = Vec::new();
loop {
let tree = iter.stack.pop().or_else(|| {
let next = iter.cursor.next_with_spacing()?;
Some(TokenTree::from_internal((next, &mut iter.stack, self)))
})?;
// A hack used to pass AST fragments to attribute and derive macros
// as a single nonterminal token instead of a token stream.
// Such token needs to be "unwrapped" and not represented as a delimited group.
// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
if let TokenTree::Group(ref group) = tree {
let next = stack.pop().or_else(|| {
let next = cursor.next_with_spacing()?;
Some(TokenTree::from_internal((next, &mut stack, self)))
});
match next {
Some(TokenTree::Group(group)) => {
// A hack used to pass AST fragments to attribute and derive
// macros as a single nonterminal token instead of a token
// stream. Such token needs to be "unwrapped" and not
// represented as a delimited group.
// FIXME: It needs to be removed, but there are some
// compatibility issues (see #73345).
if group.flatten {
iter.cursor.append(group.stream.clone());
cursor.append(group.stream);
continue;
}
tts.push(TokenTree::Group(group));
}
Some(tt) => tts.push(tt),
None => return tts,
}
return Some(tree);
}
}
}

impl server::Group for Rustc<'_, '_> {
fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
fn new(&mut self, delimiter: Delimiter, stream: Option<Self::TokenStream>) -> Self::Group {
Group {
delimiter,
stream,
stream: stream.unwrap_or_default(),
span: DelimSpan::from_single(server::Span::call_site(self)),
flatten: false,
}
@@ -178,8 +178,6 @@ define_handles! {
'owned:
FreeFunctions,
TokenStream,
TokenStreamBuilder,
TokenStreamIter,
Group,
Literal,
SourceFile,
@@ -204,12 +202,6 @@ impl Clone for TokenStream {
}
}

impl Clone for TokenStreamIter {
fn clone(&self) -> Self {
self.clone()
}
}

impl Clone for Group {
fn clone(&self) -> Self {
self.clone()
@@ -435,7 +427,7 @@ impl Client<crate::TokenStream, crate::TokenStream> {
Client {
get_handle_counters: HandleCounters::get,
run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
run_client(bridge, |input| f(crate::TokenStream(input)).0)
run_client(bridge, |input| f(crate::TokenStream(Some(input))).0)
}),
_marker: PhantomData,
}
@@ -450,7 +442,7 @@ impl Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream> {
get_handle_counters: HandleCounters::get,
run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
run_client(bridge, |(input, input2)| {
f(crate::TokenStream(input), crate::TokenStream(input2)).0
f(crate::TokenStream(Some(input)), crate::TokenStream(Some(input2))).0
})
}),
_marker: PhantomData,
@@ -60,7 +60,6 @@ macro_rules! with_api {
TokenStream {
fn drop($self: $S::TokenStream);
fn clone($self: &$S::TokenStream) -> $S::TokenStream;
fn new() -> $S::TokenStream;
fn is_empty($self: &$S::TokenStream) -> bool;
fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
fn from_str(src: &str) -> $S::TokenStream;
@@ -68,25 +67,22 @@ macro_rules! with_api {
fn from_token_tree(
tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
) -> $S::TokenStream;
fn into_iter($self: $S::TokenStream) -> $S::TokenStreamIter;
},
TokenStreamBuilder {
fn drop($self: $S::TokenStreamBuilder);
fn new() -> $S::TokenStreamBuilder;
fn push($self: &mut $S::TokenStreamBuilder, stream: $S::TokenStream);
fn build($self: $S::TokenStreamBuilder) -> $S::TokenStream;
},
TokenStreamIter {
fn drop($self: $S::TokenStreamIter);
fn clone($self: &$S::TokenStreamIter) -> $S::TokenStreamIter;
fn next(
$self: &mut $S::TokenStreamIter,
) -> Option<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
fn concat_trees(
base: Option<$S::TokenStream>,
trees: Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>,
) -> $S::TokenStream;
fn concat_streams(
base: Option<$S::TokenStream>,
streams: Vec<$S::TokenStream>,
) -> $S::TokenStream;
fn into_trees(
$self: $S::TokenStream
) -> Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
},
Group {
fn drop($self: $S::Group);
fn clone($self: &$S::Group) -> $S::Group;
fn new(delimiter: Delimiter, stream: $S::TokenStream) -> $S::Group;
fn new(delimiter: Delimiter, stream: Option<$S::TokenStream>) -> $S::Group;
fn delimiter($self: &$S::Group) -> Delimiter;
fn stream($self: &$S::Group) -> $S::TokenStream;
fn span($self: &$S::Group) -> $S::Span;
@@ -311,29 +307,18 @@ impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
}
}

impl<T: Mark> Mark for Option<T> {
type Unmarked = Option<T::Unmarked>;
impl<T: Mark> Mark for Vec<T> {
type Unmarked = Vec<T::Unmarked>;
fn mark(unmarked: Self::Unmarked) -> Self {
unmarked.map(T::mark)
// Should be a no-op due to std's in-place collect optimizations.
unmarked.into_iter().map(T::mark).collect()
}
}
impl<T: Unmark> Unmark for Option<T> {
type Unmarked = Option<T::Unmarked>;
impl<T: Unmark> Unmark for Vec<T> {
type Unmarked = Vec<T::Unmarked>;
fn unmark(self) -> Self::Unmarked {
self.map(T::unmark)
}
}

impl<T: Mark, E: Mark> Mark for Result<T, E> {
type Unmarked = Result<T::Unmarked, E::Unmarked>;
fn mark(unmarked: Self::Unmarked) -> Self {
unmarked.map(T::mark).map_err(E::mark)
}
}
impl<T: Unmark, E: Unmark> Unmark for Result<T, E> {
type Unmarked = Result<T::Unmarked, E::Unmarked>;
fn unmark(self) -> Self::Unmarked {
self.map(T::unmark).map_err(E::unmark)
// Should be a no-op due to std's in-place collect optimizations.
self.into_iter().map(T::unmark).collect()
}
}
@@ -367,7 +352,6 @@ mark_noop! {
Level,
LineColumn,
Spacing,
Bound<usize>,
}

rpc_encode_decode!(
@@ -394,6 +378,61 @@ rpc_encode_decode!(
}
);

macro_rules! mark_compound {
(enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => {
impl<$($T: Mark),+> Mark for $name <$($T),+> {
type Unmarked = $name <$($T::Unmarked),+>;
fn mark(unmarked: Self::Unmarked) -> Self {
match unmarked {
$($name::$variant $(($field))? => {
$name::$variant $((Mark::mark($field)))?
})*
}
}
}

impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
type Unmarked = $name <$($T::Unmarked),+>;
fn unmark(self) -> Self::Unmarked {
match self {
$($name::$variant $(($field))? => {
$name::$variant $((Unmark::unmark($field)))?
})*
}
}
}
}
}

macro_rules! compound_traits {
($($t:tt)*) => {
rpc_encode_decode!($($t)*);
mark_compound!($($t)*);
};
}

compound_traits!(
enum Bound<T> {
Included(x),
Excluded(x),
Unbounded,
}
);

compound_traits!(
enum Option<T> {
Some(t),
None,
}
);

compound_traits!(
enum Result<T, E> {
Ok(t),
Err(e),
}
);

#[derive(Clone)]
pub enum TokenTree<G, P, I, L> {
Group(G),
@@ -402,30 +441,7 @@ pub enum TokenTree<G, P, I, L> {
Literal(L),
}

impl<G: Mark, P: Mark, I: Mark, L: Mark> Mark for TokenTree<G, P, I, L> {
type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
fn mark(unmarked: Self::Unmarked) -> Self {
match unmarked {
TokenTree::Group(tt) => TokenTree::Group(G::mark(tt)),
TokenTree::Punct(tt) => TokenTree::Punct(P::mark(tt)),
TokenTree::Ident(tt) => TokenTree::Ident(I::mark(tt)),
TokenTree::Literal(tt) => TokenTree::Literal(L::mark(tt)),
}
}
}
impl<G: Unmark, P: Unmark, I: Unmark, L: Unmark> Unmark for TokenTree<G, P, I, L> {
type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
fn unmark(self) -> Self::Unmarked {
match self {
TokenTree::Group(tt) => TokenTree::Group(tt.unmark()),
TokenTree::Punct(tt) => TokenTree::Punct(tt.unmark()),
TokenTree::Ident(tt) => TokenTree::Ident(tt.unmark()),
TokenTree::Literal(tt) => TokenTree::Literal(tt.unmark()),
}
}
}

rpc_encode_decode!(
compound_traits!(
enum TokenTree<G, P, I, L> {
Group(tt),
Punct(tt),
@@ -4,7 +4,6 @@ use std::any::Any;
use std::char;
use std::io::Write;
use std::num::NonZeroU32;
use std::ops::Bound;
use std::str;

pub(super) type Writer = super::buffer::Buffer;
@@ -43,15 +42,17 @@ macro_rules! rpc_encode_decode {
}
}
};
(struct $name:ident { $($field:ident),* $(,)? }) => {
impl<S> Encode<S> for $name {
(struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
fn encode(self, w: &mut Writer, s: &mut S) {
$(self.$field.encode(w, s);)*
}
}

impl<S> DecodeMut<'_, '_, S> for $name {
fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
for $name $(<$($T),+>)?
{
fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
$name {
$($field: DecodeMut::decode(r, s)),*
}
@@ -184,28 +185,6 @@ impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> De
}
}

rpc_encode_decode!(
enum Bound<T> {
Included(x),
Excluded(x),
Unbounded,
}
);

rpc_encode_decode!(
enum Option<T> {
None,
Some(x),
}
);

rpc_encode_decode!(
enum Result<T, E> {
Ok(x),
Err(e),
}
);

impl<S> Encode<S> for &[u8] {
fn encode(self, w: &mut Writer, s: &mut S) {
self.len().encode(w, s);
@@ -246,6 +225,26 @@ impl<S> DecodeMut<'_, '_, S> for String {
}
}

impl<S, T: Encode<S>> Encode<S> for Vec<T> {
fn encode(self, w: &mut Writer, s: &mut S) {
self.len().encode(w, s);
for x in self {
x.encode(w, s);
}
}
}

impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec<T> {
fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
let len = usize::decode(r, s);
let mut vec = Vec::with_capacity(len);
for _ in 0..len {
vec.push(T::decode(r, s));
}
vec
}
}

/// Simplified version of panic payloads, ignoring
/// types other than `&'static str` and `String`.
pub enum PanicMessage {
@@ -8,8 +8,6 @@ use super::client::HandleStore;
pub trait Types {
type FreeFunctions: 'static;
type TokenStream: 'static + Clone;
type TokenStreamBuilder: 'static;
type TokenStreamIter: 'static + Clone;
type Group: 'static + Clone;
type Punct: 'static + Copy + Eq + Hash;
type Ident: 'static + Copy + Eq + Hash;
@@ -275,13 +273,17 @@ fn run_server<
}

impl client::Client<crate::TokenStream, crate::TokenStream> {
pub fn run<S: Server>(
pub fn run<S>(
&self,
strategy: &impl ExecutionStrategy,
server: S,
input: S::TokenStream,
force_show_panics: bool,
) -> Result<S::TokenStream, PanicMessage> {
) -> Result<S::TokenStream, PanicMessage>
where
S: Server,
S::TokenStream: Default,
{
let client::Client { get_handle_counters, run, _marker } = *self;
run_server(
strategy,
@@ -291,19 +293,23 @@ impl client::Client<crate::TokenStream, crate::TokenStream> {
run,
force_show_panics,
)
.map(<MarkedTypes<S> as Types>::TokenStream::unmark)
.map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
}
}

impl client::Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream> {
pub fn run<S: Server>(
pub fn run<S>(
&self,
strategy: &impl ExecutionStrategy,
server: S,
input: S::TokenStream,
input2: S::TokenStream,
force_show_panics: bool,
) -> Result<S::TokenStream, PanicMessage> {
) -> Result<S::TokenStream, PanicMessage>
where
S: Server,
S::TokenStream: Default,
{
let client::Client { get_handle_counters, run, _marker } = *self;
run_server(
strategy,
@@ -316,6 +322,6 @@ impl client::Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream
run,
force_show_panics,
)
.map(<MarkedTypes<S> as Types>::TokenStream::unmark)
.map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
}
}
@@ -43,7 +43,7 @@ use std::cmp::Ordering;
use std::ops::RangeBounds;
use std::path::PathBuf;
use std::str::FromStr;
use std::{error, fmt, iter, mem};
use std::{error, fmt, iter};

/// Determines whether proc_macro has been made accessible to the currently
/// running program.
@@ -72,7 +72,7 @@ pub fn is_available() -> bool {
/// and `#[proc_macro_derive]` definitions.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
#[derive(Clone)]
pub struct TokenStream(bridge::client::TokenStream);
pub struct TokenStream(Option<bridge::client::TokenStream>);

#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl !Send for TokenStream {}
@@ -126,13 +126,13 @@ impl TokenStream {
/// Returns an empty `TokenStream` containing no token trees.
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
pub fn new() -> TokenStream {
TokenStream(bridge::client::TokenStream::new())
TokenStream(None)
}

/// Checks if this `TokenStream` is empty.
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
pub fn is_empty(&self) -> bool {
self.0.is_empty()
self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
}

/// Parses this `TokenStream` as an expression and attempts to expand any
@@ -147,8 +147,9 @@ impl TokenStream {
/// considered errors, is unspecified and may change in the future.
#[unstable(feature = "proc_macro_expand", issue = "90765")]
pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
match bridge::client::TokenStream::expand_expr(&self.0) {
Ok(stream) => Ok(TokenStream(stream)),
let stream = self.0.as_ref().ok_or(ExpandError)?;
match bridge::client::TokenStream::expand_expr(stream) {
Ok(stream) => Ok(TokenStream(Some(stream))),
Err(_) => Err(ExpandError),
}
}
@@ -166,7 +167,7 @@ impl FromStr for TokenStream {
type Err = LexError;

fn from_str(src: &str) -> Result<TokenStream, LexError> {
Ok(TokenStream(bridge::client::TokenStream::from_str(src)))
Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src))))
}
}
@@ -175,7 +176,7 @@ impl FromStr for TokenStream {
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl ToString for TokenStream {
fn to_string(&self) -> String {
self.0.to_string()
self.0.as_ref().map(|t| t.to_string()).unwrap_or_default()
}
}
@@ -208,16 +209,103 @@ impl Default for TokenStream {
#[unstable(feature = "proc_macro_quote", issue = "54722")]
pub use quote::{quote, quote_span};

/// Creates a token stream containing a single token tree.
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
TokenStream(bridge::client::TokenStream::from_token_tree(match tree {
fn tree_to_bridge_tree(
tree: TokenTree,
) -> bridge::TokenTree<
bridge::client::Group,
bridge::client::Punct,
bridge::client::Ident,
bridge::client::Literal,
> {
match tree {
TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
}))
}
}

/// Creates a token stream containing a single token tree.
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
impl From<TokenTree> for TokenStream {
fn from(tree: TokenTree) -> TokenStream {
TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree))))
}
}

/// Non-generic helper for implementing `FromIterator<TokenTree>` and
/// `Extend<TokenTree>` with less monomorphization in calling crates.
struct ConcatTreesHelper {
trees: Vec<
bridge::TokenTree<
bridge::client::Group,
bridge::client::Punct,
bridge::client::Ident,
bridge::client::Literal,
>,
>,
}

impl ConcatTreesHelper {
fn new(capacity: usize) -> Self {
ConcatTreesHelper { trees: Vec::with_capacity(capacity) }
}

fn push(&mut self, tree: TokenTree) {
self.trees.push(tree_to_bridge_tree(tree));
}

fn build(self) -> TokenStream {
if self.trees.is_empty() {
TokenStream(None)
} else {
TokenStream(Some(bridge::client::TokenStream::concat_trees(None, self.trees)))
}
}

fn append_to(self, stream: &mut TokenStream) {
if self.trees.is_empty() {
return;
}
stream.0 = Some(bridge::client::TokenStream::concat_trees(stream.0.take(), self.trees))
}
}

/// Non-generic helper for implementing `FromIterator<TokenStream>` and
/// `Extend<TokenStream>` with less monomorphization in calling crates.
struct ConcatStreamsHelper {
streams: Vec<bridge::client::TokenStream>,
}

impl ConcatStreamsHelper {
fn new(capacity: usize) -> Self {
ConcatStreamsHelper { streams: Vec::with_capacity(capacity) }
}

fn push(&mut self, stream: TokenStream) {
if let Some(stream) = stream.0 {
self.streams.push(stream);
}
}

fn build(mut self) -> TokenStream {
if self.streams.len() <= 1 {
TokenStream(self.streams.pop())
} else {
TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams)))
}
}

fn append_to(mut self, stream: &mut TokenStream) {
if self.streams.is_empty() {
return;
}
let base = stream.0.take();
if base.is_none() && self.streams.len() == 1 {
stream.0 = self.streams.pop();
} else {
stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams));
}
}
}
@@ -225,7 +313,10 @@ impl From<TokenTree> for TokenStream {
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
impl iter::FromIterator<TokenTree> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
trees.into_iter().map(TokenStream::from).collect()
let iter = trees.into_iter();
let mut builder = ConcatTreesHelper::new(iter.size_hint().0);
iter.for_each(|tree| builder.push(tree));
builder.build()
}
}
@@ -234,24 +325,30 @@ impl iter::FromIterator<TokenTree> for TokenStream {
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl iter::FromIterator<TokenStream> for TokenStream {
fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
let mut builder = bridge::client::TokenStreamBuilder::new();
streams.into_iter().for_each(|stream| builder.push(stream.0));
TokenStream(builder.build())
let iter = streams.into_iter();
let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
iter.for_each(|stream| builder.push(stream));
builder.build()
}
}

#[stable(feature = "token_stream_extend", since = "1.30.0")]
impl Extend<TokenTree> for TokenStream {
fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
self.extend(trees.into_iter().map(TokenStream::from));
let iter = trees.into_iter();
let mut builder = ConcatTreesHelper::new(iter.size_hint().0);
iter.for_each(|tree| builder.push(tree));
builder.append_to(self);
}
}

#[stable(feature = "token_stream_extend", since = "1.30.0")]
impl Extend<TokenStream> for TokenStream {
fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
// FIXME(eddyb) Use an optimized implementation if/when possible.
*self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
let iter = streams.into_iter();
let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
iter.for_each(|stream| builder.push(stream));
builder.append_to(self);
}
}
@@ -265,7 +362,16 @@ pub mod token_stream {
/// and returns whole groups as token trees.
#[derive(Clone)]
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
pub struct IntoIter(bridge::client::TokenStreamIter);
pub struct IntoIter(
std::vec::IntoIter<
bridge::TokenTree<
bridge::client::Group,
bridge::client::Punct,
bridge::client::Ident,
bridge::client::Literal,
>,
>,
);

#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
impl Iterator for IntoIter {
@@ -287,7 +393,7 @@ pub mod token_stream {
type IntoIter = IntoIter;

fn into_iter(self) -> IntoIter {
IntoIter(self.0.into_iter())
IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter())
}
}
}
@@ -734,7 +840,7 @@ impl Group {
/// returned above.
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
pub fn stream(&self) -> TokenStream {
TokenStream(self.0.stream())
TokenStream(Some(self.0.stream()))
}

/// Returns the span for the delimiters of this token stream, spanning the