Auto merge of #98186 - mystor:tokenstream_as_vec_tt, r=eddyb
Batch proc_macro RPC for TokenStream iteration and combination operations

This is the first part of #86822, split off as requested in https://github.com/rust-lang/rust/pull/86822#pullrequestreview-1008655452. It reduces the number of RPC calls required for common operations such as iterating over and concatenating TokenStreams.
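As a rough, hypothetical illustration of what gets batched (this snippet is not part of the change; the macro name and body are invented and a proc-macro crate is assumed, while `TokenTree`, `Ident`, `Span`, and collecting `TokenTree`s into a `TokenStream` are the existing public `proc_macro` API):

```rust
extern crate proc_macro;
use proc_macro::{Ident, Span, TokenStream, TokenTree};

// Hypothetical function-like macro that emits three identifiers.
#[proc_macro]
pub fn make_idents(_input: TokenStream) -> TokenStream {
    let trees: Vec<TokenTree> = ["a", "b", "c"]
        .into_iter()
        .map(|name| TokenTree::Ident(Ident::new(name, Span::call_site())))
        .collect();

    // Before this PR, collecting TokenTrees went through a server-side
    // TokenStreamBuilder, costing one RPC call per pushed tree. With this
    // PR, the trees are buffered on the client and sent across the bridge
    // in a single batched `concat_trees` call.
    trees.into_iter().collect()
}
```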
commit 0182fd99af
6 changed files with 304 additions and 174 deletions
@@ -277,12 +277,6 @@ impl ToInternal<rustc_errors::Level> for Level {
 
 pub struct FreeFunctions;
 
-#[derive(Clone)]
-pub struct TokenStreamIter {
-    cursor: tokenstream::Cursor,
-    stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
-}
-
 #[derive(Clone)]
 pub struct Group {
     delimiter: Delimiter,
@@ -382,8 +376,6 @@ impl<'a, 'b> Rustc<'a, 'b> {
 impl server::Types for Rustc<'_, '_> {
     type FreeFunctions = FreeFunctions;
     type TokenStream = TokenStream;
-    type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
-    type TokenStreamIter = TokenStreamIter;
     type Group = Group;
     type Punct = Punct;
     type Ident = Ident;
@@ -408,9 +400,6 @@ impl server::FreeFunctions for Rustc<'_, '_> {
 }
 
 impl server::TokenStream for Rustc<'_, '_> {
-    fn new(&mut self) -> Self::TokenStream {
-        TokenStream::default()
-    }
     fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
         stream.is_empty()
     }
@@ -481,53 +470,75 @@ impl server::TokenStream for Rustc<'_, '_> {
     ) -> Self::TokenStream {
         tree.to_internal()
     }
-    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
-        TokenStreamIter { cursor: stream.into_trees(), stack: vec![] }
-    }
-}
-
-impl server::TokenStreamBuilder for Rustc<'_, '_> {
-    fn new(&mut self) -> Self::TokenStreamBuilder {
-        tokenstream::TokenStreamBuilder::new()
-    }
-    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
-        builder.push(stream);
-    }
-    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
+    fn concat_trees(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        trees: Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
+    ) -> Self::TokenStream {
+        let mut builder = tokenstream::TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for tree in trees {
+            builder.push(tree.to_internal());
+        }
         builder.build()
     }
-}
-
-impl server::TokenStreamIter for Rustc<'_, '_> {
-    fn next(
+    fn concat_streams(
         &mut self,
-        iter: &mut Self::TokenStreamIter,
-    ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        base: Option<Self::TokenStream>,
+        streams: Vec<Self::TokenStream>,
+    ) -> Self::TokenStream {
+        let mut builder = tokenstream::TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for stream in streams {
+            builder.push(stream);
+        }
+        builder.build()
+    }
+    fn into_trees(
+        &mut self,
+        stream: Self::TokenStream,
+    ) -> Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        // FIXME: This is a raw port of the previous approach (which had a
+        // `TokenStreamIter` server-side object with a single `next` method),
+        // and can probably be optimized (for bulk conversion).
+        let mut cursor = stream.into_trees();
+        let mut stack = Vec::new();
+        let mut tts = Vec::new();
         loop {
-            let tree = iter.stack.pop().or_else(|| {
-                let next = iter.cursor.next_with_spacing()?;
-                Some(TokenTree::from_internal((next, &mut iter.stack, self)))
-            })?;
-            // A hack used to pass AST fragments to attribute and derive macros
-            // as a single nonterminal token instead of a token stream.
-            // Such token needs to be "unwrapped" and not represented as a delimited group.
-            // FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
-            if let TokenTree::Group(ref group) = tree {
-                if group.flatten {
-                    iter.cursor.append(group.stream.clone());
-                    continue;
+            let next = stack.pop().or_else(|| {
+                let next = cursor.next_with_spacing()?;
+                Some(TokenTree::from_internal((next, &mut stack, self)))
+            });
+            match next {
+                Some(TokenTree::Group(group)) => {
+                    // A hack used to pass AST fragments to attribute and derive
+                    // macros as a single nonterminal token instead of a token
+                    // stream. Such token needs to be "unwrapped" and not
+                    // represented as a delimited group.
+                    // FIXME: It needs to be removed, but there are some
+                    // compatibility issues (see #73345).
+                    if group.flatten {
+                        cursor.append(group.stream);
+                        continue;
+                    }
+                    tts.push(TokenTree::Group(group));
                 }
+                Some(tt) => tts.push(tt),
+                None => return tts,
             }
-            return Some(tree);
         }
     }
 }
 
 impl server::Group for Rustc<'_, '_> {
-    fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
+    fn new(&mut self, delimiter: Delimiter, stream: Option<Self::TokenStream>) -> Self::Group {
         Group {
             delimiter,
-            stream,
+            stream: stream.unwrap_or_default(),
             span: DelimSpan::from_single(server::Span::call_site(self)),
             flatten: false,
         }
@@ -178,8 +178,6 @@ define_handles! {
     'owned:
     FreeFunctions,
     TokenStream,
-    TokenStreamBuilder,
-    TokenStreamIter,
     Group,
     Literal,
     SourceFile,
@@ -204,12 +202,6 @@ impl Clone for TokenStream {
     }
 }
 
-impl Clone for TokenStreamIter {
-    fn clone(&self) -> Self {
-        self.clone()
-    }
-}
-
 impl Clone for Group {
     fn clone(&self) -> Self {
         self.clone()
@@ -435,7 +427,7 @@ impl Client<crate::TokenStream, crate::TokenStream> {
         Client {
             get_handle_counters: HandleCounters::get,
             run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
-                run_client(bridge, |input| f(crate::TokenStream(input)).0)
+                run_client(bridge, |input| f(crate::TokenStream(Some(input))).0)
             }),
             _marker: PhantomData,
         }
@@ -450,7 +442,7 @@ impl Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream> {
             get_handle_counters: HandleCounters::get,
             run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
                 run_client(bridge, |(input, input2)| {
-                    f(crate::TokenStream(input), crate::TokenStream(input2)).0
+                    f(crate::TokenStream(Some(input)), crate::TokenStream(Some(input2))).0
                 })
             }),
             _marker: PhantomData,
@@ -60,7 +60,6 @@ macro_rules! with_api {
             TokenStream {
                 fn drop($self: $S::TokenStream);
                 fn clone($self: &$S::TokenStream) -> $S::TokenStream;
-                fn new() -> $S::TokenStream;
                 fn is_empty($self: &$S::TokenStream) -> bool;
                 fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
                 fn from_str(src: &str) -> $S::TokenStream;
@@ -68,25 +67,22 @@ macro_rules! with_api {
                 fn from_token_tree(
                     tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
                 ) -> $S::TokenStream;
-                fn into_iter($self: $S::TokenStream) -> $S::TokenStreamIter;
-            },
-            TokenStreamBuilder {
-                fn drop($self: $S::TokenStreamBuilder);
-                fn new() -> $S::TokenStreamBuilder;
-                fn push($self: &mut $S::TokenStreamBuilder, stream: $S::TokenStream);
-                fn build($self: $S::TokenStreamBuilder) -> $S::TokenStream;
-            },
-            TokenStreamIter {
-                fn drop($self: $S::TokenStreamIter);
-                fn clone($self: &$S::TokenStreamIter) -> $S::TokenStreamIter;
-                fn next(
-                    $self: &mut $S::TokenStreamIter,
-                ) -> Option<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
+                fn concat_trees(
+                    base: Option<$S::TokenStream>,
+                    trees: Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>,
+                ) -> $S::TokenStream;
+                fn concat_streams(
+                    base: Option<$S::TokenStream>,
+                    streams: Vec<$S::TokenStream>,
+                ) -> $S::TokenStream;
+                fn into_trees(
+                    $self: $S::TokenStream
+                ) -> Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
             },
             Group {
                 fn drop($self: $S::Group);
                 fn clone($self: &$S::Group) -> $S::Group;
-                fn new(delimiter: Delimiter, stream: $S::TokenStream) -> $S::Group;
+                fn new(delimiter: Delimiter, stream: Option<$S::TokenStream>) -> $S::Group;
                 fn delimiter($self: &$S::Group) -> Delimiter;
                 fn stream($self: &$S::Group) -> $S::TokenStream;
                 fn span($self: &$S::Group) -> $S::Span;
@@ -311,29 +307,18 @@ impl<'a, T, M> Unmark for &'a mut Marked<T, M> {
     }
 }
 
-impl<T: Mark> Mark for Option<T> {
-    type Unmarked = Option<T::Unmarked>;
+impl<T: Mark> Mark for Vec<T> {
+    type Unmarked = Vec<T::Unmarked>;
     fn mark(unmarked: Self::Unmarked) -> Self {
-        unmarked.map(T::mark)
+        // Should be a no-op due to std's in-place collect optimizations.
+        unmarked.into_iter().map(T::mark).collect()
    }
 }
-impl<T: Unmark> Unmark for Option<T> {
-    type Unmarked = Option<T::Unmarked>;
+impl<T: Unmark> Unmark for Vec<T> {
+    type Unmarked = Vec<T::Unmarked>;
     fn unmark(self) -> Self::Unmarked {
-        self.map(T::unmark)
-    }
-}
-
-impl<T: Mark, E: Mark> Mark for Result<T, E> {
-    type Unmarked = Result<T::Unmarked, E::Unmarked>;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        unmarked.map(T::mark).map_err(E::mark)
-    }
-}
-impl<T: Unmark, E: Unmark> Unmark for Result<T, E> {
-    type Unmarked = Result<T::Unmarked, E::Unmarked>;
-    fn unmark(self) -> Self::Unmarked {
-        self.map(T::unmark).map_err(E::unmark)
+        // Should be a no-op due to std's in-place collect optimizations.
+        self.into_iter().map(T::unmark).collect()
     }
 }
 
@@ -367,7 +352,6 @@ mark_noop! {
     Level,
     LineColumn,
     Spacing,
-    Bound<usize>,
 }
 
 rpc_encode_decode!(
@@ -394,6 +378,61 @@ rpc_encode_decode!(
     }
 );
 
+macro_rules! mark_compound {
+    (enum $name:ident <$($T:ident),+> { $($variant:ident $(($field:ident))?),* $(,)? }) => {
+        impl<$($T: Mark),+> Mark for $name <$($T),+> {
+            type Unmarked = $name <$($T::Unmarked),+>;
+            fn mark(unmarked: Self::Unmarked) -> Self {
+                match unmarked {
+                    $($name::$variant $(($field))? => {
+                        $name::$variant $((Mark::mark($field)))?
+                    })*
+                }
+            }
+        }
+
+        impl<$($T: Unmark),+> Unmark for $name <$($T),+> {
+            type Unmarked = $name <$($T::Unmarked),+>;
+            fn unmark(self) -> Self::Unmarked {
+                match self {
+                    $($name::$variant $(($field))? => {
+                        $name::$variant $((Unmark::unmark($field)))?
+                    })*
+                }
+            }
+        }
+    }
+}
+
+macro_rules! compound_traits {
+    ($($t:tt)*) => {
+        rpc_encode_decode!($($t)*);
+        mark_compound!($($t)*);
+    };
+}
+
+compound_traits!(
+    enum Bound<T> {
+        Included(x),
+        Excluded(x),
+        Unbounded,
+    }
+);
+
+compound_traits!(
+    enum Option<T> {
+        Some(t),
+        None,
+    }
+);
+
+compound_traits!(
+    enum Result<T, E> {
+        Ok(t),
+        Err(e),
+    }
+);
+
 #[derive(Clone)]
 pub enum TokenTree<G, P, I, L> {
     Group(G),
@@ -402,30 +441,7 @@ pub enum TokenTree<G, P, I, L> {
     Literal(L),
 }
 
-impl<G: Mark, P: Mark, I: Mark, L: Mark> Mark for TokenTree<G, P, I, L> {
-    type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
-    fn mark(unmarked: Self::Unmarked) -> Self {
-        match unmarked {
-            TokenTree::Group(tt) => TokenTree::Group(G::mark(tt)),
-            TokenTree::Punct(tt) => TokenTree::Punct(P::mark(tt)),
-            TokenTree::Ident(tt) => TokenTree::Ident(I::mark(tt)),
-            TokenTree::Literal(tt) => TokenTree::Literal(L::mark(tt)),
-        }
-    }
-}
-impl<G: Unmark, P: Unmark, I: Unmark, L: Unmark> Unmark for TokenTree<G, P, I, L> {
-    type Unmarked = TokenTree<G::Unmarked, P::Unmarked, I::Unmarked, L::Unmarked>;
-    fn unmark(self) -> Self::Unmarked {
-        match self {
-            TokenTree::Group(tt) => TokenTree::Group(tt.unmark()),
-            TokenTree::Punct(tt) => TokenTree::Punct(tt.unmark()),
-            TokenTree::Ident(tt) => TokenTree::Ident(tt.unmark()),
-            TokenTree::Literal(tt) => TokenTree::Literal(tt.unmark()),
-        }
-    }
-}
-
-rpc_encode_decode!(
+compound_traits!(
     enum TokenTree<G, P, I, L> {
         Group(tt),
         Punct(tt),
@@ -4,7 +4,6 @@ use std::any::Any;
 use std::char;
 use std::io::Write;
 use std::num::NonZeroU32;
-use std::ops::Bound;
 use std::str;
 
 pub(super) type Writer = super::buffer::Buffer;
@@ -43,15 +42,17 @@ macro_rules! rpc_encode_decode {
             }
         }
     };
-    (struct $name:ident { $($field:ident),* $(,)? }) => {
-        impl<S> Encode<S> for $name {
+    (struct $name:ident $(<$($T:ident),+>)? { $($field:ident),* $(,)? }) => {
+        impl<S, $($($T: Encode<S>),+)?> Encode<S> for $name $(<$($T),+>)? {
             fn encode(self, w: &mut Writer, s: &mut S) {
                 $(self.$field.encode(w, s);)*
             }
         }
 
-        impl<S> DecodeMut<'_, '_, S> for $name {
-            fn decode(r: &mut Reader<'_>, s: &mut S) -> Self {
+        impl<'a, S, $($($T: for<'s> DecodeMut<'a, 's, S>),+)?> DecodeMut<'a, '_, S>
+            for $name $(<$($T),+>)?
+        {
+            fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
                 $name {
                     $($field: DecodeMut::decode(r, s)),*
                 }
@@ -184,28 +185,6 @@ impl<'a, S, A: for<'s> DecodeMut<'a, 's, S>, B: for<'s> DecodeMut<'a, 's, S>> De
     }
 }
 
-rpc_encode_decode!(
-    enum Bound<T> {
-        Included(x),
-        Excluded(x),
-        Unbounded,
-    }
-);
-
-rpc_encode_decode!(
-    enum Option<T> {
-        None,
-        Some(x),
-    }
-);
-
-rpc_encode_decode!(
-    enum Result<T, E> {
-        Ok(x),
-        Err(e),
-    }
-);
-
 impl<S> Encode<S> for &[u8] {
     fn encode(self, w: &mut Writer, s: &mut S) {
         self.len().encode(w, s);
@@ -246,6 +225,26 @@ impl<S> DecodeMut<'_, '_, S> for String {
     }
 }
 
+impl<S, T: Encode<S>> Encode<S> for Vec<T> {
+    fn encode(self, w: &mut Writer, s: &mut S) {
+        self.len().encode(w, s);
+        for x in self {
+            x.encode(w, s);
+        }
+    }
+}
+
+impl<'a, S, T: for<'s> DecodeMut<'a, 's, S>> DecodeMut<'a, '_, S> for Vec<T> {
+    fn decode(r: &mut Reader<'a>, s: &mut S) -> Self {
+        let len = usize::decode(r, s);
+        let mut vec = Vec::with_capacity(len);
+        for _ in 0..len {
+            vec.push(T::decode(r, s));
+        }
+        vec
+    }
+}
+
 /// Simplified version of panic payloads, ignoring
 /// types other than `&'static str` and `String`.
 pub enum PanicMessage {
@@ -8,8 +8,6 @@ use super::client::HandleStore;
 pub trait Types {
     type FreeFunctions: 'static;
     type TokenStream: 'static + Clone;
-    type TokenStreamBuilder: 'static;
-    type TokenStreamIter: 'static + Clone;
     type Group: 'static + Clone;
     type Punct: 'static + Copy + Eq + Hash;
     type Ident: 'static + Copy + Eq + Hash;
@@ -275,13 +273,17 @@ fn run_server<
 }
 
 impl client::Client<crate::TokenStream, crate::TokenStream> {
-    pub fn run<S: Server>(
+    pub fn run<S>(
         &self,
         strategy: &impl ExecutionStrategy,
         server: S,
         input: S::TokenStream,
         force_show_panics: bool,
-    ) -> Result<S::TokenStream, PanicMessage> {
+    ) -> Result<S::TokenStream, PanicMessage>
+    where
+        S: Server,
+        S::TokenStream: Default,
+    {
         let client::Client { get_handle_counters, run, _marker } = *self;
         run_server(
             strategy,
@@ -291,19 +293,23 @@ impl client::Client<crate::TokenStream, crate::TokenStream> {
             run,
             force_show_panics,
         )
-        .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+        .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
     }
 }
 
 impl client::Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream> {
-    pub fn run<S: Server>(
+    pub fn run<S>(
        &self,
         strategy: &impl ExecutionStrategy,
         server: S,
         input: S::TokenStream,
         input2: S::TokenStream,
         force_show_panics: bool,
-    ) -> Result<S::TokenStream, PanicMessage> {
+    ) -> Result<S::TokenStream, PanicMessage>
+    where
+        S: Server,
+        S::TokenStream: Default,
+    {
         let client::Client { get_handle_counters, run, _marker } = *self;
         run_server(
             strategy,
@@ -316,6 +322,6 @@ impl client::Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream
             run,
             force_show_panics,
         )
-        .map(<MarkedTypes<S> as Types>::TokenStream::unmark)
+        .map(|s| <Option<<MarkedTypes<S> as Types>::TokenStream>>::unmark(s).unwrap_or_default())
     }
 }
@@ -43,7 +43,7 @@ use std::cmp::Ordering;
 use std::ops::RangeBounds;
 use std::path::PathBuf;
 use std::str::FromStr;
-use std::{error, fmt, iter, mem};
+use std::{error, fmt, iter};
 
 /// Determines whether proc_macro has been made accessible to the currently
 /// running program.
@@ -72,7 +72,7 @@ pub fn is_available() -> bool {
 /// and `#[proc_macro_derive]` definitions.
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 #[derive(Clone)]
-pub struct TokenStream(bridge::client::TokenStream);
+pub struct TokenStream(Option<bridge::client::TokenStream>);
 
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl !Send for TokenStream {}
@@ -126,13 +126,13 @@ impl TokenStream {
     /// Returns an empty `TokenStream` containing no token trees.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn new() -> TokenStream {
-        TokenStream(bridge::client::TokenStream::new())
+        TokenStream(None)
     }
 
     /// Checks if this `TokenStream` is empty.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
+        self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
     }
 
     /// Parses this `TokenStream` as an expression and attempts to expand any
@@ -147,8 +147,9 @@ impl TokenStream {
     /// considered errors, is unspecified and may change in the future.
     #[unstable(feature = "proc_macro_expand", issue = "90765")]
     pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
-        match bridge::client::TokenStream::expand_expr(&self.0) {
-            Ok(stream) => Ok(TokenStream(stream)),
+        let stream = self.0.as_ref().ok_or(ExpandError)?;
+        match bridge::client::TokenStream::expand_expr(stream) {
+            Ok(stream) => Ok(TokenStream(Some(stream))),
             Err(_) => Err(ExpandError),
         }
     }
@@ -166,7 +167,7 @@ impl FromStr for TokenStream {
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        Ok(TokenStream(bridge::client::TokenStream::from_str(src)))
+        Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src))))
     }
 }
 
@@ -175,7 +176,7 @@ impl FromStr for TokenStream {
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl ToString for TokenStream {
     fn to_string(&self) -> String {
-        self.0.to_string()
+        self.0.as_ref().map(|t| t.to_string()).unwrap_or_default()
     }
 }
 
@@ -208,16 +209,103 @@ impl Default for TokenStream {
 #[unstable(feature = "proc_macro_quote", issue = "54722")]
 pub use quote::{quote, quote_span};
 
-/// Creates a token stream containing a single token tree.
-#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream(bridge::client::TokenStream::from_token_tree(match tree {
-            TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
-            TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
-            TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
-            TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
-        }))
+fn tree_to_bridge_tree(
+    tree: TokenTree,
+) -> bridge::TokenTree<
+    bridge::client::Group,
+    bridge::client::Punct,
+    bridge::client::Ident,
+    bridge::client::Literal,
+> {
+    match tree {
+        TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+        TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+        TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+        TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+    }
+}
+
+/// Creates a token stream containing a single token tree.
+#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
+impl From<TokenTree> for TokenStream {
+    fn from(tree: TokenTree) -> TokenStream {
+        TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree))))
+    }
+}
+
+/// Non-generic helper for implementing `FromIterator<TokenTree>` and
+/// `Extend<TokenTree>` with less monomorphization in calling crates.
+struct ConcatTreesHelper {
+    trees: Vec<
+        bridge::TokenTree<
+            bridge::client::Group,
+            bridge::client::Punct,
+            bridge::client::Ident,
+            bridge::client::Literal,
+        >,
+    >,
+}
+
+impl ConcatTreesHelper {
+    fn new(capacity: usize) -> Self {
+        ConcatTreesHelper { trees: Vec::with_capacity(capacity) }
+    }
+
+    fn push(&mut self, tree: TokenTree) {
+        self.trees.push(tree_to_bridge_tree(tree));
+    }
+
+    fn build(self) -> TokenStream {
+        if self.trees.is_empty() {
+            TokenStream(None)
+        } else {
+            TokenStream(Some(bridge::client::TokenStream::concat_trees(None, self.trees)))
+        }
+    }
+
+    fn append_to(self, stream: &mut TokenStream) {
+        if self.trees.is_empty() {
+            return;
+        }
+        stream.0 = Some(bridge::client::TokenStream::concat_trees(stream.0.take(), self.trees))
+    }
+}
+
+/// Non-generic helper for implementing `FromIterator<TokenStream>` and
+/// `Extend<TokenStream>` with less monomorphization in calling crates.
+struct ConcatStreamsHelper {
+    streams: Vec<bridge::client::TokenStream>,
+}
+
+impl ConcatStreamsHelper {
+    fn new(capacity: usize) -> Self {
+        ConcatStreamsHelper { streams: Vec::with_capacity(capacity) }
+    }
+
+    fn push(&mut self, stream: TokenStream) {
+        if let Some(stream) = stream.0 {
+            self.streams.push(stream);
+        }
+    }
+
+    fn build(mut self) -> TokenStream {
+        if self.streams.len() <= 1 {
+            TokenStream(self.streams.pop())
+        } else {
+            TokenStream(Some(bridge::client::TokenStream::concat_streams(None, self.streams)))
+        }
+    }
+
+    fn append_to(mut self, stream: &mut TokenStream) {
+        if self.streams.is_empty() {
+            return;
+        }
+        let base = stream.0.take();
+        if base.is_none() && self.streams.len() == 1 {
+            stream.0 = self.streams.pop();
+        } else {
+            stream.0 = Some(bridge::client::TokenStream::concat_streams(base, self.streams));
+        }
     }
 }
 
@@ -225,7 +313,10 @@ impl From<TokenTree> for TokenStream {
 #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
 impl iter::FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
-        trees.into_iter().map(TokenStream::from).collect()
+        let iter = trees.into_iter();
+        let mut builder = ConcatTreesHelper::new(iter.size_hint().0);
+        iter.for_each(|tree| builder.push(tree));
+        builder.build()
     }
 }
 
@@ -234,24 +325,30 @@ impl iter::FromIterator<TokenTree> for TokenStream {
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl iter::FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let mut builder = bridge::client::TokenStreamBuilder::new();
-        streams.into_iter().for_each(|stream| builder.push(stream.0));
-        TokenStream(builder.build())
+        let iter = streams.into_iter();
+        let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
+        iter.for_each(|stream| builder.push(stream));
+        builder.build()
     }
 }
 
 #[stable(feature = "token_stream_extend", since = "1.30.0")]
 impl Extend<TokenTree> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
-        self.extend(trees.into_iter().map(TokenStream::from));
+        let iter = trees.into_iter();
+        let mut builder = ConcatTreesHelper::new(iter.size_hint().0);
+        iter.for_each(|tree| builder.push(tree));
+        builder.append_to(self);
     }
 }
 
 #[stable(feature = "token_stream_extend", since = "1.30.0")]
 impl Extend<TokenStream> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-        // FIXME(eddyb) Use an optimized implementation if/when possible.
-        *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+        let iter = streams.into_iter();
+        let mut builder = ConcatStreamsHelper::new(iter.size_hint().0);
+        iter.for_each(|stream| builder.push(stream));
+        builder.append_to(self);
     }
 }
 
@@ -265,7 +362,16 @@ pub mod token_stream {
     /// and returns whole groups as token trees.
     #[derive(Clone)]
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
-    pub struct IntoIter(bridge::client::TokenStreamIter);
+    pub struct IntoIter(
+        std::vec::IntoIter<
+            bridge::TokenTree<
+                bridge::client::Group,
+                bridge::client::Punct,
+                bridge::client::Ident,
+                bridge::client::Literal,
+            >,
+        >,
+    );
 
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     impl Iterator for IntoIter {
@@ -287,7 +393,7 @@ pub mod token_stream {
         type IntoIter = IntoIter;
 
         fn into_iter(self) -> IntoIter {
-            IntoIter(self.0.into_iter())
+            IntoIter(self.0.map(|v| v.into_trees()).unwrap_or_default().into_iter())
         }
     }
 }
@@ -734,7 +840,7 @@ impl Group {
     /// returned above.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn stream(&self) -> TokenStream {
-        TokenStream(self.0.stream())
+        TokenStream(Some(self.0.stream()))
    }
 
     /// Returns the span for the delimiters of this token stream, spanning the