
proc_macro: reduce the number of messages required to create, extend, and iterate TokenStreams

This significantly reduces the cost of common interactions with TokenStream
when running with the CrossThread execution strategy, by reducing the number of
RPC calls required.
Nika Layzell 2021-07-01 15:03:51 -04:00
parent 2b17219468
commit 0a049fd30d
5 changed files with 129 additions and 97 deletions
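To see the effect from the client side, here is a small, hypothetical proc-macro crate (illustration only, not part of this commit; the crate and function names are made up). After this change, each of the annotated operations maps to a single bridge message under the CrossThread execution strategy, instead of one message per token tree or per stream.

// Hypothetical example crate; `count_and_emit` is made up for illustration.
extern crate proc_macro;

use proc_macro::TokenStream;
use std::str::FromStr;

#[proc_macro]
pub fn count_and_emit(input: TokenStream) -> TokenStream {
    // into_iter(): previously one `next` RPC per token tree; now a single
    // call that returns every top-level tree at once.
    let arg_count = input.into_iter().count();

    // FromIterator<TokenStream>: previously TokenStreamBuilder::new + one
    // `push` per stream + `build`; now a single `concat_streams` message.
    let mut out: TokenStream = vec!["#[allow(dead_code)]", "const ARG_COUNT: usize"]
        .into_iter()
        .map(|src| TokenStream::from_str(src).unwrap())
        .collect();

    // Extend<TokenStream>: one `concat_streams` message that passes the
    // existing stream as the `base` argument instead of rebuilding it.
    out.extend(std::iter::once(
        TokenStream::from_str(&format!("= {};", arg_count)).unwrap(),
    ));
    out
}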

File 1 of 5

@@ -277,12 +277,6 @@ impl ToInternal<rustc_errors::Level> for Level {
 
 pub struct FreeFunctions;
 
-#[derive(Clone)]
-pub struct TokenStreamIter {
-    cursor: tokenstream::Cursor,
-    stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
-}
-
 #[derive(Clone)]
 pub struct Group {
     delimiter: Delimiter,
@@ -382,8 +376,6 @@ impl<'a, 'b> Rustc<'a, 'b> {
 impl server::Types for Rustc<'_, '_> {
     type FreeFunctions = FreeFunctions;
     type TokenStream = TokenStream;
-    type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
-    type TokenStreamIter = TokenStreamIter;
     type Group = Group;
     type Punct = Punct;
     type Ident = Ident;
@@ -408,9 +400,6 @@ impl server::FreeFunctions for Rustc<'_, '_> {
 }
 
 impl server::TokenStream for Rustc<'_, '_> {
-    fn new(&mut self) -> Self::TokenStream {
-        TokenStream::default()
-    }
     fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
         stream.is_empty()
     }
@@ -481,53 +470,74 @@ impl server::TokenStream for Rustc<'_, '_> {
     ) -> Self::TokenStream {
         tree.to_internal()
     }
-    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
-        TokenStreamIter { cursor: stream.into_trees(), stack: vec![] }
-    }
-}
-
-impl server::TokenStreamBuilder for Rustc<'_, '_> {
-    fn new(&mut self) -> Self::TokenStreamBuilder {
-        tokenstream::TokenStreamBuilder::new()
-    }
-    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
-        builder.push(stream);
-    }
-    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
+    fn concat_trees(
+        &mut self,
+        base: Option<Self::TokenStream>,
+        trees: Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>>,
+    ) -> Self::TokenStream {
+        let mut builder = tokenstream::TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for tree in trees {
+            builder.push(tree.to_internal());
+        }
         builder.build()
     }
-}
-
-impl server::TokenStreamIter for Rustc<'_, '_> {
-    fn next(
+    fn concat_streams(
         &mut self,
-        iter: &mut Self::TokenStreamIter,
-    ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        base: Option<Self::TokenStream>,
+        streams: Vec<Self::TokenStream>,
+    ) -> Self::TokenStream {
+        let mut builder = tokenstream::TokenStreamBuilder::new();
+        if let Some(base) = base {
+            builder.push(base);
+        }
+        for stream in streams {
+            builder.push(stream);
+        }
+        builder.build()
+    }
+    fn into_iter(
+        &mut self,
+        stream: Self::TokenStream,
+    ) -> Vec<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
+        // XXX: This is a raw port of the previous approach, and can probably be
+        // optimized.
+        let mut cursor = stream.into_trees();
+        let mut stack = Vec::new();
+        let mut tts = Vec::new();
         loop {
-            let tree = iter.stack.pop().or_else(|| {
-                let next = iter.cursor.next_with_spacing()?;
-                Some(TokenTree::from_internal((next, &mut iter.stack, self)))
-            })?;
-            // A hack used to pass AST fragments to attribute and derive macros
-            // as a single nonterminal token instead of a token stream.
-            // Such token needs to be "unwrapped" and not represented as a delimited group.
-            // FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
-            if let TokenTree::Group(ref group) = tree {
-                if group.flatten {
-                    iter.cursor.append(group.stream.clone());
-                    continue;
+            let next = stack.pop().or_else(|| {
+                let next = cursor.next_with_spacing()?;
+                Some(TokenTree::from_internal((next, &mut stack, self)))
+            });
+            match next {
+                Some(TokenTree::Group(group)) => {
+                    // A hack used to pass AST fragments to attribute and derive
+                    // macros as a single nonterminal token instead of a token
+                    // stream. Such token needs to be "unwrapped" and not
+                    // represented as a delimited group.
+                    // FIXME: It needs to be removed, but there are some
                    // compatibility issues (see #73345).
+                    if group.flatten {
+                        cursor.append(group.stream);
+                        continue;
+                    }
+                    tts.push(TokenTree::Group(group));
                 }
+                Some(tt) => tts.push(tt),
+                None => return tts,
             }
-            return Some(tree);
         }
     }
 }
 
 impl server::Group for Rustc<'_, '_> {
-    fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
+    fn new(&mut self, delimiter: Delimiter, stream: Option<Self::TokenStream>) -> Self::Group {
         Group {
             delimiter,
-            stream,
+            stream: stream.unwrap_or_default(),
             span: DelimSpan::from_single(server::Span::call_site(self)),
             flatten: false,
         }

File 2 of 5

@@ -178,8 +178,6 @@ define_handles! {
     'owned:
     FreeFunctions,
     TokenStream,
-    TokenStreamBuilder,
-    TokenStreamIter,
     Group,
     Literal,
     SourceFile,
@@ -204,12 +202,6 @@ impl Clone for TokenStream {
     }
 }
 
-impl Clone for TokenStreamIter {
-    fn clone(&self) -> Self {
-        self.clone()
-    }
-}
-
 impl Clone for Group {
     fn clone(&self) -> Self {
         self.clone()
@@ -435,7 +427,11 @@ impl Client<crate::TokenStream, crate::TokenStream> {
         Client {
             get_handle_counters: HandleCounters::get,
             run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
-                run_client(bridge, |input| f(crate::TokenStream(input)).0)
+                run_client(bridge, |input| {
+                    f(crate::TokenStream(Some(input)))
+                        .0
+                        .unwrap_or_else(|| TokenStream::concat_streams(None, vec![]))
+                })
             }),
             _marker: PhantomData,
         }
@@ -450,7 +446,9 @@ impl Client<(crate::TokenStream, crate::TokenStream), crate::TokenStream> {
             get_handle_counters: HandleCounters::get,
             run: super::selfless_reify::reify_to_extern_c_fn_hrt_bridge(move |bridge| {
                 run_client(bridge, |(input, input2)| {
-                    f(crate::TokenStream(input), crate::TokenStream(input2)).0
+                    f(crate::TokenStream(Some(input)), crate::TokenStream(Some(input2)))
+                        .0
+                        .unwrap_or_else(|| TokenStream::concat_streams(None, vec![]))
                 })
             }),
             _marker: PhantomData,

File 3 of 5

@@ -60,7 +60,6 @@ macro_rules! with_api {
            TokenStream {
                fn drop($self: $S::TokenStream);
                fn clone($self: &$S::TokenStream) -> $S::TokenStream;
-               fn new() -> $S::TokenStream;
                fn is_empty($self: &$S::TokenStream) -> bool;
                fn expand_expr($self: &$S::TokenStream) -> Result<$S::TokenStream, ()>;
                fn from_str(src: &str) -> $S::TokenStream;
@@ -68,25 +67,22 @@ macro_rules! with_api {
                fn from_token_tree(
                    tree: TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>,
                ) -> $S::TokenStream;
-               fn into_iter($self: $S::TokenStream) -> $S::TokenStreamIter;
-           },
-           TokenStreamBuilder {
-               fn drop($self: $S::TokenStreamBuilder);
-               fn new() -> $S::TokenStreamBuilder;
-               fn push($self: &mut $S::TokenStreamBuilder, stream: $S::TokenStream);
-               fn build($self: $S::TokenStreamBuilder) -> $S::TokenStream;
-           },
-           TokenStreamIter {
-               fn drop($self: $S::TokenStreamIter);
-               fn clone($self: &$S::TokenStreamIter) -> $S::TokenStreamIter;
-               fn next(
-                   $self: &mut $S::TokenStreamIter,
-               ) -> Option<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
+               fn concat_trees(
+                   base: Option<$S::TokenStream>,
+                   trees: Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>,
+               ) -> $S::TokenStream;
+               fn concat_streams(
+                   base: Option<$S::TokenStream>,
+                   trees: Vec<$S::TokenStream>,
+               ) -> $S::TokenStream;
+               fn into_iter(
+                   $self: $S::TokenStream
+               ) -> Vec<TokenTree<$S::Group, $S::Punct, $S::Ident, $S::Literal>>;
            },
            Group {
                fn drop($self: $S::Group);
                fn clone($self: &$S::Group) -> $S::Group;
-               fn new(delimiter: Delimiter, stream: $S::TokenStream) -> $S::Group;
+               fn new(delimiter: Delimiter, stream: Option<$S::TokenStream>) -> $S::Group;
                fn delimiter($self: &$S::Group) -> Delimiter;
                fn stream($self: &$S::Group) -> $S::TokenStream;
                fn span($self: &$S::Group) -> $S::Span;

File 4 of 5

@@ -8,8 +8,6 @@ use super::client::HandleStore;
 pub trait Types {
     type FreeFunctions: 'static;
     type TokenStream: 'static + Clone;
-    type TokenStreamBuilder: 'static;
-    type TokenStreamIter: 'static + Clone;
     type Group: 'static + Clone;
     type Punct: 'static + Copy + Eq + Hash;
     type Ident: 'static + Copy + Eq + Hash;

File 5 of 5

@@ -43,7 +43,7 @@ use std::cmp::Ordering;
 use std::ops::RangeBounds;
 use std::path::PathBuf;
 use std::str::FromStr;
-use std::{error, fmt, iter, mem};
+use std::{error, fmt, iter};
 
 /// Determines whether proc_macro has been made accessible to the currently
 /// running program.
@@ -72,7 +72,7 @@ pub fn is_available() -> bool {
 /// and `#[proc_macro_derive]` definitions.
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 #[derive(Clone)]
-pub struct TokenStream(bridge::client::TokenStream);
+pub struct TokenStream(Option<bridge::client::TokenStream>);
 
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl !Send for TokenStream {}
@@ -126,13 +126,13 @@ impl TokenStream {
     /// Returns an empty `TokenStream` containing no token trees.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn new() -> TokenStream {
-        TokenStream(bridge::client::TokenStream::new())
+        TokenStream(None)
     }
 
     /// Checks if this `TokenStream` is empty.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn is_empty(&self) -> bool {
-        self.0.is_empty()
+        self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
     }
 
     /// Parses this `TokenStream` as an expression and attempts to expand any
@@ -147,8 +147,9 @@ impl TokenStream {
     /// considered errors, is unspecified and may change in the future.
     #[unstable(feature = "proc_macro_expand", issue = "90765")]
     pub fn expand_expr(&self) -> Result<TokenStream, ExpandError> {
-        match bridge::client::TokenStream::expand_expr(&self.0) {
-            Ok(stream) => Ok(TokenStream(stream)),
+        let stream = self.0.as_ref().ok_or(ExpandError)?;
+        match bridge::client::TokenStream::expand_expr(stream) {
+            Ok(stream) => Ok(TokenStream(Some(stream))),
             Err(_) => Err(ExpandError),
         }
     }
@@ -166,7 +167,7 @@ impl FromStr for TokenStream {
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        Ok(TokenStream(bridge::client::TokenStream::from_str(src)))
+        Ok(TokenStream(Some(bridge::client::TokenStream::from_str(src))))
     }
 }
 
@@ -175,7 +176,7 @@ impl FromStr for TokenStream {
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl ToString for TokenStream {
     fn to_string(&self) -> String {
-        self.0.to_string()
+        self.0.as_ref().map(|t| t.to_string()).unwrap_or_default()
     }
 }
 
@@ -208,16 +209,27 @@ impl Default for TokenStream {
 #[unstable(feature = "proc_macro_quote", issue = "54722")]
 pub use quote::{quote, quote_span};
 
+fn tree_to_bridge_tree(
+    tree: TokenTree,
+) -> bridge::TokenTree<
+    bridge::client::Group,
+    bridge::client::Punct,
+    bridge::client::Ident,
+    bridge::client::Literal,
+> {
+    match tree {
+        TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
+        TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
+        TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
+        TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
+    }
+}
+
 /// Creates a token stream containing a single token tree.
 #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
 impl From<TokenTree> for TokenStream {
     fn from(tree: TokenTree) -> TokenStream {
-        TokenStream(bridge::client::TokenStream::from_token_tree(match tree {
-            TokenTree::Group(tt) => bridge::TokenTree::Group(tt.0),
-            TokenTree::Punct(tt) => bridge::TokenTree::Punct(tt.0),
-            TokenTree::Ident(tt) => bridge::TokenTree::Ident(tt.0),
-            TokenTree::Literal(tt) => bridge::TokenTree::Literal(tt.0),
-        }))
+        TokenStream(Some(bridge::client::TokenStream::from_token_tree(tree_to_bridge_tree(tree))))
     }
 }
 
@@ -225,7 +237,10 @@ impl From<TokenTree> for TokenStream {
 #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
 impl iter::FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(trees: I) -> Self {
-        trees.into_iter().map(TokenStream::from).collect()
+        TokenStream(Some(bridge::client::TokenStream::concat_trees(
+            None,
+            trees.into_iter().map(tree_to_bridge_tree).collect(),
+        )))
     }
 }
 
@@ -234,24 +249,30 @@ impl iter::FromIterator<TokenTree> for TokenStream {
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl iter::FromIterator<TokenStream> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenStream>>(streams: I) -> Self {
-        let mut builder = bridge::client::TokenStreamBuilder::new();
-        streams.into_iter().for_each(|stream| builder.push(stream.0));
-        TokenStream(builder.build())
+        TokenStream(Some(bridge::client::TokenStream::concat_streams(
+            None,
+            streams.into_iter().filter_map(|stream| stream.0).collect(),
+        )))
     }
 }
 
 #[stable(feature = "token_stream_extend", since = "1.30.0")]
 impl Extend<TokenTree> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
-        self.extend(trees.into_iter().map(TokenStream::from));
+        *self = TokenStream(Some(bridge::client::TokenStream::concat_trees(
+            self.0.take(),
+            trees.into_iter().map(|tree| tree_to_bridge_tree(tree)).collect(),
+        )));
     }
 }
 
 #[stable(feature = "token_stream_extend", since = "1.30.0")]
 impl Extend<TokenStream> for TokenStream {
     fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
-        // FIXME(eddyb) Use an optimized implementation if/when possible.
-        *self = iter::once(mem::replace(self, Self::new())).chain(streams).collect();
+        *self = TokenStream(Some(bridge::client::TokenStream::concat_streams(
+            self.0.take(),
+            streams.into_iter().filter_map(|stream| stream.0).collect(),
+        )));
     }
 }
@@ -265,7 +286,16 @@ pub mod token_stream {
     /// and returns whole groups as token trees.
     #[derive(Clone)]
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
-    pub struct IntoIter(bridge::client::TokenStreamIter);
+    pub struct IntoIter(
+        std::vec::IntoIter<
+            bridge::TokenTree<
+                bridge::client::Group,
+                bridge::client::Punct,
+                bridge::client::Ident,
+                bridge::client::Literal,
+            >,
+        >,
+    );
 
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     impl Iterator for IntoIter {
@@ -287,7 +317,7 @@ pub mod token_stream {
         type IntoIter = IntoIter;
 
         fn into_iter(self) -> IntoIter {
-            IntoIter(self.0.into_iter())
+            IntoIter(self.0.map(|v| v.into_iter()).unwrap_or_default().into_iter())
         }
     }
 }
@@ -734,7 +764,7 @@ impl Group {
     /// returned above.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn stream(&self) -> TokenStream {
-        TokenStream(self.0.stream())
+        TokenStream(Some(self.0.stream()))
    }
 
     /// Returns the span for the delimiters of this token stream, spanning the
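A note on the client-side representation: `TokenStream` now wraps `Option<bridge::client::TokenStream>`, with `None` standing for the empty stream, so `TokenStream::new()` and `is_empty()` on an empty stream involve no bridge traffic at all. A minimal standalone sketch of that pattern follows; the `RemoteHandle` and `LazyStream` names are made up for illustration and are not part of the commit.

// Sketch only: `RemoteHandle` stands in for a resource that is expensive to
// create because it lives on the other side of an RPC boundary.
struct RemoteHandle(Vec<String>);

impl RemoteHandle {
    fn create() -> Self {
        // Imagine this requires a cross-thread round trip.
        RemoteHandle(Vec::new())
    }
    fn is_empty(&self) -> bool {
        self.0.is_empty()
    }
}

// The wrapper treats `None` as "empty", so constructing an empty value and
// querying it never touches the remote side.
struct LazyStream(Option<RemoteHandle>);

impl LazyStream {
    fn new() -> Self {
        LazyStream(None) // no remote call here
    }
    fn is_empty(&self) -> bool {
        self.0.as_ref().map(|h| h.is_empty()).unwrap_or(true)
    }
    fn force(&mut self) -> &mut RemoteHandle {
        // Only materialize the remote handle when it is actually needed.
        self.0.get_or_insert_with(RemoteHandle::create)
    }
}

fn main() {
    let mut s = LazyStream::new();
    assert!(s.is_empty());
    s.force().0.push("token".to_string());
    assert!(!s.is_empty());
}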