1
Fork 0

Use SmallVec in TokenStreamBuilder.

This reduces the number of allocations done for a "clean
incremental" build of `webrender_api` by 12%, which reduces the
instruction count by about 0.5%.

It also reduces instruction counts by up to 1.4% across a range of
rustc-perf benchmark runs.
This commit is contained in:
Nicholas Nethercote 2019-03-28 12:27:26 +11:00
parent 4c27fb19ba
commit 17a8aff20a
2 changed files with 10 additions and 7 deletions

View file

@@ -6,6 +6,7 @@ use crate::parse::parser::{Parser, TokenType, PathStyle};
use crate::tokenstream::{TokenStream, TokenTree}; use crate::tokenstream::{TokenStream, TokenTree};
use log::debug; use log::debug;
use smallvec::smallvec;
#[derive(Debug)] #[derive(Debug)]
enum InnerAttributeParsePolicy<'a> { enum InnerAttributeParsePolicy<'a> {
@@ -171,7 +172,7 @@ impl<'a> Parser<'a> {
} else { } else {
self.parse_unsuffixed_lit()?.tokens() self.parse_unsuffixed_lit()?.tokens()
}; };
TokenStream::from_streams(vec![eq.into(), tokens]) TokenStream::from_streams(smallvec![eq.into(), tokens])
} else { } else {
TokenStream::empty() TokenStream::empty()
}; };

View file

@@ -24,6 +24,7 @@ use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
use rustc_data_structures::static_assert; use rustc_data_structures::static_assert;
use rustc_data_structures::sync::Lrc; use rustc_data_structures::sync::Lrc;
use serialize::{Decoder, Decodable, Encoder, Encodable}; use serialize::{Decoder, Decodable, Encoder, Encodable};
use smallvec::{SmallVec, smallvec};
use std::borrow::Cow; use std::borrow::Cow;
use std::{fmt, iter, mem}; use std::{fmt, iter, mem};
@@ -224,7 +225,7 @@ impl From<Token> for TokenStream {
impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream { impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self { fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<Vec<_>>()) TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
} }
} }
@@ -256,7 +257,7 @@ impl TokenStream {
} }
} }
pub(crate) fn from_streams(mut streams: Vec<TokenStream>) -> TokenStream { pub(crate) fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
match streams.len() { match streams.len() {
0 => TokenStream::empty(), 0 => TokenStream::empty(),
1 => streams.pop().unwrap(), 1 => streams.pop().unwrap(),
@@ -393,12 +394,13 @@ impl TokenStream {
} }
} }
// 99.5%+ of the time we have 1 or 2 elements in this vector.
#[derive(Clone)] #[derive(Clone)]
pub struct TokenStreamBuilder(Vec<TokenStream>); pub struct TokenStreamBuilder(SmallVec<[TokenStream; 2]>);
impl TokenStreamBuilder { impl TokenStreamBuilder {
pub fn new() -> TokenStreamBuilder { pub fn new() -> TokenStreamBuilder {
TokenStreamBuilder(Vec::new()) TokenStreamBuilder(SmallVec::new())
} }
pub fn push<T: Into<TokenStream>>(&mut self, stream: T) { pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
@@ -485,7 +487,7 @@ impl Cursor {
} }
let index = self.index; let index = self.index;
let stream = mem::replace(&mut self.stream, TokenStream(None)); let stream = mem::replace(&mut self.stream, TokenStream(None));
*self = TokenStream::from_streams(vec![stream, new_stream]).into_trees(); *self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
self.index = index; self.index = index;
} }
@@ -572,7 +574,7 @@ mod tests {
let test_res = string_to_ts("foo::bar::baz"); let test_res = string_to_ts("foo::bar::baz");
let test_fst = string_to_ts("foo::bar"); let test_fst = string_to_ts("foo::bar");
let test_snd = string_to_ts("::baz"); let test_snd = string_to_ts("::baz");
let eq_res = TokenStream::from_streams(vec![test_fst, test_snd]); let eq_res = TokenStream::from_streams(smallvec![test_fst, test_snd]);
assert_eq!(test_res.trees().count(), 5); assert_eq!(test_res.trees().count(), 5);
assert_eq!(eq_res.trees().count(), 5); assert_eq!(eq_res.trees().count(), 5);
assert_eq!(test_res.eq_unspanned(&eq_res), true); assert_eq!(test_res.eq_unspanned(&eq_res), true);