TokenStream::extend
parent 0aa8d03202
commit 2fa1da9919
4 changed files with 284 additions and 16 deletions
@@ -179,6 +179,20 @@ impl iter::FromIterator<TokenStream> for TokenStream {
     }
 }
 
+#[stable(feature = "token_stream_extend", since = "1.30.0")]
+impl Extend<TokenTree> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenTree>>(&mut self, trees: I) {
+        self.extend(trees.into_iter().map(TokenStream::from));
+    }
+}
+
+#[stable(feature = "token_stream_extend", since = "1.30.0")]
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, streams: I) {
+        self.0.extend(streams.into_iter().map(|stream| stream.0));
+    }
+}
+
 /// Public implementation details for the `TokenStream` type, such as iterators.
 #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
 pub mod token_stream {
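The two impls above are what ship as `Extend` for `proc_macro::TokenStream` in 1.30.0. As a hedged usage sketch (not part of this commit; the macro name and the appended tokens are invented for illustration), a function-like procedural macro could append tokens to its input like this:

// Hypothetical proc-macro crate exercising the new Extend impls (Rust 1.30+).
extern crate proc_macro;
use proc_macro::{Punct, Spacing, TokenStream, TokenTree};

#[proc_macro]
pub fn append_demo(input: TokenStream) -> TokenStream {
    let mut out = input;

    // Extend<TokenTree>: append individual token trees.
    out.extend(vec![TokenTree::Punct(Punct::new(';', Spacing::Alone))]);

    // Extend<TokenStream>: append whole streams.
    out.extend(vec!["()".parse::<TokenStream>().unwrap()]);

    out
}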
@@ -130,6 +130,9 @@ pub mod util {
 
     mod rc_slice;
     pub use self::rc_slice::RcSlice;
+
+    mod rc_vec;
+    pub use self::rc_vec::RcVec;
 }
 
 pub mod json;
@@ -29,7 +29,7 @@ use parse::Directory;
 use parse::token::{self, Token};
 use print::pprust;
 use serialize::{Decoder, Decodable, Encoder, Encodable};
-use util::RcSlice;
+use util::RcVec;
 
 use std::borrow::Cow;
 use std::{fmt, iter, mem};
@@ -221,7 +221,7 @@ impl TokenStream {
                 new_slice.extend_from_slice(parts.0);
                 new_slice.push(comma);
                 new_slice.extend_from_slice(parts.1);
-                let slice = RcSlice::new(new_slice);
+                let slice = RcVec::new(new_slice);
                 return Some((TokenStream { kind: TokenStreamKind::Stream(slice) }, sp));
             }
         }
@@ -234,7 +234,7 @@ enum TokenStreamKind {
     Empty,
     Tree(TokenTree),
     JointTree(TokenTree),
-    Stream(RcSlice<TokenStream>),
+    Stream(RcVec<TokenStream>),
 }
 
 impl From<TokenTree> for TokenStream {
@@ -255,6 +255,60 @@ impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     }
 }
 
+impl Extend<TokenStream> for TokenStream {
+    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, iter: I) {
+        let iter = iter.into_iter();
+        let kind = mem::replace(&mut self.kind, TokenStreamKind::Empty);
+
+        // Vector of token streams originally in self.
+        let tts: Vec<TokenStream> = match kind {
+            TokenStreamKind::Empty => {
+                let mut vec = Vec::new();
+                vec.reserve(iter.size_hint().0);
+                vec
+            }
+            TokenStreamKind::Tree(_) | TokenStreamKind::JointTree(_) => {
+                let mut vec = Vec::new();
+                vec.reserve(1 + iter.size_hint().0);
+                vec.push(TokenStream { kind });
+                vec
+            }
+            TokenStreamKind::Stream(rc_vec) => match RcVec::try_unwrap(rc_vec) {
+                Ok(mut vec) => {
+                    // Extend in place using the existing capacity if possible.
+                    // This is the fast path for libraries like `quote` that
+                    // build a token stream.
+                    vec.reserve(iter.size_hint().0);
+                    vec
+                }
+                Err(rc_vec) => {
+                    // Self is shared so we need to copy and extend that.
+                    let mut vec = Vec::new();
+                    vec.reserve(rc_vec.len() + iter.size_hint().0);
+                    vec.extend_from_slice(&rc_vec);
+                    vec
+                }
+            }
+        };
+
+        // Perform the extend, joining tokens as needed along the way.
+        let mut builder = TokenStreamBuilder(tts);
+        for stream in iter {
+            builder.push(stream);
+        }
+
+        // Build the resulting token stream. If it contains more than one token,
+        // preserve capacity in the vector in anticipation of the caller
+        // performing additional calls to extend.
+        let mut tts = builder.0;
+        *self = match tts.len() {
+            0 => TokenStream::empty(),
+            1 => tts.pop().unwrap(),
+            _ => TokenStream::concat_rc_vec(RcVec::new_preserving_capacity(tts)),
+        };
+    }
+}
+
 impl Eq for TokenStream {}
 
 impl PartialEq<TokenStream> for TokenStream {
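The `TokenStreamKind::Stream` arm above is the in-place fast path: when this stream is the sole owner of its backing vector, `RcVec::try_unwrap` hands the vector back and the extend reuses its capacity; when the stream is shared, the contents are copied first. A standalone sketch of that reuse-or-copy pattern using `std::rc::Rc` (illustrative only, not code from this commit):

use std::rc::Rc;

// Reuse the buffer when uniquely owned, otherwise clone its contents,
// then extend whichever vector we ended up with.
fn extend_shared(buf: Rc<Vec<u32>>, extra: &[u32]) -> Rc<Vec<u32>> {
    let mut vec = match Rc::try_unwrap(buf) {
        Ok(vec) => vec,                   // sole owner: reuse the allocation
        Err(shared) => (*shared).clone(), // shared: copy, then extend the copy
    };
    vec.extend_from_slice(extra);
    Rc::new(vec)
}

fn main() {
    let a = Rc::new(vec![1, 2]);
    let b = Rc::clone(&a);            // shared now, so extending must copy
    let c = extend_shared(b, &[3]);
    assert_eq!(*c, vec![1, 2, 3]);
    assert_eq!(*a, vec![1, 2]);       // the original is untouched
}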
@@ -287,11 +341,11 @@ impl TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
             1 => streams.pop().unwrap(),
-            _ => TokenStream::concat_rc_slice(RcSlice::new(streams)),
+            _ => TokenStream::concat_rc_vec(RcVec::new(streams)),
         }
     }
 
-    fn concat_rc_slice(streams: RcSlice<TokenStream>) -> TokenStream {
+    fn concat_rc_vec(streams: RcVec<TokenStream>) -> TokenStream {
         TokenStream { kind: TokenStreamKind::Stream(streams) }
     }
 
@@ -434,7 +488,7 @@ impl TokenStreamBuilder {
             match len {
                 1 => {}
                 2 => self.0.push(streams[0].clone().into()),
-                _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(0 .. len - 1))),
+                _ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(0 .. len - 1))),
             }
             self.push_all_but_last_tree(&streams[len - 1])
         }
@@ -446,7 +500,7 @@ impl TokenStreamBuilder {
             match len {
                 1 => {}
                 2 => self.0.push(streams[1].clone().into()),
-                _ => self.0.push(TokenStream::concat_rc_slice(streams.sub_slice(1 .. len))),
+                _ => self.0.push(TokenStream::concat_rc_vec(streams.sub_slice(1 .. len))),
             }
             self.push_all_but_first_tree(&streams[0])
         }
@@ -466,13 +520,13 @@ enum CursorKind {
 
 #[derive(Clone)]
 struct StreamCursor {
-    stream: RcSlice<TokenStream>,
+    stream: RcVec<TokenStream>,
     index: usize,
-    stack: Vec<(RcSlice<TokenStream>, usize)>,
+    stack: Vec<(RcVec<TokenStream>, usize)>,
 }
 
 impl StreamCursor {
-    fn new(stream: RcSlice<TokenStream>) -> Self {
+    fn new(stream: RcVec<TokenStream>) -> Self {
         StreamCursor { stream: stream, index: 0, stack: Vec::new() }
     }
 
@@ -495,7 +549,7 @@ impl StreamCursor {
         }
     }
 
-    fn insert(&mut self, stream: RcSlice<TokenStream>) {
+    fn insert(&mut self, stream: RcVec<TokenStream>) {
         self.stack.push((mem::replace(&mut self.stream, stream),
                          mem::replace(&mut self.index, 0)));
     }
@@ -557,7 +611,7 @@ impl Cursor {
             CursorKind::Empty => TokenStream::empty(),
             CursorKind::Tree(ref tree, _) => tree.clone().into(),
             CursorKind::JointTree(ref tree, _) => tree.clone().joint(),
-            CursorKind::Stream(ref cursor) => TokenStream::concat_rc_slice({
+            CursorKind::Stream(ref cursor) => TokenStream::concat_rc_vec({
                 cursor.stack.get(0).cloned().map(|(stream, _)| stream)
                     .unwrap_or(cursor.stream.clone())
             }),
@@ -607,14 +661,14 @@ impl Cursor {
 /// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
 /// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
 #[derive(Debug, Clone)]
-pub struct ThinTokenStream(Option<RcSlice<TokenStream>>);
+pub struct ThinTokenStream(Option<RcVec<TokenStream>>);
 
 impl From<TokenStream> for ThinTokenStream {
     fn from(stream: TokenStream) -> ThinTokenStream {
         ThinTokenStream(match stream.kind {
             TokenStreamKind::Empty => None,
-            TokenStreamKind::Tree(tree) => Some(RcSlice::new(vec![tree.into()])),
-            TokenStreamKind::JointTree(tree) => Some(RcSlice::new(vec![tree.joint()])),
+            TokenStreamKind::Tree(tree) => Some(RcVec::new(vec![tree.into()])),
+            TokenStreamKind::JointTree(tree) => Some(RcVec::new(vec![tree.joint()])),
             TokenStreamKind::Stream(stream) => Some(stream),
         })
     }
@@ -622,7 +676,7 @@ impl From<TokenStream> for ThinTokenStream {
 
 impl From<ThinTokenStream> for TokenStream {
     fn from(stream: ThinTokenStream) -> TokenStream {
-        stream.0.map(TokenStream::concat_rc_slice).unwrap_or_else(TokenStream::empty)
+        stream.0.map(TokenStream::concat_rc_vec).unwrap_or_else(TokenStream::empty)
     }
 }
 
@@ -773,4 +827,106 @@ mod tests {
         assert_eq!(stream.trees().count(), 1);
     }
 
+    #[test]
+    fn test_extend_empty() {
+        with_globals(|| {
+            // Append a token onto an empty token stream.
+            let mut stream = TokenStream::empty();
+            stream.extend(vec![string_to_ts("t")]);
+
+            let expected = string_to_ts("t");
+            assert!(stream.eq_unspanned(&expected));
+        });
+    }
+
+    #[test]
+    fn test_extend_nothing() {
+        with_globals(|| {
+            // Append nothing onto a token stream containing one token.
+            let mut stream = string_to_ts("t");
+            stream.extend(vec![]);
+
+            let expected = string_to_ts("t");
+            assert!(stream.eq_unspanned(&expected));
+        });
+    }
+
+    #[test]
+    fn test_extend_single() {
+        with_globals(|| {
+            // Append a token onto token stream containing a single token.
+            let mut stream = string_to_ts("t1");
+            stream.extend(vec![string_to_ts("t2")]);
+
+            let expected = string_to_ts("t1 t2");
+            assert!(stream.eq_unspanned(&expected));
+        });
+    }
+
+    #[test]
+    fn test_extend_in_place() {
+        with_globals(|| {
+            // Append a token onto token stream containing a reference counted
+            // vec of tokens. The token stream has a reference count of 1 so
+            // this can happen in place.
+            let mut stream = string_to_ts("t1 t2");
+            stream.extend(vec![string_to_ts("t3")]);
+
+            let expected = string_to_ts("t1 t2 t3");
+            assert!(stream.eq_unspanned(&expected));
+        });
+    }
+
+    #[test]
+    fn test_extend_copy() {
+        with_globals(|| {
+            // Append a token onto token stream containing a reference counted
+            // vec of tokens. The token stream is shared so the extend takes
+            // place on a copy.
+            let mut stream = string_to_ts("t1 t2");
+            let _incref = stream.clone();
+            stream.extend(vec![string_to_ts("t3")]);
+
+            let expected = string_to_ts("t1 t2 t3");
+            assert!(stream.eq_unspanned(&expected));
+        });
+    }
+
+    #[test]
+    fn test_extend_no_join() {
+        with_globals(|| {
+            let first = TokenTree::Token(DUMMY_SP, Token::Dot);
+            let second = TokenTree::Token(DUMMY_SP, Token::Dot);
+
+            // Append a dot onto a token stream containing a dot, but do not
+            // join them.
+            let mut stream = TokenStream::from(first);
+            stream.extend(vec![TokenStream::from(second)]);
+
+            let expected = string_to_ts(". .");
+            assert!(stream.eq_unspanned(&expected));
+
+            let unexpected = string_to_ts("..");
+            assert!(!stream.eq_unspanned(&unexpected));
+        });
+    }
+
+    #[test]
+    fn test_extend_join() {
+        with_globals(|| {
+            let first = TokenTree::Token(DUMMY_SP, Token::Dot).joint();
+            let second = TokenTree::Token(DUMMY_SP, Token::Dot);
+
+            // Append a dot onto a token stream containing a dot, forming a
+            // dotdot.
+            let mut stream = first;
+            stream.extend(vec![TokenStream::from(second)]);
+
+            let expected = string_to_ts("..");
+            assert!(stream.eq_unspanned(&expected));
+
+            let unexpected = string_to_ts(". .");
+            assert!(!stream.eq_unspanned(&unexpected));
+        });
+    }
 }
src/libsyntax/util/rc_vec.rs (new file, 95 lines)
@@ -0,0 +1,95 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::fmt;
+use std::ops::{Deref, Range};
+
+use rustc_data_structures::stable_hasher::{HashStable, StableHasher, StableHasherResult};
+use rustc_data_structures::sync::Lrc;
+
+#[derive(Clone)]
+pub struct RcVec<T> {
+    data: Lrc<Vec<T>>,
+    offset: u32,
+    len: u32,
+}
+
+impl<T> RcVec<T> {
+    pub fn new(mut vec: Vec<T>) -> Self {
+        // By default, constructing RcVec from Vec gives it just enough capacity
+        // to hold the initial elements. Callers that anticipate needing to
+        // extend the vector may prefer RcVec::new_preserving_capacity.
+        vec.shrink_to_fit();
+
+        RcVec {
+            offset: 0,
+            len: vec.len() as u32,
+            data: Lrc::new(vec),
+        }
+    }
+
+    pub fn new_preserving_capacity(vec: Vec<T>) -> Self {
+        RcVec {
+            offset: 0,
+            len: vec.len() as u32,
+            data: Lrc::new(vec),
+        }
+    }
+
+    pub fn sub_slice(&self, range: Range<usize>) -> Self {
+        RcVec {
+            data: self.data.clone(),
+            offset: self.offset + range.start as u32,
+            len: (range.end - range.start) as u32,
+        }
+    }
+
+    /// If this RcVec has exactly one strong reference, returns ownership of the
+    /// underlying vector. Otherwise returns self unmodified.
+    pub fn try_unwrap(self) -> Result<Vec<T>, Self> {
+        match Lrc::try_unwrap(self.data) {
+            // If no other RcVec shares ownership of this data.
+            Ok(mut vec) => {
+                // Drop any elements after our view of the data.
+                vec.truncate(self.offset as usize + self.len as usize);
+                // Drop any elements before our view of the data.
+                if self.offset != 0 {
+                    vec.drain(..self.offset as usize);
+                }
+                Ok(vec)
+            }
+
+            // If the data is shared.
+            Err(data) => Err(RcVec { data, ..self }),
+        }
+    }
+}
+
+impl<T> Deref for RcVec<T> {
+    type Target = [T];
+    fn deref(&self) -> &[T] {
+        &self.data[self.offset as usize..(self.offset + self.len) as usize]
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for RcVec<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Debug::fmt(self.deref(), f)
+    }
+}
+
+impl<CTX, T> HashStable<CTX> for RcVec<T>
+where
+    T: HashStable<CTX>,
+{
+    fn hash_stable<W: StableHasherResult>(&self, hcx: &mut CTX, hasher: &mut StableHasher<W>) {
+        (**self).hash_stable(hcx, hasher);
+    }
+}
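As a reading aid, here is a hypothetical unit test (not part of the commit) for the `sub_slice` and `try_unwrap` semantics defined above; it assumes it sits in a `#[cfg(test)]` module next to this file inside libsyntax:

#[test]
fn rc_vec_sub_slice_and_try_unwrap() {
    let full = RcVec::new(vec![10, 20, 30, 40]);
    let middle = full.sub_slice(1..3); // shares the same backing allocation
    assert_eq!(&*middle, &[20, 30]);

    // While `full` is alive the data is shared, so try_unwrap returns Err(self).
    let middle = middle.try_unwrap().unwrap_err();

    // Once this view is the sole owner, try_unwrap trims the Vec down to the view.
    drop(full);
    assert_eq!(middle.try_unwrap().unwrap(), vec![20, 30]);
}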