Rename `TokenStream::concat` and remove `TokenStream::concat_rc_vec`.

`TokenStream::new` is a better name for the former, and the latter is now just equivalent to `TokenStream::Stream`.
commit e80c7ddb05 (parent 07c12fa89e)
8 changed files with 30 additions and 34 deletions
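At a glance, the shape of the change (a minimal sketch assembled from the diff below; `Lrc` is the reference-counted pointer alias used in rustc, and the caller lines are illustrative rather than taken from the patch):

```rust
// Before this commit, `TokenStream` had two constructors:
//   pub fn concat(streams: Vec<TokenStream>) -> TokenStream
//   fn concat_rc_vec(streams: Lrc<Vec<TokenStream>>) -> TokenStream
// The second merely wrapped its argument in the `Stream` variant.
//
// After this commit, `concat` becomes `new` and `concat_rc_vec` is gone;
// callers construct `TokenStream::Stream(..)` directly.
impl TokenStream {
    pub fn new(mut streams: Vec<TokenStream>) -> TokenStream {
        match streams.len() {
            0 => TokenStream::empty(),
            1 => streams.pop().unwrap(),
            _ => TokenStream::Stream(Lrc::new(streams)),
        }
    }
}

// Illustrative caller migration:
//   TokenStream::concat(vec![a, b])      ->  TokenStream::new(vec![a, b])
//   TokenStream::concat_rc_vec(lrc_vec)  ->  TokenStream::Stream(lrc_vec)
```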
@@ -483,7 +483,7 @@ impl MetaItem {
             last_pos = segment.ident.span.hi();
         }
         idents.push(self.node.tokens(self.span));
-        TokenStream::concat(idents)
+        TokenStream::new(idents)
     }

     fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItem>
@@ -539,7 +539,7 @@ impl MetaItemKind {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                TokenStream::concat(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
+                TokenStream::new(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
             }
             MetaItemKind::List(ref list) => {
                 let mut tokens = Vec::new();
@@ -552,7 +552,7 @@ impl MetaItemKind {
                 TokenTree::Delimited(
                     DelimSpan::from_single(span),
                     token::Paren,
-                    TokenStream::concat(tokens).into(),
+                    TokenStream::new(tokens).into(),
                 ).into()
             }
         }
@@ -247,7 +247,7 @@ pub mod rt {

             let delim_span = DelimSpan::from_single(self.span);
             r.push(TokenTree::Delimited(
-                delim_span, token::Bracket, TokenStream::concat(inner).into()
+                delim_span, token::Bracket, TokenStream::new(inner).into()
             ));
             r
         }
@@ -103,12 +103,12 @@ pub fn transcribe(cx: &ExtCtxt,
                 }
                 Frame::Delimited { forest, span, .. } => {
                     if result_stack.is_empty() {
-                        return TokenStream::concat(result);
+                        return TokenStream::new(result);
                     }
                     let tree = TokenTree::Delimited(
                         span,
                         forest.delim,
-                        TokenStream::concat(result).into(),
+                        TokenStream::new(result).into(),
                     );
                     result = result_stack.pop().unwrap();
                     result.push(tree.into());
@@ -170,7 +170,7 @@ impl<'a> Parser<'a> {
                 token::CloseDelim(_) | token::Eof => self.unexpected()?,
                 _ => self.parse_token_tree(),
             };
-            TokenStream::concat(vec![eq.into(), tree.into()])
+            TokenStream::new(vec![eq.into(), tree.into()])
         } else {
             TokenStream::empty()
         };
@@ -22,7 +22,7 @@ impl<'a> StringReader<'a> {
             tts.push(self.parse_token_tree()?);
         }

-        Ok(TokenStream::concat(tts))
+        Ok(TokenStream::new(tts))
     }

     // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
@@ -30,14 +30,14 @@ impl<'a> StringReader<'a> {
         let mut tts = vec![];
         loop {
             if let token::CloseDelim(..) = self.token {
-                return TokenStream::concat(tts);
+                return TokenStream::new(tts);
             }

             match self.parse_token_tree() {
                 Ok(tree) => tts.push(tree),
                 Err(mut e) => {
                     e.emit();
-                    return TokenStream::concat(tts);
+                    return TokenStream::new(tts);
                 }
             }
         }
@@ -842,13 +842,13 @@ mod tests {
         with_globals(|| {
             let tts = string_to_stream("fn a (b : i32) { b; }".to_string());

-            let expected = TokenStream::concat(vec![
+            let expected = TokenStream::new(vec![
                 TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
                 TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     token::DelimToken::Paren,
-                    TokenStream::concat(vec![
+                    TokenStream::new(vec![
                         TokenTree::Token(sp(6, 7),
                                          token::Ident(Ident::from_str("b"), false)).into(),
                         TokenTree::Token(sp(8, 9), token::Colon).into(),
@@ -859,7 +859,7 @@ mod tests {
                 TokenTree::Delimited(
                     DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     token::DelimToken::Brace,
-                    TokenStream::concat(vec![
+                    TokenStream::new(vec![
                         TokenTree::Token(sp(17, 18),
                                          token::Ident(Ident::from_str("b"), false)).into(),
                         TokenTree::Token(sp(18, 19), token::Semi).into(),
@@ -2928,7 +2928,7 @@ impl<'a> Parser<'a> {
                 _ => result.push(self.parse_token_tree().into()),
             }
         }
-        TokenStream::concat(result)
+        TokenStream::new(result)
     }

     /// Parse a prefix-unary-operator expr
@@ -4624,7 +4624,7 @@ impl<'a> Parser<'a> {
                 self.unexpected()?;
                 unreachable!()
             };
-            TokenStream::concat(vec![
+            TokenStream::new(vec![
                 args.into(),
                 TokenTree::Token(token_lo.to(self.prev_span), token::FatArrow).into(),
                 body.into(),
@@ -195,7 +195,7 @@ impl TokenStream {
                 new_stream.extend_from_slice(parts.0);
                 new_stream.push(comma);
                 new_stream.extend_from_slice(parts.1);
-                return Some((TokenStream::concat(new_stream), sp));
+                return Some((TokenStream::new(new_stream), sp));
             }
         }
         None
@@ -216,7 +216,7 @@ impl From<Token> for TokenStream {

 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
-        TokenStream::concat(iter.into_iter().map(Into::into).collect::<Vec<_>>())
+        TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<_>>())
     }
 }

@@ -265,7 +265,7 @@ impl Extend<TokenStream> for TokenStream {
         // Build the resulting token stream. If it contains more than one token,
         // preserve capacity in the vector in anticipation of the caller
         // performing additional calls to extend.
-        *self = TokenStream::concat(builder.0);
+        *self = TokenStream::new(builder.0);
     }
 }

@@ -297,18 +297,14 @@ impl TokenStream {
         }
     }

-    pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream {
+    pub fn new(mut streams: Vec<TokenStream>) -> TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
             1 => streams.pop().unwrap(),
-            _ => TokenStream::concat_rc_vec(Lrc::new(streams)),
+            _ => TokenStream::Stream(Lrc::new(streams)),
         }
     }

-    fn concat_rc_vec(streams: Lrc<Vec<TokenStream>>) -> TokenStream {
-        TokenStream::Stream(streams)
-    }
-
     pub fn trees(&self) -> Cursor {
         self.clone().into_trees()
     }
@@ -389,7 +385,7 @@ impl TokenStream {
             });
             i += 1;
         }
-        TokenStream::concat(result)
+        TokenStream::new(result)
     }

     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
@@ -402,7 +398,7 @@ impl TokenStream {
                 _ => unreachable!()
             });
         }
-        TokenStream::concat(result)
+        TokenStream::new(result)
     }

     fn first_tree_and_joint(&self) -> Option<(TokenTree, bool)> {
@@ -461,7 +457,7 @@ impl TokenStreamBuilder {
     }

     pub fn build(self) -> TokenStream {
-        TokenStream::concat(self.0)
+        TokenStream::new(self.0)
     }

     fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
@@ -470,7 +466,7 @@ impl TokenStreamBuilder {
             match len {
                 1 => {}
                 2 => self.0.push(streams[0].clone().into()),
-                _ => self.0.push(TokenStream::concat(streams[0 .. len - 1].to_vec())),
+                _ => self.0.push(TokenStream::new(streams[0 .. len - 1].to_vec())),
             }
             self.push_all_but_last_tree(&streams[len - 1])
         }
@@ -482,7 +478,7 @@ impl TokenStreamBuilder {
             match len {
                 1 => {}
                 2 => self.0.push(streams[1].clone().into()),
-                _ => self.0.push(TokenStream::concat(streams[1 .. len].to_vec())),
+                _ => self.0.push(TokenStream::new(streams[1 .. len].to_vec())),
             }
             self.push_all_but_first_tree(&streams[0])
         }
|
@ -577,7 +573,7 @@ impl Cursor {
|
||||||
_ if stream.is_empty() => return,
|
_ if stream.is_empty() => return,
|
||||||
CursorKind::Empty => *self = stream.trees(),
|
CursorKind::Empty => *self = stream.trees(),
|
||||||
CursorKind::Tree(_, consumed) | CursorKind::JointTree(_, consumed) => {
|
CursorKind::Tree(_, consumed) | CursorKind::JointTree(_, consumed) => {
|
||||||
*self = TokenStream::concat(vec![self.original_stream(), stream]).trees();
|
*self = TokenStream::new(vec![self.original_stream(), stream]).trees();
|
||||||
if consumed {
|
if consumed {
|
||||||
self.next();
|
self.next();
|
||||||
}
|
}
|
||||||
|
@ -593,10 +589,10 @@ impl Cursor {
|
||||||
CursorKind::Empty => TokenStream::empty(),
|
CursorKind::Empty => TokenStream::empty(),
|
||||||
CursorKind::Tree(ref tree, _) => tree.clone().into(),
|
CursorKind::Tree(ref tree, _) => tree.clone().into(),
|
||||||
CursorKind::JointTree(ref tree, _) => tree.clone().joint(),
|
CursorKind::JointTree(ref tree, _) => tree.clone().joint(),
|
||||||
CursorKind::Stream(ref cursor) => TokenStream::concat_rc_vec({
|
CursorKind::Stream(ref cursor) => TokenStream::Stream(
|
||||||
cursor.stack.get(0).cloned().map(|(stream, _)| stream)
|
cursor.stack.get(0).cloned().map(|(stream, _)| stream)
|
||||||
.unwrap_or_else(|| cursor.stream.clone())
|
.unwrap_or_else(|| cursor.stream.clone())
|
||||||
}),
|
),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@@ -664,7 +660,7 @@ impl From<TokenStream> for ThinTokenStream {

 impl From<ThinTokenStream> for TokenStream {
     fn from(stream: ThinTokenStream) -> TokenStream {
-        stream.0.map(TokenStream::concat_rc_vec).unwrap_or_else(TokenStream::empty)
+        stream.0.map(TokenStream::Stream).unwrap_or_else(TokenStream::empty)
     }
 }

@@ -763,7 +759,7 @@ mod tests {
             let test_res = string_to_ts("foo::bar::baz");
             let test_fst = string_to_ts("foo::bar");
             let test_snd = string_to_ts("::baz");
-            let eq_res = TokenStream::concat(vec![test_fst, test_snd]);
+            let eq_res = TokenStream::new(vec![test_fst, test_snd]);
             assert_eq!(test_res.trees().count(), 5);
             assert_eq!(eq_res.trees().count(), 5);
             assert_eq!(test_res.eq_unspanned(&eq_res), true);