Remove open_span and close_span from Delimited.

This commit is contained in:
  parent 31417efcd3
  commit 49f5b0a8cf

10 changed files with 35 additions and 55 deletions
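
The diff below drops the stored open_span and close_span fields from Delimited and instead derives the delimiter spans on demand from the span of the enclosing TokenTree::Delimited: the opening token occupies the first delim.len() bytes of that span and the closing token the last delim.len() bytes. A minimal standalone sketch of that arithmetic, using plain u32 byte offsets as stand-ins for rustc's Span and BytePos (the names here are illustrative only):

// Sketch only: `lo`/`hi` stand in for Span.lo/Span.hi, `delim_len` for DelimToken::len().
fn delimiter_spans(lo: u32, hi: u32, delim_len: u32) -> ((u32, u32), (u32, u32)) {
    let open = (lo, lo + delim_len);   // span of the opening delimiter
    let close = (hi - delim_len, hi);  // span of the closing delimiter
    (open, close)
}

fn main() {
    // `(b: i32)` at bytes 5..14 (see the parser test further down):
    // the `(` is bytes 5..6 and the `)` is bytes 13..14.
    assert_eq!(delimiter_spans(5, 14, 1), ((5, 6), (13, 14)));
}
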
@@ -52,8 +52,6 @@ fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
     TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
         delim: delim,
         tts: stream.trees().cloned().collect(),
-        open_span: DUMMY_SP,
-        close_span: DUMMY_SP,
     })).into()
 }
 
@@ -129,8 +127,6 @@ impl Quote for TokenTree {
 impl Quote for Rc<Delimited> {
     fn quote(&self) -> TokenStream {
         quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited {
-            open_span: ::syntax::ext::quote::rt::DUMMY_SP,
-            close_span: ::syntax::ext::quote::rt::DUMMY_SP,
             delim: (quote self.delim),
             tts: (quote self.tts),
         }))
@@ -1034,18 +1034,14 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
                 hash_span!(self, span);
                 let tokenstream::Delimited {
                     ref delim,
-                    open_span,
                     ref tts,
-                    close_span,
                 } = **delimited;
 
                 delim.hash(self.st);
-                hash_span!(self, open_span);
                 tts.len().hash(self.st);
                 for sub_tt in tts {
                     self.hash_token_tree(sub_tt);
                 }
-                hash_span!(self, close_span);
             }
             tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
                 hash_span!(self, span);
@@ -231,9 +231,7 @@ pub mod rt {
             }
             r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
                 delim: token::Bracket,
-                open_span: self.span,
                 tts: self.value.to_tokens(cx),
-                close_span: self.span,
             })));
             r
         }
@@ -250,9 +248,7 @@ pub mod rt {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
             vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
                 delim: token::Paren,
-                open_span: DUMMY_SP,
                 tts: vec![],
-                close_span: DUMMY_SP,
             }))]
         }
     }
@@ -757,11 +753,11 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
                                 vec![e_tok]);
             vec![cx.stmt_expr(e_push)]
         },
-        TokenTree::Delimited(_, ref delimed) => {
-            statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter()
+        TokenTree::Delimited(span, ref delimed) => {
+            statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter()
                 .chain(delimed.tts.iter()
                                   .flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
-                .chain(statements_mk_tt(cx, &delimed.close_tt(), matcher))
+                .chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher))
                 .collect()
         },
         TokenTree::Sequence(sp, ref seq) => {
@@ -350,9 +350,9 @@ impl FirstSets {
                     TokenTree::Token(sp, ref tok) => {
                         first.replace_with((sp, tok.clone()));
                     }
-                    TokenTree::Delimited(_, ref delimited) => {
+                    TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts[..]);
-                        first.replace_with((delimited.open_span,
+                        first.replace_with((delimited.open_tt(span).span(),
                                             Token::OpenDelim(delimited.delim)));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
@@ -410,8 +410,8 @@ impl FirstSets {
                         first.add_one((sp, tok.clone()));
                         return first;
                     }
-                    TokenTree::Delimited(_, ref delimited) => {
-                        first.add_one((delimited.open_span,
+                    TokenTree::Delimited(span, ref delimited) => {
+                        first.add_one((delimited.open_tt(span).span(),
                                        Token::OpenDelim(delimited.delim)));
                         return first;
                     }
@@ -603,8 +603,9 @@ fn check_matcher_core(sess: &ParseSess,
                     suffix_first = build_suffix_first();
                 }
             }
-            TokenTree::Delimited(_, ref d) => {
-                let my_suffix = TokenSet::singleton((d.close_span, Token::CloseDelim(d.delim)));
+            TokenTree::Delimited(span, ref d) => {
+                let my_suffix = TokenSet::singleton((d.close_tt(span).span(),
+                                                     Token::CloseDelim(d.delim)));
                 check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
@@ -543,9 +543,7 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
             TokenTree::Delimited(fld.new_span(span), Rc::new(
                 Delimited {
                     delim: delimed.delim,
-                    open_span: fld.new_span(delimed.open_span),
                     tts: fld.fold_tts(&delimed.tts),
-                    close_span: fld.new_span(delimed.close_span),
                 }
             ))
         },
@@ -59,7 +59,6 @@ impl<'a> StringReader<'a> {
 
                 // Parse the open delimiter.
                 self.open_braces.push((delim, self.span));
-                let open_span = self.span;
                 self.real_token();
 
                 // Parse the token trees within the delimiters.
@@ -67,9 +66,8 @@ impl<'a> StringReader<'a> {
                 // uses an incorrect delimiter.
                 let tts = self.parse_token_trees_until_close_delim();
 
-                let close_span = self.span;
                 // Expand to cover the entire delimited token tree
-                let span = Span { hi: close_span.hi, ..pre_span };
+                let span = Span { hi: self.span.hi, ..pre_span };
 
                 match self.token {
                     // Correct delimiter.
@@ -115,9 +113,7 @@ impl<'a> StringReader<'a> {
 
                 Ok(TokenTree::Delimited(span, Rc::new(Delimited {
                     delim: delim,
-                    open_span: open_span,
                     tts: tts,
-                    close_span: close_span,
                 })))
             },
             token::CloseDelim(_) => {
@@ -725,24 +725,20 @@ mod tests {
                             sp(5, 14),
                             Rc::new(tokenstream::Delimited {
                                 delim: token::DelimToken::Paren,
-                                open_span: sp(5, 6),
                                 tts: vec![
                                     TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
                                     TokenTree::Token(sp(8, 9), token::Colon),
                                     TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
                                 ],
-                                close_span: sp(13, 14),
                             })),
                         TokenTree::Delimited(
                             sp(15, 21),
                             Rc::new(tokenstream::Delimited {
                                 delim: token::DelimToken::Brace,
-                                open_span: sp(15, 16),
                                 tts: vec![
                                     TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
                                     TokenTree::Token(sp(18, 19), token::Semi),
                                 ],
-                                close_span: sp(20, 21),
                             }))
                     ];
 
@@ -254,9 +254,7 @@ impl<'a> Parser<'a> {
                -> Self {
         let tt = TokenTree::Delimited(syntax_pos::DUMMY_SP, Rc::new(Delimited {
             delim: token::NoDelim,
-            open_span: syntax_pos::DUMMY_SP,
             tts: tokens,
-            close_span: syntax_pos::DUMMY_SP,
         }));
         let mut parser = Parser {
             sess: sess,
@@ -2717,7 +2715,7 @@ impl<'a> Parser<'a> {
                 }
 
                 let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true);
-                let open_span = self.span;
+                let lo = self.span.lo;
                 self.bump();
                 let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace),
                                                             &token::CloseDelim(token::Paren),
@@ -2726,16 +2724,11 @@ impl<'a> Parser<'a> {
                                                           |p| p.parse_token_tree(),
                                                           |mut e| e.emit());
                 self.parsing_token_tree = parsing_token_tree;
 
-                let close_span = self.span;
                 self.bump();
 
-                let span = Span { lo: open_span.lo, ..close_span };
-                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
+                Ok(TokenTree::Delimited(Span { lo: lo, ..self.prev_span }, Rc::new(Delimited {
                     delim: delim,
-                    open_span: open_span,
                     tts: tts,
-                    close_span: close_span,
                 })))
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
@@ -49,6 +49,12 @@ pub enum DelimToken {
     NoDelim,
 }
 
+impl DelimToken {
+    pub fn len(&self) -> u32 {
+        if *self == NoDelim { 0 } else { 1 }
+    }
+}
+
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
 pub enum Lit {
     Byte(ast::Name),
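
The new DelimToken::len feeds the span arithmetic: an invisible NoDelim group contributes zero bytes, while every real delimiter is exactly one byte wide. A standalone mirror of the method for illustration (a local enum, not the rustc type):

// Hypothetical stand-alone mirror of token::DelimToken and the new len() method.
#[derive(PartialEq)]
enum DelimToken { Paren, Bracket, Brace, NoDelim }

impl DelimToken {
    fn len(&self) -> u32 {
        // NoDelim has no visible delimiter tokens, so they take up zero bytes.
        if *self == DelimToken::NoDelim { 0 } else { 1 }
    }
}

fn main() {
    assert_eq!(DelimToken::Brace.len(), 1);
    assert_eq!(DelimToken::NoDelim.len(), 0);
}
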
@@ -25,7 +25,7 @@
 //! ownership of the original.
 
 use ast::{self, AttrStyle, LitKind};
-use syntax_pos::Span;
+use syntax_pos::{BytePos, Span, DUMMY_SP};
 use codemap::Spanned;
 use ext::base;
 use ext::tt::macro_parser;
@@ -45,12 +45,8 @@ use std::rc::Rc;
 pub struct Delimited {
     /// The type of delimiter
     pub delim: token::DelimToken,
-    /// The span covering the opening delimiter
-    pub open_span: Span,
     /// The delimited sequence of token trees
     pub tts: Vec<TokenTree>,
-    /// The span covering the closing delimiter
-    pub close_span: Span,
 }
 
 impl Delimited {
@@ -65,13 +61,21 @@ impl Delimited {
     }
 
     /// Returns the opening delimiter as a token tree.
-    pub fn open_tt(&self) -> TokenTree {
-        TokenTree::Token(self.open_span, self.open_token())
+    pub fn open_tt(&self, span: Span) -> TokenTree {
+        let open_span = match span {
+            DUMMY_SP => DUMMY_SP,
+            _ => Span { hi: span.lo + BytePos(self.delim.len()), ..span },
+        };
+        TokenTree::Token(open_span, self.open_token())
     }
 
     /// Returns the closing delimiter as a token tree.
-    pub fn close_tt(&self) -> TokenTree {
-        TokenTree::Token(self.close_span, self.close_token())
+    pub fn close_tt(&self, span: Span) -> TokenTree {
+        let close_span = match span {
+            DUMMY_SP => DUMMY_SP,
+            _ => Span { lo: span.hi - BytePos(self.delim.len()), ..span },
+        };
+        TokenTree::Token(close_span, self.close_token())
    }
 
     /// Returns the token trees inside the delimiters.
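
One detail of the helpers above: when the Delimited tree carries DUMMY_SP (no real source position, as for generated code), the derived delimiter spans stay dummy instead of being sliced out of a meaningless span. A standalone sketch of that fallback, using a simplified Span stand-in rather than rustc's type:

// Simplified stand-ins for illustration only.
#[derive(Clone, Copy, PartialEq, Debug)]
struct Span { lo: u32, hi: u32 }
const DUMMY_SP: Span = Span { lo: 0, hi: 0 };

// Mirrors the span computation in the new open_tt: dummy in, dummy out;
// otherwise the opening delimiter covers the first `delim_len` bytes.
fn open_delim_span(span: Span, delim_len: u32) -> Span {
    if span == DUMMY_SP {
        DUMMY_SP
    } else {
        Span { hi: span.lo + delim_len, ..span }
    }
}

fn main() {
    // `{ b; }` at bytes 15..21 (from the parser test): the `{` is bytes 15..16.
    assert_eq!(open_delim_span(Span { lo: 15, hi: 21 }, 1), Span { lo: 15, hi: 16 });
    assert_eq!(open_delim_span(DUMMY_SP, 1), DUMMY_SP);
}
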
@@ -175,23 +179,21 @@ impl TokenTree {
 
                 TokenTree::Delimited(sp, Rc::new(Delimited {
                     delim: token::Bracket,
-                    open_span: sp,
                     tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
                               TokenTree::Token(sp, token::Eq),
                               TokenTree::Token(sp, token::Literal(
                                   token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
-                    close_span: sp,
                 }))
             }
             (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
                 delimed.tts[index].clone()
             }
-            (&TokenTree::Delimited(_, ref delimed), _) => {
+            (&TokenTree::Delimited(span, ref delimed), _) => {
                 if index == 0 {
-                    return delimed.open_tt();
+                    return delimed.open_tt(span);
                 }
                 if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt();
+                    return delimed.close_tt(span);
                 }
                 delimed.tts[index - 1].clone()
             }