Track distinct spans for open and close delimiter
parent c5a561c0ab
commit a1dd39e724

17 changed files with 163 additions and 126 deletions
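The heart of the change is a new `DelimSpan` type in `syntax::tokenstream` (added near the end of this diff) that records one span for the opening delimiter and one for the closing delimiter of a token tree, instead of a single span covering the whole group. A minimal standalone sketch of the intended semantics, using a simplified byte-range `Span` rather than the compiler's real type:

// Standalone sketch of the semantics introduced by this commit; the `Span`
// here is a toy byte range, not syntax_pos::Span.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { lo: u32, hi: u32 }

impl Span {
    fn with_hi(self, hi: u32) -> Span { Span { lo: self.lo, hi } }
}

#[derive(Debug, Clone, Copy, PartialEq)]
struct DelimSpan { open: Span, close: Span }

impl DelimSpan {
    fn from_single(sp: Span) -> Self { DelimSpan { open: sp, close: sp } }
    fn from_pair(open: Span, close: Span) -> Self { DelimSpan { open, close } }
    // Span of the whole group: from the start of the opening delimiter
    // to the end of the closing one.
    fn entire(self) -> Span { self.open.with_hi(self.close.hi) }
}

fn main() {
    // `( a , b )`: the open paren occupies bytes 0..1, the close paren 8..9.
    let ds = DelimSpan::from_pair(Span { lo: 0, hi: 1 }, Span { lo: 8, hi: 9 });
    assert_eq!(ds.entire(), Span { lo: 0, hi: 9 });

    // Groups built from a single pre-existing span (e.g. an attribute's span)
    // simply reuse it for both sides.
    let single = DelimSpan::from_single(Span { lo: 3, hi: 7 });
    assert_eq!(single.open, single.close);
}

Everything else in the diff is plumbing: call sites that previously carried a single `Span` now carry a `DelimSpan`, and code that still needs one span for the whole tree calls `entire()` (or builds the pair with `from_single`/`from_pair`).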
@@ -63,8 +63,8 @@ use std::str::FromStr;
 use syntax::errors::DiagnosticBuilder;
 use syntax::parse::{self, token};
 use syntax::symbol::Symbol;
-use syntax::tokenstream;
-use syntax_pos::{BytePos, Pos, FileName};
+use syntax::tokenstream::{self, DelimSpan};
+use syntax_pos::{Pos, FileName};

 /// The main type provided by this crate, representing an abstract stream of
 /// tokens, or, more specifically, a sequence of token trees.
@@ -609,7 +609,7 @@ impl fmt::Display for TokenTree {
 pub struct Group {
     delimiter: Delimiter,
     stream: TokenStream,
-    span: Span,
+    span: DelimSpan,
 }

 #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
@@ -650,7 +650,7 @@ impl Group {
         Group {
             delimiter: delimiter,
             stream: stream,
-            span: Span::call_site(),
+            span: DelimSpan::from_single(Span::call_site().0),
         }
     }

@@ -678,11 +678,10 @@ impl Group {
     /// ```
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn span(&self) -> Span {
-        self.span
+        Span(self.span.entire())
     }

-    /// Returns the span pointing to the opening delimiter of this group, or the
-    /// span of the entire group if this is a None-delimited group.
+    /// Returns the span pointing to the opening delimiter of this group.
     ///
     /// ```text
     /// pub fn span_open(&self) -> Span {
@@ -690,17 +689,10 @@ impl Group {
     /// ```
     #[unstable(feature = "proc_macro_span", issue = "38356")]
     pub fn span_open(&self) -> Span {
-        if self.delimiter == Delimiter::None {
-            self.span
-        } else {
-            let lo = self.span.0.lo();
-            let new_hi = BytePos::from_usize(lo.to_usize() + 1);
-            Span(self.span.0.with_hi(new_hi))
-        }
+        Span(self.span.open)
     }

-    /// Returns the span pointing to the closing delimiter of this group, or the
-    /// span of the entire group if this is a None-delimited group.
+    /// Returns the span pointing to the closing delimiter of this group.
     ///
     /// ```text
     /// pub fn span_close(&self) -> Span {
@@ -708,13 +700,7 @@ impl Group {
     /// ```
     #[unstable(feature = "proc_macro_span", issue = "38356")]
     pub fn span_close(&self) -> Span {
-        let hi = self.span.0.hi();
-        if self.delimiter == Delimiter::None || hi.to_usize() == 0 {
-            self.span
-        } else {
-            let new_lo = BytePos::from_usize(hi.to_usize() - 1);
-            Span(self.span.0.with_lo(new_lo))
-        }
+        Span(self.span.close)
     }

     /// Configures the span for this `Group`'s delimiters, but not its internal
@@ -725,7 +711,7 @@ impl Group {
     /// tokens at the level of the `Group`.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn set_span(&mut self, span: Span) {
-        self.span = span;
+        self.span = DelimSpan::from_single(span.0);
     }
 }

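The payoff on the `proc_macro` side is that `Group::span_open` and `Group::span_close` can now return the delimiter spans recorded by the parser instead of spans reconstructed with `BytePos` plus/minus-one arithmetic. A hypothetical consumer, not part of this commit, might look roughly like this; it assumes a proc-macro crate on a nightly toolchain with `#![feature(proc_macro_span)]` at the crate root:

// Hypothetical proc-macro helper that points at a group's delimiters.
// Assumes a proc-macro crate and the unstable `proc_macro_span` feature.
extern crate proc_macro;
use proc_macro::{Delimiter, TokenStream, TokenTree};

fn visit_groups(input: TokenStream) {
    for tree in input {
        if let TokenTree::Group(group) = tree {
            if group.delimiter() == Delimiter::Brace {
                // These now come straight from the parser's DelimSpan rather
                // than being derived from the group's overall span.
                let _open = group.span_open();
                let _close = group.span_close();
                // e.g. attach a lint or diagnostic to `_open` here.
            }
            visit_groups(group.stream());
        }
    }
}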
@@ -64,7 +64,7 @@ impl TokenTree {
             tokenstream::TokenTree::Delimited(span, delimed) => {
                 let delimiter = Delimiter::from_internal(delimed.delim);
                 let mut g = Group::new(delimiter, ::TokenStream(delimed.tts.into()));
-                g.set_span(Span(span));
+                g.span = span;
                 return g.into();
             }
         };
@@ -192,7 +192,7 @@ impl TokenTree {
             self::TokenTree::Punct(tt) => (tt.as_char(), tt.spacing(), tt.span()),
             self::TokenTree::Group(tt) => {
                 return TokenTree::Delimited(
-                    tt.span.0,
+                    tt.span,
                     Delimited {
                         delim: tt.delimiter.to_internal(),
                         tts: tt.stream.0.into(),

@@ -28,6 +28,7 @@ use syntax::ast;
 use syntax::source_map::SourceMap;
 use syntax::ext::hygiene::SyntaxContext;
 use syntax::symbol::Symbol;
+use syntax::tokenstream::DelimSpan;
 use syntax_pos::{Span, DUMMY_SP};
 use syntax_pos::hygiene;

@@ -396,6 +397,17 @@ impl<'a> HashStable<StableHashingContext<'a>> for Span {
     }
 }

+impl<'a> HashStable<StableHashingContext<'a>> for DelimSpan {
+    fn hash_stable<W: StableHasherResult>(
+        &self,
+        hcx: &mut StableHashingContext<'a>,
+        hasher: &mut StableHasher<W>,
+    ) {
+        self.open.hash_stable(hcx, hasher);
+        self.close.hash_stable(hcx, hasher);
+    }
+}
+
 pub fn hash_stable_trait_impls<'a, 'gcx, W, R>(
     hcx: &mut StableHashingContext<'a>,
     hasher: &mut StableHasher<W>,

@@ -35,7 +35,7 @@ use syntax::parse::parser::PathStyle;
 use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, keywords};
-use syntax::tokenstream::{TokenStream, TokenTree, Delimited};
+use syntax::tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
 use syntax::util::lev_distance::find_best_match_for_name;
 use syntax_pos::{Span, DUMMY_SP};
 use errors::Applicability;
@@ -279,7 +279,8 @@ impl<'a, 'crateloader: 'a> base::Resolver for Resolver<'a, 'crateloader> {
                     tokens.push(TokenTree::Token(path.span, tok).into());
                 }
             }
-            attrs[i].tokens = TokenTree::Delimited(attrs[i].span, Delimited {
+            let delim_span = DelimSpan::from_single(attrs[i].span);
+            attrs[i].tokens = TokenTree::Delimited(delim_span, Delimited {
                 delim: token::Paren,
                 tts: TokenStream::concat(tokens).into(),
             }).into();

@@ -34,7 +34,7 @@ use parse::token::{self, Token};
 use ptr::P;
 use symbol::Symbol;
 use ThinVec;
-use tokenstream::{TokenStream, TokenTree, Delimited};
+use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
 use GLOBALS;

 use std::iter;
@@ -535,7 +535,7 @@ impl MetaItemKind {
             }
             tokens.push(item.node.tokens());
         }
-        TokenTree::Delimited(span, Delimited {
+        TokenTree::Delimited(DelimSpan::from_single(span), Delimited {
             delim: token::Paren,
             tts: TokenStream::concat(tokens).into(),
         }).into()

@@ -10,14 +10,14 @@

 use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty};
 use source_map::respan;
-use syntax_pos::Span;
+use syntax_pos::{Span, DUMMY_SP};
 use ext::base::ExtCtxt;
 use ext::base;
 use ext::build::AstBuilder;
 use parse::parser::{Parser, PathStyle};
 use parse::token;
 use ptr::P;
-use tokenstream::{TokenStream, TokenTree};
+use tokenstream::{DelimSpan, TokenStream, TokenTree};

 /// Quasiquoting works via token trees.
 ///
@@ -36,7 +36,7 @@ pub mod rt {
     use symbol::Symbol;
     use ThinVec;

-    use tokenstream::{self, TokenTree, TokenStream};
+    use tokenstream::{self, DelimSpan, TokenTree, TokenStream};

     pub use parse::new_parser_from_tts;
     pub use syntax_pos::{BytePos, Span, DUMMY_SP, FileName};
@@ -245,7 +245,8 @@ pub mod rt {
             }
             inner.push(self.tokens.clone());

-            r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
+            let delim_span = DelimSpan::from_single(self.span);
+            r.push(TokenTree::Delimited(delim_span, tokenstream::Delimited {
                 delim: token::Bracket, tts: TokenStream::concat(inner).into()
             }));
             r
@@ -261,7 +262,7 @@ pub mod rt {

     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
+            vec![TokenTree::Delimited(DelimSpan::dummy(), tokenstream::Delimited {
                 delim: token::Paren,
                 tts: TokenStream::empty().into(),
             })]
@@ -385,13 +386,16 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {

     let mut results = Vec::new();
     let mut result = Vec::new();
+    let mut open_span = DUMMY_SP;
     for tree in tts {
         match tree {
-            TokenTree::Token(_, token::OpenDelim(..)) => {
+            TokenTree::Token(span, token::OpenDelim(..)) => {
+                open_span = span;
                 results.push(::std::mem::replace(&mut result, Vec::new()));
             }
             TokenTree::Token(span, token::CloseDelim(delim)) => {
-                let tree = TokenTree::Delimited(span, Delimited {
+                let delim_span = DelimSpan::from_pair(open_span, span);
+                let tree = TokenTree::Delimited(delim_span, Delimited {
                     delim,
                     tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
                 });
@@ -756,9 +760,9 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
             vec![cx.stmt_expr(e_push)]
         },
         TokenTree::Delimited(span, ref delimed) => {
-            let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
+            let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span.open), false);
             stmts.extend(statements_mk_tts(cx, delimed.stream()));
-            stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
+            stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span.close), false));
             stmts
         }
     }

@@ -85,7 +85,7 @@ pub use self::ParseResult::*;
 use self::TokenTreeOrTokenTreeSlice::*;

 use ast::Ident;
-use syntax_pos::{self, BytePos, Span};
+use syntax_pos::{self, Span};
 use errors::FatalError;
 use ext::tt::quoted::{self, TokenTree};
 use parse::{Directory, ParseSess};
@@ -94,7 +94,7 @@ use parse::token::{self, DocComment, Nonterminal, Token};
 use print::pprust;
 use OneVector;
 use symbol::keywords;
-use tokenstream::TokenStream;
+use tokenstream::{DelimSpan, TokenStream};

 use rustc_data_structures::fx::FxHashMap;
 use std::collections::hash_map::Entry::{Occupied, Vacant};
@@ -154,7 +154,7 @@ struct MatcherPos<'a> {
     /// The beginning position in the source that the beginning of this matcher corresponds to. In
     /// other words, the token in the source at `sp_lo` is matched against the first token of the
     /// matcher.
-    sp_lo: BytePos,
+    sp_lo: Span,

     /// For each named metavar in the matcher, we keep track of token trees matched against the
     /// metavar by the black box parser. In particular, there may be more than one match per
@@ -285,7 +285,7 @@ fn create_matches(len: usize) -> Vec<Rc<Vec<NamedMatch>>> {

 /// Generate the top-level matcher position in which the "dot" is before the first token of the
 /// matcher `ms` and we are going to start matching at position `lo` in the source.
-fn initial_matcher_pos(ms: &[TokenTree], lo: BytePos) -> MatcherPos {
+fn initial_matcher_pos(ms: &[TokenTree], lo: Span) -> MatcherPos {
     let match_idx_hi = count_names(ms);
     let matches = create_matches(match_idx_hi);
     MatcherPos {
@@ -332,7 +332,7 @@ fn initial_matcher_pos(ms: &[TokenTree], lo: BytePos) -> MatcherPos {
 /// token tree it was derived from.
 #[derive(Debug, Clone)]
 pub enum NamedMatch {
-    MatchedSeq(Rc<Vec<NamedMatch>>, syntax_pos::Span),
+    MatchedSeq(Rc<Vec<NamedMatch>>, DelimSpan),
     MatchedNonterminal(Rc<Nonterminal>),
 }

@@ -488,7 +488,7 @@ fn inner_parse_loop<'a>(
                 // Add matches from this repetition to the `matches` of `up`
                 for idx in item.match_lo..item.match_hi {
                     let sub = item.matches[idx].clone();
-                    let span = span.with_lo(item.sp_lo);
+                    let span = DelimSpan::from_pair(item.sp_lo, span);
                     new_pos.push_match(idx, MatchedSeq(sub, span));
                 }

@@ -556,7 +556,7 @@ fn inner_parse_loop<'a>(
                     match_cur: item.match_cur,
                     match_hi: item.match_cur + seq.num_captures,
                     up: Some(item),
-                    sp_lo: sp.lo(),
+                    sp_lo: sp.open,
                     top_elts: Tt(TokenTree::Sequence(sp, seq)),
                 })));
             }
@@ -643,7 +643,7 @@ pub fn parse(
     //
     // This MatcherPos instance is allocated on the stack. All others -- and
     // there are frequently *no* others! -- are allocated on the heap.
-    let mut initial = initial_matcher_pos(ms, parser.span.lo());
+    let mut initial = initial_matcher_pos(ms, parser.span);
     let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
     let mut next_items = Vec::new();

@@ -25,7 +25,7 @@ use parse::parser::Parser;
 use parse::token::{self, NtTT};
 use parse::token::Token::*;
 use symbol::Symbol;
-use tokenstream::{TokenStream, TokenTree};
+use tokenstream::{DelimSpan, TokenStream, TokenTree};

 use rustc_data_structures::fx::FxHashMap;
 use std::borrow::Cow;
@@ -226,7 +226,7 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
     let argument_gram = vec![
-        quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
+        quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
                 quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
@@ -237,7 +237,7 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
             num_captures: 2,
         })),
         // to phase into semicolon-termination instead of semicolon-separation
-        quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
+        quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
@@ -400,7 +400,8 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
                 _ => false,
             }
         }) {
-            sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+            let sp = span.entire();
+            sess.span_diagnostic.span_err(sp, "repetition matches empty token tree");
             return false;
         }
         if !check_lhs_no_empty_seq(sess, &seq.tts) {
@@ -474,12 +475,12 @@ impl FirstSets {
                     }
                     TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts[..]);
-                        first.replace_with(delimited.open_tt(span));
+                        first.replace_with(delimited.open_tt(span.open));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts[..]);

-                        match sets.first.entry(sp) {
+                        match sets.first.entry(sp.entire()) {
                             Entry::Vacant(vac) => {
                                 vac.insert(Some(subfirst.clone()));
                             }
@@ -499,7 +500,7 @@ impl FirstSets {

                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
+                            first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
                         }

                         // Reverse scan: Sequence comes before `first`.
@@ -534,11 +535,11 @@ impl FirstSets {
                     return first;
                 }
                 TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one(delimited.open_tt(span));
+                    first.add_one(delimited.open_tt(span.open));
                     return first;
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {
-                    match self.first.get(&sp) {
+                    match self.first.get(&sp.entire()) {
                         Some(&Some(ref subfirst)) => {

                             // If the sequence contents can be empty, then the first
@@ -546,7 +547,7 @@ impl FirstSets {

                             if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                             subfirst.maybe_empty) {
-                                first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
+                                first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
                             }

                             assert!(first.maybe_empty);
@@ -727,7 +728,7 @@ fn check_matcher_core(sess: &ParseSess,
                 }
             }
             TokenTree::Delimited(span, ref d) => {
-                let my_suffix = TokenSet::singleton(d.close_tt(span));
+                let my_suffix = TokenSet::singleton(d.close_tt(span.close));
                 check_matcher_core(sess, features, attrs, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
@@ -751,7 +752,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sp, u.clone()));
+                    new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone()));
                     &new
                 } else {
                     &suffix_first

@@ -16,7 +16,7 @@ use parse::{token, ParseSess};
 use print::pprust;
 use symbol::keywords;
 use syntax_pos::{edition::Edition, BytePos, Span};
-use tokenstream;
+use tokenstream::{self, DelimSpan};
 use {ast, attr};

 use rustc_data_structures::sync::Lrc;
@@ -90,9 +90,9 @@ pub enum KleeneOp {
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
     Token(Span, token::Token),
-    Delimited(Span, Lrc<Delimited>),
+    Delimited(DelimSpan, Lrc<Delimited>),
     /// A kleene-style repetition sequence
-    Sequence(Span, Lrc<SequenceRepetition>),
+    Sequence(DelimSpan, Lrc<SequenceRepetition>),
     /// E.g. `$var`
     MetaVar(Span, ast::Ident),
     /// E.g. `$var:expr`. This is only used in the left hand side of MBE macros.
@@ -137,10 +137,10 @@ impl TokenTree {
             }
             (&TokenTree::Delimited(span, ref delimed), _) => {
                 if index == 0 {
-                    return delimed.open_tt(span);
+                    return delimed.open_tt(span.open);
                 }
                 if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt(span);
+                    return delimed.close_tt(span.close);
                 }
                 delimed.tts[index - 1].clone()
             }
@@ -154,9 +154,9 @@ impl TokenTree {
         match *self {
             TokenTree::Token(sp, _)
             | TokenTree::MetaVar(sp, _)
-            | TokenTree::MetaVarDecl(sp, _, _)
-            | TokenTree::Delimited(sp, _)
-            | TokenTree::Sequence(sp, _) => sp,
+            | TokenTree::MetaVarDecl(sp, _, _) => sp,
+            TokenTree::Delimited(sp, _)
+            | TokenTree::Sequence(sp, _) => sp.entire(),
         }
     }
 }
@@ -286,7 +286,7 @@ where
             if delimited.delim != token::Paren {
                 let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
                 let msg = format!("expected `(`, found `{}`", tok);
-                sess.span_diagnostic.span_err(span, &msg);
+                sess.span_diagnostic.span_err(span.entire(), &msg);
             }
             // Parse the contents of the sequence itself
             let sequence = parse(
@@ -302,7 +302,7 @@ where
             let (separator, op) =
                 parse_sep_and_kleene_op(
                     trees,
-                    span,
+                    span.entire(),
                     sess,
                     features,
                     attrs,

@@ -16,8 +16,8 @@ use ext::tt::quoted;
 use fold::noop_fold_tt;
 use parse::token::{self, Token, NtTT};
 use OneVector;
-use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{TokenStream, TokenTree, Delimited};
+use syntax_pos::DUMMY_SP;
+use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};

 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -30,7 +30,7 @@ enum Frame {
     Delimited {
         forest: Lrc<quoted::Delimited>,
         idx: usize,
-        span: Span,
+        span: DelimSpan,
     },
     Sequence {
         forest: Lrc<quoted::SequenceRepetition>,
@@ -42,7 +42,7 @@ enum Frame {
 impl Frame {
     fn new(tts: Vec<quoted::TokenTree>) -> Frame {
         let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts: tts });
-        Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP }
+        Frame::Delimited { forest: forest, idx: 0, span: DelimSpan::dummy() }
     }
 }

@@ -123,20 +123,20 @@ pub fn transcribe(cx: &ExtCtxt,
                                    &interpolations,
                                    &repeats) {
                 LockstepIterSize::Unconstrained => {
-                    cx.span_fatal(sp, /* blame macro writer */
+                    cx.span_fatal(sp.entire(), /* blame macro writer */
                         "attempted to repeat an expression \
                          containing no syntax \
                          variables matched as repeating at this depth");
                 }
                 LockstepIterSize::Contradiction(ref msg) => {
                     // FIXME #2887 blame macro invoker instead
-                    cx.span_fatal(sp, &msg[..]);
+                    cx.span_fatal(sp.entire(), &msg[..]);
                 }
                 LockstepIterSize::Constraint(len, _) => {
                     if len == 0 {
                         if seq.op == quoted::KleeneOp::OneOrMore {
                             // FIXME #2887 blame invoker
-                            cx.span_fatal(sp, "this must repeat at least once");
+                            cx.span_fatal(sp.entire(), "this must repeat at least once");
                         }
                     } else {
                         repeats.push((0, len));

@@ -594,10 +594,13 @@ pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
     match tt {
         TokenTree::Token(span, tok) =>
             TokenTree::Token(fld.new_span(span), fld.fold_token(tok)),
-        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(fld.new_span(span), Delimited {
-            tts: fld.fold_tts(delimed.stream()).into(),
-            delim: delimed.delim,
-        }),
+        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(
+            DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
+            Delimited {
+                tts: fld.fold_tts(delimed.stream()).into(),
+                delim: delimed.delim,
+            }
+        ),
     }
 }

@@ -11,7 +11,7 @@
 use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
-use tokenstream::{Delimited, TokenStream, TokenTree};
+use tokenstream::{Delimited, DelimSpan, TokenStream, TokenTree};

 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -68,7 +68,7 @@ impl<'a> StringReader<'a> {
         let tts = self.parse_token_trees_until_close_delim();

         // Expand to cover the entire delimited token tree
-        let span = pre_span.with_hi(self.span.hi());
+        let delim_span = DelimSpan::from_pair(pre_span, self.span);

         match self.token {
             // Correct delimiter.
@@ -119,7 +119,7 @@ impl<'a> StringReader<'a> {
             _ => {}
         }

-        Ok(TokenTree::Delimited(span, Delimited {
+        Ok(TokenTree::Delimited(delim_span, Delimited {
             delim,
             tts: tts.into(),
         }).into())

@@ -722,7 +722,7 @@ mod tests {
     use attr::first_attr_value_str_by_name;
     use parse;
     use print::pprust::item_to_string;
-    use tokenstream::{self, TokenTree};
+    use tokenstream::{self, DelimSpan, TokenTree};
     use util::parser_testing::string_to_stream;
     use util::parser_testing::{string_to_expr, string_to_item};
     use with_globals;
@@ -805,7 +805,7 @@ mod tests {
                 TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
                 TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
                 TokenTree::Delimited(
-                    sp(5, 14),
+                    DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     tokenstream::Delimited {
                         delim: token::DelimToken::Paren,
                         tts: TokenStream::concat(vec![
@@ -817,7 +817,7 @@ mod tests {
                         ]).into(),
                     }).into(),
                 TokenTree::Delimited(
-                    sp(15, 21),
+                    DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     tokenstream::Delimited {
                         delim: token::DelimToken::Brace,
                         tts: TokenStream::concat(vec![

@@ -54,7 +54,7 @@ use print::pprust;
 use ptr::P;
 use parse::PResult;
 use ThinVec;
-use tokenstream::{self, Delimited, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, Delimited, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
 use symbol::{Symbol, keywords};

 use std::borrow::Cow;
@@ -262,7 +262,7 @@ struct TokenCursor {
 #[derive(Clone)]
 struct TokenCursorFrame {
     delim: token::DelimToken,
-    span: Span,
+    span: DelimSpan,
     open_delim: bool,
     tree_cursor: tokenstream::Cursor,
     close_delim: bool,
@@ -293,7 +293,7 @@ enum LastToken {
 }

 impl TokenCursorFrame {
-    fn new(sp: Span, delimited: &Delimited) -> Self {
+    fn new(sp: DelimSpan, delimited: &Delimited) -> Self {
         TokenCursorFrame {
             delim: delimited.delim,
             span: sp,
@@ -311,13 +311,13 @@ impl TokenCursor {
             let tree = if !self.frame.open_delim {
                 self.frame.open_delim = true;
                 Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
-                    .open_tt(self.frame.span)
+                    .open_tt(self.frame.span.open)
             } else if let Some(tree) = self.frame.tree_cursor.next() {
                 tree
             } else if !self.frame.close_delim {
                 self.frame.close_delim = true;
                 Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
-                    .close_tt(self.frame.span)
+                    .close_tt(self.frame.span.close)
             } else if let Some(frame) = self.stack.pop() {
                 self.frame = frame;
                 continue
@@ -361,7 +361,8 @@ impl TokenCursor {
             num_of_hashes = cmp::max(num_of_hashes, count);
         }

-        let body = TokenTree::Delimited(sp, Delimited {
+        let delim_span = DelimSpan::from_single(sp);
+        let body = TokenTree::Delimited(delim_span, Delimited {
             delim: token::Bracket,
             tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
                   TokenTree::Token(sp, token::Eq),
@@ -370,7 +371,7 @@ impl TokenCursor {
                 .iter().cloned().collect::<TokenStream>().into(),
         });

-        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited {
+        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(delim_span, &Delimited {
             delim: token::NoDelim,
             tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                 [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
@@ -560,7 +561,7 @@ impl<'a> Parser<'a> {
             root_module_name: None,
             expected_tokens: Vec::new(),
             token_cursor: TokenCursor {
-                frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited {
+                frame: TokenCursorFrame::new(DelimSpan::dummy(), &Delimited {
                     delim: token::NoDelim,
                     tts: tokens.into(),
                 }),
@@ -1229,7 +1230,8 @@ impl<'a> Parser<'a> {
         }

         match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
-            Some(TokenTree::Token(span, _)) | Some(TokenTree::Delimited(span, _)) => span,
+            Some(TokenTree::Token(span, _)) => span,
+            Some(TokenTree::Delimited(span, _)) => span.entire(),
             None => self.look_ahead_span(dist - 1),
         }
     }
@@ -2796,7 +2798,7 @@ impl<'a> Parser<'a> {
             token::OpenDelim(..) => {
                 let frame = mem::replace(&mut self.token_cursor.frame,
                                          self.token_cursor.stack.pop().unwrap());
-                self.span = frame.span;
+                self.span = frame.span.entire();
                 self.bump();
                 TokenTree::Delimited(frame.span, Delimited {
                     delim: frame.delim,

@@ -23,8 +23,7 @@ use symbol::keywords;
 use syntax::parse::parse_stream_from_source_str;
 use syntax_pos::{self, Span, FileName};
 use syntax_pos::symbol::{self, Symbol};
-use tokenstream::{TokenStream, TokenTree};
-use tokenstream;
+use tokenstream::{self, DelimSpan, TokenStream, TokenTree};

 use std::{cmp, fmt};
 use std::mem;
@@ -825,7 +824,8 @@ fn prepend_attrs(sess: &ParseSess,
         // that it encompasses more than each token, but it hopefully is "good
        // enough" for now at least.
         builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
-        builder.push(tokenstream::TokenTree::Delimited(attr.span, tokens));
+        let delim_span = DelimSpan::from_single(attr.span);
+        builder.push(tokenstream::TokenTree::Delimited(delim_span, tokens));
     }
     builder.push(tokens.clone());
     Some(builder.build())

@@ -22,7 +22,7 @@
 //! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
 //! ownership of the original.

-use syntax_pos::{BytePos, Span, DUMMY_SP};
+use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
 use ext::base;
 use ext::tt::{macro_parser, quoted};
 use parse::Directory;
@@ -97,7 +97,7 @@ pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(Span, Delimited),
+    Delimited(DelimSpan, Delimited),
 }

 impl TokenTree {
@@ -145,16 +145,16 @@ impl TokenTree {
     /// Retrieve the TokenTree's span.
     pub fn span(&self) -> Span {
         match *self {
-            TokenTree::Token(sp, _) | TokenTree::Delimited(sp, _) => sp,
+            TokenTree::Token(sp, _) => sp,
+            TokenTree::Delimited(sp, _) => sp.entire(),
         }
     }

     /// Modify the `TokenTree`'s span inplace.
     pub fn set_span(&mut self, span: Span) {
         match *self {
-            TokenTree::Token(ref mut sp, _) | TokenTree::Delimited(ref mut sp, _) => {
-                *sp = span;
-            }
+            TokenTree::Token(ref mut sp, _) => *sp = span,
+            TokenTree::Delimited(ref mut sp, _) => *sp = DelimSpan::from_single(span),
         }
     }

@@ -192,27 +192,20 @@ impl TokenStream {
         let mut iter = slice.iter().enumerate().peekable();
         while let Some((pos, ts)) = iter.next() {
             if let Some((_, next)) = iter.peek() {
-                match (ts, next) {
-                    (TokenStream {
-                        kind: TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma))
-                    }, _) |
-                    (_, TokenStream {
-                        kind: TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma))
-                    }) => {}
-                    (TokenStream {
-                        kind: TokenStreamKind::Tree(TokenTree::Token(sp, _))
-                    }, _) |
-                    (TokenStream {
-                        kind: TokenStreamKind::Tree(TokenTree::Delimited(sp, _))
-                    }, _) => {
-                        let sp = sp.shrink_to_hi();
-                        let comma = TokenStream {
-                            kind: TokenStreamKind::Tree(TokenTree::Token(sp, token::Comma)),
-                        };
-                        suggestion = Some((pos, comma, sp));
-                    }
-                    _ => {}
-                }
+                let sp = match (&ts.kind, &next.kind) {
+                    (TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma)), _) |
+                    (_, TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma))) => {
+                        continue;
+                    }
+                    (TokenStreamKind::Tree(TokenTree::Token(sp, _)), _) => *sp,
+                    (TokenStreamKind::Tree(TokenTree::Delimited(sp, _)), _) => sp.entire(),
+                    _ => continue,
+                };
+                let sp = sp.shrink_to_hi();
+                let comma = TokenStream {
+                    kind: TokenStreamKind::Tree(TokenTree::Token(sp, token::Comma)),
+                };
+                suggestion = Some((pos, comma, sp));
             }
         }
         if let Some((pos, comma, sp)) = suggestion {
@@ -718,6 +711,40 @@ impl Decodable for ThinTokenStream {
     }
 }

+#[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
+pub struct DelimSpan {
+    pub open: Span,
+    pub close: Span,
+}
+
+impl DelimSpan {
+    pub fn from_single(sp: Span) -> Self {
+        DelimSpan {
+            open: sp,
+            close: sp,
+        }
+    }
+
+    pub fn from_pair(open: Span, close: Span) -> Self {
+        DelimSpan { open, close }
+    }
+
+    pub fn dummy() -> Self {
+        Self::from_single(DUMMY_SP)
+    }
+
+    pub fn entire(self) -> Span {
+        self.open.with_hi(self.close.hi())
+    }
+
+    pub fn apply_mark(self, mark: Mark) -> Self {
+        DelimSpan {
+            open: self.open.apply_mark(mark),
+            close: self.close.apply_mark(mark),
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;

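Because the two delimiter spans are stored separately, any span transformation has to touch both sides; `apply_mark` above maps `open` and `close` individually, and the `noop_fold_tt` change earlier in this diff folds them separately for the same reason. A toy illustration of that pattern, reusing the simplified `Span` from the sketch at the top (the `map` helper is illustrative, not part of the compiler):

// Toy illustration: a transformation over a DelimSpan must be applied to the
// open and close spans independently, mirroring apply_mark / noop_fold_tt.
#[derive(Debug, Clone, Copy, PartialEq)]
struct Span { lo: u32, hi: u32 }

#[derive(Debug, Clone, Copy, PartialEq)]
struct DelimSpan { open: Span, close: Span }

impl DelimSpan {
    fn map(self, f: impl Fn(Span) -> Span) -> Self {
        DelimSpan { open: f(self.open), close: f(self.close) }
    }
}

fn main() {
    let shift = |sp: Span| Span { lo: sp.lo + 10, hi: sp.hi + 10 };
    let ds = DelimSpan {
        open: Span { lo: 0, hi: 1 },
        close: Span { lo: 8, hi: 9 },
    };
    let moved = ds.map(shift);
    assert_eq!(moved.open, Span { lo: 10, hi: 11 });
    assert_eq!(moved.close, Span { lo: 18, hi: 19 });
}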
@@ -70,7 +70,8 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
                 _ => unreachable!(),
             }
         }).collect();
-    let arm = cx.arm(seq_sp, pats, cx.expr_bool(seq_sp, true));
+    let span = seq_sp.entire();
+    let arm = cx.arm(span, pats, cx.expr_bool(span, true));

     quote_expr!(cx,
         match $matched_expr {