
Improve Path spans.

Jeffrey Seyfried 2017-03-29 07:17:18 +00:00
parent f08d5ad4c5
commit 8fde04b4a2
9 changed files with 95 additions and 67 deletions
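
The theme of the change: identifiers and paths that reach the parser through macro
metavariables now carry the spans of the tokens the metavariable captured, so an
`ast::Path` built from a metavariable no longer gets a bogus or overly wide span
(see the FIXME(#39450) removed below). A minimal sketch of the situation this
improves; the macro and names here are illustrative, not part of the commit:

    // With `$t:ident` substituted into a path position, the span of the
    // resulting path should point at `u32` in `make_fn!(u32)` rather than
    // at the `$t` inside the macro definition.
    macro_rules! make_fn {
        ($t:ident) => {
            fn takes(_x: $t) {}
        };
    }

    make_fn!(u32);

    fn main() {
        takes(0);
    }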

@@ -1015,9 +1015,10 @@ impl MetaItem {
     {
         let (mut span, name) = match tokens.next() {
             Some(TokenTree::Token(span, Token::Ident(ident))) => (span, ident.name),
-            Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => return match **nt {
-                token::Nonterminal::NtMeta(ref meta) => Some(meta.clone()),
-                _ => None,
+            Some(TokenTree::Token(_, Token::Interpolated(ref nt))) => match **nt {
+                token::Nonterminal::NtIdent(ident) => (ident.span, ident.node.name),
+                token::Nonterminal::NtMeta(ref meta) => return Some(meta.clone()),
+                _ => return None,
             },
             _ => return None,
         };

@@ -209,7 +209,26 @@ impl<F> TTMacroExpander for F
 {
     fn expand<'cx>(&self, ecx: &'cx mut ExtCtxt, span: Span, input: TokenStream)
                    -> Box<MacResult+'cx> {
-        (*self)(ecx, span, &input.trees().collect::<Vec<_>>())
+        struct AvoidInterpolatedIdents;
+
+        impl Folder for AvoidInterpolatedIdents {
+            fn fold_tt(&mut self, tt: tokenstream::TokenTree) -> tokenstream::TokenTree {
+                if let tokenstream::TokenTree::Token(_, token::Interpolated(ref nt)) = tt {
+                    if let token::NtIdent(ident) = **nt {
+                        return tokenstream::TokenTree::Token(ident.span, token::Ident(ident.node));
+                    }
+                }
+                fold::noop_fold_tt(tt, self)
+            }
+
+            fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
+                fold::noop_fold_mac(mac, self)
+            }
+        }
+
+        let input: Vec<_> =
+            input.trees().map(|tt| AvoidInterpolatedIdents.fold_tt(tt)).collect();
+        (*self)(ecx, span, &input)
     }
 }

@@ -492,7 +492,7 @@ fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
         _ => {}
     }
     // check at the beginning and the parser checks after each bump
-    p.check_unknown_macro_variable();
+    p.process_potential_macro_variable();
     match name {
         "item" => match panictry!(p.parse_item()) {
             Some(i) => token::NtItem(i),

@@ -121,7 +121,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                 p.root_module_name = cx.current_expansion.module.mod_path.last()
                     .map(|id| id.name.as_str().to_string());
-                p.check_unknown_macro_variable();
+                p.process_potential_macro_variable();
                 // Let the context choose how to interpret the result.
                 // Weird, but useful for X-macros.
                 return Box::new(ParserAnyMacro {

@@ -136,11 +136,14 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &ParseSess
         TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => {
             let span = match trees.next() {
                 Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
-                    Some(tokenstream::TokenTree::Token(end_sp, token::Ident(kind))) => {
-                        let span = Span { lo: start_sp.lo, ..end_sp };
-                        result.push(TokenTree::MetaVarDecl(span, ident, kind));
-                        continue
-                    }
+                    Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
+                        Some(kind) => {
+                            let span = Span { lo: start_sp.lo, ..end_sp };
+                            result.push(TokenTree::MetaVarDecl(span, ident, kind));
+                            continue
+                        }
+                        _ => end_sp,
+                    },
                     tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
                 },
                 tree @ _ => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),

@@ -12,7 +12,7 @@ use ast::Ident;
 use errors::Handler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use ext::tt::quoted;
-use parse::token::{self, SubstNt, Token, NtIdent, NtTT};
+use parse::token::{self, SubstNt, Token, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{TokenStream, TokenTree, Delimited};
 use util::small_vector::SmallVector;

@@ -154,13 +154,6 @@ pub fn transcribe(sp_diag: &Handler,
                 None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()),
                 Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
                     match **nt {
-                        // sidestep the interpolation tricks for ident because
-                        // (a) idents can be in lots of places, so it'd be a pain
-                        // (b) we actually can, since it's a token.
-                        NtIdent(ref sn) => {
-                            let token = TokenTree::Token(sn.span, token::Ident(sn.node));
-                            result.push(token.into());
-                        }
                         NtTT(ref tt) => result.push(tt.clone().into()),
                         _ => {
                             let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));

@@ -218,9 +218,7 @@ pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream
 /// Given stream and the ParseSess, produce a parser
 pub fn stream_to_parser<'a>(sess: &'a ParseSess, stream: TokenStream) -> Parser<'a> {
-    let mut p = Parser::new(sess, stream, None, false);
-    p.check_unknown_macro_variable();
-    p
+    Parser::new(sess, stream, None, false)
 }
 
 /// Parse a string representing a character literal into its final form.

@@ -160,6 +160,7 @@ pub struct Parser<'a> {
     /// the span of the current token:
     pub span: Span,
     /// the span of the previous token:
+    pub meta_var_span: Option<Span>,
     pub prev_span: Span,
     /// the previous token kind
     prev_token_kind: PrevTokenKind,
@@ -417,6 +418,7 @@ impl<'a> Parser<'a> {
             token: token::Underscore,
             span: syntax_pos::DUMMY_SP,
             prev_span: syntax_pos::DUMMY_SP,
+            meta_var_span: None,
             prev_token_kind: PrevTokenKind::Other,
             restrictions: Restrictions::empty(),
             obsolete_set: HashSet::new(),
@@ -443,6 +445,7 @@ impl<'a> Parser<'a> {
             parser.directory.path = PathBuf::from(sess.codemap().span_to_filename(parser.span));
             parser.directory.path.pop();
         }
+        parser.process_potential_macro_variable();
         parser
     }
@@ -1012,7 +1015,7 @@ impl<'a> Parser<'a> {
             self.bug("attempted to bump the parser past EOF (may be stuck in a loop)");
         }
 
-        self.prev_span = self.span;
+        self.prev_span = self.meta_var_span.take().unwrap_or(self.span);
 
         // Record last token kind for possible error recovery.
         self.prev_token_kind = match self.token {
@@ -1028,7 +1031,7 @@ impl<'a> Parser<'a> {
         self.token = next.tok;
         self.expected_tokens.clear();
         // check after each token
-        self.check_unknown_macro_variable();
+        self.process_potential_macro_variable();
     }
 
     /// Advance the parser using provided token as a next one. Use this when
@@ -1722,7 +1725,7 @@ impl<'a> Parser<'a> {
     pub fn parse_path(&mut self, mode: PathStyle) -> PResult<'a, ast::Path> {
         maybe_whole!(self, NtPath, |x| x);
 
-        let lo = self.span;
+        let lo = self.meta_var_span.unwrap_or(self.span);
         let is_global = self.eat(&token::ModSep);
 
         // Parse any number of segments and bound sets. A segment is an
@@ -1744,13 +1747,9 @@ impl<'a> Parser<'a> {
             segments.insert(0, PathSegment::crate_root());
         }
 
-        // Assemble the span.
-        // FIXME(#39450) This is bogus if part of the path is macro generated.
-        let span = lo.to(self.prev_span);
-
         // Assemble the result.
         Ok(ast::Path {
-            span: span,
+            span: lo.to(self.prev_span),
             segments: segments,
         })
     }
@@ -1763,8 +1762,8 @@ impl<'a> Parser<'a> {
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
-            let ident_span = self.span;
             let identifier = self.parse_path_segment_ident()?;
+            let ident_span = self.prev_span;
 
             if self.check(&token::ModSep) && self.look_ahead(1, |t| *t == token::Lt) {
                 self.bump();
@@ -1831,8 +1830,8 @@ impl<'a> Parser<'a> {
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
-            let ident_span = self.span;
             let identifier = self.parse_path_segment_ident()?;
+            let ident_span = self.prev_span;
 
             // If we do not see a `::`, stop.
             if !self.eat(&token::ModSep) {
@@ -1873,10 +1872,11 @@ impl<'a> Parser<'a> {
         let mut segments = Vec::new();
         loop {
             // First, parse an identifier.
+            let ident_span = self.span;
             let identifier = self.parse_path_segment_ident()?;
 
             // Assemble and push the result.
-            segments.push(PathSegment::from_ident(identifier, self.prev_span));
+            segments.push(PathSegment::from_ident(identifier, ident_span));
 
             // If we do not see a `::` or see `::{`/`::*`, stop.
             if !self.check(&token::ModSep) || self.is_import_coupler() {
@@ -1896,8 +1896,9 @@ impl<'a> Parser<'a> {
     fn expect_lifetime(&mut self) -> Lifetime {
         match self.token {
             token::Lifetime(ident) => {
+                let ident_span = self.span;
                 self.bump();
-                Lifetime { name: ident.name, span: self.prev_span, id: ast::DUMMY_NODE_ID }
+                Lifetime { name: ident.name, span: ident_span, id: ast::DUMMY_NODE_ID }
             }
             _ => self.span_bug(self.span, "not a lifetime")
         }
@@ -2568,10 +2569,23 @@ impl<'a> Parser<'a> {
         return Ok(e);
     }
 
-    pub fn check_unknown_macro_variable(&mut self) {
-        if let token::SubstNt(name) = self.token {
-            self.fatal(&format!("unknown macro variable `{}`", name)).emit()
-        }
+    pub fn process_potential_macro_variable(&mut self) {
+        let ident = match self.token {
+            token::SubstNt(name) => {
+                self.fatal(&format!("unknown macro variable `{}`", name)).emit();
+                return
+            }
+            token::Interpolated(ref nt) => {
+                self.meta_var_span = Some(self.span);
+                match **nt {
+                    token::NtIdent(ident) => ident,
+                    _ => return,
+                }
+            }
+            _ => return,
+        };
+        self.token = token::Ident(ident.node);
+        self.span = ident.span;
     }
 
     /// parse a single token tree from the input.
@@ -2589,9 +2603,9 @@ impl<'a> Parser<'a> {
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => {
-                let token = mem::replace(&mut self.token, token::Underscore);
+                let (token, span) = (mem::replace(&mut self.token, token::Underscore), self.span);
                 self.bump();
-                TokenTree::Token(self.prev_span, token)
+                TokenTree::Token(span, token)
             }
         }
     }
@@ -3489,9 +3503,9 @@ impl<'a> Parser<'a> {
     fn parse_pat_ident(&mut self,
                        binding_mode: ast::BindingMode)
                        -> PResult<'a, PatKind> {
+        let ident_span = self.span;
         let ident = self.parse_ident()?;
-        let prev_span = self.prev_span;
-        let name = codemap::Spanned{span: prev_span, node: ident};
+        let name = codemap::Spanned{span: ident_span, node: ident};
         let sub = if self.eat(&token::At) {
             Some(self.parse_pat()?)
         } else {
@@ -4364,7 +4378,7 @@ impl<'a> Parser<'a> {
     fn parse_self_arg(&mut self) -> PResult<'a, Option<Arg>> {
         let expect_ident = |this: &mut Self| match this.token {
             // Preserve hygienic context.
-            token::Ident(ident) => { this.bump(); codemap::respan(this.prev_span, ident) }
+            token::Ident(ident) => { let sp = this.span; this.bump(); codemap::respan(sp, ident) }
             _ => unreachable!()
         };
         let isolated_self = |this: &mut Self, n| {
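
For readers outside rustc: a self-contained sketch (simplified types, not rustc's real
ones) of the mechanism that `process_potential_macro_variable` and `meta_var_span`
implement above. When the current token is an interpolated identifier, the parser
remembers the metavariable's span and swaps in a plain identifier token that keeps the
identifier's original span:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span { lo: u32, hi: u32 }

    #[derive(Clone, Debug, PartialEq)]
    enum Tok {
        Ident(String),
        InterpolatedIdent { name: String, span: Span },
    }

    struct MiniParser {
        token: Tok,
        span: Span,
        meta_var_span: Option<Span>,
    }

    impl MiniParser {
        fn process_potential_macro_variable(&mut self) {
            if let Tok::InterpolatedIdent { name, span } = self.token.clone() {
                self.meta_var_span = Some(self.span); // remember where the metavariable appeared
                self.token = Tok::Ident(name);        // expose a plain identifier...
                self.span = span;                     // ...carrying its original span
            }
        }
    }

    fn main() {
        let mut p = MiniParser {
            token: Tok::InterpolatedIdent { name: "foo".into(), span: Span { lo: 10, hi: 13 } },
            span: Span { lo: 100, hi: 105 },
            meta_var_span: None,
        };
        p.process_potential_macro_variable();
        assert_eq!(p.token, Tok::Ident("foo".into()));
        assert_eq!(p.span, Span { lo: 10, hi: 13 });
        assert_eq!(p.meta_var_span, Some(Span { lo: 100, hi: 105 }));
    }

On `bump()`, `prev_span` then takes `meta_var_span` when it is set, so code that records
`self.prev_span` after eating a metavariable sees the metavariable's span rather than
the span of the token hidden inside it.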

@@ -211,9 +211,7 @@ impl Token {
             ModSep => true, // global path
             Pound => true, // expression attributes
             Interpolated(ref nt) => match **nt {
-                NtExpr(..) => true,
-                NtBlock(..) => true,
-                NtPath(..) => true,
+                NtIdent(..) | NtExpr(..) | NtBlock(..) | NtPath(..) => true,
                 _ => false,
             },
             _ => false,
@@ -236,8 +234,7 @@ impl Token {
             Lt | BinOp(Shl) => true, // associated path
             ModSep => true, // global path
             Interpolated(ref nt) => match **nt {
-                NtTy(..) => true,
-                NtPath(..) => true,
+                NtIdent(..) | NtTy(..) | NtPath(..) => true,
                 _ => false,
             },
             _ => false,
@@ -252,12 +249,20 @@ impl Token {
         }
     }
 
+    pub fn ident(&self) -> Option<ast::Ident> {
+        match *self {
+            Ident(ident) => Some(ident),
+            Interpolated(ref nt) => match **nt {
+                NtIdent(ident) => Some(ident.node),
+                _ => None,
+            },
+            _ => None,
+        }
+    }
+
     /// Returns `true` if the token is an identifier.
     pub fn is_ident(&self) -> bool {
-        match *self {
-            Ident(..) => true,
-            _ => false,
-        }
+        self.ident().is_some()
     }
 
     /// Returns `true` if the token is a documentation comment.
@@ -311,18 +316,15 @@ impl Token {
     /// Returns `true` if the token is a given keyword, `kw`.
     pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
-        match *self {
-            Ident(id) => id.name == kw.name(),
-            _ => false,
-        }
+        self.ident().map(|ident| ident.name == kw.name()).unwrap_or(false)
     }
 
     pub fn is_path_segment_keyword(&self) -> bool {
-        match *self {
-            Ident(id) => id.name == keywords::Super.name() ||
+        match self.ident() {
+            Some(id) => id.name == keywords::Super.name() ||
                          id.name == keywords::SelfValue.name() ||
                          id.name == keywords::SelfType.name(),
-            _ => false,
+            None => false,
         }
     }
@@ -333,18 +335,16 @@ impl Token {
     /// Returns `true` if the token is a strict keyword.
     pub fn is_strict_keyword(&self) -> bool {
-        match *self {
-            Ident(id) => id.name >= keywords::As.name() &&
-                         id.name <= keywords::While.name(),
+        match self.ident() {
+            Some(id) => id.name >= keywords::As.name() && id.name <= keywords::While.name(),
             _ => false,
         }
     }
 
     /// Returns `true` if the token is a keyword reserved for possible future use.
     pub fn is_reserved_keyword(&self) -> bool {
-        match *self {
-            Ident(id) => id.name >= keywords::Abstract.name() &&
-                         id.name <= keywords::Yield.name(),
+        match self.ident() {
+            Some(id) => id.name >= keywords::Abstract.name() && id.name <= keywords::Yield.name(),
             _ => false,
         }
     }
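
The new `Token::ident()` accessor above lets every identifier and keyword predicate
treat a plain `Ident` token and an interpolated `NtIdent` uniformly. A standalone
sketch of the same refactoring pattern on a simplified token type (illustrative only,
not rustc's `Token`):

    #[derive(Clone, Debug, PartialEq)]
    enum Tok {
        Ident(String),
        // An identifier wrapped by macro interpolation.
        InterpolatedIdent(String),
        Comma,
    }

    impl Tok {
        // Single accessor that both plain and interpolated identifiers funnel through.
        fn ident(&self) -> Option<&str> {
            match *self {
                Tok::Ident(ref name) | Tok::InterpolatedIdent(ref name) => Some(name.as_str()),
                _ => None,
            }
        }

        // The predicates become one-liners over `ident()` instead of separate matches.
        fn is_ident(&self) -> bool {
            self.ident().is_some()
        }

        fn is_keyword(&self, kw: &str) -> bool {
            self.ident().map(|name| name == kw).unwrap_or(false)
        }
    }

    fn main() {
        assert!(Tok::Ident("fn".into()).is_keyword("fn"));
        assert!(Tok::InterpolatedIdent("self".into()).is_keyword("self"));
        assert!(!Tok::Comma.is_ident());
    }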