Simplify hygiene::Mark application, and remove variant `Token::SubstNt` in favor of `quoted::TokenTree::MetaVar`
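`Token::SubstNt` was the lexer-level representation of `$var`; after this commit it exists only in the macro-by-example representation, `quoted::TokenTree`. As a rough orientation (a sketch abridged from the `quoted.rs` hunk below, with the doc comments shortened), the matcher/transcriber tree now distinguishes a metavariable use from its declaration:

    // Abridged sketch of ext/tt/quoted.rs after this commit.
    pub enum TokenTree {
        Token(Span, token::Token),
        Delimited(Span, Rc<Delimited>),
        Sequence(Span, Rc<SequenceRepetition>),
        /// E.g. `$var` -- previously lexed as `Token::SubstNt`.
        MetaVar(Span, ast::Ident),
        /// E.g. `$var:expr` -- only in the left-hand side of MBE macros.
        MetaVarDecl(Span, ast::Ident, ast::Ident),
    }

Correspondingly, hygiene marks are no longer applied by pre-folding the whole invocation's token stream in `expand.rs`; `transcribe` now takes the `ExtCtxt` and applies `cx.current_expansion.mark` to spans and identifiers as it produces them, via the now-public `Marker` and `SyntaxContext::apply_mark`.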
parent fc9ccfdbe0
commit d4488b7df9
26 changed files with 160 additions and 172 deletions
@@ -87,6 +87,8 @@ pub mod __internal {
     use std::rc::Rc;
 
     use syntax::ast;
+    use syntax::ext::base::ExtCtxt;
+    use syntax::ext::hygiene::Mark;
     use syntax::ptr::P;
     use syntax::parse::{self, token, ParseSess};
     use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_};
@@ -107,7 +109,7 @@ pub mod __internal {
     }
 
     pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
-        with_parse_sess(move |sess| {
+        with_sess(move |(sess, _)| {
             let mut parser = parse::stream_to_parser(sess, stream.inner);
             let mut items = Vec::new();
 
@@ -140,13 +142,14 @@ pub mod __internal {
 
     // Emulate scoped_thread_local!() here essentially
     thread_local! {
-        static CURRENT_SESS: Cell<*const ParseSess> = Cell::new(0 as *const _);
+        static CURRENT_SESS: Cell<(*const ParseSess, Mark)> =
+            Cell::new((0 as *const _, Mark::root()));
     }
 
-    pub fn set_parse_sess<F, R>(sess: &ParseSess, f: F) -> R
+    pub fn set_sess<F, R>(cx: &ExtCtxt, f: F) -> R
         where F: FnOnce() -> R
     {
-        struct Reset { prev: *const ParseSess }
+        struct Reset { prev: (*const ParseSess, Mark) }
 
         impl Drop for Reset {
             fn drop(&mut self) {
@@ -156,18 +159,18 @@ pub mod __internal {
 
         CURRENT_SESS.with(|p| {
             let _reset = Reset { prev: p.get() };
-            p.set(sess);
+            p.set((cx.parse_sess, cx.current_expansion.mark));
             f()
         })
     }
 
-    pub fn with_parse_sess<F, R>(f: F) -> R
-        where F: FnOnce(&ParseSess) -> R
+    pub fn with_sess<F, R>(f: F) -> R
+        where F: FnOnce((&ParseSess, Mark)) -> R
     {
         let p = CURRENT_SESS.with(|p| p.get());
-        assert!(!p.is_null(), "proc_macro::__internal::with_parse_sess() called \
+        assert!(!p.0.is_null(), "proc_macro::__internal::with_sess() called \
                                before set_parse_sess()!");
-        f(unsafe { &*p })
+        f(unsafe { (&*p.0, p.1) })
     }
 }
 
@@ -181,10 +184,11 @@ impl FromStr for TokenStream {
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        __internal::with_parse_sess(|sess| {
+        __internal::with_sess(|(sess, mark)| {
             let src = src.to_string();
             let name = "<proc-macro source code>".to_string();
-            let stream = parse::parse_stream_from_source_str(name, src, sess);
+            let call_site = mark.expn_info().unwrap().call_site;
+            let stream = parse::parse_stream_from_source_str(name, src, sess, Some(call_site));
             Ok(__internal::token_stream_wrap(stream))
         })
     }
@@ -283,8 +283,7 @@ fn hash_token<'a, 'gcx, 'tcx, W: StableHasherResult>(token: &token::Token,
         }
 
         token::Token::Ident(ident) |
-        token::Token::Lifetime(ident) |
-        token::Token::SubstNt(ident) => ident.name.hash_stable(hcx, hasher),
+        token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
 
         token::Token::Interpolated(ref non_terminal) => {
             // FIXME(mw): This could be implemented properly. It's just a
@@ -372,7 +372,7 @@ impl CrateStore for cstore::CStore {
 
         let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body);
         let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION };
-        let body = filemap_to_stream(&sess.parse_sess, filemap);
+        let body = filemap_to_stream(&sess.parse_sess, filemap, None);
 
         // Mark the attrs as used
         let attrs = data.get_item_attrs(id.index, &self.dep_graph);
@@ -319,7 +319,7 @@ impl<'a> Classifier<'a> {
             token::Lifetime(..) => Class::Lifetime,
 
             token::Underscore | token::Eof | token::Interpolated(..) |
-            token::SubstNt(..) | token::Tilde | token::At => Class::None,
+            token::Tilde | token::At => Class::None,
         };
 
         // Anything that didn't return above is the simple case where we the
@@ -903,17 +903,3 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
         }
     Some(es)
 }
-
-pub struct ChangeSpan {
-    pub span: Span
-}
-
-impl Folder for ChangeSpan {
-    fn new_span(&mut self, _sp: Span) -> Span {
-        self.span
-    }
-
-    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
-        fold::noop_fold_mac(mac, self)
-    }
-}
@@ -16,7 +16,7 @@ use config::{is_test_or_bench, StripUnconfigured};
 use errors::FatalError;
 use ext::base::*;
 use ext::derive::{add_derived_markers, collect_derives};
-use ext::hygiene::Mark;
+use ext::hygiene::{Mark, SyntaxContext};
 use ext::placeholders::{placeholder, PlaceholderExpander};
 use feature_gate::{self, Features, is_builtin_attr};
 use fold;
@@ -470,7 +470,6 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             Ok(())
         };
 
-        let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark));
         let opt_expanded = match *ext {
             SyntaxExtension::DeclMacro(ref expand, def_span) => {
                 if let Err(msg) = validate_and_set_expn_info(def_span.map(|(_, s)| s),
@@ -478,7 +477,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     self.cx.span_err(path.span, &msg);
                     return kind.dummy(span);
                 }
-                kind.make_from(expand.expand(self.cx, span, marked_tts))
+                kind.make_from(expand.expand(self.cx, span, mac.node.stream()))
             }
 
             NormalTT(ref expandfun, def_info, allow_internal_unstable) => {
@@ -487,7 +486,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     self.cx.span_err(path.span, &msg);
                     return kind.dummy(span);
                 }
-                kind.make_from(expandfun.expand(self.cx, span, marked_tts))
+                kind.make_from(expandfun.expand(self.cx, span, mac.node.stream()))
             }
 
             IdentTT(ref expander, tt_span, allow_internal_unstable) => {
@@ -506,7 +505,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     }
                 });
 
-                let input: Vec<_> = marked_tts.into_trees().collect();
+                let input: Vec<_> = mac.node.stream().into_trees().collect();
                 kind.make_from(expander.expand(self.cx, span, ident, input))
             }
 
@@ -541,21 +540,17 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 },
             });
 
-            let tok_result = expandfun.expand(self.cx, span, marked_tts);
+            let tok_result = expandfun.expand(self.cx, span, mac.node.stream());
             Some(self.parse_expansion(tok_result, kind, path, span))
         }
     };
 
-    let expanded = if let Some(expanded) = opt_expanded {
-        expanded
-    } else {
+    unwrap_or!(opt_expanded, {
         let msg = format!("non-{kind} macro in {kind} position: {name}",
                           name = path.segments[0].identifier.name, kind = kind.name());
         self.cx.span_err(path.span, &msg);
-        return kind.dummy(span);
-    };
-
-    expanded.fold_with(&mut Marker(mark))
+        kind.dummy(span)
+    })
     }
 
     /// Expand a derive invocation. Returns the result of expansion.
@@ -621,8 +616,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
             }
         };
         parser.ensure_complete_parse(path, kind.name(), span);
-        // FIXME better span info
-        expansion.fold_with(&mut ChangeSpan { span: span })
+        expansion
     }
 }
 
@@ -673,7 +667,9 @@ impl<'a> Parser<'a> {
         if self.token != token::Eof {
             let msg = format!("macro expansion ignores token `{}` and any following",
                               self.this_token_to_string());
-            let mut err = self.diagnostic().struct_span_err(self.span, &msg);
+            let mut def_site_span = self.span;
+            def_site_span.ctxt = SyntaxContext::empty(); // Avoid emitting backtrace info twice.
+            let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
             let msg = format!("caused by the macro expansion here; the usage \
                                of `{}!` is likely invalid in {} context",
                                macro_path, kind_name);
@@ -787,12 +783,12 @@ fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
         Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
         Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
     };
-    string_to_stream(text, parse_sess)
+    string_to_stream(text, parse_sess, item.span())
 }
 
-fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
+fn string_to_stream(text: String, parse_sess: &ParseSess, span: Span) -> TokenStream {
     let filename = String::from("<macro expansion>");
-    filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text))
+    filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text), Some(span))
 }
 
 impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
@@ -1070,7 +1066,7 @@ impl<'feat> ExpansionConfig<'feat> {
 }
 
 // A Marker adds the given mark to the syntax context.
-struct Marker(Mark);
+pub struct Marker(pub Mark);
 
 impl Folder for Marker {
     fn fold_ident(&mut self, mut ident: Ident) -> Ident {
@@ -364,7 +364,7 @@ pub mod rt {
 
         fn parse_tts(&self, s: String) -> Vec<TokenTree> {
             let source_name = "<quote expansion>".to_owned();
-            parse::parse_stream_from_source_str(source_name, s, self.parse_sess())
+            parse::parse_stream_from_source_str(source_name, s, self.parse_sess(), None)
                 .into_trees().collect()
         }
     }
@@ -700,7 +700,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
         token::Underscore => "Underscore",
         token::Eof => "Eof",
 
-        token::Whitespace | token::SubstNt(_) | token::Comment | token::Shebang(_) => {
+        token::Whitespace | token::Comment | token::Shebang(_) => {
             panic!("unhandled token in quote!");
         }
     };
@@ -158,15 +158,10 @@ pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
 pub fn count_names(ms: &[TokenTree]) -> usize {
     ms.iter().fold(0, |count, elt| {
         count + match *elt {
-            TokenTree::Sequence(_, ref seq) => {
-                seq.num_captures
-            }
-            TokenTree::Delimited(_, ref delim) => {
-                count_names(&delim.tts)
-            }
-            TokenTree::MetaVarDecl(..) => {
-                1
-            }
+            TokenTree::Sequence(_, ref seq) => seq.num_captures,
+            TokenTree::Delimited(_, ref delim) => count_names(&delim.tts),
+            TokenTree::MetaVar(..) => 0,
+            TokenTree::MetaVarDecl(..) => 1,
             TokenTree::Token(..) => 0,
         }
     })
@@ -244,7 +239,7 @@ fn nameize<I: Iterator<Item=NamedMatch>>(sess: &ParseSess, ms: &[TokenTree], mut
                 }
             }
         }
-        TokenTree::Token(..) => (),
+        TokenTree::MetaVar(..) | TokenTree::Token(..) => (),
     }
 
     Ok(())
@@ -409,12 +404,11 @@ fn inner_parse_loop(sess: &ParseSess,
                     ei.idx = 0;
                     cur_eis.push(ei);
                 }
-                TokenTree::Token(_, ref t) => {
-                    if token_name_eq(t, token) {
+                TokenTree::Token(_, ref t) if token_name_eq(t, token) => {
                     ei.idx += 1;
                     next_eis.push(ei);
                 }
-                }
+                TokenTree::Token(..) | TokenTree::MetaVar(..) => {}
             }
         }
     }
@@ -120,7 +120,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
             _ => cx.span_bug(sp, "malformed macro rhs"),
         };
         // rhs has holes ( `$id` and `$(...)` that need filled)
-        let tts = transcribe(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
+        let tts = transcribe(cx, Some(named_matches), rhs);
 
         if cx.trace_macros() {
             trace_macros_note(cx, sp, format!("to `{}`", tts));
@@ -292,7 +292,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
     use self::quoted::TokenTree;
     for tt in tts {
         match *tt {
-            TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => (),
+            TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (),
             TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) {
                 return false;
             },
@@ -372,7 +372,7 @@ impl FirstSets {
         let mut first = TokenSet::empty();
         for tt in tts.iter().rev() {
             match *tt {
-                TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
+                TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
                     first.replace_with(tt.clone());
                 }
                 TokenTree::Delimited(span, ref delimited) => {
@@ -432,7 +432,7 @@ impl FirstSets {
         for tt in tts.iter() {
             assert!(first.maybe_empty);
             match *tt {
-                TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
+                TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
                     first.add_one(tt.clone());
                     return first;
                 }
@@ -602,7 +602,7 @@ fn check_matcher_core(sess: &ParseSess,
             // First, update `last` so that it corresponds to the set
             // of NT tokens that might end the sequence `... token`.
             match *token {
-                TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
+                TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
                     let can_be_followed_by_any;
                     if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, token) {
                         let msg = format!("invalid fragment specifier `{}`", bad_frag);
@@ -872,6 +872,7 @@ fn is_legal_fragment_specifier(sess: &ParseSess,
 fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
         quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
+        quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
                      in follow set checker"),
@@ -78,9 +78,11 @@ pub enum KleeneOp {
 pub enum TokenTree {
     Token(Span, token::Token),
     Delimited(Span, Rc<Delimited>),
-    /// A kleene-style repetition sequence with a span
+    /// A kleene-style repetition sequence
     Sequence(Span, Rc<SequenceRepetition>),
+    /// Matches a nonterminal. This is only used in the left hand side of MBE macros.
+    /// E.g. `$var`
+    MetaVar(Span, ast::Ident),
     /// E.g. `$var:expr`. This is only used in the left hand side of MBE macros.
     MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */),
 }
@@ -130,6 +132,7 @@ impl TokenTree {
     pub fn span(&self) -> Span {
         match *self {
             TokenTree::Token(sp, _) |
+            TokenTree::MetaVar(sp, _) |
             TokenTree::MetaVarDecl(sp, _, _) |
             TokenTree::Delimited(sp, _) |
             TokenTree::Sequence(sp, _) => sp,
@@ -144,7 +147,7 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars
     while let Some(tree) = trees.next() {
         let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
         match tree {
-            TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => {
+            TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
                     Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
                         Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
@@ -199,13 +202,13 @@ fn parse_tree<I>(tree: tokenstream::TokenTree,
                 let ident = ast::Ident { name: Symbol::intern("$crate"), ..ident };
                 TokenTree::Token(span, token::Ident(ident))
             } else {
-                TokenTree::Token(span, token::SubstNt(ident))
+                TokenTree::MetaVar(span, ident)
             }
         }
         Some(tokenstream::TokenTree::Token(span, tok)) => {
             let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok));
             sess.span_diagnostic.span_err(span, &msg);
-            TokenTree::Token(span, token::SubstNt(keywords::Invalid.ident()))
+            TokenTree::MetaVar(span, keywords::Invalid.ident())
         }
         None => TokenTree::Token(span, token::Dollar),
     },
@@ -9,10 +9,12 @@
 // except according to those terms.
 
 use ast::Ident;
-use errors::Handler;
+use ext::base::ExtCtxt;
+use ext::expand::Marker;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use ext::tt::quoted;
-use parse::token::{self, SubstNt, Token, NtTT};
+use fold::noop_fold_tt;
+use parse::token::{self, Token, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{TokenStream, TokenTree, Delimited};
 use util::small_vector::SmallVector;
@@ -61,9 +63,9 @@ impl Iterator for Frame {
 }
 
 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TokenTree::{Sequence, Match}`s, or `SubstNt`s, `interp` can
+/// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can
 /// (and should) be None.
-pub fn transcribe(sp_diag: &Handler,
+pub fn transcribe(cx: &ExtCtxt,
                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                   src: Vec<quoted::TokenTree>)
                   -> TokenStream {
@@ -120,22 +122,20 @@ pub fn transcribe(sp_diag: &Handler,
                                           &interpolations,
                                           &repeats) {
                     LockstepIterSize::Unconstrained => {
-                        panic!(sp_diag.span_fatal(
-                            sp, /* blame macro writer */
+                        cx.span_fatal(sp, /* blame macro writer */
                             "attempted to repeat an expression \
                              containing no syntax \
-                             variables matched as repeating at this depth"));
+                             variables matched as repeating at this depth");
                     }
                     LockstepIterSize::Contradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        panic!(sp_diag.span_fatal(sp, &msg[..]));
+                        cx.span_fatal(sp, &msg[..]);
                     }
                     LockstepIterSize::Constraint(len, _) => {
                         if len == 0 {
                             if seq.op == quoted::KleeneOp::OneOrMore {
                                 // FIXME #2887 blame invoker
-                                panic!(sp_diag.span_fatal(sp,
-                                                          "this must repeat at least once"));
+                                cx.span_fatal(sp, "this must repeat at least once");
                             }
                         } else {
                             repeats.push((0, len));
@@ -149,29 +149,37 @@ pub fn transcribe(sp_diag: &Handler,
                 }
             }
-            // FIXME #2887: think about span stuff here
-            quoted::TokenTree::Token(sp, SubstNt(ident)) => {
-                match lookup_cur_matched(ident, &interpolations, &repeats) {
-                    None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()),
-                    Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
-                        match **nt {
-                            NtTT(ref tt) => result.push(tt.clone().into()),
-                            _ => {
+            quoted::TokenTree::MetaVar(mut sp, ident) => {
+                if let Some(cur_matched) = lookup_cur_matched(ident, &interpolations, &repeats) {
+                    if let MatchedNonterminal(ref nt) = *cur_matched {
+                        if let NtTT(ref tt) = **nt {
+                            result.push(tt.clone().into());
+                        } else {
+                            sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark);
                             let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
                             result.push(token.into());
                         }
+                    } else {
+                        cx.span_fatal(sp, /* blame the macro writer */
+                            &format!("variable '{}' is still repeating at this depth", ident));
                     }
                 } else {
-                    panic!(sp_diag.span_fatal(
-                        sp, /* blame the macro writer */
-                        &format!("variable '{}' is still repeating at this depth", ident)));
+                    let ident =
+                        Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident };
+                    sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark);
+                    result.push(TokenTree::Token(sp, token::Dollar).into());
+                    result.push(TokenTree::Token(sp, token::Ident(ident)).into());
                 }
             }
-            }
-            quoted::TokenTree::Delimited(span, delimited) => {
+            quoted::TokenTree::Delimited(mut span, delimited) => {
+                span.ctxt = span.ctxt.apply_mark(cx.current_expansion.mark);
                 stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
                 result_stack.push(mem::replace(&mut result, Vec::new()));
             }
-            quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok).into()),
+            quoted::TokenTree::Token(sp, tok) => {
+                let mut marker = Marker(cx.current_expansion.mark);
+                result.push(noop_fold_tt(TokenTree::Token(sp, tok), &mut marker).into())
+            }
             quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
         }
     }
@@ -240,7 +248,7 @@ fn lockstep_iter_size(tree: &quoted::TokenTree,
                 size + lockstep_iter_size(tt, interpolations, repeats)
             })
         },
-        TokenTree::Token(_, SubstNt(name)) | TokenTree::MetaVarDecl(_, name, _) =>
+        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) =>
             match lookup_cur_matched(name, interpolations, repeats) {
                 Some(matched) => match *matched {
                     MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
@@ -588,7 +588,6 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
             };
             token::Interpolated(Rc::new(fld.fold_interpolated(nt)))
         }
-        token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
         _ => t
     }
 }
@@ -66,14 +66,15 @@ pub struct StringReader<'a> {
     token: token::Token,
     span: Span,
     open_braces: Vec<(token::DelimToken, Span)>,
-}
-
-fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
-    Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }
+    pub override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
-    fn next_token(&mut self) -> TokenAndSpan {
+    fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
+        unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION})
+    }
+
+    fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
         let res = self.try_next_token();
         self.unwrap_or_abort(res)
     }
@@ -175,6 +176,7 @@ impl<'a> StringReader<'a> {
             token: token::Eof,
             span: syntax_pos::DUMMY_SP,
             open_braces: Vec::new(),
+            override_span: None,
         }
     }
 
@@ -229,12 +231,12 @@ impl<'a> StringReader<'a> {
 
     /// Report a fatal error spanning [`from_pos`, `to_pos`).
     fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError {
-        self.fatal_span(mk_sp(from_pos, to_pos), m)
+        self.fatal_span(self.mk_sp(from_pos, to_pos), m)
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`).
     fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) {
-        self.err_span(mk_sp(from_pos, to_pos), m)
+        self.err_span(self.mk_sp(from_pos, to_pos), m)
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -258,7 +260,7 @@ impl<'a> StringReader<'a> {
         for c in c.escape_default() {
             m.push(c)
         }
-        self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..])
+        self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..])
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -282,7 +284,7 @@ impl<'a> StringReader<'a> {
         for c in c.escape_default() {
             m.push(c)
         }
-        self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..])
+        self.sess.span_diagnostic.struct_span_err(self.mk_sp(from_pos, to_pos), &m[..])
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@@ -306,11 +308,11 @@ impl<'a> StringReader<'a> {
             None => {
                 if self.is_eof() {
                     self.peek_tok = token::Eof;
-                    self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos);
+                    self.peek_span = self.mk_sp(self.filemap.end_pos, self.filemap.end_pos);
                 } else {
                     let start_bytepos = self.pos;
                     self.peek_tok = self.next_token_inner()?;
-                    self.peek_span = mk_sp(start_bytepos, self.pos);
+                    self.peek_span = self.mk_sp(start_bytepos, self.pos);
                 };
             }
         }
@@ -502,7 +504,7 @@ impl<'a> StringReader<'a> {
         if let Some(c) = self.ch {
             if c.is_whitespace() {
                 let msg = "called consume_any_line_comment, but there was whitespace";
-                self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg);
+                self.sess.span_diagnostic.span_err(self.mk_sp(self.pos, self.pos), msg);
             }
         }
 
@@ -545,13 +547,13 @@ impl<'a> StringReader<'a> {
 
                 Some(TokenAndSpan {
                     tok: tok,
-                    sp: mk_sp(start_bpos, self.pos),
+                    sp: self.mk_sp(start_bpos, self.pos),
                 })
             })
         } else {
             Some(TokenAndSpan {
                 tok: token::Comment,
-                sp: mk_sp(start_bpos, self.pos),
+                sp: self.mk_sp(start_bpos, self.pos),
             })
         }
     }
@@ -584,7 +586,7 @@ impl<'a> StringReader<'a> {
             }
             return Some(TokenAndSpan {
                 tok: token::Shebang(self.name_from(start)),
-                sp: mk_sp(start, self.pos),
+                sp: self.mk_sp(start, self.pos),
             });
         }
     }
@@ -612,7 +614,7 @@ impl<'a> StringReader<'a> {
         }
         let c = Some(TokenAndSpan {
             tok: token::Whitespace,
-            sp: mk_sp(start_bpos, self.pos),
+            sp: self.mk_sp(start_bpos, self.pos),
         });
         debug!("scanning whitespace: {:?}", c);
         c
@@ -674,7 +676,7 @@ impl<'a> StringReader<'a> {
 
             Some(TokenAndSpan {
                 tok: tok,
-                sp: mk_sp(start_bpos, self.pos),
+                sp: self.mk_sp(start_bpos, self.pos),
             })
         })
     }
@@ -869,7 +871,7 @@ impl<'a> StringReader<'a> {
             let valid = if self.ch_is('{') {
                 self.scan_unicode_escape(delim) && !ascii_only
             } else {
-                let span = mk_sp(start, self.pos);
+                let span = self.mk_sp(start, self.pos);
                 self.sess.span_diagnostic
                     .struct_span_err(span, "incorrect unicode escape sequence")
                     .span_help(span,
@@ -907,13 +909,13 @@ impl<'a> StringReader<'a> {
                 },
                 c);
             if e == '\r' {
-                err.span_help(mk_sp(escaped_pos, pos),
+                err.span_help(self.mk_sp(escaped_pos, pos),
                               "this is an isolated carriage return; consider \
                                checking your editor and version control \
                                settings");
             }
             if (e == '{' || e == '}') && !ascii_only {
-                err.span_help(mk_sp(escaped_pos, pos),
+                err.span_help(self.mk_sp(escaped_pos, pos),
                               "if used in a formatting string, curly braces \
                                are escaped with `{{` and `}}`");
             }
@@ -141,9 +141,10 @@ pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess)
+pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess,
+                                    override_span: Option<Span>)
                                     -> TokenStream {
-    filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
+    filemap_to_stream(sess, sess.codemap().new_filemap(name, source), override_span)
 }
 
 // Create a new parser from a source string
@@ -177,7 +178,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 /// Given a filemap and config, return a parser
 pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser {
     let end_pos = filemap.end_pos;
-    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
+    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None));
 
     if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
         parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION };
@@ -212,8 +213,10 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream {
+pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>, override_span: Option<Span>)
+                         -> TokenStream {
     let mut srdr = lexer::StringReader::new(sess, filemap);
+    srdr.override_span = override_span;
     srdr.real_token();
     panictry!(srdr.parse_all_token_trees())
 }
@@ -2626,7 +2626,10 @@ impl<'a> Parser<'a> {
 
     pub fn process_potential_macro_variable(&mut self) {
         let ident = match self.token {
-            token::SubstNt(name) => {
+            token::Dollar if self.span.ctxt != syntax_pos::hygiene::SyntaxContext::empty() &&
+                             self.look_ahead(1, |t| t.is_ident()) => {
+                self.bump();
+                let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() };
                 self.fatal(&format!("unknown macro variable `{}`", name)).emit();
                 return
             }
@@ -172,9 +172,6 @@ pub enum Token {
     // Can be expanded into several tokens.
     /// Doc comment
     DocComment(ast::Name),
-    // In right-hand-sides of MBE macros:
-    /// A syntactic variable that will be filled in by macro expansion.
-    SubstNt(ast::Ident),
 
     // Junk. These carry no data because we don't really care about the data
     // they *would* carry, and don't really want to allocate a new ident for
@@ -270,7 +270,6 @@ pub fn token_to_string(tok: &Token) -> String {
 
         /* Other */
         token::DocComment(s) => s.to_string(),
-        token::SubstNt(s) => format!("${}", s),
         token::Eof => "<eof>".to_string(),
         token::Whitespace => " ".to_string(),
         token::Comment => "/* */".to_string(),
@@ -15,6 +15,8 @@ use syntax::feature_gate;
 use syntax::parse::token;
 use syntax::ptr::P;
 use syntax_pos::Span;
+use syntax_pos::symbol::Symbol;
+use syntax_pos::hygiene::SyntaxContext;
 use syntax::tokenstream::TokenTree;
 
 pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
@@ -50,7 +52,10 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
             }
         }
     }
-    let res = ast::Ident::from_str(&res_str);
+    let res = ast::Ident {
+        name: Symbol::intern(&res_str),
+        ctxt: SyntaxContext::empty().apply_mark(cx.current_expansion.mark),
+    };
 
     struct Result {
         ident: ast::Ident,
@@ -16,7 +16,6 @@ use syntax::ast::{self, ItemKind, Attribute, Mac};
 use syntax::attr::{mark_used, mark_known};
 use syntax::codemap::Span;
 use syntax::ext::base::*;
-use syntax::fold::Folder;
 use syntax::visit::Visitor;
 
 struct MarkAttrs<'a>(&'a [ast::Name]);
@@ -75,7 +74,7 @@ impl MultiItemModifier for ProcMacroDerive {
         MarkAttrs(&self.attrs).visit_item(&item);
 
         let input = __internal::new_token_stream(ecx.resolver.eliminate_crate_var(item.clone()));
-        let res = __internal::set_parse_sess(&ecx.parse_sess, || {
+        let res = __internal::set_sess(ecx, || {
             let inner = self.inner;
             panic::catch_unwind(panic::AssertUnwindSafe(|| inner(input)))
         });
@@ -97,9 +96,9 @@ impl MultiItemModifier for ProcMacroDerive {
             }
         };
 
-        let new_items = __internal::set_parse_sess(&ecx.parse_sess, || {
+        __internal::set_sess(ecx, || {
             match __internal::token_stream_parse_items(stream) {
-                Ok(new_items) => new_items,
+                Ok(new_items) => new_items.into_iter().map(Annotatable::Item).collect(),
                 Err(_) => {
                     // FIXME: handle this better
                     let msg = "proc-macro derive produced unparseable tokens";
@@ -107,12 +106,6 @@ impl MultiItemModifier for ProcMacroDerive {
                     panic!(FatalError);
                 }
             }
-        });
-
-        // Reassign spans of all expanded items to the input `item`
-        // for better errors here.
-        new_items.into_iter().map(|item| {
-            Annotatable::Item(ChangeSpan { span: span }.fold_item(item).expect_one(""))
-        }).collect()
+        })
     }
 }
@@ -20,7 +20,7 @@ use syntax::ext::build::AstBuilder;
 use syntax::parse::token;
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, keywords};
-use syntax_pos::{Span, DUMMY_SP};
+use syntax_pos::Span;
 use syntax::tokenstream;
 
 use std::collections::{HashMap, HashSet};
@@ -558,7 +558,9 @@ impl<'a, 'b> Context<'a, 'b> {
         // passed to this function.
         for (i, e) in self.args.into_iter().enumerate() {
             let name = self.ecx.ident_of(&format!("__arg{}", i));
-            pats.push(self.ecx.pat_ident(DUMMY_SP, name));
+            let span =
+                Span { ctxt: e.span.ctxt.apply_mark(self.ecx.current_expansion.mark), ..e.span };
+            pats.push(self.ecx.pat_ident(span, name));
             for ref arg_ty in self.arg_unique_types[i].iter() {
                 locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name));
             }
@@ -672,10 +674,10 @@ impl<'a, 'b> Context<'a, 'b> {
 }
 
 pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt,
-                               sp: Span,
+                               mut sp: Span,
                                tts: &[tokenstream::TokenTree])
                                -> Box<base::MacResult + 'cx> {
-
+    sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark);
     match parse_args(ecx, sp, tts) {
         Some((efmt, args, names)) => {
             MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names))
@@ -696,7 +698,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt,
     // `ArgumentType` does not derive `Clone`.
     let arg_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect();
     let arg_unique_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect();
-    let macsp = ecx.call_site();
+    let mut macsp = ecx.call_site();
+    macsp.ctxt = macsp.ctxt.apply_mark(ecx.current_expansion.mark);
     let msg = "format argument must be a string literal.";
     let fmt = match expr_to_spanned_string(ecx, efmt, msg) {
         Some(fmt) => fmt,
@@ -34,7 +34,7 @@ impl base::AttrProcMacro for AttrProcMacro {
         let annotation = __internal::token_stream_wrap(annotation);
         let annotated = __internal::token_stream_wrap(annotated);
 
-        let res = __internal::set_parse_sess(&ecx.parse_sess, || {
+        let res = __internal::set_sess(ecx, || {
             panic::catch_unwind(panic::AssertUnwindSafe(|| (self.inner)(annotation, annotated)))
         });
 
@@ -69,7 +69,7 @@ impl base::ProcMacro for BangProcMacro {
                  -> TokenStream {
         let input = __internal::token_stream_wrap(input);
 
-        let res = __internal::set_parse_sess(&ecx.parse_sess, || {
+        let res = __internal::set_sess(ecx, || {
             panic::catch_unwind(panic::AssertUnwindSafe(|| (self.inner)(input)))
         });
 
@@ -144,24 +144,18 @@ impl SyntaxContext {
     pub fn apply_mark(self, mark: Mark) -> SyntaxContext {
         HygieneData::with(|data| {
             let syntax_contexts = &mut data.syntax_contexts;
-            let ctxt_data = syntax_contexts[self.0 as usize];
-            if mark == ctxt_data.outer_mark {
-                return ctxt_data.prev_ctxt;
-            }
-
-            let modern = if data.marks[mark.0 as usize].modern {
-                *data.markings.entry((ctxt_data.modern, mark)).or_insert_with(|| {
-                    let modern = SyntaxContext(syntax_contexts.len() as u32);
+            let mut modern = syntax_contexts[self.0 as usize].modern;
+            if data.marks[mark.0 as usize].modern {
+                modern = *data.markings.entry((modern, mark)).or_insert_with(|| {
+                    let len = syntax_contexts.len() as u32;
                     syntax_contexts.push(SyntaxContextData {
                         outer_mark: mark,
-                        prev_ctxt: ctxt_data.modern,
-                        modern: modern,
+                        prev_ctxt: modern,
+                        modern: SyntaxContext(len),
                     });
-                    modern
-                })
-            } else {
-                ctxt_data.modern
-            };
+                    SyntaxContext(len)
+                });
+            }
 
             *data.markings.entry((self, mark)).or_insert_with(|| {
                 syntax_contexts.push(SyntaxContextData {
@@ -186,7 +186,7 @@ impl Span {
 
     pub fn to(self, end: Span) -> Span {
         // FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f. issue #23480)
-        if end.ctxt == SyntaxContext::empty() {
+        if self.ctxt == SyntaxContext::empty() {
             Span { lo: self.lo, ..end }
         } else {
             Span { hi: end.hi, ..self }
@@ -28,7 +28,6 @@ pub fn main() {
         asm!("mov $1, $0" : "=r"(x) : "r"(5));
         //~^ ERROR re-assignment of immutable variable `x`
         //~| NOTE re-assignment of immutable
-        //~| NOTE in this expansion of asm!
     }
     foo(x);
 }
@@ -23,5 +23,5 @@ fn main() {
         m!() => {} //~ NOTE the usage of `m!` is likely invalid in pattern context
     }
 
-    m!();
+    m!(); //~ NOTE in this expansion
 }
@@ -32,7 +32,7 @@ macro_rules! ignored_pat {
 ignored_item!(); //~ NOTE caused by the macro expansion here
 
 fn main() {
-    ignored_expr!();
+    ignored_expr!(); //~ NOTE in this expansion
     match 1 {
         ignored_pat!() => (), //~ NOTE caused by the macro expansion here
         _ => (),