Improve $crate

parent: 7b81106a85
commit: 8b0c292a72
13 changed files with 47 additions and 76 deletions
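Background, for readers outside the compiler: in a `macro_rules!` macro, `$crate` names the crate that defined the macro, so paths resolve even when the macro is invoked from another crate. A minimal illustration (the crate name `mylib` and items are hypothetical, not taken from this commit):

    // In crate `mylib`:
    #[macro_export]
    macro_rules! helper {
        () => {
            // `$crate` always refers to `mylib`, even when `helper!`
            // is invoked from a downstream crate where a plain
            // `internal::f` path would not resolve.
            $crate::internal::f()
        };
    }

    pub mod internal {
        pub fn f() -> u32 { 42 }
    }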
@@ -37,6 +37,7 @@ use syntax::ast::{self, Block, ForeignItem, ForeignItemKind, Item, ItemKind};
 use syntax::ast::{Mutability, StmtKind, TraitItem, TraitItemKind};
 use syntax::ast::{Variant, ViewPathGlob, ViewPathList, ViewPathSimple};
 use syntax::ext::base::{SyntaxExtension, Resolver as SyntaxResolver};
+use syntax::ext::expand::mark_tts;
 use syntax::ext::hygiene::Mark;
 use syntax::feature_gate::{self, emit_feature_err};
 use syntax::ext::tt::macro_rules;
@@ -207,11 +208,16 @@ impl<'b> Resolver<'b> {
         };
 
         let mut custom_derive_crate = false;
+        // The mark of the expansion that generates the loaded macros.
+        let mut opt_mark = None;
         for loaded_macro in self.crate_loader.load_macros(item, is_crate_root) {
+            let mark = opt_mark.unwrap_or_else(Mark::fresh);
+            opt_mark = Some(mark);
             match loaded_macro.kind {
                 LoadedMacroKind::Def(mut def) => {
                     if def.use_locally {
                         self.macro_names.insert(def.ident.name);
+                        def.body = mark_tts(&def.body, mark);
                         let ext = macro_rules::compile(&self.session.parse_sess, &def);
                         import_macro(self, def.ident.name, ext, loaded_macro.import_site);
                     }
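This hunk lazily mints one fresh expansion `Mark` per `extern crate` item and stamps it onto every loaded macro body. A self-contained sketch of that lazy one-mark-per-item pattern (toy `Mark` type and macro names, not rustc's):

    #[derive(Clone, Copy, Debug)]
    struct Mark(u32);

    fn main() {
        let mut next = 0u32;
        let mut fresh = || { next += 1; Mark(next) };

        let loaded_macros = ["vec", "format", "println"];
        let mut opt_mark: Option<Mark> = None;
        for name in &loaded_macros {
            // The first iteration allocates the mark; later ones reuse
            // it, so all macros loaded from one crate share one mark.
            let mark = *opt_mark.get_or_insert_with(&mut fresh);
            println!("{} gets {:?}", name, mark);
        }
    }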
@@ -249,6 +255,17 @@ impl<'b> Resolver<'b> {
             });
             self.define(parent, name, TypeNS, (module, sp, vis));
 
+            if let Some(mark) = opt_mark {
+                let invocation = self.arenas.alloc_invocation_data(InvocationData {
+                    module: Cell::new(module),
+                    def_index: CRATE_DEF_INDEX,
+                    const_integer: false,
+                    legacy_scope: Cell::new(LegacyScope::Empty),
+                    expansion: Cell::new(LegacyScope::Empty),
+                });
+                self.invocations.insert(mark, invocation);
+            }
+
             self.populate_module_if_necessary(module);
         } else if custom_derive_crate {
             // Define an empty module
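The `invocations` map filled in here is what `$crate` resolution consults later: each load-site mark now remembers which extern-crate module its macros came from. A sketch of that registry idea (simplified stand-in types, not rustc's):

    use std::collections::HashMap;

    #[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
    struct Mark(u32);

    #[derive(Debug)]
    struct ModuleData { name: &'static str }

    struct Resolver {
        // mark of a macro's load site -> module of the defining crate
        invocations: HashMap<Mark, ModuleData>,
    }

    impl Resolver {
        // Called while building the reduced graph, as in the hunk above.
        fn register(&mut self, mark: Mark, krate: &'static str) {
            self.invocations.insert(mark, ModuleData { name: krate });
        }

        // Called later when a path starting with `$crate` is resolved.
        fn crate_root_for(&self, mark: Mark) -> &ModuleData {
            &self.invocations[&mark]
        }
    }

    fn main() {
        let mut r = Resolver { invocations: HashMap::new() };
        r.register(Mark(1), "serde");
        println!("{:?}", r.crate_root_for(Mark(1)));
    }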
@@ -53,7 +53,7 @@ use rustc::ty;
 use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap};
 use rustc::util::nodemap::{NodeMap, NodeSet, FnvHashMap, FnvHashSet};
 
-use syntax::ext::hygiene::Mark;
+use syntax::ext::hygiene::{Mark, SyntaxContext};
 use syntax::ast::{self, FloatTy};
 use syntax::ast::{CRATE_NODE_ID, Name, NodeId, Ident, IntTy, UintTy};
 use syntax::ext::base::SyntaxExtension;
@@ -1579,6 +1579,17 @@ impl<'a> Resolver<'a> {
     /// grammar: (SELF MOD_SEP ) ? (SUPER MOD_SEP) *
     fn resolve_module_prefix(&mut self, module_path: &[Ident], span: Option<Span>)
                              -> ResolveResult<ModulePrefixResult<'a>> {
+        if &*module_path[0].name.as_str() == "$crate" {
+            let mut ctxt = module_path[0].ctxt;
+            while ctxt.source().0 != SyntaxContext::empty() {
+                ctxt = ctxt.source().0;
+            }
+            let module = self.invocations[&ctxt.source().1].module.get();
+            let crate_root =
+                if module.def_id().unwrap().is_local() { self.graph_root } else { module };
+            return Success(PrefixFound(crate_root, 1))
+        }
+
         // Start at the current module if we see `self` or `super`, or at the
         // top of the crate otherwise.
         let mut i = match &*module_path[0].name.as_str() {
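The new `$crate` branch walks the ident's syntax-context chain outward; the mark of the outermost expansion identifies the load site, and hence the defining crate's root. A self-contained sketch of that walk (toy hygiene table, not rustc's `SyntaxContext` API):

    #[derive(Clone, Copy, PartialEq, Debug)]
    struct Ctxt(usize);

    const EMPTY: Ctxt = Ctxt(0);

    struct Hygiene {
        // source[c.0] = (context this one was created from, creating mark)
        source: Vec<(Ctxt, u32)>,
    }

    impl Hygiene {
        fn source(&self, c: Ctxt) -> (Ctxt, u32) {
            self.source[c.0]
        }

        // Mirrors the loop in resolve_module_prefix: follow parents until
        // the next step would reach the empty (root) context, then read
        // the mark of that outermost expansion.
        fn outermost_mark(&self, mut c: Ctxt) -> u32 {
            while self.source(c).0 != EMPTY {
                c = self.source(c).0;
            }
            self.source(c).1
        }
    }

    fn main() {
        // EMPTY -(mark 1)-> Ctxt(1) -(mark 2)-> Ctxt(2)
        let h = Hygiene { source: vec![(EMPTY, 0), (EMPTY, 1), (Ctxt(1), 2)] };
        assert_eq!(h.outermost_mark(Ctxt(2)), 1);
    }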
@@ -29,10 +29,10 @@ use syntax_pos::Span;
 #[derive(Clone)]
 pub struct InvocationData<'a> {
     pub module: Cell<Module<'a>>,
-    def_index: DefIndex,
+    pub def_index: DefIndex,
     // True if this expansion is in a `const_integer` position, for example `[u32; m!()]`.
     // c.f. `DefCollector::visit_ast_const_integer`.
-    const_integer: bool,
+    pub const_integer: bool,
     // The scope in which the invocation path is resolved.
     pub legacy_scope: Cell<LegacyScope<'a>>,
     // The smallest scope that includes this invocation's expansion,
@@ -295,7 +295,9 @@ impl<'a> Classifier<'a> {
             "Option" | "Result" => Class::PreludeTy,
             "Some" | "None" | "Ok" | "Err" => Class::PreludeVal,
 
+            "$crate" => Class::KeyWord,
             _ if tas.tok.is_any_keyword() => Class::KeyWord,
+
             _ => {
                 if self.in_macro_nonterminal {
                     self.in_macro_nonterminal = false;
@@ -310,9 +312,6 @@ impl<'a> Classifier<'a> {
                 }
             }
 
-            // Special macro vars are like keywords.
-            token::SpecialVarNt(_) => Class::KeyWord,
-
             token::Lifetime(..) => Class::Lifetime,
 
             token::Underscore | token::Eof | token::Interpolated(..) |
@@ -939,6 +939,6 @@ impl Folder for Marker {
 }
 
 // apply a given mark to the given token trees. Used prior to expansion of a macro.
-fn mark_tts(tts: &[TokenTree], m: Mark) -> Vec<TokenTree> {
+pub fn mark_tts(tts: &[TokenTree], m: Mark) -> Vec<TokenTree> {
     noop_fold_tts(tts, &mut Marker{mark:m, expn_id: None})
 }
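`mark_tts` is made `pub` so build_reduced_graph can stamp loaded macro bodies (see the first hunk). Conceptually it is a recursive fold over token trees; a toy version under simplified assumptions (flat string tokens instead of rustc's token types, and overwriting rather than composing marks):

    #[derive(Clone, Debug)]
    enum Tt {
        Token { text: String, mark: u32 },
        Delimited(Vec<Tt>),
    }

    // Apply one mark to every token, recursing into delimited groups,
    // the way the real mark_tts folds a Marker over the token trees.
    fn mark_tts(tts: &[Tt], m: u32) -> Vec<Tt> {
        tts.iter()
            .map(|tt| match tt {
                Tt::Token { text, .. } => Tt::Token { text: text.clone(), mark: m },
                Tt::Delimited(inner) => Tt::Delimited(mark_tts(inner, m)),
            })
            .collect()
    }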
@@ -58,7 +58,6 @@ impl<'a> ParserAnyMacro<'a> {
 
 struct MacroRulesMacroExpander {
     name: ast::Ident,
-    imported_from: Option<ast::Ident>,
     lhses: Vec<TokenTree>,
     rhses: Vec<TokenTree>,
     valid: bool,
@@ -76,7 +75,6 @@ impl TTMacroExpander for MacroRulesMacroExpander {
         generic_extension(cx,
                           sp,
                           self.name,
-                          self.imported_from,
                           arg,
                           &self.lhses,
                           &self.rhses)
@@ -87,7 +85,6 @@ impl TTMacroExpander for MacroRulesMacroExpander {
 fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                           sp: Span,
                           name: ast::Ident,
-                          imported_from: Option<ast::Ident>,
                           arg: &[TokenTree],
                           lhses: &[TokenTree],
                           rhses: &[TokenTree])
@@ -116,10 +113,8 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                 _ => cx.span_bug(sp, "malformed macro rhs"),
             };
             // rhs has holes ( `$id` and `$(...)` that need filled)
-            let trncbr = new_tt_reader(&cx.parse_sess.span_diagnostic,
-                                       Some(named_matches),
-                                       imported_from,
-                                       rhs);
+            let trncbr =
+                new_tt_reader(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
             let mut p = Parser::new(cx.parse_sess(), cx.cfg().clone(), Box::new(trncbr));
             p.directory = cx.current_expansion.module.directory.clone();
             p.restrictions = match cx.current_expansion.no_noninline_mod {
@@ -223,7 +218,7 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
     ];
 
     // Parse the macro_rules! invocation (`none` is for no interpolations):
-    let arg_reader = new_tt_reader(&sess.span_diagnostic, None, None, def.body.clone());
+    let arg_reader = new_tt_reader(&sess.span_diagnostic, None, def.body.clone());
 
     let argument_map = match parse(sess, &Vec::new(), arg_reader, &argument_gram) {
         Success(m) => m,
@@ -269,7 +264,6 @@ pub fn compile(sess: &ParseSess, def: &ast::MacroDef) -> SyntaxExtension {
 
     let exp: Box<_> = Box::new(MacroRulesMacroExpander {
         name: def.ident,
-        imported_from: def.imported_from,
         lhses: lhses,
        rhses: rhses,
        valid: valid,
@@ -14,7 +14,7 @@ use syntax_pos::{Span, DUMMY_SP};
 use errors::{Handler, DiagnosticBuilder};
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use parse::token::{DocComment, MatchNt, SubstNt};
-use parse::token::{Token, Interpolated, NtIdent, NtTT, SpecialMacroVar};
+use parse::token::{Token, Interpolated, NtIdent, NtTT};
 use parse::token;
 use parse::lexer::TokenAndSpan;
 use tokenstream::{self, TokenTree};
@@ -39,10 +39,7 @@ pub struct TtReader<'a> {
     stack: Vec<TtFrame>,
     /* for MBE-style macro transcription */
     interpolations: HashMap<Ident, Rc<NamedMatch>>,
-    imported_from: Option<Ident>,
 
-    // Some => return imported_from as the next token
-    crate_name_next: Option<Span>,
     repeat_idx: Vec<usize>,
     repeat_len: Vec<usize>,
     /* cached: */
@@ -59,10 +56,9 @@ pub struct TtReader<'a> {
 /// (and should) be None.
 pub fn new_tt_reader(sp_diag: &Handler,
                      interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
-                     imported_from: Option<Ident>,
                      src: Vec<tokenstream::TokenTree>)
                      -> TtReader {
-    new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false)
+    new_tt_reader_with_doc_flag(sp_diag, interp, src, false)
 }
 
 /// The extra `desugar_doc_comments` flag enables reading doc comments
@@ -73,7 +69,6 @@ pub fn new_tt_reader(sp_diag: &Handler,
 /// (and should) be None.
 pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
                                    interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
-                                   imported_from: Option<Ident>,
                                    src: Vec<tokenstream::TokenTree>,
                                    desugar_doc_comments: bool)
                                    -> TtReader {
@@ -93,8 +88,6 @@ pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
             None => HashMap::new(),
             Some(x) => x,
         },
-        imported_from: imported_from,
-        crate_name_next: None,
         repeat_idx: Vec::new(),
         repeat_len: Vec::new(),
         desugar_doc_comments: desugar_doc_comments,
@@ -189,14 +182,6 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
         sp: r.cur_span.clone(),
     };
     loop {
-        match r.crate_name_next.take() {
-            None => (),
-            Some(sp) => {
-                r.cur_span = sp;
-                r.cur_tok = token::Ident(r.imported_from.unwrap());
-                return ret_val;
-            },
-        }
         let should_pop = match r.stack.last() {
             None => {
                 assert_eq!(ret_val.tok, token::Eof);
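The deleted block was the second half of the old two-token `$crate` expansion: the transcriber emitted `::`, stashed the crate name in `crate_name_next`, and returned it on the following call. A sketch of that now-removed buffering trick, using toy string tokens rather than rustc's token types:

    struct Reader<I: Iterator<Item = String>> {
        input: I,
        crate_name: String,      // plays the role of imported_from
        pending: Option<String>, // plays the role of crate_name_next
    }

    impl<I: Iterator<Item = String>> Iterator for Reader<I> {
        type Item = String;

        fn next(&mut self) -> Option<String> {
            // Second half of a `$crate` expansion buffered last call.
            if let Some(name) = self.pending.take() {
                return Some(name);
            }
            match self.input.next() {
                Some(ref t) if t == "$crate" => {
                    self.pending = Some(self.crate_name.clone());
                    Some("::".to_string()) // emit `::` now, name next time
                }
                other => other,
            }
        }
    }

    fn main() {
        let toks = vec!["$crate".to_string(), "fmt".to_string()];
        let r = Reader { input: toks.into_iter(), crate_name: "std".into(), pending: None };
        let out: Vec<_> = r.collect();
        assert_eq!(out, ["::", "std", "fmt"]);
    }

With `$crate` kept as a single ordinary ident, this statefulness disappears, which is why the hunks above could delete the `imported_from` and `crate_name_next` fields outright.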
@@ -346,18 +331,6 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                     sep: None
                 });
             }
-            TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
-                r.stack.last_mut().unwrap().idx += 1;
-
-                if r.imported_from.is_some() {
-                    r.cur_span = sp;
-                    r.cur_tok = token::ModSep;
-                    r.crate_name_next = Some(sp);
-                    return ret_val;
-                }
-
-                // otherwise emit nothing and proceed to the next token
-            }
             TokenTree::Token(sp, tok) => {
                 r.cur_span = sp;
                 r.cur_tok = tok;
@@ -276,7 +276,7 @@ pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
 pub fn tts_to_parser<'a>(sess: &'a ParseSess,
                          tts: Vec<tokenstream::TokenTree>,
                          cfg: ast::CrateConfig) -> Parser<'a> {
-    let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, None, tts);
+    let trdr = lexer::new_tt_reader(&sess.span_diagnostic, None, tts);
     let mut p = Parser::new(sess, cfg, Box::new(trdr));
     p.check_unknown_macro_variable();
     p
@@ -48,8 +48,7 @@ use parse::classify;
 use parse::common::SeqSep;
 use parse::lexer::{Reader, TokenAndSpan};
 use parse::obsolete::ObsoleteSyntax;
-use parse::token::{self, intern, MatchNt, SubstNt, SpecialVarNt, InternedString};
-use parse::token::{keywords, SpecialMacroVar};
+use parse::token::{self, intern, keywords, MatchNt, SubstNt, InternedString};
 use parse::{new_sub_parser_from_file, ParseSess};
 use util::parser::{AssocOp, Fixity};
 use print::pprust;
@@ -2653,8 +2652,12 @@ impl<'a> Parser<'a> {
                     num_captures: name_num
                 })));
             } else if self.token.is_keyword(keywords::Crate) {
+                let ident = match self.token {
+                    token::Ident(id) => ast::Ident { name: token::intern("$crate"), ..id },
+                    _ => unreachable!(),
+                };
                 self.bump();
-                return Ok(TokenTree::Token(sp, SpecialVarNt(SpecialMacroVar::CrateMacroVar)));
+                return Ok(TokenTree::Token(sp, token::Ident(ident)));
             } else {
                 sp = mk_sp(sp.lo, self.span.hi);
                 self.parse_ident().unwrap_or_else(|mut e| {
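Instead of a dedicated `SpecialVarNt` token, the parser now produces an ordinary ident literally named `$crate`, copying the hygiene info from the original `crate` token via functional-update syntax. A minimal sketch of that `..id` pattern (toy `Ident`, not rustc's):

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Ident {
        name: &'static str,
        ctxt: u32, // hygiene context that must be preserved
    }

    fn to_dollar_crate(id: Ident) -> Ident {
        // Swap the name, keep every other field (here: the hygiene ctxt),
        // as in `ast::Ident { name: token::intern("$crate"), ..id }`.
        Ident { name: "$crate", ..id }
    }

    fn main() {
        let kw = Ident { name: "crate", ctxt: 7 };
        assert_eq!(to_dollar_crate(kw), Ident { name: "$crate", ctxt: 7 });
    }

Preserving `ctxt` is the point: it is exactly what `resolve_module_prefix` walks later to recover the defining crate.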
@@ -52,21 +52,6 @@ pub enum DelimToken {
     NoDelim,
 }
 
-#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
-pub enum SpecialMacroVar {
-    /// `$crate` will be filled in with the name of the crate a macro was
-    /// imported from, if any.
-    CrateMacroVar,
-}
-
-impl SpecialMacroVar {
-    pub fn as_str(self) -> &'static str {
-        match self {
-            SpecialMacroVar::CrateMacroVar => "crate",
-        }
-    }
-}
-
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
 pub enum Lit {
     Byte(ast::Name),
@@ -148,8 +133,6 @@ pub enum Token {
     // In right-hand-sides of MBE macros:
     /// A syntactic variable that will be filled in by macro expansion.
     SubstNt(ast::Ident),
-    /// A macro variable with special meaning.
-    SpecialVarNt(SpecialMacroVar),
 
     // Junk. These carry no data because we don't really care about the data
     // they *would* carry, and don't really want to allocate a new ident for
@@ -285,8 +285,6 @@ pub fn token_to_string(tok: &Token) -> String {
         token::Comment => "/* */".to_string(),
         token::Shebang(s) => format!("/* shebang: {}*/", s),
 
-        token::SpecialVarNt(var) => format!("${}", var.as_str()),
-
         token::Interpolated(ref nt) => match *nt {
             token::NtExpr(ref e) => expr_to_string(&e),
             token::NtMeta(ref e) => meta_item_to_string(&e),
@@ -134,7 +134,6 @@ impl TokenTree {
                     AttrStyle::Inner => 3,
                 }
             }
-            TokenTree::Token(_, token::SpecialVarNt(..)) => 2,
             TokenTree::Token(_, token::MatchNt(..)) => 3,
             TokenTree::Token(_, token::Interpolated(Nonterminal::NtTT(..))) => 1,
             TokenTree::Delimited(_, ref delimed) => delimed.tts.len() + 2,
@@ -188,11 +187,6 @@ impl TokenTree {
                 }
                 delimed.tts[index - 1].clone()
             }
-            (&TokenTree::Token(sp, token::SpecialVarNt(var)), _) => {
-                let v = [TokenTree::Token(sp, token::Dollar),
-                         TokenTree::Token(sp, token::Ident(token::str_to_ident(var.as_str())))];
-                v[index].clone()
-            }
             (&TokenTree::Token(sp, token::MatchNt(name, kind)), _) => {
                 let v = [TokenTree::Token(sp, token::SubstNt(name)),
                          TokenTree::Token(sp, token::Colon),
@@ -223,7 +217,6 @@ impl TokenTree {
                      -> macro_parser::NamedParseResult {
         // `None` is because we're not interpolating
         let arg_rdr = lexer::new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
-                                                         None,
                                                          None,
                                                          tts.iter().cloned().collect(),
                                                          true);
@@ -38,7 +38,7 @@ pub fn bar() {
 
 
 
-    ((::std::fmt::format as
+    (($crate::fmt::format as
      fn(std::fmt::Arguments<'_>) -> std::string::String {std::fmt::format})(((::std::fmt::Arguments::new_v1
                                                                                  as
      fn(&[&str], &[std::fmt::ArgumentV1<'_>]) -> std::fmt::Arguments<'_> {std::fmt::Arguments<'_>::new_v1})(({