use TokenStream rather than &[TokenTree] for built-in macros
That way, we don't lose the jointness info
This commit is contained in:
parent
b3146549ab
commit
fa893a3225
20 changed files with 109 additions and 113 deletions
|
@ -57,12 +57,12 @@ extern crate rustc;
|
||||||
extern crate rustc_driver;
|
extern crate rustc_driver;
|
||||||
|
|
||||||
use syntax::parse::token::{self, Token};
|
use syntax::parse::token::{self, Token};
|
||||||
use syntax::tokenstream::TokenTree;
|
use syntax::tokenstream::{TokenTree, TokenStream};
|
||||||
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
|
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
use rustc_driver::plugin::Registry;
|
use rustc_driver::plugin::Registry;
|
||||||
|
|
||||||
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
|
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: TokenStream)
|
||||||
-> Box<dyn MacResult + 'static> {
|
-> Box<dyn MacResult + 'static> {
|
||||||
|
|
||||||
static NUMERALS: &'static [(&'static str, usize)] = &[
|
static NUMERALS: &'static [(&'static str, usize)] = &[
|
||||||
|
@ -78,7 +78,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
|
||||||
return DummyResult::any(sp);
|
return DummyResult::any(sp);
|
||||||
}
|
}
|
||||||
|
|
||||||
let text = match args[0] {
|
let text = match args.into_trees().next().unwrap() {
|
||||||
TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
|
TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
|
||||||
_ => {
|
_ => {
|
||||||
cx.span_err(sp, "argument should be a single identifier");
|
cx.span_err(sp, "argument should be a single identifier");
|
||||||
|
|
|
@ -1354,7 +1354,7 @@ impl EncodeContext<'tcx> {
|
||||||
let def_id = self.tcx.hir().local_def_id(macro_def.hir_id);
|
let def_id = self.tcx.hir().local_def_id(macro_def.hir_id);
|
||||||
Entry {
|
Entry {
|
||||||
kind: EntryKind::MacroDef(self.lazy(MacroDef {
|
kind: EntryKind::MacroDef(self.lazy(MacroDef {
|
||||||
body: pprust::tokens_to_string(macro_def.body.clone()),
|
body: pprust::tts_to_string(macro_def.body.clone()),
|
||||||
legacy: macro_def.legacy,
|
legacy: macro_def.legacy,
|
||||||
})),
|
})),
|
||||||
visibility: self.lazy(ty::Visibility::Public),
|
visibility: self.lazy(ty::Visibility::Public),
|
||||||
|
|
|
@ -6,7 +6,7 @@ use crate::ext::base::{ExtCtxt, MacEager, MacResult};
|
||||||
use crate::parse::token::{self, Token};
|
use crate::parse::token::{self, Token};
|
||||||
use crate::ptr::P;
|
use crate::ptr::P;
|
||||||
use crate::symbol::kw;
|
use crate::symbol::kw;
|
||||||
use crate::tokenstream::{TokenTree};
|
use crate::tokenstream::{TokenTree, TokenStream};
|
||||||
|
|
||||||
use smallvec::smallvec;
|
use smallvec::smallvec;
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
|
@ -27,12 +27,11 @@ pub type ErrorMap = BTreeMap<Name, ErrorInfo>;
|
||||||
|
|
||||||
pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
|
pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
|
||||||
span: Span,
|
span: Span,
|
||||||
token_tree: &[TokenTree])
|
tts: TokenStream)
|
||||||
-> Box<dyn MacResult+'cx> {
|
-> Box<dyn MacResult+'cx> {
|
||||||
let code = match token_tree {
|
assert_eq!(tts.len(), 1);
|
||||||
[
|
let code = match tts.into_trees().next() {
|
||||||
TokenTree::Token(Token { kind: token::Ident(code, _), .. })
|
Some(TokenTree::Token(Token { kind: token::Ident(code, _), .. })) => code,
|
||||||
] => code,
|
|
||||||
_ => unreachable!()
|
_ => unreachable!()
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -62,20 +61,21 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt<'_>,
|
||||||
|
|
||||||
pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
|
pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
|
||||||
span: Span,
|
span: Span,
|
||||||
token_tree: &[TokenTree])
|
tts: TokenStream)
|
||||||
-> Box<dyn MacResult+'cx> {
|
-> Box<dyn MacResult+'cx> {
|
||||||
let (code, description) = match token_tree {
|
assert!(tts.len() == 1 || tts.len() == 3);
|
||||||
[
|
let mut cursor = tts.into_trees();
|
||||||
TokenTree::Token(Token { kind: token::Ident(code, _), .. })
|
let code = match cursor.next() {
|
||||||
] => {
|
Some(TokenTree::Token(Token { kind: token::Ident(code, _), .. })) => code,
|
||||||
(*code, None)
|
_ => unreachable!()
|
||||||
},
|
};
|
||||||
[
|
let description = match (cursor.next(), cursor.next()) {
|
||||||
TokenTree::Token(Token { kind: token::Ident(code, _), .. }),
|
(None, None) => None,
|
||||||
TokenTree::Token(Token { kind: token::Comma, .. }),
|
(
|
||||||
TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), ..})
|
Some(TokenTree::Token(Token { kind: token::Comma, .. })),
|
||||||
] => {
|
Some(TokenTree::Token(Token { kind: token::Literal(token::Lit { symbol, .. }), ..}))
|
||||||
(*code, Some(*symbol))
|
) => {
|
||||||
|
Some(symbol)
|
||||||
},
|
},
|
||||||
_ => unreachable!()
|
_ => unreachable!()
|
||||||
};
|
};
|
||||||
|
@ -121,12 +121,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt<'_>,
|
||||||
|
|
||||||
pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
|
pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt<'_>,
|
||||||
span: Span,
|
span: Span,
|
||||||
token_tree: &[TokenTree])
|
tts: TokenStream)
|
||||||
-> Box<dyn MacResult+'cx> {
|
-> Box<dyn MacResult+'cx> {
|
||||||
assert_eq!(token_tree.len(), 3);
|
assert_eq!(tts.len(), 3);
|
||||||
let ident = match &token_tree[2] {
|
let ident = match tts.into_trees().nth(2) {
|
||||||
// DIAGNOSTICS ident.
|
// DIAGNOSTICS ident.
|
||||||
&TokenTree::Token(Token { kind: token::Ident(name, _), span })
|
Some(TokenTree::Token(Token { kind: token::Ident(name, _), span }))
|
||||||
=> Ident::new(name, span),
|
=> Ident::new(name, span),
|
||||||
_ => unreachable!()
|
_ => unreachable!()
|
||||||
};
|
};
|
||||||
|
|
|
@ -10,7 +10,7 @@ use crate::parse::token;
|
||||||
use crate::ptr::P;
|
use crate::ptr::P;
|
||||||
use crate::symbol::{kw, sym, Ident, Symbol};
|
use crate::symbol::{kw, sym, Ident, Symbol};
|
||||||
use crate::{ThinVec, MACRO_ARGUMENTS};
|
use crate::{ThinVec, MACRO_ARGUMENTS};
|
||||||
use crate::tokenstream::{self, TokenStream, TokenTree};
|
use crate::tokenstream::{self, TokenStream};
|
||||||
use crate::visit::Visitor;
|
use crate::visit::Visitor;
|
||||||
|
|
||||||
use errors::{DiagnosticBuilder, DiagnosticId};
|
use errors::{DiagnosticBuilder, DiagnosticId};
|
||||||
|
@ -235,18 +235,18 @@ pub trait TTMacroExpander {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub type MacroExpanderFn =
|
pub type MacroExpanderFn =
|
||||||
for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, &[tokenstream::TokenTree])
|
for<'cx> fn(&'cx mut ExtCtxt<'_>, Span, TokenStream)
|
||||||
-> Box<dyn MacResult+'cx>;
|
-> Box<dyn MacResult+'cx>;
|
||||||
|
|
||||||
impl<F> TTMacroExpander for F
|
impl<F> TTMacroExpander for F
|
||||||
where F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, &[tokenstream::TokenTree])
|
where F: for<'cx> Fn(&'cx mut ExtCtxt<'_>, Span, TokenStream)
|
||||||
-> Box<dyn MacResult+'cx>
|
-> Box<dyn MacResult+'cx>
|
||||||
{
|
{
|
||||||
fn expand<'cx>(
|
fn expand<'cx>(
|
||||||
&self,
|
&self,
|
||||||
ecx: &'cx mut ExtCtxt<'_>,
|
ecx: &'cx mut ExtCtxt<'_>,
|
||||||
span: Span,
|
span: Span,
|
||||||
input: TokenStream,
|
mut input: TokenStream,
|
||||||
) -> Box<dyn MacResult+'cx> {
|
) -> Box<dyn MacResult+'cx> {
|
||||||
struct AvoidInterpolatedIdents;
|
struct AvoidInterpolatedIdents;
|
||||||
|
|
||||||
|
@ -268,10 +268,8 @@ impl<F> TTMacroExpander for F
|
||||||
mut_visit::noop_visit_mac(mac, self)
|
mut_visit::noop_visit_mac(mac, self)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
AvoidInterpolatedIdents.visit_tts(&mut input);
|
||||||
let input: Vec<_> =
|
(*self)(ecx, span, input)
|
||||||
input.trees().map(|mut tt| { AvoidInterpolatedIdents.visit_tt(&mut tt); tt }).collect();
|
|
||||||
(*self)(ecx, span, &input)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -677,7 +675,7 @@ impl SyntaxExtension {
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn dummy_bang(edition: Edition) -> SyntaxExtension {
|
pub fn dummy_bang(edition: Edition) -> SyntaxExtension {
|
||||||
fn expander<'cx>(_: &'cx mut ExtCtxt<'_>, span: Span, _: &[TokenTree])
|
fn expander<'cx>(_: &'cx mut ExtCtxt<'_>, span: Span, _: TokenStream)
|
||||||
-> Box<dyn MacResult + 'cx> {
|
-> Box<dyn MacResult + 'cx> {
|
||||||
DummyResult::any(span)
|
DummyResult::any(span)
|
||||||
}
|
}
|
||||||
|
@ -811,9 +809,8 @@ impl<'a> ExtCtxt<'a> {
|
||||||
pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
|
pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
|
||||||
expand::MacroExpander::new(self, true)
|
expand::MacroExpander::new(self, true)
|
||||||
}
|
}
|
||||||
|
pub fn new_parser_from_tts(&self, stream: TokenStream) -> parser::Parser<'a> {
|
||||||
pub fn new_parser_from_tts(&self, tts: &[tokenstream::TokenTree]) -> parser::Parser<'a> {
|
parse::stream_to_parser(self.parse_sess, stream, MACRO_ARGUMENTS)
|
||||||
parse::stream_to_parser(self.parse_sess, tts.iter().cloned().collect(), MACRO_ARGUMENTS)
|
|
||||||
}
|
}
|
||||||
pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
|
pub fn source_map(&self) -> &'a SourceMap { self.parse_sess.source_map() }
|
||||||
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
|
pub fn parse_sess(&self) -> &'a parse::ParseSess { self.parse_sess }
|
||||||
|
@ -1019,7 +1016,7 @@ pub fn expr_to_string(cx: &mut ExtCtxt<'_>, expr: P<ast::Expr>, err_msg: &str)
|
||||||
/// done as rarely as possible).
|
/// done as rarely as possible).
|
||||||
pub fn check_zero_tts(cx: &ExtCtxt<'_>,
|
pub fn check_zero_tts(cx: &ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree],
|
tts: TokenStream,
|
||||||
name: &str) {
|
name: &str) {
|
||||||
if !tts.is_empty() {
|
if !tts.is_empty() {
|
||||||
cx.span_err(sp, &format!("{} takes no arguments", name));
|
cx.span_err(sp, &format!("{} takes no arguments", name));
|
||||||
|
@ -1030,7 +1027,7 @@ pub fn check_zero_tts(cx: &ExtCtxt<'_>,
|
||||||
/// expect exactly one string literal, or emit an error and return `None`.
|
/// expect exactly one string literal, or emit an error and return `None`.
|
||||||
pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
|
pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree],
|
tts: TokenStream,
|
||||||
name: &str)
|
name: &str)
|
||||||
-> Option<String> {
|
-> Option<String> {
|
||||||
let mut p = cx.new_parser_from_tts(tts);
|
let mut p = cx.new_parser_from_tts(tts);
|
||||||
|
@ -1053,7 +1050,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
|
||||||
/// parsing error, emit a non-fatal error and return `None`.
|
/// parsing error, emit a non-fatal error and return `None`.
|
||||||
pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>,
|
pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
|
tts: TokenStream) -> Option<Vec<P<ast::Expr>>> {
|
||||||
let mut p = cx.new_parser_from_tts(tts);
|
let mut p = cx.new_parser_from_tts(tts);
|
||||||
let mut es = Vec::new();
|
let mut es = Vec::new();
|
||||||
while p.token != token::Eof {
|
while p.token != token::Eof {
|
||||||
|
|
|
@ -701,7 +701,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
|
||||||
path: &Path,
|
path: &Path,
|
||||||
span: Span,
|
span: Span,
|
||||||
) -> AstFragment {
|
) -> AstFragment {
|
||||||
let mut parser = self.cx.new_parser_from_tts(&toks.into_trees().collect::<Vec<_>>());
|
let mut parser = self.cx.new_parser_from_tts(toks);
|
||||||
match parser.parse_ast_fragment(kind, false) {
|
match parser.parse_ast_fragment(kind, false) {
|
||||||
Ok(fragment) => {
|
Ok(fragment) => {
|
||||||
parser.ensure_complete_parse(path, kind.name(), span);
|
parser.ensure_complete_parse(path, kind.name(), span);
|
||||||
|
|
|
@ -356,11 +356,7 @@ pub fn tt_to_string(tt: tokenstream::TokenTree) -> String {
|
||||||
to_string(|s| s.print_tt(tt, false))
|
to_string(|s| s.print_tt(tt, false))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn tts_to_string(tts: &[tokenstream::TokenTree]) -> String {
|
pub fn tts_to_string(tokens: TokenStream) -> String {
|
||||||
tokens_to_string(tts.iter().cloned().collect())
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn tokens_to_string(tokens: TokenStream) -> String {
|
|
||||||
to_string(|s| s.print_tts(tokens, false))
|
to_string(|s| s.print_tts(tokens, false))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -506,7 +506,7 @@ impl Cursor {
|
||||||
|
|
||||||
impl fmt::Display for TokenStream {
|
impl fmt::Display for TokenStream {
|
||||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||||
f.write_str(&pprust::tokens_to_string(self.clone()))
|
f.write_str(&pprust::tts_to_string(self.clone()))
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -8,13 +8,12 @@ use errors::DiagnosticBuilder;
|
||||||
|
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::ext::base::{self, *};
|
use syntax::ext::base::{self, *};
|
||||||
use syntax::parse;
|
|
||||||
use syntax::parse::token::{self, Token};
|
use syntax::parse::token::{self, Token};
|
||||||
use syntax::ptr::P;
|
use syntax::ptr::P;
|
||||||
use syntax::symbol::{kw, sym, Symbol};
|
use syntax::symbol::{kw, sym, Symbol};
|
||||||
use syntax::ast::AsmDialect;
|
use syntax::ast::AsmDialect;
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
use syntax::tokenstream;
|
use syntax::tokenstream::{self, TokenStream};
|
||||||
use syntax::{span_err, struct_span_err};
|
use syntax::{span_err, struct_span_err};
|
||||||
|
|
||||||
enum State {
|
enum State {
|
||||||
|
@ -43,7 +42,7 @@ const OPTIONS: &[Symbol] = &[sym::volatile, sym::alignstack, sym::intel];
|
||||||
|
|
||||||
pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree])
|
tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult + 'cx> {
|
-> Box<dyn base::MacResult + 'cx> {
|
||||||
let mut inline_asm = match parse_inline_asm(cx, sp, tts) {
|
let mut inline_asm = match parse_inline_asm(cx, sp, tts) {
|
||||||
Ok(Some(inline_asm)) => inline_asm,
|
Ok(Some(inline_asm)) => inline_asm,
|
||||||
|
@ -71,20 +70,20 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
fn parse_inline_asm<'a>(
|
fn parse_inline_asm<'a>(
|
||||||
cx: &mut ExtCtxt<'a>,
|
cx: &mut ExtCtxt<'a>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree],
|
tts: TokenStream,
|
||||||
) -> Result<Option<ast::InlineAsm>, DiagnosticBuilder<'a>> {
|
) -> Result<Option<ast::InlineAsm>, DiagnosticBuilder<'a>> {
|
||||||
// Split the tts before the first colon, to avoid `asm!("x": y)` being
|
// Split the tts before the first colon, to avoid `asm!("x": y)` being
|
||||||
// parsed as `asm!(z)` with `z = "x": y` which is type ascription.
|
// parsed as `asm!(z)` with `z = "x": y` which is type ascription.
|
||||||
let first_colon = tts.iter()
|
let first_colon = tts.trees()
|
||||||
.position(|tt| {
|
.position(|tt| {
|
||||||
match *tt {
|
match tt {
|
||||||
tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
|
tokenstream::TokenTree::Token(Token { kind: token::Colon, .. }) |
|
||||||
tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
|
tokenstream::TokenTree::Token(Token { kind: token::ModSep, .. }) => true,
|
||||||
_ => false,
|
_ => false,
|
||||||
}
|
}
|
||||||
})
|
})
|
||||||
.unwrap_or(tts.len());
|
.unwrap_or(tts.len());
|
||||||
let mut p = cx.new_parser_from_tts(&tts[first_colon..]);
|
let mut p = cx.new_parser_from_tts(tts.trees().skip(first_colon).collect());
|
||||||
let mut asm = kw::Invalid;
|
let mut asm = kw::Invalid;
|
||||||
let mut asm_str_style = None;
|
let mut asm_str_style = None;
|
||||||
let mut outputs = Vec::new();
|
let mut outputs = Vec::new();
|
||||||
|
@ -110,7 +109,8 @@ fn parse_inline_asm<'a>(
|
||||||
));
|
));
|
||||||
}
|
}
|
||||||
// Nested parser, stop before the first colon (see above).
|
// Nested parser, stop before the first colon (see above).
|
||||||
let mut p2 = cx.new_parser_from_tts(&tts[..first_colon]);
|
let mut p2 =
|
||||||
|
cx.new_parser_from_tts(tts.trees().take(first_colon).collect());
|
||||||
|
|
||||||
if p2.token == token::Eof {
|
if p2.token == token::Eof {
|
||||||
let mut err =
|
let mut err =
|
||||||
|
@ -129,12 +129,8 @@ fn parse_inline_asm<'a>(
|
||||||
// This is most likely malformed.
|
// This is most likely malformed.
|
||||||
if p2.token != token::Eof {
|
if p2.token != token::Eof {
|
||||||
let mut extra_tts = p2.parse_all_token_trees()?;
|
let mut extra_tts = p2.parse_all_token_trees()?;
|
||||||
extra_tts.extend(tts[first_colon..].iter().cloned());
|
extra_tts.extend(tts.trees().skip(first_colon));
|
||||||
p = parse::stream_to_parser(
|
p = cx.new_parser_from_tts(extra_tts.into_iter().collect());
|
||||||
cx.parse_sess,
|
|
||||||
extra_tts.into_iter().collect(),
|
|
||||||
Some("inline assembly"),
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
asm = s;
|
asm = s;
|
||||||
|
|
|
@ -13,7 +13,7 @@ use syntax_pos::{Span, DUMMY_SP};
|
||||||
pub fn expand_assert<'cx>(
|
pub fn expand_assert<'cx>(
|
||||||
cx: &'cx mut ExtCtxt<'_>,
|
cx: &'cx mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[TokenTree],
|
tts: TokenStream,
|
||||||
) -> Box<dyn MacResult + 'cx> {
|
) -> Box<dyn MacResult + 'cx> {
|
||||||
let Assert { cond_expr, custom_message } = match parse_assert(cx, sp, tts) {
|
let Assert { cond_expr, custom_message } = match parse_assert(cx, sp, tts) {
|
||||||
Ok(assert) => assert,
|
Ok(assert) => assert,
|
||||||
|
@ -59,9 +59,9 @@ struct Assert {
|
||||||
fn parse_assert<'a>(
|
fn parse_assert<'a>(
|
||||||
cx: &mut ExtCtxt<'a>,
|
cx: &mut ExtCtxt<'a>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[TokenTree]
|
stream: TokenStream
|
||||||
) -> Result<Assert, DiagnosticBuilder<'a>> {
|
) -> Result<Assert, DiagnosticBuilder<'a>> {
|
||||||
let mut parser = cx.new_parser_from_tts(tts);
|
let mut parser = cx.new_parser_from_tts(stream);
|
||||||
|
|
||||||
if parser.token == token::Eof {
|
if parser.token == token::Eof {
|
||||||
let mut err = cx.struct_span_err(sp, "macro requires a boolean expression as an argument");
|
let mut err = cx.struct_span_err(sp, "macro requires a boolean expression as an argument");
|
||||||
|
|
|
@ -7,14 +7,14 @@ use errors::DiagnosticBuilder;
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::ext::base::{self, *};
|
use syntax::ext::base::{self, *};
|
||||||
use syntax::attr;
|
use syntax::attr;
|
||||||
use syntax::tokenstream;
|
use syntax::tokenstream::TokenStream;
|
||||||
use syntax::parse::token;
|
use syntax::parse::token;
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
|
|
||||||
pub fn expand_cfg(
|
pub fn expand_cfg(
|
||||||
cx: &mut ExtCtxt<'_>,
|
cx: &mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree],
|
tts: TokenStream,
|
||||||
) -> Box<dyn base::MacResult + 'static> {
|
) -> Box<dyn base::MacResult + 'static> {
|
||||||
let sp = cx.with_legacy_ctxt(sp);
|
let sp = cx.with_legacy_ctxt(sp);
|
||||||
|
|
||||||
|
@ -33,7 +33,7 @@ pub fn expand_cfg(
|
||||||
fn parse_cfg<'a>(
|
fn parse_cfg<'a>(
|
||||||
cx: &mut ExtCtxt<'a>,
|
cx: &mut ExtCtxt<'a>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree],
|
tts: TokenStream,
|
||||||
) -> Result<ast::MetaItem, DiagnosticBuilder<'a>> {
|
) -> Result<ast::MetaItem, DiagnosticBuilder<'a>> {
|
||||||
let mut p = cx.new_parser_from_tts(tts);
|
let mut p = cx.new_parser_from_tts(tts);
|
||||||
|
|
||||||
|
|
|
@ -2,11 +2,11 @@
|
||||||
|
|
||||||
use syntax::ext::base::{self, *};
|
use syntax::ext::base::{self, *};
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
use syntax::tokenstream;
|
use syntax::tokenstream::TokenStream;
|
||||||
|
|
||||||
pub fn expand_compile_error<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
pub fn expand_compile_error<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree])
|
tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult + 'cx> {
|
-> Box<dyn base::MacResult + 'cx> {
|
||||||
let var = match get_single_str_from_tts(cx, sp, tts, "compile_error!") {
|
let var = match get_single_str_from_tts(cx, sp, tts, "compile_error!") {
|
||||||
None => return DummyResult::any(sp),
|
None => return DummyResult::any(sp),
|
||||||
|
|
|
@ -1,14 +1,14 @@
|
||||||
use syntax::ast;
|
use syntax::ast;
|
||||||
use syntax::ext::base::{self, DummyResult};
|
use syntax::ext::base::{self, DummyResult};
|
||||||
use syntax::symbol::Symbol;
|
use syntax::symbol::Symbol;
|
||||||
use syntax::tokenstream;
|
use syntax::tokenstream::TokenStream;
|
||||||
|
|
||||||
use std::string::String;
|
use std::string::String;
|
||||||
|
|
||||||
pub fn expand_syntax_ext(
|
pub fn expand_syntax_ext(
|
||||||
cx: &mut base::ExtCtxt<'_>,
|
cx: &mut base::ExtCtxt<'_>,
|
||||||
sp: syntax_pos::Span,
|
sp: syntax_pos::Span,
|
||||||
tts: &[tokenstream::TokenTree],
|
tts: TokenStream,
|
||||||
) -> Box<dyn base::MacResult + 'static> {
|
) -> Box<dyn base::MacResult + 'static> {
|
||||||
let es = match base::get_exprs_from_tts(cx, sp, tts) {
|
let es = match base::get_exprs_from_tts(cx, sp, tts) {
|
||||||
Some(e) => e,
|
Some(e) => e,
|
||||||
|
|
|
@ -6,11 +6,11 @@ use syntax::parse::token::{self, Token};
|
||||||
use syntax::ptr::P;
|
use syntax::ptr::P;
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
use syntax_pos::symbol::Symbol;
|
use syntax_pos::symbol::Symbol;
|
||||||
use syntax::tokenstream::TokenTree;
|
use syntax::tokenstream::{TokenTree, TokenStream};
|
||||||
|
|
||||||
pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[TokenTree])
|
tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult + 'cx> {
|
-> Box<dyn base::MacResult + 'cx> {
|
||||||
if tts.is_empty() {
|
if tts.is_empty() {
|
||||||
cx.span_err(sp, "concat_idents! takes 1 or more arguments.");
|
cx.span_err(sp, "concat_idents! takes 1 or more arguments.");
|
||||||
|
@ -18,9 +18,9 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut res_str = String::new();
|
let mut res_str = String::new();
|
||||||
for (i, e) in tts.iter().enumerate() {
|
for (i, e) in tts.into_trees().enumerate() {
|
||||||
if i & 1 == 1 {
|
if i & 1 == 1 {
|
||||||
match *e {
|
match e {
|
||||||
TokenTree::Token(Token { kind: token::Comma, .. }) => {}
|
TokenTree::Token(Token { kind: token::Comma, .. }) => {}
|
||||||
_ => {
|
_ => {
|
||||||
cx.span_err(sp, "concat_idents! expecting comma.");
|
cx.span_err(sp, "concat_idents! expecting comma.");
|
||||||
|
@ -28,7 +28,7 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
match *e {
|
match e {
|
||||||
TokenTree::Token(Token { kind: token::Ident(name, _), .. }) =>
|
TokenTree::Token(Token { kind: token::Ident(name, _), .. }) =>
|
||||||
res_str.push_str(&name.as_str()),
|
res_str.push_str(&name.as_str()),
|
||||||
_ => {
|
_ => {
|
||||||
|
|
|
@ -7,13 +7,13 @@ use syntax::ast::{self, Ident, GenericArg};
|
||||||
use syntax::ext::base::{self, *};
|
use syntax::ext::base::{self, *};
|
||||||
use syntax::symbol::{kw, sym, Symbol};
|
use syntax::symbol::{kw, sym, Symbol};
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
use syntax::tokenstream;
|
use syntax::tokenstream::TokenStream;
|
||||||
|
|
||||||
use std::env;
|
use std::env;
|
||||||
|
|
||||||
pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree])
|
tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult + 'cx> {
|
-> Box<dyn base::MacResult + 'cx> {
|
||||||
let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") {
|
let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") {
|
||||||
None => return DummyResult::any(sp),
|
None => return DummyResult::any(sp),
|
||||||
|
@ -45,7 +45,7 @@ pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
|
|
||||||
pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree])
|
tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult + 'cx> {
|
-> Box<dyn base::MacResult + 'cx> {
|
||||||
let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
|
let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
|
||||||
Some(ref exprs) if exprs.is_empty() => {
|
Some(ref exprs) if exprs.is_empty() => {
|
||||||
|
|
|
@ -11,7 +11,7 @@ use syntax::ext::base::{self, *};
|
||||||
use syntax::parse::token;
|
use syntax::parse::token;
|
||||||
use syntax::ptr::P;
|
use syntax::ptr::P;
|
||||||
use syntax::symbol::{Symbol, sym};
|
use syntax::symbol::{Symbol, sym};
|
||||||
use syntax::tokenstream;
|
use syntax::tokenstream::TokenStream;
|
||||||
use syntax_pos::{MultiSpan, Span};
|
use syntax_pos::{MultiSpan, Span};
|
||||||
|
|
||||||
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
|
||||||
|
@ -126,7 +126,7 @@ struct Context<'a, 'b> {
|
||||||
fn parse_args<'a>(
|
fn parse_args<'a>(
|
||||||
ecx: &mut ExtCtxt<'a>,
|
ecx: &mut ExtCtxt<'a>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree]
|
tts: TokenStream,
|
||||||
) -> Result<(P<ast::Expr>, Vec<P<ast::Expr>>, FxHashMap<Symbol, usize>), DiagnosticBuilder<'a>> {
|
) -> Result<(P<ast::Expr>, Vec<P<ast::Expr>>, FxHashMap<Symbol, usize>), DiagnosticBuilder<'a>> {
|
||||||
let mut args = Vec::<P<ast::Expr>>::new();
|
let mut args = Vec::<P<ast::Expr>>::new();
|
||||||
let mut names = FxHashMap::<Symbol, usize>::default();
|
let mut names = FxHashMap::<Symbol, usize>::default();
|
||||||
|
@ -794,7 +794,7 @@ impl<'a, 'b> Context<'a, 'b> {
|
||||||
fn expand_format_args_impl<'cx>(
|
fn expand_format_args_impl<'cx>(
|
||||||
ecx: &'cx mut ExtCtxt<'_>,
|
ecx: &'cx mut ExtCtxt<'_>,
|
||||||
mut sp: Span,
|
mut sp: Span,
|
||||||
tts: &[tokenstream::TokenTree],
|
tts: TokenStream,
|
||||||
nl: bool,
|
nl: bool,
|
||||||
) -> Box<dyn base::MacResult + 'cx> {
|
) -> Box<dyn base::MacResult + 'cx> {
|
||||||
sp = ecx.with_def_site_ctxt(sp);
|
sp = ecx.with_def_site_ctxt(sp);
|
||||||
|
@ -812,7 +812,7 @@ fn expand_format_args_impl<'cx>(
|
||||||
pub fn expand_format_args<'cx>(
|
pub fn expand_format_args<'cx>(
|
||||||
ecx: &'cx mut ExtCtxt<'_>,
|
ecx: &'cx mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree],
|
tts: TokenStream,
|
||||||
) -> Box<dyn base::MacResult + 'cx> {
|
) -> Box<dyn base::MacResult + 'cx> {
|
||||||
expand_format_args_impl(ecx, sp, tts, false)
|
expand_format_args_impl(ecx, sp, tts, false)
|
||||||
}
|
}
|
||||||
|
@ -820,7 +820,7 @@ pub fn expand_format_args<'cx>(
|
||||||
pub fn expand_format_args_nl<'cx>(
|
pub fn expand_format_args_nl<'cx>(
|
||||||
ecx: &'cx mut ExtCtxt<'_>,
|
ecx: &'cx mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree],
|
tts: TokenStream,
|
||||||
) -> Box<dyn base::MacResult + 'cx> {
|
) -> Box<dyn base::MacResult + 'cx> {
|
||||||
expand_format_args_impl(ecx, sp, tts, true)
|
expand_format_args_impl(ecx, sp, tts, true)
|
||||||
}
|
}
|
||||||
|
|
|
@ -16,12 +16,12 @@ use syntax::ext::base::{self, *};
|
||||||
use syntax::parse::token;
|
use syntax::parse::token;
|
||||||
use syntax::ptr::P;
|
use syntax::ptr::P;
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
use syntax::tokenstream;
|
use syntax::tokenstream::TokenStream;
|
||||||
use smallvec::smallvec;
|
use smallvec::smallvec;
|
||||||
|
|
||||||
pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree]) -> Box<dyn base::MacResult + 'cx> {
|
tts: TokenStream) -> Box<dyn base::MacResult + 'cx> {
|
||||||
match parse_global_asm(cx, sp, tts) {
|
match parse_global_asm(cx, sp, tts) {
|
||||||
Ok(Some(global_asm)) => {
|
Ok(Some(global_asm)) => {
|
||||||
MacEager::items(smallvec![P(ast::Item {
|
MacEager::items(smallvec![P(ast::Item {
|
||||||
|
@ -45,7 +45,7 @@ pub fn expand_global_asm<'cx>(cx: &'cx mut ExtCtxt<'_>,
|
||||||
fn parse_global_asm<'a>(
|
fn parse_global_asm<'a>(
|
||||||
cx: &mut ExtCtxt<'a>,
|
cx: &mut ExtCtxt<'a>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tts: &[tokenstream::TokenTree]
|
tts: TokenStream
|
||||||
) -> Result<Option<ast::GlobalAsm>, DiagnosticBuilder<'a>> {
|
) -> Result<Option<ast::GlobalAsm>, DiagnosticBuilder<'a>> {
|
||||||
let mut p = cx.new_parser_from_tts(tts);
|
let mut p = cx.new_parser_from_tts(tts);
|
||||||
|
|
||||||
|
|
|
@ -1,11 +1,11 @@
|
||||||
use syntax::ext::base;
|
use syntax::ext::base;
|
||||||
use syntax::print;
|
use syntax::print;
|
||||||
use syntax::tokenstream;
|
use syntax::tokenstream::TokenStream;
|
||||||
use syntax_pos;
|
use syntax_pos;
|
||||||
|
|
||||||
pub fn expand_syntax_ext<'cx>(_cx: &'cx mut base::ExtCtxt<'_>,
|
pub fn expand_syntax_ext<'cx>(_cx: &'cx mut base::ExtCtxt<'_>,
|
||||||
sp: syntax_pos::Span,
|
sp: syntax_pos::Span,
|
||||||
tts: &[tokenstream::TokenTree])
|
tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult + 'cx> {
|
-> Box<dyn base::MacResult + 'cx> {
|
||||||
println!("{}", print::pprust::tts_to_string(tts));
|
println!("{}", print::pprust::tts_to_string(tts));
|
||||||
|
|
||||||
|
|
|
@ -4,7 +4,7 @@ use syntax::parse::{self, token, DirectoryOwnership};
|
||||||
use syntax::print::pprust;
|
use syntax::print::pprust;
|
||||||
use syntax::ptr::P;
|
use syntax::ptr::P;
|
||||||
use syntax::symbol::Symbol;
|
use syntax::symbol::Symbol;
|
||||||
use syntax::tokenstream;
|
use syntax::tokenstream::TokenStream;
|
||||||
|
|
||||||
use smallvec::SmallVec;
|
use smallvec::SmallVec;
|
||||||
use syntax_pos::{self, Pos, Span};
|
use syntax_pos::{self, Pos, Span};
|
||||||
|
@ -16,7 +16,7 @@ use rustc_data_structures::sync::Lrc;
|
||||||
// a given file into the current one.
|
// a given file into the current one.
|
||||||
|
|
||||||
/// line!(): expands to the current line number
|
/// line!(): expands to the current line number
|
||||||
pub fn expand_line(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
|
pub fn expand_line(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult+'static> {
|
-> Box<dyn base::MacResult+'static> {
|
||||||
base::check_zero_tts(cx, sp, tts, "line!");
|
base::check_zero_tts(cx, sp, tts, "line!");
|
||||||
|
|
||||||
|
@ -27,7 +27,7 @@ pub fn expand_line(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree
|
||||||
}
|
}
|
||||||
|
|
||||||
/* column!(): expands to the current column number */
|
/* column!(): expands to the current column number */
|
||||||
pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
|
pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult+'static> {
|
-> Box<dyn base::MacResult+'static> {
|
||||||
base::check_zero_tts(cx, sp, tts, "column!");
|
base::check_zero_tts(cx, sp, tts, "column!");
|
||||||
|
|
||||||
|
@ -40,7 +40,7 @@ pub fn expand_column(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTr
|
||||||
/// file!(): expands to the current filename */
|
/// file!(): expands to the current filename */
|
||||||
/// The source_file (`loc.file`) contains a bunch more information we could spit
|
/// The source_file (`loc.file`) contains a bunch more information we could spit
|
||||||
/// out if we wanted.
|
/// out if we wanted.
|
||||||
pub fn expand_file(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
|
pub fn expand_file(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult+'static> {
|
-> Box<dyn base::MacResult+'static> {
|
||||||
base::check_zero_tts(cx, sp, tts, "file!");
|
base::check_zero_tts(cx, sp, tts, "file!");
|
||||||
|
|
||||||
|
@ -49,13 +49,13 @@ pub fn expand_file(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree
|
||||||
base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string())))
|
base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string())))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expand_stringify(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
|
pub fn expand_stringify(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult+'static> {
|
-> Box<dyn base::MacResult+'static> {
|
||||||
let s = pprust::tts_to_string(tts);
|
let s = pprust::tts_to_string(tts);
|
||||||
base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
|
base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expand_mod(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
|
pub fn expand_mod(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult+'static> {
|
-> Box<dyn base::MacResult+'static> {
|
||||||
base::check_zero_tts(cx, sp, tts, "module_path!");
|
base::check_zero_tts(cx, sp, tts, "module_path!");
|
||||||
let mod_path = &cx.current_expansion.module.mod_path;
|
let mod_path = &cx.current_expansion.module.mod_path;
|
||||||
|
@ -67,7 +67,7 @@ pub fn expand_mod(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree]
|
||||||
/// include! : parse the given file as an expr
|
/// include! : parse the given file as an expr
|
||||||
/// This is generally a bad idea because it's going to behave
|
/// This is generally a bad idea because it's going to behave
|
||||||
/// unhygienically.
|
/// unhygienically.
|
||||||
pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
|
pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult+'cx> {
|
-> Box<dyn base::MacResult+'cx> {
|
||||||
let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
|
let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
|
||||||
Some(f) => f,
|
Some(f) => f,
|
||||||
|
@ -105,7 +105,7 @@ pub fn expand_include<'cx>(cx: &'cx mut ExtCtxt<'_>, sp: Span, tts: &[tokenstrea
|
||||||
}
|
}
|
||||||
|
|
||||||
// include_str! : read the given file, insert it as a literal string expr
|
// include_str! : read the given file, insert it as a literal string expr
|
||||||
pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
|
pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult+'static> {
|
-> Box<dyn base::MacResult+'static> {
|
||||||
let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
|
let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
|
||||||
Some(f) => f,
|
Some(f) => f,
|
||||||
|
@ -130,7 +130,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::To
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: &[tokenstream::TokenTree])
|
pub fn expand_include_bytes(cx: &mut ExtCtxt<'_>, sp: Span, tts: TokenStream)
|
||||||
-> Box<dyn base::MacResult+'static> {
|
-> Box<dyn base::MacResult+'static> {
|
||||||
let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
|
let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
|
||||||
Some(f) => f,
|
Some(f) => f,
|
||||||
|
|
|
@ -1,20 +1,27 @@
|
||||||
use syntax::ext::base::{self, ExtCtxt};
|
use syntax::ext::base::{self, ExtCtxt};
|
||||||
use syntax::symbol::kw;
|
use syntax::symbol::kw;
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
use syntax::tokenstream::TokenTree;
|
use syntax::tokenstream::{TokenTree, TokenStream};
|
||||||
|
|
||||||
pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>,
|
pub fn expand_trace_macros(cx: &mut ExtCtxt<'_>,
|
||||||
sp: Span,
|
sp: Span,
|
||||||
tt: &[TokenTree])
|
tt: TokenStream)
|
||||||
-> Box<dyn base::MacResult + 'static> {
|
-> Box<dyn base::MacResult + 'static> {
|
||||||
match tt {
|
let mut cursor = tt.into_trees();
|
||||||
[TokenTree::Token(token)] if token.is_keyword(kw::True) => {
|
let mut err = false;
|
||||||
cx.set_trace_macros(true);
|
let value = match &cursor.next() {
|
||||||
}
|
Some(TokenTree::Token(token)) if token.is_keyword(kw::True) => true,
|
||||||
[TokenTree::Token(token)] if token.is_keyword(kw::False) => {
|
Some(TokenTree::Token(token)) if token.is_keyword(kw::False) => false,
|
||||||
cx.set_trace_macros(false);
|
_ => {
|
||||||
}
|
err = true;
|
||||||
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
|
false
|
||||||
|
},
|
||||||
|
};
|
||||||
|
err |= cursor.next().is_some();
|
||||||
|
if err {
|
||||||
|
cx.span_err(sp, "trace_macros! accepts only `true` or `false`")
|
||||||
|
} else {
|
||||||
|
cx.set_trace_macros(value);
|
||||||
}
|
}
|
||||||
|
|
||||||
base::DummyResult::any_valid(sp)
|
base::DummyResult::any_valid(sp)
|
||||||
|
|
|
@ -15,12 +15,12 @@ extern crate rustc;
|
||||||
extern crate rustc_driver;
|
extern crate rustc_driver;
|
||||||
|
|
||||||
use syntax::parse::token::{self, Token};
|
use syntax::parse::token::{self, Token};
|
||||||
use syntax::tokenstream::TokenTree;
|
use syntax::tokenstream::{TokenTree, TokenStream};
|
||||||
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
|
use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacEager};
|
||||||
use syntax_pos::Span;
|
use syntax_pos::Span;
|
||||||
use rustc_driver::plugin::Registry;
|
use rustc_driver::plugin::Registry;
|
||||||
|
|
||||||
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
|
fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: TokenStream)
|
||||||
-> Box<dyn MacResult + 'static> {
|
-> Box<dyn MacResult + 'static> {
|
||||||
|
|
||||||
static NUMERALS: &'static [(&'static str, usize)] = &[
|
static NUMERALS: &'static [(&'static str, usize)] = &[
|
||||||
|
@ -36,7 +36,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
|
||||||
return DummyResult::any(sp);
|
return DummyResult::any(sp);
|
||||||
}
|
}
|
||||||
|
|
||||||
let text = match args[0] {
|
let text = match args.into_trees().next().unwrap() {
|
||||||
TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
|
TokenTree::Token(Token { kind: token::Ident(s, _), .. }) => s.to_string(),
|
||||||
_ => {
|
_ => {
|
||||||
cx.span_err(sp, "argument should be a single identifier");
|
cx.span_err(sp, "argument should be a single identifier");
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue