Auto merge of #45791 - eddyb:quote-unquote, r=jseyfried
Prefer libproc_macro APIs to libsyntax ones in the quasi-quoter. The shift to using `proc_macro`'s own APIs in `proc_macro::quote`, both in the implementation of the quasi-quoter and in the Rust code it generates to build `TokenStream`s at runtime, greatly reduces the dependency on `libsyntax`; the generated runtime code is now completely free of it. This is a prerequisite for introducing more abstraction/indirection between `proc_macro` and compiler implementation details (mainly those from `libsyntax`), which I want to attempt. cc @alexcrichton @jseyfried @nrc
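To illustrate the point about the generated runtime code, here is a sketch (not output copied from the compiler): assuming the unstable `proc_macro` API this PR targets (`TokenStream`, `TokenTree`, `TokenNode`, `Term`, `Spacing`, `Span`, with macros 2.0 hygiene making the leading `::` paths resolve inside `proc_macro`), quoting the two input tokens `a ,` should now expand to an expression shaped roughly like this, with no `syntax::tokenstream` types involved:

    // Hypothetical expansion sketch; names follow the new `Quote` impls in this diff.
    [
        // `a` becomes a Term; its span is rebuilt with `Span::default()`.
        ::TokenStream::from(::TokenTree {
            span: ::Span::default(),
            kind: ::TokenNode::Term(::Term::intern("a")),
        }),
        // `,` becomes an Op with explicit spacing.
        ::TokenStream::from(::TokenTree {
            span: ::Span::default(),
            kind: ::TokenNode::Op(',', ::Spacing::Alone),
        }),
    ].iter().cloned().collect::<::TokenStream>()

Previously the equivalent expression was assembled through `__rt` re-exports of `syntax::tokenstream::TokenStreamBuilder` and `syntax::parse::token`, which is exactly the dependency this change removes.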
commit 563dc5171f
2 changed files with 174 additions and 172 deletions
src/libproc_macro/lib.rs

@@ -191,7 +191,7 @@ impl Default for Span {
 /// This is needed to implement a custom quoter.
 #[unstable(feature = "proc_macro", issue = "38356")]
 pub fn quote_span(span: Span) -> TokenStream {
-    TokenStream(quote::Quote::quote(&span.0))
+    quote::Quote::quote(span)
 }

 macro_rules! diagnostic_method {

@@ -728,7 +728,7 @@ impl TokenTree {
 #[unstable(feature = "proc_macro_internals", issue = "27812")]
 #[doc(hidden)]
 pub mod __internal {
-    pub use quote::{Quoter, __rt};
+    pub use quote::{LiteralKind, Quoter, unquote};

     use std::cell::Cell;

src/libproc_macro/quote.rs

@@ -11,253 +11,255 @@
 //! # Quasiquoter
 //! This file contains the implementation internals of the quasiquoter provided by `quote!`.

-//! This quasiquoter uses macros 2.0 hygiene to reliably use items from `__rt`,
-//! including re-exported API `libsyntax`, to build a `syntax::tokenstream::TokenStream`
-//! and wrap it into a `proc_macro::TokenStream`.
+//! This quasiquoter uses macros 2.0 hygiene to reliably access
+//! items from `proc_macro`, to build a `proc_macro::TokenStream`.
+
+use {Delimiter, Literal, Spacing, Span, Term, TokenNode, TokenStream, TokenTree};

-use syntax::ast::Ident;
 use syntax::ext::base::{ExtCtxt, ProcMacro};
-use syntax::parse::token::{self, Token, Lit};
-use syntax::symbol::Symbol;
-use syntax::tokenstream::{Delimited, TokenTree, TokenStream, TokenStreamBuilder};
-use syntax_pos::{DUMMY_SP, Span};
-use syntax_pos::hygiene::SyntaxContext;
+use syntax::parse::token;
+use syntax::tokenstream;

 pub struct Quoter;

-pub mod __rt {
-    pub use syntax::ast::Ident;
-    pub use syntax::parse::token;
-    pub use syntax::symbol::Symbol;
-    pub use syntax::tokenstream::{TokenStream, TokenStreamBuilder, TokenTree, Delimited};
-    pub use super::{ctxt, span};
-
-    pub fn unquote<T: Into<::TokenStream> + Clone>(tokens: &T) -> TokenStream {
-        T::into(tokens.clone()).0
-    }
-}
-
-pub fn ctxt() -> SyntaxContext {
-    ::__internal::with_sess(|(_, mark)| SyntaxContext::empty().apply_mark(mark))
-}
-
-pub fn span() -> Span {
-    ::Span::default().0
+pub fn unquote<T: Into<TokenStream> + Clone>(tokens: &T) -> TokenStream {
+    T::into(tokens.clone())
 }

 pub trait Quote {
-    fn quote(&self) -> TokenStream;
+    fn quote(self) -> TokenStream;
 }

 macro_rules! quote_tok {
-    (,) => { Token::Comma };
-    (.) => { Token::Dot };
-    (:) => { Token::Colon };
-    (::) => { Token::ModSep };
-    (!) => { Token::Not };
-    (<) => { Token::Lt };
-    (>) => { Token::Gt };
-    (_) => { Token::Underscore };
-    (0) => { Token::Literal(token::Lit::Integer(Symbol::intern("0")), None) };
-    (&) => { Token::BinOp(token::And) };
-    ($i:ident) => { Token::Ident(Ident { name: Symbol::intern(stringify!($i)), ctxt: ctxt() }) };
+    (,) => { TokenNode::Op(',', Spacing::Alone) };
+    (.) => { TokenNode::Op('.', Spacing::Alone) };
+    (:) => { TokenNode::Op(':', Spacing::Alone) };
+    (::) => {
+        [
+            TokenNode::Op(':', Spacing::Joint),
+            TokenNode::Op(':', Spacing::Alone)
+        ].iter().cloned().collect::<TokenStream>()
+    };
+    (!) => { TokenNode::Op('!', Spacing::Alone) };
+    (<) => { TokenNode::Op('<', Spacing::Alone) };
+    (>) => { TokenNode::Op('>', Spacing::Alone) };
+    (_) => { TokenNode::Op('_', Spacing::Alone) };
+    (0) => { TokenNode::Literal(::Literal::integer(0)) };
+    (&) => { TokenNode::Op('&', Spacing::Alone) };
+    ($i:ident) => { TokenNode::Term(Term::intern(stringify!($i))) };
 }

 macro_rules! quote_tree {
-    ((unquote $($t:tt)*)) => { TokenStream::from($($t)*) };
+    ((unquote $($t:tt)*)) => { $($t)* };
     ((quote $($t:tt)*)) => { ($($t)*).quote() };
-    (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) };
-    ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) };
-    ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) };
-    (rt) => { quote!(::__internal::__rt) };
-    ($t:tt) => { TokenStream::from(TokenTree::Token(span(), quote_tok!($t))) };
-}
-
-fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
-    TokenTree::Delimited(span(), Delimited { delim: delim, tts: stream.into() }).into()
+    (($($t:tt)*)) => { TokenNode::Group(Delimiter::Parenthesis, quote!($($t)*)) };
+    ([$($t:tt)*]) => { TokenNode::Group(Delimiter::Bracket, quote!($($t)*)) };
+    ({$($t:tt)*}) => { TokenNode::Group(Delimiter::Brace, quote!($($t)*)) };
+    ($t:tt) => { quote_tok!($t) };
 }

 macro_rules! quote {
     () => { TokenStream::empty() };
-    ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::<TokenStream>() };
+    ($($t:tt)*) => {
+        [
+            $(TokenStream::from(quote_tree!($t)),)*
+        ].iter().cloned().collect::<TokenStream>()
+    };
 }

 impl ProcMacro for Quoter {
-    fn expand<'cx>(&self, cx: &'cx mut ExtCtxt, _: Span, stream: TokenStream) -> TokenStream {
+    fn expand<'cx>(&self, cx: &'cx mut ExtCtxt,
+                   _: ::syntax_pos::Span,
+                   stream: tokenstream::TokenStream)
+                   -> tokenstream::TokenStream {
         let mut info = cx.current_expansion.mark.expn_info().unwrap();
         info.callee.allow_internal_unstable = true;
         cx.current_expansion.mark.set_expn_info(info);
-        ::__internal::set_sess(cx, || quote!(::TokenStream { 0: (quote stream) }))
+        ::__internal::set_sess(cx, || TokenStream(stream).quote().0)
     }
 }

 impl<T: Quote> Quote for Option<T> {
-    fn quote(&self) -> TokenStream {
-        match *self {
-            Some(ref t) => quote!(Some((quote t))),
+    fn quote(self) -> TokenStream {
+        match self {
+            Some(t) => quote!(Some((quote t))),
             None => quote!(None),
         }
     }
 }

 impl Quote for TokenStream {
-    fn quote(&self) -> TokenStream {
-        let mut builder = TokenStreamBuilder::new();
-        builder.push(quote!(rt::TokenStreamBuilder::new()));
-        let mut trees = self.trees();
-        loop {
-            let (mut tree, mut is_joint) = match trees.next_as_stream() {
-                Some(next) => next.as_tree(),
-                None => return builder.add(quote!(.build())).build(),
-            };
-            if let TokenTree::Token(_, Token::Dollar) = tree {
-                let (next_tree, next_is_joint) = match trees.next_as_stream() {
-                    Some(next) => next.as_tree(),
-                    None => panic!("unexpected trailing `$` in `quote!`"),
-                };
-                match next_tree {
-                    TokenTree::Token(_, Token::Ident(..)) => {
-                        builder.push(quote!(.add(rt::unquote(&(unquote next_tree)))));
-                        continue
-                    }
-                    TokenTree::Token(_, Token::Dollar) => {
-                        tree = next_tree;
-                        is_joint = next_is_joint;
+    fn quote(self) -> TokenStream {
+        if self.is_empty() {
+            return quote!(::TokenStream::empty());
+        }
+        let mut after_dollar = false;
+        let tokens = self.into_iter().filter_map(|tree| {
+            if after_dollar {
+                after_dollar = false;
+                match tree.kind {
+                    TokenNode::Term(_) => {
+                        return Some(quote!(::__internal::unquote(&(unquote tree)),));
                     }
+                    TokenNode::Op('$', _) => {}
                     _ => panic!("`$` must be followed by an ident or `$` in `quote!`"),
                 }
+            } else if let TokenNode::Op('$', _) = tree.kind {
+                after_dollar = true;
+                return None;
             }

-            builder.push(match is_joint {
-                true => quote!(.add((quote tree).joint())),
-                false => quote!(.add(rt::TokenStream::from((quote tree)))),
-            });
+            Some(quote!(::TokenStream::from((quote tree)),))
+        }).collect::<TokenStream>();
+        if after_dollar {
+            panic!("unexpected trailing `$` in `quote!`");
         }

+        quote!([(unquote tokens)].iter().cloned().collect::<::TokenStream>())
     }
 }

 impl Quote for TokenTree {
-    fn quote(&self) -> TokenStream {
-        match *self {
-            TokenTree::Token(span, ref token) => quote! {
-                rt::TokenTree::Token((quote span), (quote token))
-            },
-            TokenTree::Delimited(span, ref delimited) => quote! {
-                rt::TokenTree::Delimited((quote span), (quote delimited))
-            },
-        }
+    fn quote(self) -> TokenStream {
+        quote!(::TokenTree { span: (quote self.span), kind: (quote self.kind) })
     }
 }

-impl Quote for Delimited {
-    fn quote(&self) -> TokenStream {
-        quote!(rt::Delimited { delim: (quote self.delim), tts: (quote self.stream()).into() })
+impl Quote for TokenNode {
+    fn quote(self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident($($arg:ident),+)),*) => {
+                match self {
+                    $(TokenNode::$i($($arg),+) => quote! {
+                        ::TokenNode::$i($((quote $arg)),+)
+                    },)*
+                }
+            }
+        }
+
+        gen_match! { Op(op, kind), Group(delim, tokens), Term(term), Literal(lit) }
+    }
+}
+
+impl Quote for char {
+    fn quote(self) -> TokenStream {
+        TokenNode::Literal(Literal::character(self)).into()
     }
 }

 impl<'a> Quote for &'a str {
-    fn quote(&self) -> TokenStream {
-        TokenTree::Token(span(), Token::Literal(token::Lit::Str_(Symbol::intern(self)), None))
-            .into()
+    fn quote(self) -> TokenStream {
+        TokenNode::Literal(Literal::string(self)).into()
     }
 }

 impl Quote for usize {
-    fn quote(&self) -> TokenStream {
-        let integer_symbol = Symbol::intern(&self.to_string());
-        TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Integer(integer_symbol), None))
-            .into()
+    fn quote(self) -> TokenStream {
+        TokenNode::Literal(Literal::integer(self as i128)).into()
     }
 }

-impl Quote for Ident {
-    fn quote(&self) -> TokenStream {
-        quote!(rt::Ident { name: (quote self.name), ctxt: rt::ctxt() })
-    }
-}
-
-impl Quote for Symbol {
-    fn quote(&self) -> TokenStream {
-        quote!(rt::Symbol::intern((quote &*self.as_str())))
+impl Quote for Term {
+    fn quote(self) -> TokenStream {
+        quote!(::Term::intern((quote self.as_str())))
     }
 }

 impl Quote for Span {
-    fn quote(&self) -> TokenStream {
-        quote!(rt::span())
+    fn quote(self) -> TokenStream {
+        quote!(::Span::default())
     }
 }

-impl Quote for Token {
-    fn quote(&self) -> TokenStream {
-        macro_rules! gen_match {
-            ($($i:ident),*; $($t:tt)*) => {
-                match *self {
-                    $( Token::$i => quote!(rt::token::$i), )*
-                    $( $t )*
-                }
-            }
+macro_rules! literals {
+    ($($i:ident),*; $($raw:ident),*) => {
+        pub enum LiteralKind {
+            $($i,)*
+            $($raw(usize),)*
         }

-        gen_match! {
-            Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot,
-            DotDotEq, Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar,
-            Question, Underscore;
-            Token::OpenDelim(delim) => quote!(rt::token::OpenDelim((quote delim))),
-            Token::CloseDelim(delim) => quote!(rt::token::CloseDelim((quote delim))),
-            Token::BinOp(tok) => quote!(rt::token::BinOp((quote tok))),
-            Token::BinOpEq(tok) => quote!(rt::token::BinOpEq((quote tok))),
-            Token::Ident(ident) => quote!(rt::token::Ident((quote ident))),
-            Token::Lifetime(ident) => quote!(rt::token::Lifetime((quote ident))),
-            Token::Literal(lit, sfx) => quote!(rt::token::Literal((quote lit), (quote sfx))),
-            _ => panic!("Unhandled case!"),
-        }
-    }
-}
-
-impl Quote for token::BinOpToken {
-    fn quote(&self) -> TokenStream {
-        macro_rules! gen_match {
-            ($($i:ident),*) => {
-                match *self {
-                    $( token::BinOpToken::$i => quote!(rt::token::BinOpToken::$i), )*
-                }
-            }
-        }
-
-        gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr)
-    }
-}
-
-impl Quote for Lit {
-    fn quote(&self) -> TokenStream {
-        macro_rules! gen_match {
-            ($($i:ident),*; $($raw:ident),*) => {
-                match *self {
-                    $( Lit::$i(lit) => quote!(rt::token::Lit::$i((quote lit))), )*
-                    $( Lit::$raw(lit, n) => {
-                        quote!(::syntax::parse::token::Lit::$raw((quote lit), (quote n)))
+        impl LiteralKind {
+            pub fn with_contents_and_suffix(self, contents: Term, suffix: Option<Term>)
+                                            -> Literal {
+                let contents = contents.0;
+                let suffix = suffix.map(|t| t.0);
+                match self {
+                    $(LiteralKind::$i => {
+                        Literal(token::Literal(token::Lit::$i(contents), suffix))
+                    })*
+                    $(LiteralKind::$raw(n) => {
+                        Literal(token::Literal(token::Lit::$raw(contents, n), suffix))
                     })*
                 }
             }
         }

-        gen_match!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw)
-    }
-}
+        impl Literal {
+            fn kind_contents_and_suffix(self) -> (LiteralKind, Term, Option<Term>) {
+                let (lit, suffix) = match self.0 {
+                    token::Literal(lit, suffix) => (lit, suffix),
+                    _ => panic!("unsupported literal {:?}", self.0),
+                };

-impl Quote for token::DelimToken {
-    fn quote(&self) -> TokenStream {
-        macro_rules! gen_match {
-            ($($i:ident),*) => {
-                match *self {
-                    $(token::DelimToken::$i => { quote!(rt::token::DelimToken::$i) })*
+                let (kind, contents) = match lit {
+                    $(token::Lit::$i(contents) => (LiteralKind::$i, contents),)*
+                    $(token::Lit::$raw(contents, n) => (LiteralKind::$raw(n), contents),)*
+                };
+                (kind, Term(contents), suffix.map(Term))
+            }
+        }
+
+        impl Quote for LiteralKind {
+            fn quote(self) -> TokenStream {
+                match self {
+                    $(LiteralKind::$i => quote! {
+                        ::__internal::LiteralKind::$i
+                    },)*
+                    $(LiteralKind::$raw(n) => quote! {
+                        ::__internal::LiteralKind::$raw((quote n))
+                    },)*
                 }
             }
         }

-        gen_match!(Paren, Bracket, Brace, NoDelim)
+        impl Quote for Literal {
+            fn quote(self) -> TokenStream {
+                let (kind, contents, suffix) = self.kind_contents_and_suffix();
+                quote! {
+                    (quote kind).with_contents_and_suffix((quote contents), (quote suffix))
+                }
+            }
+        }
+    }
+}
+
+literals!(Byte, Char, Float, Str_, Integer, ByteStr; StrRaw, ByteStrRaw);
+
+impl Quote for Delimiter {
+    fn quote(self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident),*) => {
+                match self {
+                    $(Delimiter::$i => { quote!(::Delimiter::$i) })*
+                }
+            }
+        }
+
+        gen_match!(Parenthesis, Brace, Bracket, None)
+    }
+}
+
+impl Quote for Spacing {
+    fn quote(self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident),*) => {
+                match self {
+                    $(Spacing::$i => { quote!(::Spacing::$i) })*
+                }
+            }
+        }
+
+        gen_match!(Alone, Joint)
     }
 }