
Auto merge of #39173 - jseyfried:tokenstream, r=nrc

Refactor `TokenStream`

r? @nrc
bors 2017-01-24 09:29:18 +00:00
commit 65b17f53ef
33 changed files with 465 additions and 1786 deletions

@@ -60,7 +60,7 @@ RUSTC_CRATES := rustc rustc_typeck rustc_mir rustc_borrowck rustc_resolve rustc_
 	rustc_data_structures rustc_platform_intrinsics rustc_errors \
 	rustc_plugin rustc_metadata rustc_passes rustc_save_analysis \
 	rustc_const_eval rustc_const_math rustc_incremental proc_macro
-HOST_CRATES := syntax syntax_ext proc_macro_tokens proc_macro_plugin syntax_pos $(RUSTC_CRATES) \
+HOST_CRATES := syntax syntax_ext proc_macro_plugin syntax_pos $(RUSTC_CRATES) \
 	rustdoc fmt_macros flate arena graphviz log serialize
 TOOLS := compiletest rustdoc rustc rustbook error_index_generator

@@ -102,8 +102,7 @@ DEPS_syntax := std term serialize log arena libc rustc_bitflags std_unicode rust
 DEPS_syntax_ext := syntax syntax_pos rustc_errors fmt_macros proc_macro
 DEPS_proc_macro := syntax syntax_pos rustc_plugin log
 DEPS_syntax_pos := serialize
-DEPS_proc_macro_tokens := syntax syntax_pos log
-DEPS_proc_macro_plugin := syntax syntax_pos rustc_plugin log proc_macro_tokens
+DEPS_proc_macro_plugin := syntax syntax_pos rustc_plugin
 DEPS_rustc_const_math := std syntax log serialize rustc_i128
 DEPS_rustc_const_eval := rustc_const_math rustc syntax log serialize \

src/Cargo.lock (generated)
@@ -208,17 +208,9 @@ dependencies = [
 name = "proc_macro_plugin"
 version = "0.0.0"
 dependencies = [
- "log 0.0.0",
- "proc_macro_tokens 0.0.0",
  "rustc_plugin 0.0.0",
  "syntax 0.0.0",
+ "syntax_pos 0.0.0",
 ]
-
-[[package]]
-name = "proc_macro_tokens"
-version = "0.0.0"
-dependencies = [
- "syntax 0.0.0",
-]

 [[package]]

@@ -82,14 +82,15 @@ pub mod __internal {
     use syntax::ast;
     use syntax::ptr::P;
     use syntax::parse::{self, token, ParseSess};
-    use syntax::tokenstream::TokenStream as TokenStream_;
+    use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_};

     use super::{TokenStream, LexError};

     pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
-        TokenStream { inner: TokenStream_::from_tokens(vec![
-            token::Interpolated(Rc::new(token::NtItem(item)))
-        ])}
+        TokenStream {
+            inner: TokenTree::Token(item.span, token::Interpolated(Rc::new(token::NtItem(item))))
+                .into()
+        }
     }

     pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {

@@ -175,7 +176,7 @@ impl FromStr for TokenStream {
             let tts = try!(parse::parse_tts_from_source_str(name, src, sess)
                 .map_err(parse_to_lex_err));

-            Ok(__internal::token_stream_wrap(TokenStream_::from_tts(tts)))
+            Ok(__internal::token_stream_wrap(tts.into_iter().collect()))
         })
     }
 }
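
The hunks above replace the removed `TokenStream_::from_tokens`/`from_tts` constructors with iterator-based conversions. A minimal sketch of the new construction idiom, assuming the refactored `syntax::tokenstream` API from this PR (illustrative only; these are `rustc_private` compiler internals, and the trait impls themselves live in the suppressed tokenstream.rs diff below):

    // Sketch: building the refactored TokenStream (rustc_private internals).
    use syntax::parse::token::Token;
    use syntax::tokenstream::{TokenTree, TokenStream};
    use syntax_pos::DUMMY_SP;

    fn stream_from_trees(tts: Vec<TokenTree>) -> TokenStream {
        // Old API: TokenStream::from_tts(tts).
        // New API: any iterator of token trees collects into a stream.
        tts.into_iter().collect()
    }

    fn stream_from_one_token() -> TokenStream {
        // A single TokenTree now converts directly, as in `new_token_stream` above.
        TokenTree::Token(DUMMY_SP, Token::Comma).into()
    }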

@@ -8,7 +8,6 @@ path = "lib.rs"
 crate-type = ["dylib"]

 [dependencies]
-log = { path = "../liblog" }
 rustc_plugin = { path = "../librustc_plugin" }
 syntax = { path = "../libsyntax" }
-proc_macro_tokens = { path = "../libproc_macro_tokens" }
+syntax_pos = { path = "../libsyntax_pos" }

@@ -15,11 +15,8 @@
 //! ## Usage
 //! This crate provides the `qquote!` macro for syntax creation.
 //!
-//! The `qquote!` macro imports `syntax::ext::proc_macro_shim::prelude::*`, so you
-//! will need to `extern crate syntax` for usage. (This is a temporary solution until more
-//! of the external API in libproc_macro_tokens is stabilized to support the token construction
-//! operations that the qausiquoter relies on.) The shim file also provides additional
-//! operations, such as `build_block_emitter` (as used in the `cond` example below).
+//! The `qquote!` macro uses the crate `syntax`, so users must declare `extern crate syntax;`
+//! at the crate root. This is a temporary solution until we have better hygiene.
 //!
 //! ## Quasiquotation
 //!

@@ -88,19 +85,20 @@
 extern crate rustc_plugin;
 extern crate syntax;
-extern crate proc_macro_tokens;
-
-#[macro_use] extern crate log;
+extern crate syntax_pos;

 mod qquote;

 use qquote::qquote;

 use rustc_plugin::Registry;
+use syntax::ext::base::SyntaxExtension;
+use syntax::symbol::Symbol;

 // ____________________________________________________________________________________________
 // Main macro definition

 #[plugin_registrar]
 pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_macro("qquote", qquote);
+    reg.register_syntax_extension(Symbol::intern("qquote"),
+                                  SyntaxExtension::ProcMacro(Box::new(qquote)));
 }
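
With registration switched to `SyntaxExtension::ProcMacro`, a client crate drives the plugin roughly the way the updated tests at the bottom of this diff do. A hedged sketch (feature gates mirror those tests; `TokenStream`'s `Display` impl is assumed from the `{}` formatting used in the cond plugin below):

    // Sketch of a client crate using the rewritten plugin.
    #![feature(plugin, rustc_private)]
    #![plugin(proc_macro_plugin)]

    extern crate syntax;

    fn main() {
        // `qquote!` now expands directly to a `syntax::tokenstream::TokenStream`.
        let stream = qquote!(if true { 1 } else { 2 });
        println!("{}", stream);
    }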

@@ -9,463 +9,219 @@
 // except according to those terms.

-//! # Quasiquoter
-//! This file contains the implementation internals of the quasiquoter provided by `quote!`.
-//!
-//! ## Ouput
-//! The quasiquoter produces output of the form:
-//! let tmp0 = ...;
-//! let tmp1 = ...;
-//! ...
-//! concat(from_tokens(...), concat(...))
-//!
-//! To the more explicit, the quasiquoter produces a series of bindings that each
-//! construct TokenStreams via constructing Tokens and using `from_tokens`, ultimately
-//! invoking `concat` on these bindings (and inlined expressions) to construct a
-//! TokenStream that resembles the output syntax.
-//!
-
-use proc_macro_tokens::build::*;
-use proc_macro_tokens::parse::lex;
-
-use qquote::int_build::*;
-
-use syntax::ast::Ident;
-use syntax::codemap::Span;
-use syntax::ext::base::*;
-use syntax::ext::base;
-use syntax::ext::proc_macro_shim::build_block_emitter;
-use syntax::parse::token::{self, Token};
-use syntax::print::pprust;
-use syntax::symbol::Symbol;
-use syntax::tokenstream::{TokenTree, TokenStream};
-
-// ____________________________________________________________________________________________
-// Main definition
-
-/// The user should use the macro, not this procedure.
-pub fn qquote<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree])
-                   -> Box<base::MacResult + 'cx> {
-    debug!("\nTTs in: {:?}\n", pprust::tts_to_string(&tts[..]));
-    let output = qquoter(cx, TokenStream::from_tts(tts.clone().to_owned()));
-    debug!("\nQQ out: {}\n", pprust::tts_to_string(&output.to_tts()[..]));
-    let imports = concat(lex("use syntax::ext::proc_macro_shim::prelude::*;"),
-                         lex("use proc_macro_tokens::prelude::*;"));
-    build_block_emitter(cx, sp, build_brace_delimited(concat(imports, output)))
-}
-
-// ____________________________________________________________________________________________
-// Datatype Definitions
-
-#[derive(Debug)]
-struct QDelimited {
-    delim: token::DelimToken,
-    open_span: Span,
-    tts: Vec<Qtt>,
-    close_span: Span,
-}
-
-#[derive(Debug)]
-enum Qtt {
-    TT(TokenTree),
-    Delimited(QDelimited),
-    QIdent(TokenTree),
-}
-
-type Bindings = Vec<(Ident, TokenStream)>;
-
-// ____________________________________________________________________________________________
-// Quasiquoter Algorithm
-
-// This algorithm works as follows:
-// Input: TokenStream
-// 1. Walk the TokenStream, gathering up the unquoted expressions and marking them separately.
-// 2. Hoist any unquoted term into its own let-binding via a gensym'd identifier
-// 3. Convert the body from a `complex expression` into a simplified one via `convert_complex_tts
-// 4. Stitch everything together with `concat`.
-fn qquoter<'cx>(cx: &'cx mut ExtCtxt, ts: TokenStream) -> TokenStream {
-    if ts.is_empty() {
-        return lex("TokenStream::mk_empty()");
-    }
-    let qq_res = qquote_iter(cx, 0, ts);
-    let mut bindings = qq_res.0;
-    let body = qq_res.1;
-    let mut cct_res = convert_complex_tts(cx, body);
-
-    bindings.append(&mut cct_res.0);
-
-    if bindings.is_empty() {
-        cct_res.1
-    } else {
-        debug!("BINDINGS");
-        for b in bindings.clone() {
-            debug!("{:?} = {}", b.0, pprust::tts_to_string(&b.1.to_tts()[..]));
-        }
-        TokenStream::concat(unravel(bindings), cct_res.1)
-    }
-}
-
-fn qquote_iter<'cx>(cx: &'cx mut ExtCtxt, depth: i64, ts: TokenStream) -> (Bindings, Vec<Qtt>) {
-    let mut depth = depth;
-    let mut bindings: Bindings = Vec::new();
-    let mut output: Vec<Qtt> = Vec::new();
-
-    let mut iter = ts.iter();
-
-    loop {
-        let next = iter.next();
-        if next.is_none() {
-            break;
-        }
-        let next = next.unwrap().clone();
-        match next {
-            TokenTree::Token(_, Token::Ident(id)) if is_unquote(id) => {
-                if depth == 0 {
-                    let exp = iter.next();
-                    if exp.is_none() {
-                        break;
-                    } // produce an error or something first
-                    let exp = vec![exp.unwrap().to_owned()];
-                    debug!("RHS: {:?}", exp.clone());
-                    let new_id = Ident::with_empty_ctxt(Symbol::gensym("tmp"));
-                    debug!("RHS TS: {:?}", TokenStream::from_tts(exp.clone()));
-                    debug!("RHS TS TT: {:?}", TokenStream::from_tts(exp.clone()).to_vec());
-                    bindings.push((new_id, TokenStream::from_tts(exp)));
-                    debug!("BINDINGS");
-                    for b in bindings.clone() {
-                        debug!("{:?} = {}", b.0, pprust::tts_to_string(&b.1.to_tts()[..]));
-                    }
-                    output.push(Qtt::QIdent(as_tt(Token::Ident(new_id.clone()))));
-                } else {
-                    depth = depth - 1;
-                    output.push(Qtt::TT(next.clone()));
-                }
-            }
-            TokenTree::Token(_, Token::Ident(id)) if is_qquote(id) => {
-                depth = depth + 1;
-            }
-            TokenTree::Delimited(_, ref dl) => {
-                let br = qquote_iter(cx, depth, TokenStream::from_tts(dl.tts.clone().to_owned()));
-                let mut nested_bindings = br.0;
-                let nested = br.1;
-                bindings.append(&mut nested_bindings);
-
-                let new_dl = QDelimited {
-                    delim: dl.delim,
-                    open_span: dl.open_span,
-                    tts: nested,
-                    close_span: dl.close_span,
-                };
-
-                output.push(Qtt::Delimited(new_dl));
-            }
-            t => {
-                output.push(Qtt::TT(t));
-            }
-        }
-    }
-
-    (bindings, output)
-}
-
-// ____________________________________________________________________________________________
-// Turns QQTs into a TokenStream and some Bindings.
-
-/// Construct a chain of concatenations.
-fn unravel_concats(tss: Vec<TokenStream>) -> TokenStream {
-    let mut pushes: Vec<TokenStream> =
-        tss.into_iter().filter(|&ref ts| !ts.is_empty()).collect();
-    let mut output = match pushes.pop() {
-        Some(ts) => ts,
-        None => {
-            return TokenStream::mk_empty();
-        }
-    };
-
-    while let Some(ts) = pushes.pop() {
-        output = build_fn_call(Ident::from_str("concat"),
-                               concat(concat(ts,
-                                             from_tokens(vec![Token::Comma])),
-                                      output));
-    }
-    output
-}
-
-/// This converts the vector of Qtts into a set of Bindings for construction and the main
-/// body as a TokenStream.
-fn convert_complex_tts<'cx>(cx: &'cx mut ExtCtxt, tts: Vec<Qtt>) -> (Bindings, TokenStream) {
-    let mut pushes: Vec<TokenStream> = Vec::new();
-    let mut bindings: Bindings = Vec::new();
-
-    let mut iter = tts.into_iter();
-
-    loop {
-        let next = iter.next();
-        if next.is_none() {
-            break;
-        }
-        let next = next.unwrap();
-        match next {
-            Qtt::TT(TokenTree::Token(_, t)) => {
-                let token_out = emit_token(t);
-                pushes.push(token_out);
-            }
-            // FIXME handle sequence repetition tokens
-            Qtt::Delimited(qdl) => {
-                debug!(" Delimited: {:?} ", qdl.tts);
-                let fresh_id = Ident::with_empty_ctxt(Symbol::gensym("qdl_tmp"));
-                let (mut nested_bindings, nested_toks) = convert_complex_tts(cx, qdl.tts);
-
-                let body = if nested_toks.is_empty() {
-                    assert!(nested_bindings.is_empty());
-                    build_mod_call(vec![Ident::from_str("TokenStream"),
-                                        Ident::from_str("mk_empty")],
-                                   TokenStream::mk_empty())
-                } else {
-                    bindings.append(&mut nested_bindings);
-                    bindings.push((fresh_id, nested_toks));
-                    TokenStream::from_tokens(vec![Token::Ident(fresh_id)])
-                };
-
-                let delimitiers = build_delim_tok(qdl.delim);
-
-                pushes.push(build_mod_call(vec![Ident::from_str("proc_macro_tokens"),
-                                                Ident::from_str("build"),
-                                                Ident::from_str("build_delimited")],
-                                           flatten(vec![body,
-                                                        lex(","),
-                                                        delimitiers].into_iter())));
-            }
-            Qtt::QIdent(t) => {
-                pushes.push(TokenStream::from_tts(vec![t]));
-                pushes.push(TokenStream::mk_empty());
-            }
-            _ => panic!("Unhandled case!"),
-        }
-    }
-
-    (bindings, unravel_concats(pushes))
-}
-
-// ____________________________________________________________________________________________
-// Utilities
-
-/// Unravels Bindings into a TokenStream of `let` declarations.
-fn unravel(bindings: Bindings) -> TokenStream {
-    flatten(bindings.into_iter().map(|(a, b)| build_let(a, b)))
-}
-
-/// Checks if the Ident is `unquote`.
-fn is_unquote(id: Ident) -> bool {
-    let qq = Ident::from_str("unquote");
-    id.name == qq.name // We disregard context; unquote is _reserved_
-}
-
-/// Checks if the Ident is `quote`.
-fn is_qquote(id: Ident) -> bool {
-    let qq = Ident::from_str("qquote");
-    id.name == qq.name // We disregard context; qquote is _reserved_
-}
-
-mod int_build {
-    use proc_macro_tokens::build::*;
-    use proc_macro_tokens::parse::*;
-
-    use syntax::ast::{self, Ident};
-    use syntax::codemap::{DUMMY_SP};
-    use syntax::parse::token::{self, Token, Lit};
-    use syntax::symbol::keywords;
-    use syntax::tokenstream::{TokenTree, TokenStream};
-
-    // ____________________________________________________________________________________________
-    // Emitters
-
-    pub fn emit_token(t: Token) -> TokenStream {
-        concat(lex("TokenStream::from_tokens"),
-               build_paren_delimited(build_vec(build_token_tt(t))))
-    }
-
-    pub fn emit_lit(l: Lit, n: Option<ast::Name>) -> TokenStream {
-        let suf = match n {
-            Some(n) => format!("Some(ast::Name({}))", n.as_u32()),
-            None => "None".to_string(),
-        };
-
-        let lit = match l {
-            Lit::Byte(n) => format!("Lit::Byte(Symbol::intern(\"{}\"))", n.to_string()),
-            Lit::Char(n) => format!("Lit::Char(Symbol::intern(\"{}\"))", n.to_string()),
-            Lit::Float(n) => format!("Lit::Float(Symbol::intern(\"{}\"))", n.to_string()),
-            Lit::Str_(n) => format!("Lit::Str_(Symbol::intern(\"{}\"))", n.to_string()),
-            Lit::Integer(n) => format!("Lit::Integer(Symbol::intern(\"{}\"))", n.to_string()),
-            Lit::ByteStr(n) => format!("Lit::ByteStr(Symbol::intern(\"{}\"))", n.to_string()),
-            _ => panic!("Unsupported literal"),
-        };
-
-        let res = format!("Token::Literal({},{})", lit, suf);
-        debug!("{}", res);
-        lex(&res)
-    }
-
-    // ____________________________________________________________________________________________
-    // Token Builders
-
-    pub fn build_binop_tok(bot: token::BinOpToken) -> TokenStream {
-        match bot {
-            token::BinOpToken::Plus => lex("Token::BinOp(BinOpToken::Plus)"),
-            token::BinOpToken::Minus => lex("Token::BinOp(BinOpToken::Minus)"),
-            token::BinOpToken::Star => lex("Token::BinOp(BinOpToken::Star)"),
-            token::BinOpToken::Slash => lex("Token::BinOp(BinOpToken::Slash)"),
-            token::BinOpToken::Percent => lex("Token::BinOp(BinOpToken::Percent)"),
-            token::BinOpToken::Caret => lex("Token::BinOp(BinOpToken::Caret)"),
-            token::BinOpToken::And => lex("Token::BinOp(BinOpToken::And)"),
-            token::BinOpToken::Or => lex("Token::BinOp(BinOpToken::Or)"),
-            token::BinOpToken::Shl => lex("Token::BinOp(BinOpToken::Shl)"),
-            token::BinOpToken::Shr => lex("Token::BinOp(BinOpToken::Shr)"),
-        }
-    }
-
-    pub fn build_binopeq_tok(bot: token::BinOpToken) -> TokenStream {
-        match bot {
-            token::BinOpToken::Plus => lex("Token::BinOpEq(BinOpToken::Plus)"),
-            token::BinOpToken::Minus => lex("Token::BinOpEq(BinOpToken::Minus)"),
-            token::BinOpToken::Star => lex("Token::BinOpEq(BinOpToken::Star)"),
-            token::BinOpToken::Slash => lex("Token::BinOpEq(BinOpToken::Slash)"),
-            token::BinOpToken::Percent => lex("Token::BinOpEq(BinOpToken::Percent)"),
-            token::BinOpToken::Caret => lex("Token::BinOpEq(BinOpToken::Caret)"),
-            token::BinOpToken::And => lex("Token::BinOpEq(BinOpToken::And)"),
-            token::BinOpToken::Or => lex("Token::BinOpEq(BinOpToken::Or)"),
-            token::BinOpToken::Shl => lex("Token::BinOpEq(BinOpToken::Shl)"),
-            token::BinOpToken::Shr => lex("Token::BinOpEq(BinOpToken::Shr)"),
-        }
-    }
-
-    pub fn build_delim_tok(dt: token::DelimToken) -> TokenStream {
-        match dt {
-            token::DelimToken::Paren => lex("DelimToken::Paren"),
-            token::DelimToken::Bracket => lex("DelimToken::Bracket"),
-            token::DelimToken::Brace => lex("DelimToken::Brace"),
-            token::DelimToken::NoDelim => lex("DelimToken::NoDelim"),
-        }
-    }
-
-    pub fn build_token_tt(t: Token) -> TokenStream {
-        match t {
-            Token::Eq => lex("Token::Eq"),
-            Token::Lt => lex("Token::Lt"),
-            Token::Le => lex("Token::Le"),
-            Token::EqEq => lex("Token::EqEq"),
-            Token::Ne => lex("Token::Ne"),
-            Token::Ge => lex("Token::Ge"),
-            Token::Gt => lex("Token::Gt"),
-            Token::AndAnd => lex("Token::AndAnd"),
-            Token::OrOr => lex("Token::OrOr"),
-            Token::Not => lex("Token::Not"),
-            Token::Tilde => lex("Token::Tilde"),
-            Token::BinOp(tok) => build_binop_tok(tok),
-            Token::BinOpEq(tok) => build_binopeq_tok(tok),
-            Token::At => lex("Token::At"),
-            Token::Dot => lex("Token::Dot"),
-            Token::DotDot => lex("Token::DotDot"),
-            Token::DotDotDot => lex("Token::DotDotDot"),
-            Token::Comma => lex("Token::Comma"),
-            Token::Semi => lex("Token::Semi"),
-            Token::Colon => lex("Token::Colon"),
-            Token::ModSep => lex("Token::ModSep"),
-            Token::RArrow => lex("Token::RArrow"),
-            Token::LArrow => lex("Token::LArrow"),
-            Token::FatArrow => lex("Token::FatArrow"),
-            Token::Pound => lex("Token::Pound"),
-            Token::Dollar => lex("Token::Dollar"),
-            Token::Question => lex("Token::Question"),
-            Token::OpenDelim(dt) => {
-                match dt {
-                    token::DelimToken::Paren => lex("Token::OpenDelim(DelimToken::Paren)"),
-                    token::DelimToken::Bracket => lex("Token::OpenDelim(DelimToken::Bracket)"),
-                    token::DelimToken::Brace => lex("Token::OpenDelim(DelimToken::Brace)"),
-                    token::DelimToken::NoDelim => lex("DelimToken::NoDelim"),
-                }
-            }
-            Token::CloseDelim(dt) => {
-                match dt {
-                    token::DelimToken::Paren => lex("Token::CloseDelim(DelimToken::Paren)"),
-                    token::DelimToken::Bracket => lex("Token::CloseDelim(DelimToken::Bracket)"),
-                    token::DelimToken::Brace => lex("Token::CloseDelim(DelimToken::Brace)"),
-                    token::DelimToken::NoDelim => lex("DelimToken::NoDelim"),
-                }
-            }
-            Token::Underscore => lex("_"),
-            Token::Literal(lit, sfx) => emit_lit(lit, sfx),
-            // fix ident expansion information... somehow
-            Token::Ident(ident) =>
-                lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)),
-            Token::Lifetime(ident) =>
-                lex(&format!("Token::Ident(Ident::from_str(\"{}\"))", ident.name)),
-            _ => panic!("Unhandled case!"),
-        }
-    }
-
-    // ____________________________________________________________________________________________
-    // Conversion operators
-
-    pub fn as_tt(t: Token) -> TokenTree {
-        // FIXME do something nicer with the spans
-        TokenTree::Token(DUMMY_SP, t)
-    }
-
-    // ____________________________________________________________________________________________
-    // Build Procedures
-
-    /// Takes `input` and returns `vec![input]`.
-    pub fn build_vec(ts: TokenStream) -> TokenStream {
-        build_mac_call(Ident::from_str("vec"), ts)
-        // tts.clone().to_owned()
-    }
-
-    /// Takes `ident` and `rhs` and produces `let ident = rhs;`.
-    pub fn build_let(id: Ident, tts: TokenStream) -> TokenStream {
-        concat(from_tokens(vec![keyword_to_token_ident(keywords::Let),
-                                Token::Ident(id),
-                                Token::Eq]),
-               concat(tts, from_tokens(vec![Token::Semi])))
-    }
-
-    /// Takes `ident ...`, and `args ...` and produces `ident::...(args ...)`.
-    pub fn build_mod_call(ids: Vec<Ident>, args: TokenStream) -> TokenStream {
-        let call = from_tokens(intersperse(ids.into_iter().map(|id| Token::Ident(id)).collect(),
-                                           Token::ModSep));
-        concat(call, build_paren_delimited(args))
-    }
-
-    /// Takes `ident` and `args ...` and produces `ident(args ...)`.
-    pub fn build_fn_call(name: Ident, args: TokenStream) -> TokenStream {
-        concat(from_tokens(vec![Token::Ident(name)]), build_paren_delimited(args))
-    }
-
-    /// Takes `ident` and `args ...` and produces `ident!(args ...)`.
-    pub fn build_mac_call(name: Ident, args: TokenStream) -> TokenStream {
-        concat(from_tokens(vec![Token::Ident(name), Token::Not]),
-               build_paren_delimited(args))
-    }
-
-    // ____________________________________________________________________________________________
-    // Utilities
-
-    /// A wrapper around `TokenStream::from_tokens` to avoid extra namespace specification and
-    /// provide it as a generic operator.
-    pub fn from_tokens(tokens: Vec<Token>) -> TokenStream {
-        TokenStream::from_tokens(tokens)
-    }
-
-    pub fn intersperse<T>(vs: Vec<T>, t: T) -> Vec<T>
-        where T: Clone
-    {
-        if vs.len() < 2 {
-            return vs;
-        }
-        let mut output = vec![vs.get(0).unwrap().to_owned()];
-
-        for v in vs.into_iter().skip(1) {
-            output.push(t.clone());
-            output.push(v);
-        }
-        output
-    }
-}
+//! # Quasiquoter
+//! This file contains the implementation internals of the quasiquoter provided by `qquote!`.
+
+use syntax::ast::Ident;
+use syntax::parse::token::{self, Token, Lit};
+use syntax::symbol::Symbol;
+use syntax::tokenstream::{self, Delimited, TokenTree, TokenStream};
+use syntax_pos::DUMMY_SP;
+
+use std::rc::Rc;
+
+pub fn qquote<'cx>(stream: TokenStream) -> TokenStream {
+    stream.quote()
+}
+
+trait Quote {
+    fn quote(&self) -> TokenStream;
+}
+
+macro_rules! quote_tok {
+    (,) => { Token::Comma };
+    (.) => { Token::Dot };
+    (:) => { Token::Colon };
+    (::) => { Token::ModSep };
+    (!) => { Token::Not };
+    (<) => { Token::Lt };
+    (>) => { Token::Gt };
+    (_) => { Token::Underscore };
+    ($i:ident) => { Token::Ident(Ident::from_str(stringify!($i))) };
+}
+
+macro_rules! quote_tree {
+    ((unquote $($t:tt)*)) => { $($t)* };
+    ((quote $($t:tt)*)) => { ($($t)*).quote() };
+    (($($t:tt)*)) => { delimit(token::Paren, quote!($($t)*)) };
+    ([$($t:tt)*]) => { delimit(token::Bracket, quote!($($t)*)) };
+    ({$($t:tt)*}) => { delimit(token::Brace, quote!($($t)*)) };
+    ($t:tt) => { TokenStream::from(TokenTree::Token(DUMMY_SP, quote_tok!($t))) };
+}
+
+fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
+    TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
+        delim: delim,
+        tts: stream.trees().cloned().collect(),
+    })).into()
+}
+
+macro_rules! quote {
+    () => { TokenStream::empty() };
+    ($($t:tt)*) => { [ $( quote_tree!($t), )* ].iter().cloned().collect::<TokenStream>() };
+}
+
+impl<T: Quote> Quote for Option<T> {
+    fn quote(&self) -> TokenStream {
+        match *self {
+            Some(ref t) => quote!(::std::option::Option::Some((quote t))),
+            None => quote!(::std::option::Option::None),
+        }
+    }
+}
+
+impl Quote for TokenStream {
+    fn quote(&self) -> TokenStream {
+        if self.is_empty() {
+            return quote!(::syntax::tokenstream::TokenStream::empty());
+        }
+
+        struct Quote<'a>(tokenstream::Cursor<'a>);
+
+        impl<'a> Iterator for Quote<'a> {
+            type Item = TokenStream;
+
+            fn next(&mut self) -> Option<TokenStream> {
+                let is_unquote = match self.0.peek() {
+                    Some(&TokenTree::Token(_, Token::Ident(ident))) if ident.name == "unquote" => {
+                        self.0.next();
+                        true
+                    }
+                    _ => false,
+                };
+
+                self.0.next().cloned().map(|tree| {
+                    let quoted_tree = if is_unquote { tree.into() } else { tree.quote() };
+                    quote!(::syntax::tokenstream::TokenStream::from((unquote quoted_tree)),)
+                })
+            }
+        }
+
+        let quoted = Quote(self.trees()).collect::<TokenStream>();
+        quote!([(unquote quoted)].iter().cloned().collect::<::syntax::tokenstream::TokenStream>())
+    }
+}
+
+impl Quote for Vec<TokenTree> {
+    fn quote(&self) -> TokenStream {
+        let stream = self.iter().cloned().collect::<TokenStream>();
+        quote!((quote stream).trees().cloned().collect::<::std::vec::Vec<_> >())
+    }
+}
+
+impl Quote for TokenTree {
+    fn quote(&self) -> TokenStream {
+        match *self {
+            TokenTree::Token(_, ref token) => quote! {
+                ::syntax::tokenstream::TokenTree::Token(::syntax::ext::quote::rt::DUMMY_SP,
+                                                        (quote token))
+            },
+            TokenTree::Delimited(_, ref delimited) => quote! {
+                ::syntax::tokenstream::TokenTree::Delimited(::syntax::ext::quote::rt::DUMMY_SP,
+                                                            (quote delimited))
+            },
+            _ => panic!("unexpected `TokenTree::Sequence` in `qquote`"),
+        }
+    }
+}
+
+impl Quote for Rc<Delimited> {
+    fn quote(&self) -> TokenStream {
+        quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited {
+            delim: (quote self.delim),
+            tts: (quote self.tts),
+        }))
+    }
+}
+
+impl<'a> Quote for &'a str {
+    fn quote(&self) -> TokenStream {
+        TokenTree::Token(DUMMY_SP, Token::Literal(token::Lit::Str_(Symbol::intern(self)), None))
+            .into()
+    }
+}
+
+impl Quote for Ident {
+    fn quote(&self) -> TokenStream {
+        // FIXME(jseyfried) quote hygiene
+        quote!(::syntax::ast::Ident::from_str((quote &*self.name.as_str())))
+    }
+}
+
+impl Quote for Symbol {
+    fn quote(&self) -> TokenStream {
+        quote!(::syntax::symbol::Symbol::intern((quote &*self.as_str())))
+    }
+}
+
+impl Quote for Token {
+    fn quote(&self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident),*; $($t:tt)*) => {
+                match *self {
+                    $( Token::$i => quote!(::syntax::parse::token::$i), )*
+                    $( $t )*
+                }
+            }
+        }
+
+        gen_match! {
+            Eq, Lt, Le, EqEq, Ne, Ge, Gt, AndAnd, OrOr, Not, Tilde, At, Dot, DotDot, DotDotDot,
+            Comma, Semi, Colon, ModSep, RArrow, LArrow, FatArrow, Pound, Dollar, Question,
+            Underscore;
+
+            Token::OpenDelim(delim) => quote!(::syntax::parse::token::OpenDelim((quote delim))),
+            Token::CloseDelim(delim) => quote!(::syntax::parse::token::CloseDelim((quote delim))),
+            Token::BinOp(tok) => quote!(::syntax::parse::token::BinOp((quote tok))),
+            Token::BinOpEq(tok) => quote!(::syntax::parse::token::BinOpEq((quote tok))),
+            Token::Ident(ident) => quote!(::syntax::parse::token::Ident((quote ident))),
+            Token::Lifetime(ident) => quote!(::syntax::parse::token::Lifetime((quote ident))),
+            Token::Literal(lit, sfx) => quote! {
+                ::syntax::parse::token::Literal((quote lit), (quote sfx))
+            },
+            _ => panic!("Unhandled case!"),
+        }
+    }
+}
+
+impl Quote for token::BinOpToken {
+    fn quote(&self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident),*) => {
+                match *self {
+                    $( token::BinOpToken::$i => quote!(::syntax::parse::token::BinOpToken::$i), )*
+                }
+            }
+        }
+
+        gen_match!(Plus, Minus, Star, Slash, Percent, Caret, And, Or, Shl, Shr)
+    }
+}
+
+impl Quote for Lit {
+    fn quote(&self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident),*) => {
+                match *self {
+                    $( Lit::$i(lit) => quote!(::syntax::parse::token::Lit::$i((quote lit))), )*
+                    _ => panic!("Unsupported literal"),
+                }
+            }
+        }
+
+        gen_match!(Byte, Char, Float, Str_, Integer, ByteStr)
+    }
+}
+
+impl Quote for token::DelimToken {
+    fn quote(&self) -> TokenStream {
+        macro_rules! gen_match {
+            ($($i:ident),*) => {
+                match *self {
+                    $(token::DelimToken::$i => { quote!(::syntax::parse::token::DelimToken::$i) })*
+                }
+            }
+        }
+
+        gen_match!(Paren, Bracket, Brace, NoDelim)
+    }
+}
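
The rewrite replaces the old binding-hoisting algorithm with a recursive `Quote` trait: quoting a tree yields a token stream that, when compiled, rebuilds that tree, and `unquote` splices a user expression through unchanged. As a rough illustration (spans and suffixes elided, not the literal compiler output), `qquote!(foo ,)` expands to something like:

    [
        ::syntax::tokenstream::TokenStream::from(
            ::syntax::tokenstream::TokenTree::Token(
                ::syntax::ext::quote::rt::DUMMY_SP,
                ::syntax::parse::token::Ident(::syntax::ast::Ident::from_str("foo")))),
        ::syntax::tokenstream::TokenStream::from(
            ::syntax::tokenstream::TokenTree::Token(
                ::syntax::ext::quote::rt::DUMMY_SP,
                ::syntax::parse::token::Comma)),
    ].iter().cloned().collect::<::syntax::tokenstream::TokenStream>()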

@ -1,12 +0,0 @@
[package]
authors = ["The Rust Project Developers"]
name = "proc_macro_tokens"
version = "0.0.0"
build = false
[lib]
path = "lib.rs"
crate-type = ["dylib"]
[dependencies]
syntax = { path = "../libsyntax" }

@@ -1,100 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use syntax::ast::Ident;
-use syntax::codemap::DUMMY_SP;
-use syntax::parse::token::{self, Token};
-use syntax::symbol::keywords;
-use syntax::tokenstream::{self, TokenTree, TokenStream};
-
-use std::rc::Rc;
-
-/// A wrapper around `TokenStream::concat` to avoid extra namespace specification and
-/// provide TokenStream concatenation as a generic operator.
-pub fn concat(ts1: TokenStream, ts2: TokenStream) -> TokenStream {
-    TokenStream::concat(ts1, ts2)
-}
-
-/// Flatten a sequence of TokenStreams into a single TokenStream.
-pub fn flatten<T: Iterator<Item=TokenStream>>(mut iter: T) -> TokenStream {
-    match iter.next() {
-        Some(mut ts) => {
-            for next in iter {
-                ts = TokenStream::concat(ts, next);
-            }
-            ts
-        }
-        None => TokenStream::mk_empty()
-    }
-}
-
-/// Checks if two identifiers have the same name, disregarding context. This allows us to
-/// fake 'reserved' keywords.
-// FIXME We really want `free-identifier-=?` (a la Dybvig 1993). von Tander 2007 is
-// probably the easiest way to do that.
-pub fn ident_eq(tident: &TokenTree, id: Ident) -> bool {
-    let tid = match *tident {
-        TokenTree::Token(_, Token::Ident(ref id)) => id,
-        _ => {
-            return false;
-        }
-    };
-
-    tid.name == id.name
-}
-
-// ____________________________________________________________________________________________
-// Conversion operators
-
-/// Convert a `&str` into a Token.
-pub fn str_to_token_ident(s: &str) -> Token {
-    Token::Ident(Ident::from_str(s))
-}
-
-/// Converts a keyword (from `syntax::parse::token::keywords`) into a Token that
-/// corresponds to it.
-pub fn keyword_to_token_ident(kw: keywords::Keyword) -> Token {
-    Token::Ident(Ident::from_str(&kw.name().as_str()[..]))
-}
-
-// ____________________________________________________________________________________________
-// Build Procedures
-
-/// Generically takes a `ts` and delimiter and returns `ts` delimited by the specified
-/// delimiter.
-pub fn build_delimited(ts: TokenStream, delim: token::DelimToken) -> TokenStream {
-    let tts = ts.to_tts();
-    TokenStream::from_tts(vec![TokenTree::Delimited(DUMMY_SP,
-                                                    Rc::new(tokenstream::Delimited {
-                                                        delim: delim,
-                                                        open_span: DUMMY_SP,
-                                                        tts: tts,
-                                                        close_span: DUMMY_SP,
-                                                    }))])
-}
-
-/// Takes `ts` and returns `[ts]`.
-pub fn build_bracket_delimited(ts: TokenStream) -> TokenStream {
-    build_delimited(ts, token::DelimToken::Bracket)
-}
-
-/// Takes `ts` and returns `{ts}`.
-pub fn build_brace_delimited(ts: TokenStream) -> TokenStream {
-    build_delimited(ts, token::DelimToken::Brace)
-}
-
-/// Takes `ts` and returns `(ts)`.
-pub fn build_paren_delimited(ts: TokenStream) -> TokenStream {
-    build_delimited(ts, token::DelimToken::Paren)
-}
-
-/// Constructs `()`.
-pub fn build_empty_args() -> TokenStream {
-    build_paren_delimited(TokenStream::mk_empty())
-}

@@ -1,64 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! # Proc_Macro
-//!
-//! A library for procedural macro writers.
-//!
-//! ## Usage
-//! This crate provides the prelude (at libproc_macro_tokens::prelude), which
-//! provides a number of operations:
-//! - `concat`, for concatenating two TokenStreams.
-//! - `ident_eq`, for checking if two identifiers are equal regardless of syntax context.
-//! - `str_to_token_ident`, for converting an `&str` into a Token.
-//! - `keyword_to_token_delim`, for converting a `parse::token::keywords::Keyword` into a
-//!   Token.
-//! - `build_delimited`, for creating a new TokenStream from an existing one and a delimiter
-//!   by wrapping the TokenStream in the delimiter.
-//! - `build_bracket_delimited`, `build_brace_delimited`, and `build_paren_delimited`, for
-//!   easing the above.
-//! - `build_empty_args`, which returns a TokenStream containing `()`.
-//! - `lex`, which takes an `&str` and returns the TokenStream it represents.
-//!
-//! ## TokenStreams
-//!
-//! TokenStreams serve as the basis of the macro system. They are, in essence, vectors of
-//! TokenTrees, where indexing treats delimited values as a single term. That is, the term
-//! `even(a+c) && even(b)` will be indexibly encoded as `even | (a+c) | even | (b)` where,
-//! in reality, `(a+c)` is actually a decorated pointer to `a | + | c`.
-//!
-//! If a user has a TokenStream that is a single, delimited value, they can use
-//! `maybe_delimited` to destruct it and receive the internal vector as a new TokenStream
-//! as:
-//! ```
-//! `(a+c)`.maybe_delimited() ~> Some(a | + | c)`
-//! ```
-//!
-//! Check the TokenStream documentation for more information; the structure also provides
-//! cheap concatenation and slicing.
-//!
-
-#![crate_name = "proc_macro_tokens"]
-#![unstable(feature = "rustc_private", issue = "27812")]
-#![crate_type = "dylib"]
-#![crate_type = "rlib"]
-#![doc(html_logo_url = "https://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
-       html_favicon_url = "https://doc.rust-lang.org/favicon.ico",
-       html_root_url = "https://doc.rust-lang.org/nightly/")]
-#![deny(warnings)]
-
-#![feature(staged_api)]
-#![feature(rustc_private)]
-
-extern crate syntax;
-
-pub mod build;
-pub mod parse;
-pub mod prelude;

@@ -1,24 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Parsing utilities for writing procedural macros.
-
-use syntax::parse::{ParseSess, filemap_to_tts};
-use syntax::tokenstream::TokenStream;
-
-/// Map a string to tts, using a made-up filename. For example, `lex("15")` will return a
-/// TokenStream containing the literal 15.
-pub fn lex(source_str: &str) -> TokenStream {
-    let ps = ParseSess::new();
-    TokenStream::from_tts(filemap_to_tts(&ps,
-                                         ps.codemap().new_filemap("<procmacro_lex>".to_string(),
-                                                                  None,
-                                                                  source_str.to_owned())))
-}

@@ -1,12 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-pub use build::*;
-pub use parse::*;

@@ -1034,18 +1034,14 @@ impl<'a, 'hash, 'tcx> StrictVersionHashVisitor<'a, 'hash, 'tcx> {
                     hash_span!(self, span);
                     let tokenstream::Delimited {
                         ref delim,
-                        open_span,
                         ref tts,
-                        close_span,
                     } = **delimited;

                     delim.hash(self.st);
-                    hash_span!(self, open_span);
                     tts.len().hash(self.st);
                     for sub_tt in tts {
                         self.hash_token_tree(sub_tt);
                     }
-                    hash_span!(self, close_span);
                 }
                 tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
                     hash_span!(self, span);

@@ -73,23 +73,6 @@ pub fn dummy_spanned<T>(t: T) -> Spanned<T> {
     respan(DUMMY_SP, t)
 }

-/// Build a span that covers the two provided spans.
-pub fn combine_spans(sp1: Span, sp2: Span) -> Span {
-    if sp1 == DUMMY_SP && sp2 == DUMMY_SP {
-        DUMMY_SP
-    } else if sp1 == DUMMY_SP {
-        sp2
-    } else if sp2 == DUMMY_SP {
-        sp1
-    } else {
-        Span {
-            lo: if sp1.lo < sp2.lo { sp1.lo } else { sp2.lo },
-            hi: if sp1.hi > sp2.hi { sp1.hi } else { sp2.hi },
-            expn_id: if sp1.expn_id == sp2.expn_id { sp1.expn_id } else { NO_EXPANSION },
-        }
-    }
-}
-
 #[derive(Clone, Hash, Debug)]
 pub struct NameAndSpan {
     /// The format with which the macro was invoked.

@@ -364,10 +364,8 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                 kind.expect_from_annotatables(items)
             }
             SyntaxExtension::AttrProcMacro(ref mac) => {
-                let attr_toks = TokenStream::from_tts(tts_for_attr_args(&attr,
-                                                                        &self.cx.parse_sess));
-                let item_toks = TokenStream::from_tts(tts_for_item(&item, &self.cx.parse_sess));
+                let attr_toks = tts_for_attr_args(&attr, &self.cx.parse_sess).into_iter().collect();
+                let item_toks = tts_for_item(&item, &self.cx.parse_sess).into_iter().collect();

                 let tok_result = mac.expand(self.cx, attr.span, attr_toks, item_toks);
                 self.parse_expansion(tok_result, kind, name, attr.span)

@@ -467,7 +465,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
                     },
                 });

-                let toks = TokenStream::from_tts(marked_tts);
+                let toks = marked_tts.into_iter().collect();
                 let tok_result = expandfun.expand(self.cx, span, toks);
                 Some(self.parse_expansion(tok_result, kind, extname, span))
             }

@@ -490,7 +488,7 @@ impl<'a, 'b> MacroExpander<'a, 'b> {
     fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, name: Name, span: Span)
                        -> Expansion {
-        let mut parser = self.cx.new_parser_from_tts(&toks.to_tts());
+        let mut parser = self.cx.new_parser_from_tts(&toks.trees().cloned().collect::<Vec<_>>());
         let expansion = match parser.parse_expansion(kind, false) {
             Ok(expansion) => expansion,
             Err(mut err) => {

@@ -1,72 +0,0 @@
-// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! This is a shim file to ease the transition to the final procedural macro interface for
-//! Macros 2.0. It currently exposes the `libsyntax` operations that the quasiquoter's
-//! output needs to compile correctly, along with the following operators:
-//!
-//! - `build_block_emitter`, which produces a `block` output macro result from the
-//!   provided TokenStream.
-
-use ast;
-use codemap::Span;
-use parse::parser::Parser;
-use ptr::P;
-use tokenstream::TokenStream;
-
-use ext::base::*;
-
-/// Take a `ExtCtxt`, `Span`, and `TokenStream`, and produce a Macro Result that parses
-/// the TokenStream as a block and returns it as an `Expr`.
-pub fn build_block_emitter<'cx>(cx: &'cx mut ExtCtxt,
-                                sp: Span,
-                                output: TokenStream)
-                                -> Box<MacResult + 'cx> {
-    let parser = cx.new_parser_from_tts(&output.to_tts());
-
-    struct Result<'a> {
-        prsr: Parser<'a>,
-        span: Span,
-    }; //FIXME is this the right lifetime
-
-    impl<'a> Result<'a> {
-        fn block(&mut self) -> P<ast::Block> {
-            let res = self.prsr.parse_block().unwrap();
-            res
-        }
-    }
-
-    impl<'a> MacResult for Result<'a> {
-        fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
-            let mut me = *self;
-            Some(P(ast::Expr {
-                id: ast::DUMMY_NODE_ID,
-                node: ast::ExprKind::Block(me.block()),
-                span: me.span,
-                attrs: ast::ThinVec::new(),
-            }))
-        }
-    }
-
-    Box::new(Result {
-        prsr: parser,
-        span: sp,
-    })
-}
-
-pub mod prelude {
-    pub use super::build_block_emitter;
-    pub use ast::Ident;
-    pub use codemap::{DUMMY_SP, Span};
-    pub use ext::base::{ExtCtxt, MacResult};
-    pub use parse::token::{self, Token, DelimToken};
-    pub use symbol::keywords;
-    pub use tokenstream::{TokenTree, TokenStream};
-}

@@ -231,9 +231,7 @@ pub mod rt {
             }
             r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
                 delim: token::Bracket,
-                open_span: self.span,
                 tts: self.value.to_tokens(cx),
-                close_span: self.span,
             })));
             r
         }

@@ -250,9 +248,7 @@ pub mod rt {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
             vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
                 delim: token::Paren,
-                open_span: DUMMY_SP,
                 tts: vec![],
-                close_span: DUMMY_SP,
             }))]
         }
     }

@@ -757,11 +753,11 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
                                         vec![e_tok]);
             vec![cx.stmt_expr(e_push)]
         },
-        TokenTree::Delimited(_, ref delimed) => {
-            statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter()
+        TokenTree::Delimited(span, ref delimed) => {
+            statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter()
                 .chain(delimed.tts.iter()
                                   .flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
-                .chain(statements_mk_tt(cx, &delimed.close_tt(), matcher))
+                .chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher))
                 .collect()
         },
         TokenTree::Sequence(sp, ref seq) => {

@@ -350,9 +350,9 @@ impl FirstSets {
                 TokenTree::Token(sp, ref tok) => {
                     first.replace_with((sp, tok.clone()));
                 }
-                TokenTree::Delimited(_, ref delimited) => {
+                TokenTree::Delimited(span, ref delimited) => {
                     build_recur(sets, &delimited.tts[..]);
-                    first.replace_with((delimited.open_span,
+                    first.replace_with((delimited.open_tt(span).span(),
                                         Token::OpenDelim(delimited.delim)));
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {

@@ -410,8 +410,8 @@ impl FirstSets {
                     first.add_one((sp, tok.clone()));
                     return first;
                 }
-                TokenTree::Delimited(_, ref delimited) => {
-                    first.add_one((delimited.open_span,
+                TokenTree::Delimited(span, ref delimited) => {
+                    first.add_one((delimited.open_tt(span).span(),
                                    Token::OpenDelim(delimited.delim)));
                     return first;
                 }

@@ -603,8 +603,9 @@ fn check_matcher_core(sess: &ParseSess,
                     suffix_first = build_suffix_first();
                 }
             }
-            TokenTree::Delimited(_, ref d) => {
-                let my_suffix = TokenSet::singleton((d.close_span, Token::CloseDelim(d.delim)));
+            TokenTree::Delimited(span, ref d) => {
+                let my_suffix = TokenSet::singleton((d.close_tt(span).span(),
+                                                     Token::CloseDelim(d.delim)));
                 check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();

@@ -543,9 +543,7 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
             TokenTree::Delimited(fld.new_span(span), Rc::new(
                 Delimited {
                     delim: delimed.delim,
-                    open_span: fld.new_span(delimed.open_span),
                     tts: fld.fold_tts(&delimed.tts),
-                    close_span: fld.new_span(delimed.close_span),
                 }
             ))
         },

@@ -89,6 +89,9 @@ pub mod util {
     mod thin_vec;
     pub use self::thin_vec::ThinVec;
+
+    mod rc_slice;
+    pub use self::rc_slice::RcSlice;
 }

 pub mod json;

@@ -129,7 +132,6 @@ pub mod ext {
         pub mod expand;
         pub mod placeholders;
         pub mod hygiene;
-        pub mod proc_macro_shim;
         pub mod quote;
         pub mod source_util;

@@ -59,7 +59,6 @@ impl<'a> StringReader<'a> {
                 // Parse the open delimiter.
                 self.open_braces.push((delim, self.span));
-                let open_span = self.span;
                 self.real_token();

                 // Parse the token trees within the delimiters.

@@ -67,9 +66,8 @@ impl<'a> StringReader<'a> {
                 // uses an incorrect delimiter.
                 let tts = self.parse_token_trees_until_close_delim();

-                let close_span = self.span;
                 // Expand to cover the entire delimited token tree
-                let span = Span { hi: close_span.hi, ..pre_span };
+                let span = Span { hi: self.span.hi, ..pre_span };

                 match self.token {
                     // Correct delimiter.

@@ -115,9 +113,7 @@ impl<'a> StringReader<'a> {
                 Ok(TokenTree::Delimited(span, Rc::new(Delimited {
                     delim: delim,
-                    open_span: open_span,
                     tts: tts,
-                    close_span: close_span,
                 })))
             },
             token::CloseDelim(_) => {

@@ -196,7 +196,7 @@ pub fn new_parser_from_tts<'a>(sess: &'a ParseSess, tts: Vec<tokenstream::TokenT
 }

 pub fn new_parser_from_ts<'a>(sess: &'a ParseSess, ts: tokenstream::TokenStream) -> Parser<'a> {
-    tts_to_parser(sess, ts.to_tts())
+    tts_to_parser(sess, ts.trees().cloned().collect())
 }

@@ -725,24 +725,20 @@ mod tests {
                         sp(5, 14),
                         Rc::new(tokenstream::Delimited {
                             delim: token::DelimToken::Paren,
-                            open_span: sp(5, 6),
                             tts: vec![
                                 TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
                                 TokenTree::Token(sp(8, 9), token::Colon),
                                 TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
                             ],
-                            close_span: sp(13, 14),
                         })),
                     TokenTree::Delimited(
                         sp(15, 21),
                         Rc::new(tokenstream::Delimited {
                             delim: token::DelimToken::Brace,
-                            open_span: sp(15, 16),
                             tts: vec![
                                 TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
                                 TokenTree::Token(sp(18, 19), token::Semi),
                             ],
-                            close_span: sp(20, 21),
                         }))
                 ];

@@ -256,9 +256,7 @@ impl<'a> Parser<'a> {
               -> Self {
         let tt = TokenTree::Delimited(syntax_pos::DUMMY_SP, Rc::new(Delimited {
             delim: token::NoDelim,
-            open_span: syntax_pos::DUMMY_SP,
             tts: tokens,
-            close_span: syntax_pos::DUMMY_SP,
         }));
         let mut parser = Parser {
             sess: sess,

@@ -2720,7 +2718,7 @@ impl<'a> Parser<'a> {
                 }

                 let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true);
-                let open_span = self.span;
+                let lo = self.span.lo;
                 self.bump();
                 let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace),
                                                             &token::CloseDelim(token::Paren),

@@ -2729,16 +2727,11 @@ impl<'a> Parser<'a> {
                                                           |p| p.parse_token_tree(),
                                                           |mut e| e.emit());
                 self.parsing_token_tree = parsing_token_tree;
-                let close_span = self.span;
                 self.bump();

-                let span = Span { lo: open_span.lo, ..close_span };
-                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
+                Ok(TokenTree::Delimited(Span { lo: lo, ..self.prev_span }, Rc::new(Delimited {
                     delim: delim,
-                    open_span: open_span,
                     tts: tts,
-                    close_span: close_span,
                 })))
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),

@@ -49,6 +49,12 @@ pub enum DelimToken {
     NoDelim,
 }

+impl DelimToken {
+    pub fn len(&self) -> u32 {
+        if *self == NoDelim { 0 } else { 1 }
+    }
+}
+
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
 pub enum Lit {
     Byte(ast::Name),
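
`DelimToken::len` is the delimiter's width in bytes (zero for `NoDelim`). Presumably this is what lets the new `open_tt`/`close_tt` helpers used in the hunks above synthesize the spans that the removed `open_span`/`close_span` fields used to store. A hedged sketch of that computation, written as free functions since the real methods live in the suppressed tokenstream.rs diff below:

    // Hedged sketch only: the actual open_tt/close_tt are methods on
    // syntax::tokenstream::Delimited (rustc_private internals).
    use syntax::parse::token::Token;
    use syntax::tokenstream::{Delimited, TokenTree};
    use syntax_pos::{BytePos, Span};

    fn open_tt(d: &Delimited, span: Span) -> TokenTree {
        // The open delimiter occupies the first `d.delim.len()` bytes of `span`.
        let open_span = Span { hi: span.lo + BytePos(d.delim.len()), ..span };
        TokenTree::Token(open_span, Token::OpenDelim(d.delim))
    }

    fn close_tt(d: &Delimited, span: Span) -> TokenTree {
        // The close delimiter occupies the last `d.delim.len()` bytes of `span`.
        let close_span = Span { lo: span.hi - BytePos(d.delim.len()), ..span };
        TokenTree::Token(close_span, Token::CloseDelim(d.delim))
    }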

File diff suppressed because it is too large.

@@ -0,0 +1,50 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use std::hash::{self, Hash};
+use std::fmt;
+use std::ops::Deref;
+use std::rc::Rc;
+
+#[derive(Clone)]
+pub struct RcSlice<T> {
+    data: Rc<Box<[T]>>,
+    offset: u32,
+    len: u32,
+}
+
+impl<T> RcSlice<T> {
+    pub fn new(vec: Vec<T>) -> Self {
+        RcSlice {
+            offset: 0,
+            len: vec.len() as u32,
+            data: Rc::new(vec.into_boxed_slice()),
+        }
+    }
+}
+
+impl<T> Deref for RcSlice<T> {
+    type Target = [T];
+    fn deref(&self) -> &[T] {
+        &self.data[self.offset as usize .. (self.offset + self.len) as usize]
+    }
+}
+
+impl<T: Hash> Hash for RcSlice<T> {
+    fn hash<H: hash::Hasher>(&self, state: &mut H) {
+        self.deref().hash(state);
+    }
+}
+
+impl<T: fmt::Debug> fmt::Debug for RcSlice<T> {
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        fmt::Debug::fmt(self.deref(), f)
+    }
+}
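
`RcSlice` is what makes the refactored `TokenStream` cheap to clone and slice: the data sits behind an `Rc`, and `Deref` exposes it as an ordinary `&[T]`. A small usage sketch, assuming the `syntax::util::RcSlice` re-export added above:

    // Illustrative only: RcSlice is a rustc-internal type (rustc_private).
    #![feature(rustc_private)]
    extern crate syntax;

    use syntax::util::RcSlice;

    fn main() {
        let slice = RcSlice::new(vec![1, 2, 3, 4]);
        let alias = slice.clone();           // O(1): bumps the Rc, copies no elements
        assert_eq!(&*slice, &[1, 2, 3, 4]);  // Deref exposes an ordinary &[T]
        assert_eq!(alias.len(), 4);          // slice methods come along for free
        println!("{:?}", alias);             // Debug forwards to the slice
    }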

@@ -1,65 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![allow(unused_parens)]
-#![feature(plugin)]
-#![feature(plugin_registrar)]
-#![feature(rustc_private)]
-#![plugin(proc_macro_plugin)]
-
-extern crate rustc_plugin;
-extern crate proc_macro_tokens;
-extern crate syntax;
-
-use proc_macro_tokens::build::ident_eq;
-
-use syntax::ast::Ident;
-use syntax::ext::base::{ExtCtxt, MacResult};
-use syntax::ext::proc_macro_shim::build_block_emitter;
-use syntax::tokenstream::{TokenTree, TokenStream};
-use syntax::codemap::Span;
-
-use rustc_plugin::Registry;
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_macro("cond", cond);
-}
-
-fn cond<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'cx> {
-    let output = cond_rec(TokenStream::from_tts(tts.clone().to_owned()));
-    build_block_emitter(cx, sp, output)
-}
-
-fn cond_rec(input: TokenStream) -> TokenStream {
-    if input.is_empty() {
-        return qquote!();
-    }
-
-    let next = input.slice(0..1);
-    let rest = input.slice_from(1..);
-
-    let clause : TokenStream = match next.maybe_delimited() {
-        Some(ts) => ts,
-        _ => panic!("Invalid input"),
-    };
-
-    // clause is ([test]) [rhs]
-    if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) }
-
-    let test: TokenStream = clause.slice(0..1);
-    let rhs: TokenStream = clause.slice_from(1..);
-
-    if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() {
-        qquote!({unquote(rhs)})
-    } else {
-        qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } })
-    }
-}

@@ -15,52 +15,45 @@
 #![plugin(proc_macro_plugin)]

 extern crate rustc_plugin;
-extern crate proc_macro_tokens;
 extern crate syntax;

-use proc_macro_tokens::prelude::*;
-
 use rustc_plugin::Registry;
-use syntax::ast::Ident;
-use syntax::codemap::{DUMMY_SP, Span};
-use syntax::ext::proc_macro_shim::build_block_emitter;
-use syntax::ext::base::{ExtCtxt, MacResult};
-use syntax::parse::token::{self, Token, DelimToken};
+use syntax::ext::base::SyntaxExtension;
+use syntax::parse::token::Token;
+use syntax::symbol::Symbol;
 use syntax::tokenstream::{TokenTree, TokenStream};

 #[plugin_registrar]
 pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_macro("cond", cond);
+    reg.register_syntax_extension(Symbol::intern("cond"),
+                                  SyntaxExtension::ProcMacro(Box::new(cond)));
 }

-fn cond<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'cx> {
-    let output = cond_rec(TokenStream::from_tts(tts.clone().to_owned()));
-    build_block_emitter(cx, sp, output)
-}
-
-fn cond_rec(input: TokenStream) -> TokenStream {
-    if input.is_empty() {
-        return qquote!();
-    }
-
-    let next = input.slice(0..1);
-    let rest = input.slice_from(1..);
-
-    let clause : TokenStream = match next.maybe_delimited() {
-        Some(ts) => ts,
-        _ => panic!("Invalid input"),
-    };
-
-    // clause is ([test]) [rhs]
-    if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) }
-
-    let test: TokenStream = clause.slice(0..1);
-    let rhs: TokenStream = clause.slice_from(1..);
-
-    if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() {
-        qquote!({unquote(rhs)})
-    } else {
-        qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } })
-    }
+fn cond(input: TokenStream) -> TokenStream {
+    let mut conds = Vec::new();
+    let mut input = input.trees();
+    while let Some(tree) = input.next() {
+        let cond: TokenStream = match *tree {
+            TokenTree::Delimited(_, ref delimited) => delimited.tts.iter().cloned().collect(),
+            _ => panic!("Invalid input"),
+        };
+        let mut trees = cond.trees().cloned();
+        let test = trees.next();
+        let rhs = trees.collect::<TokenStream>();
+        if rhs.is_empty() {
+            panic!("Invalid macro usage in cond: {}", cond);
+        }
+        let is_else = match test {
+            Some(TokenTree::Token(_, Token::Ident(ident))) if ident.name == "else" => true,
+            _ => false,
+        };
+        conds.push(if is_else || input.peek().is_none() {
+            qquote!({ unquote rhs })
+        } else {
+            qquote!(if unquote(test.unwrap()) { unquote rhs } else)
+        });
+    }
+
+    conds.into_iter().collect()
 }
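
For reference, the `cond!` input this plugin consumes looks like the tests further down: each parenthesized clause is `(test rhs)`, with a trailing `else` clause. Taken from those tests:

    fn fact_cond(n: i64) -> i64 {
        cond!(
            ((n == 0) 1)
            (else (n * fact_cond(n - 1)))
        )
    }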

@@ -1,60 +0,0 @@
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![allow(unused_parens)]
-#![feature(plugin)]
-#![feature(plugin_registrar)]
-#![feature(rustc_private)]
-#![plugin(proc_macro_plugin)]
-
-extern crate rustc_plugin;
-extern crate proc_macro_tokens;
-extern crate syntax;
-
-use syntax::ext::proc_macro_shim::prelude::*;
-use proc_macro_tokens::prelude::*;
-
-use rustc_plugin::Registry;
-
-#[plugin_registrar]
-pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_macro("cond", cond);
-}
-
-fn cond<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'cx> {
-    let output = cond_rec(TokenStream::from_tts(tts.clone().to_owned()));
-    build_block_emitter(cx, sp, output)
-}
-
-fn cond_rec(input: TokenStream) -> TokenStream {
-    if input.is_empty() {
-        return qquote!();
-    }
-
-    let next = input.slice(0..1);
-    let rest = input.slice_from(1..);
-
-    let clause : TokenStream = match next.maybe_delimited() {
-        Some(ts) => ts,
-        _ => panic!("Invalid input"),
-    };
-
-    // clause is ([test]) [rhs]
-    if clause.len() < 2 { panic!("Invalid macro usage in cond: {:?}", clause) }
-
-    let test: TokenStream = clause.slice(0..1);
-    let rhs: TokenStream = clause.slice_from(1..);
-
-    if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() {
-        qquote!({unquote(rhs)})
-    } else {
-        qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } })
-    }
-}

@@ -14,22 +14,21 @@
 #![plugin(proc_macro_plugin)]

 extern crate rustc_plugin;
-extern crate proc_macro_tokens;
 extern crate syntax;

-use syntax::ext::proc_macro_shim::prelude::*;
-use proc_macro_tokens::prelude::*;
-
 use rustc_plugin::Registry;
+use syntax::ext::base::SyntaxExtension;
+use syntax::symbol::Symbol;
+use syntax::tokenstream::TokenStream;

 #[plugin_registrar]
 pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_macro("hello", hello);
+    reg.register_syntax_extension(Symbol::intern("hello"),
+                                  SyntaxExtension::ProcMacro(Box::new(hello)));
 }

 // This macro is not very interesting, but it does contain delimited tokens with
 // no content - `()` and `{}` - which has caused problems in the past.
-fn hello<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree]) -> Box<MacResult + 'cx> {
-    let output = qquote!({ fn hello() {} hello(); });
-    build_block_emitter(cx, sp, output)
+fn hello(_: TokenStream) -> TokenStream {
+    qquote!({ fn hello() {} hello(); })
 }

@@ -9,15 +9,14 @@
 // except according to those terms.

 #![feature(plugin, plugin_registrar, rustc_private)]
+#![plugin(proc_macro_plugin)]

-extern crate proc_macro_tokens;
 extern crate rustc_plugin;
 extern crate syntax;

-use proc_macro_tokens::prelude::*;
 use rustc_plugin::Registry;
 use syntax::ext::base::SyntaxExtension;
-use syntax::ext::proc_macro_shim::prelude::*;
+use syntax::tokenstream::TokenStream;
 use syntax::symbol::Symbol;

 #[plugin_registrar]

@@ -35,23 +34,21 @@ pub fn plugin_registrar(reg: &mut Registry) {
 }

 fn attr_tru(_attr: TokenStream, _item: TokenStream) -> TokenStream {
-    lex("fn f1() -> bool { true }")
+    qquote!(fn f1() -> bool { true })
 }

 fn attr_identity(_attr: TokenStream, item: TokenStream) -> TokenStream {
-    let source = item.to_string();
-    lex(&source)
+    qquote!(unquote item)
 }

 fn tru(_ts: TokenStream) -> TokenStream {
-    lex("true")
+    qquote!(true)
 }

 fn ret_tru(_ts: TokenStream) -> TokenStream {
-    lex("return true;")
+    qquote!(return true;)
 }

 fn identity(ts: TokenStream) -> TokenStream {
-    let source = ts.to_string();
-    lex(&source)
+    qquote!(unquote ts)
 }

@@ -14,12 +14,14 @@
 #![feature(rustc_private)]
 #![plugin(proc_macro_plugin)]

-extern crate proc_macro_tokens;
-use proc_macro_tokens::prelude::*;
-
 extern crate syntax;
+extern crate syntax_pos;
+
+use syntax::ast::Ident;
+use syntax::parse::token;
+use syntax::tokenstream::TokenTree;

 fn main() {
-    let lex_true = lex("true");
-    assert_eq!(qquote!(true).eq_unspanned(&lex_true), true);
+    let true_tok = TokenTree::Token(syntax_pos::DUMMY_SP, token::Ident(Ident::from_str("true")));
+    assert!(qquote!(true).eq_unspanned(&true_tok.into()));
 }

@@ -1,54 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:cond_noprelude_plugin.rs
-// ignore-stage1
-
-#![feature(plugin)]
-#![feature(rustc_private)]
-#![plugin(cond_noprelude_plugin)]
-
-fn fact(n : i64) -> i64 {
-    if n == 0 {
-        1
-    } else {
-        n * fact(n - 1)
-    }
-}
-
-fn fact_cond(n : i64) -> i64 {
-    cond!(
-        ((n == 0) 1)
-        (else (n * fact_cond(n-1)))
-    )
-}
-
-fn fib(n : i64) -> i64 {
-    if n == 0 || n == 1 {
-        1
-    } else {
-        fib(n-1) + fib(n-2)
-    }
-}
-
-fn fib_cond(n : i64) -> i64 {
-    cond!(
-        ((n == 0) 1)
-        ((n == 1) 1)
-        (else (fib_cond(n-1) + fib_cond(n-2)))
-    )
-}
-
-fn main() {
-    assert_eq!(fact(3), fact_cond(3));
-    assert_eq!(fact(5), fact_cond(5));
-    assert_eq!(fib(5), fib_cond(5));
-    assert_eq!(fib(8), fib_cond(8));
-}

@@ -1,54 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// aux-build:cond_prelude_plugin.rs
-// ignore-stage1
-
-#![feature(plugin)]
-#![feature(rustc_private)]
-#![plugin(cond_prelude_plugin)]
-
-fn fact(n : i64) -> i64 {
-    if n == 0 {
-        1
-    } else {
-        n * fact(n - 1)
-    }
-}
-
-fn fact_cond(n : i64) -> i64 {
-    cond!(
-        ((n == 0) 1)
-        (else (n * fact_cond(n-1)))
-    )
-}
-
-fn fib(n : i64) -> i64 {
-    if n == 0 || n == 1 {
-        1
-    } else {
-        fib(n-1) + fib(n-2)
-    }
-}
-
-fn fib_cond(n : i64) -> i64 {
-    cond!(
-        ((n == 0) 1)
-        ((n == 1) 1)
-        (else (fib_cond(n-1) + fib_cond(n-2)))
-    )
-}
-
-fn main() {
-    assert_eq!(fact(3), fact_cond(3));
-    assert_eq!(fact(5), fact_cond(5));
-    assert_eq!(fib(5), fib_cond(5));
-    assert_eq!(fib(8), fib_cond(8));
-}