Auto merge of #49545 - alexcrichton:proc-macro-fixes, r=eddyb

proc_macro: Tweak doc comments and negative literals

This commit tweaks the tokenization of a doc comment to use `#[doc = "..."]` like `macro_rules!` does (instead of treating it as a `Literal` token). Additionally, it fixes the treatment of negative literals in the compiler, for example `Literal::i32(-1)`. The fix is a bit of a hack around the current compiler implementation, providing a fix at the proc-macro layer rather than the libsyntax layer.

Closes #48889

Commit 85f0098405, 4 changed files with 165 additions and 55 deletions
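Before the diff itself, a quick illustration of the doc-comment half of the change. The sketch below is not part of the PR: the attribute name `inspect` is made up and it only prints what it sees, but it shows the token shape a macro author can now expect, the same shape the updated `assert_doc` test asserts further down. A `/// text` comment is delivered as `#` followed by a bracketed `[doc = "..."]` group rather than as a single `Literal`.

```rust
// A minimal sketch, not part of this PR: the attribute name `inspect` and
// the printed messages are invented. Written against the unstable
// proc_macro API this commit targets (TokenNode, Delimiter, Spacing).

#![feature(proc_macro)]
#![crate_type = "proc-macro"]

extern crate proc_macro;

use proc_macro::{Delimiter, Spacing, TokenNode, TokenStream};

#[proc_macro_attribute]
pub fn inspect(_attr: TokenStream, input: TokenStream) -> TokenStream {
    for tree in input.clone() {
        match tree.kind {
            // After this change a `/// text` doc comment arrives as a lone
            // `#` operator ...
            TokenNode::Op('#', Spacing::Alone) => println!("saw `#`"),
            // ... followed by a bracketed group holding `doc`, `=`, and a
            // string literal, instead of a single `Literal` token.
            TokenNode::Group(Delimiter::Bracket, _) => println!("saw a bracketed group"),
            _ => {}
        }
    }
    input
}
```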
@@ -40,7 +40,6 @@
 #![feature(lang_items)]
 #![feature(optin_builtin_traits)]
 
-#[macro_use]
 extern crate syntax;
 extern crate syntax_pos;
 extern crate rustc_errors;
@@ -156,7 +155,7 @@ impl IntoIterator for TokenStream
     type IntoIter = TokenTreeIter;
 
     fn into_iter(self) -> TokenTreeIter {
-        TokenTreeIter { cursor: self.0.trees(), next: None }
+        TokenTreeIter { cursor: self.0.trees(), stack: Vec::new() }
     }
 }
 
@@ -554,7 +553,7 @@ impl Literal
 #[unstable(feature = "proc_macro", issue = "38356")]
 pub struct TokenTreeIter {
     cursor: tokenstream::Cursor,
-    next: Option<tokenstream::TokenStream>,
+    stack: Vec<TokenTree>,
 }
 
 #[unstable(feature = "proc_macro", issue = "38356")]
@@ -563,9 +562,10 @@ impl Iterator for TokenTreeIter
 
     fn next(&mut self) -> Option<TokenTree> {
         loop {
-            let next =
-                unwrap_or!(self.next.take().or_else(|| self.cursor.next_as_stream()), return None);
-            let tree = TokenTree::from_internal(next, &mut self.next);
+            let tree = self.stack.pop().or_else(|| {
+                let next = self.cursor.next_as_stream()?;
+                Some(TokenTree::from_internal(next, &mut self.stack))
+            })?;
             if tree.span.0 == DUMMY_SP {
                 if let TokenNode::Group(Delimiter::None, stream) = tree.kind {
                     self.cursor.insert(stream.0);
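The hunk above replaces the iterator's single-token lookahead (`next: Option<tokenstream::TokenStream>`) with a stack of already-converted trees: when `from_internal` has to turn one internal token into several `proc_macro` trees (a multi-character operator, or now a doc comment), it pushes the trailing trees onto the stack and returns the first, and `next` drains the stack before pulling anything new from the cursor. A self-contained sketch of that pattern, with invented names (`Expander`, `pending`) and plain `char`s standing in for token trees:

```rust
// Stand-alone sketch of the stack-based expansion used by the new
// TokenTreeIter: `pending` plays the role of `stack`, `source` the role of
// the cursor. All names here are invented for the example.
struct Expander<I> {
    source: I,
    pending: Vec<char>,
}

impl<I: Iterator<Item = &'static str>> Iterator for Expander<I> {
    type Item = char;

    fn next(&mut self) -> Option<char> {
        // Drain previously expanded characters first, mirroring
        // `self.stack.pop().or_else(..)` in the real iterator.
        self.pending.pop().or_else(|| {
            let multi = self.source.next()?;
            let mut chars = multi.chars();
            let first = chars.next()?;
            // Push the trailing characters in reverse so that popping the
            // stack yields them in their original order.
            self.pending.extend(chars.rev());
            Some(first)
        })
    }
}

fn main() {
    let ops = ["<<=", "=>", "+"];
    let expander = Expander { source: ops.iter().cloned(), pending: Vec::new() };
    assert_eq!(expander.collect::<String>(), "<<==>+");
}
```

Pushing the trailing pieces in reverse is what lets a plain `Vec::pop` hand them back in source order, which is also why the three-character `op!` arms further down push `$c` before `$b`.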
@@ -598,12 +598,12 @@ impl Delimiter
 }
 
 impl TokenTree {
-    fn from_internal(stream: tokenstream::TokenStream, next: &mut Option<tokenstream::TokenStream>)
+    fn from_internal(stream: tokenstream::TokenStream, stack: &mut Vec<TokenTree>)
         -> TokenTree {
         use syntax::parse::token::*;
 
         let (tree, is_joint) = stream.as_tree();
-        let (mut span, token) = match tree {
+        let (span, token) = match tree {
             tokenstream::TokenTree::Token(span, token) => (span, token),
             tokenstream::TokenTree::Delimited(span, delimed) => {
                 let delimiter = Delimiter::from_internal(delimed.delim);
@@ -615,34 +615,32 @@ impl TokenTree
         };
 
         let op_kind = if is_joint { Spacing::Joint } else { Spacing::Alone };
+        macro_rules! tt {
+            ($e:expr) => (TokenTree { span: Span(span), kind: $e })
+        }
         macro_rules! op {
-            ($op:expr) => { TokenNode::Op($op, op_kind) }
-        }
-
-        macro_rules! joint {
-            ($first:expr, $rest:expr) => { joint($first, $rest, is_joint, &mut span, next) }
-        }
-
-        fn joint(first: char, rest: Token, is_joint: bool, span: &mut syntax_pos::Span,
-                 next: &mut Option<tokenstream::TokenStream>)
-                 -> TokenNode {
-            let (first_span, rest_span) = (*span, *span);
-            *span = first_span;
-            let tree = tokenstream::TokenTree::Token(rest_span, rest);
-            *next = Some(if is_joint { tree.joint() } else { tree.into() });
-            TokenNode::Op(first, Spacing::Joint)
+            ($a:expr) => (TokenNode::Op($a, op_kind));
+            ($a:expr, $b:expr) => ({
+                stack.push(tt!(TokenNode::Op($b, op_kind).into()));
+                TokenNode::Op($a, Spacing::Joint)
+            });
+            ($a:expr, $b:expr, $c:expr) => ({
+                stack.push(tt!(TokenNode::Op($c, op_kind)));
+                stack.push(tt!(TokenNode::Op($b, Spacing::Joint)));
+                TokenNode::Op($a, Spacing::Joint)
+            })
         }
 
         let kind = match token {
             Eq => op!('='),
             Lt => op!('<'),
-            Le => joint!('<', Eq),
-            EqEq => joint!('=', Eq),
-            Ne => joint!('!', Eq),
-            Ge => joint!('>', Eq),
+            Le => op!('<', '='),
+            EqEq => op!('=', '='),
+            Ne => op!('!', '='),
+            Ge => op!('>', '='),
             Gt => op!('>'),
-            AndAnd => joint!('&', BinOp(And)),
-            OrOr => joint!('|', BinOp(Or)),
+            AndAnd => op!('&', '&'),
+            OrOr => op!('|', '|'),
             Not => op!('!'),
             Tilde => op!('~'),
             BinOp(Plus) => op!('+'),
@@ -653,37 +651,46 @@ impl TokenTree
             BinOp(Caret) => op!('^'),
             BinOp(And) => op!('&'),
             BinOp(Or) => op!('|'),
-            BinOp(Shl) => joint!('<', Lt),
-            BinOp(Shr) => joint!('>', Gt),
-            BinOpEq(Plus) => joint!('+', Eq),
-            BinOpEq(Minus) => joint!('-', Eq),
-            BinOpEq(Star) => joint!('*', Eq),
-            BinOpEq(Slash) => joint!('/', Eq),
-            BinOpEq(Percent) => joint!('%', Eq),
-            BinOpEq(Caret) => joint!('^', Eq),
-            BinOpEq(And) => joint!('&', Eq),
-            BinOpEq(Or) => joint!('|', Eq),
-            BinOpEq(Shl) => joint!('<', Le),
-            BinOpEq(Shr) => joint!('>', Ge),
+            BinOp(Shl) => op!('<', '<'),
+            BinOp(Shr) => op!('>', '>'),
+            BinOpEq(Plus) => op!('+', '='),
+            BinOpEq(Minus) => op!('-', '='),
+            BinOpEq(Star) => op!('*', '='),
+            BinOpEq(Slash) => op!('/', '='),
+            BinOpEq(Percent) => op!('%', '='),
+            BinOpEq(Caret) => op!('^', '='),
+            BinOpEq(And) => op!('&', '='),
+            BinOpEq(Or) => op!('|', '='),
+            BinOpEq(Shl) => op!('<', '<', '='),
+            BinOpEq(Shr) => op!('>', '>', '='),
             At => op!('@'),
             Dot => op!('.'),
-            DotDot => joint!('.', Dot),
-            DotDotDot => joint!('.', DotDot),
-            DotDotEq => joint!('.', DotEq),
+            DotDot => op!('.', '.'),
+            DotDotDot => op!('.', '.', '.'),
+            DotDotEq => op!('.', '.', '='),
             Comma => op!(','),
             Semi => op!(';'),
             Colon => op!(':'),
-            ModSep => joint!(':', Colon),
-            RArrow => joint!('-', Gt),
-            LArrow => joint!('<', BinOp(Minus)),
-            FatArrow => joint!('=', Gt),
+            ModSep => op!(':', ':'),
+            RArrow => op!('-', '>'),
+            LArrow => op!('<', '-'),
+            FatArrow => op!('=', '>'),
             Pound => op!('#'),
             Dollar => op!('$'),
             Question => op!('?'),
 
             Ident(ident, false) | Lifetime(ident) => TokenNode::Term(Term(ident.name)),
             Ident(ident, true) => TokenNode::Term(Term(Symbol::intern(&format!("r#{}", ident)))),
-            Literal(..) | DocComment(..) => TokenNode::Literal(self::Literal(token)),
+            Literal(..) => TokenNode::Literal(self::Literal(token)),
+            DocComment(c) => {
+                let stream = vec![
+                    tt!(TokenNode::Term(Term::intern("doc"))),
+                    tt!(op!('=')),
+                    tt!(TokenNode::Literal(self::Literal(Literal(Lit::Str_(c), None)))),
+                ].into_iter().collect();
+                stack.push(tt!(TokenNode::Group(Delimiter::Bracket, stream)));
+                op!('#')
+            }
 
             Interpolated(_) => {
                 __internal::with_sess(|(sess, _)| {
@@ -692,7 +699,7 @@ impl TokenTree
                 })
             }
 
-            DotEq => joint!('.', Eq),
+            DotEq => op!('.', '='),
             OpenDelim(..) | CloseDelim(..) => unreachable!(),
             Whitespace | Comment | Shebang(..) | Eof => unreachable!(),
         };
@@ -724,7 +731,29 @@ impl TokenTree
                 } else { Ident(ident, false) };
                 return TokenTree::Token(self.span.0, token).into();
             }
-            TokenNode::Literal(token) => return TokenTree::Token(self.span.0, token.0).into(),
+            TokenNode::Literal(self::Literal(Literal(Lit::Integer(ref a), b)))
+                if a.as_str().starts_with("-") =>
+            {
+                let minus = BinOp(BinOpToken::Minus);
+                let integer = Symbol::intern(&a.as_str()[1..]);
+                let integer = Literal(Lit::Integer(integer), b);
+                let a = TokenTree::Token(self.span.0, minus);
+                let b = TokenTree::Token(self.span.0, integer);
+                return vec![a, b].into_iter().collect()
+            }
+            TokenNode::Literal(self::Literal(Literal(Lit::Float(ref a), b)))
+                if a.as_str().starts_with("-") =>
+            {
+                let minus = BinOp(BinOpToken::Minus);
+                let float = Symbol::intern(&a.as_str()[1..]);
+                let float = Literal(Lit::Float(float), b);
+                let a = TokenTree::Token(self.span.0, minus);
+                let b = TokenTree::Token(self.span.0, float);
+                return vec![a, b].into_iter().collect()
+            }
+            TokenNode::Literal(token) => {
+                return TokenTree::Token(self.span.0, token.0).into()
+            }
         };
 
         let token = match op {
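The two new `TokenNode::Literal` arms above are the negative-literal workaround described in the commit message: a literal whose text starts with `-` (as produced by `Literal::i32(-1)` or `Literal::f32(-1.0)`) is re-emitted as a `-` operator token followed by the positive literal, since libsyntax itself has no negative literal tokens. A toy, string-level version of the same rule (the function name is invented for illustration):

```rust
// Toy model of the splitting rule, using plain strings instead of the
// compiler's Symbol/Lit types; `split_negative` is an invented name.
fn split_negative(text: &str) -> Vec<String> {
    if text.starts_with("-") {
        // Emit a `-` operator followed by the positive literal, mirroring
        // the two `TokenTree::Token`s built in the real code.
        vec!["-".to_string(), text[1..].to_string()]
    } else {
        vec![text.to_string()]
    }
}

fn main() {
    assert_eq!(split_negative("-1"), ["-", "1"]);
    assert_eq!(split_negative("-1.0"), ["-", "1.0"]);
    assert_eq!(split_negative("42"), ["42"]);
}
```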
@@ -16,7 +16,7 @@
 
 extern crate proc_macro;
 
-use proc_macro::{TokenStream, TokenTree, TokenNode, Delimiter, Literal};
+use proc_macro::{TokenStream, TokenTree, TokenNode, Delimiter, Literal, Spacing};
 
 #[proc_macro_attribute]
 pub fn foo(attr: TokenStream, input: TokenStream) -> TokenStream {
@@ -65,10 +65,34 @@ fn assert_inline(slice: &mut &[TokenTree]) {
 
 fn assert_doc(slice: &mut &[TokenTree]) {
     match slice[0].kind {
-        TokenNode::Literal(_) => {}
-        _ => panic!("expected literal doc comment got other"),
+        TokenNode::Op('#', Spacing::Alone) => {}
+        _ => panic!("expected #"),
     }
-    *slice = &slice[1..];
+    let inner = match slice[1].kind {
+        TokenNode::Group(Delimiter::Bracket, ref s) => s.clone(),
+        _ => panic!("expected brackets"),
+    };
+    let tokens = inner.into_iter().collect::<Vec<_>>();
+    let tokens = &tokens[..];
+
+    if tokens.len() != 3 {
+        panic!("expected three tokens in doc")
+    }
+
+    match tokens[0].kind {
+        TokenNode::Term(ref t) => assert_eq!("doc", t.as_str()),
+        _ => panic!("expected `doc`"),
+    }
+    match tokens[1].kind {
+        TokenNode::Op('=', Spacing::Alone) => {}
+        _ => panic!("expected equals"),
+    }
+    match tokens[2].kind {
+        TokenNode::Literal(_) => {}
+        _ => panic!("expected literal"),
+    }
+
+    *slice = &slice[2..];
 }
 
 fn assert_invoc(slice: &mut &[TokenTree]) {
@@ -0,0 +1,34 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// no-prefer-dynamic
+
+#![feature(proc_macro)]
+#![crate_type = "proc-macro"]
+
+extern crate proc_macro;
+
+use proc_macro::*;
+
+#[proc_macro]
+pub fn neg_one(_input: TokenStream) -> TokenStream {
+    TokenTree {
+        span: Span::call_site(),
+        kind: TokenNode::Literal(Literal::i32(-1)),
+    }.into()
+}
+
+#[proc_macro]
+pub fn neg_one_float(_input: TokenStream) -> TokenStream {
+    TokenTree {
+        span: Span::call_site(),
+        kind: TokenNode::Literal(Literal::f32(-1.0)),
+    }.into()
+}
src/test/run-pass-fulldeps/proc-macro/negative-token.rs (new file, 23 lines)
@@ -0,0 +1,23 @@
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// aux-build:negative-token.rs
+// ignore-stage1
+
+#![feature(proc_macro)]
+
+extern crate negative_token;
+
+use negative_token::*;
+
+fn main() {
+    assert_eq!(-1, neg_one!());
+    assert_eq!(-1.0, neg_one_float!());
+}