run rustfmt on libsyntax_ext folder

parent 8787a12334, commit d652639524
11 changed files with 273 additions and 247 deletions
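The hunks below are mechanical style changes only: rustfmt adds trailing commas to the last arm of multi-line matches and to multi-line struct literals, rewrites vec!(...) as vec![...], spaces out lifetime bounds in trait objects (Box<MacResult + 'cx>), and splits long function signatures so each argument sits on its own line. A minimal, self-contained sketch of those rules (an illustrative toy example, not copied from any single hunk):

    // Toy enum mirroring the State machine touched in the asm hunks below.
    enum State {
        Options,
        StateNone, // rustfmt adds the trailing comma
    }

    fn next(s: &State) -> State {
        match *s {
            State::Options => State::StateNone,
            State::StateNone => State::StateNone, // ...and here
        }
    }

    fn main() {
        let states = vec![State::Options]; // vec!(..) becomes vec![..]
        let _ = next(&states[0]);
    }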
@@ -8,9 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-/*
- * Inline assembly support.
- */
+// Inline assembly support.
+//
 use self::State::*;
 
 use syntax::ast;
@@ -31,43 +30,48 @@ enum State {
     Inputs,
     Clobbers,
     Options,
-    StateNone
+    StateNone,
 }
 
 impl State {
     fn next(&self) -> State {
         match *self {
             Asm => Outputs,
             Outputs => Inputs,
             Inputs => Clobbers,
             Clobbers => Options,
             Options => StateNone,
-            StateNone => StateNone
+            StateNone => StateNone,
         }
     }
 }
 
 const OPTIONS: &'static [&'static str] = &["volatile", "alignstack", "intel"];
 
-pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-                       -> Box<base::MacResult+'cx> {
+pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt,
+                       sp: Span,
+                       tts: &[tokenstream::TokenTree])
+                       -> Box<base::MacResult + 'cx> {
     if !cx.ecfg.enable_asm() {
-        feature_gate::emit_feature_err(
-            &cx.parse_sess.span_diagnostic, "asm", sp,
-            feature_gate::GateIssue::Language,
-            feature_gate::EXPLAIN_ASM);
+        feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
+                                       "asm",
+                                       sp,
+                                       feature_gate::GateIssue::Language,
+                                       feature_gate::EXPLAIN_ASM);
         return DummyResult::expr(sp);
     }
 
     // Split the tts before the first colon, to avoid `asm!("x": y)` being
     // parsed as `asm!(z)` with `z = "x": y` which is type ascription.
-    let first_colon = tts.iter().position(|tt| {
-        match *tt {
-            tokenstream::TokenTree::Token(_, token::Colon) |
-            tokenstream::TokenTree::Token(_, token::ModSep) => true,
-            _ => false
-        }
-    }).unwrap_or(tts.len());
+    let first_colon = tts.iter()
+        .position(|tt| {
+            match *tt {
+                tokenstream::TokenTree::Token(_, token::Colon) |
+                tokenstream::TokenTree::Token(_, token::ModSep) => true,
+                _ => false,
+            }
+        })
+        .unwrap_or(tts.len());
     let mut p = cx.new_parser_from_tts(&tts[first_colon..]);
     let mut asm = token::InternedString::new("");
     let mut asm_str_style = None;
@@ -91,8 +95,9 @@
         }
         // Nested parser, stop before the first colon (see above).
         let mut p2 = cx.new_parser_from_tts(&tts[..first_colon]);
-        let (s, style) = match expr_to_string(cx, panictry!(p2.parse_expr()),
-                                              "inline assembly must be a string literal") {
+        let (s, style) = match expr_to_string(cx,
+                                              panictry!(p2.parse_expr()),
+                                              "inline assembly must be a string literal") {
             Some((s, st)) => (s, st),
             // let compilation continue
             None => return DummyResult::expr(sp),
@@ -109,9 +114,7 @@
                 asm_str_style = Some(style);
             }
             Outputs => {
-                while p.token != token::Eof &&
-                      p.token != token::Colon &&
-                      p.token != token::ModSep {
+                while p.token != token::Eof && p.token != token::Colon && p.token != token::ModSep {
 
                     if !outputs.is_empty() {
                         p.eat(&token::Comma);
@@ -136,8 +139,7 @@
                     let output = match ch.next() {
                         Some('=') => None,
                         Some('+') => {
-                            Some(token::intern_and_get_ident(&format!(
-                                "={}", ch.as_str())))
+                            Some(token::intern_and_get_ident(&format!("={}", ch.as_str())))
                         }
                         _ => {
                             cx.span_err(span, "output operand constraint lacks '=' or '+'");
@@ -156,9 +158,7 @@
                 }
             }
             Inputs => {
-                while p.token != token::Eof &&
-                      p.token != token::Colon &&
-                      p.token != token::ModSep {
+                while p.token != token::Eof && p.token != token::Colon && p.token != token::ModSep {
 
                     if !inputs.is_empty() {
                         p.eat(&token::Comma);
@@ -180,9 +180,7 @@
                 }
             }
             Clobbers => {
-                while p.token != token::Eof &&
-                      p.token != token::Colon &&
-                      p.token != token::ModSep {
+                while p.token != token::Eof && p.token != token::Colon && p.token != token::ModSep {
 
                     if !clobs.is_empty() {
                         p.eat(&token::Comma);
@@ -218,25 +216,25 @@
                     p.eat(&token::Comma);
                 }
             }
-            StateNone => ()
+            StateNone => (),
         }
 
         loop {
             // MOD_SEP is a double colon '::' without space in between.
             // When encountered, the state must be advanced twice.
             match (&p.token, state.next(), state.next().next()) {
                 (&token::Colon, StateNone, _) |
                 (&token::ModSep, _, StateNone) => {
                     p.bump();
                     break 'statement;
                 }
                 (&token::Colon, st, _) |
                 (&token::ModSep, _, st) => {
                     p.bump();
                     state = st;
                 }
                 (&token::Eof, _, _) => break 'statement,
-                _ => break
+                _ => break,
             }
         }
     }
@@ -23,7 +23,7 @@ use syntax_pos::Span;
 pub fn expand_cfg<'cx>(cx: &mut ExtCtxt,
                        sp: Span,
                        tts: &[tokenstream::TokenTree])
-                       -> Box<base::MacResult+'static> {
+                       -> Box<base::MacResult + 'static> {
     let mut p = cx.new_parser_from_tts(tts);
     let cfg = panictry!(p.parse_meta_item());
 
@@ -20,10 +20,10 @@ use std::string::String;
 pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
                          sp: syntax_pos::Span,
                          tts: &[tokenstream::TokenTree])
-                         -> Box<base::MacResult+'static> {
+                         -> Box<base::MacResult + 'static> {
     let es = match base::get_exprs_from_tts(cx, sp, tts) {
         Some(e) => e,
-        None => return base::DummyResult::expr(sp)
+        None => return base::DummyResult::expr(sp),
     };
     let mut accumulator = String::new();
     for e in es {
@@ -57,7 +57,5 @@
             }
         }
     }
-    base::MacEager::expr(cx.expr_str(
-        sp,
-        token::intern_and_get_ident(&accumulator[..])))
+    base::MacEager::expr(cx.expr_str(sp, token::intern_and_get_ident(&accumulator[..])))
 }
@@ -18,8 +18,10 @@ use syntax::ptr::P;
 use syntax_pos::Span;
 use syntax::tokenstream::TokenTree;
 
-pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[TokenTree])
-                              -> Box<base::MacResult+'cx> {
+pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
+                              sp: Span,
+                              tts: &[TokenTree])
+                              -> Box<base::MacResult + 'cx> {
     if !cx.ecfg.enable_concat_idents() {
         feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
                                        "concat_idents",
@@ -33,35 +35,40 @@
     for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match *e {
-                TokenTree::Token(_, token::Comma) => {},
+                TokenTree::Token(_, token::Comma) => {}
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
                     return DummyResult::expr(sp);
-                },
+                }
             }
         } else {
             match *e {
-                TokenTree::Token(_, token::Ident(ident)) => {
-                    res_str.push_str(&ident.name.as_str())
-                },
+                TokenTree::Token(_, token::Ident(ident)) => res_str.push_str(&ident.name.as_str()),
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");
                     return DummyResult::expr(sp);
-                },
+                }
             }
         }
     }
     let res = str_to_ident(&res_str);
 
-    struct Result { ident: ast::Ident, span: Span };
+    struct Result {
+        ident: ast::Ident,
+        span: Span,
+    };
 
     impl Result {
         fn path(&self) -> ast::Path {
             let segment = ast::PathSegment {
                 identifier: self.ident,
-                parameters: ast::PathParameters::none()
+                parameters: ast::PathParameters::none(),
             };
-            ast::Path { span: self.span, global: false, segments: vec![segment] }
+            ast::Path {
+                span: self.span,
+                global: false,
+                segments: vec![segment],
+            }
         }
     }
 
@@ -84,5 +91,8 @@
         }
     }
 
-    Box::new(Result { ident: res, span: sp })
+    Box::new(Result {
+        ident: res,
+        span: sp,
+    })
 }
@@ -36,20 +36,20 @@ pub enum PtrTy<'a> {
 /// for type parameters and a lifetime.
 #[derive(Clone, Eq, PartialEq)]
 pub struct Path<'a> {
-    pub path: Vec<&'a str> ,
+    pub path: Vec<&'a str>,
     pub lifetime: Option<&'a str>,
     pub params: Vec<Box<Ty<'a>>>,
     pub global: bool,
 }
 
 impl<'a> Path<'a> {
-    pub fn new<'r>(path: Vec<&'r str> ) -> Path<'r> {
+    pub fn new<'r>(path: Vec<&'r str>) -> Path<'r> {
         Path::new_(path, None, Vec::new(), true)
     }
     pub fn new_local<'r>(path: &'r str) -> Path<'r> {
-        Path::new_(vec!( path ), None, Vec::new(), false)
+        Path::new_(vec![path], None, Vec::new(), false)
     }
-    pub fn new_<'r>(path: Vec<&'r str> ,
+    pub fn new_<'r>(path: Vec<&'r str>,
                     lifetime: Option<&'r str>,
                     params: Vec<Box<Ty<'r>>>,
                     global: bool)
@@ -58,7 +58,7 @@ impl<'a> Path<'a> {
             path: path,
             lifetime: lifetime,
             params: params,
-            global: global
+            global: global,
         }
     }
 
@@ -94,7 +94,7 @@ pub enum Ty<'a> {
     /// parameter, and things like `i32`
     Literal(Path<'a>),
     /// includes unit
-    Tuple(Vec<Ty<'a>> )
+    Tuple(Vec<Ty<'a>>),
 }
 
 pub fn borrowed_ptrty<'r>() -> PtrTy<'r> {
@@ -119,14 +119,14 @@ pub fn nil_ty<'r>() -> Ty<'r> {
 fn mk_lifetime(cx: &ExtCtxt, span: Span, lt: &Option<&str>) -> Option<ast::Lifetime> {
     match *lt {
         Some(ref s) => Some(cx.lifetime(span, cx.ident_of(*s).name)),
-        None => None
+        None => None,
     }
 }
 
 fn mk_lifetimes(cx: &ExtCtxt, span: Span, lt: &Option<&str>) -> Vec<ast::Lifetime> {
     match *lt {
-        Some(ref s) => vec!(cx.lifetime(span, cx.ident_of(*s).name)),
-        None => vec!()
+        Some(ref s) => vec![cx.lifetime(span, cx.ident_of(*s).name)],
+        None => vec![],
     }
 }
 
@@ -145,13 +145,11 @@ impl<'a> Ty<'a> {
                         let lt = mk_lifetime(cx, span, lt);
                         cx.ty_rptr(span, raw_ty, lt, mutbl)
                     }
-                    Raw(mutbl) => cx.ty_ptr(span, raw_ty, mutbl)
+                    Raw(mutbl) => cx.ty_ptr(span, raw_ty, mutbl),
                 }
             }
-            Literal(ref p) => { p.to_ty(cx, span, self_ty, self_generics) }
-            Self_ => {
-                cx.ty_path(self.to_path(cx, span, self_ty, self_generics))
-            }
+            Literal(ref p) => p.to_ty(cx, span, self_ty, self_generics),
+            Self_ => cx.ty_path(self.to_path(cx, span, self_ty, self_generics)),
             Tuple(ref fields) => {
                 let ty = ast::TyKind::Tup(fields.iter()
                     .map(|f| f.to_ty(cx, span, self_ty, self_generics))
@@ -169,20 +167,25 @@ impl<'a> Ty<'a> {
                -> ast::Path {
         match *self {
             Self_ => {
-                let self_params = self_generics.ty_params.iter().map(|ty_param| {
-                    cx.ty_ident(span, ty_param.ident)
-                }).collect();
-                let lifetimes = self_generics.lifetimes.iter()
-                    .map(|d| d.lifetime)
-                    .collect();
+                let self_params = self_generics.ty_params
+                    .iter()
+                    .map(|ty_param| cx.ty_ident(span, ty_param.ident))
+                    .collect();
+                let lifetimes = self_generics.lifetimes
+                    .iter()
+                    .map(|d| d.lifetime)
+                    .collect();
 
-                cx.path_all(span, false, vec![self_ty], lifetimes, self_params, Vec::new())
+                cx.path_all(span,
+                            false,
+                            vec![self_ty],
+                            lifetimes,
+                            self_params,
+                            Vec::new())
             }
-            Literal(ref p) => {
-                p.to_path(cx, span, self_ty, self_generics)
-            }
-            Ptr(..) => { cx.span_bug(span, "pointer in a path in generic `derive`") }
-            Tuple(..) => { cx.span_bug(span, "tuple in a path in generic `derive`") }
+            Literal(ref p) => p.to_path(cx, span, self_ty, self_generics),
+            Ptr(..) => cx.span_bug(span, "pointer in a path in generic `derive`"),
+            Tuple(..) => cx.span_bug(span, "tuple in a path in generic `derive`"),
         }
     }
 }
@@ -195,16 +198,16 @@ fn mk_ty_param(cx: &ExtCtxt,
               self_ident: Ident,
               self_generics: &Generics)
               -> ast::TyParam {
-    let bounds =
-        bounds.iter().map(|b| {
+    let bounds = bounds.iter()
+        .map(|b| {
             let path = b.to_path(cx, span, self_ident, self_generics);
             cx.typarambound(path)
-        }).collect();
+        })
+        .collect();
     cx.typaram(span, cx.ident_of(name), bounds, None)
 }
 
-fn mk_generics(lifetimes: Vec<ast::LifetimeDef>, ty_params: Vec<ast::TyParam>)
-               -> Generics {
+fn mk_generics(lifetimes: Vec<ast::LifetimeDef>, ty_params: Vec<ast::TyParam>) -> Generics {
     Generics {
         lifetimes: lifetimes,
         ty_params: P::from_vec(ty_params),
@@ -225,7 +228,8 @@ pub struct LifetimeBounds<'a> {
 impl<'a> LifetimeBounds<'a> {
     pub fn empty() -> LifetimeBounds<'a> {
         LifetimeBounds {
-            lifetimes: Vec::new(), bounds: Vec::new()
+            lifetimes: Vec::new(),
+            bounds: Vec::new(),
         }
     }
     pub fn to_generics(&self,
@@ -234,46 +238,49 @@ impl<'a> LifetimeBounds<'a> {
                        self_ty: Ident,
                        self_generics: &Generics)
                        -> Generics {
-        let lifetimes = self.lifetimes.iter().map(|&(ref lt, ref bounds)| {
-            let bounds =
-                bounds.iter().map(
-                    |b| cx.lifetime(span, cx.ident_of(*b).name)).collect();
-            cx.lifetime_def(span, cx.ident_of(*lt).name, bounds)
-        }).collect();
-        let ty_params = self.bounds.iter().map(|t| {
-            match *t {
-                (ref name, ref bounds) => {
-                    mk_ty_param(cx,
-                                span,
-                                *name,
-                                bounds,
-                                self_ty,
-                                self_generics)
+        let lifetimes = self.lifetimes
+            .iter()
+            .map(|&(ref lt, ref bounds)| {
+                let bounds = bounds.iter()
+                    .map(|b| cx.lifetime(span, cx.ident_of(*b).name))
+                    .collect();
+                cx.lifetime_def(span, cx.ident_of(*lt).name, bounds)
+            })
+            .collect();
+        let ty_params = self.bounds
+            .iter()
+            .map(|t| {
+                match *t {
+                    (ref name, ref bounds) => {
+                        mk_ty_param(cx, span, *name, bounds, self_ty, self_generics)
+                    }
                 }
-            }
-        }).collect();
+            })
+            .collect();
         mk_generics(lifetimes, ty_params)
     }
 }
 
-pub fn get_explicit_self(cx: &ExtCtxt, span: Span, self_ptr: &Option<PtrTy>)
-                         -> (P<Expr>, ast::ExplicitSelf) {
+pub fn get_explicit_self(cx: &ExtCtxt,
+                         span: Span,
+                         self_ptr: &Option<PtrTy>)
+                         -> (P<Expr>, ast::ExplicitSelf) {
     // this constructs a fresh `self` path
     let self_path = cx.expr_self(span);
     match *self_ptr {
-        None => {
-            (self_path, respan(span, SelfKind::Value(ast::Mutability::Immutable)))
-        }
+        None => (self_path, respan(span, SelfKind::Value(ast::Mutability::Immutable))),
         Some(ref ptr) => {
-            let self_ty = respan(
-                span,
+            let self_ty =
+                respan(span,
                        match *ptr {
                            Borrowed(ref lt, mutbl) => {
                                let lt = lt.map(|s| cx.lifetime(span, cx.ident_of(s).name));
                                SelfKind::Region(lt, mutbl)
                            }
-                           Raw(_) => cx.span_bug(span, "attempted to use *self in deriving definition")
-                       });
+                           Raw(_) => {
+                               cx.span_bug(span, "attempted to use *self in deriving definition")
+                           }
+                       });
             let self_expr = cx.expr_deref(span, self_path);
             (self_expr, self_ty)
         }
@@ -10,7 +10,7 @@
 
 //! The compiler code necessary to implement the `#[derive]` extensions.
 
-use syntax::ast::{MetaItem, self};
+use syntax::ast::{self, MetaItem};
 use syntax::attr::AttrMetaMethods;
 use syntax::ext::base::{Annotatable, ExtCtxt, SyntaxEnv};
 use syntax::ext::base::{MultiDecorator, MultiItemDecorator, MultiModifier};
@@ -99,11 +99,11 @@ fn expand_derive(cx: &mut ExtCtxt,
 
     for titem in traits.iter().rev() {
         let tname = if titem.is_word() {
-            titem.name() }
-        else {
+            titem.name()
+        } else {
             cx.span_err(titem.span, "malformed `derive` entry");
             continue;
         };
 
         if !(is_builtin_trait(&tname) || cx.ecfg.enable_custom_derive()) {
             feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
@@ -8,11 +8,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-/*
- * The compiler code necessary to support the env! extension. Eventually this
- * should all get sucked into either the compiler syntax extension plugin
- * interface.
- */
+// The compiler code necessary to support the env! extension. Eventually this
+// should all get sucked into either the compiler syntax extension plugin
+// interface.
+//
 
 use syntax::ast;
 use syntax::ext::base::*;
@@ -24,66 +23,61 @@ use syntax::tokenstream;
 
 use std::env;
 
-pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-                              -> Box<base::MacResult+'cx> {
+pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt,
+                              sp: Span,
+                              tts: &[tokenstream::TokenTree])
+                              -> Box<base::MacResult + 'cx> {
     let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") {
         None => return DummyResult::expr(sp),
-        Some(v) => v
+        Some(v) => v,
     };
 
     let e = match env::var(&var[..]) {
         Err(..) => {
             cx.expr_path(cx.path_all(sp,
                                      true,
                                      cx.std_path(&["option", "Option", "None"]),
                                      Vec::new(),
-                                     vec!(cx.ty_rptr(sp,
-                                                     cx.ty_ident(sp,
-                                                                 cx.ident_of("str")),
-                                                     Some(cx.lifetime(sp,
-                                                                      cx.ident_of(
-                                                                          "'static").name)),
-                                                     ast::Mutability::Immutable)),
-                                     Vec::new()))
-        }
-        Ok(s) => {
-            cx.expr_call_global(sp,
-                                cx.std_path(&["option", "Option", "Some"]),
-                                vec!(cx.expr_str(sp,
-                                                 token::intern_and_get_ident(
-                                                     &s[..]))))
-        }
+                                     vec![cx.ty_rptr(sp,
+                                                     cx.ty_ident(sp, cx.ident_of("str")),
+                                                     Some(cx.lifetime(sp,
+                                                                      cx.ident_of("'static")
+                                                                          .name)),
+                                                     ast::Mutability::Immutable)],
+                                     Vec::new()))
+        }
+        Ok(s) => {
+            cx.expr_call_global(sp,
+                                cx.std_path(&["option", "Option", "Some"]),
+                                vec![cx.expr_str(sp, token::intern_and_get_ident(&s[..]))])
+        }
     };
     MacEager::expr(e)
 }
 
-pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
-                       -> Box<base::MacResult+'cx> {
+pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt,
+                       sp: Span,
+                       tts: &[tokenstream::TokenTree])
+                       -> Box<base::MacResult + 'cx> {
     let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
         Some(ref exprs) if exprs.is_empty() => {
             cx.span_err(sp, "env! takes 1 or 2 arguments");
             return DummyResult::expr(sp);
         }
         None => return DummyResult::expr(sp),
-        Some(exprs) => exprs.into_iter()
+        Some(exprs) => exprs.into_iter(),
     };
 
-    let var = match expr_to_string(cx,
-                                   exprs.next().unwrap(),
-                                   "expected string literal") {
+    let var = match expr_to_string(cx, exprs.next().unwrap(), "expected string literal") {
        None => return DummyResult::expr(sp),
-       Some((v, _style)) => v
+       Some((v, _style)) => v,
    };
    let msg = match exprs.next() {
-       None => {
-           token::intern_and_get_ident(&format!("environment variable `{}` \
-                                                 not defined",
-                                                var))
-       }
+       None => token::intern_and_get_ident(&format!("environment variable `{}` not defined", var)),
        Some(second) => {
           match expr_to_string(cx, second, "expected string literal") {
              None => return DummyResult::expr(sp),
-             Some((s, _style)) => s
+             Some((s, _style)) => s,
          }
       }
   };
@@ -98,7 +92,7 @@
             cx.span_err(sp, &msg);
             cx.expr_usize(sp, 0)
         }
-        Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s))
+        Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s)),
     };
     MacEager::expr(e)
 }
@@ -37,7 +37,7 @@ enum Position {
     Named(String),
 }
 
-struct Context<'a, 'b:'a> {
+struct Context<'a, 'b: 'a> {
     ecx: &'a mut ExtCtxt<'b>,
     /// The macro's call site. References to unstable formatting internals must
     /// use this span to pass the stability checker.
@@ -120,7 +120,9 @@ struct Context<'a, 'b:'a> {
 /// ```ignore
 /// Some((fmtstr, parsed arguments, index map for named arguments))
 /// ```
-fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[tokenstream::TokenTree])
+fn parse_args(ecx: &mut ExtCtxt,
+              sp: Span,
+              tts: &[tokenstream::TokenTree])
               -> Option<(P<ast::Expr>, Vec<P<ast::Expr>>, HashMap<String, usize>)> {
     let mut args = Vec::<P<ast::Expr>>::new();
     let mut names = HashMap::<String, usize>::new();
@@ -138,7 +140,9 @@
             ecx.span_err(sp, "expected token: `,`");
             return None;
         }
-        if p.token == token::Eof { break } // accept trailing commas
+        if p.token == token::Eof {
+            break;
+        } // accept trailing commas
         if named || (p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq)) {
             named = true;
             let ident = match p.token {
@@ -155,7 +159,7 @@
                 _ => {
                     ecx.span_err(p.span,
                                  &format!("expected ident for named argument, found `{}`",
                                           p.this_token_to_string()));
                     return None;
                 }
             };
@@ -164,9 +168,7 @@
             panictry!(p.expect(&token::Eq));
             let e = panictry!(p.parse_expr());
             if let Some(prev) = names.get(name) {
-                ecx.struct_span_err(e.span,
-                                    &format!("duplicate argument named `{}`",
-                                             name))
+                ecx.struct_span_err(e.span, &format!("duplicate argument named `{}`", name))
                     .span_note(args[*prev].span, "previously here")
                     .emit();
                 continue;
@@ -235,7 +237,8 @@ impl<'a, 'b> Context<'a, 'b> {
 
     fn verify_count(&mut self, c: parse::Count) {
         match c {
-            parse::CountImplied | parse::CountIs(..) => {}
+            parse::CountImplied |
+            parse::CountIs(..) => {}
             parse::CountIsParam(i) => {
                 self.verify_arg_type(Exact(i), Count);
             }
@@ -260,7 +263,8 @@ impl<'a, 'b> Context<'a, 'b> {
             Exact(arg) => {
                 if self.args.len() <= arg {
                     let msg = format!("invalid reference to argument `{}` ({})",
-                                      arg, self.describe_num_args());
+                                      arg,
+                                      self.describe_num_args());
 
                     self.ecx.span_err(self.fmtsp, &msg[..]);
                     return;
@@ -394,9 +398,7 @@ impl<'a, 'b> Context<'a, 'b> {
                     let arg = self.ecx.expr_usize(sp, i);
                     self.ecx.expr_call_global(sp, path, vec![arg])
                 }
-                None => {
-                    self.ecx.expr_path(self.ecx.path_global(sp, path))
-                }
+                None => self.ecx.expr_path(self.ecx.path_global(sp, path)),
            }
        };
        match arg.position {
@@ -436,11 +438,14 @@ impl<'a, 'b> Context<'a, 'b> {
                 flags: 0,
                 precision: parse::CountImplied,
                 width: parse::CountImplied,
-                ty: arg.format.ty
-            }
+                ty: arg.format.ty,
+            },
         };
 
-        let fill = match arg.format.fill { Some(c) => c, None => ' ' };
+        let fill = match arg.format.fill {
+            Some(c) => c,
+            None => ' ',
+        };
 
         if *arg != simple_arg || fill != ' ' {
             self.all_pieces_simple = false;
@@ -464,17 +469,33 @@ impl<'a, 'b> Context<'a, 'b> {
         let prec = self.trans_count(arg.format.precision);
         let width = self.trans_count(arg.format.width);
         let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "FormatSpec"));
-        let fmt = self.ecx.expr_struct(sp, path, vec!(
-            self.ecx.field_imm(sp, self.ecx.ident_of("fill"), fill),
-            self.ecx.field_imm(sp, self.ecx.ident_of("align"), align),
-            self.ecx.field_imm(sp, self.ecx.ident_of("flags"), flags),
-            self.ecx.field_imm(sp, self.ecx.ident_of("precision"), prec),
-            self.ecx.field_imm(sp, self.ecx.ident_of("width"), width)));
+        let fmt =
+            self.ecx.expr_struct(sp,
+                                 path,
+                                 vec![self.ecx
+                                          .field_imm(sp, self.ecx.ident_of("fill"), fill),
+                                      self.ecx.field_imm(sp,
+                                                         self.ecx.ident_of("align"),
+                                                         align),
+                                      self.ecx.field_imm(sp,
+                                                         self.ecx.ident_of("flags"),
+                                                         flags),
+                                      self.ecx.field_imm(sp,
+                                                         self.ecx.ident_of("precision"),
+                                                         prec),
+                                      self.ecx.field_imm(sp,
+                                                         self.ecx.ident_of("width"),
+                                                         width)]);
 
         let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "Argument"));
-        Some(self.ecx.expr_struct(sp, path, vec!(
-            self.ecx.field_imm(sp, self.ecx.ident_of("position"), pos),
-            self.ecx.field_imm(sp, self.ecx.ident_of("format"), fmt))))
+        Some(self.ecx.expr_struct(sp,
+                                  path,
+                                  vec![self.ecx.field_imm(sp,
+                                                          self.ecx.ident_of("position"),
+                                                          pos),
+                                       self.ecx.field_imm(sp,
+                                                          self.ecx.ident_of("format"),
+                                                          fmt)]))
            }
        }
    }
@@ -486,9 +507,9 @@ impl<'a, 'b> Context<'a, 'b> {
                     -> P<ast::Expr> {
         let sp = piece_ty.span;
         let ty = ecx.ty_rptr(sp,
                              ecx.ty(sp, ast::TyKind::Vec(piece_ty)),
                              Some(ecx.lifetime(sp, keywords::StaticLifetime.name())),
                              ast::Mutability::Immutable);
         let slice = ecx.expr_vec_slice(sp, pieces);
         // static instead of const to speed up codegen by not requiring this to be inlined
         let st = ast::ItemKind::Static(ty, ast::Mutability::Immutable, slice);
@@ -516,15 +537,11 @@ impl<'a, 'b> Context<'a, 'b> {
         // First, build up the static array which will become our precompiled
         // format "string"
         let static_lifetime = self.ecx.lifetime(self.fmtsp, keywords::StaticLifetime.name());
-        let piece_ty = self.ecx.ty_rptr(
-            self.fmtsp,
-            self.ecx.ty_ident(self.fmtsp, self.ecx.ident_of("str")),
-            Some(static_lifetime),
-            ast::Mutability::Immutable);
-        let pieces = Context::static_array(self.ecx,
-                                           "__STATIC_FMTSTR",
-                                           piece_ty,
-                                           self.str_pieces);
+        let piece_ty = self.ecx.ty_rptr(self.fmtsp,
+                                        self.ecx.ty_ident(self.fmtsp, self.ecx.ident_of("str")),
+                                        Some(static_lifetime),
+                                        ast::Mutability::Immutable);
+        let pieces = Context::static_array(self.ecx, "__STATIC_FMTSTR", piece_ty, self.str_pieces);
 
         // Before consuming the expressions, we have to remember spans for
         // count arguments as they are now generated separate from other
@@ -542,7 +559,10 @@ impl<'a, 'b> Context<'a, 'b> {
             let name = self.ecx.ident_of(&format!("__arg{}", i));
             pats.push(self.ecx.pat_ident(DUMMY_SP, name));
             for ref arg_ty in self.arg_unique_types[i].iter() {
-                locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty,
+                locals.push(Context::format_arg(self.ecx,
+                                                self.macsp,
+                                                e.span,
+                                                arg_ty,
                                                 self.ecx.expr_ident(e.span, name)));
             }
             heads.push(self.ecx.expr_addr_of(e.span, e));
@@ -556,7 +576,10 @@ impl<'a, 'b> Context<'a, 'b> {
                 Exact(i) => spans_pos[i],
                 _ => panic!("should never happen"),
             };
-            counts.push(Context::format_arg(self.ecx, self.macsp, span, &Count,
+            counts.push(Context::format_arg(self.ecx,
+                                            self.macsp,
+                                            span,
+                                            &Count,
                                             self.ecx.expr_ident(span, name)));
         }
 
@@ -593,9 +616,9 @@ impl<'a, 'b> Context<'a, 'b> {
         // But the nested match expression is proved to perform not as well
         // as series of let's; the first approach does.
         let pat = self.ecx.pat_tuple(self.fmtsp, pats);
-        let arm = self.ecx.arm(self.fmtsp, vec!(pat), args_array);
+        let arm = self.ecx.arm(self.fmtsp, vec![pat], args_array);
         let head = self.ecx.expr(self.fmtsp, ast::ExprKind::Tup(heads));
-        let result = self.ecx.expr_match(self.fmtsp, head, vec!(arm));
+        let result = self.ecx.expr_match(self.fmtsp, head, vec![arm]);
 
         let args_slice = self.ecx.expr_addr_of(self.fmtsp, result);
 
@@ -605,13 +628,9 @@ impl<'a, 'b> Context<'a, 'b> {
         } else {
             // Build up the static array which will store our precompiled
             // nonstandard placeholders, if there are any.
-            let piece_ty = self.ecx.ty_path(self.ecx.path_global(
-                self.macsp,
-                Context::rtpath(self.ecx, "Argument")));
-            let fmt = Context::static_array(self.ecx,
-                                            "__STATIC_FMTARGS",
-                                            piece_ty,
-                                            self.pieces);
+            let piece_ty = self.ecx
+                .ty_path(self.ecx.path_global(self.macsp, Context::rtpath(self.ecx, "Argument")));
+            let fmt = Context::static_array(self.ecx, "__STATIC_FMTARGS", piece_ty, self.pieces);
 
             ("new_v1_formatted", vec![pieces, args_slice, fmt])
         };
@@ -620,13 +639,16 @@ impl<'a, 'b> Context<'a, 'b> {
         self.ecx.expr_call_global(self.macsp, path, fn_args)
     }
 
-    fn format_arg(ecx: &ExtCtxt, macsp: Span, sp: Span,
-                  ty: &ArgumentType, arg: P<ast::Expr>)
+    fn format_arg(ecx: &ExtCtxt,
+                  macsp: Span,
+                  sp: Span,
+                  ty: &ArgumentType,
+                  arg: P<ast::Expr>)
                   -> P<ast::Expr> {
         let trait_ = match *ty {
             Placeholder(ref tyname) => {
                 match &tyname[..] {
                     "" => "Display",
                     "?" => "Debug",
                     "e" => "LowerExp",
                     "E" => "UpperExp",
@@ -636,16 +658,14 @@ impl<'a, 'b> Context<'a, 'b> {
                     "x" => "LowerHex",
                     "X" => "UpperHex",
                     _ => {
-                        ecx.span_err(sp,
-                                     &format!("unknown format trait `{}`",
-                                              *tyname));
+                        ecx.span_err(sp, &format!("unknown format trait `{}`", *tyname));
                         "Dummy"
                     }
                 }
             }
             Count => {
                 let path = ecx.std_path(&["fmt", "ArgumentV1", "from_usize"]);
-                return ecx.expr_call_global(macsp, path, vec![arg])
+                return ecx.expr_call_global(macsp, path, vec![arg]);
             }
         };
 
@@ -656,22 +676,23 @@ impl<'a, 'b> Context<'a, 'b> {
     }
 }
 
-pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt, sp: Span,
+pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt,
+                               sp: Span,
                                tts: &[tokenstream::TokenTree])
-                               -> Box<base::MacResult+'cx> {
+                               -> Box<base::MacResult + 'cx> {
 
     match parse_args(ecx, sp, tts) {
         Some((efmt, args, names)) => {
-            MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt,
-                                                        args, names))
+            MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names))
         }
-        None => DummyResult::expr(sp)
+        None => DummyResult::expr(sp),
     }
 }
 
 /// Take the various parts of `format_args!(efmt, args..., name=names...)`
 /// and construct the appropriate formatting expression.
-pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
+pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt,
+                                    sp: Span,
                                     efmt: P<ast::Expr>,
                                     args: Vec<P<ast::Expr>>,
                                     names: HashMap<String, usize>)
@@ -704,11 +725,9 @@
         macsp: macsp,
         fmtsp: efmt.span,
     };
-    let fmt = match expr_to_string(cx.ecx,
-                                   efmt,
-                                   "format argument must be a string literal.") {
+    let fmt = match expr_to_string(cx.ecx, efmt, "format argument must be a string literal.") {
         Some((fmt, _)) => fmt,
-        None => return DummyResult::raw_expr(sp)
+        None => return DummyResult::raw_expr(sp),
     };
 
     let mut parser = parse::Parser::new(&fmt);
@@ -717,12 +736,14 @@
     loop {
         match parser.next() {
             Some(mut piece) => {
-                if !parser.errors.is_empty() { break }
+                if !parser.errors.is_empty() {
+                    break;
+                }
                 cx.verify_piece(&piece);
                 cx.resolve_name_inplace(&mut piece);
                 pieces.push(piece);
             }
-            None => break
+            None => break,
         }
     }
 
@@ -738,8 +759,8 @@
     }
 
     if !parser.errors.is_empty() {
-        cx.ecx.span_err(cx.fmtsp, &format!("invalid format string: {}",
-                                           parser.errors.remove(0)));
+        cx.ecx.span_err(cx.fmtsp,
+                        &format!("invalid format string: {}", parser.errors.remove(0)));
         return DummyResult::raw_expr(sp);
     }
     if !cx.literal.is_empty() {
@@ -23,7 +23,8 @@
 #![feature(staged_api)]
 
 extern crate fmt_macros;
-#[macro_use] extern crate log;
+#[macro_use]
+extern crate log;
 #[macro_use]
 extern crate syntax;
 extern crate syntax_pos;
@@ -52,16 +53,13 @@ pub fn register_builtins(env: &mut SyntaxEnv) {
         NormalTT(Box::new(f), None, false)
     }
 
-    env.insert(intern("asm"),
-               builtin_normal_expander(asm::expand_asm));
-    env.insert(intern("cfg"),
-               builtin_normal_expander(cfg::expand_cfg));
+    env.insert(intern("asm"), builtin_normal_expander(asm::expand_asm));
+    env.insert(intern("cfg"), builtin_normal_expander(cfg::expand_cfg));
     env.insert(intern("concat"),
                builtin_normal_expander(concat::expand_syntax_ext));
     env.insert(intern("concat_idents"),
               builtin_normal_expander(concat_idents::expand_syntax_ext));
-    env.insert(intern("env"),
-               builtin_normal_expander(env::expand_env));
+    env.insert(intern("env"), builtin_normal_expander(env::expand_env));
     env.insert(intern("option_env"),
               builtin_normal_expander(env::expand_option_env));
     env.insert(intern("format_args"),
@@ -17,7 +17,7 @@ use syntax_pos;
 pub fn expand_syntax_ext<'cx>(cx: &'cx mut base::ExtCtxt,
                               sp: syntax_pos::Span,
                               tts: &[tokenstream::TokenTree])
-                              -> Box<base::MacResult+'cx> {
+                              -> Box<base::MacResult + 'cx> {
     if !cx.ecfg.enable_log_syntax() {
         feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
                                        "log_syntax",
@@ -18,7 +18,7 @@ use syntax::tokenstream::TokenTree;
 pub fn expand_trace_macros(cx: &mut ExtCtxt,
                            sp: Span,
                            tt: &[TokenTree])
-                           -> Box<base::MacResult+'static> {
+                           -> Box<base::MacResult + 'static> {
     if !cx.ecfg.enable_trace_macros() {
         feature_gate::emit_feature_err(&cx.parse_sess.span_diagnostic,
                                        "trace_macros",