
mv compiler to compiler/

Authored by mark on 2020-08-27 22:58:48 -05:00; committed by Vadim Petrochenkov
parent db534b3ac2
commit 9e5f7d5631
1686 changed files with 941 additions and 1051 deletions


@@ -0,0 +1,632 @@
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::{Applicability, DiagnosticBuilder};
use rustc_expand::base::{self, *};
use rustc_parse::parser::Parser;
use rustc_parse_format as parse;
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::{InnerSpan, Span};
struct AsmArgs {
templates: Vec<P<ast::Expr>>,
operands: Vec<(ast::InlineAsmOperand, Span)>,
named_args: FxHashMap<Symbol, usize>,
reg_args: FxHashSet<usize>,
options: ast::InlineAsmOptions,
options_spans: Vec<Span>,
}
fn parse_args<'a>(
ecx: &mut ExtCtxt<'a>,
sp: Span,
tts: TokenStream,
) -> Result<AsmArgs, DiagnosticBuilder<'a>> {
let mut p = ecx.new_parser_from_tts(tts);
if p.token == token::Eof {
return Err(ecx.struct_span_err(sp, "requires at least a template string argument"));
}
// Detect use of the legacy llvm_asm! syntax (which used to be called asm!)
if p.look_ahead(1, |t| *t == token::Colon || *t == token::ModSep) {
let mut err =
ecx.struct_span_err(sp, "the legacy LLVM-style asm! syntax is no longer supported");
err.note("consider migrating to the new asm! syntax specified in RFC 2873");
err.note("alternatively, switch to llvm_asm! to keep your code working as it is");
// Find the span of the "asm!" so that we can offer an automatic suggestion
let asm_span = sp.from_inner(InnerSpan::new(0, 4));
if let Ok(s) = ecx.source_map().span_to_snippet(asm_span) {
if s == "asm!" {
err.span_suggestion(
asm_span,
"replace with",
"llvm_asm!".into(),
Applicability::MachineApplicable,
);
}
}
return Err(err);
}
let first_template = p.parse_expr()?;
let mut args = AsmArgs {
templates: vec![first_template],
operands: vec![],
named_args: FxHashMap::default(),
reg_args: FxHashSet::default(),
options: ast::InlineAsmOptions::empty(),
options_spans: vec![],
};
let mut allow_templates = true;
while p.token != token::Eof {
if !p.eat(&token::Comma) {
if allow_templates {
// After a template string, we always expect *only* a comma...
let mut err = ecx.struct_span_err(p.token.span, "expected token: `,`");
err.span_label(p.token.span, "expected `,`");
p.maybe_annotate_with_ascription(&mut err, false);
return Err(err);
} else {
// ...after that delegate to `expect` to also include the other expected tokens.
return Err(p.expect(&token::Comma).err().unwrap());
}
}
if p.token == token::Eof {
break;
} // accept trailing commas
// Parse options
if p.eat(&token::Ident(sym::options, false)) {
parse_options(&mut p, &mut args)?;
allow_templates = false;
continue;
}
let span_start = p.token.span;
// Parse operand names
let name = if p.token.is_ident() && p.look_ahead(1, |t| *t == token::Eq) {
let (ident, _) = p.token.ident().unwrap();
p.bump();
p.expect(&token::Eq)?;
allow_templates = false;
Some(ident.name)
} else {
None
};
let mut explicit_reg = false;
let op = if p.eat(&token::Ident(kw::In, false)) {
let reg = parse_reg(&mut p, &mut explicit_reg)?;
let expr = p.parse_expr()?;
ast::InlineAsmOperand::In { reg, expr }
} else if p.eat(&token::Ident(sym::out, false)) {
let reg = parse_reg(&mut p, &mut explicit_reg)?;
let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::Out { reg, expr, late: false }
} else if p.eat(&token::Ident(sym::lateout, false)) {
let reg = parse_reg(&mut p, &mut explicit_reg)?;
let expr = if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::Out { reg, expr, late: true }
} else if p.eat(&token::Ident(sym::inout, false)) {
let reg = parse_reg(&mut p, &mut explicit_reg)?;
let expr = p.parse_expr()?;
if p.eat(&token::FatArrow) {
let out_expr =
if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: false }
} else {
ast::InlineAsmOperand::InOut { reg, expr, late: false }
}
} else if p.eat(&token::Ident(sym::inlateout, false)) {
let reg = parse_reg(&mut p, &mut explicit_reg)?;
let expr = p.parse_expr()?;
if p.eat(&token::FatArrow) {
let out_expr =
if p.eat_keyword(kw::Underscore) { None } else { Some(p.parse_expr()?) };
ast::InlineAsmOperand::SplitInOut { reg, in_expr: expr, out_expr, late: true }
} else {
ast::InlineAsmOperand::InOut { reg, expr, late: true }
}
} else if p.eat(&token::Ident(kw::Const, false)) {
let expr = p.parse_expr()?;
ast::InlineAsmOperand::Const { expr }
} else if p.eat(&token::Ident(sym::sym, false)) {
let expr = p.parse_expr()?;
match expr.kind {
ast::ExprKind::Path(..) => {}
_ => {
let err = ecx
.struct_span_err(expr.span, "argument to `sym` must be a path expression");
return Err(err);
}
}
ast::InlineAsmOperand::Sym { expr }
} else if allow_templates {
let template = p.parse_expr()?;
// If it can't possibly expand to a string, provide diagnostics here to include other
// things it could have been.
match template.kind {
ast::ExprKind::Lit(ast::Lit { kind: ast::LitKind::Str(..), .. }) => {}
ast::ExprKind::MacCall(..) => {}
_ => {
let errstr = "expected operand, options, or additional template string";
let mut err = ecx.struct_span_err(template.span, errstr);
err.span_label(template.span, errstr);
return Err(err);
}
}
args.templates.push(template);
continue;
} else {
return Err(p.expect_one_of(&[], &[]).unwrap_err());
};
allow_templates = false;
let span = span_start.to(p.prev_token.span);
let slot = args.operands.len();
args.operands.push((op, span));
// Validate the order of named, positional & explicit register operands and options. We do
// this at the end once we have the full span of the argument available.
if !args.options_spans.is_empty() {
ecx.struct_span_err(span, "arguments are not allowed after options")
.span_labels(args.options_spans.clone(), "previous options")
.span_label(span, "argument")
.emit();
}
if explicit_reg {
if name.is_some() {
ecx.struct_span_err(span, "explicit register arguments cannot have names").emit();
}
args.reg_args.insert(slot);
} else if let Some(name) = name {
if let Some(&prev) = args.named_args.get(&name) {
ecx.struct_span_err(span, &format!("duplicate argument named `{}`", name))
.span_label(args.operands[prev].1, "previously here")
.span_label(span, "duplicate argument")
.emit();
continue;
}
if !args.reg_args.is_empty() {
let mut err = ecx.struct_span_err(
span,
"named arguments cannot follow explicit register arguments",
);
err.span_label(span, "named argument");
for pos in &args.reg_args {
err.span_label(args.operands[*pos].1, "explicit register argument");
}
err.emit();
}
args.named_args.insert(name, slot);
} else {
if !args.named_args.is_empty() || !args.reg_args.is_empty() {
let mut err = ecx.struct_span_err(
span,
"positional arguments cannot follow named arguments \
or explicit register arguments",
);
err.span_label(span, "positional argument");
for pos in args.named_args.values() {
err.span_label(args.operands[*pos].1, "named argument");
}
for pos in &args.reg_args {
err.span_label(args.operands[*pos].1, "explicit register argument");
}
err.emit();
}
}
}
if args.options.contains(ast::InlineAsmOptions::NOMEM)
&& args.options.contains(ast::InlineAsmOptions::READONLY)
{
let spans = args.options_spans.clone();
ecx.struct_span_err(spans, "the `nomem` and `readonly` options are mutually exclusive")
.emit();
}
if args.options.contains(ast::InlineAsmOptions::PURE)
&& args.options.contains(ast::InlineAsmOptions::NORETURN)
{
let spans = args.options_spans.clone();
ecx.struct_span_err(spans, "the `pure` and `noreturn` options are mutually exclusive")
.emit();
}
if args.options.contains(ast::InlineAsmOptions::PURE)
&& !args.options.intersects(ast::InlineAsmOptions::NOMEM | ast::InlineAsmOptions::READONLY)
{
let spans = args.options_spans.clone();
ecx.struct_span_err(
spans,
"the `pure` option must be combined with either `nomem` or `readonly`",
)
.emit();
}
let mut have_real_output = false;
let mut outputs_sp = vec![];
for (op, op_sp) in &args.operands {
match op {
ast::InlineAsmOperand::Out { expr, .. }
| ast::InlineAsmOperand::SplitInOut { out_expr: expr, .. } => {
outputs_sp.push(*op_sp);
have_real_output |= expr.is_some();
}
ast::InlineAsmOperand::InOut { .. } => {
outputs_sp.push(*op_sp);
have_real_output = true;
}
_ => {}
}
}
if args.options.contains(ast::InlineAsmOptions::PURE) && !have_real_output {
ecx.struct_span_err(
args.options_spans.clone(),
"asm with `pure` option must have at least one output",
)
.emit();
}
if args.options.contains(ast::InlineAsmOptions::NORETURN) && !outputs_sp.is_empty() {
let err = ecx
.struct_span_err(outputs_sp, "asm outputs are not allowed with the `noreturn` option");
// Bail out now since this is likely to confuse MIR
return Err(err);
}
Ok(args)
}
/// Report a duplicate option error.
///
/// This function must be called immediately after the option token is parsed.
/// Otherwise, the suggestion will be incorrect.
fn err_duplicate_option<'a>(p: &mut Parser<'a>, symbol: Symbol, span: Span) {
let mut err = p
.sess
.span_diagnostic
.struct_span_err(span, &format!("the `{}` option was already provided", symbol));
err.span_label(span, "this option was already provided");
// Tool-only output
let mut full_span = span;
if p.token.kind == token::Comma {
full_span = full_span.to(p.token.span);
}
err.tool_only_span_suggestion(
full_span,
"remove this option",
String::new(),
Applicability::MachineApplicable,
);
err.emit();
}
/// Try to set the provided option in the provided `AsmArgs`.
/// If it is already set, report a duplicate option error.
///
/// This function must be called immediately after the option token is parsed.
/// Otherwise, the error will not point to the correct spot.
fn try_set_option<'a>(
p: &mut Parser<'a>,
args: &mut AsmArgs,
symbol: Symbol,
option: ast::InlineAsmOptions,
) {
if !args.options.contains(option) {
args.options |= option;
} else {
err_duplicate_option(p, symbol, p.prev_token.span);
}
}
fn parse_options<'a>(p: &mut Parser<'a>, args: &mut AsmArgs) -> Result<(), DiagnosticBuilder<'a>> {
let span_start = p.prev_token.span;
p.expect(&token::OpenDelim(token::DelimToken::Paren))?;
while !p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
if p.eat(&token::Ident(sym::pure, false)) {
try_set_option(p, args, sym::pure, ast::InlineAsmOptions::PURE);
} else if p.eat(&token::Ident(sym::nomem, false)) {
try_set_option(p, args, sym::nomem, ast::InlineAsmOptions::NOMEM);
} else if p.eat(&token::Ident(sym::readonly, false)) {
try_set_option(p, args, sym::readonly, ast::InlineAsmOptions::READONLY);
} else if p.eat(&token::Ident(sym::preserves_flags, false)) {
try_set_option(p, args, sym::preserves_flags, ast::InlineAsmOptions::PRESERVES_FLAGS);
} else if p.eat(&token::Ident(sym::noreturn, false)) {
try_set_option(p, args, sym::noreturn, ast::InlineAsmOptions::NORETURN);
} else if p.eat(&token::Ident(sym::nostack, false)) {
try_set_option(p, args, sym::nostack, ast::InlineAsmOptions::NOSTACK);
} else {
p.expect(&token::Ident(sym::att_syntax, false))?;
try_set_option(p, args, sym::att_syntax, ast::InlineAsmOptions::ATT_SYNTAX);
}
// Allow trailing commas
if p.eat(&token::CloseDelim(token::DelimToken::Paren)) {
break;
}
p.expect(&token::Comma)?;
}
let new_span = span_start.to(p.prev_token.span);
args.options_spans.push(new_span);
Ok(())
}
fn parse_reg<'a>(
p: &mut Parser<'a>,
explicit_reg: &mut bool,
) -> Result<ast::InlineAsmRegOrRegClass, DiagnosticBuilder<'a>> {
p.expect(&token::OpenDelim(token::DelimToken::Paren))?;
let result = match p.token.kind {
token::Ident(name, false) => ast::InlineAsmRegOrRegClass::RegClass(name),
token::Literal(token::Lit { kind: token::LitKind::Str, symbol, suffix: _ }) => {
*explicit_reg = true;
ast::InlineAsmRegOrRegClass::Reg(symbol)
}
_ => {
return Err(
p.struct_span_err(p.token.span, "expected register class or explicit register")
);
}
};
p.bump();
p.expect(&token::CloseDelim(token::DelimToken::Paren))?;
Ok(result)
}
fn expand_preparsed_asm(ecx: &mut ExtCtxt<'_>, sp: Span, args: AsmArgs) -> P<ast::Expr> {
let mut template = vec![];
// Register operands are implicitly used since they are not allowed to be
// referenced in the template string.
let mut used = vec![false; args.operands.len()];
for pos in &args.reg_args {
used[*pos] = true;
}
let named_pos: FxHashMap<usize, Symbol> =
args.named_args.iter().map(|(&sym, &idx)| (idx, sym)).collect();
let mut line_spans = Vec::with_capacity(args.templates.len());
let mut curarg = 0;
for template_expr in args.templates.into_iter() {
if !template.is_empty() {
template.push(ast::InlineAsmTemplatePiece::String("\n".to_string()));
}
let msg = "asm template must be a string literal";
let template_sp = template_expr.span;
let (template_str, template_style, template_span) =
match expr_to_spanned_string(ecx, template_expr, msg) {
Ok(template_part) => template_part,
Err(err) => {
if let Some(mut err) = err {
err.emit();
}
return DummyResult::raw_expr(sp, true);
}
};
let str_style = match template_style {
ast::StrStyle::Cooked => None,
ast::StrStyle::Raw(raw) => Some(raw as usize),
};
let template_str = &template_str.as_str();
let template_snippet = ecx.source_map().span_to_snippet(template_sp).ok();
let mut parser = parse::Parser::new(
template_str,
str_style,
template_snippet,
false,
parse::ParseMode::InlineAsm,
);
parser.curarg = curarg;
let mut unverified_pieces = Vec::new();
while let Some(piece) = parser.next() {
if !parser.errors.is_empty() {
break;
} else {
unverified_pieces.push(piece);
}
}
if !parser.errors.is_empty() {
let err = parser.errors.remove(0);
let err_sp = template_span.from_inner(err.span);
let msg = &format!("invalid asm template string: {}", err.description);
let mut e = ecx.struct_span_err(err_sp, msg);
e.span_label(err_sp, err.label + " in asm template string");
if let Some(note) = err.note {
e.note(&note);
}
if let Some((label, span)) = err.secondary_label {
let err_sp = template_span.from_inner(span);
e.span_label(err_sp, label);
}
e.emit();
return DummyResult::raw_expr(sp, true);
}
curarg = parser.curarg;
let mut arg_spans = parser.arg_places.iter().map(|span| template_span.from_inner(*span));
for piece in unverified_pieces {
match piece {
parse::Piece::String(s) => {
template.push(ast::InlineAsmTemplatePiece::String(s.to_string()))
}
parse::Piece::NextArgument(arg) => {
let span = arg_spans.next().unwrap_or(template_sp);
let operand_idx = match arg.position {
parse::ArgumentIs(idx) | parse::ArgumentImplicitlyIs(idx) => {
if idx >= args.operands.len()
|| named_pos.contains_key(&idx)
|| args.reg_args.contains(&idx)
{
let msg = format!("invalid reference to argument at index {}", idx);
let mut err = ecx.struct_span_err(span, &msg);
err.span_label(span, "from here");
let positional_args = args.operands.len()
- args.named_args.len()
- args.reg_args.len();
let positional = if positional_args != args.operands.len() {
"positional "
} else {
""
};
let msg = match positional_args {
0 => format!("no {}arguments were given", positional),
1 => format!("there is 1 {}argument", positional),
x => format!("there are {} {}arguments", x, positional),
};
err.note(&msg);
if named_pos.contains_key(&idx) {
err.span_label(args.operands[idx].1, "named argument");
err.span_note(
args.operands[idx].1,
"named arguments cannot be referenced by position",
);
} else if args.reg_args.contains(&idx) {
err.span_label(
args.operands[idx].1,
"explicit register argument",
);
err.span_note(
args.operands[idx].1,
"explicit register arguments cannot be used in the asm template",
);
}
err.emit();
None
} else {
Some(idx)
}
}
parse::ArgumentNamed(name) => match args.named_args.get(&name) {
Some(&idx) => Some(idx),
None => {
let msg = format!("there is no argument named `{}`", name);
ecx.struct_span_err(span, &msg[..]).emit();
None
}
},
};
let mut chars = arg.format.ty.chars();
let mut modifier = chars.next();
if chars.next().is_some() {
let span = arg
.format
.ty_span
.map(|sp| template_sp.from_inner(sp))
.unwrap_or(template_sp);
ecx.struct_span_err(
span,
"asm template modifier must be a single character",
)
.emit();
modifier = None;
}
if let Some(operand_idx) = operand_idx {
used[operand_idx] = true;
template.push(ast::InlineAsmTemplatePiece::Placeholder {
operand_idx,
modifier,
span,
});
}
}
}
}
if parser.line_spans.is_empty() {
let template_num_lines = 1 + template_str.matches('\n').count();
line_spans.extend(std::iter::repeat(template_sp).take(template_num_lines));
} else {
line_spans.extend(parser.line_spans.iter().map(|span| template_span.from_inner(*span)));
};
}
let mut unused_operands = vec![];
let mut help_str = String::new();
for (idx, used) in used.into_iter().enumerate() {
if !used {
let msg = if let Some(sym) = named_pos.get(&idx) {
help_str.push_str(&format!(" {{{}}}", sym));
"named argument never used"
} else {
help_str.push_str(&format!(" {{{}}}", idx));
"argument never used"
};
unused_operands.push((args.operands[idx].1, msg));
}
}
match unused_operands.len() {
0 => {}
1 => {
let (sp, msg) = unused_operands.into_iter().next().unwrap();
let mut err = ecx.struct_span_err(sp, msg);
err.span_label(sp, msg);
err.help(&format!(
"if this argument is intentionally unused, \
consider using it in an asm comment: `\"/*{} */\"`",
help_str
));
err.emit();
}
_ => {
let mut err = ecx.struct_span_err(
unused_operands.iter().map(|&(sp, _)| sp).collect::<Vec<Span>>(),
"multiple unused asm arguments",
);
for (sp, msg) in unused_operands {
err.span_label(sp, msg);
}
err.help(&format!(
"if these arguments are intentionally unused, \
consider using them in an asm comment: `\"/*{} */\"`",
help_str
));
err.emit();
}
}
let inline_asm =
ast::InlineAsm { template, operands: args.operands, options: args.options, line_spans };
P(ast::Expr {
id: ast::DUMMY_NODE_ID,
kind: ast::ExprKind::InlineAsm(P(inline_asm)),
span: sp,
attrs: ast::AttrVec::new(),
tokens: None,
})
}
pub fn expand_asm<'cx>(
ecx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
match parse_args(ecx, sp, tts) {
Ok(args) => MacEager::expr(expand_preparsed_asm(ecx, sp, args)),
Err(mut err) => {
err.emit();
DummyResult::any(sp)
}
}
}
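
For reference, a minimal invocation sketch for the parser above, assuming x86_64 and the nightly `asm!` syntax of this era (RFC 2873); the function name and register choices are illustrative only:

#![feature(asm)]

// Copy a value through a register: template first, then operands, then options,
// which is the argument order `parse_args` enforces.
unsafe fn copy_through_reg(x: u64) -> u64 {
    let y: u64;
    asm!("mov {res}, {val}", res = out(reg) y, val = in(reg) x, options(nostack, nomem));
    y
}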


@@ -0,0 +1,133 @@
use rustc_errors::{Applicability, DiagnosticBuilder};
use rustc_ast::ptr::P;
use rustc_ast::token::{self, TokenKind};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_ast::{self as ast, *};
use rustc_ast_pretty::pprust;
use rustc_expand::base::*;
use rustc_parse::parser::Parser;
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
pub fn expand_assert<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn MacResult + 'cx> {
let Assert { cond_expr, custom_message } = match parse_assert(cx, sp, tts) {
Ok(assert) => assert,
Err(mut err) => {
err.emit();
return DummyResult::any(sp);
}
};
// `core::panic` and `std::panic` are different macros, so we use call-site
// context to pick up whichever is currently in scope.
let sp = cx.with_call_site_ctxt(sp);
let tokens = custom_message.unwrap_or_else(|| {
TokenStream::from(TokenTree::token(
TokenKind::lit(
token::Str,
Symbol::intern(&format!(
"assertion failed: {}",
pprust::expr_to_string(&cond_expr).escape_debug()
)),
None,
),
DUMMY_SP,
))
});
let args = P(MacArgs::Delimited(DelimSpan::from_single(sp), MacDelimiter::Parenthesis, tokens));
let panic_call = MacCall {
path: Path::from_ident(Ident::new(sym::panic, sp)),
args,
prior_type_ascription: None,
};
let if_expr = cx.expr_if(
sp,
cx.expr(sp, ExprKind::Unary(UnOp::Not, cond_expr)),
cx.expr(sp, ExprKind::MacCall(panic_call)),
None,
);
MacEager::expr(if_expr)
}
struct Assert {
cond_expr: P<ast::Expr>,
custom_message: Option<TokenStream>,
}
fn parse_assert<'a>(
cx: &mut ExtCtxt<'a>,
sp: Span,
stream: TokenStream,
) -> Result<Assert, DiagnosticBuilder<'a>> {
let mut parser = cx.new_parser_from_tts(stream);
if parser.token == token::Eof {
let mut err = cx.struct_span_err(sp, "macro requires a boolean expression as an argument");
err.span_label(sp, "boolean expression required");
return Err(err);
}
let cond_expr = parser.parse_expr()?;
// Some crates use the `assert!` macro in the following form (note extra semicolon):
//
// assert!(
// my_function();
// );
//
// Emit an error about semicolon and suggest removing it.
if parser.token == token::Semi {
let mut err = cx.struct_span_err(sp, "macro requires an expression as an argument");
err.span_suggestion(
parser.token.span,
"try removing semicolon",
String::new(),
Applicability::MaybeIncorrect,
);
err.emit();
parser.bump();
}
// Some crates use the `assert!` macro in the following form (note missing comma before
// message):
//
// assert!(true "error message");
//
// Emit an error and suggest inserting a comma.
let custom_message =
if let token::Literal(token::Lit { kind: token::Str, .. }) = parser.token.kind {
let mut err = cx.struct_span_err(parser.token.span, "unexpected string literal");
let comma_span = parser.prev_token.span.shrink_to_hi();
err.span_suggestion_short(
comma_span,
"try adding a comma",
", ".to_string(),
Applicability::MaybeIncorrect,
);
err.emit();
parse_custom_message(&mut parser)
} else if parser.eat(&token::Comma) {
parse_custom_message(&mut parser)
} else {
None
};
if parser.token != token::Eof {
parser.expect_one_of(&[], &[])?;
unreachable!();
}
Ok(Assert { cond_expr, custom_message })
}
fn parse_custom_message(parser: &mut Parser<'_>) -> Option<TokenStream> {
let ts = parser.parse_tokens();
if !ts.is_empty() { Some(ts) } else { None }
}
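
A sketch of the expansion produced above: the condition is negated and wrapped in an `if`, and any custom-message tokens are forwarded verbatim to `panic!`:

fn demo(x: u32, y: u32, cond: bool) {
    // `assert!(x + y == 5)` expands to roughly:
    if !(x + y == 5) {
        panic!("assertion failed: x + y == 5")
    }
    // `assert!(cond, "x was {}", x)` expands to roughly:
    if !cond {
        panic!("x was {}", x)
    }
}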


@@ -0,0 +1,54 @@
//! The compiler code necessary to support the cfg! extension, which expands to
//! a literal `true` or `false` based on whether the given cfg matches the
//! current compilation environment.
use rustc_ast as ast;
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_attr as attr;
use rustc_errors::DiagnosticBuilder;
use rustc_expand::base::{self, *};
use rustc_span::Span;
pub fn expand_cfg(
cx: &mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_def_site_ctxt(sp);
match parse_cfg(cx, sp, tts) {
Ok(cfg) => {
let matches_cfg = attr::cfg_matches(&cfg, &cx.sess.parse_sess, cx.ecfg.features);
MacEager::expr(cx.expr_bool(sp, matches_cfg))
}
Err(mut err) => {
err.emit();
DummyResult::any(sp)
}
}
}
fn parse_cfg<'a>(
cx: &mut ExtCtxt<'a>,
sp: Span,
tts: TokenStream,
) -> Result<ast::MetaItem, DiagnosticBuilder<'a>> {
let mut p = cx.new_parser_from_tts(tts);
if p.token == token::Eof {
let mut err = cx.struct_span_err(sp, "macro requires a cfg-pattern as an argument");
err.span_label(sp, "cfg-pattern required");
return Err(err);
}
let cfg = p.parse_meta_item()?;
let _ = p.eat(&token::Comma);
if !p.eat(&token::Eof) {
return Err(cx.struct_span_err(sp, "expected 1 cfg-pattern"));
}
Ok(cfg)
}
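
Usage sketch: because `cfg!` expands to a plain `true`/`false` literal, both branches below are type-checked regardless of the target:

fn path_separator() -> char {
    // expands to a boolean literal at compile time; no code is conditionally removed
    if cfg!(windows) { '\\' } else { '/' }
}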


@@ -0,0 +1,59 @@
//! Implementation of the `#[cfg_accessible(path)]` attribute macro.
use rustc_ast as ast;
use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, MultiItemModifier};
use rustc_feature::AttributeTemplate;
use rustc_parse::validate_attr;
use rustc_span::symbol::sym;
use rustc_span::Span;
crate struct Expander;
fn validate_input<'a>(ecx: &mut ExtCtxt<'_>, mi: &'a ast::MetaItem) -> Option<&'a ast::Path> {
match mi.meta_item_list() {
None => {}
Some([]) => ecx.span_err(mi.span, "`cfg_accessible` path is not specified"),
Some([_, .., l]) => ecx.span_err(l.span(), "multiple `cfg_accessible` paths are specified"),
Some([nmi]) => match nmi.meta_item() {
None => ecx.span_err(nmi.span(), "`cfg_accessible` path cannot be a literal"),
Some(mi) => {
if !mi.is_word() {
ecx.span_err(mi.span, "`cfg_accessible` path cannot accept arguments");
}
return Some(&mi.path);
}
},
}
None
}
impl MultiItemModifier for Expander {
fn expand(
&self,
ecx: &mut ExtCtxt<'_>,
_span: Span,
meta_item: &ast::MetaItem,
item: Annotatable,
) -> ExpandResult<Vec<Annotatable>, Annotatable> {
let template = AttributeTemplate { list: Some("path"), ..Default::default() };
let attr = &ecx.attribute(meta_item.clone());
validate_attr::check_builtin_attribute(
&ecx.sess.parse_sess,
attr,
sym::cfg_accessible,
template,
);
let path = match validate_input(ecx, meta_item) {
Some(path) => path,
None => return ExpandResult::Ready(Vec::new()),
};
let failure_msg = "cannot determine whether the path is accessible or not";
match ecx.resolver.cfg_accessible(ecx.current_expansion.id, path) {
Ok(true) => ExpandResult::Ready(vec![item]),
Ok(false) => ExpandResult::Ready(Vec::new()),
Err(_) => ExpandResult::Retry(item, failure_msg.into()),
}
}
}
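
A usage sketch, assuming the unstable `cfg_accessible` feature gate: the annotated item is kept only if the given path resolves in the current crate graph.

#![feature(cfg_accessible)]

// Kept only if `std::sync::Mutex` is accessible; otherwise the function is dropped.
#[cfg_accessible(std::sync::Mutex)]
fn uses_mutex() {}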


@@ -0,0 +1,35 @@
//! Attributes injected into the crate root from command line using `-Z crate-attr`.
use rustc_ast::attr::mk_attr;
use rustc_ast::token;
use rustc_ast::{self as ast, AttrItem, AttrStyle};
use rustc_session::parse::ParseSess;
use rustc_span::FileName;
pub fn inject(mut krate: ast::Crate, parse_sess: &ParseSess, attrs: &[String]) -> ast::Crate {
for raw_attr in attrs {
let mut parser = rustc_parse::new_parser_from_source_str(
parse_sess,
FileName::cli_crate_attr_source_code(&raw_attr),
raw_attr.clone(),
);
let start_span = parser.token.span;
let AttrItem { path, args } = match parser.parse_attr_item() {
Ok(ai) => ai,
Err(mut err) => {
err.emit();
continue;
}
};
let end_span = parser.token.span;
if parser.token != token::Eof {
parse_sess.span_diagnostic.span_err(start_span.to(end_span), "invalid crate attribute");
continue;
}
krate.attrs.push(mk_attr(AttrStyle::Inner, path, args, start_span.to(end_span)));
}
krate
}
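
For illustration only: a flag such as `-Z crate-attr=feature(asm)` makes the compiler behave as if the crate root began with the corresponding inner attribute, e.g.:

// injected at the crate root by `-Z crate-attr=feature(asm)` (hypothetical example)
#![feature(asm)]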


@@ -0,0 +1,20 @@
// The compiler code necessary to support the compile_error! extension.
use rustc_ast::tokenstream::TokenStream;
use rustc_expand::base::{self, *};
use rustc_span::Span;
pub fn expand_compile_error<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
let var = match get_single_str_from_tts(cx, sp, tts, "compile_error!") {
None => return DummyResult::any(sp),
Some(v) => v,
};
cx.span_err(sp, &var);
DummyResult::any(sp)
}
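
Usage sketch (the feature names are made up): the macro reports its string argument as an error at the call site, which is useful inside `cfg`'d or macro-generated code.

#[cfg(not(any(feature = "backend-a", feature = "backend-b")))]
compile_error!("either the `backend-a` or `backend-b` feature must be enabled");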


@@ -0,0 +1,65 @@
use rustc_ast as ast;
use rustc_ast::tokenstream::TokenStream;
use rustc_expand::base::{self, DummyResult};
use rustc_span::symbol::Symbol;
use std::string::String;
pub fn expand_concat(
cx: &mut base::ExtCtxt<'_>,
sp: rustc_span::Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let es = match base::get_exprs_from_tts(cx, sp, tts) {
Some(e) => e,
None => return DummyResult::any(sp),
};
let mut accumulator = String::new();
let mut missing_literal = vec![];
let mut has_errors = false;
for e in es {
match e.kind {
ast::ExprKind::Lit(ref lit) => match lit.kind {
ast::LitKind::Str(ref s, _) | ast::LitKind::Float(ref s, _) => {
accumulator.push_str(&s.as_str());
}
ast::LitKind::Char(c) => {
accumulator.push(c);
}
ast::LitKind::Int(
i,
ast::LitIntType::Unsigned(_)
| ast::LitIntType::Signed(_)
| ast::LitIntType::Unsuffixed,
) => {
accumulator.push_str(&i.to_string());
}
ast::LitKind::Bool(b) => {
accumulator.push_str(&b.to_string());
}
ast::LitKind::Byte(..) | ast::LitKind::ByteStr(..) => {
cx.span_err(e.span, "cannot concatenate a byte string literal");
}
ast::LitKind::Err(_) => {
has_errors = true;
}
},
ast::ExprKind::Err => {
has_errors = true;
}
_ => {
missing_literal.push(e.span);
}
}
}
if !missing_literal.is_empty() {
let mut err = cx.struct_span_err(missing_literal, "expected a literal");
err.note("only literals (like `\"foo\"`, `42` and `3.14`) can be passed to `concat!()`");
err.emit();
return DummyResult::any(sp);
} else if has_errors {
return DummyResult::any(sp);
}
let sp = cx.with_def_site_ctxt(sp);
base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&accumulator)))
}
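
Usage sketch covering the literal kinds accepted above (strings, chars, integers, floats, bools; byte strings are rejected):

const BANNER: &str = concat!("v", 1, '.', 2, " stable=", true);
// BANNER == "v1.2 stable=true"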


@@ -0,0 +1,69 @@
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token};
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_expand::base::{self, *};
use rustc_span::symbol::{Ident, Symbol};
use rustc_span::Span;
pub fn expand_concat_idents<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
if tts.is_empty() {
cx.span_err(sp, "concat_idents! takes 1 or more arguments.");
return DummyResult::any(sp);
}
let mut res_str = String::new();
for (i, e) in tts.into_trees().enumerate() {
if i & 1 == 1 {
match e {
TokenTree::Token(Token { kind: token::Comma, .. }) => {}
_ => {
cx.span_err(sp, "concat_idents! expecting comma.");
return DummyResult::any(sp);
}
}
} else {
match e {
TokenTree::Token(Token { kind: token::Ident(name, _), .. }) => {
res_str.push_str(&name.as_str())
}
_ => {
cx.span_err(sp, "concat_idents! requires ident args.");
return DummyResult::any(sp);
}
}
}
}
let ident = Ident::new(Symbol::intern(&res_str), cx.with_call_site_ctxt(sp));
struct ConcatIdentsResult {
ident: Ident,
}
impl base::MacResult for ConcatIdentsResult {
fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
Some(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
kind: ast::ExprKind::Path(None, ast::Path::from_ident(self.ident)),
span: self.ident.span,
attrs: ast::AttrVec::new(),
tokens: None,
}))
}
fn make_ty(self: Box<Self>) -> Option<P<ast::Ty>> {
Some(P(ast::Ty {
id: ast::DUMMY_NODE_ID,
kind: ast::TyKind::Path(None, ast::Path::from_ident(self.ident)),
span: self.ident.span,
}))
}
}
Box::new(ConcatIdentsResult { ident })
}
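
Usage sketch, assuming the nightly `concat_idents` feature: the result is a single identifier usable as an expression or a type, so it can refer to an existing item but cannot define a new one.

#![feature(concat_idents)]

fn foo_bar() -> u32 { 42 }

fn call_it() -> u32 {
    // expands to the identifier `foo_bar`
    concat_idents!(foo_, bar)()
}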


@@ -0,0 +1,29 @@
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::path_std;
use rustc_ast::MetaItem;
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::Span;
pub fn expand_deriving_copy(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
let trait_def = TraitDef {
span,
attributes: Vec::new(),
path: path_std!(marker::Copy),
additional_bounds: Vec::new(),
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: true,
methods: Vec::new(),
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, item, push);
}
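
Since `Copy` has no methods, the generated impl is empty; a standalone sketch of the expansion (paths abbreviated):

struct Point { x: i32, y: i32 }

// `#[derive(Copy, Clone)]` on `Point` expands to approximately:
impl ::core::marker::Copy for Point {}
impl ::core::clone::Clone for Point {
    fn clone(&self) -> Self { *self }
}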


@@ -0,0 +1,224 @@
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::path_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, GenericArg, Generics, ItemKind, MetaItem, VariantData};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::Span;
pub fn expand_deriving_clone(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
// check if we can use a short form
//
// the short form is `fn clone(&self) -> Self { *self }`
//
// we can use the short form if:
// - the item is Copy (unfortunately, all we can check is whether it's also deriving Copy)
// - there are no generic parameters (after specialization this limitation can be removed)
// if we used the short form with generics, we'd have to bound the generics with
// Clone + Copy, and then there'd be no Clone impl at all if the user fills in something
// that is Clone but not Copy. and until specialization we can't write both impls.
// - the item is a union with Copy fields
// Unions with generic parameters still can derive Clone because they require Copy
// for deriving, Clone alone is not enough.
// Whether Clone is implemented for fields is irrelevant, so we don't assert it.
let bounds;
let substructure;
let is_shallow;
match *item {
Annotatable::Item(ref annitem) => match annitem.kind {
ItemKind::Struct(_, Generics { ref params, .. })
| ItemKind::Enum(_, Generics { ref params, .. }) => {
let container_id = cx.current_expansion.id.expn_data().parent;
if cx.resolver.has_derive_copy(container_id)
&& !params.iter().any(|param| match param.kind {
ast::GenericParamKind::Type { .. } => true,
_ => false,
})
{
bounds = vec![];
is_shallow = true;
substructure = combine_substructure(Box::new(|c, s, sub| {
cs_clone_shallow("Clone", c, s, sub, false)
}));
} else {
bounds = vec![];
is_shallow = false;
substructure =
combine_substructure(Box::new(|c, s, sub| cs_clone("Clone", c, s, sub)));
}
}
ItemKind::Union(..) => {
bounds = vec![Literal(path_std!(marker::Copy))];
is_shallow = true;
substructure = combine_substructure(Box::new(|c, s, sub| {
cs_clone_shallow("Clone", c, s, sub, true)
}));
}
_ => {
bounds = vec![];
is_shallow = false;
substructure =
combine_substructure(Box::new(|c, s, sub| cs_clone("Clone", c, s, sub)));
}
},
_ => cx.span_bug(span, "`#[derive(Clone)]` on trait item or impl item"),
}
let inline = cx.meta_word(span, sym::inline);
let attrs = vec![cx.attribute(inline)];
let trait_def = TraitDef {
span,
attributes: Vec::new(),
path: path_std!(clone::Clone),
additional_bounds: bounds,
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: true,
methods: vec![MethodDef {
name: sym::clone,
generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
args: Vec::new(),
ret_ty: Self_,
attributes: attrs,
is_unsafe: false,
unify_fieldless_variants: false,
combine_substructure: substructure,
}],
associated_types: Vec::new(),
};
trait_def.expand_ext(cx, mitem, item, push, is_shallow)
}
fn cs_clone_shallow(
name: &str,
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
is_union: bool,
) -> P<Expr> {
fn assert_ty_bounds(
cx: &mut ExtCtxt<'_>,
stmts: &mut Vec<ast::Stmt>,
ty: P<ast::Ty>,
span: Span,
helper_name: &str,
) {
// Generate statement `let _: helper_name<ty>;`,
// set the expn ID so we can use the unstable struct.
let span = cx.with_def_site_ctxt(span);
let assert_path = cx.path_all(
span,
true,
cx.std_path(&[sym::clone, Symbol::intern(helper_name)]),
vec![GenericArg::Type(ty)],
);
stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path)));
}
fn process_variant(cx: &mut ExtCtxt<'_>, stmts: &mut Vec<ast::Stmt>, variant: &VariantData) {
for field in variant.fields() {
// let _: AssertParamIsClone<FieldTy>;
assert_ty_bounds(cx, stmts, field.ty.clone(), field.span, "AssertParamIsClone");
}
}
let mut stmts = Vec::new();
if is_union {
// let _: AssertParamIsCopy<Self>;
let self_ty = cx.ty_path(cx.path_ident(trait_span, Ident::with_dummy_span(kw::SelfUpper)));
assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy");
} else {
match *substr.fields {
StaticStruct(vdata, ..) => {
process_variant(cx, &mut stmts, vdata);
}
StaticEnum(enum_def, ..) => {
for variant in &enum_def.variants {
process_variant(cx, &mut stmts, &variant.data);
}
}
_ => cx.span_bug(
trait_span,
&format!(
"unexpected substructure in \
shallow `derive({})`",
name
),
),
}
}
stmts.push(cx.stmt_expr(cx.expr_deref(trait_span, cx.expr_self(trait_span))));
cx.expr_block(cx.block(trait_span, stmts))
}
fn cs_clone(
name: &str,
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
) -> P<Expr> {
let ctor_path;
let all_fields;
let fn_path = cx.std_path(&[sym::clone, sym::Clone, sym::clone]);
let subcall = |cx: &mut ExtCtxt<'_>, field: &FieldInfo<'_>| {
let args = vec![cx.expr_addr_of(field.span, field.self_.clone())];
cx.expr_call_global(field.span, fn_path.clone(), args)
};
let vdata;
match *substr.fields {
Struct(vdata_, ref af) => {
ctor_path = cx.path(trait_span, vec![substr.type_ident]);
all_fields = af;
vdata = vdata_;
}
EnumMatching(.., variant, ref af) => {
ctor_path = cx.path(trait_span, vec![substr.type_ident, variant.ident]);
all_fields = af;
vdata = &variant.data;
}
EnumNonMatchingCollapsed(..) => {
cx.span_bug(trait_span, &format!("non-matching enum variants in `derive({})`", name,))
}
StaticEnum(..) | StaticStruct(..) => {
cx.span_bug(trait_span, &format!("associated function in `derive({})`", name))
}
}
match *vdata {
VariantData::Struct(..) => {
let fields = all_fields
.iter()
.map(|field| {
let ident = match field.name {
Some(i) => i,
None => cx.span_bug(
trait_span,
&format!("unnamed field in normal struct in `derive({})`", name,),
),
};
let call = subcall(cx, field);
cx.field_imm(field.span, ident, call)
})
.collect::<Vec<_>>();
cx.expr_struct(trait_span, ctor_path, fields)
}
VariantData::Tuple(..) => {
let subcalls = all_fields.iter().map(|f| subcall(cx, f)).collect();
let path = cx.expr_path(ctor_path);
cx.expr_call(trait_span, path, subcalls)
}
VariantData::Unit(..) => cx.expr_path(ctor_path),
}
}
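
A sketch of the two expansion shapes discussed in the comments above: the shallow `*self` form used when the type also derives `Copy` and has no type parameters, and the field-by-field form otherwise (paths abbreviated; the real shallow expansion also inserts `AssertParamIsClone` type assertions):

#[derive(Copy)]
struct P { x: i32 }

// shallow form:
impl ::core::clone::Clone for P {
    #[inline]
    fn clone(&self) -> Self { *self }
}

struct Q { s: String }

// field-by-field form:
impl ::core::clone::Clone for Q {
    #[inline]
    fn clone(&self) -> Self {
        Q { s: ::core::clone::Clone::clone(&self.s) }
    }
}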


@@ -0,0 +1,98 @@
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::path_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, GenericArg, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
pub fn expand_deriving_eq(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
let inline = cx.meta_word(span, sym::inline);
let hidden = rustc_ast::attr::mk_nested_word_item(Ident::new(sym::hidden, span));
let doc = rustc_ast::attr::mk_list_item(Ident::new(sym::doc, span), vec![hidden]);
let attrs = vec![cx.attribute(inline), cx.attribute(doc)];
let trait_def = TraitDef {
span,
attributes: Vec::new(),
path: path_std!(cmp::Eq),
additional_bounds: Vec::new(),
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: true,
methods: vec![MethodDef {
name: sym::assert_receiver_is_total_eq,
generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![],
ret_ty: nil_ty(),
attributes: attrs,
is_unsafe: false,
unify_fieldless_variants: true,
combine_substructure: combine_substructure(Box::new(|a, b, c| {
cs_total_eq_assert(a, b, c)
})),
}],
associated_types: Vec::new(),
};
super::inject_impl_of_structural_trait(cx, span, item, path_std!(marker::StructuralEq), push);
trait_def.expand_ext(cx, mitem, item, push, true)
}
fn cs_total_eq_assert(
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
) -> P<Expr> {
fn assert_ty_bounds(
cx: &mut ExtCtxt<'_>,
stmts: &mut Vec<ast::Stmt>,
ty: P<ast::Ty>,
span: Span,
helper_name: &str,
) {
// Generate statement `let _: helper_name<ty>;`,
// set the expn ID so we can use the unstable struct.
let span = cx.with_def_site_ctxt(span);
let assert_path = cx.path_all(
span,
true,
cx.std_path(&[sym::cmp, Symbol::intern(helper_name)]),
vec![GenericArg::Type(ty)],
);
stmts.push(cx.stmt_let_type_only(span, cx.ty_path(assert_path)));
}
fn process_variant(
cx: &mut ExtCtxt<'_>,
stmts: &mut Vec<ast::Stmt>,
variant: &ast::VariantData,
) {
for field in variant.fields() {
// let _: AssertParamIsEq<FieldTy>;
assert_ty_bounds(cx, stmts, field.ty.clone(), field.span, "AssertParamIsEq");
}
}
let mut stmts = Vec::new();
match *substr.fields {
StaticStruct(vdata, ..) => {
process_variant(cx, &mut stmts, vdata);
}
StaticEnum(enum_def, ..) => {
for variant in &enum_def.variants {
process_variant(cx, &mut stmts, &variant.data);
}
}
_ => cx.span_bug(trait_span, "unexpected substructure in `derive(Eq)`"),
}
cx.expr_block(cx.block(trait_span, stmts))
}
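
A sketch of the expansion: `derive(Eq)` only emits the hidden `assert_receiver_is_total_eq` method, whose body is a series of `AssertParamIsEq` type assertions (shown as a comment here, since those helpers are internal to the standard library):

#[derive(PartialEq)]
struct Pair { a: u32, b: String }

impl ::core::cmp::Eq for Pair {
    // the generated body is roughly:
    // fn assert_receiver_is_total_eq(&self) {
    //     let _: ::core::cmp::AssertParamIsEq<u32>;
    //     let _: ::core::cmp::AssertParamIsEq<String>;
    // }
}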


@@ -0,0 +1,113 @@
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::path_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident};
use rustc_span::Span;
pub fn expand_deriving_ord(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
let inline = cx.meta_word(span, sym::inline);
let attrs = vec![cx.attribute(inline)];
let trait_def = TraitDef {
span,
attributes: Vec::new(),
path: path_std!(cmp::Ord),
additional_bounds: Vec::new(),
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: sym::cmp,
generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![(borrowed_self(), sym::other)],
ret_ty: Literal(path_std!(cmp::Ordering)),
attributes: attrs,
is_unsafe: false,
unify_fieldless_variants: true,
combine_substructure: combine_substructure(Box::new(|a, b, c| cs_cmp(a, b, c))),
}],
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, item, push)
}
pub fn ordering_collapsed(
cx: &mut ExtCtxt<'_>,
span: Span,
self_arg_tags: &[Ident],
) -> P<ast::Expr> {
let lft = cx.expr_ident(span, self_arg_tags[0]);
let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1]));
cx.expr_method_call(span, lft, Ident::new(sym::cmp, span), vec![rgt])
}
pub fn cs_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
let test_id = Ident::new(sym::cmp, span);
let equals_path = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal]));
let cmp_path = cx.std_path(&[sym::cmp, sym::Ord, sym::cmp]);
// Builds:
//
// match ::std::cmp::Ord::cmp(&self_field1, &other_field1) {
// ::std::cmp::Ordering::Equal =>
// match ::std::cmp::Ord::cmp(&self_field2, &other_field2) {
// ::std::cmp::Ordering::Equal => {
// ...
// }
// cmp => cmp
// },
// cmp => cmp
// }
//
cs_fold(
// foldr nests the if-elses correctly, leaving the first field
// as the outermost one, and the last as the innermost.
false,
|cx, span, old, self_f, other_fs| {
// match new {
// ::std::cmp::Ordering::Equal => old,
// cmp => cmp
// }
let new = {
let other_f = match other_fs {
[o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`"),
};
let args =
vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())];
cx.expr_call_global(span, cmp_path.clone(), args)
};
let eq_arm = cx.arm(span, cx.pat_path(span, equals_path.clone()), old);
let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id));
cx.expr_match(span, new, vec![eq_arm, neq_arm])
},
cx.expr_path(equals_path.clone()),
Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `derive(Ord)`")
} else {
ordering_collapsed(cx, span, tag_tuple)
}
}),
cx,
span,
substr,
)
}
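
A sketch of the nested-match code that `cs_cmp` folds together for a two-field struct (the derives on the struct are only there so the supertrait bounds of `Ord` are satisfied in this standalone sketch):

#[derive(PartialEq, Eq, PartialOrd)]
struct V { major: u32, minor: u32 }

impl ::core::cmp::Ord for V {
    #[inline]
    fn cmp(&self, other: &V) -> ::core::cmp::Ordering {
        match ::core::cmp::Ord::cmp(&self.major, &other.major) {
            ::core::cmp::Ordering::Equal => {
                match ::core::cmp::Ord::cmp(&self.minor, &other.minor) {
                    ::core::cmp::Ordering::Equal => ::core::cmp::Ordering::Equal,
                    cmp => cmp,
                }
            }
            cmp => cmp,
        }
    }
}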


@@ -0,0 +1,112 @@
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::{path_local, path_std};
use rustc_ast::ptr::P;
use rustc_ast::{BinOpKind, Expr, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::sym;
use rustc_span::Span;
pub fn expand_deriving_partial_eq(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
// structures are equal if all fields are equal, and not equal if
// any fields are not equal or if the enum variants are different
fn cs_op(
cx: &mut ExtCtxt<'_>,
span: Span,
substr: &Substructure<'_>,
op: BinOpKind,
combiner: BinOpKind,
base: bool,
) -> P<Expr> {
let op = |cx: &mut ExtCtxt<'_>, span: Span, self_f: P<Expr>, other_fs: &[P<Expr>]| {
let other_f = match other_fs {
[o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialEq)`"),
};
cx.expr_binary(span, op, self_f, other_f.clone())
};
cs_fold1(
true, // use foldl
|cx, span, subexpr, self_f, other_fs| {
let eq = op(cx, span, self_f, other_fs);
cx.expr_binary(span, combiner, subexpr, eq)
},
|cx, args| {
match args {
Some((span, self_f, other_fs)) => {
// Special-case the base case to generate cleaner code.
op(cx, span, self_f, other_fs)
}
None => cx.expr_bool(span, base),
}
},
Box::new(|cx, span, _, _| cx.expr_bool(span, !base)),
cx,
span,
substr,
)
}
fn cs_eq(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
cs_op(cx, span, substr, BinOpKind::Eq, BinOpKind::And, true)
}
fn cs_ne(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
cs_op(cx, span, substr, BinOpKind::Ne, BinOpKind::Or, false)
}
macro_rules! md {
($name:expr, $f:ident) => {{
let inline = cx.meta_word(span, sym::inline);
let attrs = vec![cx.attribute(inline)];
MethodDef {
name: $name,
generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![(borrowed_self(), sym::other)],
ret_ty: Literal(path_local!(bool)),
attributes: attrs,
is_unsafe: false,
unify_fieldless_variants: true,
combine_substructure: combine_substructure(Box::new(|a, b, c| $f(a, b, c))),
}
}};
}
super::inject_impl_of_structural_trait(
cx,
span,
item,
path_std!(marker::StructuralPartialEq),
push,
);
// avoid defining `ne` if we can
// c-like enums, enums without any fields and structs without fields
// can safely define only `eq`.
let mut methods = vec![md!(sym::eq, cs_eq)];
if !is_type_without_fields(item) {
methods.push(md!(sym::ne, cs_ne));
}
let trait_def = TraitDef {
span,
attributes: Vec::new(),
path: path_std!(cmp::PartialEq),
additional_bounds: Vec::new(),
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods,
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, item, push)
}
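
A sketch of the generated comparisons for a struct with fields; as noted above, `ne` is only emitted when the type actually has fields:

struct User { id: u64, name: String }

impl ::core::cmp::PartialEq for User {
    #[inline]
    fn eq(&self, other: &User) -> bool {
        self.id == other.id && self.name == other.name
    }
    #[inline]
    fn ne(&self, other: &User) -> bool {
        self.id != other.id || self.name != other.name
    }
}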


@@ -0,0 +1,302 @@
pub use OrderingOp::*;
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::{path_local, path_std, pathvec_std};
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, BinOpKind, Expr, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
pub fn expand_deriving_partial_ord(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
macro_rules! md {
($name:expr, $op:expr, $equal:expr) => {{
let inline = cx.meta_word(span, sym::inline);
let attrs = vec![cx.attribute(inline)];
MethodDef {
name: $name,
generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![(borrowed_self(), sym::other)],
ret_ty: Literal(path_local!(bool)),
attributes: attrs,
is_unsafe: false,
unify_fieldless_variants: true,
combine_substructure: combine_substructure(Box::new(|cx, span, substr| {
cs_op($op, $equal, cx, span, substr)
})),
}
}};
}
let ordering_ty = Literal(path_std!(cmp::Ordering));
let ret_ty = Literal(Path::new_(
pathvec_std!(option::Option),
None,
vec![Box::new(ordering_ty)],
PathKind::Std,
));
let inline = cx.meta_word(span, sym::inline);
let attrs = vec![cx.attribute(inline)];
let partial_cmp_def = MethodDef {
name: sym::partial_cmp,
generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![(borrowed_self(), sym::other)],
ret_ty,
attributes: attrs,
is_unsafe: false,
unify_fieldless_variants: true,
combine_substructure: combine_substructure(Box::new(|cx, span, substr| {
cs_partial_cmp(cx, span, substr)
})),
};
// avoid defining extra methods if we can
// c-like enums, enums without any fields and structs without fields
// can safely define only `partial_cmp`.
let methods = if is_type_without_fields(item) {
vec![partial_cmp_def]
} else {
vec![
partial_cmp_def,
md!(sym::lt, true, false),
md!(sym::le, true, true),
md!(sym::gt, false, false),
md!(sym::ge, false, true),
]
};
let trait_def = TraitDef {
span,
attributes: vec![],
path: path_std!(cmp::PartialOrd),
additional_bounds: vec![],
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods,
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, item, push)
}
#[derive(Copy, Clone)]
pub enum OrderingOp {
PartialCmpOp,
LtOp,
LeOp,
GtOp,
GeOp,
}
pub fn some_ordering_collapsed(
cx: &mut ExtCtxt<'_>,
span: Span,
op: OrderingOp,
self_arg_tags: &[Ident],
) -> P<ast::Expr> {
let lft = cx.expr_ident(span, self_arg_tags[0]);
let rgt = cx.expr_addr_of(span, cx.expr_ident(span, self_arg_tags[1]));
let op_sym = match op {
PartialCmpOp => sym::partial_cmp,
LtOp => sym::lt,
LeOp => sym::le,
GtOp => sym::gt,
GeOp => sym::ge,
};
cx.expr_method_call(span, lft, Ident::new(op_sym, span), vec![rgt])
}
pub fn cs_partial_cmp(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
let test_id = Ident::new(sym::cmp, span);
let ordering = cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::Equal]));
let ordering_expr = cx.expr_path(ordering.clone());
let equals_expr = cx.expr_some(span, ordering_expr);
let partial_cmp_path = cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp]);
// Builds:
//
// match ::std::cmp::PartialOrd::partial_cmp(&self_field1, &other_field1) {
// ::std::option::Option::Some(::std::cmp::Ordering::Equal) =>
// match ::std::cmp::PartialOrd::partial_cmp(&self_field2, &other_field2) {
// ::std::option::Option::Some(::std::cmp::Ordering::Equal) => {
// ...
// }
// cmp => cmp
// },
// cmp => cmp
// }
//
cs_fold(
// foldr nests the if-elses correctly, leaving the first field
// as the outermost one, and the last as the innermost.
false,
|cx, span, old, self_f, other_fs| {
// match new {
// Some(::std::cmp::Ordering::Equal) => old,
// cmp => cmp
// }
let new = {
let other_f = match other_fs {
[o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"),
};
let args =
vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())];
cx.expr_call_global(span, partial_cmp_path.clone(), args)
};
let eq_arm = cx.arm(span, cx.pat_some(span, cx.pat_path(span, ordering.clone())), old);
let neq_arm = cx.arm(span, cx.pat_ident(span, test_id), cx.expr_ident(span, test_id));
cx.expr_match(span, new, vec![eq_arm, neq_arm])
},
equals_expr,
Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`")
} else {
some_ordering_collapsed(cx, span, PartialCmpOp, tag_tuple)
}
}),
cx,
span,
substr,
)
}
/// Strict inequality.
fn cs_op(
less: bool,
inclusive: bool,
cx: &mut ExtCtxt<'_>,
span: Span,
substr: &Substructure<'_>,
) -> P<Expr> {
let ordering_path = |cx: &mut ExtCtxt<'_>, name: &str| {
cx.expr_path(
cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, Symbol::intern(name)])),
)
};
let par_cmp = |cx: &mut ExtCtxt<'_>, span, self_f: P<Expr>, other_fs: &[P<Expr>], default| {
let other_f = match other_fs {
[o_f] => o_f,
_ => cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`"),
};
// `PartialOrd::partial_cmp(self.fi, other.fi)`
let cmp_path = cx.expr_path(
cx.path_global(span, cx.std_path(&[sym::cmp, sym::PartialOrd, sym::partial_cmp])),
);
let cmp = cx.expr_call(
span,
cmp_path,
vec![cx.expr_addr_of(span, self_f), cx.expr_addr_of(span, other_f.clone())],
);
let default = ordering_path(cx, default);
// `Option::unwrap_or(_, Ordering::Equal)`
let unwrap_path = cx.expr_path(
cx.path_global(span, cx.std_path(&[sym::option, sym::Option, sym::unwrap_or])),
);
cx.expr_call(span, unwrap_path, vec![cmp, default])
};
let fold = cs_fold1(
false, // need foldr
|cx, span, subexpr, self_f, other_fs| {
// build up a series of `partial_cmp`s from the inside
// out (hence foldr) to get lexical ordering, i.e., for op ==
// `ast::lt`
//
// ```
// Ordering::then_with(
// Option::unwrap_or(
// PartialOrd::partial_cmp(self.f1, other.f1), Ordering::Equal)
// ),
// Option::unwrap_or(
// PartialOrd::partial_cmp(self.f2, other.f2), Ordering::Greater)
// )
// )
// == Ordering::Less
// ```
//
// and for op ==
// `ast::le`
//
// ```
// Ordering::then_with(
// Option::unwrap_or(
// PartialOrd::partial_cmp(self.f1, other.f1), Ordering::Equal)
// ),
// Option::unwrap_or(
// PartialOrd::partial_cmp(self.f2, other.f2), Ordering::Greater)
// )
// )
// != Ordering::Greater
// ```
//
// The optimiser should remove the redundancy. We explicitly
// use the binops to avoid auto-deref dereferencing too many
// layers of pointers, if the type includes pointers.
// `Option::unwrap_or(PartialOrd::partial_cmp(self.fi, other.fi), Ordering::Equal)`
let par_cmp = par_cmp(cx, span, self_f, other_fs, "Equal");
// `Ordering::then_with(Option::unwrap_or(..), ..)`
let then_with_path = cx.expr_path(
cx.path_global(span, cx.std_path(&[sym::cmp, sym::Ordering, sym::then_with])),
);
cx.expr_call(span, then_with_path, vec![par_cmp, cx.lambda0(span, subexpr)])
},
|cx, args| match args {
Some((span, self_f, other_fs)) => {
let opposite = if less { "Greater" } else { "Less" };
par_cmp(cx, span, self_f, other_fs, opposite)
}
None => cx.expr_bool(span, inclusive),
},
Box::new(|cx, span, (self_args, tag_tuple), _non_self_args| {
if self_args.len() != 2 {
cx.span_bug(span, "not exactly 2 arguments in `derive(PartialOrd)`")
} else {
let op = match (less, inclusive) {
(false, false) => GtOp,
(false, true) => GeOp,
(true, false) => LtOp,
(true, true) => LeOp,
};
some_ordering_collapsed(cx, span, op, tag_tuple)
}
}),
cx,
span,
substr,
);
match *substr.fields {
EnumMatching(.., ref all_fields) | Struct(.., ref all_fields) if !all_fields.is_empty() => {
let ordering = ordering_path(cx, if less ^ inclusive { "Less" } else { "Greater" });
let comp_op = if inclusive { BinOpKind::Ne } else { BinOpKind::Eq };
cx.expr_binary(span, comp_op, fold, ordering)
}
_ => fold,
}
}
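
A sketch of the `lt` body this fold builds for a two-field struct, written as a free function so it stands alone (the real expansion places it inside the `PartialOrd` impl next to `partial_cmp`):

struct V { major: u32, minor: u32 }

fn lt_sketch(this: &V, other: &V) -> bool {
    ::core::cmp::Ordering::then_with(
        ::core::option::Option::unwrap_or(
            ::core::cmp::PartialOrd::partial_cmp(&this.major, &other.major),
            ::core::cmp::Ordering::Equal,
        ),
        || ::core::option::Option::unwrap_or(
            ::core::cmp::PartialOrd::partial_cmp(&this.minor, &other.minor),
            ::core::cmp::Ordering::Greater,
        ),
    ) == ::core::cmp::Ordering::Less
}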


@@ -0,0 +1,137 @@
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::path_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, MetaItem};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident};
use rustc_span::{Span, DUMMY_SP};
pub fn expand_deriving_debug(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
// &mut ::std::fmt::Formatter
let fmtr =
Ptr(Box::new(Literal(path_std!(fmt::Formatter))), Borrowed(None, ast::Mutability::Mut));
let trait_def = TraitDef {
span,
attributes: Vec::new(),
path: path_std!(fmt::Debug),
additional_bounds: Vec::new(),
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: sym::fmt,
generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![(fmtr, sym::f)],
ret_ty: Literal(path_std!(fmt::Result)),
attributes: Vec::new(),
is_unsafe: false,
unify_fieldless_variants: false,
combine_substructure: combine_substructure(Box::new(|a, b, c| {
show_substructure(a, b, c)
})),
}],
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, item, push)
}
/// We use the debug builders to do the heavy lifting here
fn show_substructure(cx: &mut ExtCtxt<'_>, span: Span, substr: &Substructure<'_>) -> P<Expr> {
// build fmt.debug_struct(<name>).field(<fieldname>, &<fieldval>)....build()
// or fmt.debug_tuple(<name>).field(&<fieldval>)....build()
// based on the "shape".
let (ident, vdata, fields) = match substr.fields {
Struct(vdata, fields) => (substr.type_ident, *vdata, fields),
EnumMatching(_, _, v, fields) => (v.ident, &v.data, fields),
EnumNonMatchingCollapsed(..) | StaticStruct(..) | StaticEnum(..) => {
cx.span_bug(span, "nonsensical .fields in `#[derive(Debug)]`")
}
};
// We want to make sure we have the ctxt set so that we can use unstable methods
let span = cx.with_def_site_ctxt(span);
let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked));
let builder = Ident::new(sym::debug_trait_builder, span);
let builder_expr = cx.expr_ident(span, builder);
let fmt = substr.nonself_args[0].clone();
let mut stmts = vec![];
match vdata {
ast::VariantData::Tuple(..) | ast::VariantData::Unit(..) => {
// tuple struct/"normal" variant
let expr =
cx.expr_method_call(span, fmt, Ident::new(sym::debug_tuple, span), vec![name]);
stmts.push(cx.stmt_let(span, true, builder, expr));
for field in fields {
// Use double indirection to make sure this works for unsized types
let field = cx.expr_addr_of(field.span, field.self_.clone());
let field = cx.expr_addr_of(field.span, field);
let expr = cx.expr_method_call(
span,
builder_expr.clone(),
Ident::new(sym::field, span),
vec![field],
);
// Use `let _ = expr;` to avoid triggering the
// unused_results lint.
stmts.push(stmt_let_underscore(cx, span, expr));
}
}
ast::VariantData::Struct(..) => {
// normal struct/struct variant
let expr =
cx.expr_method_call(span, fmt, Ident::new(sym::debug_struct, span), vec![name]);
stmts.push(cx.stmt_let(DUMMY_SP, true, builder, expr));
for field in fields {
let name = cx.expr_lit(
field.span,
ast::LitKind::Str(field.name.unwrap().name, ast::StrStyle::Cooked),
);
// Use double indirection to make sure this works for unsized types
let field = cx.expr_addr_of(field.span, field.self_.clone());
let field = cx.expr_addr_of(field.span, field);
let expr = cx.expr_method_call(
span,
builder_expr.clone(),
Ident::new(sym::field, span),
vec![name, field],
);
stmts.push(stmt_let_underscore(cx, span, expr));
}
}
}
let expr = cx.expr_method_call(span, builder_expr, Ident::new(sym::finish, span), vec![]);
stmts.push(cx.stmt_expr(expr));
let block = cx.block(span, stmts);
cx.expr_block(block)
}
fn stmt_let_underscore(cx: &mut ExtCtxt<'_>, sp: Span, expr: P<ast::Expr>) -> ast::Stmt {
let local = P(ast::Local {
pat: cx.pat_wild(sp),
ty: None,
init: Some(expr),
id: ast::DUMMY_NODE_ID,
span: sp,
attrs: ast::AttrVec::new(),
});
ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp }
}
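
A sketch of what the builder-based expansion above produces for a braced struct (field values are double-referenced so unsized fields also work):

struct Point { x: i32, y: i32 }

impl ::core::fmt::Debug for Point {
    fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result {
        let mut debug_trait_builder = f.debug_struct("Point");
        let _ = debug_trait_builder.field("x", &&self.x);
        let _ = debug_trait_builder.field("y", &&self.y);
        debug_trait_builder.finish()
    }
}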


@@ -0,0 +1,223 @@
//! The compiler code necessary for `#[derive(RustcDecodable)]`. See encodable.rs for more.
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::pathvec_std;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, MetaItem, Mutability};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
pub fn expand_deriving_rustc_decodable(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
let krate = sym::rustc_serialize;
let typaram = sym::__D;
let trait_def = TraitDef {
span,
attributes: Vec::new(),
path: Path::new_(vec![krate, sym::Decodable], None, vec![], PathKind::Global),
additional_bounds: Vec::new(),
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: sym::decode,
generics: Bounds {
bounds: vec![(
typaram,
vec![Path::new_(vec![krate, sym::Decoder], None, vec![], PathKind::Global)],
)],
},
explicit_self: None,
args: vec![(
Ptr(Box::new(Literal(Path::new_local(typaram))), Borrowed(None, Mutability::Mut)),
sym::d,
)],
ret_ty: Literal(Path::new_(
pathvec_std!(result::Result),
None,
vec![
Box::new(Self_),
Box::new(Literal(Path::new_(
vec![typaram, sym::Error],
None,
vec![],
PathKind::Local,
))),
],
PathKind::Std,
)),
attributes: Vec::new(),
is_unsafe: false,
unify_fieldless_variants: false,
combine_substructure: combine_substructure(Box::new(|a, b, c| {
decodable_substructure(a, b, c, krate)
})),
}],
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, item, push)
}
fn decodable_substructure(
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
krate: Symbol,
) -> P<Expr> {
let decoder = substr.nonself_args[0].clone();
let recurse = vec![
Ident::new(krate, trait_span),
Ident::new(sym::Decodable, trait_span),
Ident::new(sym::decode, trait_span),
];
let exprdecode = cx.expr_path(cx.path_global(trait_span, recurse));
// throw an underscore in front to suppress unused variable warnings
let blkarg = Ident::new(sym::_d, trait_span);
let blkdecoder = cx.expr_ident(trait_span, blkarg);
match *substr.fields {
StaticStruct(_, ref summary) => {
let nfields = match *summary {
Unnamed(ref fields, _) => fields.len(),
Named(ref fields) => fields.len(),
};
let read_struct_field = Ident::new(sym::read_struct_field, trait_span);
let path = cx.path_ident(trait_span, substr.type_ident);
let result =
decode_static_fields(cx, trait_span, path, summary, |cx, span, name, field| {
cx.expr_try(
span,
cx.expr_method_call(
span,
blkdecoder.clone(),
read_struct_field,
vec![
cx.expr_str(span, name),
cx.expr_usize(span, field),
exprdecode.clone(),
],
),
)
});
let result = cx.expr_ok(trait_span, result);
cx.expr_method_call(
trait_span,
decoder,
Ident::new(sym::read_struct, trait_span),
vec![
cx.expr_str(trait_span, substr.type_ident.name),
cx.expr_usize(trait_span, nfields),
cx.lambda1(trait_span, result, blkarg),
],
)
}
StaticEnum(_, ref fields) => {
let variant = Ident::new(sym::i, trait_span);
let mut arms = Vec::with_capacity(fields.len() + 1);
let mut variants = Vec::with_capacity(fields.len());
let rvariant_arg = Ident::new(sym::read_enum_variant_arg, trait_span);
for (i, &(ident, v_span, ref parts)) in fields.iter().enumerate() {
variants.push(cx.expr_str(v_span, ident.name));
let path = cx.path(trait_span, vec![substr.type_ident, ident]);
let decoded =
decode_static_fields(cx, v_span, path, parts, |cx, span, _, field| {
let idx = cx.expr_usize(span, field);
cx.expr_try(
span,
cx.expr_method_call(
span,
blkdecoder.clone(),
rvariant_arg,
vec![idx, exprdecode.clone()],
),
)
});
arms.push(cx.arm(v_span, cx.pat_lit(v_span, cx.expr_usize(v_span, i)), decoded));
}
arms.push(cx.arm_unreachable(trait_span));
let result = cx.expr_ok(
trait_span,
cx.expr_match(trait_span, cx.expr_ident(trait_span, variant), arms),
);
let lambda = cx.lambda(trait_span, vec![blkarg, variant], result);
let variant_vec = cx.expr_vec(trait_span, variants);
let variant_vec = cx.expr_addr_of(trait_span, variant_vec);
let result = cx.expr_method_call(
trait_span,
blkdecoder,
Ident::new(sym::read_enum_variant, trait_span),
vec![variant_vec, lambda],
);
cx.expr_method_call(
trait_span,
decoder,
Ident::new(sym::read_enum, trait_span),
vec![
cx.expr_str(trait_span, substr.type_ident.name),
cx.lambda1(trait_span, result, blkarg),
],
)
}
_ => cx.bug("expected StaticEnum or StaticStruct in derive(Decodable)"),
}
}
/// Creates a decoder for a single enum variant/struct:
/// - `outer_pat_path` is the path to this enum variant/struct
/// - `getarg` should retrieve the `usize`-th field with the given name.
fn decode_static_fields<F>(
cx: &mut ExtCtxt<'_>,
trait_span: Span,
outer_pat_path: ast::Path,
fields: &StaticFields,
mut getarg: F,
) -> P<Expr>
where
F: FnMut(&mut ExtCtxt<'_>, Span, Symbol, usize) -> P<Expr>,
{
match *fields {
Unnamed(ref fields, is_tuple) => {
let path_expr = cx.expr_path(outer_pat_path);
if !is_tuple {
path_expr
} else {
let fields = fields
.iter()
.enumerate()
.map(|(i, &span)| getarg(cx, span, Symbol::intern(&format!("_field{}", i)), i))
.collect();
cx.expr_call(trait_span, path_expr, fields)
}
}
Named(ref fields) => {
// use the field's span to get nicer error messages.
let fields = fields
.iter()
.enumerate()
.map(|(i, &(ident, span))| {
let arg = getarg(cx, span, ident.name, i);
cx.field_imm(span, ident, arg)
})
.collect();
cx.expr_struct(trait_span, outer_pat_path, fields)
}
}
}


@ -0,0 +1,86 @@
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use rustc_ast::ptr::P;
use rustc_ast::{Expr, MetaItem};
use rustc_errors::struct_span_err;
use rustc_expand::base::{Annotatable, DummyResult, ExtCtxt};
use rustc_span::symbol::{kw, sym};
use rustc_span::Span;
pub fn expand_deriving_default(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
let inline = cx.meta_word(span, sym::inline);
let attrs = vec![cx.attribute(inline)];
let trait_def = TraitDef {
span,
attributes: Vec::new(),
path: Path::new(vec![kw::Default, sym::Default]),
additional_bounds: Vec::new(),
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: kw::Default,
generics: Bounds::empty(),
explicit_self: None,
args: Vec::new(),
ret_ty: Self_,
attributes: attrs,
is_unsafe: false,
unify_fieldless_variants: false,
combine_substructure: combine_substructure(Box::new(|a, b, c| {
default_substructure(a, b, c)
})),
}],
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, item, push)
}
fn default_substructure(
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
) -> P<Expr> {
// Note that `kw::Default` is "default" and `sym::Default` is "Default"!
let default_ident = cx.std_path(&[kw::Default, sym::Default, kw::Default]);
let default_call = |span| cx.expr_call_global(span, default_ident.clone(), Vec::new());
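// So `default_call(span)` builds (roughly) a call to `Default::default()`
// for that span; the path above goes through `std_path`, so it resolves
// against the standard library crate root.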
match *substr.fields {
StaticStruct(_, ref summary) => match *summary {
Unnamed(ref fields, is_tuple) => {
if !is_tuple {
cx.expr_ident(trait_span, substr.type_ident)
} else {
let exprs = fields.iter().map(|sp| default_call(*sp)).collect();
cx.expr_call_ident(trait_span, substr.type_ident, exprs)
}
}
Named(ref fields) => {
let default_fields = fields
.iter()
.map(|&(ident, span)| cx.field_imm(span, ident, default_call(span)))
.collect();
cx.expr_struct_ident(trait_span, substr.type_ident, default_fields)
}
},
StaticEnum(..) => {
struct_span_err!(
&cx.sess.parse_sess.span_diagnostic,
trait_span,
E0665,
"`Default` cannot be derived for enums, only structs"
)
.emit();
// let compilation continue
DummyResult::raw_expr(trait_span, true)
}
_ => cx.span_bug(trait_span, "method in `derive(Default)`"),
}
}


@ -0,0 +1,291 @@
//! The compiler code necessary to implement the `#[derive(RustcEncodable)]`
//! (and `RustcDecodable`, in `decodable.rs`) extension. The idea here is that
//! type-defining items may be tagged with
//! `#[derive(RustcEncodable, RustcDecodable)]`.
//!
//! For example, a type like:
//!
//! ```
//! #[derive(RustcEncodable, RustcDecodable)]
//! struct Node { id: usize }
//! ```
//!
//! would generate two implementations like:
//!
//! ```
//! # struct Node { id: usize }
//! impl<S: Encoder<E>, E> Encodable<S, E> for Node {
//! fn encode(&self, s: &mut S) -> Result<(), E> {
//! s.emit_struct("Node", 1, |this| {
//! this.emit_struct_field("id", 0, |this| {
//! Encodable::encode(&self.id, this)
//! /* this.emit_usize(self.id) can also be used */
//! })
//! })
//! }
//! }
//!
//! impl<D: Decoder<E>, E> Decodable<D, E> for Node {
//! fn decode(d: &mut D) -> Result<Node, E> {
//! d.read_struct("Node", 1, |this| {
//! match this.read_struct_field("id", 0, |this| Decodable::decode(this)) {
//! Ok(id) => Ok(Node { id: id }),
//! Err(e) => Err(e),
//! }
//! })
//! }
//! }
//! ```
//!
//! Other interesting scenarios are when the item has type parameters or
//! references other non-built-in types. A type definition like:
//!
//! ```
//! # #[derive(RustcEncodable, RustcDecodable)]
//! # struct Span;
//! #[derive(RustcEncodable, RustcDecodable)]
//! struct Spanned<T> { node: T, span: Span }
//! ```
//!
//! would yield functions like:
//!
//! ```
//! # #[derive(RustcEncodable, RustcDecodable)]
//! # struct Span;
//! # struct Spanned<T> { node: T, span: Span }
//! impl<
//! S: Encoder<E>,
//! E,
//! T: Encodable<S, E>
//! > Encodable<S, E> for Spanned<T> {
//! fn encode(&self, s: &mut S) -> Result<(), E> {
//! s.emit_struct("Spanned", 2, |this| {
//! this.emit_struct_field("node", 0, |this| self.node.encode(this))
//! .unwrap();
//! this.emit_struct_field("span", 1, |this| self.span.encode(this))
//! })
//! }
//! }
//!
//! impl<
//! D: Decoder<E>,
//! E,
//! T: Decodable<D, E>
//! > Decodable<D, E> for Spanned<T> {
//! fn decode(d: &mut D) -> Result<Spanned<T>, E> {
//! d.read_struct("Spanned", 2, |this| {
//! Ok(Spanned {
//! node: this.read_struct_field("node", 0, |this| Decodable::decode(this))
//! .unwrap(),
//! span: this.read_struct_field("span", 1, |this| Decodable::decode(this))
//! .unwrap(),
//! })
//! })
//! }
//! }
//! ```
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::pathvec_std;
use rustc_ast::ptr::P;
use rustc_ast::{Expr, ExprKind, MetaItem, Mutability};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
pub fn expand_deriving_rustc_encodable(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
let krate = sym::rustc_serialize;
let typaram = sym::__S;
let trait_def = TraitDef {
span,
attributes: Vec::new(),
path: Path::new_(vec![krate, sym::Encodable], None, vec![], PathKind::Global),
additional_bounds: Vec::new(),
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: sym::encode,
generics: Bounds {
bounds: vec![(
typaram,
vec![Path::new_(vec![krate, sym::Encoder], None, vec![], PathKind::Global)],
)],
},
explicit_self: borrowed_explicit_self(),
args: vec![(
Ptr(Box::new(Literal(Path::new_local(typaram))), Borrowed(None, Mutability::Mut)),
// FIXME: we could use `sym::s` here, but making `s` a static
// symbol changes the symbol index ordering in a way that makes
// ui/lint/rfc-2457-non-ascii-idents/lint-confusable-idents.rs
// fail. The linting code should be fixed so that its output
// does not depend on the symbol index ordering.
Symbol::intern("s"),
)],
ret_ty: Literal(Path::new_(
pathvec_std!(result::Result),
None,
vec![
Box::new(Tuple(Vec::new())),
Box::new(Literal(Path::new_(
vec![typaram, sym::Error],
None,
vec![],
PathKind::Local,
))),
],
PathKind::Std,
)),
attributes: Vec::new(),
is_unsafe: false,
unify_fieldless_variants: false,
combine_substructure: combine_substructure(Box::new(|a, b, c| {
encodable_substructure(a, b, c, krate)
})),
}],
associated_types: Vec::new(),
};
trait_def.expand(cx, mitem, item, push)
}
fn encodable_substructure(
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
krate: Symbol,
) -> P<Expr> {
let encoder = substr.nonself_args[0].clone();
// throw an underscore in front to suppress unused variable warnings
let blkarg = Ident::new(sym::_e, trait_span);
let blkencoder = cx.expr_ident(trait_span, blkarg);
let fn_path = cx.expr_path(cx.path_global(
trait_span,
vec![
Ident::new(krate, trait_span),
Ident::new(sym::Encodable, trait_span),
Ident::new(sym::encode, trait_span),
],
));
match *substr.fields {
Struct(_, ref fields) => {
let emit_struct_field = Ident::new(sym::emit_struct_field, trait_span);
let mut stmts = Vec::new();
for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() {
let name = match name {
Some(id) => id.name,
None => Symbol::intern(&format!("_field{}", i)),
};
let self_ref = cx.expr_addr_of(span, self_.clone());
let enc = cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]);
let lambda = cx.lambda1(span, enc, blkarg);
let call = cx.expr_method_call(
span,
blkencoder.clone(),
emit_struct_field,
vec![cx.expr_str(span, name), cx.expr_usize(span, i), lambda],
);
// last call doesn't need a try!
let last = fields.len() - 1;
let call = if i != last {
cx.expr_try(span, call)
} else {
cx.expr(span, ExprKind::Ret(Some(call)))
};
let stmt = cx.stmt_expr(call);
stmts.push(stmt);
}
// unit structs have no fields and need to return Ok()
let blk = if stmts.is_empty() {
let ok = cx.expr_ok(trait_span, cx.expr_tuple(trait_span, vec![]));
cx.lambda1(trait_span, ok, blkarg)
} else {
cx.lambda_stmts_1(trait_span, stmts, blkarg)
};
cx.expr_method_call(
trait_span,
encoder,
Ident::new(sym::emit_struct, trait_span),
vec![
cx.expr_str(trait_span, substr.type_ident.name),
cx.expr_usize(trait_span, fields.len()),
blk,
],
)
}
EnumMatching(idx, _, variant, ref fields) => {
// We're not generating an AST that the borrow checker is expecting,
// so we need to generate a unique local variable to take the
// mutable loan out on; otherwise we get conflicts which don't
// actually exist.
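// Concretely, the statement below becomes roughly `let _e = s;` (the
// encoder argument is literally named `s`, see the `args` list above), and
// the `emit_*` calls and closures that follow all go through `_e`.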
let me = cx.stmt_let(trait_span, false, blkarg, encoder);
let encoder = cx.expr_ident(trait_span, blkarg);
let emit_variant_arg = Ident::new(sym::emit_enum_variant_arg, trait_span);
let mut stmts = Vec::new();
if !fields.is_empty() {
let last = fields.len() - 1;
for (i, &FieldInfo { ref self_, span, .. }) in fields.iter().enumerate() {
let self_ref = cx.expr_addr_of(span, self_.clone());
let enc =
cx.expr_call(span, fn_path.clone(), vec![self_ref, blkencoder.clone()]);
let lambda = cx.lambda1(span, enc, blkarg);
let call = cx.expr_method_call(
span,
blkencoder.clone(),
emit_variant_arg,
vec![cx.expr_usize(span, i), lambda],
);
let call = if i != last {
cx.expr_try(span, call)
} else {
cx.expr(span, ExprKind::Ret(Some(call)))
};
stmts.push(cx.stmt_expr(call));
}
} else {
let ok = cx.expr_ok(trait_span, cx.expr_tuple(trait_span, vec![]));
let ret_ok = cx.expr(trait_span, ExprKind::Ret(Some(ok)));
stmts.push(cx.stmt_expr(ret_ok));
}
let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg);
let name = cx.expr_str(trait_span, variant.ident.name);
let call = cx.expr_method_call(
trait_span,
blkencoder,
Ident::new(sym::emit_enum_variant, trait_span),
vec![
name,
cx.expr_usize(trait_span, idx),
cx.expr_usize(trait_span, fields.len()),
blk,
],
);
let blk = cx.lambda1(trait_span, call, blkarg);
let ret = cx.expr_method_call(
trait_span,
encoder,
Ident::new(sym::emit_enum, trait_span),
vec![cx.expr_str(trait_span, substr.type_ident.name), blk],
);
cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)]))
}
_ => cx.bug("expected Struct or EnumMatching in derive(Encodable)"),
}
}

File diff suppressed because it is too large


@ -0,0 +1,280 @@
//! A mini version of ast::Ty, which is easier to use, and features an explicit `Self` type to use
//! when specifying impls to be derived.
pub use PtrTy::*;
pub use Ty::*;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Expr, GenericArg, GenericParamKind, Generics, SelfKind};
use rustc_expand::base::ExtCtxt;
use rustc_span::source_map::{respan, DUMMY_SP};
use rustc_span::symbol::{kw, Ident, Symbol};
use rustc_span::Span;
/// The types of pointers
#[derive(Clone)]
pub enum PtrTy {
/// &'lifetime mut
Borrowed(Option<Ident>, ast::Mutability),
/// *mut
#[allow(dead_code)]
Raw(ast::Mutability),
}
/// A path, e.g., `::std::option::Option::<i32>` (global). Has support
/// for type parameters and a lifetime.
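/// For example, decodable.rs above builds
/// `Path::new_(vec![krate, sym::Decodable], None, vec![], PathKind::Global)`
/// for the global path `::rustc_serialize::Decodable`.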
#[derive(Clone)]
pub struct Path {
path: Vec<Symbol>,
lifetime: Option<Ident>,
params: Vec<Box<Ty>>,
kind: PathKind,
}
#[derive(Clone)]
pub enum PathKind {
Local,
Global,
Std,
}
impl Path {
pub fn new(path: Vec<Symbol>) -> Path {
Path::new_(path, None, Vec::new(), PathKind::Std)
}
pub fn new_local(path: Symbol) -> Path {
Path::new_(vec![path], None, Vec::new(), PathKind::Local)
}
pub fn new_(
path: Vec<Symbol>,
lifetime: Option<Ident>,
params: Vec<Box<Ty>>,
kind: PathKind,
) -> Path {
Path { path, lifetime, params, kind }
}
pub fn to_ty(
&self,
cx: &ExtCtxt<'_>,
span: Span,
self_ty: Ident,
self_generics: &Generics,
) -> P<ast::Ty> {
cx.ty_path(self.to_path(cx, span, self_ty, self_generics))
}
pub fn to_path(
&self,
cx: &ExtCtxt<'_>,
span: Span,
self_ty: Ident,
self_generics: &Generics,
) -> ast::Path {
let mut idents = self.path.iter().map(|s| Ident::new(*s, span)).collect();
let lt = mk_lifetimes(cx, span, &self.lifetime);
let tys: Vec<P<ast::Ty>> =
self.params.iter().map(|t| t.to_ty(cx, span, self_ty, self_generics)).collect();
let params = lt
.into_iter()
.map(GenericArg::Lifetime)
.chain(tys.into_iter().map(GenericArg::Type))
.collect();
match self.kind {
PathKind::Global => cx.path_all(span, true, idents, params),
PathKind::Local => cx.path_all(span, false, idents, params),
PathKind::Std => {
let def_site = cx.with_def_site_ctxt(DUMMY_SP);
idents.insert(0, Ident::new(kw::DollarCrate, def_site));
cx.path_all(span, false, idents, params)
}
}
}
}
/// A type. Supports pointers, Self, and literals.
#[derive(Clone)]
pub enum Ty {
Self_,
/// A pointer to `Ty`: a borrow or a raw pointer (see `PtrTy`)
Ptr(Box<Ty>, PtrTy),
/// `mod::mod::Type<[lifetime], [Params...]>`, including a plain type
/// parameter, and things like `i32`
Literal(Path),
/// includes unit
Tuple(Vec<Ty>),
}
pub fn borrowed_ptrty() -> PtrTy {
Borrowed(None, ast::Mutability::Not)
}
pub fn borrowed(ty: Box<Ty>) -> Ty {
Ptr(ty, borrowed_ptrty())
}
pub fn borrowed_explicit_self() -> Option<Option<PtrTy>> {
Some(Some(borrowed_ptrty()))
}
pub fn borrowed_self() -> Ty {
borrowed(Box::new(Self_))
}
pub fn nil_ty() -> Ty {
Tuple(Vec::new())
}
fn mk_lifetime(cx: &ExtCtxt<'_>, span: Span, lt: &Option<Ident>) -> Option<ast::Lifetime> {
lt.map(|ident| cx.lifetime(span, ident))
}
fn mk_lifetimes(cx: &ExtCtxt<'_>, span: Span, lt: &Option<Ident>) -> Vec<ast::Lifetime> {
mk_lifetime(cx, span, lt).into_iter().collect()
}
impl Ty {
pub fn to_ty(
&self,
cx: &ExtCtxt<'_>,
span: Span,
self_ty: Ident,
self_generics: &Generics,
) -> P<ast::Ty> {
match *self {
Ptr(ref ty, ref ptr) => {
let raw_ty = ty.to_ty(cx, span, self_ty, self_generics);
match *ptr {
Borrowed(ref lt, mutbl) => {
let lt = mk_lifetime(cx, span, lt);
cx.ty_rptr(span, raw_ty, lt, mutbl)
}
Raw(mutbl) => cx.ty_ptr(span, raw_ty, mutbl),
}
}
Literal(ref p) => p.to_ty(cx, span, self_ty, self_generics),
Self_ => cx.ty_path(self.to_path(cx, span, self_ty, self_generics)),
Tuple(ref fields) => {
let ty = ast::TyKind::Tup(
fields.iter().map(|f| f.to_ty(cx, span, self_ty, self_generics)).collect(),
);
cx.ty(span, ty)
}
}
}
pub fn to_path(
&self,
cx: &ExtCtxt<'_>,
span: Span,
self_ty: Ident,
generics: &Generics,
) -> ast::Path {
match *self {
Self_ => {
let params: Vec<_> = generics
.params
.iter()
.map(|param| match param.kind {
GenericParamKind::Lifetime { .. } => {
GenericArg::Lifetime(ast::Lifetime { id: param.id, ident: param.ident })
}
GenericParamKind::Type { .. } => {
GenericArg::Type(cx.ty_ident(span, param.ident))
}
GenericParamKind::Const { .. } => {
GenericArg::Const(cx.const_ident(span, param.ident))
}
})
.collect();
cx.path_all(span, false, vec![self_ty], params)
}
Literal(ref p) => p.to_path(cx, span, self_ty, generics),
Ptr(..) => cx.span_bug(span, "pointer in a path in generic `derive`"),
Tuple(..) => cx.span_bug(span, "tuple in a path in generic `derive`"),
}
}
}
fn mk_ty_param(
cx: &ExtCtxt<'_>,
span: Span,
name: Symbol,
attrs: &[ast::Attribute],
bounds: &[Path],
self_ident: Ident,
self_generics: &Generics,
) -> ast::GenericParam {
let bounds = bounds
.iter()
.map(|b| {
let path = b.to_path(cx, span, self_ident, self_generics);
cx.trait_bound(path)
})
.collect();
cx.typaram(span, Ident::new(name, span), attrs.to_owned(), bounds, None)
}
fn mk_generics(params: Vec<ast::GenericParam>, span: Span) -> Generics {
Generics {
params,
where_clause: ast::WhereClause { has_where_token: false, predicates: Vec::new(), span },
span,
}
}
/// Bounds on type parameters.
#[derive(Clone)]
pub struct Bounds {
pub bounds: Vec<(Symbol, Vec<Path>)>,
}
impl Bounds {
pub fn empty() -> Bounds {
Bounds { bounds: Vec::new() }
}
pub fn to_generics(
&self,
cx: &ExtCtxt<'_>,
span: Span,
self_ty: Ident,
self_generics: &Generics,
) -> Generics {
let generic_params = self
.bounds
.iter()
.map(|t| {
let (name, ref bounds) = *t;
mk_ty_param(cx, span, name, &[], &bounds, self_ty, self_generics)
})
.collect();
mk_generics(generic_params, span)
}
}
pub fn get_explicit_self(
cx: &ExtCtxt<'_>,
span: Span,
self_ptr: &Option<PtrTy>,
) -> (P<Expr>, ast::ExplicitSelf) {
// this constructs a fresh `self` path
let self_path = cx.expr_self(span);
match *self_ptr {
None => (self_path, respan(span, SelfKind::Value(ast::Mutability::Not))),
Some(ref ptr) => {
let self_ty = respan(
span,
match *ptr {
Borrowed(ref lt, mutbl) => {
let lt = lt.map(|s| cx.lifetime(span, s));
SelfKind::Region(lt, mutbl)
}
Raw(_) => cx.span_bug(span, "attempted to use *self in deriving definition"),
},
);
let self_expr = cx.expr_deref(span, self_path);
(self_expr, self_ty)
}
}
}


@ -0,0 +1,89 @@
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
use crate::deriving::{self, path_std, pathvec_std};
use rustc_ast::ptr::P;
use rustc_ast::{Expr, MetaItem, Mutability};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::sym;
use rustc_span::Span;
pub fn expand_deriving_hash(
cx: &mut ExtCtxt<'_>,
span: Span,
mitem: &MetaItem,
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
let path = Path::new_(pathvec_std!(hash::Hash), None, vec![], PathKind::Std);
let typaram = sym::__H;
let arg = Path::new_local(typaram);
let hash_trait_def = TraitDef {
span,
attributes: Vec::new(),
path,
additional_bounds: Vec::new(),
generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: sym::hash,
generics: Bounds { bounds: vec![(typaram, vec![path_std!(hash::Hasher)])] },
explicit_self: borrowed_explicit_self(),
args: vec![(Ptr(Box::new(Literal(arg)), Borrowed(None, Mutability::Mut)), sym::state)],
ret_ty: nil_ty(),
attributes: vec![],
is_unsafe: false,
unify_fieldless_variants: true,
combine_substructure: combine_substructure(Box::new(|a, b, c| {
hash_substructure(a, b, c)
})),
}],
associated_types: Vec::new(),
};
hash_trait_def.expand(cx, mitem, item, push);
}
fn hash_substructure(cx: &mut ExtCtxt<'_>, trait_span: Span, substr: &Substructure<'_>) -> P<Expr> {
let state_expr = match &substr.nonself_args {
&[o_f] => o_f,
_ => cx.span_bug(trait_span, "incorrect number of arguments in `derive(Hash)`"),
};
let call_hash = |span, thing_expr| {
let hash_path = {
let strs = cx.std_path(&[sym::hash, sym::Hash, sym::hash]);
cx.expr_path(cx.path_global(span, strs))
};
let ref_thing = cx.expr_addr_of(span, thing_expr);
let expr = cx.expr_call(span, hash_path, vec![ref_thing, state_expr.clone()]);
cx.stmt_expr(expr)
};
let mut stmts = Vec::new();
let fields = match *substr.fields {
Struct(_, ref fs) | EnumMatching(_, 1, .., ref fs) => fs,
EnumMatching(.., ref fs) => {
let variant_value = deriving::call_intrinsic(
cx,
trait_span,
sym::discriminant_value,
vec![cx.expr_self(trait_span)],
);
stmts.push(call_hash(trait_span, variant_value));
fs
}
_ => cx.span_bug(trait_span, "impossible substructure in `derive(Hash)`"),
};
stmts.extend(
fields.iter().map(|FieldInfo { ref self_, span, .. }| call_hash(*span, self_.clone())),
);
cx.expr_block(cx.block(trait_span, stmts))
}


@ -0,0 +1,173 @@
//! The compiler code necessary to implement the `#[derive]` extensions.
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::{ItemKind, MetaItem};
use rustc_expand::base::{Annotatable, ExpandResult, ExtCtxt, MultiItemModifier};
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
macro path_local($x:ident) {
generic::ty::Path::new_local(sym::$x)
}
macro pathvec_std($($rest:ident)::+) {{
vec![ $( sym::$rest ),+ ]
}}
macro path_std($($x:tt)*) {
generic::ty::Path::new( pathvec_std!( $($x)* ) )
}
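// For example (expanding the macros above by hand):
// `pathvec_std!(hash::Hash)` becomes `vec![sym::hash, sym::Hash]`, and
// `path_std!(fmt::Result)` becomes
// `generic::ty::Path::new(vec![sym::fmt, sym::Result])`.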
pub mod bounds;
pub mod clone;
pub mod debug;
pub mod decodable;
pub mod default;
pub mod encodable;
pub mod hash;
#[path = "cmp/eq.rs"]
pub mod eq;
#[path = "cmp/ord.rs"]
pub mod ord;
#[path = "cmp/partial_eq.rs"]
pub mod partial_eq;
#[path = "cmp/partial_ord.rs"]
pub mod partial_ord;
pub mod generic;
crate struct BuiltinDerive(
crate fn(&mut ExtCtxt<'_>, Span, &MetaItem, &Annotatable, &mut dyn FnMut(Annotatable)),
);
impl MultiItemModifier for BuiltinDerive {
fn expand(
&self,
ecx: &mut ExtCtxt<'_>,
span: Span,
meta_item: &MetaItem,
item: Annotatable,
) -> ExpandResult<Vec<Annotatable>, Annotatable> {
// FIXME: Built-in derives often forget to give spans contexts,
// so we are doing it here in a centralized way.
let span = ecx.with_def_site_ctxt(span);
let mut items = Vec::new();
(self.0)(ecx, span, meta_item, &item, &mut |a| items.push(a));
ExpandResult::Ready(items)
}
}
/// Constructs an expression that calls an intrinsic
fn call_intrinsic(
cx: &ExtCtxt<'_>,
span: Span,
intrinsic: Symbol,
args: Vec<P<ast::Expr>>,
) -> P<ast::Expr> {
let span = cx.with_def_site_ctxt(span);
let path = cx.std_path(&[sym::intrinsics, intrinsic]);
let call = cx.expr_call_global(span, path, args);
cx.expr_block(P(ast::Block {
stmts: vec![cx.stmt_expr(call)],
id: ast::DUMMY_NODE_ID,
rules: ast::BlockCheckMode::Unsafe(ast::CompilerGenerated),
span,
}))
}
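// In other words, `call_intrinsic(cx, span, name, args)` builds a call to
// `intrinsics::<name>` resolved through the standard library crate root
// (via `std_path`), wrapped in an `unsafe` block whose check mode is marked
// as compiler-generated. hash.rs above uses it with
// `sym::discriminant_value` and `vec![cx.expr_self(trait_span)]`.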
// Injects `impl<...> Structural for ItemType<...> { }`. In particular,
// does *not* add `where T: Structural` for parameters `T` in `...`.
// (That's the main reason we cannot use TraitDef here.)
fn inject_impl_of_structural_trait(
cx: &mut ExtCtxt<'_>,
span: Span,
item: &Annotatable,
structural_path: generic::ty::Path,
push: &mut dyn FnMut(Annotatable),
) {
let item = match *item {
Annotatable::Item(ref item) => item,
_ => {
// Non-Item derive is an error, but it should have been
// set earlier; see
// librustc_expand/expand.rs:MacroExpander::fully_expand_fragment()
// librustc_expand/base.rs:Annotatable::derive_allowed()
return;
}
};
let generics = match item.kind {
ItemKind::Struct(_, ref generics) | ItemKind::Enum(_, ref generics) => generics,
// Do not inject `impl Structural for Union`. (`PartialEq` does not
// support unions, so we will see an error downstream.)
ItemKind::Union(..) => return,
_ => unreachable!(),
};
// Create generics param list for where clauses and impl headers
let mut generics = generics.clone();
// Create the type of `self`.
//
// in addition, remove defaults from type params (impls cannot have them).
let self_params: Vec<_> = generics
.params
.iter_mut()
.map(|param| match &mut param.kind {
ast::GenericParamKind::Lifetime => {
ast::GenericArg::Lifetime(cx.lifetime(span, param.ident))
}
ast::GenericParamKind::Type { default } => {
*default = None;
ast::GenericArg::Type(cx.ty_ident(span, param.ident))
}
ast::GenericParamKind::Const { ty: _, kw_span: _ } => {
ast::GenericArg::Const(cx.const_ident(span, param.ident))
}
})
.collect();
let type_ident = item.ident;
let trait_ref = cx.trait_ref(structural_path.to_path(cx, span, type_ident, &generics));
let self_type = cx.ty_path(cx.path_all(span, false, vec![type_ident], self_params));
// It would be nice to also encode constraint `where Self: Eq` (by adding it
// onto `generics` cloned above). Unfortunately, that strategy runs afoul of
// rust-lang/rust#48214. So we perform that additional check in the compiler
// itself, instead of encoding it here.
// Keep the lint and stability attributes of the original item, to control
// how the generated implementation is linted.
let mut attrs = Vec::new();
attrs.extend(
item.attrs
.iter()
.filter(|a| {
[sym::allow, sym::warn, sym::deny, sym::forbid, sym::stable, sym::unstable]
.contains(&a.name_or_empty())
})
.cloned(),
);
let newitem = cx.item(
span,
Ident::invalid(),
attrs,
ItemKind::Impl {
unsafety: ast::Unsafe::No,
polarity: ast::ImplPolarity::Positive,
defaultness: ast::Defaultness::Final,
constness: ast::Const::No,
generics,
of_trait: Some(trait_ref),
self_ty: self_type,
items: Vec::new(),
},
);
push(Annotatable::Item(newitem));
}


@ -0,0 +1,93 @@
// The compiler code necessary to support the `env!` extension. Eventually this
// should all get sucked into the compiler syntax extension plugin interface.
use rustc_ast::tokenstream::TokenStream;
use rustc_ast::{self as ast, GenericArg};
use rustc_expand::base::{self, *};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::Span;
use std::env;
pub fn expand_option_env<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
let var = match get_single_str_from_tts(cx, sp, tts, "option_env!") {
None => return DummyResult::any(sp),
Some(v) => v,
};
let sp = cx.with_def_site_ctxt(sp);
let value = env::var(&var.as_str()).ok().as_deref().map(Symbol::intern);
cx.sess.parse_sess.env_depinfo.borrow_mut().insert((Symbol::intern(&var), value));
let e = match value {
None => {
let lt = cx.lifetime(sp, Ident::new(kw::StaticLifetime, sp));
cx.expr_path(cx.path_all(
sp,
true,
cx.std_path(&[sym::option, sym::Option, sym::None]),
vec![GenericArg::Type(cx.ty_rptr(
sp,
cx.ty_ident(sp, Ident::new(sym::str, sp)),
Some(lt),
ast::Mutability::Not,
))],
))
}
Some(value) => cx.expr_call_global(
sp,
cx.std_path(&[sym::option, sym::Option, sym::Some]),
vec![cx.expr_str(sp, value)],
),
};
MacEager::expr(e)
}
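// Sketch of the expansion (with a hypothetical variable `FOO`):
// `option_env!("FOO")` becomes roughly
// `::std::option::Option::Some("bar")` when `FOO=bar` is set at compile
// time, and `::std::option::Option::None::<&'static str>` (spelled via the
// explicit type argument built above) when it is not.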
pub fn expand_env<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
let mut exprs = match get_exprs_from_tts(cx, sp, tts) {
Some(ref exprs) if exprs.is_empty() => {
cx.span_err(sp, "env! takes 1 or 2 arguments");
return DummyResult::any(sp);
}
None => return DummyResult::any(sp),
Some(exprs) => exprs.into_iter(),
};
let var = match expr_to_string(cx, exprs.next().unwrap(), "expected string literal") {
None => return DummyResult::any(sp),
Some((v, _style)) => v,
};
let msg = match exprs.next() {
None => Symbol::intern(&format!("environment variable `{}` not defined", var)),
Some(second) => match expr_to_string(cx, second, "expected string literal") {
None => return DummyResult::any(sp),
Some((s, _style)) => s,
},
};
if exprs.next().is_some() {
cx.span_err(sp, "env! takes 1 or 2 arguments");
return DummyResult::any(sp);
}
let sp = cx.with_def_site_ctxt(sp);
let value = env::var(&*var.as_str()).ok().as_deref().map(Symbol::intern);
cx.sess.parse_sess.env_depinfo.borrow_mut().insert((var, value));
let e = match value {
None => {
cx.span_err(sp, &msg.as_str());
return DummyResult::any(sp);
}
Some(value) => cx.expr_str(sp, value),
};
MacEager::expr(e)
}

File diff suppressed because it is too large


@ -0,0 +1,823 @@
pub mod printf {
use super::strcursor::StrCursor as Cur;
use rustc_span::InnerSpan;
/// Represents a single `printf`-style substitution.
#[derive(Clone, PartialEq, Debug)]
pub enum Substitution<'a> {
/// A formatted output substitution with its internal byte offset.
Format(Format<'a>),
/// A literal `%%` escape.
Escape,
}
impl<'a> Substitution<'a> {
pub fn as_str(&self) -> &str {
match *self {
Substitution::Format(ref fmt) => fmt.span,
Substitution::Escape => "%%",
}
}
pub fn position(&self) -> Option<InnerSpan> {
match *self {
Substitution::Format(ref fmt) => Some(fmt.position),
_ => None,
}
}
pub fn set_position(&mut self, start: usize, end: usize) {
if let Substitution::Format(ref mut fmt) = self {
fmt.position = InnerSpan::new(start, end);
}
}
/// Translate this substitution into an equivalent Rust formatting directive.
///
/// This ignores cases where the substitution does not have an exact equivalent, or where
/// the substitution would be unnecessary.
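    ///
    /// For example, `%-10s` translates to `{:<10}` and `%08X` to `{:08X}`
    /// (see the `tests` module below for the full mapping).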
pub fn translate(&self) -> Option<String> {
match *self {
Substitution::Format(ref fmt) => fmt.translate(),
Substitution::Escape => None,
}
}
}
#[derive(Clone, PartialEq, Debug)]
/// A single `printf`-style formatting directive.
pub struct Format<'a> {
/// The entire original formatting directive.
pub span: &'a str,
/// The (1-based) parameter to be converted.
pub parameter: Option<u16>,
/// Formatting flags.
pub flags: &'a str,
/// Minimum width of the output.
pub width: Option<Num>,
/// Precision of the conversion.
pub precision: Option<Num>,
/// Length modifier for the conversion.
pub length: Option<&'a str>,
/// Type of parameter being converted.
pub type_: &'a str,
/// Byte offset for the start and end of this formatting directive.
pub position: InnerSpan,
}
impl Format<'_> {
/// Translate this directive into an equivalent Rust formatting directive.
///
/// Returns `None` in cases where the `printf` directive does not have an exact Rust
/// equivalent, rather than guessing.
pub fn translate(&self) -> Option<String> {
use std::fmt::Write;
let (c_alt, c_zero, c_left, c_plus) = {
let mut c_alt = false;
let mut c_zero = false;
let mut c_left = false;
let mut c_plus = false;
for c in self.flags.chars() {
match c {
'#' => c_alt = true,
'0' => c_zero = true,
'-' => c_left = true,
'+' => c_plus = true,
_ => return None,
}
}
(c_alt, c_zero, c_left, c_plus)
};
// Has a special form in Rust for numbers.
let fill = c_zero.then_some("0");
let align = c_left.then_some("<");
// Rust doesn't have an equivalent to the `' '` flag.
let sign = c_plus.then_some("+");
// Not *quite* the same, depending on the type...
let alt = c_alt;
let width = match self.width {
Some(Num::Next) => {
// NOTE: Rust doesn't support this.
return None;
}
w @ Some(Num::Arg(_)) => w,
w @ Some(Num::Num(_)) => w,
None => None,
};
let precision = self.precision;
// NOTE: although length *can* have an effect, we can't duplicate the effect in Rust, so
// we just ignore it.
let (type_, use_zero_fill, is_int) = match self.type_ {
"d" | "i" | "u" => (None, true, true),
"f" | "F" => (None, false, false),
"s" | "c" => (None, false, false),
"e" | "E" => (Some(self.type_), true, false),
"x" | "X" | "o" => (Some(self.type_), true, true),
"p" => (Some(self.type_), false, true),
"g" => (Some("e"), true, false),
"G" => (Some("E"), true, false),
_ => return None,
};
let (fill, width, precision) = match (is_int, width, precision) {
(true, Some(_), Some(_)) => {
// Rust can't duplicate this insanity.
return None;
}
(true, None, Some(p)) => (Some("0"), Some(p), None),
(true, w, None) => (fill, w, None),
(false, w, p) => (fill, w, p),
};
let align = match (self.type_, width.is_some(), align.is_some()) {
("s", true, false) => Some(">"),
_ => align,
};
let (fill, zero_fill) = match (fill, use_zero_fill) {
(Some("0"), true) => (None, true),
(fill, _) => (fill, false),
};
let alt = match type_ {
Some("x" | "X") => alt,
_ => false,
};
let has_options = fill.is_some()
|| align.is_some()
|| sign.is_some()
|| alt
|| zero_fill
|| width.is_some()
|| precision.is_some()
|| type_.is_some();
// Initialise with a rough guess.
let cap = self.span.len() + if has_options { 2 } else { 0 };
let mut s = String::with_capacity(cap);
s.push_str("{");
if let Some(arg) = self.parameter {
write!(s, "{}", arg.checked_sub(1)?).ok()?;
}
if has_options {
s.push_str(":");
let align = if let Some(fill) = fill {
s.push_str(fill);
align.or(Some(">"))
} else {
align
};
if let Some(align) = align {
s.push_str(align);
}
if let Some(sign) = sign {
s.push_str(sign);
}
if alt {
s.push_str("#");
}
if zero_fill {
s.push_str("0");
}
if let Some(width) = width {
width.translate(&mut s).ok()?;
}
if let Some(precision) = precision {
s.push_str(".");
precision.translate(&mut s).ok()?;
}
if let Some(type_) = type_ {
s.push_str(type_);
}
}
s.push_str("}");
Some(s)
}
}
/// A general number used in a `printf` formatting directive.
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum Num {
// The range of these values is technically bounded by `NL_ARGMAX`... but, at least for GNU
// libc, it apparently has no real fixed limit. A `u16` is used here on the basis that it
// is *vanishingly* unlikely that *anyone* is going to try formatting something wider, or
// with more precision, than 32 thousand positions, which is so wide it couldn't possibly fit
// on a screen.
/// A specific, fixed value.
Num(u16),
/// The value is derived from a positional argument.
Arg(u16),
/// The value is derived from the "next" unconverted argument.
Next,
}
impl Num {
fn from_str(s: &str, arg: Option<&str>) -> Self {
if let Some(arg) = arg {
Num::Arg(arg.parse().unwrap_or_else(|_| panic!("invalid format arg `{:?}`", arg)))
} else if s == "*" {
Num::Next
} else {
Num::Num(s.parse().unwrap_or_else(|_| panic!("invalid format num `{:?}`", s)))
}
}
fn translate(&self, s: &mut String) -> std::fmt::Result {
use std::fmt::Write;
match *self {
Num::Num(n) => write!(s, "{}", n),
Num::Arg(n) => {
let n = n.checked_sub(1).ok_or(std::fmt::Error)?;
write!(s, "{}$", n)
}
Num::Next => write!(s, "*"),
}
}
}
/// Returns an iterator over all substitutions in a given string.
pub fn iter_subs(s: &str, start_pos: usize) -> Substitutions<'_> {
Substitutions { s, pos: start_pos }
}
/// Iterator over substitutions in a string.
pub struct Substitutions<'a> {
s: &'a str,
pos: usize,
}
impl<'a> Iterator for Substitutions<'a> {
type Item = Substitution<'a>;
fn next(&mut self) -> Option<Self::Item> {
let (mut sub, tail) = parse_next_substitution(self.s)?;
self.s = tail;
match sub {
Substitution::Format(_) => {
if let Some(inner_span) = sub.position() {
sub.set_position(inner_span.start + self.pos, inner_span.end + self.pos);
self.pos += inner_span.end;
}
}
Substitution::Escape => self.pos += 2,
}
Some(sub)
}
fn size_hint(&self) -> (usize, Option<usize>) {
// Substitutions are at least 2 characters long.
(0, Some(self.s.len() / 2))
}
}
enum State {
Start,
Flags,
Width,
WidthArg,
Prec,
PrecInner,
Length,
Type,
}
/// Parse the next substitution from the input string.
pub fn parse_next_substitution(s: &str) -> Option<(Substitution<'_>, &str)> {
use self::State::*;
let at = {
let start = s.find('%')?;
if let '%' = s[start + 1..].chars().next()? {
return Some((Substitution::Escape, &s[start + 2..]));
}
Cur::new_at(&s[..], start)
};
// This is meant to be a translation of the following regex:
//
// ```regex
// (?x)
// ^ %
// (?: (?P<parameter> \d+) \$ )?
// (?P<flags> [-+ 0\#']* )
// (?P<width> \d+ | \* (?: (?P<widtha> \d+) \$ )? )?
// (?: \. (?P<precision> \d+ | \* (?: (?P<precisiona> \d+) \$ )? ) )?
// (?P<length>
// # Standard
// hh | h | ll | l | L | z | j | t
//
// # Other
// | I32 | I64 | I | q
// )?
// (?P<type> . )
// ```
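// As a worked example (mirroring the tests below), `"%1$*2$.*3$d"` parses
// into a `Format` with `parameter: Some(1)`, `width: Some(Num::Arg(2))`,
// `precision: Some(Num::Arg(3))`, and `type_: "d"`.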
// Used to establish the full span at the end.
let start = at;
// The current position within the string.
let mut at = at.at_next_cp()?;
// `c` is the next codepoint, `next` is a cursor after it.
let (mut c, mut next) = at.next_cp()?;
// Update `at`, `c`, and `next`, exiting if we're out of input.
macro_rules! move_to {
($cur:expr) => {{
at = $cur;
let (c_, next_) = at.next_cp()?;
c = c_;
next = next_;
}};
}
// Constructs a result when parsing fails.
//
// Note: `move` used to capture copies of the cursors as they are *now*.
let fallback = move || {
Some((
Substitution::Format(Format {
span: start.slice_between(next).unwrap(),
parameter: None,
flags: "",
width: None,
precision: None,
length: None,
type_: at.slice_between(next).unwrap(),
position: InnerSpan::new(start.at, next.at),
}),
next.slice_after(),
))
};
// Next parsing state.
let mut state = Start;
// Sadly, Rust isn't *quite* smart enough to know these *must* be initialised by the end.
let mut parameter: Option<u16> = None;
let mut flags: &str = "";
let mut width: Option<Num> = None;
let mut precision: Option<Num> = None;
let mut length: Option<&str> = None;
let mut type_: &str = "";
let end: Cur<'_>;
if let Start = state {
match c {
'1'..='9' => {
let end = at_next_cp_while(next, is_digit);
match end.next_cp() {
// Yes, this *is* the parameter.
Some(('$', end2)) => {
state = Flags;
parameter = Some(at.slice_between(end).unwrap().parse().unwrap());
move_to!(end2);
}
// Wait, no, actually, it's the width.
Some(_) => {
state = Prec;
parameter = None;
flags = "";
width = Some(Num::from_str(at.slice_between(end).unwrap(), None));
move_to!(end);
}
// It's invalid, is what it is.
None => return fallback(),
}
}
_ => {
state = Flags;
parameter = None;
move_to!(at);
}
}
}
if let Flags = state {
let end = at_next_cp_while(at, is_flag);
state = Width;
flags = at.slice_between(end).unwrap();
move_to!(end);
}
if let Width = state {
match c {
'*' => {
state = WidthArg;
move_to!(next);
}
'1'..='9' => {
let end = at_next_cp_while(next, is_digit);
state = Prec;
width = Some(Num::from_str(at.slice_between(end).unwrap(), None));
move_to!(end);
}
_ => {
state = Prec;
width = None;
move_to!(at);
}
}
}
if let WidthArg = state {
let end = at_next_cp_while(at, is_digit);
match end.next_cp() {
Some(('$', end2)) => {
state = Prec;
width = Some(Num::from_str("", Some(at.slice_between(end).unwrap())));
move_to!(end2);
}
_ => {
state = Prec;
width = Some(Num::Next);
move_to!(end);
}
}
}
if let Prec = state {
match c {
'.' => {
state = PrecInner;
move_to!(next);
}
_ => {
state = Length;
precision = None;
move_to!(at);
}
}
}
if let PrecInner = state {
match c {
'*' => {
let end = at_next_cp_while(next, is_digit);
match end.next_cp() {
Some(('$', end2)) => {
state = Length;
precision = Some(Num::from_str("*", next.slice_between(end)));
move_to!(end2);
}
_ => {
state = Length;
precision = Some(Num::Next);
move_to!(end);
}
}
}
'0'..='9' => {
let end = at_next_cp_while(next, is_digit);
state = Length;
precision = Some(Num::from_str(at.slice_between(end).unwrap(), None));
move_to!(end);
}
_ => return fallback(),
}
}
if let Length = state {
let c1_next1 = next.next_cp();
match (c, c1_next1) {
('h', Some(('h', next1))) | ('l', Some(('l', next1))) => {
state = Type;
length = Some(at.slice_between(next1).unwrap());
move_to!(next1);
}
('h' | 'l' | 'L' | 'z' | 'j' | 't' | 'q', _) => {
state = Type;
length = Some(at.slice_between(next).unwrap());
move_to!(next);
}
('I', _) => {
let end = next
.at_next_cp()
.and_then(|end| end.at_next_cp())
.map(|end| (next.slice_between(end).unwrap(), end));
let end = match end {
Some(("32", end)) => end,
Some(("64", end)) => end,
_ => next,
};
state = Type;
length = Some(at.slice_between(end).unwrap());
move_to!(end);
}
_ => {
state = Type;
length = None;
move_to!(at);
}
}
}
if let Type = state {
drop(c);
type_ = at.slice_between(next).unwrap();
// Don't use `move_to!` here, as we *can* be at the end of the input.
at = next;
}
drop(c);
drop(next);
end = at;
let position = InnerSpan::new(start.at, end.at);
let f = Format {
span: start.slice_between(end).unwrap(),
parameter,
flags,
width,
precision,
length,
type_,
position,
};
Some((Substitution::Format(f), end.slice_after()))
}
fn at_next_cp_while<F>(mut cur: Cur<'_>, mut pred: F) -> Cur<'_>
where
F: FnMut(char) -> bool,
{
loop {
match cur.next_cp() {
Some((c, next)) => {
if pred(c) {
cur = next;
} else {
return cur;
}
}
None => return cur,
}
}
}
fn is_digit(c: char) -> bool {
match c {
'0'..='9' => true,
_ => false,
}
}
fn is_flag(c: char) -> bool {
match c {
'0' | '-' | '+' | ' ' | '#' | '\'' => true,
_ => false,
}
}
#[cfg(test)]
mod tests;
}
pub mod shell {
use super::strcursor::StrCursor as Cur;
use rustc_span::InnerSpan;
#[derive(Clone, PartialEq, Debug)]
pub enum Substitution<'a> {
Ordinal(u8, (usize, usize)),
Name(&'a str, (usize, usize)),
Escape((usize, usize)),
}
impl Substitution<'_> {
pub fn as_str(&self) -> String {
match self {
Substitution::Ordinal(n, _) => format!("${}", n),
Substitution::Name(n, _) => format!("${}", n),
Substitution::Escape(_) => "$$".into(),
}
}
pub fn position(&self) -> Option<InnerSpan> {
match self {
Substitution::Ordinal(_, pos)
| Substitution::Name(_, pos)
| Substitution::Escape(pos) => Some(InnerSpan::new(pos.0, pos.1)),
}
}
pub fn set_position(&mut self, start: usize, end: usize) {
match self {
Substitution::Ordinal(_, ref mut pos)
| Substitution::Name(_, ref mut pos)
| Substitution::Escape(ref mut pos) => *pos = (start, end),
}
}
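/// Translates this substitution into an equivalent Rust formatting
/// directive, e.g. `$0` becomes `{0}` and `$NAME` becomes `{NAME}`;
/// escapes (`$$`) have no Rust equivalent and yield `None`.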
pub fn translate(&self) -> Option<String> {
match *self {
Substitution::Ordinal(n, _) => Some(format!("{{{}}}", n)),
Substitution::Name(n, _) => Some(format!("{{{}}}", n)),
Substitution::Escape(_) => None,
}
}
}
/// Returns an iterator over all substitutions in a given string.
pub fn iter_subs(s: &str, start_pos: usize) -> Substitutions<'_> {
Substitutions { s, pos: start_pos }
}
/// Iterator over substitutions in a string.
pub struct Substitutions<'a> {
s: &'a str,
pos: usize,
}
impl<'a> Iterator for Substitutions<'a> {
type Item = Substitution<'a>;
fn next(&mut self) -> Option<Self::Item> {
match parse_next_substitution(self.s) {
Some((mut sub, tail)) => {
self.s = tail;
if let Some(InnerSpan { start, end }) = sub.position() {
sub.set_position(start + self.pos, end + self.pos);
self.pos += end;
}
Some(sub)
}
None => None,
}
}
fn size_hint(&self) -> (usize, Option<usize>) {
(0, Some(self.s.len()))
}
}
/// Parse the next substitution from the input string.
pub fn parse_next_substitution(s: &str) -> Option<(Substitution<'_>, &str)> {
let at = {
let start = s.find('$')?;
match s[start + 1..].chars().next()? {
'$' => return Some((Substitution::Escape((start, start + 2)), &s[start + 2..])),
c @ '0'..='9' => {
let n = (c as u8) - b'0';
return Some((Substitution::Ordinal(n, (start, start + 2)), &s[start + 2..]));
}
_ => { /* fall-through */ }
}
Cur::new_at(&s[..], start)
};
let at = at.at_next_cp()?;
let (c, inner) = at.next_cp()?;
if !is_ident_head(c) {
None
} else {
let end = at_next_cp_while(inner, is_ident_tail);
let slice = at.slice_between(end).unwrap();
let start = at.at - 1;
let end_pos = at.at + slice.len();
Some((Substitution::Name(slice, (start, end_pos)), end.slice_after()))
}
}
fn at_next_cp_while<F>(mut cur: Cur<'_>, mut pred: F) -> Cur<'_>
where
F: FnMut(char) -> bool,
{
loop {
match cur.next_cp() {
Some((c, next)) => {
if pred(c) {
cur = next;
} else {
return cur;
}
}
None => return cur,
}
}
}
fn is_ident_head(c: char) -> bool {
match c {
'a'..='z' | 'A'..='Z' | '_' => true,
_ => false,
}
}
fn is_ident_tail(c: char) -> bool {
match c {
'0'..='9' => true,
c => is_ident_head(c),
}
}
#[cfg(test)]
mod tests;
}
mod strcursor {
pub struct StrCursor<'a> {
s: &'a str,
pub at: usize,
}
impl<'a> StrCursor<'a> {
pub fn new_at(s: &'a str, at: usize) -> StrCursor<'a> {
StrCursor { s, at }
}
pub fn at_next_cp(mut self) -> Option<StrCursor<'a>> {
match self.try_seek_right_cp() {
true => Some(self),
false => None,
}
}
pub fn next_cp(mut self) -> Option<(char, StrCursor<'a>)> {
let cp = self.cp_after()?;
self.seek_right(cp.len_utf8());
Some((cp, self))
}
fn slice_before(&self) -> &'a str {
&self.s[0..self.at]
}
pub fn slice_after(&self) -> &'a str {
&self.s[self.at..]
}
pub fn slice_between(&self, until: StrCursor<'a>) -> Option<&'a str> {
if !str_eq_literal(self.s, until.s) {
None
} else {
use std::cmp::{max, min};
let beg = min(self.at, until.at);
let end = max(self.at, until.at);
Some(&self.s[beg..end])
}
}
fn cp_after(&self) -> Option<char> {
self.slice_after().chars().next()
}
fn try_seek_right_cp(&mut self) -> bool {
match self.slice_after().chars().next() {
Some(c) => {
self.at += c.len_utf8();
true
}
None => false,
}
}
fn seek_right(&mut self, bytes: usize) {
self.at += bytes;
}
}
impl Copy for StrCursor<'_> {}
impl<'a> Clone for StrCursor<'a> {
fn clone(&self) -> StrCursor<'a> {
*self
}
}
impl std::fmt::Debug for StrCursor<'_> {
fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
write!(fmt, "StrCursor({:?} | {:?})", self.slice_before(), self.slice_after())
}
}
fn str_eq_literal(a: &str, b: &str) -> bool {
a.as_bytes().as_ptr() == b.as_bytes().as_ptr() && a.len() == b.len()
}
}


@ -0,0 +1,145 @@
use super::{iter_subs, parse_next_substitution as pns, Format as F, Num as N, Substitution as S};
macro_rules! assert_eq_pnsat {
($lhs:expr, $rhs:expr) => {
assert_eq!(
pns($lhs).and_then(|(s, _)| s.translate()),
$rhs.map(<String as From<&str>>::from)
)
};
}
#[test]
fn test_escape() {
assert_eq!(pns("has no escapes"), None);
assert_eq!(pns("has no escapes, either %"), None);
assert_eq!(pns("*so* has a %% escape"), Some((S::Escape, " escape")));
assert_eq!(pns("%% leading escape"), Some((S::Escape, " leading escape")));
assert_eq!(pns("trailing escape %%"), Some((S::Escape, "")));
}
#[test]
fn test_parse() {
macro_rules! assert_pns_eq_sub {
($in_:expr, {
$param:expr, $flags:expr,
$width:expr, $prec:expr, $len:expr, $type_:expr,
$pos:expr,
}) => {
assert_eq!(
pns(concat!($in_, "!")),
Some((
S::Format(F {
span: $in_,
parameter: $param,
flags: $flags,
width: $width,
precision: $prec,
length: $len,
type_: $type_,
position: rustc_span::InnerSpan::new($pos.0, $pos.1),
}),
"!"
))
)
};
}
assert_pns_eq_sub!("%!",
{ None, "", None, None, None, "!", (0, 2), });
assert_pns_eq_sub!("%c",
{ None, "", None, None, None, "c", (0, 2), });
assert_pns_eq_sub!("%s",
{ None, "", None, None, None, "s", (0, 2), });
assert_pns_eq_sub!("%06d",
{ None, "0", Some(N::Num(6)), None, None, "d", (0, 4), });
assert_pns_eq_sub!("%4.2f",
{ None, "", Some(N::Num(4)), Some(N::Num(2)), None, "f", (0, 5), });
assert_pns_eq_sub!("%#x",
{ None, "#", None, None, None, "x", (0, 3), });
assert_pns_eq_sub!("%-10s",
{ None, "-", Some(N::Num(10)), None, None, "s", (0, 5), });
assert_pns_eq_sub!("%*s",
{ None, "", Some(N::Next), None, None, "s", (0, 3), });
assert_pns_eq_sub!("%-10.*s",
{ None, "-", Some(N::Num(10)), Some(N::Next), None, "s", (0, 7), });
assert_pns_eq_sub!("%-*.*s",
{ None, "-", Some(N::Next), Some(N::Next), None, "s", (0, 6), });
assert_pns_eq_sub!("%.6i",
{ None, "", None, Some(N::Num(6)), None, "i", (0, 4), });
assert_pns_eq_sub!("%+i",
{ None, "+", None, None, None, "i", (0, 3), });
assert_pns_eq_sub!("%08X",
{ None, "0", Some(N::Num(8)), None, None, "X", (0, 4), });
assert_pns_eq_sub!("%lu",
{ None, "", None, None, Some("l"), "u", (0, 3), });
assert_pns_eq_sub!("%Iu",
{ None, "", None, None, Some("I"), "u", (0, 3), });
assert_pns_eq_sub!("%I32u",
{ None, "", None, None, Some("I32"), "u", (0, 5), });
assert_pns_eq_sub!("%I64u",
{ None, "", None, None, Some("I64"), "u", (0, 5), });
assert_pns_eq_sub!("%'d",
{ None, "'", None, None, None, "d", (0, 3), });
assert_pns_eq_sub!("%10s",
{ None, "", Some(N::Num(10)), None, None, "s", (0, 4), });
assert_pns_eq_sub!("%-10.10s",
{ None, "-", Some(N::Num(10)), Some(N::Num(10)), None, "s", (0, 8), });
assert_pns_eq_sub!("%1$d",
{ Some(1), "", None, None, None, "d", (0, 4), });
assert_pns_eq_sub!("%2$.*3$d",
{ Some(2), "", None, Some(N::Arg(3)), None, "d", (0, 8), });
assert_pns_eq_sub!("%1$*2$.*3$d",
{ Some(1), "", Some(N::Arg(2)), Some(N::Arg(3)), None, "d", (0, 11), });
assert_pns_eq_sub!("%-8ld",
{ None, "-", Some(N::Num(8)), None, Some("l"), "d", (0, 5), });
}
#[test]
fn test_iter() {
let s = "The %d'th word %% is: `%.*s` %!\n";
let subs: Vec<_> = iter_subs(s, 0).map(|sub| sub.translate()).collect();
assert_eq!(
subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(),
vec![Some("{}"), None, Some("{:.*}"), None]
);
}
/// Checks that the translations are what we expect.
#[test]
fn test_translation() {
assert_eq_pnsat!("%c", Some("{}"));
assert_eq_pnsat!("%d", Some("{}"));
assert_eq_pnsat!("%u", Some("{}"));
assert_eq_pnsat!("%x", Some("{:x}"));
assert_eq_pnsat!("%X", Some("{:X}"));
assert_eq_pnsat!("%e", Some("{:e}"));
assert_eq_pnsat!("%E", Some("{:E}"));
assert_eq_pnsat!("%f", Some("{}"));
assert_eq_pnsat!("%g", Some("{:e}"));
assert_eq_pnsat!("%G", Some("{:E}"));
assert_eq_pnsat!("%s", Some("{}"));
assert_eq_pnsat!("%p", Some("{:p}"));
assert_eq_pnsat!("%06d", Some("{:06}"));
assert_eq_pnsat!("%4.2f", Some("{:4.2}"));
assert_eq_pnsat!("%#x", Some("{:#x}"));
assert_eq_pnsat!("%-10s", Some("{:<10}"));
assert_eq_pnsat!("%*s", None);
assert_eq_pnsat!("%-10.*s", Some("{:<10.*}"));
assert_eq_pnsat!("%-*.*s", None);
assert_eq_pnsat!("%.6i", Some("{:06}"));
assert_eq_pnsat!("%+i", Some("{:+}"));
assert_eq_pnsat!("%08X", Some("{:08X}"));
assert_eq_pnsat!("%lu", Some("{}"));
assert_eq_pnsat!("%Iu", Some("{}"));
assert_eq_pnsat!("%I32u", Some("{}"));
assert_eq_pnsat!("%I64u", Some("{}"));
assert_eq_pnsat!("%'d", None);
assert_eq_pnsat!("%10s", Some("{:>10}"));
assert_eq_pnsat!("%-10.10s", Some("{:<10.10}"));
assert_eq_pnsat!("%1$d", Some("{0}"));
assert_eq_pnsat!("%2$.*3$d", Some("{1:02$}"));
assert_eq_pnsat!("%1$*2$.*3$s", Some("{0:>1$.2$}"));
assert_eq_pnsat!("%-8ld", Some("{:<8}"));
}


@ -0,0 +1,56 @@
use super::{parse_next_substitution as pns, Substitution as S};
macro_rules! assert_eq_pnsat {
($lhs:expr, $rhs:expr) => {
assert_eq!(
pns($lhs).and_then(|(f, _)| f.translate()),
$rhs.map(<String as From<&str>>::from)
)
};
}
#[test]
fn test_escape() {
assert_eq!(pns("has no escapes"), None);
assert_eq!(pns("has no escapes, either $"), None);
assert_eq!(pns("*so* has a $$ escape"), Some((S::Escape((11, 13)), " escape")));
assert_eq!(pns("$$ leading escape"), Some((S::Escape((0, 2)), " leading escape")));
assert_eq!(pns("trailing escape $$"), Some((S::Escape((16, 18)), "")));
}
#[test]
fn test_parse() {
macro_rules! assert_pns_eq_sub {
($in_:expr, $kind:ident($arg:expr, $pos:expr)) => {
assert_eq!(pns(concat!($in_, "!")), Some((S::$kind($arg.into(), $pos), "!")))
};
}
assert_pns_eq_sub!("$0", Ordinal(0, (0, 2)));
assert_pns_eq_sub!("$1", Ordinal(1, (0, 2)));
assert_pns_eq_sub!("$9", Ordinal(9, (0, 2)));
assert_pns_eq_sub!("$N", Name("N", (0, 2)));
assert_pns_eq_sub!("$NAME", Name("NAME", (0, 5)));
}
#[test]
fn test_iter() {
use super::iter_subs;
let s = "The $0'th word $$ is: `$WORD` $!\n";
let subs: Vec<_> = iter_subs(s, 0).map(|sub| sub.translate()).collect();
assert_eq!(
subs.iter().map(|ms| ms.as_ref().map(|s| &s[..])).collect::<Vec<_>>(),
vec![Some("{0}"), None, Some("{WORD}")]
);
}
#[test]
fn test_translation() {
assert_eq_pnsat!("$0", Some("{0}"));
assert_eq_pnsat!("$9", Some("{9}"));
assert_eq_pnsat!("$1", Some("{1}"));
assert_eq_pnsat!("$10", Some("{1}"));
assert_eq_pnsat!("$stuff", Some("{stuff}"));
assert_eq_pnsat!("$NAME", Some("{NAME}"));
assert_eq_pnsat!("$PREFIX/bin", Some("{PREFIX}"));
}


@ -0,0 +1,171 @@
use crate::util::check_builtin_macro_attribute;
use rustc_ast::expand::allocator::{
AllocatorKind, AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS,
};
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, Attribute, Expr, FnHeader, FnSig, Generics, Param};
use rustc_ast::{ItemKind, Mutability, Stmt, Ty, TyKind, Unsafe};
use rustc_expand::base::{Annotatable, ExtCtxt};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::Span;
pub fn expand(
ecx: &mut ExtCtxt<'_>,
_span: Span,
meta_item: &ast::MetaItem,
item: Annotatable,
) -> Vec<Annotatable> {
check_builtin_macro_attribute(ecx, meta_item, sym::global_allocator);
let not_static = |item: Annotatable| {
ecx.sess.parse_sess.span_diagnostic.span_err(item.span(), "allocators must be statics");
vec![item]
};
let item = match item {
Annotatable::Item(item) => match item.kind {
ItemKind::Static(..) => item,
_ => return not_static(Annotatable::Item(item)),
},
_ => return not_static(item),
};
// Generate a bunch of new items using the AllocFnFactory
let span = ecx.with_def_site_ctxt(item.span);
let f = AllocFnFactory { span, kind: AllocatorKind::Global, global: item.ident, cx: ecx };
// Generate item statements for the allocator methods.
let stmts = ALLOCATOR_METHODS.iter().map(|method| f.allocator_fn(method)).collect();
// Generate anonymous constant serving as container for the allocator methods.
let const_ty = ecx.ty(span, TyKind::Tup(Vec::new()));
let const_body = ecx.expr_block(ecx.block(span, stmts));
let const_item = ecx.item_const(span, Ident::new(kw::Underscore, span), const_ty, const_body);
// Return the original item and the new methods.
vec![Annotatable::Item(item), Annotatable::Item(const_item)]
}
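// Sketch of the result (names illustrative; the exact symbol names come from
// `AllocatorKind::Global.fn_name(method.name)`): for a hypothetical
// `#[global_allocator] static A: MyAlloc = MyAlloc;`, the original static is
// kept and an anonymous `const _: () = { ... };` is appended containing one
// `#[rustc_std_internal_symbol]` unsafe fn per `ALLOCATOR_METHODS` entry,
// each forwarding to the matching `GlobalAlloc` method with `&A` as the
// receiver argument.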
struct AllocFnFactory<'a, 'b> {
span: Span,
kind: AllocatorKind,
global: Ident,
cx: &'b ExtCtxt<'a>,
}
impl AllocFnFactory<'_, '_> {
fn allocator_fn(&self, method: &AllocatorMethod) -> Stmt {
let mut abi_args = Vec::new();
let mut i = 0;
let mut mk = || {
let name = Ident::from_str_and_span(&format!("arg{}", i), self.span);
i += 1;
name
};
let args = method.inputs.iter().map(|ty| self.arg_ty(ty, &mut abi_args, &mut mk)).collect();
let result = self.call_allocator(method.name, args);
let (output_ty, output_expr) = self.ret_ty(&method.output, result);
let decl = self.cx.fn_decl(abi_args, ast::FnRetTy::Ty(output_ty));
let header = FnHeader { unsafety: Unsafe::Yes(self.span), ..FnHeader::default() };
let sig = FnSig { decl, header, span: self.span };
let block = Some(self.cx.block_expr(output_expr));
let kind = ItemKind::Fn(ast::Defaultness::Final, sig, Generics::default(), block);
let item = self.cx.item(
self.span,
Ident::from_str_and_span(&self.kind.fn_name(method.name), self.span),
self.attrs(),
kind,
);
self.cx.stmt_item(self.span, item)
}
fn call_allocator(&self, method: Symbol, mut args: Vec<P<Expr>>) -> P<Expr> {
let method = self.cx.std_path(&[sym::alloc, sym::GlobalAlloc, method]);
let method = self.cx.expr_path(self.cx.path(self.span, method));
let allocator = self.cx.path_ident(self.span, self.global);
let allocator = self.cx.expr_path(allocator);
let allocator = self.cx.expr_addr_of(self.span, allocator);
args.insert(0, allocator);
self.cx.expr_call(self.span, method, args)
}
fn attrs(&self) -> Vec<Attribute> {
let special = sym::rustc_std_internal_symbol;
let special = self.cx.meta_word(self.span, special);
vec![self.cx.attribute(special)]
}
fn arg_ty(
&self,
ty: &AllocatorTy,
args: &mut Vec<Param>,
ident: &mut dyn FnMut() -> Ident,
) -> P<Expr> {
match *ty {
AllocatorTy::Layout => {
let usize = self.cx.path_ident(self.span, Ident::new(sym::usize, self.span));
let ty_usize = self.cx.ty_path(usize);
let size = ident();
let align = ident();
args.push(self.cx.param(self.span, size, ty_usize.clone()));
args.push(self.cx.param(self.span, align, ty_usize));
let layout_new =
self.cx.std_path(&[sym::alloc, sym::Layout, sym::from_size_align_unchecked]);
let layout_new = self.cx.expr_path(self.cx.path(self.span, layout_new));
let size = self.cx.expr_ident(self.span, size);
let align = self.cx.expr_ident(self.span, align);
let layout = self.cx.expr_call(self.span, layout_new, vec![size, align]);
layout
}
AllocatorTy::Ptr => {
let ident = ident();
args.push(self.cx.param(self.span, ident, self.ptr_u8()));
let arg = self.cx.expr_ident(self.span, ident);
self.cx.expr_cast(self.span, arg, self.ptr_u8())
}
AllocatorTy::Usize => {
let ident = ident();
args.push(self.cx.param(self.span, ident, self.usize()));
self.cx.expr_ident(self.span, ident)
}
AllocatorTy::ResultPtr | AllocatorTy::Unit => {
panic!("can't convert AllocatorTy to an argument")
}
}
}
fn ret_ty(&self, ty: &AllocatorTy, expr: P<Expr>) -> (P<Ty>, P<Expr>) {
match *ty {
AllocatorTy::ResultPtr => {
// We're creating:
//
// #expr as *mut u8
let expr = self.cx.expr_cast(self.span, expr, self.ptr_u8());
(self.ptr_u8(), expr)
}
AllocatorTy::Unit => (self.cx.ty(self.span, TyKind::Tup(Vec::new())), expr),
AllocatorTy::Layout | AllocatorTy::Usize | AllocatorTy::Ptr => {
panic!("can't convert `AllocatorTy` to an output")
}
}
}
fn usize(&self) -> P<Ty> {
let usize = self.cx.path_ident(self.span, Ident::new(sym::usize, self.span));
self.cx.ty_path(usize)
}
fn ptr_u8(&self) -> P<Ty> {
let u8 = self.cx.path_ident(self.span, Ident::new(sym::u8, self.span));
let ty_u8 = self.cx.ty_path(u8);
self.cx.ty_ptr(self.span, ty_u8, Mutability::Mut)
}
}


@ -0,0 +1,65 @@
//! Module-level assembly support.
//!
//! The macro defined here allows you to specify "top-level",
//! "file-scoped", or "module-level" assembly. These synonyms
//! all correspond to LLVM's module-level inline assembly instruction.
//!
//! For example, `global_asm!("some assembly here")` codegens to
//! LLVM's `module asm "some assembly here"`. All of LLVM's caveats
//! therefore apply.
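//!
//! As a rough illustration (assuming the `global_asm` feature is enabled and
//! the assembly matches the target), usage looks like:
//!
//! ```rust,ignore (requires #![feature(global_asm)])
//! global_asm!(".globl my_asm_fn\nmy_asm_fn:\n    ret");
//! ```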
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_errors::DiagnosticBuilder;
use rustc_expand::base::{self, *};
use rustc_span::source_map::respan;
use rustc_span::symbol::Ident;
use rustc_span::Span;
use smallvec::smallvec;
pub fn expand_global_asm<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
match parse_global_asm(cx, sp, tts) {
Ok(Some(global_asm)) => MacEager::items(smallvec![P(ast::Item {
ident: Ident::invalid(),
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
kind: ast::ItemKind::GlobalAsm(P(global_asm)),
vis: respan(sp.shrink_to_lo(), ast::VisibilityKind::Inherited),
span: cx.with_def_site_ctxt(sp),
tokens: None,
})]),
Ok(None) => DummyResult::any(sp),
Err(mut err) => {
err.emit();
DummyResult::any(sp)
}
}
}
fn parse_global_asm<'a>(
cx: &mut ExtCtxt<'a>,
sp: Span,
tts: TokenStream,
) -> Result<Option<ast::GlobalAsm>, DiagnosticBuilder<'a>> {
let mut p = cx.new_parser_from_tts(tts);
if p.token == token::Eof {
let mut err = cx.struct_span_err(sp, "macro requires a string literal as an argument");
err.span_label(sp, "string literal required");
return Err(err);
}
let expr = p.parse_expr()?;
let (asm, _) = match expr_to_string(cx, expr, "inline assembly must be a string literal") {
Some((s, st)) => (s, st),
None => return Ok(None),
};
Ok(Some(ast::GlobalAsm { asm }))
}


@ -0,0 +1,113 @@
//! This crate contains implementations of built-in macros and other code generating facilities
//! injecting code into the crate before it is lowered to HIR.
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(bool_to_option)]
#![feature(crate_visibility_modifier)]
#![feature(decl_macro)]
#![feature(nll)]
#![feature(or_patterns)]
#![feature(proc_macro_internals)]
#![feature(proc_macro_quote)]
extern crate proc_macro;
use crate::deriving::*;
use rustc_expand::base::{MacroExpanderFn, ResolverExpand, SyntaxExtension, SyntaxExtensionKind};
use rustc_expand::proc_macro::BangProcMacro;
use rustc_span::edition::Edition;
use rustc_span::symbol::{sym, Ident};
mod asm;
mod assert;
mod cfg;
mod cfg_accessible;
mod compile_error;
mod concat;
mod concat_idents;
mod deriving;
mod env;
mod format;
mod format_foreign;
mod global_allocator;
mod global_asm;
mod llvm_asm;
mod log_syntax;
mod source_util;
mod test;
mod trace_macros;
mod util;
pub mod cmdline_attrs;
pub mod proc_macro_harness;
pub mod standard_library_imports;
pub mod test_harness;
pub fn register_builtin_macros(resolver: &mut dyn ResolverExpand, edition: Edition) {
let mut register = |name, kind| {
resolver.register_builtin_macro(
Ident::with_dummy_span(name),
SyntaxExtension { is_builtin: true, ..SyntaxExtension::default(kind, edition) },
)
};
macro register_bang($($name:ident: $f:expr,)*) {
$(register(sym::$name, SyntaxExtensionKind::LegacyBang(Box::new($f as MacroExpanderFn)));)*
}
macro register_attr($($name:ident: $f:expr,)*) {
$(register(sym::$name, SyntaxExtensionKind::LegacyAttr(Box::new($f)));)*
}
macro register_derive($($name:ident: $f:expr,)*) {
$(register(sym::$name, SyntaxExtensionKind::LegacyDerive(Box::new(BuiltinDerive($f))));)*
}
register_bang! {
asm: asm::expand_asm,
assert: assert::expand_assert,
cfg: cfg::expand_cfg,
column: source_util::expand_column,
compile_error: compile_error::expand_compile_error,
concat_idents: concat_idents::expand_concat_idents,
concat: concat::expand_concat,
env: env::expand_env,
file: source_util::expand_file,
format_args_nl: format::expand_format_args_nl,
format_args: format::expand_format_args,
global_asm: global_asm::expand_global_asm,
include_bytes: source_util::expand_include_bytes,
include_str: source_util::expand_include_str,
include: source_util::expand_include,
line: source_util::expand_line,
llvm_asm: llvm_asm::expand_llvm_asm,
log_syntax: log_syntax::expand_log_syntax,
module_path: source_util::expand_mod,
option_env: env::expand_option_env,
stringify: source_util::expand_stringify,
trace_macros: trace_macros::expand_trace_macros,
}
register_attr! {
bench: test::expand_bench,
cfg_accessible: cfg_accessible::Expander,
global_allocator: global_allocator::expand,
test: test::expand_test,
test_case: test::expand_test_case,
}
register_derive! {
Clone: clone::expand_deriving_clone,
Copy: bounds::expand_deriving_copy,
Debug: debug::expand_deriving_debug,
Default: default::expand_deriving_default,
Eq: eq::expand_deriving_eq,
Hash: hash::expand_deriving_hash,
Ord: ord::expand_deriving_ord,
PartialEq: partial_eq::expand_deriving_partial_eq,
PartialOrd: partial_ord::expand_deriving_partial_ord,
RustcDecodable: decodable::expand_deriving_rustc_decodable,
RustcEncodable: encodable::expand_deriving_rustc_encodable,
}
let client = proc_macro::bridge::client::Client::expand1(proc_macro::quote);
register(sym::quote, SyntaxExtensionKind::Bang(Box::new(BangProcMacro { client })));
}


@ -0,0 +1,301 @@
// LLVM-style inline assembly support.
//
use State::*;
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token::{self, Token};
use rustc_ast::tokenstream::{self, TokenStream};
use rustc_ast::LlvmAsmDialect;
use rustc_errors::{struct_span_err, DiagnosticBuilder, PResult};
use rustc_expand::base::*;
use rustc_parse::parser::Parser;
use rustc_span::symbol::{kw, sym, Symbol};
use rustc_span::Span;
enum State {
Asm,
Outputs,
Inputs,
Clobbers,
Options,
StateNone,
}
impl State {
fn next(&self) -> State {
match *self {
Asm => Outputs,
Outputs => Inputs,
Inputs => Clobbers,
Clobbers => Options,
Options => StateNone,
StateNone => StateNone,
}
}
}
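// The legacy macro takes colon-separated sections in the order given by
// `State::next` (template : outputs : inputs : clobbers : options), e.g.
// (illustrative only):
//
//     llvm_asm!("mov $1, $0" : "=r"(dst) : "r"(src) : : "volatile");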
const OPTIONS: &[Symbol] = &[sym::volatile, sym::alignstack, sym::intel];
pub fn expand_llvm_asm<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn MacResult + 'cx> {
let mut inline_asm = match parse_inline_asm(cx, sp, tts) {
Ok(Some(inline_asm)) => inline_asm,
Ok(None) => return DummyResult::any(sp),
Err(mut err) => {
err.emit();
return DummyResult::any(sp);
}
};
// If there are no outputs, the inline assembly is executed just for its side effects,
// so ensure that it is volatile
if inline_asm.outputs.is_empty() {
inline_asm.volatile = true;
}
MacEager::expr(P(ast::Expr {
id: ast::DUMMY_NODE_ID,
kind: ast::ExprKind::LlvmInlineAsm(P(inline_asm)),
span: cx.with_def_site_ctxt(sp),
attrs: ast::AttrVec::new(),
tokens: None,
}))
}
fn parse_asm_str<'a>(p: &mut Parser<'a>) -> PResult<'a, Symbol> {
match p.parse_str_lit() {
Ok(str_lit) => Ok(str_lit.symbol_unescaped),
Err(opt_lit) => {
let span = opt_lit.map_or(p.token.span, |lit| lit.span);
let mut err = p.sess.span_diagnostic.struct_span_err(span, "expected string literal");
err.span_label(span, "not a string literal");
Err(err)
}
}
}
fn parse_inline_asm<'a>(
cx: &mut ExtCtxt<'a>,
sp: Span,
tts: TokenStream,
) -> Result<Option<ast::LlvmInlineAsm>, DiagnosticBuilder<'a>> {
// Split the tts before the first colon, to avoid `llvm_asm!("x": y)` being
// parsed as `llvm_asm!(z)` with `z = "x": y` which is type ascription.
let first_colon = tts
.trees()
.position(|tt| match tt {
tokenstream::TokenTree::Token(Token { kind: token::Colon | token::ModSep, .. }) => true,
_ => false,
})
.unwrap_or(tts.len());
let mut p = cx.new_parser_from_tts(tts.trees().skip(first_colon).collect());
let mut asm = kw::Invalid;
let mut asm_str_style = None;
let mut outputs = Vec::new();
let mut inputs = Vec::new();
let mut clobs = Vec::new();
let mut volatile = false;
let mut alignstack = false;
let mut dialect = LlvmAsmDialect::Att;
let mut state = Asm;
'statement: loop {
match state {
Asm => {
if asm_str_style.is_some() {
// If we already have a string with instructions,
// ending up in Asm state again is an error.
return Err(struct_span_err!(
cx.sess.parse_sess.span_diagnostic,
sp,
E0660,
"malformed inline assembly"
));
}
// Nested parser, stop before the first colon (see above).
let mut p2 = cx.new_parser_from_tts(tts.trees().take(first_colon).collect());
if p2.token == token::Eof {
let mut err =
cx.struct_span_err(sp, "macro requires a string literal as an argument");
err.span_label(sp, "string literal required");
return Err(err);
}
let expr = p2.parse_expr()?;
let (s, style) =
match expr_to_string(cx, expr, "inline assembly must be a string literal") {
Some((s, st)) => (s, st),
None => return Ok(None),
};
// Anything left over after the string literal is most likely malformed;
// keep parsing it so that errors are reported against those tokens.
if p2.token != token::Eof {
let mut extra_tts = p2.parse_all_token_trees()?;
extra_tts.extend(tts.trees().skip(first_colon));
p = cx.new_parser_from_tts(extra_tts.into_iter().collect());
}
asm = s;
asm_str_style = Some(style);
}
Outputs => {
while p.token != token::Eof && p.token != token::Colon && p.token != token::ModSep {
if !outputs.is_empty() {
p.eat(&token::Comma);
}
let constraint = parse_asm_str(&mut p)?;
let span = p.prev_token.span;
p.expect(&token::OpenDelim(token::Paren))?;
let expr = p.parse_expr()?;
p.expect(&token::CloseDelim(token::Paren))?;
// Expands a read+write operand into two operands.
//
// Use '+' modifier when you want the same expression
// to be both an input and an output at the same time.
// It's the opposite of '=&' which means that the memory
// cannot be shared with any other operand (usually when
// a register is clobbered early).
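// For example, the constraint "+r" below is rewritten to the output
// constraint "=r" with `is_rw` set to true.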
let constraint_str = constraint.as_str();
let mut ch = constraint_str.chars();
let output = match ch.next() {
Some('=') => None,
Some('+') => Some(Symbol::intern(&format!("={}", ch.as_str()))),
_ => {
struct_span_err!(
cx.sess.parse_sess.span_diagnostic,
span,
E0661,
"output operand constraint lacks '=' or '+'"
)
.emit();
None
}
};
let is_rw = output.is_some();
let is_indirect = constraint_str.contains('*');
outputs.push(ast::LlvmInlineAsmOutput {
constraint: output.unwrap_or(constraint),
expr,
is_rw,
is_indirect,
});
}
}
Inputs => {
while p.token != token::Eof && p.token != token::Colon && p.token != token::ModSep {
if !inputs.is_empty() {
p.eat(&token::Comma);
}
let constraint = parse_asm_str(&mut p)?;
if constraint.as_str().starts_with('=') {
struct_span_err!(
cx.sess.parse_sess.span_diagnostic,
p.prev_token.span,
E0662,
"input operand constraint contains '='"
)
.emit();
} else if constraint.as_str().starts_with('+') {
struct_span_err!(
cx.sess.parse_sess.span_diagnostic,
p.prev_token.span,
E0663,
"input operand constraint contains '+'"
)
.emit();
}
p.expect(&token::OpenDelim(token::Paren))?;
let input = p.parse_expr()?;
p.expect(&token::CloseDelim(token::Paren))?;
inputs.push((constraint, input));
}
}
Clobbers => {
while p.token != token::Eof && p.token != token::Colon && p.token != token::ModSep {
if !clobs.is_empty() {
p.eat(&token::Comma);
}
let s = parse_asm_str(&mut p)?;
if OPTIONS.iter().any(|&opt| s == opt) {
cx.span_warn(p.prev_token.span, "expected a clobber, found an option");
} else if s.as_str().starts_with('{') || s.as_str().ends_with('}') {
struct_span_err!(
cx.sess.parse_sess.span_diagnostic,
p.prev_token.span,
E0664,
"clobber should not be surrounded by braces"
)
.emit();
}
clobs.push(s);
}
}
Options => {
let option = parse_asm_str(&mut p)?;
if option == sym::volatile {
// Indicates that the inline assembly has side effects
// and must not be optimized out along with its outputs.
volatile = true;
} else if option == sym::alignstack {
alignstack = true;
} else if option == sym::intel {
dialect = LlvmAsmDialect::Intel;
} else {
cx.span_warn(p.prev_token.span, "unrecognized option");
}
if p.token == token::Comma {
p.eat(&token::Comma);
}
}
StateNone => (),
}
loop {
// MOD_SEP is a double colon '::' without space in between.
// When encountered, the state must be advanced twice.
match (&p.token.kind, state.next(), state.next().next()) {
(&token::Colon, StateNone, _) | (&token::ModSep, _, StateNone) => {
p.bump();
break 'statement;
}
(&token::Colon, st, _) | (&token::ModSep, _, st) => {
p.bump();
state = st;
}
(&token::Eof, ..) => break 'statement,
_ => break,
}
}
}
Ok(Some(ast::LlvmInlineAsm {
asm,
asm_str_style: asm_str_style.unwrap(),
outputs,
inputs,
clobbers: clobs,
volatile,
alignstack,
dialect,
}))
}


@ -0,0 +1,14 @@
use rustc_ast::tokenstream::TokenStream;
use rustc_ast_pretty::pprust;
use rustc_expand::base;
pub fn expand_log_syntax<'cx>(
_cx: &'cx mut base::ExtCtxt<'_>,
sp: rustc_span::Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
println!("{}", pprust::tts_to_string(&tts));
// Return an `any` result so that `log_syntax!` can be invoked as an expression or an item.
base::DummyResult::any_valid(sp)
}


@ -0,0 +1,492 @@
use std::mem;
use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::visit::{self, Visitor};
use rustc_ast::{self as ast, NodeId};
use rustc_ast_pretty::pprust;
use rustc_expand::base::{ExtCtxt, ResolverExpand};
use rustc_expand::expand::{AstFragment, ExpansionConfig};
use rustc_session::Session;
use rustc_span::hygiene::AstPass;
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use smallvec::smallvec;
use std::cell::RefCell;
struct ProcMacroDerive {
id: NodeId,
trait_name: Symbol,
function_name: Ident,
span: Span,
attrs: Vec<Symbol>,
}
enum ProcMacroDefType {
Attr,
Bang,
}
struct ProcMacroDef {
id: NodeId,
function_name: Ident,
span: Span,
def_type: ProcMacroDefType,
}
enum ProcMacro {
Derive(ProcMacroDerive),
Def(ProcMacroDef),
}
struct CollectProcMacros<'a> {
sess: &'a Session,
macros: Vec<ProcMacro>,
in_root: bool,
handler: &'a rustc_errors::Handler,
source_map: &'a SourceMap,
is_proc_macro_crate: bool,
is_test_crate: bool,
}
pub fn inject(
sess: &Session,
resolver: &mut dyn ResolverExpand,
mut krate: ast::Crate,
is_proc_macro_crate: bool,
has_proc_macro_decls: bool,
is_test_crate: bool,
num_crate_types: usize,
handler: &rustc_errors::Handler,
) -> ast::Crate {
let ecfg = ExpansionConfig::default("proc_macro".to_string());
let mut cx = ExtCtxt::new(sess, ecfg, resolver, None);
let mut collect = CollectProcMacros {
sess,
macros: Vec::new(),
in_root: true,
handler,
source_map: sess.source_map(),
is_proc_macro_crate,
is_test_crate,
};
if has_proc_macro_decls || is_proc_macro_crate {
visit::walk_crate(&mut collect, &krate);
}
let macros = collect.macros;
if !is_proc_macro_crate {
return krate;
}
if num_crate_types > 1 {
handler.err("cannot mix `proc-macro` crate type with others");
}
if is_test_crate {
return krate;
}
let decls = mk_decls(&mut krate, &mut cx, &macros);
krate.module.items.push(decls);
krate
}
impl<'a> CollectProcMacros<'a> {
fn check_not_pub_in_root(&self, vis: &ast::Visibility, sp: Span) {
if self.is_proc_macro_crate && self.in_root && vis.node.is_pub() {
self.handler.span_err(
sp,
"`proc-macro` crate types currently cannot export any items other \
than functions tagged with `#[proc_macro]`, `#[proc_macro_derive]`, \
or `#[proc_macro_attribute]`",
);
}
}
fn collect_custom_derive(&mut self, item: &'a ast::Item, attr: &'a ast::Attribute) {
// Once we've located the `#[proc_macro_derive]` attribute, verify
// that it's of the form `#[proc_macro_derive(Foo)]` or
// `#[proc_macro_derive(Foo, attributes(A, ..))]`
let list = match attr.meta_item_list() {
Some(list) => list,
None => return,
};
if list.len() != 1 && list.len() != 2 {
self.handler.span_err(attr.span, "attribute must have either one or two arguments");
return;
}
let trait_attr = match list[0].meta_item() {
Some(meta_item) => meta_item,
_ => {
self.handler.span_err(list[0].span(), "not a meta item");
return;
}
};
let trait_ident = match trait_attr.ident() {
Some(trait_ident) if trait_attr.is_word() => trait_ident,
_ => {
self.handler.span_err(trait_attr.span, "must only be one word");
return;
}
};
if !trait_ident.name.can_be_raw() {
self.handler.span_err(
trait_attr.span,
&format!("`{}` cannot be a name of derive macro", trait_ident),
);
}
let attributes_attr = list.get(1);
let proc_attrs: Vec<_> = if let Some(attr) = attributes_attr {
if !attr.has_name(sym::attributes) {
self.handler.span_err(attr.span(), "second argument must be `attributes`")
}
attr.meta_item_list()
.unwrap_or_else(|| {
self.handler
.span_err(attr.span(), "attribute must be of form: `attributes(foo, bar)`");
&[]
})
.iter()
.filter_map(|attr| {
let attr = match attr.meta_item() {
Some(meta_item) => meta_item,
_ => {
self.handler.span_err(attr.span(), "not a meta item");
return None;
}
};
let ident = match attr.ident() {
Some(ident) if attr.is_word() => ident,
_ => {
self.handler.span_err(attr.span, "must only be one word");
return None;
}
};
if !ident.name.can_be_raw() {
self.handler.span_err(
attr.span,
&format!("`{}` cannot be a name of derive helper attribute", ident),
);
}
Some(ident.name)
})
.collect()
} else {
Vec::new()
};
if self.in_root && item.vis.node.is_pub() {
self.macros.push(ProcMacro::Derive(ProcMacroDerive {
id: item.id,
span: item.span,
trait_name: trait_ident.name,
function_name: item.ident,
attrs: proc_attrs,
}));
} else {
let msg = if !self.in_root {
"functions tagged with `#[proc_macro_derive]` must \
currently reside in the root of the crate"
} else {
"functions tagged with `#[proc_macro_derive]` must be `pub`"
};
self.handler.span_err(self.source_map.guess_head_span(item.span), msg);
}
}
fn collect_attr_proc_macro(&mut self, item: &'a ast::Item) {
if self.in_root && item.vis.node.is_pub() {
self.macros.push(ProcMacro::Def(ProcMacroDef {
id: item.id,
span: item.span,
function_name: item.ident,
def_type: ProcMacroDefType::Attr,
}));
} else {
let msg = if !self.in_root {
"functions tagged with `#[proc_macro_attribute]` must \
currently reside in the root of the crate"
} else {
"functions tagged with `#[proc_macro_attribute]` must be `pub`"
};
self.handler.span_err(self.source_map.guess_head_span(item.span), msg);
}
}
fn collect_bang_proc_macro(&mut self, item: &'a ast::Item) {
if self.in_root && item.vis.node.is_pub() {
self.macros.push(ProcMacro::Def(ProcMacroDef {
id: item.id,
span: item.span,
function_name: item.ident,
def_type: ProcMacroDefType::Bang,
}));
} else {
let msg = if !self.in_root {
"functions tagged with `#[proc_macro]` must \
currently reside in the root of the crate"
} else {
"functions tagged with `#[proc_macro]` must be `pub`"
};
self.handler.span_err(self.source_map.guess_head_span(item.span), msg);
}
}
}
impl<'a> Visitor<'a> for CollectProcMacros<'a> {
fn visit_item(&mut self, item: &'a ast::Item) {
if let ast::ItemKind::MacroDef(..) = item.kind {
if self.is_proc_macro_crate && self.sess.contains_name(&item.attrs, sym::macro_export) {
let msg =
"cannot export macro_rules! macros from a `proc-macro` crate type currently";
self.handler.span_err(self.source_map.guess_head_span(item.span), msg);
}
}
// First up, make sure we're checking a bare function. If we're not then
// we're just not interested in this item.
//
// If we find one, try to locate a `#[proc_macro_derive]` attribute on it.
let is_fn = match item.kind {
ast::ItemKind::Fn(..) => true,
_ => false,
};
let mut found_attr: Option<&'a ast::Attribute> = None;
for attr in &item.attrs {
if self.sess.is_proc_macro_attr(&attr) {
if let Some(prev_attr) = found_attr {
let prev_item = prev_attr.get_normal_item();
let item = attr.get_normal_item();
let path_str = pprust::path_to_string(&item.path);
let msg = if item.path.segments[0].ident.name
== prev_item.path.segments[0].ident.name
{
format!(
"only one `#[{}]` attribute is allowed on any given function",
path_str,
)
} else {
format!(
"`#[{}]` and `#[{}]` attributes cannot both be applied \
to the same function",
path_str,
pprust::path_to_string(&prev_item.path),
)
};
self.handler
.struct_span_err(attr.span, &msg)
.span_label(prev_attr.span, "previous attribute here")
.emit();
return;
}
found_attr = Some(attr);
}
}
let attr = match found_attr {
None => {
self.check_not_pub_in_root(&item.vis, self.source_map.guess_head_span(item.span));
let prev_in_root = mem::replace(&mut self.in_root, false);
visit::walk_item(self, item);
self.in_root = prev_in_root;
return;
}
Some(attr) => attr,
};
if !is_fn {
let msg = format!(
"the `#[{}]` attribute may only be used on bare functions",
pprust::path_to_string(&attr.get_normal_item().path),
);
self.handler.span_err(attr.span, &msg);
return;
}
if self.is_test_crate {
return;
}
if !self.is_proc_macro_crate {
let msg = format!(
"the `#[{}]` attribute is only usable with crates of the `proc-macro` crate type",
pprust::path_to_string(&attr.get_normal_item().path),
);
self.handler.span_err(attr.span, &msg);
return;
}
if self.sess.check_name(attr, sym::proc_macro_derive) {
self.collect_custom_derive(item, attr);
} else if self.sess.check_name(attr, sym::proc_macro_attribute) {
self.collect_attr_proc_macro(item);
} else if self.sess.check_name(attr, sym::proc_macro) {
self.collect_bang_proc_macro(item);
};
let prev_in_root = mem::replace(&mut self.in_root, false);
visit::walk_item(self, item);
self.in_root = prev_in_root;
}
fn visit_mac(&mut self, mac: &'a ast::MacCall) {
visit::walk_mac(self, mac)
}
}
// Creates a new anonymous constant which looks like:
//
// const _: () = {
// extern crate proc_macro;
//
// use proc_macro::bridge::client::ProcMacro;
//
// #[rustc_proc_macro_decls]
// #[allow(deprecated)]
// static DECLS: &[ProcMacro] = &[
// ProcMacro::custom_derive($name_trait1, &[], ::$name1);
// ProcMacro::custom_derive($name_trait2, &["attribute_name"], ::$name2);
// // ...
// ];
// }
fn mk_decls(
ast_krate: &mut ast::Crate,
cx: &mut ExtCtxt<'_>,
macros: &[ProcMacro],
) -> P<ast::Item> {
// We're the ones filling in this Vec,
// so it should be empty to start with
assert!(ast_krate.proc_macros.is_empty());
let expn_id = cx.resolver.expansion_for_ast_pass(
DUMMY_SP,
AstPass::ProcMacroHarness,
&[sym::rustc_attrs, sym::proc_macro_internals],
None,
);
let span = DUMMY_SP.with_def_site_ctxt(expn_id);
let proc_macro = Ident::new(sym::proc_macro, span);
let krate = cx.item(span, proc_macro, Vec::new(), ast::ItemKind::ExternCrate(None));
let bridge = Ident::new(sym::bridge, span);
let client = Ident::new(sym::client, span);
let proc_macro_ty = Ident::new(sym::ProcMacro, span);
let custom_derive = Ident::new(sym::custom_derive, span);
let attr = Ident::new(sym::attr, span);
let bang = Ident::new(sym::bang, span);
let krate_ref = RefCell::new(ast_krate);
// We add NodeIds to 'krate.proc_macros' in the order
// that we generate expressions. The position of each NodeId
// in the 'proc_macros' Vec corresponds to its position
// in the static array that will be generated
let decls = {
let local_path =
|sp: Span, name| cx.expr_path(cx.path(sp.with_ctxt(span.ctxt()), vec![name]));
let proc_macro_ty_method_path = |method| {
cx.expr_path(cx.path(span, vec![proc_macro, bridge, client, proc_macro_ty, method]))
};
macros
.iter()
.map(|m| match m {
ProcMacro::Derive(cd) => {
krate_ref.borrow_mut().proc_macros.push(cd.id);
cx.expr_call(
span,
proc_macro_ty_method_path(custom_derive),
vec![
cx.expr_str(cd.span, cd.trait_name),
cx.expr_vec_slice(
span,
cd.attrs
.iter()
.map(|&s| cx.expr_str(cd.span, s))
.collect::<Vec<_>>(),
),
local_path(cd.span, cd.function_name),
],
)
}
ProcMacro::Def(ca) => {
krate_ref.borrow_mut().proc_macros.push(ca.id);
let ident = match ca.def_type {
ProcMacroDefType::Attr => attr,
ProcMacroDefType::Bang => bang,
};
cx.expr_call(
span,
proc_macro_ty_method_path(ident),
vec![
cx.expr_str(ca.span, ca.function_name.name),
local_path(ca.span, ca.function_name),
],
)
}
})
.collect()
};
let decls_static = cx
.item_static(
span,
Ident::new(sym::_DECLS, span),
cx.ty_rptr(
span,
cx.ty(
span,
ast::TyKind::Slice(
cx.ty_path(cx.path(span, vec![proc_macro, bridge, client, proc_macro_ty])),
),
),
None,
ast::Mutability::Not,
),
ast::Mutability::Not,
cx.expr_vec_slice(span, decls),
)
.map(|mut i| {
let attr = cx.meta_word(span, sym::rustc_proc_macro_decls);
i.attrs.push(cx.attribute(attr));
let deprecated_attr = attr::mk_nested_word_item(Ident::new(sym::deprecated, span));
let allow_deprecated_attr =
attr::mk_list_item(Ident::new(sym::allow, span), vec![deprecated_attr]);
i.attrs.push(cx.attribute(allow_deprecated_attr));
i
});
let block = cx.expr_block(
cx.block(span, vec![cx.stmt_item(span, krate), cx.stmt_item(span, decls_static)]),
);
let anon_constant = cx.item_const(
span,
Ident::new(kw::Underscore, span),
cx.ty(span, ast::TyKind::Tup(Vec::new())),
block,
);
// Integrate the new item into existing module structures.
let items = AstFragment::Items(smallvec![anon_constant]);
cx.monotonic_expander().fully_expand_fragment(items).make_items().pop().unwrap()
}


@ -0,0 +1,225 @@
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_ast::token;
use rustc_ast::tokenstream::TokenStream;
use rustc_ast_pretty::pprust;
use rustc_expand::base::{self, *};
use rustc_expand::module::DirectoryOwnership;
use rustc_parse::{self, new_parser_from_file, parser::Parser};
use rustc_session::lint::builtin::INCOMPLETE_INCLUDE;
use rustc_span::symbol::Symbol;
use rustc_span::{self, Pos, Span};
use smallvec::SmallVec;
use std::rc::Rc;
use rustc_data_structures::sync::Lrc;
// These macros all relate to the file system; they either return
// the column/row/filename of the expression, or they include
// a given file into the current one.
/// line!(): expands to the current line number
pub fn expand_line(
cx: &mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_def_site_ctxt(sp);
base::check_zero_tts(cx, sp, tts, "line!");
let topmost = cx.expansion_cause().unwrap_or(sp);
let loc = cx.source_map().lookup_char_pos(topmost.lo());
base::MacEager::expr(cx.expr_u32(topmost, loc.line as u32))
}
/// column!(): expands to the current column number
pub fn expand_column(
cx: &mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_def_site_ctxt(sp);
base::check_zero_tts(cx, sp, tts, "column!");
let topmost = cx.expansion_cause().unwrap_or(sp);
let loc = cx.source_map().lookup_char_pos(topmost.lo());
base::MacEager::expr(cx.expr_u32(topmost, loc.col.to_usize() as u32 + 1))
}
/// file!(): expands to the current filename
/// The source_file (`loc.file`) contains a bunch more information we could spit
/// out if we wanted.
pub fn expand_file(
cx: &mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_def_site_ctxt(sp);
base::check_zero_tts(cx, sp, tts, "file!");
let topmost = cx.expansion_cause().unwrap_or(sp);
let loc = cx.source_map().lookup_char_pos(topmost.lo());
base::MacEager::expr(cx.expr_str(topmost, Symbol::intern(&loc.file.name.to_string())))
}
pub fn expand_stringify(
cx: &mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_def_site_ctxt(sp);
let s = pprust::tts_to_string(&tts);
base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&s)))
}
pub fn expand_mod(
cx: &mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_def_site_ctxt(sp);
base::check_zero_tts(cx, sp, tts, "module_path!");
let mod_path = &cx.current_expansion.module.mod_path;
let string = mod_path.iter().map(|x| x.to_string()).collect::<Vec<String>>().join("::");
base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&string)))
}
/// include! : parse the given file as an expr
/// This is generally a bad idea because it's going to behave
/// unhygienically.
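/// For instance (illustrative only), `let x: u32 = include!("data/expr.rs");`
/// splices the parsed contents of `expr.rs` directly into the caller's code.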
pub fn expand_include<'cx>(
cx: &'cx mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'cx> {
let sp = cx.with_def_site_ctxt(sp);
let file = match get_single_str_from_tts(cx, sp, tts, "include!") {
Some(f) => f,
None => return DummyResult::any(sp),
};
// The file will be added to the code map by the parser
let mut file = match cx.resolve_path(file, sp) {
Ok(f) => f,
Err(mut err) => {
err.emit();
return DummyResult::any(sp);
}
};
let p = new_parser_from_file(cx.parse_sess(), &file, Some(sp));
// If in the included file we have e.g., `mod bar;`,
// then the path of `bar.rs` should be relative to the directory of `file`.
// See https://github.com/rust-lang/rust/pull/69838/files#r395217057 for a discussion.
// `MacroExpander::fully_expand_fragment` later restores, so "stack discipline" is maintained.
file.pop();
cx.current_expansion.directory_ownership = DirectoryOwnership::Owned { relative: None };
let mod_path = cx.current_expansion.module.mod_path.clone();
cx.current_expansion.module = Rc::new(ModuleData { mod_path, directory: file });
struct ExpandResult<'a> {
p: Parser<'a>,
node_id: ast::NodeId,
}
impl<'a> base::MacResult for ExpandResult<'a> {
fn make_expr(mut self: Box<ExpandResult<'a>>) -> Option<P<ast::Expr>> {
let r = base::parse_expr(&mut self.p)?;
if self.p.token != token::Eof {
self.p.sess.buffer_lint(
&INCOMPLETE_INCLUDE,
self.p.token.span,
self.node_id,
"include macro expected single expression in source",
);
}
Some(r)
}
fn make_items(mut self: Box<ExpandResult<'a>>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
let mut ret = SmallVec::new();
while self.p.token != token::Eof {
match self.p.parse_item() {
Err(mut err) => {
err.emit();
break;
}
Ok(Some(item)) => ret.push(item),
Ok(None) => {
let token = pprust::token_to_string(&self.p.token);
let msg = format!("expected item, found `{}`", token);
self.p.struct_span_err(self.p.token.span, &msg).emit();
break;
}
}
}
Some(ret)
}
}
Box::new(ExpandResult { p, node_id: cx.resolver.lint_node_id(cx.current_expansion.id) })
}
// include_str! : read the given file, insert it as a literal string expr
pub fn expand_include_str(
cx: &mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_def_site_ctxt(sp);
let file = match get_single_str_from_tts(cx, sp, tts, "include_str!") {
Some(f) => f,
None => return DummyResult::any(sp),
};
let file = match cx.resolve_path(file, sp) {
Ok(f) => f,
Err(mut err) => {
err.emit();
return DummyResult::any(sp);
}
};
match cx.source_map().load_binary_file(&file) {
Ok(bytes) => match std::str::from_utf8(&bytes) {
Ok(src) => {
let interned_src = Symbol::intern(&src);
base::MacEager::expr(cx.expr_str(sp, interned_src))
}
Err(_) => {
cx.span_err(sp, &format!("{} wasn't a utf-8 file", file.display()));
DummyResult::any(sp)
}
},
Err(e) => {
cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e));
DummyResult::any(sp)
}
}
}
pub fn expand_include_bytes(
cx: &mut ExtCtxt<'_>,
sp: Span,
tts: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let sp = cx.with_def_site_ctxt(sp);
let file = match get_single_str_from_tts(cx, sp, tts, "include_bytes!") {
Some(f) => f,
None => return DummyResult::any(sp),
};
let file = match cx.resolve_path(file, sp) {
Ok(f) => f,
Err(mut err) => {
err.emit();
return DummyResult::any(sp);
}
};
match cx.source_map().load_binary_file(&file) {
Ok(bytes) => base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes)))),
Err(e) => {
cx.span_err(sp, &format!("couldn't read {}: {}", file.display(), e));
DummyResult::any(sp)
}
}
}


@ -0,0 +1,85 @@
use rustc_ast as ast;
use rustc_ast::ptr::P;
use rustc_expand::base::{ExtCtxt, ResolverExpand};
use rustc_expand::expand::ExpansionConfig;
use rustc_session::Session;
use rustc_span::edition::Edition;
use rustc_span::hygiene::AstPass;
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::DUMMY_SP;
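// Rough sketch of what this pass prepends to a plain 2018-edition crate
// (illustrative, not the literal AST):
//
//     #[prelude_import]
//     use std::prelude::v1::*;
//
//     #[macro_use]
//     extern crate std;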
pub fn inject(
mut krate: ast::Crate,
resolver: &mut dyn ResolverExpand,
sess: &Session,
alt_std_name: Option<Symbol>,
) -> (ast::Crate, Option<Symbol>) {
let rust_2018 = sess.parse_sess.edition >= Edition::Edition2018;
// the first name in this list is the crate name of the crate with the prelude
let names: &[Symbol] = if sess.contains_name(&krate.attrs, sym::no_core) {
return (krate, None);
} else if sess.contains_name(&krate.attrs, sym::no_std) {
if sess.contains_name(&krate.attrs, sym::compiler_builtins) {
&[sym::core]
} else {
&[sym::core, sym::compiler_builtins]
}
} else {
&[sym::std]
};
let expn_id = resolver.expansion_for_ast_pass(
DUMMY_SP,
AstPass::StdImports,
&[sym::prelude_import],
None,
);
let span = DUMMY_SP.with_def_site_ctxt(expn_id);
let call_site = DUMMY_SP.with_call_site_ctxt(expn_id);
let ecfg = ExpansionConfig::default("std_lib_injection".to_string());
let cx = ExtCtxt::new(sess, ecfg, resolver, None);
// .rev() to preserve ordering above in combination with insert(0, ...)
for &name in names.iter().rev() {
let ident = if rust_2018 { Ident::new(name, span) } else { Ident::new(name, call_site) };
krate.module.items.insert(
0,
cx.item(
span,
ident,
vec![cx.attribute(cx.meta_word(span, sym::macro_use))],
ast::ItemKind::ExternCrate(alt_std_name),
),
);
}
// The crates have been injected, the assumption is that the first one is
// the one with the prelude.
let name = names[0];
let import_path = if rust_2018 {
[name, sym::prelude, sym::v1].iter().map(|symbol| Ident::new(*symbol, span)).collect()
} else {
[kw::PathRoot, name, sym::prelude, sym::v1]
.iter()
.map(|symbol| Ident::new(*symbol, span))
.collect()
};
let use_item = cx.item(
span,
Ident::invalid(),
vec![cx.attribute(cx.meta_word(span, sym::prelude_import))],
ast::ItemKind::Use(P(ast::UseTree {
prefix: cx.path(span, import_path),
kind: ast::UseTreeKind::Glob,
span,
})),
);
krate.module.items.insert(0, use_item);
(krate, Some(name))
}


@ -0,0 +1,471 @@
//! The expansion from a test function to the appropriate test struct for libtest.
//! Ideally, this code would be in libtest but for efficiency and error messages it lives here.
use crate::util::check_builtin_macro_attribute;
use rustc_ast as ast;
use rustc_ast::attr;
use rustc_ast_pretty::pprust;
use rustc_expand::base::*;
use rustc_session::Session;
use rustc_span::source_map::respan;
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
use std::iter;
// #[test_case] is used by custom test authors to mark tests
// When building for test, it needs to make the item public and gensym the name.
// Otherwise, we'll omit the item. This behavior means that any item annotated
// with #[test_case] is never addressable.
//
// We mark item with an inert attribute "rustc_test_marker" which the test generation
// logic will pick up on.
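// For example (illustrative, with a custom test framework):
//
//     #[test_case]
//     const MY_TEST: MyTestDescription = MyTestDescription { /* ... */ };
//
// survives only under `--test`, made `pub` and tagged `#[rustc_test_marker]`
// so the test harness generator can collect it.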
pub fn expand_test_case(
ecx: &mut ExtCtxt<'_>,
attr_sp: Span,
meta_item: &ast::MetaItem,
anno_item: Annotatable,
) -> Vec<Annotatable> {
check_builtin_macro_attribute(ecx, meta_item, sym::test_case);
if !ecx.ecfg.should_test {
return vec![];
}
let sp = ecx.with_def_site_ctxt(attr_sp);
let mut item = anno_item.expect_item();
item = item.map(|mut item| {
item.vis = respan(item.vis.span, ast::VisibilityKind::Public);
item.ident.span = item.ident.span.with_ctxt(sp.ctxt());
item.attrs.push(ecx.attribute(ecx.meta_word(sp, sym::rustc_test_marker)));
item
});
return vec![Annotatable::Item(item)];
}
pub fn expand_test(
cx: &mut ExtCtxt<'_>,
attr_sp: Span,
meta_item: &ast::MetaItem,
item: Annotatable,
) -> Vec<Annotatable> {
check_builtin_macro_attribute(cx, meta_item, sym::test);
expand_test_or_bench(cx, attr_sp, item, false)
}
pub fn expand_bench(
cx: &mut ExtCtxt<'_>,
attr_sp: Span,
meta_item: &ast::MetaItem,
item: Annotatable,
) -> Vec<Annotatable> {
check_builtin_macro_attribute(cx, meta_item, sym::bench);
expand_test_or_bench(cx, attr_sp, item, true)
}
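// Rough sketch of the expansion for `#[test] fn foo() {}` (simplified, not the
// literal output): the original `fn foo` is kept, and we additionally emit
//
//     extern crate test;
//
//     #[cfg(test)]
//     #[rustc_test_marker]
//     pub const foo: test::TestDescAndFn = test::TestDescAndFn {
//         desc: test::TestDesc { name: test::StaticTestName("path::to::foo"), /* ... */ },
//         testfn: test::StaticTestFn(|| test::assert_test_result(foo())),
//     };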
pub fn expand_test_or_bench(
cx: &mut ExtCtxt<'_>,
attr_sp: Span,
item: Annotatable,
is_bench: bool,
) -> Vec<Annotatable> {
// If we're not in test configuration, remove the annotated item
if !cx.ecfg.should_test {
return vec![];
}
let item = match item {
Annotatable::Item(i) => i,
other => {
cx.struct_span_err(
other.span(),
"`#[test]` attribute is only allowed on non associated functions",
)
.emit();
return vec![other];
}
};
if let ast::ItemKind::MacCall(_) = item.kind {
cx.sess.parse_sess.span_diagnostic.span_warn(
item.span,
"`#[test]` attribute should not be used on macros. Use `#[cfg(test)]` instead.",
);
return vec![Annotatable::Item(item)];
}
// has_*_signature will report any errors in the type so compilation
// will fail. We shouldn't try to expand in this case because the errors
// would be spurious.
if (!is_bench && !has_test_signature(cx, &item))
|| (is_bench && !has_bench_signature(cx, &item))
{
return vec![Annotatable::Item(item)];
}
let (sp, attr_sp) = (cx.with_def_site_ctxt(item.span), cx.with_def_site_ctxt(attr_sp));
let test_id = Ident::new(sym::test, attr_sp);
// creates test::$name
let test_path = |name| cx.path(sp, vec![test_id, Ident::from_str_and_span(name, sp)]);
// creates test::ShouldPanic::$name
let should_panic_path = |name| {
cx.path(
sp,
vec![
test_id,
Ident::from_str_and_span("ShouldPanic", sp),
Ident::from_str_and_span(name, sp),
],
)
};
// creates test::TestType::$name
let test_type_path = |name| {
cx.path(
sp,
vec![
test_id,
Ident::from_str_and_span("TestType", sp),
Ident::from_str_and_span(name, sp),
],
)
};
// creates $name: $expr
let field = |name, expr| cx.field_imm(sp, Ident::from_str_and_span(name, sp), expr);
let test_fn = if is_bench {
// A simple ident for a lambda
let b = Ident::from_str_and_span("b", attr_sp);
cx.expr_call(
sp,
cx.expr_path(test_path("StaticBenchFn")),
vec![
// |b| self::test::assert_test_result(
cx.lambda1(
sp,
cx.expr_call(
sp,
cx.expr_path(test_path("assert_test_result")),
vec![
// super::$test_fn(b)
cx.expr_call(
sp,
cx.expr_path(cx.path(sp, vec![item.ident])),
vec![cx.expr_ident(sp, b)],
),
],
),
b,
), // )
],
)
} else {
cx.expr_call(
sp,
cx.expr_path(test_path("StaticTestFn")),
vec![
// || {
cx.lambda0(
sp,
// test::assert_test_result(
cx.expr_call(
sp,
cx.expr_path(test_path("assert_test_result")),
vec![
// $test_fn()
cx.expr_call(sp, cx.expr_path(cx.path(sp, vec![item.ident])), vec![]), // )
],
), // }
), // )
],
)
};
let mut test_const = cx.item(
sp,
Ident::new(item.ident.name, sp),
vec![
// #[cfg(test)]
cx.attribute(attr::mk_list_item(
Ident::new(sym::cfg, attr_sp),
vec![attr::mk_nested_word_item(Ident::new(sym::test, attr_sp))],
)),
// #[rustc_test_marker]
cx.attribute(cx.meta_word(attr_sp, sym::rustc_test_marker)),
],
// const $ident: test::TestDescAndFn =
ast::ItemKind::Const(
ast::Defaultness::Final,
cx.ty(sp, ast::TyKind::Path(None, test_path("TestDescAndFn"))),
// test::TestDescAndFn {
Some(
cx.expr_struct(
sp,
test_path("TestDescAndFn"),
vec![
// desc: test::TestDesc {
field(
"desc",
cx.expr_struct(
sp,
test_path("TestDesc"),
vec![
// name: "path::to::test"
field(
"name",
cx.expr_call(
sp,
cx.expr_path(test_path("StaticTestName")),
vec![cx.expr_str(
sp,
Symbol::intern(&item_path(
// skip the name of the root module
&cx.current_expansion.module.mod_path[1..],
&item.ident,
)),
)],
),
),
// ignore: true | false
field(
"ignore",
cx.expr_bool(sp, should_ignore(&cx.sess, &item)),
),
// allow_fail: true | false
field(
"allow_fail",
cx.expr_bool(sp, should_fail(&cx.sess, &item)),
),
// should_panic: ...
field(
"should_panic",
match should_panic(cx, &item) {
// test::ShouldPanic::No
ShouldPanic::No => {
cx.expr_path(should_panic_path("No"))
}
// test::ShouldPanic::Yes
ShouldPanic::Yes(None) => {
cx.expr_path(should_panic_path("Yes"))
}
// test::ShouldPanic::YesWithMessage("...")
ShouldPanic::Yes(Some(sym)) => cx.expr_call(
sp,
cx.expr_path(should_panic_path("YesWithMessage")),
vec![cx.expr_str(sp, sym)],
),
},
),
// test_type: ...
field(
"test_type",
match test_type(cx) {
// test::TestType::UnitTest
TestType::UnitTest => {
cx.expr_path(test_type_path("UnitTest"))
}
// test::TestType::IntegrationTest
TestType::IntegrationTest => {
cx.expr_path(test_type_path("IntegrationTest"))
}
// test::TestPath::Unknown
TestType::Unknown => {
cx.expr_path(test_type_path("Unknown"))
}
},
),
// },
],
),
),
// testfn: test::StaticTestFn(...) | test::StaticBenchFn(...)
field("testfn", test_fn), // }
],
), // }
),
),
);
test_const = test_const.map(|mut tc| {
tc.vis.node = ast::VisibilityKind::Public;
tc
});
// extern crate test
let test_extern = cx.item(sp, test_id, vec![], ast::ItemKind::ExternCrate(None));
tracing::debug!("synthetic test item:\n{}\n", pprust::item_to_string(&test_const));
vec![
// Access to libtest under a hygienic name
Annotatable::Item(test_extern),
// The generated test case
Annotatable::Item(test_const),
// The original item
Annotatable::Item(item),
]
}
fn item_path(mod_path: &[Ident], item_ident: &Ident) -> String {
mod_path
.iter()
.chain(iter::once(item_ident))
.map(|x| x.to_string())
.collect::<Vec<String>>()
.join("::")
}
enum ShouldPanic {
No,
Yes(Option<Symbol>),
}
fn should_ignore(sess: &Session, i: &ast::Item) -> bool {
sess.contains_name(&i.attrs, sym::ignore)
}
fn should_fail(sess: &Session, i: &ast::Item) -> bool {
sess.contains_name(&i.attrs, sym::allow_fail)
}
fn should_panic(cx: &ExtCtxt<'_>, i: &ast::Item) -> ShouldPanic {
match cx.sess.find_by_name(&i.attrs, sym::should_panic) {
Some(attr) => {
let sd = &cx.sess.parse_sess.span_diagnostic;
match attr.meta_item_list() {
// Handle #[should_panic(expected = "foo")]
Some(list) => {
let msg = list
.iter()
.find(|mi| mi.has_name(sym::expected))
.and_then(|mi| mi.meta_item())
.and_then(|mi| mi.value_str());
if list.len() != 1 || msg.is_none() {
sd.struct_span_warn(
attr.span,
"argument must be of the form: \
`expected = \"error message\"`",
)
.note(
"errors in this attribute were erroneously \
allowed and will become a hard error in a \
future release.",
)
.emit();
ShouldPanic::Yes(None)
} else {
ShouldPanic::Yes(msg)
}
}
// Handle #[should_panic] and #[should_panic = "expected"]
None => ShouldPanic::Yes(attr.value_str()),
}
}
None => ShouldPanic::No,
}
}
enum TestType {
UnitTest,
IntegrationTest,
Unknown,
}
/// Attempts to determine the type of test.
/// Since doctests are created without macro expansion, the only possible variants here
/// are `UnitTest`, `IntegrationTest` or `Unknown`.
fn test_type(cx: &ExtCtxt<'_>) -> TestType {
// Root path from context contains the topmost sources directory of the crate.
// I.e., for `project` with sources in `src` and tests in `tests` folders
// (no matter how many nested folders lie inside),
// there will be two different root paths: `/project/src` and `/project/tests`.
let crate_path = cx.root_path.as_path();
if crate_path.ends_with("src") {
// `/src` folder contains unit-tests.
TestType::UnitTest
} else if crate_path.ends_with("tests") {
// `/tests` folder contains integration tests.
TestType::IntegrationTest
} else {
// Crate layout doesn't match expected one, test type is unknown.
TestType::Unknown
}
}
fn has_test_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool {
let has_should_panic_attr = cx.sess.contains_name(&i.attrs, sym::should_panic);
let sd = &cx.sess.parse_sess.span_diagnostic;
if let ast::ItemKind::Fn(_, ref sig, ref generics, _) = i.kind {
if let ast::Unsafe::Yes(span) = sig.header.unsafety {
sd.struct_span_err(i.span, "unsafe functions cannot be used for tests")
.span_label(span, "`unsafe` because of this")
.emit();
return false;
}
if let ast::Async::Yes { span, .. } = sig.header.asyncness {
sd.struct_span_err(i.span, "async functions cannot be used for tests")
.span_label(span, "`async` because of this")
.emit();
return false;
}
// If the termination trait is active, the compiler will check that the output
// type implements the `Termination` trait as `libtest` enforces that.
let has_output = match sig.decl.output {
ast::FnRetTy::Default(..) => false,
ast::FnRetTy::Ty(ref t) if t.kind.is_unit() => false,
_ => true,
};
if !sig.decl.inputs.is_empty() {
sd.span_err(i.span, "functions used as tests can not have any arguments");
return false;
}
match (has_output, has_should_panic_attr) {
(true, true) => {
sd.span_err(i.span, "functions using `#[should_panic]` must return `()`");
false
}
(true, false) => {
if !generics.params.is_empty() {
sd.span_err(i.span, "functions used as tests must have signature fn() -> ()");
false
} else {
true
}
}
(false, _) => true,
}
} else {
sd.span_err(i.span, "only functions may be used as tests");
false
}
}
fn has_bench_signature(cx: &ExtCtxt<'_>, i: &ast::Item) -> bool {
let has_sig = if let ast::ItemKind::Fn(_, ref sig, _, _) = i.kind {
// N.B., inadequate check, but we're running
// well before resolve, can't get too deep.
sig.decl.inputs.len() == 1
} else {
false
};
if !has_sig {
cx.sess.parse_sess.span_diagnostic.span_err(
i.span,
"functions used as benches must have \
signature `fn(&mut Bencher) -> impl Termination`",
);
}
has_sig
}


@ -0,0 +1,383 @@
// Code that generates a test runner to run all the tests in a crate
use rustc_ast as ast;
use rustc_ast::attr;
use rustc_ast::entry::EntryPointType;
use rustc_ast::mut_visit::{ExpectOne, *};
use rustc_ast::ptr::P;
use rustc_expand::base::{ExtCtxt, ResolverExpand};
use rustc_expand::expand::{AstFragment, ExpansionConfig};
use rustc_feature::Features;
use rustc_session::Session;
use rustc_span::hygiene::{AstPass, SyntaxContext, Transparency};
use rustc_span::source_map::respan;
use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::{Span, DUMMY_SP};
use rustc_target::spec::PanicStrategy;
use smallvec::{smallvec, SmallVec};
use tracing::debug;
use std::{iter, mem};
struct Test {
span: Span,
ident: Ident,
}
struct TestCtxt<'a> {
ext_cx: ExtCtxt<'a>,
panic_strategy: PanicStrategy,
def_site: Span,
test_cases: Vec<Test>,
reexport_test_harness_main: Option<Symbol>,
test_runner: Option<ast::Path>,
}
// Traverse the crate, collecting all the test functions, eliding any
// existing main functions, and synthesizing a main test harness
pub fn inject(sess: &Session, resolver: &mut dyn ResolverExpand, krate: &mut ast::Crate) {
let span_diagnostic = sess.diagnostic();
let panic_strategy = sess.panic_strategy();
let platform_panic_strategy = sess.target.target.options.panic_strategy;
// Check for #![reexport_test_harness_main = "some_name"] which gives the
// main test function the name `some_name` without hygiene. This needs to be
// unconditional, so that the attribute is still marked as used in
// non-test builds.
let reexport_test_harness_main =
sess.first_attr_value_str_by_name(&krate.attrs, sym::reexport_test_harness_main);
// Do this here so that the test_runner crate attribute gets marked as used
// even in non-test builds
let test_runner = get_test_runner(sess, span_diagnostic, &krate);
if sess.opts.test {
let panic_strategy = match (panic_strategy, sess.opts.debugging_opts.panic_abort_tests) {
(PanicStrategy::Abort, true) => PanicStrategy::Abort,
(PanicStrategy::Abort, false) => {
if panic_strategy == platform_panic_strategy {
// Silently allow compiling with panic=abort on these platforms,
// but with old behavior (abort if a test fails).
} else {
span_diagnostic.err(
"building tests with panic=abort is not supported \
without `-Zpanic_abort_tests`",
);
}
PanicStrategy::Unwind
}
(PanicStrategy::Unwind, _) => PanicStrategy::Unwind,
};
generate_test_harness(
sess,
resolver,
reexport_test_harness_main,
krate,
&sess.features_untracked(),
panic_strategy,
test_runner,
)
}
}
struct TestHarnessGenerator<'a> {
cx: TestCtxt<'a>,
tests: Vec<Test>,
}
impl<'a> MutVisitor for TestHarnessGenerator<'a> {
fn visit_crate(&mut self, c: &mut ast::Crate) {
noop_visit_crate(c, self);
// Create a main function to run our tests
c.module.items.push(mk_main(&mut self.cx));
}
fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
let mut item = i.into_inner();
if is_test_case(&self.cx.ext_cx.sess, &item) {
debug!("this is a test item");
let test = Test { span: item.span, ident: item.ident };
self.tests.push(test);
}
// We don't want to recurse into anything other than mods, since
// mods or tests inside of functions will break things
if let ast::ItemKind::Mod(mut module) = item.kind {
let tests = mem::take(&mut self.tests);
noop_visit_mod(&mut module, self);
let mut tests = mem::replace(&mut self.tests, tests);
if !tests.is_empty() {
let parent =
if item.id == ast::DUMMY_NODE_ID { ast::CRATE_NODE_ID } else { item.id };
// Create an identifier that will hygienically resolve the test
// case name, even in another module.
let expn_id = self.cx.ext_cx.resolver.expansion_for_ast_pass(
module.inner,
AstPass::TestHarness,
&[],
Some(parent),
);
for test in &mut tests {
// See the comment on `mk_main` for why we're using
// `apply_mark` directly.
test.ident.span = test.ident.span.apply_mark(expn_id, Transparency::Opaque);
}
self.cx.test_cases.extend(tests);
}
item.kind = ast::ItemKind::Mod(module);
}
smallvec![P(item)]
}
fn visit_mac(&mut self, _mac: &mut ast::MacCall) {
// Do nothing.
}
}
// Beware, this is duplicated in librustc_passes/entry.rs (with
// `rustc_hir::Item`), so make sure to keep them in sync.
fn entry_point_type(sess: &Session, item: &ast::Item, depth: usize) -> EntryPointType {
match item.kind {
ast::ItemKind::Fn(..) => {
if sess.contains_name(&item.attrs, sym::start) {
EntryPointType::Start
} else if sess.contains_name(&item.attrs, sym::main) {
EntryPointType::MainAttr
} else if item.ident.name == sym::main {
if depth == 1 {
// This is a top-level function so can be 'main'
EntryPointType::MainNamed
} else {
EntryPointType::OtherMain
}
} else {
EntryPointType::None
}
}
_ => EntryPointType::None,
}
}
/// A folder used to remove any entry points (like fn main) because the harness
/// generator will provide its own
struct EntryPointCleaner<'a> {
sess: &'a Session,
// Current depth in the AST
depth: usize,
def_site: Span,
}
impl<'a> MutVisitor for EntryPointCleaner<'a> {
fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
self.depth += 1;
let item = noop_flat_map_item(i, self).expect_one("noop did something");
self.depth -= 1;
// Remove any #[main] or #[start] from the AST so it doesn't
// clash with the one we're going to add, but mark it as
// #[allow(dead_code)] to avoid printing warnings.
let item = match entry_point_type(self.sess, &item, self.depth) {
EntryPointType::MainNamed | EntryPointType::MainAttr | EntryPointType::Start => item
.map(|ast::Item { id, ident, attrs, kind, vis, span, tokens }| {
let allow_ident = Ident::new(sym::allow, self.def_site);
let dc_nested =
attr::mk_nested_word_item(Ident::new(sym::dead_code, self.def_site));
let allow_dead_code_item = attr::mk_list_item(allow_ident, vec![dc_nested]);
let allow_dead_code = attr::mk_attr_outer(allow_dead_code_item);
let attrs = attrs
.into_iter()
.filter(|attr| {
!self.sess.check_name(attr, sym::main)
&& !self.sess.check_name(attr, sym::start)
})
.chain(iter::once(allow_dead_code))
.collect();
ast::Item { id, ident, attrs, kind, vis, span, tokens }
}),
EntryPointType::None | EntryPointType::OtherMain => item,
};
smallvec![item]
}
fn visit_mac(&mut self, _mac: &mut ast::MacCall) {
// Do nothing.
}
}
/// Crawl over the crate, inserting test reexports and the test main function
fn generate_test_harness(
sess: &Session,
resolver: &mut dyn ResolverExpand,
reexport_test_harness_main: Option<Symbol>,
krate: &mut ast::Crate,
features: &Features,
panic_strategy: PanicStrategy,
test_runner: Option<ast::Path>,
) {
let mut econfig = ExpansionConfig::default("test".to_string());
econfig.features = Some(features);
let ext_cx = ExtCtxt::new(sess, econfig, resolver, None);
let expn_id = ext_cx.resolver.expansion_for_ast_pass(
DUMMY_SP,
AstPass::TestHarness,
&[sym::main, sym::test, sym::rustc_attrs],
None,
);
let def_site = DUMMY_SP.with_def_site_ctxt(expn_id);
// Remove the entry points
let mut cleaner = EntryPointCleaner { sess, depth: 0, def_site };
cleaner.visit_crate(krate);
let cx = TestCtxt {
ext_cx,
panic_strategy,
def_site,
test_cases: Vec::new(),
reexport_test_harness_main,
test_runner,
};
TestHarnessGenerator { cx, tests: Vec::new() }.visit_crate(krate);
}
/// Creates a function item for use as the main function of a test build.
/// This function will call the `test_runner` as specified by the crate attribute
///
/// By default this expands to
///
/// ```
/// #[main]
/// pub fn main() {
/// extern crate test;
/// test::test_main_static(&[
/// &test_const1,
/// &test_const2,
/// &test_const3,
/// ]);
/// }
/// ```
///
/// Most of the `Ident`s have the usual def-site hygiene for the AST pass. The
/// exception is the `test_const`s. These have a syntax context that has two
/// opaque marks: one from the expansion of `test` or `test_case`, and one
/// generated in `TestHarnessGenerator::flat_map_item`. When resolving this
/// identifier after failing to find a matching identifier in the root module
/// we remove the outer mark, and try resolving at its def-site, which will
/// then resolve to `test_const`.
///
/// The expansion here can be controlled by two attributes:
///
/// [`TestCtxt::reexport_test_harness_main`] provides a different name for the `main`
/// function and [`TestCtxt::test_runner`] provides a path that replaces
/// `test::test_main_static`.
fn mk_main(cx: &mut TestCtxt<'_>) -> P<ast::Item> {
let sp = cx.def_site;
let ecx = &cx.ext_cx;
let test_id = Ident::new(sym::test, sp);
let runner_name = match cx.panic_strategy {
PanicStrategy::Unwind => "test_main_static",
PanicStrategy::Abort => "test_main_static_abort",
};
// test::test_main_static(...)
let mut test_runner = cx
.test_runner
.clone()
.unwrap_or(ecx.path(sp, vec![test_id, Ident::from_str_and_span(runner_name, sp)]));
test_runner.span = sp;
let test_main_path_expr = ecx.expr_path(test_runner);
let call_test_main = ecx.expr_call(sp, test_main_path_expr, vec![mk_tests_slice(cx, sp)]);
let call_test_main = ecx.stmt_expr(call_test_main);
// extern crate test
let test_extern_stmt =
ecx.stmt_item(sp, ecx.item(sp, test_id, vec![], ast::ItemKind::ExternCrate(None)));
// #[main]
let main_meta = ecx.meta_word(sp, sym::main);
let main_attr = ecx.attribute(main_meta);
// pub fn main() { ... }
let main_ret_ty = ecx.ty(sp, ast::TyKind::Tup(vec![]));
// If no test runner is provided we need to import the test crate
let main_body = if cx.test_runner.is_none() {
ecx.block(sp, vec![test_extern_stmt, call_test_main])
} else {
ecx.block(sp, vec![call_test_main])
};
let decl = ecx.fn_decl(vec![], ast::FnRetTy::Ty(main_ret_ty));
let sig = ast::FnSig { decl, header: ast::FnHeader::default(), span: sp };
let def = ast::Defaultness::Final;
let main = ast::ItemKind::Fn(def, sig, ast::Generics::default(), Some(main_body));
// Honor the reexport_test_harness_main attribute
let main_id = match cx.reexport_test_harness_main {
Some(sym) => Ident::new(sym, sp.with_ctxt(SyntaxContext::root())),
None => Ident::new(sym::main, sp),
};
let main = P(ast::Item {
ident: main_id,
attrs: vec![main_attr],
id: ast::DUMMY_NODE_ID,
kind: main,
vis: respan(sp, ast::VisibilityKind::Public),
span: sp,
tokens: None,
});
// Integrate the new item into existing module structures.
let main = AstFragment::Items(smallvec![main]);
cx.ext_cx.monotonic_expander().fully_expand_fragment(main).make_items().pop().unwrap()
}
/// Creates a slice containing every test like so:
/// &[&test1, &test2]
fn mk_tests_slice(cx: &TestCtxt<'_>, sp: Span) -> P<ast::Expr> {
debug!("building test vector from {} tests", cx.test_cases.len());
let ecx = &cx.ext_cx;
ecx.expr_vec_slice(
sp,
cx.test_cases
.iter()
.map(|test| {
ecx.expr_addr_of(test.span, ecx.expr_path(ecx.path(test.span, vec![test.ident])))
})
.collect(),
)
}
fn is_test_case(sess: &Session, i: &ast::Item) -> bool {
sess.contains_name(&i.attrs, sym::rustc_test_marker)
}
fn get_test_runner(
sess: &Session,
sd: &rustc_errors::Handler,
krate: &ast::Crate,
) -> Option<ast::Path> {
let test_attr = sess.find_by_name(&krate.attrs, sym::test_runner)?;
let meta_list = test_attr.meta_item_list()?;
let span = test_attr.span;
match &*meta_list {
[single] => match single.meta_item() {
Some(meta_item) if meta_item.is_word() => return Some(meta_item.path.clone()),
_ => sd.struct_span_err(span, "`test_runner` argument must be a path").emit(),
},
_ => sd.struct_span_err(span, "`#![test_runner(..)]` accepts exactly 1 argument").emit(),
}
None
}


@ -0,0 +1,29 @@
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_expand::base::{self, ExtCtxt};
use rustc_span::symbol::kw;
use rustc_span::Span;
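// Accepts exactly `trace_macros!(true)` or `trace_macros!(false)`; anything
// else is reported as an error below.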
pub fn expand_trace_macros(
cx: &mut ExtCtxt<'_>,
sp: Span,
tt: TokenStream,
) -> Box<dyn base::MacResult + 'static> {
let mut cursor = tt.into_trees();
let mut err = false;
let value = match &cursor.next() {
Some(TokenTree::Token(token)) if token.is_keyword(kw::True) => true,
Some(TokenTree::Token(token)) if token.is_keyword(kw::False) => false,
_ => {
err = true;
false
}
};
err |= cursor.next().is_some();
if err {
cx.span_err(sp, "trace_macros! accepts only `true` or `false`")
} else {
cx.set_trace_macros(value);
}
base::DummyResult::any_valid(sp)
}


@ -0,0 +1,12 @@
use rustc_ast::MetaItem;
use rustc_expand::base::ExtCtxt;
use rustc_feature::AttributeTemplate;
use rustc_parse::validate_attr;
use rustc_span::Symbol;
pub fn check_builtin_macro_attribute(ecx: &ExtCtxt<'_>, meta_item: &MetaItem, name: Symbol) {
// All the built-in macro attributes are "words" at the moment.
let template = AttributeTemplate { word: true, ..Default::default() };
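// e.g. `#[global_allocator]` matches this word-only template, while something
// like `#[global_allocator(foo)]` would be flagged by `check_builtin_attribute`.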
let attr = ecx.attribute(meta_item.clone());
validate_attr::check_builtin_attribute(&ecx.sess.parse_sess, &attr, name, template);
}