mv compiler to compiler/

parent db534b3ac2
commit 9e5f7d5631

1686 changed files with 941 additions and 1051 deletions
compiler/rustc_expand/src/base.rs (new file, 1227 lines added)
File diff suppressed because it is too large.
compiler/rustc_expand/src/build.rs (new file, 649 lines added)
@@ -0,0 +1,649 @@
use crate::base::ExtCtxt;

use rustc_ast::attr;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, AttrVec, BlockCheckMode, Expr, PatKind, UnOp};
use rustc_span::source_map::{respan, Spanned};
use rustc_span::symbol::{kw, sym, Ident, Symbol};

use rustc_span::Span;

impl<'a> ExtCtxt<'a> {
    pub fn path(&self, span: Span, strs: Vec<Ident>) -> ast::Path {
        self.path_all(span, false, strs, vec![])
    }
    pub fn path_ident(&self, span: Span, id: Ident) -> ast::Path {
        self.path(span, vec![id])
    }
    pub fn path_global(&self, span: Span, strs: Vec<Ident>) -> ast::Path {
        self.path_all(span, true, strs, vec![])
    }
    pub fn path_all(
        &self,
        span: Span,
        global: bool,
        mut idents: Vec<Ident>,
        args: Vec<ast::GenericArg>,
    ) -> ast::Path {
        assert!(!idents.is_empty());
        let add_root = global && !idents[0].is_path_segment_keyword();
        let mut segments = Vec::with_capacity(idents.len() + add_root as usize);
        if add_root {
            segments.push(ast::PathSegment::path_root(span));
        }
        let last_ident = idents.pop().unwrap();
        segments.extend(
            idents.into_iter().map(|ident| ast::PathSegment::from_ident(ident.with_span_pos(span))),
        );
        let args = if !args.is_empty() {
            let args = args.into_iter().map(ast::AngleBracketedArg::Arg).collect();
            ast::AngleBracketedArgs { args, span }.into()
        } else {
            None
        };
        segments.push(ast::PathSegment {
            ident: last_ident.with_span_pos(span),
            id: ast::DUMMY_NODE_ID,
            args,
        });
        ast::Path { span, segments }
    }

    pub fn ty_mt(&self, ty: P<ast::Ty>, mutbl: ast::Mutability) -> ast::MutTy {
        ast::MutTy { ty, mutbl }
    }

    pub fn ty(&self, span: Span, kind: ast::TyKind) -> P<ast::Ty> {
        P(ast::Ty { id: ast::DUMMY_NODE_ID, span, kind })
    }

    pub fn ty_path(&self, path: ast::Path) -> P<ast::Ty> {
        self.ty(path.span, ast::TyKind::Path(None, path))
    }

    // Might need to take bounds as an argument in the future, if you ever want
    // to generate a bounded existential trait type.
    pub fn ty_ident(&self, span: Span, ident: Ident) -> P<ast::Ty> {
        self.ty_path(self.path_ident(span, ident))
    }

    pub fn anon_const(&self, span: Span, kind: ast::ExprKind) -> ast::AnonConst {
        ast::AnonConst {
            id: ast::DUMMY_NODE_ID,
            value: P(ast::Expr {
                id: ast::DUMMY_NODE_ID,
                kind,
                span,
                attrs: AttrVec::new(),
                tokens: None,
            }),
        }
    }

    pub fn const_ident(&self, span: Span, ident: Ident) -> ast::AnonConst {
        self.anon_const(span, ast::ExprKind::Path(None, self.path_ident(span, ident)))
    }

    pub fn ty_rptr(
        &self,
        span: Span,
        ty: P<ast::Ty>,
        lifetime: Option<ast::Lifetime>,
        mutbl: ast::Mutability,
    ) -> P<ast::Ty> {
        self.ty(span, ast::TyKind::Rptr(lifetime, self.ty_mt(ty, mutbl)))
    }

    pub fn ty_ptr(&self, span: Span, ty: P<ast::Ty>, mutbl: ast::Mutability) -> P<ast::Ty> {
        self.ty(span, ast::TyKind::Ptr(self.ty_mt(ty, mutbl)))
    }

    pub fn typaram(
        &self,
        span: Span,
        ident: Ident,
        attrs: Vec<ast::Attribute>,
        bounds: ast::GenericBounds,
        default: Option<P<ast::Ty>>,
    ) -> ast::GenericParam {
        ast::GenericParam {
            ident: ident.with_span_pos(span),
            id: ast::DUMMY_NODE_ID,
            attrs: attrs.into(),
            bounds,
            kind: ast::GenericParamKind::Type { default },
            is_placeholder: false,
        }
    }

    pub fn trait_ref(&self, path: ast::Path) -> ast::TraitRef {
        ast::TraitRef { path, ref_id: ast::DUMMY_NODE_ID }
    }

    pub fn poly_trait_ref(&self, span: Span, path: ast::Path) -> ast::PolyTraitRef {
        ast::PolyTraitRef {
            bound_generic_params: Vec::new(),
            trait_ref: self.trait_ref(path),
            span,
        }
    }

    pub fn trait_bound(&self, path: ast::Path) -> ast::GenericBound {
        ast::GenericBound::Trait(
            self.poly_trait_ref(path.span, path),
            ast::TraitBoundModifier::None,
        )
    }

    pub fn lifetime(&self, span: Span, ident: Ident) -> ast::Lifetime {
        ast::Lifetime { id: ast::DUMMY_NODE_ID, ident: ident.with_span_pos(span) }
    }

    pub fn lifetime_def(
        &self,
        span: Span,
        ident: Ident,
        attrs: Vec<ast::Attribute>,
        bounds: ast::GenericBounds,
    ) -> ast::GenericParam {
        let lifetime = self.lifetime(span, ident);
        ast::GenericParam {
            ident: lifetime.ident,
            id: lifetime.id,
            attrs: attrs.into(),
            bounds,
            kind: ast::GenericParamKind::Lifetime,
            is_placeholder: false,
        }
    }

    pub fn stmt_expr(&self, expr: P<ast::Expr>) -> ast::Stmt {
        ast::Stmt { id: ast::DUMMY_NODE_ID, span: expr.span, kind: ast::StmtKind::Expr(expr) }
    }

    pub fn stmt_let(&self, sp: Span, mutbl: bool, ident: Ident, ex: P<ast::Expr>) -> ast::Stmt {
        let pat = if mutbl {
            let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Mut);
            self.pat_ident_binding_mode(sp, ident, binding_mode)
        } else {
            self.pat_ident(sp, ident)
        };
        let local = P(ast::Local {
            pat,
            ty: None,
            init: Some(ex),
            id: ast::DUMMY_NODE_ID,
            span: sp,
            attrs: AttrVec::new(),
        });
        ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span: sp }
    }

    // Generates `let _: Type;`, which is usually used for type assertions.
    pub fn stmt_let_type_only(&self, span: Span, ty: P<ast::Ty>) -> ast::Stmt {
        let local = P(ast::Local {
            pat: self.pat_wild(span),
            ty: Some(ty),
            init: None,
            id: ast::DUMMY_NODE_ID,
            span,
            attrs: AttrVec::new(),
        });
        ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Local(local), span }
    }

    pub fn stmt_item(&self, sp: Span, item: P<ast::Item>) -> ast::Stmt {
        ast::Stmt { id: ast::DUMMY_NODE_ID, kind: ast::StmtKind::Item(item), span: sp }
    }

    pub fn block_expr(&self, expr: P<ast::Expr>) -> P<ast::Block> {
        self.block(
            expr.span,
            vec![ast::Stmt {
                id: ast::DUMMY_NODE_ID,
                span: expr.span,
                kind: ast::StmtKind::Expr(expr),
            }],
        )
    }
    pub fn block(&self, span: Span, stmts: Vec<ast::Stmt>) -> P<ast::Block> {
        P(ast::Block { stmts, id: ast::DUMMY_NODE_ID, rules: BlockCheckMode::Default, span })
    }

    pub fn expr(&self, span: Span, kind: ast::ExprKind) -> P<ast::Expr> {
        P(ast::Expr { id: ast::DUMMY_NODE_ID, kind, span, attrs: AttrVec::new(), tokens: None })
    }

    pub fn expr_path(&self, path: ast::Path) -> P<ast::Expr> {
        self.expr(path.span, ast::ExprKind::Path(None, path))
    }

    pub fn expr_ident(&self, span: Span, id: Ident) -> P<ast::Expr> {
        self.expr_path(self.path_ident(span, id))
    }
    pub fn expr_self(&self, span: Span) -> P<ast::Expr> {
        self.expr_ident(span, Ident::with_dummy_span(kw::SelfLower))
    }

    pub fn expr_binary(
        &self,
        sp: Span,
        op: ast::BinOpKind,
        lhs: P<ast::Expr>,
        rhs: P<ast::Expr>,
    ) -> P<ast::Expr> {
        self.expr(sp, ast::ExprKind::Binary(Spanned { node: op, span: sp }, lhs, rhs))
    }

    pub fn expr_deref(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
        self.expr(sp, ast::ExprKind::Unary(UnOp::Deref, e))
    }

    pub fn expr_addr_of(&self, sp: Span, e: P<ast::Expr>) -> P<ast::Expr> {
        self.expr(sp, ast::ExprKind::AddrOf(ast::BorrowKind::Ref, ast::Mutability::Not, e))
    }

    pub fn expr_call(
        &self,
        span: Span,
        expr: P<ast::Expr>,
        args: Vec<P<ast::Expr>>,
    ) -> P<ast::Expr> {
        self.expr(span, ast::ExprKind::Call(expr, args))
    }
    pub fn expr_call_ident(&self, span: Span, id: Ident, args: Vec<P<ast::Expr>>) -> P<ast::Expr> {
        self.expr(span, ast::ExprKind::Call(self.expr_ident(span, id), args))
    }
    pub fn expr_call_global(
        &self,
        sp: Span,
        fn_path: Vec<Ident>,
        args: Vec<P<ast::Expr>>,
    ) -> P<ast::Expr> {
        let pathexpr = self.expr_path(self.path_global(sp, fn_path));
        self.expr_call(sp, pathexpr, args)
    }
    pub fn expr_method_call(
        &self,
        span: Span,
        expr: P<ast::Expr>,
        ident: Ident,
        mut args: Vec<P<ast::Expr>>,
    ) -> P<ast::Expr> {
        args.insert(0, expr);
        let segment = ast::PathSegment::from_ident(ident.with_span_pos(span));
        self.expr(span, ast::ExprKind::MethodCall(segment, args, span))
    }
    pub fn expr_block(&self, b: P<ast::Block>) -> P<ast::Expr> {
        self.expr(b.span, ast::ExprKind::Block(b, None))
    }
    pub fn field_imm(&self, span: Span, ident: Ident, e: P<ast::Expr>) -> ast::Field {
        ast::Field {
            ident: ident.with_span_pos(span),
            expr: e,
            span,
            is_shorthand: false,
            attrs: AttrVec::new(),
            id: ast::DUMMY_NODE_ID,
            is_placeholder: false,
        }
    }
    pub fn expr_struct(
        &self,
        span: Span,
        path: ast::Path,
        fields: Vec<ast::Field>,
    ) -> P<ast::Expr> {
        self.expr(span, ast::ExprKind::Struct(path, fields, None))
    }
    pub fn expr_struct_ident(
        &self,
        span: Span,
        id: Ident,
        fields: Vec<ast::Field>,
    ) -> P<ast::Expr> {
        self.expr_struct(span, self.path_ident(span, id), fields)
    }

    pub fn expr_lit(&self, span: Span, lit_kind: ast::LitKind) -> P<ast::Expr> {
        let lit = ast::Lit::from_lit_kind(lit_kind, span);
        self.expr(span, ast::ExprKind::Lit(lit))
    }
    pub fn expr_usize(&self, span: Span, i: usize) -> P<ast::Expr> {
        self.expr_lit(
            span,
            ast::LitKind::Int(i as u128, ast::LitIntType::Unsigned(ast::UintTy::Usize)),
        )
    }
    pub fn expr_u32(&self, sp: Span, u: u32) -> P<ast::Expr> {
        self.expr_lit(sp, ast::LitKind::Int(u as u128, ast::LitIntType::Unsigned(ast::UintTy::U32)))
    }
    pub fn expr_bool(&self, sp: Span, value: bool) -> P<ast::Expr> {
        self.expr_lit(sp, ast::LitKind::Bool(value))
    }

    pub fn expr_vec(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
        self.expr(sp, ast::ExprKind::Array(exprs))
    }
    pub fn expr_vec_slice(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
        self.expr_addr_of(sp, self.expr_vec(sp, exprs))
    }
    pub fn expr_str(&self, sp: Span, s: Symbol) -> P<ast::Expr> {
        self.expr_lit(sp, ast::LitKind::Str(s, ast::StrStyle::Cooked))
    }

    pub fn expr_cast(&self, sp: Span, expr: P<ast::Expr>, ty: P<ast::Ty>) -> P<ast::Expr> {
        self.expr(sp, ast::ExprKind::Cast(expr, ty))
    }

    pub fn expr_some(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
        let some = self.std_path(&[sym::option, sym::Option, sym::Some]);
        self.expr_call_global(sp, some, vec![expr])
    }

    pub fn expr_tuple(&self, sp: Span, exprs: Vec<P<ast::Expr>>) -> P<ast::Expr> {
        self.expr(sp, ast::ExprKind::Tup(exprs))
    }

    pub fn expr_fail(&self, span: Span, msg: Symbol) -> P<ast::Expr> {
        self.expr_call_global(
            span,
            [sym::std, sym::rt, sym::begin_panic].iter().map(|s| Ident::new(*s, span)).collect(),
            vec![self.expr_str(span, msg)],
        )
    }

    pub fn expr_unreachable(&self, span: Span) -> P<ast::Expr> {
        self.expr_fail(span, Symbol::intern("internal error: entered unreachable code"))
    }

    pub fn expr_ok(&self, sp: Span, expr: P<ast::Expr>) -> P<ast::Expr> {
        let ok = self.std_path(&[sym::result, sym::Result, sym::Ok]);
        self.expr_call_global(sp, ok, vec![expr])
    }

    pub fn expr_try(&self, sp: Span, head: P<ast::Expr>) -> P<ast::Expr> {
        let ok = self.std_path(&[sym::result, sym::Result, sym::Ok]);
        let ok_path = self.path_global(sp, ok);
        let err = self.std_path(&[sym::result, sym::Result, sym::Err]);
        let err_path = self.path_global(sp, err);

        let binding_variable = Ident::new(sym::__try_var, sp);
        let binding_pat = self.pat_ident(sp, binding_variable);
        let binding_expr = self.expr_ident(sp, binding_variable);

        // `Ok(__try_var)` pattern
        let ok_pat = self.pat_tuple_struct(sp, ok_path, vec![binding_pat.clone()]);

        // `Err(__try_var)` (pattern and expression respectively)
        let err_pat = self.pat_tuple_struct(sp, err_path.clone(), vec![binding_pat]);
        let err_inner_expr =
            self.expr_call(sp, self.expr_path(err_path), vec![binding_expr.clone()]);
        // `return Err(__try_var)`
        let err_expr = self.expr(sp, ast::ExprKind::Ret(Some(err_inner_expr)));

        // `Ok(__try_var) => __try_var`
        let ok_arm = self.arm(sp, ok_pat, binding_expr);
        // `Err(__try_var) => return Err(__try_var)`
        let err_arm = self.arm(sp, err_pat, err_expr);

        // `match head { Ok() => ..., Err() => ... }`
        self.expr_match(sp, head, vec![ok_arm, err_arm])
    }

    pub fn pat(&self, span: Span, kind: PatKind) -> P<ast::Pat> {
        P(ast::Pat { id: ast::DUMMY_NODE_ID, kind, span, tokens: None })
    }
    pub fn pat_wild(&self, span: Span) -> P<ast::Pat> {
        self.pat(span, PatKind::Wild)
    }
    pub fn pat_lit(&self, span: Span, expr: P<ast::Expr>) -> P<ast::Pat> {
        self.pat(span, PatKind::Lit(expr))
    }
    pub fn pat_ident(&self, span: Span, ident: Ident) -> P<ast::Pat> {
        let binding_mode = ast::BindingMode::ByValue(ast::Mutability::Not);
        self.pat_ident_binding_mode(span, ident, binding_mode)
    }

    pub fn pat_ident_binding_mode(
        &self,
        span: Span,
        ident: Ident,
        bm: ast::BindingMode,
    ) -> P<ast::Pat> {
        let pat = PatKind::Ident(bm, ident.with_span_pos(span), None);
        self.pat(span, pat)
    }
    pub fn pat_path(&self, span: Span, path: ast::Path) -> P<ast::Pat> {
        self.pat(span, PatKind::Path(None, path))
    }
    pub fn pat_tuple_struct(
        &self,
        span: Span,
        path: ast::Path,
        subpats: Vec<P<ast::Pat>>,
    ) -> P<ast::Pat> {
        self.pat(span, PatKind::TupleStruct(path, subpats))
    }
    pub fn pat_struct(
        &self,
        span: Span,
        path: ast::Path,
        field_pats: Vec<ast::FieldPat>,
    ) -> P<ast::Pat> {
        self.pat(span, PatKind::Struct(path, field_pats, false))
    }
    pub fn pat_tuple(&self, span: Span, pats: Vec<P<ast::Pat>>) -> P<ast::Pat> {
        self.pat(span, PatKind::Tuple(pats))
    }

    pub fn pat_some(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
        let some = self.std_path(&[sym::option, sym::Option, sym::Some]);
        let path = self.path_global(span, some);
        self.pat_tuple_struct(span, path, vec![pat])
    }

    pub fn pat_none(&self, span: Span) -> P<ast::Pat> {
        let some = self.std_path(&[sym::option, sym::Option, sym::None]);
        let path = self.path_global(span, some);
        self.pat_path(span, path)
    }

    pub fn pat_ok(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
        let some = self.std_path(&[sym::result, sym::Result, sym::Ok]);
        let path = self.path_global(span, some);
        self.pat_tuple_struct(span, path, vec![pat])
    }

    pub fn pat_err(&self, span: Span, pat: P<ast::Pat>) -> P<ast::Pat> {
        let some = self.std_path(&[sym::result, sym::Result, sym::Err]);
        let path = self.path_global(span, some);
        self.pat_tuple_struct(span, path, vec![pat])
    }

    pub fn arm(&self, span: Span, pat: P<ast::Pat>, expr: P<ast::Expr>) -> ast::Arm {
        ast::Arm {
            attrs: vec![],
            pat,
            guard: None,
            body: expr,
            span,
            id: ast::DUMMY_NODE_ID,
            is_placeholder: false,
        }
    }

    pub fn arm_unreachable(&self, span: Span) -> ast::Arm {
        self.arm(span, self.pat_wild(span), self.expr_unreachable(span))
    }

    pub fn expr_match(&self, span: Span, arg: P<ast::Expr>, arms: Vec<ast::Arm>) -> P<Expr> {
        self.expr(span, ast::ExprKind::Match(arg, arms))
    }

    pub fn expr_if(
        &self,
        span: Span,
        cond: P<ast::Expr>,
        then: P<ast::Expr>,
        els: Option<P<ast::Expr>>,
    ) -> P<ast::Expr> {
        let els = els.map(|x| self.expr_block(self.block_expr(x)));
        self.expr(span, ast::ExprKind::If(cond, self.block_expr(then), els))
    }

    pub fn lambda_fn_decl(
        &self,
        span: Span,
        fn_decl: P<ast::FnDecl>,
        body: P<ast::Expr>,
        fn_decl_span: Span,
    ) -> P<ast::Expr> {
        self.expr(
            span,
            ast::ExprKind::Closure(
                ast::CaptureBy::Ref,
                ast::Async::No,
                ast::Movability::Movable,
                fn_decl,
                body,
                fn_decl_span,
            ),
        )
    }

    pub fn lambda(&self, span: Span, ids: Vec<Ident>, body: P<ast::Expr>) -> P<ast::Expr> {
        let fn_decl = self.fn_decl(
            ids.iter().map(|id| self.param(span, *id, self.ty(span, ast::TyKind::Infer))).collect(),
            ast::FnRetTy::Default(span),
        );

        // FIXME -- We are using `span` as the span of the `|...|`
        // part of the lambda, but it probably (maybe?) corresponds to
        // the entire lambda body. Probably we should extend the API
        // here, but that's not entirely clear.
        self.expr(
            span,
            ast::ExprKind::Closure(
                ast::CaptureBy::Ref,
                ast::Async::No,
                ast::Movability::Movable,
                fn_decl,
                body,
                span,
            ),
        )
    }

    pub fn lambda0(&self, span: Span, body: P<ast::Expr>) -> P<ast::Expr> {
        self.lambda(span, Vec::new(), body)
    }

    pub fn lambda1(&self, span: Span, body: P<ast::Expr>, ident: Ident) -> P<ast::Expr> {
        self.lambda(span, vec![ident], body)
    }

    pub fn lambda_stmts_1(&self, span: Span, stmts: Vec<ast::Stmt>, ident: Ident) -> P<ast::Expr> {
        self.lambda1(span, self.expr_block(self.block(span, stmts)), ident)
    }

    pub fn param(&self, span: Span, ident: Ident, ty: P<ast::Ty>) -> ast::Param {
        let arg_pat = self.pat_ident(span, ident);
        ast::Param {
            attrs: AttrVec::default(),
            id: ast::DUMMY_NODE_ID,
            pat: arg_pat,
            span,
            ty,
            is_placeholder: false,
        }
    }

    // FIXME: unused `self`
    pub fn fn_decl(&self, inputs: Vec<ast::Param>, output: ast::FnRetTy) -> P<ast::FnDecl> {
        P(ast::FnDecl { inputs, output })
    }

    pub fn item(
        &self,
        span: Span,
        name: Ident,
        attrs: Vec<ast::Attribute>,
        kind: ast::ItemKind,
    ) -> P<ast::Item> {
        // FIXME: Would be nice if our generated code didn't violate
        // Rust coding conventions
        P(ast::Item {
            ident: name,
            attrs,
            id: ast::DUMMY_NODE_ID,
            kind,
            vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
            span,
            tokens: None,
        })
    }

    pub fn variant(&self, span: Span, ident: Ident, tys: Vec<P<ast::Ty>>) -> ast::Variant {
        let vis_span = span.shrink_to_lo();
        let fields: Vec<_> = tys
            .into_iter()
            .map(|ty| ast::StructField {
                span: ty.span,
                ty,
                ident: None,
                vis: respan(vis_span, ast::VisibilityKind::Inherited),
                attrs: Vec::new(),
                id: ast::DUMMY_NODE_ID,
                is_placeholder: false,
            })
            .collect();

        let vdata = if fields.is_empty() {
            ast::VariantData::Unit(ast::DUMMY_NODE_ID)
        } else {
            ast::VariantData::Tuple(fields, ast::DUMMY_NODE_ID)
        };

        ast::Variant {
            attrs: Vec::new(),
            data: vdata,
            disr_expr: None,
            id: ast::DUMMY_NODE_ID,
            ident,
            vis: respan(vis_span, ast::VisibilityKind::Inherited),
            span,
            is_placeholder: false,
        }
    }

    pub fn item_static(
        &self,
        span: Span,
        name: Ident,
        ty: P<ast::Ty>,
        mutbl: ast::Mutability,
        expr: P<ast::Expr>,
    ) -> P<ast::Item> {
        self.item(span, name, Vec::new(), ast::ItemKind::Static(ty, mutbl, Some(expr)))
    }

    pub fn item_const(
        &self,
        span: Span,
        name: Ident,
        ty: P<ast::Ty>,
        expr: P<ast::Expr>,
    ) -> P<ast::Item> {
        let def = ast::Defaultness::Final;
        self.item(span, name, Vec::new(), ast::ItemKind::Const(def, ty, Some(expr)))
    }

    pub fn attribute(&self, mi: ast::MetaItem) -> ast::Attribute {
        attr::mk_attr_outer(mi)
    }

    pub fn meta_word(&self, sp: Span, w: Symbol) -> ast::MetaItem {
        attr::mk_word_item(Ident::new(w, sp))
    }
}
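The inline comments in `expr_try` above spell out the match it assembles. As a quick reference, here is a minimal sketch of that desugaring written as surface Rust rather than AST; the function name and its concrete `Result` types are invented for illustration:

```rust
// A sketch (not compiler output) of what `cx.expr_try(sp, head)` builds:
// `match head { Ok(__try_var) => __try_var, Err(__try_var) => return Err(__try_var) }`
fn try_desugar(head: Result<u32, String>) -> Result<u32, String> {
    let value = match head {
        // `Ok(__try_var) => __try_var`
        Ok(__try_var) => __try_var,
        // `Err(__try_var) => return Err(__try_var)`
        Err(__try_var) => return Err(__try_var),
    };
    Ok(value)
}
```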
compiler/rustc_expand/src/config.rs (new file, 533 lines added)
@@ -0,0 +1,533 @@
//! Conditional compilation stripping.

use rustc_ast::attr::HasAttrs;
use rustc_ast::mut_visit::*;
use rustc_ast::ptr::P;
use rustc_ast::{self as ast, AttrItem, Attribute, MetaItem};
use rustc_attr as attr;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::map_in_place::MapInPlace;
use rustc_errors::{error_code, struct_span_err, Applicability, Handler};
use rustc_feature::{Feature, Features, State as FeatureState};
use rustc_feature::{
    ACCEPTED_FEATURES, ACTIVE_FEATURES, REMOVED_FEATURES, STABLE_REMOVED_FEATURES,
};
use rustc_parse::{parse_in, validate_attr};
use rustc_session::parse::feature_err;
use rustc_session::Session;
use rustc_span::edition::{Edition, ALL_EDITIONS};
use rustc_span::symbol::{sym, Symbol};
use rustc_span::{Span, DUMMY_SP};

use smallvec::SmallVec;

/// A folder that strips out items that do not belong in the current configuration.
pub struct StripUnconfigured<'a> {
    pub sess: &'a Session,
    pub features: Option<&'a Features>,
}

fn get_features(
    sess: &Session,
    span_handler: &Handler,
    krate_attrs: &[ast::Attribute],
) -> Features {
    fn feature_removed(span_handler: &Handler, span: Span, reason: Option<&str>) {
        let mut err = struct_span_err!(span_handler, span, E0557, "feature has been removed");
        err.span_label(span, "feature has been removed");
        if let Some(reason) = reason {
            err.note(reason);
        }
        err.emit();
    }

    fn active_features_up_to(edition: Edition) -> impl Iterator<Item = &'static Feature> {
        ACTIVE_FEATURES.iter().filter(move |feature| {
            if let Some(feature_edition) = feature.edition {
                feature_edition <= edition
            } else {
                false
            }
        })
    }

    let mut features = Features::default();
    let mut edition_enabled_features = FxHashMap::default();
    let crate_edition = sess.edition();

    for &edition in ALL_EDITIONS {
        if edition <= crate_edition {
            // The `crate_edition` implies its respective umbrella feature-gate
            // (i.e., `#![feature(rust_20XX_preview)]` isn't needed on edition 20XX).
            edition_enabled_features.insert(edition.feature_name(), edition);
        }
    }

    for feature in active_features_up_to(crate_edition) {
        feature.set(&mut features, DUMMY_SP);
        edition_enabled_features.insert(feature.name, crate_edition);
    }

    // Process the edition umbrella feature-gates first, to ensure
    // `edition_enabled_features` is completed before it's queried.
    for attr in krate_attrs {
        if !sess.check_name(attr, sym::feature) {
            continue;
        }

        let list = match attr.meta_item_list() {
            Some(list) => list,
            None => continue,
        };

        for mi in list {
            if !mi.is_word() {
                continue;
            }

            let name = mi.name_or_empty();

            let edition = ALL_EDITIONS.iter().find(|e| name == e.feature_name()).copied();
            if let Some(edition) = edition {
                if edition <= crate_edition {
                    continue;
                }

                for feature in active_features_up_to(edition) {
                    // FIXME(Manishearth) there is currently no way to set
                    // lib features by edition
                    feature.set(&mut features, DUMMY_SP);
                    edition_enabled_features.insert(feature.name, edition);
                }
            }
        }
    }

    for attr in krate_attrs {
        if !sess.check_name(attr, sym::feature) {
            continue;
        }

        let list = match attr.meta_item_list() {
            Some(list) => list,
            None => continue,
        };

        let bad_input = |span| {
            struct_span_err!(span_handler, span, E0556, "malformed `feature` attribute input")
        };

        for mi in list {
            let name = match mi.ident() {
                Some(ident) if mi.is_word() => ident.name,
                Some(ident) => {
                    bad_input(mi.span())
                        .span_suggestion(
                            mi.span(),
                            "expected just one word",
                            format!("{}", ident.name),
                            Applicability::MaybeIncorrect,
                        )
                        .emit();
                    continue;
                }
                None => {
                    bad_input(mi.span()).span_label(mi.span(), "expected just one word").emit();
                    continue;
                }
            };

            if let Some(edition) = edition_enabled_features.get(&name) {
                let msg =
                    &format!("the feature `{}` is included in the Rust {} edition", name, edition);
                span_handler.struct_span_warn_with_code(mi.span(), msg, error_code!(E0705)).emit();
                continue;
            }

            if ALL_EDITIONS.iter().any(|e| name == e.feature_name()) {
                // Handled in the separate loop above.
                continue;
            }

            let removed = REMOVED_FEATURES.iter().find(|f| name == f.name);
            let stable_removed = STABLE_REMOVED_FEATURES.iter().find(|f| name == f.name);
            if let Some(Feature { state, .. }) = removed.or(stable_removed) {
                if let FeatureState::Removed { reason } | FeatureState::Stabilized { reason } =
                    state
                {
                    feature_removed(span_handler, mi.span(), *reason);
                    continue;
                }
            }

            if let Some(Feature { since, .. }) = ACCEPTED_FEATURES.iter().find(|f| name == f.name) {
                let since = Some(Symbol::intern(since));
                features.declared_lang_features.push((name, mi.span(), since));
                continue;
            }

            if let Some(allowed) = sess.opts.debugging_opts.allow_features.as_ref() {
                if allowed.iter().find(|&f| name.as_str() == *f).is_none() {
                    struct_span_err!(
                        span_handler,
                        mi.span(),
                        E0725,
                        "the feature `{}` is not in the list of allowed features",
                        name
                    )
                    .emit();
                    continue;
                }
            }

            if let Some(f) = ACTIVE_FEATURES.iter().find(|f| name == f.name) {
                f.set(&mut features, mi.span());
                features.declared_lang_features.push((name, mi.span(), None));
                continue;
            }

            features.declared_lib_features.push((name, mi.span()));
        }
    }

    features
}

// `cfg_attr`-process the crate's attributes and compute the crate's features.
pub fn features(sess: &Session, mut krate: ast::Crate) -> (ast::Crate, Features) {
    let mut strip_unconfigured = StripUnconfigured { sess, features: None };

    let unconfigured_attrs = krate.attrs.clone();
    let diag = &sess.parse_sess.span_diagnostic;
    let err_count = diag.err_count();
    let features = match strip_unconfigured.configure(krate.attrs) {
        None => {
            // The entire crate is unconfigured.
            krate.attrs = Vec::new();
            krate.module.items = Vec::new();
            Features::default()
        }
        Some(attrs) => {
            krate.attrs = attrs;
            let features = get_features(sess, diag, &krate.attrs);
            if err_count == diag.err_count() {
                // Avoid reconfiguring malformed `cfg_attr`s.
                strip_unconfigured.features = Some(&features);
                strip_unconfigured.configure(unconfigured_attrs);
            }
            features
        }
    };
    (krate, features)
}

#[macro_export]
macro_rules! configure {
    ($this:ident, $node:ident) => {
        match $this.configure($node) {
            Some(node) => node,
            None => return Default::default(),
        }
    };
}

const CFG_ATTR_GRAMMAR_HELP: &str = "#[cfg_attr(condition, attribute, other_attribute, ...)]";
const CFG_ATTR_NOTE_REF: &str = "for more information, visit \
    <https://doc.rust-lang.org/reference/conditional-compilation.html\
    #the-cfg_attr-attribute>";

impl<'a> StripUnconfigured<'a> {
    pub fn configure<T: HasAttrs>(&mut self, mut node: T) -> Option<T> {
        self.process_cfg_attrs(&mut node);
        self.in_cfg(node.attrs()).then_some(node)
    }

    /// Parse and expand all `cfg_attr` attributes into a list of attributes
    /// that are within each `cfg_attr` that has a true configuration predicate.
    ///
    /// Gives compiler warnings if any `cfg_attr` does not contain any
    /// attributes and is in the original source code. Gives compiler errors if
    /// the syntax of any `cfg_attr` is incorrect.
    pub fn process_cfg_attrs<T: HasAttrs>(&mut self, node: &mut T) {
        node.visit_attrs(|attrs| {
            attrs.flat_map_in_place(|attr| self.process_cfg_attr(attr));
        });
    }

    /// Parse and expand a single `cfg_attr` attribute into a list of attributes
    /// when the configuration predicate is true, or otherwise expand into an
    /// empty list of attributes.
    ///
    /// Gives a compiler warning when the `cfg_attr` contains no attributes and
    /// is in the original source file. Gives a compiler error if the syntax of
    /// the attribute is incorrect.
    fn process_cfg_attr(&mut self, attr: Attribute) -> Vec<Attribute> {
        if !attr.has_name(sym::cfg_attr) {
            return vec![attr];
        }

        let (cfg_predicate, expanded_attrs) = match self.parse_cfg_attr(&attr) {
            None => return vec![],
            Some(r) => r,
        };

        // Lint on zero attributes in source.
        if expanded_attrs.is_empty() {
            return vec![attr];
        }

        // At this point we know the attribute is considered used.
        self.sess.mark_attr_used(&attr);

        if !attr::cfg_matches(&cfg_predicate, &self.sess.parse_sess, self.features) {
            return vec![];
        }

        // We call `process_cfg_attr` recursively in case there's a
        // `cfg_attr` inside of another `cfg_attr`. E.g.
        // `#[cfg_attr(false, cfg_attr(true, some_attr))]`.
        expanded_attrs
            .into_iter()
            .flat_map(|(item, span)| {
                let attr = attr::mk_attr_from_item(attr.style, item, span);
                self.process_cfg_attr(attr)
            })
            .collect()
    }

    fn parse_cfg_attr(&self, attr: &Attribute) -> Option<(MetaItem, Vec<(AttrItem, Span)>)> {
        match attr.get_normal_item().args {
            ast::MacArgs::Delimited(dspan, delim, ref tts) if !tts.is_empty() => {
                let msg = "wrong `cfg_attr` delimiters";
                validate_attr::check_meta_bad_delim(&self.sess.parse_sess, dspan, delim, msg);
                match parse_in(&self.sess.parse_sess, tts.clone(), "`cfg_attr` input", |p| {
                    p.parse_cfg_attr()
                }) {
                    Ok(r) => return Some(r),
                    Err(mut e) => {
                        e.help(&format!("the valid syntax is `{}`", CFG_ATTR_GRAMMAR_HELP))
                            .note(CFG_ATTR_NOTE_REF)
                            .emit();
                    }
                }
            }
            _ => self.error_malformed_cfg_attr_missing(attr.span),
        }
        None
    }

    fn error_malformed_cfg_attr_missing(&self, span: Span) {
        self.sess
            .parse_sess
            .span_diagnostic
            .struct_span_err(span, "malformed `cfg_attr` attribute input")
            .span_suggestion(
                span,
                "missing condition and attribute",
                CFG_ATTR_GRAMMAR_HELP.to_string(),
                Applicability::HasPlaceholders,
            )
            .note(CFG_ATTR_NOTE_REF)
            .emit();
    }

    /// Determines if a node with the given attributes should be included in this configuration.
    pub fn in_cfg(&self, attrs: &[Attribute]) -> bool {
        attrs.iter().all(|attr| {
            if !is_cfg(self.sess, attr) {
                return true;
            }
            let meta_item = match validate_attr::parse_meta(&self.sess.parse_sess, attr) {
                Ok(meta_item) => meta_item,
                Err(mut err) => {
                    err.emit();
                    return true;
                }
            };
            let error = |span, msg, suggestion: &str| {
                let mut err = self.sess.parse_sess.span_diagnostic.struct_span_err(span, msg);
                if !suggestion.is_empty() {
                    err.span_suggestion(
                        span,
                        "expected syntax is",
                        suggestion.into(),
                        Applicability::MaybeIncorrect,
                    );
                }
                err.emit();
                true
            };
            let span = meta_item.span;
            match meta_item.meta_item_list() {
                None => error(span, "`cfg` is not followed by parentheses", "cfg(/* predicate */)"),
                Some([]) => error(span, "`cfg` predicate is not specified", ""),
                Some([_, .., l]) => error(l.span(), "multiple `cfg` predicates are specified", ""),
                Some([single]) => match single.meta_item() {
                    Some(meta_item) => {
                        attr::cfg_matches(meta_item, &self.sess.parse_sess, self.features)
                    }
                    None => error(single.span(), "`cfg` predicate key cannot be a literal", ""),
                },
            }
        })
    }

    /// Visit attributes on expression and statements (but not attributes on items in blocks).
    fn visit_expr_attrs(&mut self, attrs: &[Attribute]) {
        // flag the offending attributes
        for attr in attrs.iter() {
            self.maybe_emit_expr_attr_err(attr);
        }
    }

    /// If attributes are not allowed on expressions, emit an error for `attr`
    pub fn maybe_emit_expr_attr_err(&self, attr: &Attribute) {
        if !self.features.map(|features| features.stmt_expr_attributes).unwrap_or(true) {
            let mut err = feature_err(
                &self.sess.parse_sess,
                sym::stmt_expr_attributes,
                attr.span,
                "attributes on expressions are experimental",
            );

            if attr.is_doc_comment() {
                err.help("`///` is for documentation comments. For a plain comment, use `//`.");
            }

            err.emit();
        }
    }

    pub fn configure_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) {
        let ast::ForeignMod { abi: _, items } = foreign_mod;
        items.flat_map_in_place(|item| self.configure(item));
    }

    pub fn configure_generic_params(&mut self, params: &mut Vec<ast::GenericParam>) {
        params.flat_map_in_place(|param| self.configure(param));
    }

    fn configure_variant_data(&mut self, vdata: &mut ast::VariantData) {
        match vdata {
            ast::VariantData::Struct(fields, ..) | ast::VariantData::Tuple(fields, _) => {
                fields.flat_map_in_place(|field| self.configure(field))
            }
            ast::VariantData::Unit(_) => {}
        }
    }

    pub fn configure_item_kind(&mut self, item: &mut ast::ItemKind) {
        match item {
            ast::ItemKind::Struct(def, _generics) | ast::ItemKind::Union(def, _generics) => {
                self.configure_variant_data(def)
            }
            ast::ItemKind::Enum(ast::EnumDef { variants }, _generics) => {
                variants.flat_map_in_place(|variant| self.configure(variant));
                for variant in variants {
                    self.configure_variant_data(&mut variant.data);
                }
            }
            _ => {}
        }
    }

    pub fn configure_expr_kind(&mut self, expr_kind: &mut ast::ExprKind) {
        match expr_kind {
            ast::ExprKind::Match(_m, arms) => {
                arms.flat_map_in_place(|arm| self.configure(arm));
            }
            ast::ExprKind::Struct(_path, fields, _base) => {
                fields.flat_map_in_place(|field| self.configure(field));
            }
            _ => {}
        }
    }

    pub fn configure_expr(&mut self, expr: &mut P<ast::Expr>) {
        self.visit_expr_attrs(expr.attrs());

        // If an expr is valid to cfg away it will have been removed by the
        // outer stmt or expression folder before descending in here.
        // Anything else is always required, and thus has to error out
        // in case of a cfg attr.
        //
        // N.B., this is intentionally not part of the visit_expr() function
        // in order for filter_map_expr() to be able to avoid this check
        if let Some(attr) = expr.attrs().iter().find(|a| is_cfg(self.sess, a)) {
            let msg = "removing an expression is not supported in this position";
            self.sess.parse_sess.span_diagnostic.span_err(attr.span, msg);
        }

        self.process_cfg_attrs(expr)
    }

    pub fn configure_pat(&mut self, pat: &mut P<ast::Pat>) {
        if let ast::PatKind::Struct(_path, fields, _etc) = &mut pat.kind {
            fields.flat_map_in_place(|field| self.configure(field));
        }
    }

    pub fn configure_fn_decl(&mut self, fn_decl: &mut ast::FnDecl) {
        fn_decl.inputs.flat_map_in_place(|arg| self.configure(arg));
    }
}

impl<'a> MutVisitor for StripUnconfigured<'a> {
    fn visit_foreign_mod(&mut self, foreign_mod: &mut ast::ForeignMod) {
        self.configure_foreign_mod(foreign_mod);
        noop_visit_foreign_mod(foreign_mod, self);
    }

    fn visit_item_kind(&mut self, item: &mut ast::ItemKind) {
        self.configure_item_kind(item);
        noop_visit_item_kind(item, self);
    }

    fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
        self.configure_expr(expr);
        self.configure_expr_kind(&mut expr.kind);
        noop_visit_expr(expr, self);
    }

    fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
        let mut expr = configure!(self, expr);
        self.configure_expr_kind(&mut expr.kind);
        noop_visit_expr(&mut expr, self);
        Some(expr)
    }

    fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
        noop_flat_map_stmt(configure!(self, stmt), self)
    }

    fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
        noop_flat_map_item(configure!(self, item), self)
    }

    fn flat_map_impl_item(&mut self, item: P<ast::AssocItem>) -> SmallVec<[P<ast::AssocItem>; 1]> {
        noop_flat_map_assoc_item(configure!(self, item), self)
    }

    fn flat_map_trait_item(&mut self, item: P<ast::AssocItem>) -> SmallVec<[P<ast::AssocItem>; 1]> {
        noop_flat_map_assoc_item(configure!(self, item), self)
    }

    fn visit_mac(&mut self, _mac: &mut ast::MacCall) {
        // Don't configure interpolated AST (cf. issue #34171).
        // Interpolated AST will get configured once the surrounding tokens are parsed.
    }

    fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
        self.configure_pat(pat);
        noop_visit_pat(pat, self)
    }

    fn visit_fn_decl(&mut self, mut fn_decl: &mut P<ast::FnDecl>) {
        self.configure_fn_decl(&mut fn_decl);
        noop_visit_fn_decl(fn_decl, self);
    }
}

fn is_cfg(sess: &Session, attr: &Attribute) -> bool {
    sess.check_name(attr, sym::cfg)
}
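For reference, this is the language-level behavior that `process_cfg_attr` implements, sketched from the user's side; the items below are invented for illustration:

```rust
// When the predicate holds, the trailing attributes are spliced in; when it
// does not, the whole attribute is removed.
#[cfg_attr(test, derive(Debug))] // becomes `#[derive(Debug)]` under `cfg(test)`
struct Point {
    x: i32,
    y: i32,
}

// `process_cfg_attr` recurses, so nested `cfg_attr`s expand too; `all()` with
// no arguments is always true, so this ends up as `#[allow(dead_code)]`.
#[cfg_attr(all(), cfg_attr(all(), allow(dead_code)))]
fn helper() {
    let _ = Point { x: 0, y: 0 };
}
```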
compiler/rustc_expand/src/expand.rs (new file, 1812 lines added)
File diff suppressed because it is too large.
compiler/rustc_expand/src/lib.rs (new file, 57 lines added)
@@ -0,0 +1,57 @@
#![feature(bool_to_option)]
#![feature(cow_is_borrowed)]
#![feature(crate_visibility_modifier)]
#![feature(decl_macro)]
#![feature(or_patterns)]
#![feature(proc_macro_diagnostic)]
#![feature(proc_macro_internals)]
#![feature(proc_macro_span)]
#![feature(try_blocks)]

#[macro_use]
extern crate rustc_macros;

extern crate proc_macro as pm;

mod placeholders;
mod proc_macro_server;

pub use mbe::macro_rules::compile_declarative_macro;
crate use rustc_span::hygiene;
pub mod base;
pub mod build;
#[macro_use]
pub mod config;
pub mod expand;
pub mod module;
pub mod proc_macro;

crate mod mbe;

// HACK(Centril, #64197): These shouldn't really be here.
// Rather, they should be with their respective modules which are defined in other crates.
// However, since for now constructing a `ParseSess` sorta requires `config` from this crate,
// these tests will need to live here in the interim.

#[cfg(test)]
mod tests;
#[cfg(test)]
mod parse {
    #[cfg(test)]
    mod tests;
    #[cfg(test)]
    mod lexer {
        #[cfg(test)]
        mod tests;
    }
}
#[cfg(test)]
mod tokenstream {
    #[cfg(test)]
    mod tests;
}
#[cfg(test)]
mod mut_visit {
    #[cfg(test)]
    mod tests;
}
compiler/rustc_expand/src/mbe.rs (new file, 152 lines added)
@@ -0,0 +1,152 @@
//! This module implements declarative macros: old `macro_rules` and the newer
//! `macro`. Declarative macros are also known as "macro by example", and that's
//! why we call this module `mbe`. For external documentation, prefer the
//! official terminology: "declarative macros".

crate mod macro_check;
crate mod macro_parser;
crate mod macro_rules;
crate mod quoted;
crate mod transcribe;

use rustc_ast::token::{self, NonterminalKind, Token, TokenKind};
use rustc_ast::tokenstream::DelimSpan;

use rustc_span::symbol::Ident;
use rustc_span::Span;

use rustc_data_structures::sync::Lrc;

/// Contains the sub-token-trees of a "delimited" token tree, such as the contents of `(`. Note
/// that the delimiter itself might be `NoDelim`.
#[derive(Clone, PartialEq, Encodable, Decodable, Debug)]
struct Delimited {
    delim: token::DelimToken,
    tts: Vec<TokenTree>,
}

impl Delimited {
    /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
    fn open_tt(&self, span: DelimSpan) -> TokenTree {
        TokenTree::token(token::OpenDelim(self.delim), span.open)
    }

    /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
    fn close_tt(&self, span: DelimSpan) -> TokenTree {
        TokenTree::token(token::CloseDelim(self.delim), span.close)
    }
}

#[derive(Clone, PartialEq, Encodable, Decodable, Debug)]
struct SequenceRepetition {
    /// The sequence of token trees
    tts: Vec<TokenTree>,
    /// The optional separator
    separator: Option<Token>,
    /// Whether the sequence can be repeated zero (*), or one or more times (+)
    kleene: KleeneToken,
    /// The number of `Match`s that appear in the sequence (and subsequences)
    num_captures: usize,
}

#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy)]
struct KleeneToken {
    span: Span,
    op: KleeneOp,
}

impl KleeneToken {
    fn new(op: KleeneOp, span: Span) -> KleeneToken {
        KleeneToken { span, op }
    }
}

/// A Kleene-style [repetition operator](https://en.wikipedia.org/wiki/Kleene_star)
/// for token sequences.
#[derive(Clone, PartialEq, Encodable, Decodable, Debug, Copy)]
enum KleeneOp {
    /// Kleene star (`*`) for zero or more repetitions
    ZeroOrMore,
    /// Kleene plus (`+`) for one or more repetitions
    OneOrMore,
    /// Kleene optional (`?`) for zero or one repetitions
    ZeroOrOne,
}

/// Similar to `tokenstream::TokenTree`, except that `$i`, `$i:ident`, and `$(...)`
/// are "first-class" token trees. Useful for parsing macros.
#[derive(Debug, Clone, PartialEq, Encodable, Decodable)]
enum TokenTree {
    Token(Token),
    Delimited(DelimSpan, Lrc<Delimited>),
    /// A kleene-style repetition sequence
    Sequence(DelimSpan, Lrc<SequenceRepetition>),
    /// e.g., `$var`
    MetaVar(Span, Ident),
    /// e.g., `$var:expr`. This is only used in the left hand side of MBE macros.
    MetaVarDecl(Span, Ident /* name to bind */, NonterminalKind),
}

impl TokenTree {
    /// Return the number of tokens in the tree.
    fn len(&self) -> usize {
        match *self {
            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
                token::NoDelim => delimed.tts.len(),
                _ => delimed.tts.len() + 2,
            },
            TokenTree::Sequence(_, ref seq) => seq.tts.len(),
            _ => 0,
        }
    }

    /// Returns `true` if the given token tree is delimited.
    fn is_delimited(&self) -> bool {
        match *self {
            TokenTree::Delimited(..) => true,
            _ => false,
        }
    }

    /// Returns `true` if the given token tree is a token of the given kind.
    fn is_token(&self, expected_kind: &TokenKind) -> bool {
        match self {
            TokenTree::Token(Token { kind: actual_kind, .. }) => actual_kind == expected_kind,
            _ => false,
        }
    }

    /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
    fn get_tt(&self, index: usize) -> TokenTree {
        match (self, index) {
            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
                delimed.tts[index].clone()
            }
            (&TokenTree::Delimited(span, ref delimed), _) => {
                if index == 0 {
                    return delimed.open_tt(span);
                }
                if index == delimed.tts.len() + 1 {
                    return delimed.close_tt(span);
                }
                delimed.tts[index - 1].clone()
            }
            (&TokenTree::Sequence(_, ref seq), _) => seq.tts[index].clone(),
            _ => panic!("Cannot expand a token tree"),
        }
    }

    /// Retrieves the `TokenTree`'s span.
    fn span(&self) -> Span {
        match *self {
            TokenTree::Token(Token { span, .. })
            | TokenTree::MetaVar(span, _)
            | TokenTree::MetaVarDecl(span, _, _) => span,
            TokenTree::Delimited(span, _) | TokenTree::Sequence(span, _) => span.entire(),
        }
    }

    fn token(kind: TokenKind, span: Span) -> TokenTree {
        TokenTree::Token(Token::new(kind, span))
    }
}
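As a concrete reference for the data model above, here is a small `macro_rules` definition (invented for illustration) annotated with how its pieces map onto these types:

```rust
// `$first:expr` and `$rest:expr` in the matcher are `MetaVarDecl`s; the
// `$(, $rest:expr)*` group is a `Sequence` whose `kleene` op is `ZeroOrMore`
// (the comma lives inside the sequence rather than as its `separator`); and
// `$first`/`$rest` in the transcriber are `MetaVar` occurrences.
macro_rules! sum {
    ($first:expr $(, $rest:expr)*) => {
        $first $(+ $rest)*
    };
}

fn main() {
    assert_eq!(sum!(1), 1);
    assert_eq!(sum!(1, 2, 3), 6);
}
```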
compiler/rustc_expand/src/mbe/macro_check.rs (new file, 633 lines added)
@@ -0,0 +1,633 @@
|
|||
//! Checks that meta-variables in macro definition are correctly declared and used.
|
||||
//!
|
||||
//! # What is checked
|
||||
//!
|
||||
//! ## Meta-variables must not be bound twice
|
||||
//!
|
||||
//! ```
|
||||
//! macro_rules! foo { ($x:tt $x:tt) => { $x }; }
|
||||
//! ```
|
||||
//!
|
||||
//! This check is sound (no false-negative) and complete (no false-positive).
|
||||
//!
|
||||
//! ## Meta-variables must not be free
|
||||
//!
|
||||
//! ```
|
||||
//! macro_rules! foo { () => { $x }; }
|
||||
//! ```
|
||||
//!
|
||||
//! This check is also done at macro instantiation but only if the branch is taken.
|
||||
//!
|
||||
//! ## Meta-variables must repeat at least as many times as their binder
|
||||
//!
|
||||
//! ```
|
||||
//! macro_rules! foo { ($($x:tt)*) => { $x }; }
|
||||
//! ```
|
||||
//!
|
||||
//! This check is also done at macro instantiation but only if the branch is taken.
|
||||
//!
|
||||
//! ## Meta-variables must repeat with the same Kleene operators as their binder
|
||||
//!
|
||||
//! ```
|
||||
//! macro_rules! foo { ($($x:tt)+) => { $($x)* }; }
|
||||
//! ```
|
||||
//!
|
||||
//! This check is not done at macro instantiation.
|
||||
//!
|
||||
//! # Disclaimer
|
||||
//!
|
||||
//! In the presence of nested macros (a macro defined in a macro), those checks may have false
|
||||
//! positives and false negatives. We try to detect those cases by recognizing potential macro
|
||||
//! definitions in RHSes, but nested macros may be hidden through the use of particular values of
|
||||
//! meta-variables.
|
||||
//!
|
||||
//! ## Examples of false positive
|
||||
//!
|
||||
//! False positives can come from cases where we don't recognize a nested macro, because it depends
|
||||
//! on particular values of meta-variables. In the following example, we think both instances of
|
||||
//! `$x` are free, which is a correct statement if `$name` is anything but `macro_rules`. But when
|
||||
//! `$name` is `macro_rules`, like in the instantiation below, then `$x:tt` is actually a binder of
|
||||
//! the nested macro and `$x` is bound to it.
|
||||
//!
|
||||
//! ```
|
||||
//! macro_rules! foo { ($name:ident) => { $name! bar { ($x:tt) => { $x }; } }; }
|
||||
//! foo!(macro_rules);
|
||||
//! ```
|
||||
//!
|
||||
//! False positives can also come from cases where we think there is a nested macro while there
|
||||
//! isn't. In the following example, we think `$x` is free, which is incorrect because `bar` is not
|
||||
//! a nested macro since it is not evaluated as code by `stringify!`.
|
||||
//!
|
||||
//! ```
|
||||
//! macro_rules! foo { () => { stringify!(macro_rules! bar { () => { $x }; }) }; }
|
||||
//! ```
|
||||
//!
|
||||
//! ## Examples of false negative
|
||||
//!
|
||||
//! False negatives can come from cases where we don't recognize a meta-variable, because it depends
|
||||
//! on particular values of meta-variables. In the following examples, we don't see that if `$d` is
|
||||
//! instantiated with `$` then `$d z` becomes `$z` in the nested macro definition and is thus a free
|
||||
//! meta-variable. Note however, that if `foo` is instantiated, then we would check the definition
|
||||
//! of `bar` and would see the issue.
|
||||
//!
|
||||
//! ```
|
||||
//! macro_rules! foo { ($d:tt) => { macro_rules! bar { ($y:tt) => { $d z }; } }; }
|
||||
//! ```
|
||||
//!
|
||||
//! # How it is checked
|
||||
//!
|
||||
//! There are 3 main functions: `check_binders`, `check_occurrences`, and `check_nested_macro`. They
|
||||
//! all need some kind of environment.
|
||||
//!
|
||||
//! ## Environments
|
||||
//!
|
||||
//! Environments are used to pass information.
|
||||
//!
|
||||
//! ### From LHS to RHS
|
||||
//!
|
||||
//! When checking a LHS with `check_binders`, we produce (and use) an environment for binders,
|
||||
//! namely `Binders`. This is a mapping from binder name to information about that binder: the span
|
||||
//! of the binder for error messages and the stack of Kleene operators under which it was bound in
|
||||
//! the LHS.
|
||||
//!
|
||||
//! This environment is used by both the LHS and RHS. The LHS uses it to detect duplicate binders.
|
||||
//! The RHS uses it to detect the other errors.
|
||||
//!
|
||||
//! ### From outer macro to inner macro
|
||||
//!
|
||||
//! When checking the RHS of an outer macro and we detect a nested macro definition, we push the
|
||||
//! current state, namely `MacroState`, to an environment of nested macro definitions. Each state
|
||||
//! stores the LHS binders when entering the macro definition as well as the stack of Kleene
|
||||
//! operators under which the inner macro is defined in the RHS.
|
||||
//!
|
||||
//! This environment is a stack representing the nesting of macro definitions. As such, the stack of
|
||||
//! Kleene operators under which a meta-variable is repeating is the concatenation of the stacks
|
||||
//! stored when entering a macro definition starting from the state in which the meta-variable is
|
||||
//! bound.
|
||||
use crate::mbe::{KleeneToken, TokenTree};
|
||||
|
||||
use rustc_ast::token::{DelimToken, Token, TokenKind};
|
||||
use rustc_ast::{NodeId, DUMMY_NODE_ID};
|
||||
use rustc_data_structures::fx::FxHashMap;
|
||||
use rustc_session::lint::builtin::META_VARIABLE_MISUSE;
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_span::symbol::kw;
|
||||
use rustc_span::{symbol::MacroRulesNormalizedIdent, MultiSpan, Span};
|
||||
|
||||
use smallvec::SmallVec;
|
||||
|
||||
/// Stack represented as linked list.
|
||||
///
|
||||
/// Those are used for environments because they grow incrementally and are not mutable.
|
||||
enum Stack<'a, T> {
|
||||
/// Empty stack.
|
||||
Empty,
|
||||
/// A non-empty stack.
|
||||
Push {
|
||||
/// The top element.
|
||||
top: T,
|
||||
/// The previous elements.
|
||||
prev: &'a Stack<'a, T>,
|
||||
},
|
||||
}
|
||||
|
||||
impl<'a, T> Stack<'a, T> {
|
||||
/// Returns whether a stack is empty.
|
||||
fn is_empty(&self) -> bool {
|
||||
match *self {
|
||||
Stack::Empty => true,
|
||||
_ => false,
|
||||
}
|
||||
}
|
||||
|
||||
/// Returns a new stack with an element of top.
|
||||
fn push(&'a self, top: T) -> Stack<'a, T> {
|
||||
Stack::Push { top, prev: self }
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T> Iterator for &'a Stack<'a, T> {
|
||||
type Item = &'a T;
|
||||
|
||||
// Iterates from top to bottom of the stack.
|
||||
fn next(&mut self) -> Option<&'a T> {
|
||||
match *self {
|
||||
Stack::Empty => None,
|
||||
Stack::Push { ref top, ref prev } => {
|
||||
*self = prev;
|
||||
Some(top)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Stack<'_, KleeneToken>> for SmallVec<[KleeneToken; 1]> {
|
||||
fn from(ops: &Stack<'_, KleeneToken>) -> SmallVec<[KleeneToken; 1]> {
|
||||
let mut ops: SmallVec<[KleeneToken; 1]> = ops.cloned().collect();
|
||||
// The stack is innermost on top. We want outermost first.
|
||||
ops.reverse();
|
||||
ops
|
||||
}
|
||||
}

/// Information attached to a meta-variable binder in LHS.
struct BinderInfo {
    /// The span of the meta-variable in LHS.
    span: Span,
    /// The stack of Kleene operators (outermost first).
    ops: SmallVec<[KleeneToken; 1]>,
}

/// An environment of meta-variables to their binder information.
type Binders = FxHashMap<MacroRulesNormalizedIdent, BinderInfo>;

/// The state at which we entered a macro definition in the RHS of another macro definition.
struct MacroState<'a> {
    /// The binders of the branch where we entered the macro definition.
    binders: &'a Binders,
    /// The stack of Kleene operators (outermost first) where we entered the macro definition.
    ops: SmallVec<[KleeneToken; 1]>,
}

/// Checks that meta-variables are used correctly in a macro definition.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to emit lints
/// - `span` is used when no spans are available
/// - `lhses` and `rhses` should have the same length and represent the macro definition
pub(super) fn check_meta_variables(
    sess: &ParseSess,
    node_id: NodeId,
    span: Span,
    lhses: &[TokenTree],
    rhses: &[TokenTree],
) -> bool {
    if lhses.len() != rhses.len() {
        sess.span_diagnostic.span_bug(span, "length mismatch between LHSes and RHSes")
    }
    let mut valid = true;
    for (lhs, rhs) in lhses.iter().zip(rhses.iter()) {
        let mut binders = Binders::default();
        check_binders(sess, node_id, lhs, &Stack::Empty, &mut binders, &Stack::Empty, &mut valid);
        check_occurrences(sess, node_id, rhs, &Stack::Empty, &binders, &Stack::Empty, &mut valid);
    }
    valid
}
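
// Illustration (not from the original source): definitions this pass reacts
// to. The first is a hard error; the second only fires the allow-by-default
// `meta_variable_misuse` lint:
//
//     macro_rules! dup {
//         ($x:ident, $x:ident) => {};   // error: duplicate matcher binding
//     }
//     macro_rules! depth {
//         ( $( $x:ident )* ) => { $x }; // lint: `$x` is still repeating at this depth
//     }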

/// Checks `lhs` as part of the LHS of a macro definition, extends `binders` with new binders, and
/// sets `valid` to false in case of errors.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to emit lints
/// - `lhs` is checked as part of a LHS
/// - `macros` is the stack of possible outer macros
/// - `binders` contains the binders of the LHS
/// - `ops` is the stack of Kleene operators from the LHS
/// - `valid` is set in case of errors
fn check_binders(
    sess: &ParseSess,
    node_id: NodeId,
    lhs: &TokenTree,
    macros: &Stack<'_, MacroState<'_>>,
    binders: &mut Binders,
    ops: &Stack<'_, KleeneToken>,
    valid: &mut bool,
) {
    match *lhs {
        TokenTree::Token(..) => {}
        // This can only happen when checking a nested macro because this LHS is then in the RHS of
        // the outer macro. See ui/macros/macro-of-higher-order.rs where $y:$fragment in the
        // LHS of the nested macro (and RHS of the outer macro) is parsed as MetaVar(y) Colon
        // MetaVar(fragment) and not as MetaVarDecl(y, fragment).
        TokenTree::MetaVar(span, name) => {
            if macros.is_empty() {
                sess.span_diagnostic.span_bug(span, "unexpected MetaVar in lhs");
            }
            let name = MacroRulesNormalizedIdent::new(name);
            // There are 3 possibilities:
            if let Some(prev_info) = binders.get(&name) {
                // 1. The meta-variable is already bound in the current LHS: This is an error.
                let mut span = MultiSpan::from_span(span);
                span.push_span_label(prev_info.span, "previous declaration".into());
                buffer_lint(sess, span, node_id, "duplicate matcher binding");
            } else if get_binder_info(macros, binders, name).is_none() {
                // 2. The meta-variable is free: This is a binder.
                binders.insert(name, BinderInfo { span, ops: ops.into() });
            } else {
                // 3. The meta-variable is bound: This is an occurrence.
                check_occurrences(sess, node_id, lhs, macros, binders, ops, valid);
            }
        }
        // Similarly, this can only happen when checking a toplevel macro.
        TokenTree::MetaVarDecl(span, name, _kind) => {
            if !macros.is_empty() {
                sess.span_diagnostic.span_bug(span, "unexpected MetaVarDecl in nested lhs");
            }
            let name = MacroRulesNormalizedIdent::new(name);
            if let Some(prev_info) = get_binder_info(macros, binders, name) {
                // Duplicate binders at the top-level macro definition are errors. The lint is only
                // for nested macro definitions.
                sess.span_diagnostic
                    .struct_span_err(span, "duplicate matcher binding")
                    .span_label(span, "duplicate binding")
                    .span_label(prev_info.span, "previous binding")
                    .emit();
                *valid = false;
            } else {
                binders.insert(name, BinderInfo { span, ops: ops.into() });
            }
        }
        TokenTree::Delimited(_, ref del) => {
            for tt in &del.tts {
                check_binders(sess, node_id, tt, macros, binders, ops, valid);
            }
        }
        TokenTree::Sequence(_, ref seq) => {
            let ops = ops.push(seq.kleene);
            for tt in &seq.tts {
                check_binders(sess, node_id, tt, macros, binders, &ops, valid);
            }
        }
    }
}

/// Returns the binder information of a meta-variable.
///
/// Arguments:
/// - `macros` is the stack of possible outer macros
/// - `binders` contains the current binders
/// - `name` is the name of the meta-variable we are looking for
fn get_binder_info<'a>(
    mut macros: &'a Stack<'a, MacroState<'a>>,
    binders: &'a Binders,
    name: MacroRulesNormalizedIdent,
) -> Option<&'a BinderInfo> {
    binders.get(&name).or_else(|| macros.find_map(|state| state.binders.get(&name)))
}

/// Checks `rhs` as part of the RHS of a macro definition and sets `valid` to false in case of
/// errors.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to emit lints
/// - `rhs` is checked as part of a RHS
/// - `macros` is the stack of possible outer macros
/// - `binders` contains the binders of the associated LHS
/// - `ops` is the stack of Kleene operators from the RHS
/// - `valid` is set in case of errors
fn check_occurrences(
    sess: &ParseSess,
    node_id: NodeId,
    rhs: &TokenTree,
    macros: &Stack<'_, MacroState<'_>>,
    binders: &Binders,
    ops: &Stack<'_, KleeneToken>,
    valid: &mut bool,
) {
    match *rhs {
        TokenTree::Token(..) => {}
        TokenTree::MetaVarDecl(span, _name, _kind) => {
            sess.span_diagnostic.span_bug(span, "unexpected MetaVarDecl in rhs")
        }
        TokenTree::MetaVar(span, name) => {
            let name = MacroRulesNormalizedIdent::new(name);
            check_ops_is_prefix(sess, node_id, macros, binders, ops, span, name);
        }
        TokenTree::Delimited(_, ref del) => {
            check_nested_occurrences(sess, node_id, &del.tts, macros, binders, ops, valid);
        }
        TokenTree::Sequence(_, ref seq) => {
            let ops = ops.push(seq.kleene);
            check_nested_occurrences(sess, node_id, &seq.tts, macros, binders, &ops, valid);
        }
    }
}

/// Represents the processed prefix of a nested macro.
#[derive(Clone, Copy, PartialEq, Eq)]
enum NestedMacroState {
    /// Nothing that matches a nested macro definition was processed yet.
    Empty,
    /// The token `macro_rules` was processed.
    MacroRules,
    /// The tokens `macro_rules!` were processed.
    MacroRulesNot,
    /// The tokens `macro_rules!` followed by a name were processed. The name may be either directly
    /// an identifier or a meta-variable (that hopefully would be instantiated by an identifier).
    MacroRulesNotName,
    /// The keyword `macro` was processed.
    Macro,
    /// The keyword `macro` followed by a name was processed.
    MacroName,
    /// The keyword `macro` followed by a name and a token delimited by parentheses was processed.
    MacroNameParen,
}
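
// Illustration (not from the original source): how the states above line up
// with the tokens of a nested definition found in a RHS.
//
//     macro_rules  !              name                { ... }
//     ^MacroRules  ^MacroRulesNot ^MacroRulesNotName  body checked, back to Empty
//
//     macro   name        ( ... )          { ... }
//     ^Macro  ^MacroName  ^MacroNameParen  body checked, back to Empty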

/// Checks `tts` as part of the RHS of a macro definition, tries to recognize nested macro
/// definitions, and sets `valid` to false in case of errors.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to emit lints
/// - `tts` is checked as part of a RHS and may contain macro definitions
/// - `macros` is the stack of possible outer macros
/// - `binders` contains the binders of the associated LHS
/// - `ops` is the stack of Kleene operators from the RHS
/// - `valid` is set in case of errors
fn check_nested_occurrences(
    sess: &ParseSess,
    node_id: NodeId,
    tts: &[TokenTree],
    macros: &Stack<'_, MacroState<'_>>,
    binders: &Binders,
    ops: &Stack<'_, KleeneToken>,
    valid: &mut bool,
) {
    let mut state = NestedMacroState::Empty;
    let nested_macros = macros.push(MacroState { binders, ops: ops.into() });
    let mut nested_binders = Binders::default();
    for tt in tts {
        match (state, tt) {
            (
                NestedMacroState::Empty,
                &TokenTree::Token(Token { kind: TokenKind::Ident(name, false), .. }),
            ) => {
                if name == kw::MacroRules {
                    state = NestedMacroState::MacroRules;
                } else if name == kw::Macro {
                    state = NestedMacroState::Macro;
                }
            }
            (
                NestedMacroState::MacroRules,
                &TokenTree::Token(Token { kind: TokenKind::Not, .. }),
            ) => {
                state = NestedMacroState::MacroRulesNot;
            }
            (
                NestedMacroState::MacroRulesNot,
                &TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }),
            ) => {
                state = NestedMacroState::MacroRulesNotName;
            }
            (NestedMacroState::MacroRulesNot, &TokenTree::MetaVar(..)) => {
                state = NestedMacroState::MacroRulesNotName;
                // We check that the meta-variable is correctly used.
                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
            }
            (NestedMacroState::MacroRulesNotName, &TokenTree::Delimited(_, ref del))
            | (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
                if del.delim == DelimToken::Brace =>
            {
                let macro_rules = state == NestedMacroState::MacroRulesNotName;
                state = NestedMacroState::Empty;
                let rest =
                    check_nested_macro(sess, node_id, macro_rules, &del.tts, &nested_macros, valid);
                // If we did not check the whole macro definition, then check the rest as if outside
                // the macro definition.
                check_nested_occurrences(
                    sess,
                    node_id,
                    &del.tts[rest..],
                    macros,
                    binders,
                    ops,
                    valid,
                );
            }
            (
                NestedMacroState::Macro,
                &TokenTree::Token(Token { kind: TokenKind::Ident(..), .. }),
            ) => {
                state = NestedMacroState::MacroName;
            }
            (NestedMacroState::Macro, &TokenTree::MetaVar(..)) => {
                state = NestedMacroState::MacroName;
                // We check that the meta-variable is correctly used.
                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
            }
            (NestedMacroState::MacroName, &TokenTree::Delimited(_, ref del))
                if del.delim == DelimToken::Paren =>
            {
                state = NestedMacroState::MacroNameParen;
                nested_binders = Binders::default();
                check_binders(
                    sess,
                    node_id,
                    tt,
                    &nested_macros,
                    &mut nested_binders,
                    &Stack::Empty,
                    valid,
                );
            }
            (NestedMacroState::MacroNameParen, &TokenTree::Delimited(_, ref del))
                if del.delim == DelimToken::Brace =>
            {
                state = NestedMacroState::Empty;
                check_occurrences(
                    sess,
                    node_id,
                    tt,
                    &nested_macros,
                    &nested_binders,
                    &Stack::Empty,
                    valid,
                );
            }
            (_, ref tt) => {
                state = NestedMacroState::Empty;
                check_occurrences(sess, node_id, tt, macros, binders, ops, valid);
            }
        }
    }
}

/// Checks the body of a nested macro, returns where the check stopped, and sets `valid` to false
/// in case of errors.
///
/// The token trees are checked as long as they look like a list of (LHS) => {RHS} token trees. This
/// check is a best-effort to detect a macro definition. It returns the position in `tts` where we
/// stopped checking because we detected we were not in a macro definition anymore.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to emit lints
/// - `macro_rules` specifies whether the macro is `macro_rules`
/// - `tts` is checked as a list of (LHS) => {RHS}
/// - `macros` is the stack of outer macros
/// - `valid` is set in case of errors
fn check_nested_macro(
    sess: &ParseSess,
    node_id: NodeId,
    macro_rules: bool,
    tts: &[TokenTree],
    macros: &Stack<'_, MacroState<'_>>,
    valid: &mut bool,
) -> usize {
    let n = tts.len();
    let mut i = 0;
    let separator = if macro_rules { TokenKind::Semi } else { TokenKind::Comma };
    loop {
        // We expect 3 token trees: `(LHS) => {RHS}`. The separator is checked after.
        if i + 2 >= n
            || !tts[i].is_delimited()
            || !tts[i + 1].is_token(&TokenKind::FatArrow)
            || !tts[i + 2].is_delimited()
        {
            break;
        }
        let lhs = &tts[i];
        let rhs = &tts[i + 2];
        let mut binders = Binders::default();
        check_binders(sess, node_id, lhs, macros, &mut binders, &Stack::Empty, valid);
        check_occurrences(sess, node_id, rhs, macros, &binders, &Stack::Empty, valid);
        // Since the last semicolon is optional for `macro_rules` macros and decl-macro arms are
        // not terminated at all, we increment our checked position by how many token trees we
        // already checked (the 3 above) before checking for the separator.
        i += 3;
        if i == n || !tts[i].is_token(&separator) {
            break;
        }
        // We increment our checked position for the separator.
        i += 1;
    }
    i
}
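
// Illustration (not from the original source): the best-effort shape this
// recognizes inside a nested definition's braces. Checking stops at the first
// group of trees that does not match `(LHS) => {RHS}`, and the caller then
// treats the remainder as ordinary RHS tokens:
//
//     macro_rules! inner {
//         (a) => { 1 };
//         (b) => { 2 };   // each arm: delimited LHS, `=>`, delimited RHS, `;`
//     }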

/// Checks that a meta-variable occurrence is valid.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to emit lints
/// - `macros` is the stack of possible outer macros
/// - `binders` contains the binders of the associated LHS
/// - `ops` is the stack of Kleene operators from the RHS
/// - `span` is the span of the meta-variable to check
/// - `name` is the name of the meta-variable to check
fn check_ops_is_prefix(
    sess: &ParseSess,
    node_id: NodeId,
    macros: &Stack<'_, MacroState<'_>>,
    binders: &Binders,
    ops: &Stack<'_, KleeneToken>,
    span: Span,
    name: MacroRulesNormalizedIdent,
) {
    let macros = macros.push(MacroState { binders, ops: ops.into() });
    // Accumulates the stacks of operators of each state until (and including when) the
    // meta-variable is found. The innermost stack is first.
    let mut acc: SmallVec<[&SmallVec<[KleeneToken; 1]>; 1]> = SmallVec::new();
    for state in &macros {
        acc.push(&state.ops);
        if let Some(binder) = state.binders.get(&name) {
            // This concatenates the stacks of operators from the macro where the meta-variable
            // was bound down to where it is used (through possibly nested macros). The
            // outermost operator is first.
            let mut occurrence_ops: SmallVec<[KleeneToken; 2]> = SmallVec::new();
            // We need to iterate from the end to start with the outermost stack.
            for ops in acc.iter().rev() {
                occurrence_ops.extend_from_slice(ops);
            }
            ops_is_prefix(sess, node_id, span, name, &binder.ops, &occurrence_ops);
            return;
        }
    }
    buffer_lint(sess, span.into(), node_id, &format!("unknown macro variable `{}`", name));
}

/// Returns whether `binder_ops` is a prefix of `occurrence_ops`.
///
/// The stack of Kleene operators of a meta-variable occurrence just needs to have the stack of
/// Kleene operators of its binder as a prefix.
///
/// Consider $i in the following example:
///
///     ( $( $i:ident = $($j:ident),+ );* ) => { $($( $i += $j; )+)* }
///
/// It occurs under the Kleene stack ["*", "+"] and is bound under ["*"] only.
///
/// Arguments:
/// - `sess` is used to emit diagnostics and lints
/// - `node_id` is used to emit lints
/// - `span` is the span of the meta-variable being checked
/// - `name` is the name of the meta-variable being checked
/// - `binder_ops` is the stack of Kleene operators for the binder
/// - `occurrence_ops` is the stack of Kleene operators for the occurrence
fn ops_is_prefix(
    sess: &ParseSess,
    node_id: NodeId,
    span: Span,
    name: MacroRulesNormalizedIdent,
    binder_ops: &[KleeneToken],
    occurrence_ops: &[KleeneToken],
) {
    for (i, binder) in binder_ops.iter().enumerate() {
        if i >= occurrence_ops.len() {
            let mut span = MultiSpan::from_span(span);
            span.push_span_label(binder.span, "expected repetition".into());
            let message = &format!("variable '{}' is still repeating at this depth", name);
            buffer_lint(sess, span, node_id, message);
            return;
        }
        let occurrence = &occurrence_ops[i];
        if occurrence.op != binder.op {
            let mut span = MultiSpan::from_span(span);
            span.push_span_label(binder.span, "expected repetition".into());
            span.push_span_label(occurrence.span, "conflicting repetition".into());
            let message = "meta-variable repeats with different Kleene operator";
            buffer_lint(sess, span, node_id, message);
            return;
        }
    }
}
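
// Illustration (not from the original source): a binder/occurrence pair whose
// stacks share a depth but disagree on the operator, which fires the
// "different Kleene operator" lint above:
//
//     macro_rules! mismatch {
//         ( $( $x:ident )* ) => { $( $x )+ };   // binder under `*`, occurrence under `+`
//     }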

fn buffer_lint(sess: &ParseSess, span: MultiSpan, node_id: NodeId, message: &str) {
    // Macros loaded from other crates have dummy node ids.
    if node_id != DUMMY_NODE_ID {
        sess.buffer_lint(&META_VARIABLE_MISUSE, span, node_id, message);
    }
}

745
compiler/rustc_expand/src/mbe/macro_parser.rs
Normal file

@@ -0,0 +1,745 @@
//! This is an NFA-based parser, which calls out to the main Rust parser for named non-terminals
//! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads
//! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in
//! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier
//! fit for Macro-by-Example-style rules.
//!
//! (In order to prevent the pathological case, we'd need to lazily construct the resulting
//! `NamedMatch`es at the very end. It'd be a pain, and require more memory to keep around old
//! items, but it would also save overhead.)
//!
//! We don't say this parser uses the Earley algorithm, because that would be unnecessarily
//! inaccurate. The macro parser restricts itself to the features of finite state automata. Earley
//! parsers can be described as an extension of NFAs with completion rules, prediction rules, and
//! recursion.
//!
//! Quick intro to how the parser works:
//!
//! A 'position' is a dot in the middle of a matcher, usually represented as a
//! `·`. For example `· a $( a )* a b` is a position, as is `a $( · a )* a b`.
//!
//! The parser walks through the input a token at a time, maintaining a list
//! of threads consistent with the current position in the input string: `cur_items`.
//!
//! As it processes them, it fills up `eof_items` with threads that would be valid if
//! the macro invocation is now over, `bb_items` with threads that are waiting on
//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting
//! on a particular token. Most of the logic concerns moving the · through the
//! repetitions indicated by Kleene stars. The rules for moving the · without
//! consuming any input are called epsilon transitions. It only advances or calls
//! out to the real Rust parser when no `cur_items` threads remain.
//!
//! Example:
//!
//! ```text, ignore
//! Start parsing a a a a b against [· a $( a )* a b].
//!
//! Remaining input: a a a a b
//! next: [· a $( a )* a b]
//!
//! - - - Advance over an a. - - -
//!
//! Remaining input: a a a b
//! cur: [a · $( a )* a b]
//! Descend/Skip (first item).
//! next: [a $( · a )* a b] [a $( a )* · a b].
//!
//! - - - Advance over an a. - - -
//!
//! Remaining input: a a b
//! cur: [a $( a · )* a b] [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
//! Remaining input: a b
//! cur: [a $( a · )* a b] [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over an a. - - - (this looks exactly like the last step)
//!
//! Remaining input: b
//! cur: [a $( a · )* a b] [a $( a )* a · b]
//! Follow epsilon transition: Finish/Repeat (first item)
//! next: [a $( a )* · a b] [a $( · a )* a b] [a $( a )* a · b]
//!
//! - - - Advance over a b. - - -
//!
//! Remaining input: ''
//! eof: [a $( a )* a b ·]
//! ```

crate use NamedMatch::*;
crate use ParseResult::*;
use TokenTreeOrTokenTreeSlice::*;

use crate::mbe::{self, TokenTree};

use rustc_ast::token::{self, DocComment, Nonterminal, Token};
use rustc_parse::parser::Parser;
use rustc_session::parse::ParseSess;
use rustc_span::symbol::MacroRulesNormalizedIdent;

use smallvec::{smallvec, SmallVec};

use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use std::borrow::Cow;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::mem;
use std::ops::{Deref, DerefMut};

// To avoid costly uniqueness checks, we require that `MatchSeq` always has a nonempty body.

/// Either a sequence of token trees or a single one. This is used as the representation of the
/// sequence of tokens that make up a matcher.
#[derive(Clone)]
enum TokenTreeOrTokenTreeSlice<'tt> {
    Tt(TokenTree),
    TtSeq(&'tt [TokenTree]),
}

impl<'tt> TokenTreeOrTokenTreeSlice<'tt> {
    /// Returns the number of constituent top-level token trees of `self` (top-level in that it
    /// will not recursively descend into subtrees).
    fn len(&self) -> usize {
        match *self {
            TtSeq(ref v) => v.len(),
            Tt(ref tt) => tt.len(),
        }
    }

    /// The `index`-th token tree of `self`.
    fn get_tt(&self, index: usize) -> TokenTree {
        match *self {
            TtSeq(ref v) => v[index].clone(),
            Tt(ref tt) => tt.get_tt(index),
        }
    }
}

/// An unzipping of `TokenTree`s... see the `stack` field of `MatcherPos`.
///
/// This is used by `inner_parse_loop` to keep track of delimited submatchers that we have
/// descended into.
#[derive(Clone)]
struct MatcherTtFrame<'tt> {
    /// The "parent" matcher that we are descending into.
    elts: TokenTreeOrTokenTreeSlice<'tt>,
    /// The position of the "dot" in `elts` at the time we descended.
    idx: usize,
}

type NamedMatchVec = SmallVec<[NamedMatch; 4]>;

/// Represents a single "position" (aka "matcher position", aka "item"), as
/// described in the module documentation.
///
/// Here:
///
/// - `'root` represents the lifetime of the stack slot that holds the root
///   `MatcherPos`. As described in `MatcherPosHandle`, the root `MatcherPos`
///   structure is stored on the stack, but subsequent instances are put into
///   the heap.
/// - `'tt` represents the lifetime of the token trees that this matcher
///   position refers to.
///
/// It is important to distinguish these two lifetimes because we have a
/// `SmallVec<TokenTreeOrTokenTreeSlice<'tt>>` below, and the destructor of
/// that is considered to possibly access the data from its elements (it lacks
/// a `#[may_dangle]` attribute). As a result, the compiler needs to know that
/// all the elements in that `SmallVec` strictly outlive the root stack slot
/// lifetime. By separating `'tt` from `'root`, we can show that.
#[derive(Clone)]
struct MatcherPos<'root, 'tt> {
    /// The token or sequence of tokens that make up the matcher.
    top_elts: TokenTreeOrTokenTreeSlice<'tt>,

    /// The position of the "dot" in this matcher.
    idx: usize,

    /// For each named metavar in the matcher, we keep track of token trees matched against the
    /// metavar by the black box parser. In particular, there may be more than one match per
    /// metavar if we are in a repetition (each repetition matches each of the variables).
    /// Moreover, matchers and repetitions can be nested; the `matches` field is shared (hence the
    /// `Lrc`) among all "nested" matchers. `match_lo`, `match_cur`, and `match_hi` keep track of
    /// the current position of the `self` matcher position in the shared `matches` list.
    ///
    /// Also, note that while we are descending into a sequence, matchers are given their own
    /// `matches` vector. Only once we reach the end of a full repetition of the sequence do we add
    /// all bound matches from the submatcher into the shared top-level `matches` vector. If `sep`
    /// and `up` are `Some`, then `matches` is _not_ the shared top-level list. Instead, if one
    /// wants the shared `matches`, one should use `up.matches`.
    matches: Box<[Lrc<NamedMatchVec>]>,
    /// The position in `matches` corresponding to the first metavar in this matcher's sequence of
    /// token trees. In other words, the first metavar in the first token of `top_elts` corresponds
    /// to `matches[match_lo]`.
    match_lo: usize,
    /// The position in `matches` corresponding to the metavar we are currently trying to match
    /// against the source token stream. `match_lo <= match_cur <= match_hi`.
    match_cur: usize,
    /// Similar to `match_lo` except `match_hi` is the position in `matches` of the _last_ metavar
    /// in this matcher.
    match_hi: usize,

    // The following fields are used if we are matching a repetition. If we aren't, they should be
    // `None`.
    /// The KleeneOp of this sequence if we are in a repetition.
    seq_op: Option<mbe::KleeneOp>,

    /// The separator if we are in a repetition.
    sep: Option<Token>,

    /// The "parent" matcher position if we are in a repetition. That is, the matcher position just
    /// before we enter the sequence.
    up: Option<MatcherPosHandle<'root, 'tt>>,

    /// Specifically used to "unzip" token trees. By "unzip", we mean to unwrap the delimiters from
    /// a delimited token tree (e.g., something wrapped in `(` `)`) or to get the contents of a doc
    /// comment...
    ///
    /// When matching against matchers with nested delimited submatchers (e.g., `pat ( pat ( .. )
    /// pat ) pat`), we need to keep track of the matchers we are descending into. This stack does
    /// that where the bottom of the stack is the outermost matcher.
    /// Also, throughout the comments, this "descent" is often referred to as "unzipping"...
    stack: SmallVec<[MatcherTtFrame<'tt>; 1]>,
}

impl<'root, 'tt> MatcherPos<'root, 'tt> {
    /// Adds `m` as a named match for the `idx`-th metavar.
    fn push_match(&mut self, idx: usize, m: NamedMatch) {
        let matches = Lrc::make_mut(&mut self.matches[idx]);
        matches.push(m);
    }
}

// Lots of MatcherPos instances are created at runtime. Allocating them on the
// heap is slow. Furthermore, using SmallVec<MatcherPos> to allocate them all
// on the stack is also slow, because MatcherPos is quite a large type and
// instances get moved around a lot between vectors, which requires lots of
// slow memcpy calls.
//
// Therefore, the initial MatcherPos is always allocated on the stack,
// subsequent ones (of which there aren't that many) are allocated on the heap,
// and this type is used to encapsulate both cases.
enum MatcherPosHandle<'root, 'tt> {
    Ref(&'root mut MatcherPos<'root, 'tt>),
    Box(Box<MatcherPos<'root, 'tt>>),
}

impl<'root, 'tt> Clone for MatcherPosHandle<'root, 'tt> {
    // This always produces a new Box.
    fn clone(&self) -> Self {
        MatcherPosHandle::Box(match *self {
            MatcherPosHandle::Ref(ref r) => Box::new((**r).clone()),
            MatcherPosHandle::Box(ref b) => b.clone(),
        })
    }
}

impl<'root, 'tt> Deref for MatcherPosHandle<'root, 'tt> {
    type Target = MatcherPos<'root, 'tt>;
    fn deref(&self) -> &Self::Target {
        match *self {
            MatcherPosHandle::Ref(ref r) => r,
            MatcherPosHandle::Box(ref b) => b,
        }
    }
}

impl<'root, 'tt> DerefMut for MatcherPosHandle<'root, 'tt> {
    fn deref_mut(&mut self) -> &mut MatcherPos<'root, 'tt> {
        match *self {
            MatcherPosHandle::Ref(ref mut r) => r,
            MatcherPosHandle::Box(ref mut b) => b,
        }
    }
}

/// Represents the possible results of an attempted parse.
crate enum ParseResult<T> {
    /// Parsed successfully.
    Success(T),
    /// Arm failed to match. If the second parameter is `token::Eof`, it indicates an unexpected
    /// end of macro invocation. Otherwise, it indicates that no rules expected the given token.
    Failure(Token, &'static str),
    /// Fatal error (malformed macro?). Abort compilation.
    Error(rustc_span::Span, String),
    ErrorReported,
}

/// A `ParseResult` where the `Success` variant contains a mapping of
/// `MacroRulesNormalizedIdent`s to `NamedMatch`es. This represents the mapping
/// of metavars to the token trees they bind to.
crate type NamedParseResult = ParseResult<FxHashMap<MacroRulesNormalizedIdent, NamedMatch>>;

/// Count how many metavars are named in the given matcher `ms`.
pub(super) fn count_names(ms: &[TokenTree]) -> usize {
    ms.iter().fold(0, |count, elt| {
        count
            + match *elt {
                TokenTree::Sequence(_, ref seq) => seq.num_captures,
                TokenTree::Delimited(_, ref delim) => count_names(&delim.tts),
                TokenTree::MetaVar(..) => 0,
                TokenTree::MetaVarDecl(..) => 1,
                TokenTree::Token(..) => 0,
            }
    })
}
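
// Illustration (not from the original source): for the matcher
// `$a:expr , $( $b:ident )*`, `count_names` returns 2 -- one for the
// `MetaVarDecl` `$a` plus the sequence's precomputed `num_captures` of 1 for
// `$b`. Plain `MetaVar` occurrences and ordinary tokens contribute nothing.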

/// `len` `Vec`s (initially shared and empty) that will store matches of metavars.
fn create_matches(len: usize) -> Box<[Lrc<NamedMatchVec>]> {
    if len == 0 {
        vec![]
    } else {
        let empty_matches = Lrc::new(SmallVec::new());
        vec![empty_matches; len]
    }
    .into_boxed_slice()
}
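
// Illustration (not from the original source): all `len` slots above start as
// clones of one shared empty `Lrc`, so the "copies" are cheap reference-count
// bumps. `push_match` later uses `Lrc::make_mut`, which only deep-clones a
// slot the first time it is written to while still shared.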

/// Generates the top-level matcher position in which the "dot" is before the first token of the
/// matcher `ms`.
fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree]) -> MatcherPos<'root, 'tt> {
    let match_idx_hi = count_names(ms);
    let matches = create_matches(match_idx_hi);
    MatcherPos {
        // Start with the top level matcher given to us.
        top_elts: TtSeq(ms), // "elts" is an abbr. for "elements"
        // The "dot" is before the first token of the matcher.
        idx: 0,

        // Initialize `matches` to a bunch of empty `Vec`s -- one for each metavar in `top_elts`.
        // `match_lo` for `top_elts` is 0 and `match_hi` is `matches.len()`. `match_cur` is 0 since
        // we haven't actually matched anything yet.
        matches,
        match_lo: 0,
        match_cur: 0,
        match_hi: match_idx_hi,

        // Haven't descended into any delimiters, so the stack is empty.
        stack: smallvec![],

        // Haven't descended into any sequences, so all of these are `None`.
        seq_op: None,
        sep: None,
        up: None,
    }
}

/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
/// so it is associated with a single ident in a parse, and all
/// `MatchedNonterminal`s in the `NamedMatch` have the same non-terminal type
/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
///
/// The in-memory structure of a particular `NamedMatch` represents the match
/// that occurred when a particular subset of a matcher was applied to a
/// particular token tree.
///
/// The width of each `MatchedSeq` in the `NamedMatch`, and the identity of
/// the `MatchedNonterminal`s, will depend on the token tree it was applied
/// to: each `MatchedSeq` corresponds to a single `TTSeq` in the originating
/// token tree. The depth of the `NamedMatch` structure will therefore depend
/// only on the nesting depth of `ast::TTSeq`s in the originating
/// token tree it was derived from.
#[derive(Debug, Clone)]
crate enum NamedMatch {
    MatchedSeq(Lrc<NamedMatchVec>),
    MatchedNonterminal(Lrc<Nonterminal>),
}

/// Takes a sequence of token trees `ms` representing a matcher which successfully matched input
/// and an iterator of items that matched input and produces a `NamedParseResult`.
fn nameize<I: Iterator<Item = NamedMatch>>(
    sess: &ParseSess,
    ms: &[TokenTree],
    mut res: I,
) -> NamedParseResult {
    // Recursively descend into each type of matcher (e.g., sequences, delimited, metavars) and make
    // sure that each metavar has _exactly one_ binding. If a metavar does not have exactly one
    // binding, then there is an error. If it does, then we insert the binding into the
    // `NamedParseResult`.
    fn n_rec<I: Iterator<Item = NamedMatch>>(
        sess: &ParseSess,
        m: &TokenTree,
        res: &mut I,
        ret_val: &mut FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    ) -> Result<(), (rustc_span::Span, String)> {
        match *m {
            TokenTree::Sequence(_, ref seq) => {
                for next_m in &seq.tts {
                    n_rec(sess, next_m, res.by_ref(), ret_val)?
                }
            }
            TokenTree::Delimited(_, ref delim) => {
                for next_m in &delim.tts {
                    n_rec(sess, next_m, res.by_ref(), ret_val)?;
                }
            }
            TokenTree::MetaVarDecl(sp, bind_name, _) => match ret_val
                .entry(MacroRulesNormalizedIdent::new(bind_name))
            {
                Vacant(spot) => {
                    spot.insert(res.next().unwrap());
                }
                Occupied(..) => return Err((sp, format!("duplicated bind name: {}", bind_name))),
            },
            TokenTree::MetaVar(..) | TokenTree::Token(..) => (),
        }

        Ok(())
    }

    let mut ret_val = FxHashMap::default();
    for m in ms {
        match n_rec(sess, m, res.by_ref(), &mut ret_val) {
            Ok(_) => {}
            Err((sp, msg)) => return Error(sp, msg),
        }
    }

    Success(ret_val)
}

/// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison).
fn token_name_eq(t1: &Token, t2: &Token) -> bool {
    if let (Some((ident1, is_raw1)), Some((ident2, is_raw2))) = (t1.ident(), t2.ident()) {
        ident1.name == ident2.name && is_raw1 == is_raw2
    } else if let (Some(ident1), Some(ident2)) = (t1.lifetime(), t2.lifetime()) {
        ident1.name == ident2.name
    } else {
        t1.kind == t2.kind
    }
}
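
// Illustration (not from the original source): `token_name_eq` deliberately
// ignores spans and hygiene marks, so an `x` written inside a macro and an
// `x` from the call site compare equal here, while `x` and `r#x` do not
// (the raw-identifier flag is part of the comparison).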

/// Process the matcher positions of `cur_items` until it is empty. In the process, this will
/// produce more items in `next_items`, `eof_items`, and `bb_items`.
///
/// For more info about how this happens, see the module-level doc comments and the inline
/// comments of this function.
///
/// # Parameters
///
/// - `cur_items`: the set of current items to be processed. This should be empty by the end of a
///   successful execution of this function.
/// - `next_items`: the set of newly generated items. These are used to replenish `cur_items` in
///   the function `parse`.
/// - `eof_items`: the set of items that would be valid if this was the EOF.
/// - `bb_items`: the set of items that are waiting for the black-box parser.
/// - `token`: the current token of the parser.
///
/// # Returns
///
/// A `ParseResult`. Note that matches are kept track of through the items generated.
fn inner_parse_loop<'root, 'tt>(
    cur_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
    next_items: &mut Vec<MatcherPosHandle<'root, 'tt>>,
    eof_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
    bb_items: &mut SmallVec<[MatcherPosHandle<'root, 'tt>; 1]>,
    token: &Token,
) -> ParseResult<()> {
    // Pop items from `cur_items` until it is empty.
    while let Some(mut item) = cur_items.pop() {
        // When unzipped trees end, remove them. This corresponds to backtracking out of a
        // delimited submatcher into which we already descended. In backtracking out again, we need
        // to advance the "dot" past the delimiters in the outer matcher.
        while item.idx >= item.top_elts.len() {
            match item.stack.pop() {
                Some(MatcherTtFrame { elts, idx }) => {
                    item.top_elts = elts;
                    item.idx = idx + 1;
                }
                None => break,
            }
        }

        // Get the current position of the "dot" (`idx`) in `item` and the number of token trees in
        // the matcher (`len`).
        let idx = item.idx;
        let len = item.top_elts.len();

        // If `idx >= len`, then we are at or past the end of the matcher of `item`.
        if idx >= len {
            // We are repeating iff there is a parent. If the matcher is inside of a repetition,
            // then we could be at the end of a sequence or at the beginning of the next
            // repetition.
            if item.up.is_some() {
                // At this point, regardless of whether there is a separator, we should add all
                // matches from the complete repetition of the sequence to the shared, top-level
                // `matches` list (actually, `up.matches`, which could itself not be the top-level,
                // but anyway...). Moreover, we add another item to `cur_items` in which the "dot"
                // is at the end of the `up` matcher. This ensures that the "dot" in the `up`
                // matcher is also advanced sufficiently.
                //
                // NOTE: removing the condition `idx == len` allows trailing separators.
                if idx == len {
                    // Get the `up` matcher.
                    let mut new_pos = item.up.clone().unwrap();

                    // Add matches from this repetition to the `matches` of `up`.
                    for idx in item.match_lo..item.match_hi {
                        let sub = item.matches[idx].clone();
                        new_pos.push_match(idx, MatchedSeq(sub));
                    }

                    // Move the "dot" past the repetition in `up`.
                    new_pos.match_cur = item.match_hi;
                    new_pos.idx += 1;
                    cur_items.push(new_pos);
                }

                // Check if we need a separator.
                if idx == len && item.sep.is_some() {
                    // We have a separator, and it is the current token. We can advance past the
                    // separator token.
                    if item.sep.as_ref().map(|sep| token_name_eq(token, sep)).unwrap_or(false) {
                        item.idx += 1;
                        next_items.push(item);
                    }
                }
                // We don't need a separator. Move the "dot" back to the beginning of the matcher
                // and try to match again UNLESS we are only allowed to have _one_ repetition.
                else if item.seq_op != Some(mbe::KleeneOp::ZeroOrOne) {
                    item.match_cur = item.match_lo;
                    item.idx = 0;
                    cur_items.push(item);
                }
            }
            // If we are not in a repetition, then being at the end of a matcher means that we have
            // reached the potential end of the input.
            else {
                eof_items.push(item);
            }
        }
        // We are in the middle of a matcher.
        else {
            // Look at what token in the matcher we are trying to match the current token (`token`)
            // against. Depending on that, we may generate new items.
            match item.top_elts.get_tt(idx) {
                // Need to descend into a sequence.
                TokenTree::Sequence(sp, seq) => {
                    // Examine the case where there are 0 matches of this sequence. We are
                    // implicitly disallowing OneOrMore from having 0 matches here. Thus, that will
                    // result in a "no rules expected token" error by virtue of this matcher not
                    // working.
                    if seq.kleene.op == mbe::KleeneOp::ZeroOrMore
                        || seq.kleene.op == mbe::KleeneOp::ZeroOrOne
                    {
                        let mut new_item = item.clone();
                        new_item.match_cur += seq.num_captures;
                        new_item.idx += 1;
                        for idx in item.match_cur..item.match_cur + seq.num_captures {
                            new_item.push_match(idx, MatchedSeq(Lrc::new(smallvec![])));
                        }
                        cur_items.push(new_item);
                    }

                    let matches = create_matches(item.matches.len());
                    cur_items.push(MatcherPosHandle::Box(Box::new(MatcherPos {
                        stack: smallvec![],
                        sep: seq.separator.clone(),
                        seq_op: Some(seq.kleene.op),
                        idx: 0,
                        matches,
                        match_lo: item.match_cur,
                        match_cur: item.match_cur,
                        match_hi: item.match_cur + seq.num_captures,
                        up: Some(item),
                        top_elts: Tt(TokenTree::Sequence(sp, seq)),
                    })));
                }

                // We need to match a metavar with a valid ident... call out to the black-box
                // parser by adding an item to `bb_items`.
                TokenTree::MetaVarDecl(_, _, kind) => {
                    // Built-in nonterminals never start with these tokens,
                    // so we can eliminate them from consideration.
                    if Parser::nonterminal_may_begin_with(kind, token) {
                        bb_items.push(item);
                    }
                }

                // We need to descend into a delimited submatcher or a doc comment. To do this, we
                // push the current matcher onto a stack and push a new item containing the
                // submatcher onto `cur_items`.
                //
                // At the beginning of the loop, if we reach the end of the delimited submatcher,
                // we pop the stack to backtrack out of the descent.
                seq @ (TokenTree::Delimited(..)
                | TokenTree::Token(Token { kind: DocComment(..), .. })) => {
                    let lower_elts = mem::replace(&mut item.top_elts, Tt(seq));
                    let idx = item.idx;
                    item.stack.push(MatcherTtFrame { elts: lower_elts, idx });
                    item.idx = 0;
                    cur_items.push(item);
                }

                // We just matched a normal token. We can just advance the parser.
                TokenTree::Token(t) if token_name_eq(&t, token) => {
                    item.idx += 1;
                    next_items.push(item);
                }

                // There was another token that was not `token`... This means we can't add any
                // rules. NOTE that this is not necessarily an error unless _all_ items in
                // `cur_items` end up doing this. There may still be some other matchers that do
                // end up working out.
                TokenTree::Token(..) | TokenTree::MetaVar(..) => {}
            }
        }
    }

    // Yay, a successful parse (so far)!
    Success(())
}

/// Use the given sequence of token trees (`ms`) as a matcher. Match the token
/// stream from the given `parser` against it and return the match.
pub(super) fn parse_tt(parser: &mut Cow<'_, Parser<'_>>, ms: &[TokenTree]) -> NamedParseResult {
    // A queue of possible matcher positions. We initialize it with the matcher position in which
    // the "dot" is before the first token of the first token tree in `ms`. `inner_parse_loop` then
    // processes all of these possible matcher positions and produces possible next positions into
    // `next_items`. After some post-processing, the contents of `next_items` replenish `cur_items`
    // and we start over again.
    //
    // This MatcherPos instance is allocated on the stack. All others -- and
    // there are frequently *no* others! -- are allocated on the heap.
    let mut initial = initial_matcher_pos(ms);
    let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
    let mut next_items = Vec::new();

    loop {
        // Matcher positions black-box parsed by parser.rs (`parser`).
        let mut bb_items = SmallVec::new();

        // Matcher positions that would be valid if the macro invocation was over now.
        let mut eof_items = SmallVec::new();
        assert!(next_items.is_empty());

        // Process `cur_items` until either we have finished the input or we need to get some
        // parsing from the black-box parser done. The result is that `next_items` will contain a
        // bunch of possible next matcher positions.
        match inner_parse_loop(
            &mut cur_items,
            &mut next_items,
            &mut eof_items,
            &mut bb_items,
            &parser.token,
        ) {
            Success(_) => {}
            Failure(token, msg) => return Failure(token, msg),
            Error(sp, msg) => return Error(sp, msg),
            ErrorReported => return ErrorReported,
        }

        // The inner parse loop handled all of `cur_items`, so it's empty.
        assert!(cur_items.is_empty());

        // We need to do some post processing after the `inner_parse_loop`.
        //
        // Error messages here could be improved with links to original rules.

        // If we reached the EOF, check that there is EXACTLY ONE possible matcher. Otherwise,
        // either the parse is ambiguous (which should never happen) or there is a syntax error.
        if parser.token == token::Eof {
            if eof_items.len() == 1 {
                let matches =
                    eof_items[0].matches.iter_mut().map(|dv| Lrc::make_mut(dv).pop().unwrap());
                return nameize(parser.sess, ms, matches);
            } else if eof_items.len() > 1 {
                return Error(
                    parser.token.span,
                    "ambiguity: multiple successful parses".to_string(),
                );
            } else {
                return Failure(
                    Token::new(
                        token::Eof,
                        if parser.token.span.is_dummy() {
                            parser.token.span
                        } else {
                            parser.token.span.shrink_to_hi()
                        },
                    ),
                    "missing tokens in macro arguments",
                );
            }
        }
        // Performance hack: eof_items may share matchers via Rc with other things that we want
        // to modify. Dropping eof_items now may drop these refcounts to 1, preventing an
        // unnecessary implicit clone later in Rc::make_mut.
        drop(eof_items);

        // If there are no possible next positions AND we aren't waiting for the black-box parser,
        // then there is a syntax error.
        if bb_items.is_empty() && next_items.is_empty() {
            return Failure(parser.token.clone(), "no rules expected this token in macro call");
        }
        // Another possibility is that we need to call out to parse some Rust nonterminal with the
        // (black-box) parser. However, if there is not EXACTLY ONE of these, something is wrong.
        else if (!bb_items.is_empty() && !next_items.is_empty()) || bb_items.len() > 1 {
            let nts = bb_items
                .iter()
                .map(|item| match item.top_elts.get_tt(item.idx) {
                    TokenTree::MetaVarDecl(_, bind, kind) => format!("{} ('{}')", kind, bind),
                    _ => panic!(),
                })
                .collect::<Vec<String>>()
                .join(" or ");

            return Error(
                parser.token.span,
                format!(
                    "local ambiguity: multiple parsing options: {}",
                    match next_items.len() {
                        0 => format!("built-in NTs {}.", nts),
                        1 => format!("built-in NTs {} or 1 other option.", nts),
                        n => format!("built-in NTs {} or {} other options.", nts, n),
                    }
                ),
            );
        }
        // Dump all possible `next_items` into `cur_items` for the next iteration.
        else if !next_items.is_empty() {
            // Now process the next token.
            cur_items.extend(next_items.drain(..));
            parser.to_mut().bump();
        }
        // Finally, we have the case where we need to call the black-box parser to get some
        // nonterminal.
        else {
            assert_eq!(bb_items.len(), 1);

            let mut item = bb_items.pop().unwrap();
            if let TokenTree::MetaVarDecl(span, _, kind) = item.top_elts.get_tt(item.idx) {
                let match_cur = item.match_cur;
                let nt = match parser.to_mut().parse_nonterminal(kind) {
                    Err(mut err) => {
                        err.span_label(
                            span,
                            format!("while parsing argument for this `{}` macro fragment", kind),
                        )
                        .emit();
                        return ErrorReported;
                    }
                    Ok(nt) => nt,
                };
                item.push_match(match_cur, MatchedNonterminal(Lrc::new(nt)));
                item.idx += 1;
                item.match_cur += 1;
            } else {
                unreachable!()
            }
            cur_items.push(item);
        }

        assert!(!cur_items.is_empty());
    }
}

1193
compiler/rustc_expand/src/mbe/macro_rules.rs
Normal file

File diff suppressed because it is too large

282
compiler/rustc_expand/src/mbe/quoted.rs
Normal file

@@ -0,0 +1,282 @@
use crate::mbe::macro_parser;
use crate::mbe::{Delimited, KleeneOp, KleeneToken, SequenceRepetition, TokenTree};

use rustc_ast::token::{self, Token};
use rustc_ast::tokenstream;
use rustc_ast::NodeId;
use rustc_ast_pretty::pprust;
use rustc_session::parse::ParseSess;
use rustc_span::symbol::{kw, Ident};

use rustc_span::Span;

use rustc_data_structures::sync::Lrc;

const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
                                        `ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, \
                                        `literal`, `path`, `meta`, `tt`, `item` and `vis`";

/// Takes a `tokenstream::TokenStream` and returns a `Vec<self::TokenTree>`. Specifically, this
/// takes a generic `TokenStream`, such as is used in the rest of the compiler, and returns a
/// collection of `TokenTree` for use in parsing a macro.
///
/// # Parameters
///
/// - `input`: a token stream to read from, the contents of which we are parsing.
/// - `expect_matchers`: `parse` can be used to parse either the "patterns" or the "body" of a
///   macro. Both take roughly the same form _except_ that in a pattern, metavars are declared with
///   their "matcher" type. For example `$var:expr` or `$id:ident`. In this example, `expr` and
///   `ident` are "matchers". They are not present in the body of a macro rule -- just in the
///   pattern, so we pass a parameter to indicate whether to expect them or not.
/// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `node_id`: the `NodeId` of the macro we are parsing.
///
/// # Returns
///
/// A collection of `self::TokenTree`. There may also be some errors emitted to `sess`.
pub(super) fn parse(
    input: tokenstream::TokenStream,
    expect_matchers: bool,
    sess: &ParseSess,
    node_id: NodeId,
) -> Vec<TokenTree> {
    // Will contain the final collection of `self::TokenTree`.
    let mut result = Vec::new();

    // For each token tree in `input`, parse the token into a `self::TokenTree`, consuming
    // additional trees if need be.
    let mut trees = input.trees();
    while let Some(tree) = trees.next() {
        // Given the parsed tree, if there is a metavar and we are expecting matchers, actually
        // parse out the matcher (i.e., in `$id:ident` this would parse the `:` and `ident`).
        let tree = parse_tree(tree, &mut trees, expect_matchers, sess, node_id);
        match tree {
            TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                let span = match trees.next() {
                    Some(tokenstream::TokenTree::Token(Token { kind: token::Colon, span })) => {
                        match trees.next() {
                            Some(tokenstream::TokenTree::Token(token)) => match token.ident() {
                                Some((frag, _)) => {
                                    let span = token.span.with_lo(start_sp.lo());
                                    let kind = token::NonterminalKind::from_symbol(frag.name)
                                        .unwrap_or_else(|| {
                                            let msg = format!(
                                                "invalid fragment specifier `{}`",
                                                frag.name
                                            );
                                            sess.span_diagnostic
                                                .struct_span_err(span, &msg)
                                                .help(VALID_FRAGMENT_NAMES_MSG)
                                                .emit();
                                            token::NonterminalKind::Ident
                                        });
                                    result.push(TokenTree::MetaVarDecl(span, ident, kind));
                                    continue;
                                }
                                _ => token.span,
                            },
                            tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span),
                        }
                    }
                    tree => tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(start_sp),
                };
                sess.span_diagnostic.struct_span_err(span, "missing fragment specifier").emit();
                continue;
            }

            // Not a metavar or no matchers allowed, so just return the tree.
            _ => result.push(tree),
        }
    }
    result
}
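
// Illustration (not from the original source): with `expect_matchers == true`,
// the stream for `$x:ident , $( $e:expr ),*` parses to roughly
// `[MetaVarDecl(x, ident), Token(,), Sequence([MetaVarDecl(e, expr)], sep: Some(,), op: *)]`,
// whereas with `expect_matchers == false` (a macro body) the `:ident` suffix
// would be left as ordinary `:` and `ident` tokens after a `MetaVar(x)`.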
|
||||
|
||||
/// Takes a `tokenstream::TokenTree` and returns a `self::TokenTree`. Specifically, this takes a
/// generic `TokenTree`, such as is used in the rest of the compiler, and returns a `TokenTree`
/// for use in parsing a macro.
///
/// Converting the given tree may involve reading more tokens.
///
/// # Parameters
///
/// - `tree`: the tree we wish to convert.
/// - `outer_trees`: an iterator over trees. We may need to read more tokens from it in order to
///   finish converting `tree`.
/// - `expect_matchers`: same as for `parse` (see above).
/// - `sess`: the parsing session. Any errors will be emitted to this session.
/// - `node_id`: the `NodeId` of the enclosing macro definition.
fn parse_tree(
    tree: tokenstream::TokenTree,
    outer_trees: &mut impl Iterator<Item = tokenstream::TokenTree>,
    expect_matchers: bool,
    sess: &ParseSess,
    node_id: NodeId,
) -> TokenTree {
    // Depending on what `tree` is, we could be parsing different parts of a macro
    match tree {
        // `tree` is a `$` token. Look at the next token in `trees`.
        tokenstream::TokenTree::Token(Token { kind: token::Dollar, span }) => {
            // FIXME: Handle `None`-delimited groups in a more systematic way
            // during parsing.
            let mut next = outer_trees.next();
            let mut trees: Box<dyn Iterator<Item = tokenstream::TokenTree>>;
            if let Some(tokenstream::TokenTree::Delimited(_, token::NoDelim, tts)) = next {
                trees = Box::new(tts.into_trees());
                next = trees.next();
            } else {
                trees = Box::new(outer_trees);
            }

            match next {
                // `tree` is followed by a delimited set of token trees. This indicates the
                // beginning of a repetition sequence in the macro (e.g., `$(pat)*`).
                Some(tokenstream::TokenTree::Delimited(span, delim, tts)) => {
                    // Must have `(`, not `{` or `[`.
                    if delim != token::Paren {
                        let tok = pprust::token_kind_to_string(&token::OpenDelim(delim));
                        let msg = format!("expected `(`, found `{}`", tok);
                        sess.span_diagnostic.span_err(span.entire(), &msg);
                    }
                    // Parse the contents of the sequence itself.
                    let sequence = parse(tts, expect_matchers, sess, node_id);
                    // Get the Kleene operator and optional separator.
                    let (separator, kleene) =
                        parse_sep_and_kleene_op(&mut trees, span.entire(), sess);
                    // Count the number of captured "names" (i.e., named metavars).
                    let name_captures = macro_parser::count_names(&sequence);
                    TokenTree::Sequence(
                        span,
                        Lrc::new(SequenceRepetition {
                            tts: sequence,
                            separator,
                            kleene,
                            num_captures: name_captures,
                        }),
                    )
                }

                // `tree` is followed by an `ident`. This could be `$meta_var` or the `$crate`
                // special metavariable that names the crate of the invocation.
                Some(tokenstream::TokenTree::Token(token)) if token.is_ident() => {
                    let (ident, is_raw) = token.ident().unwrap();
                    let span = ident.span.with_lo(span.lo());
                    if ident.name == kw::Crate && !is_raw {
                        TokenTree::token(token::Ident(kw::DollarCrate, is_raw), span)
                    } else {
                        TokenTree::MetaVar(span, ident)
                    }
                }

                // `tree` is followed by some other token. This is an error.
                Some(tokenstream::TokenTree::Token(token)) => {
                    let msg = format!(
                        "expected identifier, found `{}`",
                        pprust::token_to_string(&token),
                    );
                    sess.span_diagnostic.span_err(token.span, &msg);
                    TokenTree::MetaVar(token.span, Ident::invalid())
                }

                // There are no more tokens. Just return the `$` we already have.
                None => TokenTree::token(token::Dollar, span),
            }
        }

        // `tree` is an arbitrary token. Keep it.
        tokenstream::TokenTree::Token(token) => TokenTree::Token(token),

        // `tree` is the beginning of a delimited set of tokens (e.g., `(` or `{`). We need to
        // descend into the delimited set and further parse it.
        tokenstream::TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
            span,
            Lrc::new(Delimited { delim, tts: parse(tts, expect_matchers, sess, node_id) }),
        ),
    }
}

/// Takes a token and returns `Some(KleeneOp)` if the token is `+`, `*`, or `?`. Otherwise,
/// returns `None`.
fn kleene_op(token: &Token) -> Option<KleeneOp> {
    match token.kind {
        token::BinOp(token::Star) => Some(KleeneOp::ZeroOrMore),
        token::BinOp(token::Plus) => Some(KleeneOp::OneOrMore),
        token::Question => Some(KleeneOp::ZeroOrOne),
        _ => None,
    }
}

/// Parses the next token tree of the input looking for a `KleeneOp`. Returns
///
/// - `Ok(Ok((op, span)))` if the next token tree is a `KleeneOp`
/// - `Ok(Err(token))` if the next token tree is a token but not a `KleeneOp`
/// - `Err(span)` if the next token tree is not a token
fn parse_kleene_op(
    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
    span: Span,
) -> Result<Result<(KleeneOp, Span), Token>, Span> {
    match input.next() {
        Some(tokenstream::TokenTree::Token(token)) => match kleene_op(&token) {
            Some(op) => Ok(Ok((op, token.span))),
            None => Ok(Err(token)),
        },
        tree => Err(tree.as_ref().map(tokenstream::TokenTree::span).unwrap_or(span)),
    }
}

/// Attempts to parse a single Kleene operator, possibly with a separator.
///
/// For example, in a pattern such as `$(a),*`, `a` is the pattern to be repeated, `,` is the
/// separator, and `*` is the Kleene operator. This function is specifically concerned with parsing
/// the last two tokens of such a pattern: namely, the optional separator and the Kleene operator
/// itself. Note that here we are parsing the _macro_ itself, rather than trying to match some
/// stream of tokens in an invocation of a macro.
///
/// This function takes an input iterator `input` corresponding to `span` and a parsing session
/// `sess`. If the next one or two tokens in `input` correspond to a Kleene operator and
/// separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an error with the
/// appropriate span is emitted to `sess` and a dummy value is returned.
fn parse_sep_and_kleene_op(
    input: &mut impl Iterator<Item = tokenstream::TokenTree>,
    span: Span,
    sess: &ParseSess,
) -> (Option<Token>, KleeneToken) {
    // We basically look at two token trees here, denoted as #1 and #2 below.
    let span = match parse_kleene_op(input, span) {
        // #1 is a `?`, `+`, or `*` KleeneOp.
        Ok(Ok((op, span))) => return (None, KleeneToken::new(op, span)),

        // #1 is a separator followed by #2, a KleeneOp.
        Ok(Err(token)) => match parse_kleene_op(input, token.span) {
            // #2 is the `?` Kleene op, which does not take a separator (error).
            Ok(Ok((KleeneOp::ZeroOrOne, span))) => {
                // Error!
                sess.span_diagnostic.span_err(
                    token.span,
                    "the `?` macro repetition operator does not take a separator",
                );

                // Return a dummy.
                return (None, KleeneToken::new(KleeneOp::ZeroOrMore, span));
            }

            // #2 is a KleeneOp :D
            Ok(Ok((op, span))) => return (Some(token), KleeneToken::new(op, span)),

            // #2 is a random token or not a token at all :(
            Ok(Err(Token { span, .. })) | Err(span) => span,
        },

        // #1 is not a token.
        Err(span) => span,
    };

    // If we ever get to this point, we have experienced an "unexpected token" error.
    sess.span_diagnostic.span_err(span, "expected one of: `*`, `+`, or `?`");

    // Return a dummy.
    (None, KleeneToken::new(KleeneOp::ZeroOrMore, span))
}
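
// NOTE (illustrative sketch, not part of the original source): the grammar
// accepted by `parse_sep_and_kleene_op`, seen from the macro author's side.
// Each repetition below ends with an optional one-token separator followed by
// a Kleene operator:
//
//     macro_rules! demo {
//         ($($a:expr),*) => { 0 };  // separator `,`, Kleene `*`
//         ($($b:expr);+) => { 1 };  // separator `;`, Kleene `+`
//         ($($c:expr)?) => { 2 };   // no separator, Kleene `?`
//     }
//
// Writing `$($c:expr),?` instead is rejected with "the `?` macro repetition
// operator does not take a separator".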
395
compiler/rustc_expand/src/mbe/transcribe.rs
Normal file
@@ -0,0 +1,395 @@
use crate::base::ExtCtxt;
use crate::mbe;
use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};

use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast::token::{self, NtTT, Token};
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
use rustc_ast::MacCall;
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{pluralize, PResult};
use rustc_span::hygiene::{ExpnId, Transparency};
use rustc_span::symbol::MacroRulesNormalizedIdent;
use rustc_span::Span;

use smallvec::{smallvec, SmallVec};
use std::mem;

// A Marker adds the given mark to the syntax context.
struct Marker(ExpnId, Transparency);

impl MutVisitor for Marker {
    fn visit_span(&mut self, span: &mut Span) {
        *span = span.apply_mark(self.0, self.1)
    }

    fn visit_mac(&mut self, mac: &mut MacCall) {
        mut_visit::noop_visit_mac(mac, self)
    }
}

/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
enum Frame {
    Delimited { forest: Lrc<mbe::Delimited>, idx: usize, span: DelimSpan },
    Sequence { forest: Lrc<mbe::SequenceRepetition>, idx: usize, sep: Option<Token> },
}

impl Frame {
    /// Construct a new frame around the delimited set of tokens.
    fn new(tts: Vec<mbe::TokenTree>) -> Frame {
        let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
        Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
    }
}

impl Iterator for Frame {
    type Item = mbe::TokenTree;

    fn next(&mut self) -> Option<mbe::TokenTree> {
        match *self {
            Frame::Delimited { ref forest, ref mut idx, .. } => {
                *idx += 1;
                forest.tts.get(*idx - 1).cloned()
            }
            Frame::Sequence { ref forest, ref mut idx, .. } => {
                *idx += 1;
                forest.tts.get(*idx - 1).cloned()
            }
        }
    }
}

/// Performs Macro-By-Example transcription.
/// - `interp` is a map of meta-variables to the tokens (non-terminals) they matched in the
///   invocation. We are assuming we already know there is a match.
/// - `src` is the RHS of the MBE, that is, the "example" we are filling in.
///
/// For example,
///
/// ```rust
/// macro_rules! foo {
///     ($id:ident) => { println!("{}", stringify!($id)); }
/// }
///
/// foo!(bar);
/// ```
///
/// `interp` would contain `$id => bar` and `src` would contain `println!("{}", stringify!($id));`.
///
/// `transcribe` would return a `TokenStream` containing `println!("{}", stringify!(bar));`.
///
/// Along the way, we do some additional error checking.
pub(super) fn transcribe<'a>(
    cx: &ExtCtxt<'a>,
    interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    src: Vec<mbe::TokenTree>,
    transparency: Transparency,
) -> PResult<'a, TokenStream> {
    // Nothing for us to transcribe...
    if src.is_empty() {
        return Ok(TokenStream::default());
    }

    // We descend into the RHS (`src`), expanding things as we go. This stack contains the things
    // we have yet to expand/are still expanding. We start the stack off with the whole RHS.
    let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];

    // As we descend in the RHS, we will need to be able to match nested sequences of matchers.
    // `repeats` keeps track of where we are in matching at each level, with the last element being
    // the most deeply nested sequence. This is used as a stack.
    let mut repeats = Vec::new();

    // `result` contains the resulting token stream from the TokenTree we just finished processing.
    // At the end, this will contain the full result of transcription, but at arbitrary points
    // during `transcribe`, `result` will contain subsets of the final result.
    //
    // Specifically, as we descend into each TokenTree, we will push the existing results onto the
    // `result_stack` and clear `results`. We will then produce the results of transcribing the
    // TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
    // `result_stack` and append `results` to it to produce the new `results` up to that point.
    //
    // Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
    // again, and we are done transcribing.
    let mut result: Vec<TreeAndJoint> = Vec::new();
    let mut result_stack = Vec::new();
    let mut marker = Marker(cx.current_expansion.id, transparency);

    loop {
        // Look at the last frame on the stack.
        let tree = if let Some(tree) = stack.last_mut().unwrap().next() {
            // If it still has a TokenTree we have not looked at yet, use that tree.
            tree
        } else {
            // This else-case never produces a value for `tree` (it `continue`s or `return`s).

            // Otherwise, if we have just reached the end of a sequence and we can keep repeating,
            // go back to the beginning of the sequence.
            if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
                let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
                *repeat_idx += 1;
                if repeat_idx < repeat_len {
                    *idx = 0;
                    if let Some(sep) = sep {
                        result.push(TokenTree::Token(sep.clone()).into());
                    }
                    continue;
                }
            }

            // We are done with the top of the stack. Pop it. Depending on what it was, we do
            // different things. Note that the outermost item must be the delimited, wrapped RHS
            // that was passed in originally to `transcribe`.
            match stack.pop().unwrap() {
                // Done with a sequence. Pop from repeats.
                Frame::Sequence { .. } => {
                    repeats.pop();
                }

                // We are done processing a Delimited. If this is the top-level delimited, we are
                // done. Otherwise, we unwind the result_stack to append what we have produced to
                // any previous results.
                Frame::Delimited { forest, span, .. } => {
                    if result_stack.is_empty() {
                        // No results left to compute! We are back at the top-level.
                        return Ok(TokenStream::new(result));
                    }

                    // Step back into the parent Delimited.
                    let tree = TokenTree::Delimited(span, forest.delim, TokenStream::new(result));
                    result = result_stack.pop().unwrap();
                    result.push(tree.into());
                }
            }
            continue;
        };

        // At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
        // `tree` contains the next `TokenTree` to be processed.
        match tree {
            // We are descending into a sequence. We first make sure that the matchers in the RHS
            // and the matches in `interp` have the same shape. Otherwise, either the caller or the
            // macro writer has made a mistake.
            seq @ mbe::TokenTree::Sequence(..) => {
                match lockstep_iter_size(&seq, interp, &repeats) {
                    LockstepIterSize::Unconstrained => {
                        return Err(cx.struct_span_err(
                            seq.span(), /* blame macro writer */
                            "attempted to repeat an expression containing no syntax variables \
                             matched as repeating at this depth",
                        ));
                    }

                    LockstepIterSize::Contradiction(ref msg) => {
                        // FIXME: this really ought to be caught at macro definition time... It
                        // happens when two meta-variables are used in the same repetition in a
                        // sequence, but they come from different sequence matchers and repeat
                        // different amounts.
                        return Err(cx.struct_span_err(seq.span(), &msg[..]));
                    }

                    LockstepIterSize::Constraint(len, _) => {
                        // We do this to avoid an extra clone above. We know that this is a
                        // sequence already.
                        let (sp, seq) = if let mbe::TokenTree::Sequence(sp, seq) = seq {
                            (sp, seq)
                        } else {
                            unreachable!()
                        };

                        // Is the repetition empty?
                        if len == 0 {
                            if seq.kleene.op == mbe::KleeneOp::OneOrMore {
                                // FIXME: this really ought to be caught at macro definition
                                // time... It happens when the Kleene operator in the matcher and
                                // the body for the same meta-variable do not match.
                                return Err(cx.struct_span_err(
                                    sp.entire(),
                                    "this must repeat at least once",
                                ));
                            }
                        } else {
                            // 0 is the initial counter (we have done 0 repetitions so far). `len`
                            // is the total number of repetitions we should generate.
                            repeats.push((0, len));

                            // The first time we encounter the sequence we push it to the stack. It
                            // then gets reused (see the beginning of the loop) until we are done
                            // repeating.
                            stack.push(Frame::Sequence {
                                idx: 0,
                                sep: seq.separator.clone(),
                                forest: seq,
                            });
                        }
                    }
                }
            }

            // Replace the meta-var with the matched token tree from the invocation.
            mbe::TokenTree::MetaVar(mut sp, mut original_ident) => {
                // Find the matched nonterminal from the macro invocation, and use it to replace
                // the meta-var.
                let ident = MacroRulesNormalizedIdent::new(original_ident);
                if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
                    if let MatchedNonterminal(ref nt) = cur_matched {
                        // FIXME #2887: why do we apply a mark when matching a token tree meta-var
                        // (e.g. `$x:tt`), but not when we are matching any other type of token
                        // tree?
                        if let NtTT(ref tt) = **nt {
                            result.push(tt.clone().into());
                        } else {
                            marker.visit_span(&mut sp);
                            let token = TokenTree::token(token::Interpolated(nt.clone()), sp);
                            result.push(token.into());
                        }
                    } else {
                        // We were unable to descend far enough. This is an error.
                        return Err(cx.struct_span_err(
                            sp, /* blame the macro writer */
                            &format!("variable '{}' is still repeating at this depth", ident),
                        ));
                    }
                } else {
                    // If we aren't able to match the meta-var, we push it back into the result but
                    // with modified syntax context. (I believe this supports nested macros.)
                    marker.visit_span(&mut sp);
                    marker.visit_ident(&mut original_ident);
                    result.push(TokenTree::token(token::Dollar, sp).into());
                    result.push(TokenTree::Token(Token::from_ast_ident(original_ident)).into());
                }
            }

            // If we are entering a new delimiter, we push its contents to the `stack` to be
            // processed, and we push all of the currently produced results to the `result_stack`.
            // We will produce all of the results of the inside of the `Delimited` and then we will
            // jump back out of the Delimited, pop the result_stack and add the new results back to
            // the previous results (from outside the Delimited).
            mbe::TokenTree::Delimited(mut span, delimited) => {
                mut_visit::visit_delim_span(&mut span, &mut marker);
                stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
                result_stack.push(mem::take(&mut result));
            }

            // Nothing much to do here. Just push the token to the result, being careful to
            // preserve syntax context.
            mbe::TokenTree::Token(token) => {
                let mut tt = TokenTree::Token(token);
                marker.visit_tt(&mut tt);
                result.push(tt.into());
            }

            // There should be no meta-var declarations in the invocation of a macro.
            mbe::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl`"),
        }
    }
}

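// NOTE (illustrative, not part of the original source): a rough trace of
// `result` / `result_stack` above for an RHS of `a ( b )`, to make the stack
// discipline concrete:
//
//     token `a`         result = [a]         result_stack = []
//     enter `( ... )`   result = []          result_stack = [[a]]
//     token `b`         result = [b]         result_stack = [[a]]
//     close `( ... )`   result = [a, (b)]    result_stack = []
//
// Popping an empty `result_stack` at the outermost frame is what signals the
// end of transcription.
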
/// Looks up the meta-var named `ident` and returns the matched token tree from the invocation
/// using the set of matches `interpolations`.
///
/// See the definition of `repeats` in the `transcribe` function. `repeats` is used to descend
/// into the right place in nested matchers. If we attempt to descend too far, the macro writer
/// has made a mistake, and we return `None`.
fn lookup_cur_matched<'a>(
    ident: MacroRulesNormalizedIdent,
    interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> Option<&'a NamedMatch> {
    interpolations.get(&ident).map(|matched| {
        let mut matched = matched;
        for &(idx, _) in repeats {
            match matched {
                MatchedNonterminal(_) => break,
                MatchedSeq(ref ads) => matched = ads.get(idx).unwrap(),
            }
        }

        matched
    })
}
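
// NOTE (illustrative, not part of the original source): for a matcher such as
// `$( $( $x:ident )* ; )*` invoked with `a b; c;`, the matches for `x` form
// nested `MatchedSeq`s: `[[a, b], [c]]`. With `repeats == [(1, 2), (0, 1)]`
// (second outer repetition, first inner one), `lookup_cur_matched` descends to
// the match for `c`. If the RHS uses `$x` at a shallower depth, the lookup
// stops at a `MatchedSeq` rather than a `MatchedNonterminal`, and `transcribe`
// reports that the variable "is still repeating at this depth".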

/// An accumulator over a TokenTree to be used with `fold`. During transcription, we need to make
/// sure that the size of each sequence and all of its nested sequences are the same as the sizes
/// of all the matched (nested) sequences in the macro invocation. If they don't match, somebody
/// has made a mistake (either the macro writer or caller).
#[derive(Clone)]
enum LockstepIterSize {
    /// No constraints on the length of the matcher. This is true for any TokenTree variant except
    /// a `MetaVar` with an actual `MatchedSeq` (as opposed to a `MatchedNonterminal`).
    Unconstrained,

    /// A `MetaVar` with an actual `MatchedSeq`. The length of the match and the name of the
    /// meta-var are returned.
    Constraint(usize, MacroRulesNormalizedIdent),

    /// Two `Constraint`s on the same sequence had different lengths. This is an error.
    Contradiction(String),
}

impl LockstepIterSize {
    /// Find incompatibilities in matcher/invocation sizes.
    /// - `Unconstrained` is compatible with everything.
    /// - `Contradiction` is incompatible with everything.
    /// - `Constraint(len)` is only compatible with other constraints of the same length.
    fn with(self, other: LockstepIterSize) -> LockstepIterSize {
        match self {
            LockstepIterSize::Unconstrained => other,
            LockstepIterSize::Contradiction(_) => self,
            LockstepIterSize::Constraint(l_len, ref l_id) => match other {
                LockstepIterSize::Unconstrained => self,
                LockstepIterSize::Contradiction(_) => other,
                LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
                LockstepIterSize::Constraint(r_len, r_id) => {
                    let msg = format!(
                        "meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
                        l_id,
                        l_len,
                        pluralize!(l_len),
                        r_id,
                        r_len,
                        pluralize!(r_len),
                    );
                    LockstepIterSize::Contradiction(msg)
                }
            },
        }
    }
}

/// Given a `tree`, make sure that all sequences have the same length as the matches for the
/// appropriate meta-vars in `interpolations`.
///
/// Note that if `repeats` does not match the exact correct depth of a meta-var,
/// `lookup_cur_matched` will return `None`, which is why this still works even in the presence of
/// multiple nested matcher sequences.
fn lockstep_iter_size(
    tree: &mbe::TokenTree,
    interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
    repeats: &[(usize, usize)],
) -> LockstepIterSize {
    use mbe::TokenTree;
    match *tree {
        TokenTree::Delimited(_, ref delimed) => {
            delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::Sequence(_, ref seq) => {
            seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
                size.with(lockstep_iter_size(tt, interpolations, repeats))
            })
        }
        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
            let name = MacroRulesNormalizedIdent::new(name);
            match lookup_cur_matched(name, interpolations, repeats) {
                Some(matched) => match matched {
                    MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
                    MatchedSeq(ref ads) => LockstepIterSize::Constraint(ads.len(), name),
                },
                _ => LockstepIterSize::Unconstrained,
            }
        }
        TokenTree::Token(..) => LockstepIterSize::Unconstrained,
    }
}
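
// NOTE (illustrative, not part of the original source): a macro that trips the
// `Contradiction` case above, because `$a` and `$b` repeat different amounts
// within the same transcription sequence:
//
//     macro_rules! zip {
//         ($($a:ident)* ; $($b:ident)*) => { $( ($a, $b) )* };
//     }
//     zip!(x y ; p);
//     // error: meta-variable `a` repeats 2 times, but `b` repeats 1 time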
312
compiler/rustc_expand/src/module.rs
Normal file
@@ -0,0 +1,312 @@
use rustc_ast::{token, Attribute, Mod};
use rustc_errors::{struct_span_err, PResult};
use rustc_parse::new_parser_from_file;
use rustc_session::parse::ParseSess;
use rustc_session::Session;
use rustc_span::source_map::{FileName, Span};
use rustc_span::symbol::{sym, Ident};

use std::path::{self, Path, PathBuf};

#[derive(Clone)]
pub struct Directory {
    pub path: PathBuf,
    pub ownership: DirectoryOwnership,
}

#[derive(Copy, Clone)]
pub enum DirectoryOwnership {
    Owned {
        // None if `mod.rs`, `Some("foo")` if we're in `foo.rs`.
        relative: Option<Ident>,
    },
    UnownedViaBlock,
    UnownedViaMod,
}

/// Information about the path to a module.
// Public for rustfmt usage.
pub struct ModulePath<'a> {
    name: String,
    path_exists: bool,
    pub result: PResult<'a, ModulePathSuccess>,
}

// Public for rustfmt usage.
pub struct ModulePathSuccess {
    pub path: PathBuf,
    pub ownership: DirectoryOwnership,
}

crate fn parse_external_mod(
    sess: &Session,
    id: Ident,
    span: Span, // The span to blame on errors.
    Directory { mut ownership, path }: Directory,
    attrs: &mut Vec<Attribute>,
    pop_mod_stack: &mut bool,
) -> (Mod, Directory) {
    // We bail on the first error, but that error does not cause a fatal error... (1)
    let result: PResult<'_, _> = try {
        // Extract the file path and the new ownership.
        let mp = submod_path(sess, id, span, &attrs, ownership, &path)?;
        ownership = mp.ownership;

        // Ensure file paths are acyclic.
        let mut included_mod_stack = sess.parse_sess.included_mod_stack.borrow_mut();
        error_on_circular_module(&sess.parse_sess, span, &mp.path, &included_mod_stack)?;
        included_mod_stack.push(mp.path.clone());
        *pop_mod_stack = true; // We have pushed, so notify caller.
        drop(included_mod_stack);

        // Actually parse the external file as a module.
        let mut module =
            new_parser_from_file(&sess.parse_sess, &mp.path, Some(span)).parse_mod(&token::Eof)?;
        module.0.inline = false;
        module
    };
    // (1) ...instead, we return a dummy module.
    let (module, mut new_attrs) = result.map_err(|mut err| err.emit()).unwrap_or_default();
    attrs.append(&mut new_attrs);

    // Extract the directory path for submodules of `module`.
    let path = sess.source_map().span_to_unmapped_path(module.inner);
    let mut path = match path {
        FileName::Real(name) => name.into_local_path(),
        other => PathBuf::from(other.to_string()),
    };
    path.pop();

    (module, Directory { ownership, path })
}

fn error_on_circular_module<'a>(
    sess: &'a ParseSess,
    span: Span,
    path: &Path,
    included_mod_stack: &[PathBuf],
) -> PResult<'a, ()> {
    if let Some(i) = included_mod_stack.iter().position(|p| *p == path) {
        let mut err = String::from("circular modules: ");
        for p in &included_mod_stack[i..] {
            err.push_str(&p.to_string_lossy());
            err.push_str(" -> ");
        }
        err.push_str(&path.to_string_lossy());
        return Err(sess.span_diagnostic.struct_span_err(span, &err[..]));
    }
    Ok(())
}

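// NOTE (illustrative, not part of the original source): the cycle check above
// fires for mutually-including files, e.g.:
//
//     // a.rs
//     #[path = "b.rs"] mod b;
//     // b.rs
//     #[path = "a.rs"] mod a;
//
// Parsing `a.rs` pushes it on `included_mod_stack`, reaches `b.rs`, and then
// sees `a.rs` again, producing "circular modules: a.rs -> b.rs -> a.rs".
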
crate fn push_directory(
    sess: &Session,
    id: Ident,
    attrs: &[Attribute],
    Directory { mut ownership, mut path }: Directory,
) -> Directory {
    if let Some(filename) = sess.first_attr_value_str_by_name(attrs, sym::path) {
        path.push(&*filename.as_str());
        ownership = DirectoryOwnership::Owned { relative: None };
    } else {
        // We have to push on the current module name in the case of relative
        // paths in order to ensure that any additional module paths from inline
        // `mod x { ... }` come after the relative extension.
        //
        // For example, a `mod z { ... }` inside `x/y.rs` should set the current
        // directory path to `/x/y/z`, not `/x/z` with a relative offset of `y`.
        if let DirectoryOwnership::Owned { relative } = &mut ownership {
            if let Some(ident) = relative.take() {
                // Remove the relative offset.
                path.push(&*ident.as_str());
            }
        }
        path.push(&*id.as_str());
    }
    Directory { ownership, path }
}

fn submod_path<'a>(
    sess: &'a Session,
    id: Ident,
    span: Span,
    attrs: &[Attribute],
    ownership: DirectoryOwnership,
    dir_path: &Path,
) -> PResult<'a, ModulePathSuccess> {
    if let Some(path) = submod_path_from_attr(sess, attrs, dir_path) {
        let ownership = match path.file_name().and_then(|s| s.to_str()) {
            // All `#[path]` files are treated as though they are a `mod.rs` file.
            // This means that `mod foo;` declarations inside `#[path]`-included
            // files are siblings.
            //
            // Note that this will produce weirdness when a file named `foo.rs` is
            // `#[path]` included and contains a `mod foo;` declaration.
            // If you encounter this, it's your own darn fault :P
            Some(_) => DirectoryOwnership::Owned { relative: None },
            _ => DirectoryOwnership::UnownedViaMod,
        };
        return Ok(ModulePathSuccess { ownership, path });
    }

    let relative = match ownership {
        DirectoryOwnership::Owned { relative } => relative,
        DirectoryOwnership::UnownedViaBlock | DirectoryOwnership::UnownedViaMod => None,
    };
    let ModulePath { path_exists, name, result } =
        default_submod_path(&sess.parse_sess, id, span, relative, dir_path);
    match ownership {
        DirectoryOwnership::Owned { .. } => Ok(result?),
        DirectoryOwnership::UnownedViaBlock => {
            let _ = result.map_err(|mut err| err.cancel());
            error_decl_mod_in_block(&sess.parse_sess, span, path_exists, &name)
        }
        DirectoryOwnership::UnownedViaMod => {
            let _ = result.map_err(|mut err| err.cancel());
            error_cannot_declare_mod_here(&sess.parse_sess, span, path_exists, &name)
        }
    }
}

fn error_decl_mod_in_block<'a, T>(
    sess: &'a ParseSess,
    span: Span,
    path_exists: bool,
    name: &str,
) -> PResult<'a, T> {
    let msg = "Cannot declare a non-inline module inside a block unless it has a path attribute";
    let mut err = sess.span_diagnostic.struct_span_err(span, msg);
    if path_exists {
        let msg = format!("Maybe `use` the module `{}` instead of redeclaring it", name);
        err.span_note(span, &msg);
    }
    Err(err)
}

fn error_cannot_declare_mod_here<'a, T>(
    sess: &'a ParseSess,
    span: Span,
    path_exists: bool,
    name: &str,
) -> PResult<'a, T> {
    let mut err =
        sess.span_diagnostic.struct_span_err(span, "cannot declare a new module at this location");
    if !span.is_dummy() {
        if let FileName::Real(src_name) = sess.source_map().span_to_filename(span) {
            let src_path = src_name.into_local_path();
            if let Some(stem) = src_path.file_stem() {
                let mut dest_path = src_path.clone();
                dest_path.set_file_name(stem);
                dest_path.push("mod.rs");
                err.span_note(
                    span,
                    &format!(
                        "maybe move this module `{}` to its own directory via `{}`",
                        src_path.display(),
                        dest_path.display()
                    ),
                );
            }
        }
    }
    if path_exists {
        err.span_note(
            span,
            &format!("... or maybe `use` the module `{}` instead of possibly redeclaring it", name),
        );
    }
    Err(err)
}

/// Derives a submodule path from the first found `#[path = "path_string"]`.
/// The provided `dir_path` is joined with the `path_string`.
// Public for rustfmt usage.
pub fn submod_path_from_attr(
    sess: &Session,
    attrs: &[Attribute],
    dir_path: &Path,
) -> Option<PathBuf> {
    // Extract the path string from the first `#[path = "path_string"]` attribute.
    let path_string = sess.first_attr_value_str_by_name(attrs, sym::path)?;
    let path_string = path_string.as_str();

    // On windows, the base path might have the form
    // `\\?\foo\bar` in which case it does not tolerate
    // mixed `/` and `\` separators, so canonicalize
    // `/` to `\`.
    #[cfg(windows)]
    let path_string = path_string.replace("/", "\\");

    Some(dir_path.join(&*path_string))
}

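// NOTE (illustrative, not part of the original source): `submod_path_from_attr`
// handles declarations like the following, joining the literal onto `dir_path`:
//
//     #[path = "generated/bindings.rs"]
//     mod bindings;
//
// The names here are hypothetical; any `#[path = "..."]` value is resolved the
// same way, and on Windows `/` separators are canonicalized to `\`.
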
/// Returns a path to a module.
// Public for rustfmt usage.
pub fn default_submod_path<'a>(
    sess: &'a ParseSess,
    id: Ident,
    span: Span,
    relative: Option<Ident>,
    dir_path: &Path,
) -> ModulePath<'a> {
    // If we're in a foo.rs file instead of a mod.rs file,
    // we need to look for submodules in
    // `./foo/<id>.rs` and `./foo/<id>/mod.rs` rather than
    // `./<id>.rs` and `./<id>/mod.rs`.
    let relative_prefix_string;
    let relative_prefix = if let Some(ident) = relative {
        relative_prefix_string = format!("{}{}", ident.name, path::MAIN_SEPARATOR);
        &relative_prefix_string
    } else {
        ""
    };

    let mod_name = id.name.to_string();
    let default_path_str = format!("{}{}.rs", relative_prefix, mod_name);
    let secondary_path_str =
        format!("{}{}{}mod.rs", relative_prefix, mod_name, path::MAIN_SEPARATOR);
    let default_path = dir_path.join(&default_path_str);
    let secondary_path = dir_path.join(&secondary_path_str);
    let default_exists = sess.source_map().file_exists(&default_path);
    let secondary_exists = sess.source_map().file_exists(&secondary_path);

    let result = match (default_exists, secondary_exists) {
        (true, false) => Ok(ModulePathSuccess {
            path: default_path,
            ownership: DirectoryOwnership::Owned { relative: Some(id) },
        }),
        (false, true) => Ok(ModulePathSuccess {
            path: secondary_path,
            ownership: DirectoryOwnership::Owned { relative: None },
        }),
        (false, false) => {
            let mut err = struct_span_err!(
                sess.span_diagnostic,
                span,
                E0583,
                "file not found for module `{}`",
                mod_name,
            );
            err.help(&format!(
                "to create the module `{}`, create file \"{}\"",
                mod_name,
                default_path.display(),
            ));
            Err(err)
        }
        (true, true) => {
            let mut err = struct_span_err!(
                sess.span_diagnostic,
                span,
                E0761,
                "file for module `{}` found at both {} and {}",
                mod_name,
                default_path_str,
                secondary_path_str,
            );
            err.help("delete or rename one of them to remove the ambiguity");
            Err(err)
        }
    };

    ModulePath { name: mod_name, path_exists: default_exists || secondary_exists, result }
}
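
// NOTE (illustrative, not part of the original source): given `mod baz;` inside
// `src/foo/bar.rs` (a non-mod.rs file, so `relative` is `Some(bar)`), the two
// candidate paths computed above are:
//
//     src/foo/bar/baz.rs        // default_path
//     src/foo/bar/baz/mod.rs    // secondary_path
//
// Exactly one must exist: neither existing is E0583, both existing is E0761.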
73
compiler/rustc_expand/src/mut_visit/tests.rs
Normal file
@@ -0,0 +1,73 @@
use crate::tests::{matches_codepattern, string_to_crate};

use rustc_ast as ast;
use rustc_ast::mut_visit::{self, MutVisitor};
use rustc_ast_pretty::pprust;
use rustc_span::symbol::Ident;
use rustc_span::with_default_session_globals;

// This version doesn't care about getting comments or doc-strings in.
fn fake_print_crate(s: &mut pprust::State<'_>, krate: &ast::Crate) {
    s.print_mod(&krate.module, &krate.attrs)
}

// Change every identifier to "zz".
struct ToZzIdentMutVisitor;

impl MutVisitor for ToZzIdentMutVisitor {
    fn visit_ident(&mut self, ident: &mut Ident) {
        *ident = Ident::from_str("zz");
    }
    fn visit_mac(&mut self, mac: &mut ast::MacCall) {
        mut_visit::noop_visit_mac(mac, self)
    }
}

// Maybe add to `expand.rs`.
macro_rules! assert_pred {
    ($pred:expr, $predname:expr, $a:expr , $b:expr) => {{
        let pred_val = $pred;
        let a_val = $a;
        let b_val = $b;
        if !(pred_val(&a_val, &b_val)) {
            panic!("expected args satisfying {}, got {} and {}", $predname, a_val, b_val);
        }
    }};
}

// Make sure idents get transformed everywhere.
#[test]
fn ident_transformation() {
    with_default_session_globals(|| {
        let mut zz_visitor = ToZzIdentMutVisitor;
        let mut krate =
            string_to_crate("#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}".to_string());
        zz_visitor.visit_crate(&mut krate);
        assert_pred!(
            matches_codepattern,
            "matches_codepattern",
            pprust::to_string(|s| fake_print_crate(s, &krate)),
            "#[zz]mod zz{fn zz(zz:zz,zz:zz){zz!(zz,zz,zz);zz;zz}}".to_string()
        );
    })
}

// Make sure idents get transformed even inside macro defs.
#[test]
fn ident_transformation_in_defs() {
    with_default_session_globals(|| {
        let mut zz_visitor = ToZzIdentMutVisitor;
        let mut krate = string_to_crate(
            "macro_rules! a {(b $c:expr $(d $e:token)f+ => \
             (g $(d $d $e)+))} "
                .to_string(),
        );
        zz_visitor.visit_crate(&mut krate);
        assert_pred!(
            matches_codepattern,
            "matches_codepattern",
            pprust::to_string(|s| fake_print_crate(s, &krate)),
            "macro_rules! zz{(zz$zz:zz$(zz $zz:zz)zz+=>(zz$(zz$zz$zz)+))}".to_string()
        );
    })
}
252
compiler/rustc_expand/src/parse/lexer/tests.rs
Normal file
@@ -0,0 +1,252 @@
use rustc_ast::ast::AttrStyle;
use rustc_ast::token::{self, CommentKind, Token, TokenKind};
use rustc_data_structures::sync::Lrc;
use rustc_errors::{emitter::EmitterWriter, Handler};
use rustc_parse::lexer::StringReader;
use rustc_session::parse::ParseSess;
use rustc_span::source_map::{FilePathMapping, SourceMap};
use rustc_span::symbol::Symbol;
use rustc_span::with_default_session_globals;
use rustc_span::{BytePos, Span};

use std::io;
use std::path::PathBuf;

fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
    let emitter = EmitterWriter::new(
        Box::new(io::sink()),
        Some(sm.clone()),
        false,
        false,
        false,
        None,
        false,
    );
    ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
}

// Creates a string reader for the given string.
fn setup<'a>(sm: &SourceMap, sess: &'a ParseSess, teststr: String) -> StringReader<'a> {
    let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
    StringReader::new(sess, sf, None)
}

#[test]
fn t1() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut string_reader = setup(
            &sm,
            &sh,
            "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string(),
        );
        assert_eq!(string_reader.next_token(), token::Comment);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        let tok1 = string_reader.next_token();
        let tok2 = Token::new(mk_ident("fn"), Span::with_root_ctxt(BytePos(21), BytePos(23)));
        assert_eq!(tok1.kind, tok2.kind);
        assert_eq!(tok1.span, tok2.span);
        assert_eq!(string_reader.next_token(), token::Whitespace);
        // Read another token.
        let tok3 = string_reader.next_token();
        assert_eq!(string_reader.pos(), BytePos(28));
        let tok4 = Token::new(mk_ident("main"), Span::with_root_ctxt(BytePos(24), BytePos(28)));
        assert_eq!(tok3.kind, tok4.kind);
        assert_eq!(tok3.span, tok4.span);

        assert_eq!(string_reader.next_token(), token::OpenDelim(token::Paren));
        assert_eq!(string_reader.pos(), BytePos(29))
    })
}

// Checks that the given reader produces the desired stream
// of tokens (stop checking after exhausting `expected`).
fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
    for expected_tok in &expected {
        assert_eq!(&string_reader.next_token(), expected_tok);
    }
}

// Makes the identifier by looking up the string in the interner.
fn mk_ident(id: &str) -> TokenKind {
    token::Ident(Symbol::intern(id), false)
}

fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
    TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
}

#[test]
fn doublecolon_parsing() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a b".to_string()),
            vec![mk_ident("a"), token::Whitespace, mk_ident("b")],
        );
    })
}

#[test]
fn doublecolon_parsing_2() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a::b".to_string()),
            vec![mk_ident("a"), token::Colon, token::Colon, mk_ident("b")],
        );
    })
}

#[test]
fn doublecolon_parsing_3() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a ::b".to_string()),
            vec![mk_ident("a"), token::Whitespace, token::Colon, token::Colon, mk_ident("b")],
        );
    })
}

#[test]
fn doublecolon_parsing_4() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        check_tokenization(
            setup(&sm, &sh, "a:: b".to_string()),
            vec![mk_ident("a"), token::Colon, token::Colon, token::Whitespace, mk_ident("b")],
        );
    })
}

#[test]
fn character_a() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token(), mk_lit(token::Char, "a", None),);
    })
}

#[test]
fn character_space() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token(), mk_lit(token::Char, " ", None),);
    })
}

#[test]
fn character_escaped() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "'\\n'".to_string()).next_token(),
            mk_lit(token::Char, "\\n", None),
        );
    })
}

#[test]
fn lifetime_name() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "'abc".to_string()).next_token(),
            token::Lifetime(Symbol::intern("'abc")),
        );
    })
}

#[test]
fn raw_string() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        assert_eq!(
            setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None),
        );
    })
}

#[test]
fn literal_suffixes() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        macro_rules! test {
            ($input: expr, $tok_type: ident, $tok_contents: expr) => {{
                assert_eq!(
                    setup(&sm, &sh, format!("{}suffix", $input)).next_token(),
                    mk_lit(token::$tok_type, $tok_contents, Some("suffix")),
                );
                // with a whitespace separator
                assert_eq!(
                    setup(&sm, &sh, format!("{} suffix", $input)).next_token(),
                    mk_lit(token::$tok_type, $tok_contents, None),
                );
            }};
        }

        test!("'a'", Char, "a");
        test!("b'a'", Byte, "a");
        test!("\"a\"", Str, "a");
        test!("b\"a\"", ByteStr, "a");
        test!("1234", Integer, "1234");
        test!("0b101", Integer, "0b101");
        test!("0xABC", Integer, "0xABC");
        test!("1.0", Float, "1.0");
        test!("1.0e10", Float, "1.0e10");

        assert_eq!(
            setup(&sm, &sh, "2us".to_string()).next_token(),
            mk_lit(token::Integer, "2", Some("us")),
        );
        assert_eq!(
            setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "raw", Some("suffix")),
        );
        assert_eq!(
            setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")),
        );
    })
}

#[test]
fn nested_block_comments() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sh, "/* /* */ */'a'".to_string());
        assert_eq!(lexer.next_token(), token::Comment);
        assert_eq!(lexer.next_token(), mk_lit(token::Char, "a", None));
    })
}

#[test]
fn crlf_comments() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut lexer = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());
        let comment = lexer.next_token();
        assert_eq!(comment.kind, token::Comment);
        assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));
        assert_eq!(lexer.next_token(), token::Whitespace);
        assert_eq!(
            lexer.next_token(),
            token::DocComment(CommentKind::Line, AttrStyle::Outer, Symbol::intern(" test"))
        );
    })
}
348
compiler/rustc_expand/src/parse/tests.rs
Normal file
@@ -0,0 +1,348 @@
use crate::tests::{matches_codepattern, string_to_stream, with_error_checking_parse};
|
||||
|
||||
use rustc_ast::ptr::P;
|
||||
use rustc_ast::token::{self, Token};
|
||||
use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree};
|
||||
use rustc_ast::visit;
|
||||
use rustc_ast::{self as ast, PatKind};
|
||||
use rustc_ast_pretty::pprust::item_to_string;
|
||||
use rustc_errors::PResult;
|
||||
use rustc_parse::new_parser_from_source_str;
|
||||
use rustc_session::parse::ParseSess;
|
||||
use rustc_span::source_map::FilePathMapping;
|
||||
use rustc_span::symbol::{kw, sym, Symbol};
|
||||
use rustc_span::with_default_session_globals;
|
||||
use rustc_span::{BytePos, FileName, Pos, Span};
|
||||
|
||||
use std::path::PathBuf;
|
||||
|
||||
fn sess() -> ParseSess {
|
||||
ParseSess::new(FilePathMapping::empty())
|
||||
}
|
||||
|
||||
/// Parses an item.
|
||||
///
|
||||
/// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
|
||||
/// when a syntax error occurred.
|
||||
fn parse_item_from_source_str(
|
||||
name: FileName,
|
||||
source: String,
|
||||
sess: &ParseSess,
|
||||
) -> PResult<'_, Option<P<ast::Item>>> {
|
||||
new_parser_from_source_str(sess, name, source).parse_item()
|
||||
}
|
||||
|
||||
// Produces a `rustc_span::span`.
|
||||
fn sp(a: u32, b: u32) -> Span {
|
||||
Span::with_root_ctxt(BytePos(a), BytePos(b))
|
||||
}
|
||||
|
||||
/// Parses a string, return an expression.
|
||||
fn string_to_expr(source_str: String) -> P<ast::Expr> {
|
||||
with_error_checking_parse(source_str, &sess(), |p| p.parse_expr())
|
||||
}
|
||||
|
||||
/// Parses a string, returns an item.
|
||||
fn string_to_item(source_str: String) -> Option<P<ast::Item>> {
|
||||
with_error_checking_parse(source_str, &sess(), |p| p.parse_item())
|
||||
}
|
||||
|
||||
#[should_panic]
|
||||
#[test]
|
||||
fn bad_path_expr_1() {
|
||||
with_default_session_globals(|| {
|
||||
string_to_expr("::abc::def::return".to_string());
|
||||
})
|
||||
}
|
||||
|
||||
// Checks the token-tree-ization of macros.
|
||||
#[test]
|
||||
fn string_to_tts_macro() {
|
||||
with_default_session_globals(|| {
|
||||
let tts: Vec<_> =
|
||||
string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
|
||||
let tts: &[TokenTree] = &tts[..];
|
||||
|
||||
match tts {
|
||||
[TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }), TokenTree::Token(Token { kind: token::Not, .. }), TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }), TokenTree::Delimited(_, macro_delim, macro_tts)]
|
||||
if name_macro_rules == &kw::MacroRules && name_zip.as_str() == "zip" =>
|
||||
{
|
||||
let tts = ¯o_tts.trees().collect::<Vec<_>>();
|
||||
match &tts[..] {
|
||||
[TokenTree::Delimited(_, first_delim, first_tts), TokenTree::Token(Token { kind: token::FatArrow, .. }), TokenTree::Delimited(_, second_delim, second_tts)]
|
||||
if macro_delim == &token::Paren =>
|
||||
{
|
||||
let tts = &first_tts.trees().collect::<Vec<_>>();
|
||||
match &tts[..] {
|
||||
[TokenTree::Token(Token { kind: token::Dollar, .. }), TokenTree::Token(Token { kind: token::Ident(name, false), .. })]
|
||||
if first_delim == &token::Paren && name.as_str() == "a" => {}
|
||||
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
|
||||
}
|
||||
let tts = &second_tts.trees().collect::<Vec<_>>();
|
||||
match &tts[..] {
|
||||
[TokenTree::Token(Token { kind: token::Dollar, .. }), TokenTree::Token(Token { kind: token::Ident(name, false), .. })]
|
||||
if second_delim == &token::Paren && name.as_str() == "a" => {}
|
||||
_ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
|
||||
}
|
||||
}
|
||||
_ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
|
||||
}
|
||||
}
|
||||
_ => panic!("value: {:?}", tts),
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn string_to_tts_1() {
|
||||
with_default_session_globals(|| {
|
||||
let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
|
||||
|
||||
let expected = TokenStream::new(vec![
|
||||
TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
|
||||
TokenTree::token(token::Ident(Symbol::intern("a"), false), sp(3, 4)).into(),
|
||||
TokenTree::Delimited(
|
||||
DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
|
||||
token::DelimToken::Paren,
|
||||
TokenStream::new(vec![
|
||||
TokenTree::token(token::Ident(Symbol::intern("b"), false), sp(6, 7)).into(),
|
||||
TokenTree::token(token::Colon, sp(8, 9)).into(),
|
||||
TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
|
||||
])
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
TokenTree::Delimited(
|
||||
DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
|
||||
token::DelimToken::Brace,
|
||||
TokenStream::new(vec![
|
||||
TokenTree::token(token::Ident(Symbol::intern("b"), false), sp(17, 18)).into(),
|
||||
TokenTree::token(token::Semi, sp(18, 19)).into(),
|
||||
])
|
||||
.into(),
|
||||
)
|
||||
.into(),
|
||||
]);
|
||||
|
||||
assert_eq!(tts, expected);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_use() {
|
||||
with_default_session_globals(|| {
|
||||
let use_s = "use foo::bar::baz;";
|
||||
let vitem = string_to_item(use_s.to_string()).unwrap();
|
||||
let vitem_s = item_to_string(&vitem);
|
||||
assert_eq!(&vitem_s[..], use_s);
|
||||
|
||||
let use_s = "use foo::bar as baz;";
|
||||
let vitem = string_to_item(use_s.to_string()).unwrap();
|
||||
let vitem_s = item_to_string(&vitem);
|
||||
assert_eq!(&vitem_s[..], use_s);
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_extern_crate() {
|
||||
with_default_session_globals(|| {
|
||||
let ex_s = "extern crate foo;";
|
||||
let vitem = string_to_item(ex_s.to_string()).unwrap();
|
||||
let vitem_s = item_to_string(&vitem);
|
||||
assert_eq!(&vitem_s[..], ex_s);
|
||||
|
||||
let ex_s = "extern crate foo as bar;";
|
||||
let vitem = string_to_item(ex_s.to_string()).unwrap();
|
||||
let vitem_s = item_to_string(&vitem);
|
||||
assert_eq!(&vitem_s[..], ex_s);
|
||||
})
|
||||
}
|
||||
|
||||
fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
|
||||
let item = string_to_item(src.to_string()).unwrap();
|
||||
|
||||
struct PatIdentVisitor {
|
||||
spans: Vec<Span>,
|
||||
}
|
||||
impl<'a> visit::Visitor<'a> for PatIdentVisitor {
|
||||
fn visit_pat(&mut self, p: &'a ast::Pat) {
|
||||
match p.kind {
|
||||
PatKind::Ident(_, ref ident, _) => {
|
||||
self.spans.push(ident.span.clone());
|
||||
}
|
||||
_ => {
|
||||
visit::walk_pat(self, p);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
let mut v = PatIdentVisitor { spans: Vec::new() };
|
||||
visit::walk_item(&mut v, &item);
|
||||
return v.spans;
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn span_of_self_arg_pat_idents_are_correct() {
|
||||
with_default_session_globals(|| {
|
||||
let srcs = [
|
||||
"impl z { fn a (&self, &myarg: i32) {} }",
|
||||
"impl z { fn a (&mut self, &myarg: i32) {} }",
|
||||
"impl z { fn a (&'a self, &myarg: i32) {} }",
|
||||
"impl z { fn a (self, &myarg: i32) {} }",
|
||||
"impl z { fn a (self: Foo, &myarg: i32) {} }",
|
||||
];
|
||||
|
||||
for &src in &srcs {
|
||||
let spans = get_spans_of_pat_idents(src);
|
||||
let (lo, hi) = (spans[0].lo(), spans[0].hi());
|
||||
assert!(
|
||||
"self" == &src[lo.to_usize()..hi.to_usize()],
|
||||
"\"{}\" != \"self\". src=\"{}\"",
|
||||
&src[lo.to_usize()..hi.to_usize()],
|
||||
src
|
||||
)
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn parse_exprs() {
|
||||
with_default_session_globals(|| {
|
||||
// just make sure that they parse....
|
||||
string_to_expr("3 + 4".to_string());
|
||||
string_to_expr("a::z.froob(b,&(987+3))".to_string());
|
||||
})
|
||||
}
|
||||
|
||||
#[test]
|
fn attrs_fix_bug() {
    with_default_session_globals(|| {
        string_to_item(
            "pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
                   -> Result<Box<Writer>, String> {
    #[cfg(windows)]
    fn wb() -> c_int {
      (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
    }

    #[cfg(unix)]
    fn wb() -> c_int { O_WRONLY as c_int }

    let mut fflags: c_int = wb();
}"
            .to_string(),
        );
    })
}

#[test]
fn crlf_doc_comments() {
    with_default_session_globals(|| {
        let sess = sess();

        let name_1 = FileName::Custom("crlf_source_1".to_string());
        let source = "/// doc comment\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name_1, source, &sess).unwrap().unwrap();
        let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
        assert_eq!(doc.as_str(), " doc comment");

        let name_2 = FileName::Custom("crlf_source_2".to_string());
        let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name_2, source, &sess).unwrap().unwrap();
        let docs = item.attrs.iter().filter_map(|at| at.doc_str()).collect::<Vec<_>>();
        let b: &[_] = &[Symbol::intern(" doc comment"), Symbol::intern(" line 2")];
        assert_eq!(&docs[..], b);

        let name_3 = FileName::Custom("clrf_source_3".to_string());
        let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
        let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
        let doc = item.attrs.iter().filter_map(|at| at.doc_str()).next().unwrap();
        assert_eq!(doc.as_str(), " doc comment\n * with CRLF ");
    });
}

#[test]
fn ttdelim_span() {
    fn parse_expr_from_source_str(
        name: FileName,
        source: String,
        sess: &ParseSess,
    ) -> PResult<'_, P<ast::Expr>> {
        new_parser_from_source_str(sess, name, source).parse_expr()
    }

    with_default_session_globals(|| {
        let sess = sess();
        let expr = parse_expr_from_source_str(
            PathBuf::from("foo").into(),
            "foo!( fn main() { body } )".to_string(),
            &sess,
        )
        .unwrap();

        let tts: Vec<_> = match expr.kind {
            ast::ExprKind::MacCall(ref mac) => mac.args.inner_tokens().trees().collect(),
            _ => panic!("not a macro"),
        };

        let span = tts.iter().rev().next().unwrap().span();

        match sess.source_map().span_to_snippet(span) {
            Ok(s) => assert_eq!(&s[..], "{ body }"),
            Err(_) => panic!("could not get snippet"),
        }
    });
}

// This tests that when parsing a string (rather than a file) we don't try
// to read in a file for a module declaration and just parse a stub.
// See `recurse_into_file_modules` in the parser.
#[test]
fn out_of_line_mod() {
    with_default_session_globals(|| {
        let item = parse_item_from_source_str(
            PathBuf::from("foo").into(),
            "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
            &sess(),
        )
        .unwrap()
        .unwrap();

        if let ast::ItemKind::Mod(ref m) = item.kind {
            assert!(m.items.len() == 2);
        } else {
            panic!();
        }
    });
}

#[test]
fn eqmodws() {
    assert_eq!(matches_codepattern("", ""), true);
    assert_eq!(matches_codepattern("", "a"), false);
    assert_eq!(matches_codepattern("a", ""), false);
    assert_eq!(matches_codepattern("a", "a"), true);
    assert_eq!(matches_codepattern("a b", "a \n\t\r b"), true);
    assert_eq!(matches_codepattern("a b ", "a \n\t\r b"), true);
    assert_eq!(matches_codepattern("a b", "a \n\t\r b "), false);
    assert_eq!(matches_codepattern("a b", "a b"), true);
    assert_eq!(matches_codepattern("ab", "a b"), false);
    assert_eq!(matches_codepattern("a b", "ab"), true);
    assert_eq!(matches_codepattern(" a b", "ab"), true);
}

#[test]
fn pattern_whitespace() {
    assert_eq!(matches_codepattern("", "\x0C"), false);
    assert_eq!(matches_codepattern("a b ", "a \u{0085}\n\t\r b"), true);
    assert_eq!(matches_codepattern("a b", "a \u{0085}\n\t\r b "), false);
}

#[test]
fn non_pattern_whitespace() {
    // These have the property 'White_Space' but not 'Pattern_White_Space'
    assert_eq!(matches_codepattern("a b", "a\u{2002}b"), false);
    assert_eq!(matches_codepattern("a b", "a\u{2002}b"), false);
    assert_eq!(matches_codepattern("\u{205F}a b", "ab"), false);
    assert_eq!(matches_codepattern("a \u{3000}b", "ab"), false);
}
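The last two tests above turn on the distinction between Unicode's `White_Space` and `Pattern_White_Space` properties: the lexer only treats the latter as whitespace. A minimal sketch of that check, assuming the Pattern_White_Space set from Unicode UAX #31 (the `is_pattern_whitespace` helper is illustrative and not part of this diff):

// Illustrative helper: the Pattern_White_Space set that the lexer treats as
// whitespace. U+2002, U+205F, and U+3000 have White_Space but not
// Pattern_White_Space, which is why `non_pattern_whitespace` expects mismatches.
fn is_pattern_whitespace(c: char) -> bool {
    matches!(
        c,
        '\u{0009}'..='\u{000D}'
            | '\u{0020}'
            | '\u{0085}'
            | '\u{200E}'
            | '\u{200F}'
            | '\u{2028}'
            | '\u{2029}'
    )
}

fn main() {
    assert!(is_pattern_whitespace('\u{0085}')); // NEL: pattern whitespace
    assert!(!is_pattern_whitespace('\u{2002}')); // EN SPACE: White_Space only
}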
345
compiler/rustc_expand/src/placeholders.rs
Normal file
@@ -0,0 +1,345 @@
use crate::base::ExtCtxt;
use crate::expand::{AstFragment, AstFragmentKind};

use rustc_ast as ast;
use rustc_ast::mut_visit::*;
use rustc_ast::ptr::P;
use rustc_span::source_map::{dummy_spanned, DUMMY_SP};
use rustc_span::symbol::Ident;

use smallvec::{smallvec, SmallVec};

use rustc_data_structures::fx::FxHashMap;

pub fn placeholder(
    kind: AstFragmentKind,
    id: ast::NodeId,
    vis: Option<ast::Visibility>,
) -> AstFragment {
    fn mac_placeholder() -> ast::MacCall {
        ast::MacCall {
            path: ast::Path { span: DUMMY_SP, segments: Vec::new() },
            args: P(ast::MacArgs::Empty),
            prior_type_ascription: None,
        }
    }

    let ident = Ident::invalid();
    let attrs = Vec::new();
    let vis = vis.unwrap_or_else(|| dummy_spanned(ast::VisibilityKind::Inherited));
    let span = DUMMY_SP;
    let expr_placeholder = || {
        P(ast::Expr {
            id,
            span,
            attrs: ast::AttrVec::new(),
            kind: ast::ExprKind::MacCall(mac_placeholder()),
            tokens: None,
        })
    };
    let ty = || P(ast::Ty { id, kind: ast::TyKind::MacCall(mac_placeholder()), span });
    let pat =
        || P(ast::Pat { id, kind: ast::PatKind::MacCall(mac_placeholder()), span, tokens: None });

    match kind {
        AstFragmentKind::Expr => AstFragment::Expr(expr_placeholder()),
        AstFragmentKind::OptExpr => AstFragment::OptExpr(Some(expr_placeholder())),
        AstFragmentKind::Items => AstFragment::Items(smallvec![P(ast::Item {
            id,
            span,
            ident,
            vis,
            attrs,
            kind: ast::ItemKind::MacCall(mac_placeholder()),
            tokens: None,
        })]),
        AstFragmentKind::TraitItems => AstFragment::TraitItems(smallvec![P(ast::AssocItem {
            id,
            span,
            ident,
            vis,
            attrs,
            kind: ast::AssocItemKind::MacCall(mac_placeholder()),
            tokens: None,
        })]),
        AstFragmentKind::ImplItems => AstFragment::ImplItems(smallvec![P(ast::AssocItem {
            id,
            span,
            ident,
            vis,
            attrs,
            kind: ast::AssocItemKind::MacCall(mac_placeholder()),
            tokens: None,
        })]),
        AstFragmentKind::ForeignItems => {
            AstFragment::ForeignItems(smallvec![P(ast::ForeignItem {
                id,
                span,
                ident,
                vis,
                attrs,
                kind: ast::ForeignItemKind::MacCall(mac_placeholder()),
                tokens: None,
            })])
        }
        AstFragmentKind::Pat => AstFragment::Pat(P(ast::Pat {
            id,
            span,
            kind: ast::PatKind::MacCall(mac_placeholder()),
            tokens: None,
        })),
        AstFragmentKind::Ty => {
            AstFragment::Ty(P(ast::Ty { id, span, kind: ast::TyKind::MacCall(mac_placeholder()) }))
        }
        AstFragmentKind::Stmts => AstFragment::Stmts(smallvec![{
            let mac = P((mac_placeholder(), ast::MacStmtStyle::Braces, ast::AttrVec::new()));
            ast::Stmt { id, span, kind: ast::StmtKind::MacCall(mac) }
        }]),
        AstFragmentKind::Arms => AstFragment::Arms(smallvec![ast::Arm {
            attrs: Default::default(),
            body: expr_placeholder(),
            guard: None,
            id,
            pat: pat(),
            span,
            is_placeholder: true,
        }]),
        AstFragmentKind::Fields => AstFragment::Fields(smallvec![ast::Field {
            attrs: Default::default(),
            expr: expr_placeholder(),
            id,
            ident,
            is_shorthand: false,
            span,
            is_placeholder: true,
        }]),
        AstFragmentKind::FieldPats => AstFragment::FieldPats(smallvec![ast::FieldPat {
            attrs: Default::default(),
            id,
            ident,
            is_shorthand: false,
            pat: pat(),
            span,
            is_placeholder: true,
        }]),
        AstFragmentKind::GenericParams => AstFragment::GenericParams(smallvec![{
            ast::GenericParam {
                attrs: Default::default(),
                bounds: Default::default(),
                id,
                ident,
                is_placeholder: true,
                kind: ast::GenericParamKind::Lifetime,
            }
        }]),
        AstFragmentKind::Params => AstFragment::Params(smallvec![ast::Param {
            attrs: Default::default(),
            id,
            pat: pat(),
            span,
            ty: ty(),
            is_placeholder: true,
        }]),
        AstFragmentKind::StructFields => AstFragment::StructFields(smallvec![ast::StructField {
            attrs: Default::default(),
            id,
            ident: None,
            span,
            ty: ty(),
            vis,
            is_placeholder: true,
        }]),
        AstFragmentKind::Variants => AstFragment::Variants(smallvec![ast::Variant {
            attrs: Default::default(),
            data: ast::VariantData::Struct(Default::default(), false),
            disr_expr: None,
            id,
            ident,
            span,
            vis,
            is_placeholder: true,
        }]),
    }
}

pub struct PlaceholderExpander<'a, 'b> {
    expanded_fragments: FxHashMap<ast::NodeId, AstFragment>,
    cx: &'a mut ExtCtxt<'b>,
    monotonic: bool,
}

impl<'a, 'b> PlaceholderExpander<'a, 'b> {
    pub fn new(cx: &'a mut ExtCtxt<'b>, monotonic: bool) -> Self {
        PlaceholderExpander { cx, expanded_fragments: FxHashMap::default(), monotonic }
    }

    pub fn add(&mut self, id: ast::NodeId, mut fragment: AstFragment) {
        fragment.mut_visit_with(self);
        self.expanded_fragments.insert(id, fragment);
    }

    fn remove(&mut self, id: ast::NodeId) -> AstFragment {
        self.expanded_fragments.remove(&id).unwrap()
    }
}

impl<'a, 'b> MutVisitor for PlaceholderExpander<'a, 'b> {
    fn flat_map_arm(&mut self, arm: ast::Arm) -> SmallVec<[ast::Arm; 1]> {
        if arm.is_placeholder {
            self.remove(arm.id).make_arms()
        } else {
            noop_flat_map_arm(arm, self)
        }
    }

    fn flat_map_field(&mut self, field: ast::Field) -> SmallVec<[ast::Field; 1]> {
        if field.is_placeholder {
            self.remove(field.id).make_fields()
        } else {
            noop_flat_map_field(field, self)
        }
    }

    fn flat_map_field_pattern(&mut self, fp: ast::FieldPat) -> SmallVec<[ast::FieldPat; 1]> {
        if fp.is_placeholder {
            self.remove(fp.id).make_field_patterns()
        } else {
            noop_flat_map_field_pattern(fp, self)
        }
    }

    fn flat_map_generic_param(
        &mut self,
        param: ast::GenericParam,
    ) -> SmallVec<[ast::GenericParam; 1]> {
        if param.is_placeholder {
            self.remove(param.id).make_generic_params()
        } else {
            noop_flat_map_generic_param(param, self)
        }
    }

    fn flat_map_param(&mut self, p: ast::Param) -> SmallVec<[ast::Param; 1]> {
        if p.is_placeholder {
            self.remove(p.id).make_params()
        } else {
            noop_flat_map_param(p, self)
        }
    }

    fn flat_map_struct_field(&mut self, sf: ast::StructField) -> SmallVec<[ast::StructField; 1]> {
        if sf.is_placeholder {
            self.remove(sf.id).make_struct_fields()
        } else {
            noop_flat_map_struct_field(sf, self)
        }
    }

    fn flat_map_variant(&mut self, variant: ast::Variant) -> SmallVec<[ast::Variant; 1]> {
        if variant.is_placeholder {
            self.remove(variant.id).make_variants()
        } else {
            noop_flat_map_variant(variant, self)
        }
    }

    fn flat_map_item(&mut self, item: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
        match item.kind {
            ast::ItemKind::MacCall(_) => return self.remove(item.id).make_items(),
            ast::ItemKind::MacroDef(_) => return smallvec![item],
            _ => {}
        }

        noop_flat_map_item(item, self)
    }

    fn flat_map_trait_item(&mut self, item: P<ast::AssocItem>) -> SmallVec<[P<ast::AssocItem>; 1]> {
        match item.kind {
            ast::AssocItemKind::MacCall(_) => self.remove(item.id).make_trait_items(),
            _ => noop_flat_map_assoc_item(item, self),
        }
    }

    fn flat_map_impl_item(&mut self, item: P<ast::AssocItem>) -> SmallVec<[P<ast::AssocItem>; 1]> {
        match item.kind {
            ast::AssocItemKind::MacCall(_) => self.remove(item.id).make_impl_items(),
            _ => noop_flat_map_assoc_item(item, self),
        }
    }

    fn flat_map_foreign_item(
        &mut self,
        item: P<ast::ForeignItem>,
    ) -> SmallVec<[P<ast::ForeignItem>; 1]> {
        match item.kind {
            ast::ForeignItemKind::MacCall(_) => self.remove(item.id).make_foreign_items(),
            _ => noop_flat_map_foreign_item(item, self),
        }
    }

    fn visit_expr(&mut self, expr: &mut P<ast::Expr>) {
        match expr.kind {
            ast::ExprKind::MacCall(_) => *expr = self.remove(expr.id).make_expr(),
            _ => noop_visit_expr(expr, self),
        }
    }

    fn filter_map_expr(&mut self, expr: P<ast::Expr>) -> Option<P<ast::Expr>> {
        match expr.kind {
            ast::ExprKind::MacCall(_) => self.remove(expr.id).make_opt_expr(),
            _ => noop_filter_map_expr(expr, self),
        }
    }

    fn flat_map_stmt(&mut self, stmt: ast::Stmt) -> SmallVec<[ast::Stmt; 1]> {
        let (style, mut stmts) = match stmt.kind {
            ast::StmtKind::MacCall(mac) => (mac.1, self.remove(stmt.id).make_stmts()),
            _ => return noop_flat_map_stmt(stmt, self),
        };

        if style == ast::MacStmtStyle::Semicolon {
            if let Some(stmt) = stmts.pop() {
                stmts.push(stmt.add_trailing_semicolon());
            }
        }

        stmts
    }

    fn visit_pat(&mut self, pat: &mut P<ast::Pat>) {
        match pat.kind {
            ast::PatKind::MacCall(_) => *pat = self.remove(pat.id).make_pat(),
            _ => noop_visit_pat(pat, self),
        }
    }

    fn visit_ty(&mut self, ty: &mut P<ast::Ty>) {
        match ty.kind {
            ast::TyKind::MacCall(_) => *ty = self.remove(ty.id).make_ty(),
            _ => noop_visit_ty(ty, self),
        }
    }

    fn visit_block(&mut self, block: &mut P<ast::Block>) {
        noop_visit_block(block, self);

        for stmt in block.stmts.iter_mut() {
            if self.monotonic {
                assert_eq!(stmt.id, ast::DUMMY_NODE_ID);
                stmt.id = self.cx.resolver.next_node_id();
            }
        }
    }

    fn visit_mod(&mut self, module: &mut ast::Mod) {
        noop_visit_mod(module, self);
        module.items.retain(|item| match item.kind {
            ast::ItemKind::MacCall(_) if !self.cx.ecfg.keep_macs => false, // remove macro definitions
            _ => true,
        });
    }

    fn visit_mac(&mut self, _mac: &mut ast::MacCall) {
        // Do nothing.
    }
}
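This file implements a two-phase scheme: during expansion each not-yet-expanded macro call is replaced by a placeholder AST node carrying only a `NodeId`, and `PlaceholderExpander` later swaps every placeholder for the fragment recorded in `expanded_fragments`. A toy model of that pattern, with a hypothetical `Node` enum standing in for AST fragments (a sketch of the idea, not the compiler API):

use std::collections::HashMap;

#[derive(Clone, Debug, PartialEq)]
enum Node {
    Placeholder(u32), // stands in for a MacCall placeholder with a NodeId
    Lit(i64),         // stands in for any fully expanded fragment
}

// Analogue of PlaceholderExpander: each placeholder id must already have an
// entry in `expanded`, just as `remove` unwraps the map lookup above.
fn expand_placeholders(nodes: Vec<Node>, expanded: &HashMap<u32, Node>) -> Vec<Node> {
    nodes
        .into_iter()
        .map(|n| match n {
            Node::Placeholder(id) => expanded[&id].clone(),
            other => other,
        })
        .collect()
}

fn main() {
    let mut expanded = HashMap::new();
    expanded.insert(0, Node::Lit(42));
    let out = expand_placeholders(vec![Node::Placeholder(0), Node::Lit(1)], &expanded);
    assert_eq!(out, vec![Node::Lit(42), Node::Lit(1)]);
}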
224
compiler/rustc_expand/src/proc_macro.rs
Normal file
@@ -0,0 +1,224 @@
use crate::base::{self, *};
use crate::proc_macro_server;

use rustc_ast::token;
use rustc_ast::tokenstream::{TokenStream, TokenTree};
use rustc_ast::{self as ast, *};
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Applicability, ErrorReported};
use rustc_parse::nt_to_tokenstream;
use rustc_span::symbol::sym;
use rustc_span::{Span, DUMMY_SP};

const EXEC_STRATEGY: pm::bridge::server::SameThread = pm::bridge::server::SameThread;

pub struct BangProcMacro {
    pub client: pm::bridge::client::Client<fn(pm::TokenStream) -> pm::TokenStream>,
}

impl base::ProcMacro for BangProcMacro {
    fn expand<'cx>(
        &self,
        ecx: &'cx mut ExtCtxt<'_>,
        span: Span,
        input: TokenStream,
    ) -> Result<TokenStream, ErrorReported> {
        let server = proc_macro_server::Rustc::new(ecx);
        self.client.run(&EXEC_STRATEGY, server, input).map_err(|e| {
            let mut err = ecx.struct_span_err(span, "proc macro panicked");
            if let Some(s) = e.as_str() {
                err.help(&format!("message: {}", s));
            }
            err.emit();
            ErrorReported
        })
    }
}

pub struct AttrProcMacro {
    pub client: pm::bridge::client::Client<fn(pm::TokenStream, pm::TokenStream) -> pm::TokenStream>,
}

impl base::AttrProcMacro for AttrProcMacro {
    fn expand<'cx>(
        &self,
        ecx: &'cx mut ExtCtxt<'_>,
        span: Span,
        annotation: TokenStream,
        annotated: TokenStream,
    ) -> Result<TokenStream, ErrorReported> {
        let server = proc_macro_server::Rustc::new(ecx);
        self.client.run(&EXEC_STRATEGY, server, annotation, annotated).map_err(|e| {
            let mut err = ecx.struct_span_err(span, "custom attribute panicked");
            if let Some(s) = e.as_str() {
                err.help(&format!("message: {}", s));
            }
            err.emit();
            ErrorReported
        })
    }
}

pub struct ProcMacroDerive {
    pub client: pm::bridge::client::Client<fn(pm::TokenStream) -> pm::TokenStream>,
}

impl MultiItemModifier for ProcMacroDerive {
    fn expand(
        &self,
        ecx: &mut ExtCtxt<'_>,
        span: Span,
        _meta_item: &ast::MetaItem,
        item: Annotatable,
    ) -> ExpandResult<Vec<Annotatable>, Annotatable> {
        let item = match item {
            Annotatable::Arm(..)
            | Annotatable::Field(..)
            | Annotatable::FieldPat(..)
            | Annotatable::GenericParam(..)
            | Annotatable::Param(..)
            | Annotatable::StructField(..)
            | Annotatable::Variant(..) => panic!("unexpected annotatable"),
            Annotatable::Item(item) => item,
            Annotatable::ImplItem(_)
            | Annotatable::TraitItem(_)
            | Annotatable::ForeignItem(_)
            | Annotatable::Stmt(_)
            | Annotatable::Expr(_) => {
                ecx.span_err(
                    span,
                    "proc-macro derives may only be applied to a struct, enum, or union",
                );
                return ExpandResult::Ready(Vec::new());
            }
        };
        match item.kind {
            ItemKind::Struct(..) | ItemKind::Enum(..) | ItemKind::Union(..) => {}
            _ => {
                ecx.span_err(
                    span,
                    "proc-macro derives may only be applied to a struct, enum, or union",
                );
                return ExpandResult::Ready(Vec::new());
            }
        }

        let item = token::NtItem(item);
        let input = if item.pretty_printing_compatibility_hack() {
            TokenTree::token(token::Interpolated(Lrc::new(item)), DUMMY_SP).into()
        } else {
            nt_to_tokenstream(&item, &ecx.sess.parse_sess, DUMMY_SP)
        };

        let server = proc_macro_server::Rustc::new(ecx);
        let stream = match self.client.run(&EXEC_STRATEGY, server, input) {
            Ok(stream) => stream,
            Err(e) => {
                let mut err = ecx.struct_span_err(span, "proc-macro derive panicked");
                if let Some(s) = e.as_str() {
                    err.help(&format!("message: {}", s));
                }
                err.emit();
                return ExpandResult::Ready(vec![]);
            }
        };

        let error_count_before = ecx.sess.parse_sess.span_diagnostic.err_count();
        let mut parser =
            rustc_parse::stream_to_parser(&ecx.sess.parse_sess, stream, Some("proc-macro derive"));
        let mut items = vec![];

        loop {
            match parser.parse_item() {
                Ok(None) => break,
                Ok(Some(item)) => items.push(Annotatable::Item(item)),
                Err(mut err) => {
                    err.emit();
                    break;
                }
            }
        }

        // fail if there have been errors emitted
        if ecx.sess.parse_sess.span_diagnostic.err_count() > error_count_before {
            ecx.struct_span_err(span, "proc-macro derive produced unparseable tokens").emit();
        }

        ExpandResult::Ready(items)
    }
}

crate fn collect_derives(cx: &mut ExtCtxt<'_>, attrs: &mut Vec<ast::Attribute>) -> Vec<ast::Path> {
    let mut result = Vec::new();
    attrs.retain(|attr| {
        if !attr.has_name(sym::derive) {
            return true;
        }

        // 1) First let's ensure that it's a meta item.
        let nmis = match attr.meta_item_list() {
            None => {
                cx.struct_span_err(attr.span, "malformed `derive` attribute input")
                    .span_suggestion(
                        attr.span,
                        "missing traits to be derived",
                        "#[derive(Trait1, Trait2, ...)]".to_owned(),
                        Applicability::HasPlaceholders,
                    )
                    .emit();
                return false;
            }
            Some(x) => x,
        };

        let mut error_reported_filter_map = false;
        let mut error_reported_map = false;
        let traits = nmis
            .into_iter()
            // 2) Moreover, let's ensure we have a path and not `#[derive("foo")]`.
            .filter_map(|nmi| match nmi {
                NestedMetaItem::Literal(lit) => {
                    error_reported_filter_map = true;
                    cx.struct_span_err(lit.span, "expected path to a trait, found literal")
                        .help("for example, write `#[derive(Debug)]` for `Debug`")
                        .emit();
                    None
                }
                NestedMetaItem::MetaItem(mi) => Some(mi),
            })
            // 3) Finally, we only accept `#[derive($path_0, $path_1, ..)]`
            // but not e.g. `#[derive($path_0 = "value", $path_1(abc))]`.
            // In this case we can still at least determine that the user
            // wanted this trait to be derived, so let's keep it.
            .map(|mi| {
                let mut traits_dont_accept = |title, action| {
                    error_reported_map = true;
                    let sp = mi.span.with_lo(mi.path.span.hi());
                    cx.struct_span_err(sp, title)
                        .span_suggestion(
                            sp,
                            action,
                            String::new(),
                            Applicability::MachineApplicable,
                        )
                        .emit();
                };
                match &mi.kind {
                    MetaItemKind::List(..) => traits_dont_accept(
                        "traits in `#[derive(...)]` don't accept arguments",
                        "remove the arguments",
                    ),
                    MetaItemKind::NameValue(..) => traits_dont_accept(
                        "traits in `#[derive(...)]` don't accept values",
                        "remove the value",
                    ),
                    MetaItemKind::Word => {}
                }
                mi.path
            });

        result.extend(traits);
        !error_reported_filter_map && !error_reported_map
    });
    result
}
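The three numbered checks in `collect_derives` map onto concrete shapes of the attribute. Illustrative examples of what passes and what each step would reject (the commented-out forms are the error cases; `S` is just a stand-in item):

#[derive(Debug, Clone)] // accepted: a plain list of trait paths
struct S;

// #[derive]               // rejected by step 1: no meta item list at all
// #[derive("Debug")]      // rejected by step 2: a literal instead of a path
// #[derive(Debug(x))]     // rejected by step 3: traits don't accept arguments
// #[derive(Debug = "y")]  // rejected by step 3: traits don't accept values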
712
compiler/rustc_expand/src/proc_macro_server.rs
Normal file
@@ -0,0 +1,712 @@
use crate::base::ExtCtxt;

use rustc_ast as ast;
use rustc_ast::token;
use rustc_ast::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::Diagnostic;
use rustc_parse::lexer::nfc_normalize;
use rustc_parse::{nt_to_tokenstream, parse_stream_from_source_str};
use rustc_session::parse::ParseSess;
use rustc_span::symbol::{self, kw, sym, Symbol};
use rustc_span::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};

use pm::bridge::{server, TokenTree};
use pm::{Delimiter, Level, LineColumn, Spacing};
use std::ops::Bound;
use std::{ascii, panic};

trait FromInternal<T> {
    fn from_internal(x: T) -> Self;
}

trait ToInternal<T> {
    fn to_internal(self) -> T;
}

impl FromInternal<token::DelimToken> for Delimiter {
    fn from_internal(delim: token::DelimToken) -> Delimiter {
        match delim {
            token::Paren => Delimiter::Parenthesis,
            token::Brace => Delimiter::Brace,
            token::Bracket => Delimiter::Bracket,
            token::NoDelim => Delimiter::None,
        }
    }
}

impl ToInternal<token::DelimToken> for Delimiter {
    fn to_internal(self) -> token::DelimToken {
        match self {
            Delimiter::Parenthesis => token::Paren,
            Delimiter::Brace => token::Brace,
            Delimiter::Bracket => token::Bracket,
            Delimiter::None => token::NoDelim,
        }
    }
}

impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
    for TokenTree<Group, Punct, Ident, Literal>
{
    fn from_internal(
        ((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>),
    ) -> Self {
        use rustc_ast::token::*;

        let joint = is_joint == Joint;
        let Token { kind, span } = match tree {
            tokenstream::TokenTree::Delimited(span, delim, tts) => {
                let delimiter = Delimiter::from_internal(delim);
                return TokenTree::Group(Group { delimiter, stream: tts, span, flatten: false });
            }
            tokenstream::TokenTree::Token(token) => token,
        };

        macro_rules! tt {
            ($ty:ident { $($field:ident $(: $value:expr)*),+ $(,)? }) => (
                TokenTree::$ty(self::$ty {
                    $($field $(: $value)*,)+
                    span,
                })
            );
            ($ty:ident::$method:ident($($value:expr),*)) => (
                TokenTree::$ty(self::$ty::$method($($value,)* span))
            );
        }
        macro_rules! op {
            ($a:expr) => {
                tt!(Punct::new($a, joint))
            };
            ($a:expr, $b:expr) => {{
                stack.push(tt!(Punct::new($b, joint)));
                tt!(Punct::new($a, true))
            }};
            ($a:expr, $b:expr, $c:expr) => {{
                stack.push(tt!(Punct::new($c, joint)));
                stack.push(tt!(Punct::new($b, true)));
                tt!(Punct::new($a, true))
            }};
        }

        match kind {
            Eq => op!('='),
            Lt => op!('<'),
            Le => op!('<', '='),
            EqEq => op!('=', '='),
            Ne => op!('!', '='),
            Ge => op!('>', '='),
            Gt => op!('>'),
            AndAnd => op!('&', '&'),
            OrOr => op!('|', '|'),
            Not => op!('!'),
            Tilde => op!('~'),
            BinOp(Plus) => op!('+'),
            BinOp(Minus) => op!('-'),
            BinOp(Star) => op!('*'),
            BinOp(Slash) => op!('/'),
            BinOp(Percent) => op!('%'),
            BinOp(Caret) => op!('^'),
            BinOp(And) => op!('&'),
            BinOp(Or) => op!('|'),
            BinOp(Shl) => op!('<', '<'),
            BinOp(Shr) => op!('>', '>'),
            BinOpEq(Plus) => op!('+', '='),
            BinOpEq(Minus) => op!('-', '='),
            BinOpEq(Star) => op!('*', '='),
            BinOpEq(Slash) => op!('/', '='),
            BinOpEq(Percent) => op!('%', '='),
            BinOpEq(Caret) => op!('^', '='),
            BinOpEq(And) => op!('&', '='),
            BinOpEq(Or) => op!('|', '='),
            BinOpEq(Shl) => op!('<', '<', '='),
            BinOpEq(Shr) => op!('>', '>', '='),
            At => op!('@'),
            Dot => op!('.'),
            DotDot => op!('.', '.'),
            DotDotDot => op!('.', '.', '.'),
            DotDotEq => op!('.', '.', '='),
            Comma => op!(','),
            Semi => op!(';'),
            Colon => op!(':'),
            ModSep => op!(':', ':'),
            RArrow => op!('-', '>'),
            LArrow => op!('<', '-'),
            FatArrow => op!('=', '>'),
            Pound => op!('#'),
            Dollar => op!('$'),
            Question => op!('?'),
            SingleQuote => op!('\''),

            Ident(name, false) if name == kw::DollarCrate => tt!(Ident::dollar_crate()),
            Ident(name, is_raw) => tt!(Ident::new(sess, name, is_raw)),
            Lifetime(name) => {
                let ident = symbol::Ident::new(name, span).without_first_quote();
                stack.push(tt!(Ident::new(sess, ident.name, false)));
                tt!(Punct::new('\'', true))
            }
            Literal(lit) => tt!(Literal { lit }),
            DocComment(_, attr_style, data) => {
                let mut escaped = String::new();
                for ch in data.as_str().chars() {
                    escaped.extend(ch.escape_debug());
                }
                let stream = vec![
                    Ident(sym::doc, false),
                    Eq,
                    TokenKind::lit(token::Str, Symbol::intern(&escaped), None),
                ]
                .into_iter()
                .map(|kind| tokenstream::TokenTree::token(kind, span))
                .collect();
                stack.push(TokenTree::Group(Group {
                    delimiter: Delimiter::Bracket,
                    stream,
                    span: DelimSpan::from_single(span),
                    flatten: false,
                }));
                if attr_style == ast::AttrStyle::Inner {
                    stack.push(tt!(Punct::new('!', false)));
                }
                tt!(Punct::new('#', false))
            }

            Interpolated(nt) => {
                if let Some((name, is_raw)) =
                    nt.ident_name_compatibility_hack(span, sess.source_map())
                {
                    TokenTree::Ident(Ident::new(sess, name.name, is_raw, name.span))
                } else {
                    let stream = nt_to_tokenstream(&nt, sess, span);
                    TokenTree::Group(Group {
                        delimiter: Delimiter::None,
                        stream,
                        span: DelimSpan::from_single(span),
                        flatten: nt.pretty_printing_compatibility_hack(),
                    })
                }
            }

            OpenDelim(..) | CloseDelim(..) => unreachable!(),
            Whitespace | Comment | Shebang(..) | Unknown(..) | Eof => unreachable!(),
        }
    }
}
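The `DocComment` arm above rewrites a doc comment into the attribute tokens a proc macro actually observes: a `#` punct, a bracketed group containing `doc = "<escaped text>"`, and a leading `!` punct for inner comments. Concretely, an illustrative equivalence (note the value keeps the leading space, matching the `doc_str` assertions in the tests earlier):

/// hello
struct A;

// reaches a derive or attribute macro as the same token stream as:

#[doc = " hello"]
struct B;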
impl ToInternal<TokenStream> for TokenTree<Group, Punct, Ident, Literal> {
    fn to_internal(self) -> TokenStream {
        use rustc_ast::token::*;

        let (ch, joint, span) = match self {
            TokenTree::Punct(Punct { ch, joint, span }) => (ch, joint, span),
            TokenTree::Group(Group { delimiter, stream, span, .. }) => {
                return tokenstream::TokenTree::Delimited(span, delimiter.to_internal(), stream)
                    .into();
            }
            TokenTree::Ident(self::Ident { sym, is_raw, span }) => {
                return tokenstream::TokenTree::token(Ident(sym, is_raw), span).into();
            }
            TokenTree::Literal(self::Literal {
                lit: token::Lit { kind: token::Integer, symbol, suffix },
                span,
            }) if symbol.as_str().starts_with('-') => {
                let minus = BinOp(BinOpToken::Minus);
                let symbol = Symbol::intern(&symbol.as_str()[1..]);
                let integer = TokenKind::lit(token::Integer, symbol, suffix);
                let a = tokenstream::TokenTree::token(minus, span);
                let b = tokenstream::TokenTree::token(integer, span);
                return vec![a, b].into_iter().collect();
            }
            TokenTree::Literal(self::Literal {
                lit: token::Lit { kind: token::Float, symbol, suffix },
                span,
            }) if symbol.as_str().starts_with('-') => {
                let minus = BinOp(BinOpToken::Minus);
                let symbol = Symbol::intern(&symbol.as_str()[1..]);
                let float = TokenKind::lit(token::Float, symbol, suffix);
                let a = tokenstream::TokenTree::token(minus, span);
                let b = tokenstream::TokenTree::token(float, span);
                return vec![a, b].into_iter().collect();
            }
            TokenTree::Literal(self::Literal { lit, span }) => {
                return tokenstream::TokenTree::token(Literal(lit), span).into();
            }
        };

        let kind = match ch {
            '=' => Eq,
            '<' => Lt,
            '>' => Gt,
            '!' => Not,
            '~' => Tilde,
            '+' => BinOp(Plus),
            '-' => BinOp(Minus),
            '*' => BinOp(Star),
            '/' => BinOp(Slash),
            '%' => BinOp(Percent),
            '^' => BinOp(Caret),
            '&' => BinOp(And),
            '|' => BinOp(Or),
            '@' => At,
            '.' => Dot,
            ',' => Comma,
            ';' => Semi,
            ':' => Colon,
            '#' => Pound,
            '$' => Dollar,
            '?' => Question,
            '\'' => SingleQuote,
            _ => unreachable!(),
        };

        let tree = tokenstream::TokenTree::token(kind, span);
        TokenStream::new(vec![(tree, if joint { Joint } else { NonJoint })])
    }
}

impl ToInternal<rustc_errors::Level> for Level {
    fn to_internal(self) -> rustc_errors::Level {
        match self {
            Level::Error => rustc_errors::Level::Error,
            Level::Warning => rustc_errors::Level::Warning,
            Level::Note => rustc_errors::Level::Note,
            Level::Help => rustc_errors::Level::Help,
            _ => unreachable!("unknown proc_macro::Level variant: {:?}", self),
        }
    }
}

pub struct FreeFunctions;

#[derive(Clone)]
pub struct TokenStreamIter {
    cursor: tokenstream::Cursor,
    stack: Vec<TokenTree<Group, Punct, Ident, Literal>>,
}

#[derive(Clone)]
pub struct Group {
    delimiter: Delimiter,
    stream: TokenStream,
    span: DelimSpan,
    /// A hack used to pass AST fragments to attribute and derive macros
    /// as a single nonterminal token instead of a token stream.
    /// FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
    flatten: bool,
}

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Punct {
    ch: char,
    // NB. not using `Spacing` here because it doesn't implement `Hash`.
    joint: bool,
    span: Span,
}

impl Punct {
    fn new(ch: char, joint: bool, span: Span) -> Punct {
        const LEGAL_CHARS: &[char] = &[
            '=', '<', '>', '!', '~', '+', '-', '*', '/', '%', '^', '&', '|', '@', '.', ',', ';',
            ':', '#', '$', '?', '\'',
        ];
        if !LEGAL_CHARS.contains(&ch) {
            panic!("unsupported character `{:?}`", ch)
        }
        Punct { ch, joint, span }
    }
}

#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub struct Ident {
    sym: Symbol,
    is_raw: bool,
    span: Span,
}

impl Ident {
    fn new(sess: &ParseSess, sym: Symbol, is_raw: bool, span: Span) -> Ident {
        let sym = nfc_normalize(&sym.as_str());
        let string = sym.as_str();
        if !rustc_lexer::is_ident(&string) {
            panic!("`{:?}` is not a valid identifier", string)
        }
        if is_raw && !sym.can_be_raw() {
            panic!("`{}` cannot be a raw identifier", string);
        }
        sess.symbol_gallery.insert(sym, span);
        Ident { sym, is_raw, span }
    }
    fn dollar_crate(span: Span) -> Ident {
        // `$crate` is accepted as an ident only if it comes from the compiler.
        Ident { sym: kw::DollarCrate, is_raw: false, span }
    }
}

// FIXME(eddyb) `Literal` should not expose internal `Debug` impls.
#[derive(Clone, Debug)]
pub struct Literal {
    lit: token::Lit,
    span: Span,
}

pub(crate) struct Rustc<'a> {
    sess: &'a ParseSess,
    def_site: Span,
    call_site: Span,
    mixed_site: Span,
    span_debug: bool,
}

impl<'a> Rustc<'a> {
    pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
        let expn_data = cx.current_expansion.id.expn_data();
        Rustc {
            sess: &cx.sess.parse_sess,
            def_site: cx.with_def_site_ctxt(expn_data.def_site),
            call_site: cx.with_call_site_ctxt(expn_data.call_site),
            mixed_site: cx.with_mixed_site_ctxt(expn_data.call_site),
            span_debug: cx.ecfg.span_debug,
        }
    }

    fn lit(&mut self, kind: token::LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Literal {
        Literal { lit: token::Lit::new(kind, symbol, suffix), span: server::Span::call_site(self) }
    }
}

impl server::Types for Rustc<'_> {
    type FreeFunctions = FreeFunctions;
    type TokenStream = TokenStream;
    type TokenStreamBuilder = tokenstream::TokenStreamBuilder;
    type TokenStreamIter = TokenStreamIter;
    type Group = Group;
    type Punct = Punct;
    type Ident = Ident;
    type Literal = Literal;
    type SourceFile = Lrc<SourceFile>;
    type MultiSpan = Vec<Span>;
    type Diagnostic = Diagnostic;
    type Span = Span;
}

impl server::FreeFunctions for Rustc<'_> {
    fn track_env_var(&mut self, var: &str, value: Option<&str>) {
        self.sess.env_depinfo.borrow_mut().insert((Symbol::intern(var), value.map(Symbol::intern)));
    }
}

impl server::TokenStream for Rustc<'_> {
    fn new(&mut self) -> Self::TokenStream {
        TokenStream::default()
    }
    fn is_empty(&mut self, stream: &Self::TokenStream) -> bool {
        stream.is_empty()
    }
    fn from_str(&mut self, src: &str) -> Self::TokenStream {
        parse_stream_from_source_str(
            FileName::proc_macro_source_code(src),
            src.to_string(),
            self.sess,
            Some(self.call_site),
        )
    }
    fn to_string(&mut self, stream: &Self::TokenStream) -> String {
        pprust::tts_to_string(stream)
    }
    fn from_token_tree(
        &mut self,
        tree: TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>,
    ) -> Self::TokenStream {
        tree.to_internal()
    }
    fn into_iter(&mut self, stream: Self::TokenStream) -> Self::TokenStreamIter {
        TokenStreamIter { cursor: stream.trees(), stack: vec![] }
    }
}

impl server::TokenStreamBuilder for Rustc<'_> {
    fn new(&mut self) -> Self::TokenStreamBuilder {
        tokenstream::TokenStreamBuilder::new()
    }
    fn push(&mut self, builder: &mut Self::TokenStreamBuilder, stream: Self::TokenStream) {
        builder.push(stream);
    }
    fn build(&mut self, builder: Self::TokenStreamBuilder) -> Self::TokenStream {
        builder.build()
    }
}

impl server::TokenStreamIter for Rustc<'_> {
    fn next(
        &mut self,
        iter: &mut Self::TokenStreamIter,
    ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
        loop {
            let tree = iter.stack.pop().or_else(|| {
                let next = iter.cursor.next_with_joint()?;
                Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
            })?;
            // A hack used to pass AST fragments to attribute and derive macros
            // as a single nonterminal token instead of a token stream.
            // Such a token needs to be "unwrapped" and not represented as a delimited group.
            // FIXME: It needs to be removed, but there are some compatibility issues (see #73345).
            if let TokenTree::Group(ref group) = tree {
                if group.flatten {
                    iter.cursor.append(group.stream.clone());
                    continue;
                }
            }
            return Some(tree);
        }
    }
}

impl server::Group for Rustc<'_> {
    fn new(&mut self, delimiter: Delimiter, stream: Self::TokenStream) -> Self::Group {
        Group {
            delimiter,
            stream,
            span: DelimSpan::from_single(server::Span::call_site(self)),
            flatten: false,
        }
    }
    fn delimiter(&mut self, group: &Self::Group) -> Delimiter {
        group.delimiter
    }
    fn stream(&mut self, group: &Self::Group) -> Self::TokenStream {
        group.stream.clone()
    }
    fn span(&mut self, group: &Self::Group) -> Self::Span {
        group.span.entire()
    }
    fn span_open(&mut self, group: &Self::Group) -> Self::Span {
        group.span.open
    }
    fn span_close(&mut self, group: &Self::Group) -> Self::Span {
        group.span.close
    }
    fn set_span(&mut self, group: &mut Self::Group, span: Self::Span) {
        group.span = DelimSpan::from_single(span);
    }
}

impl server::Punct for Rustc<'_> {
    fn new(&mut self, ch: char, spacing: Spacing) -> Self::Punct {
        Punct::new(ch, spacing == Spacing::Joint, server::Span::call_site(self))
    }
    fn as_char(&mut self, punct: Self::Punct) -> char {
        punct.ch
    }
    fn spacing(&mut self, punct: Self::Punct) -> Spacing {
        if punct.joint { Spacing::Joint } else { Spacing::Alone }
    }
    fn span(&mut self, punct: Self::Punct) -> Self::Span {
        punct.span
    }
    fn with_span(&mut self, punct: Self::Punct, span: Self::Span) -> Self::Punct {
        Punct { span, ..punct }
    }
}

impl server::Ident for Rustc<'_> {
    fn new(&mut self, string: &str, span: Self::Span, is_raw: bool) -> Self::Ident {
        Ident::new(self.sess, Symbol::intern(string), is_raw, span)
    }
    fn span(&mut self, ident: Self::Ident) -> Self::Span {
        ident.span
    }
    fn with_span(&mut self, ident: Self::Ident, span: Self::Span) -> Self::Ident {
        Ident { span, ..ident }
    }
}

impl server::Literal for Rustc<'_> {
    fn debug_kind(&mut self, literal: &Self::Literal) -> String {
        format!("{:?}", literal.lit.kind)
    }
    fn symbol(&mut self, literal: &Self::Literal) -> String {
        literal.lit.symbol.to_string()
    }
    fn suffix(&mut self, literal: &Self::Literal) -> Option<String> {
        literal.lit.suffix.as_ref().map(Symbol::to_string)
    }
    fn integer(&mut self, n: &str) -> Self::Literal {
        self.lit(token::Integer, Symbol::intern(n), None)
    }
    fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
        self.lit(token::Integer, Symbol::intern(n), Some(Symbol::intern(kind)))
    }
    fn float(&mut self, n: &str) -> Self::Literal {
        self.lit(token::Float, Symbol::intern(n), None)
    }
    fn f32(&mut self, n: &str) -> Self::Literal {
        self.lit(token::Float, Symbol::intern(n), Some(sym::f32))
    }
    fn f64(&mut self, n: &str) -> Self::Literal {
        self.lit(token::Float, Symbol::intern(n), Some(sym::f64))
    }
    fn string(&mut self, string: &str) -> Self::Literal {
        let mut escaped = String::new();
        for ch in string.chars() {
            escaped.extend(ch.escape_debug());
        }
        self.lit(token::Str, Symbol::intern(&escaped), None)
    }
    fn character(&mut self, ch: char) -> Self::Literal {
        let mut escaped = String::new();
        escaped.extend(ch.escape_unicode());
        self.lit(token::Char, Symbol::intern(&escaped), None)
    }
    fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
        let string = bytes
            .iter()
            .cloned()
            .flat_map(ascii::escape_default)
            .map(Into::<char>::into)
            .collect::<String>();
        self.lit(token::ByteStr, Symbol::intern(&string), None)
    }
    fn span(&mut self, literal: &Self::Literal) -> Self::Span {
        literal.span
    }
    fn set_span(&mut self, literal: &mut Self::Literal, span: Self::Span) {
        literal.span = span;
    }
    fn subspan(
        &mut self,
        literal: &Self::Literal,
        start: Bound<usize>,
        end: Bound<usize>,
    ) -> Option<Self::Span> {
        let span = literal.span;
        let length = span.hi().to_usize() - span.lo().to_usize();

        let start = match start {
            Bound::Included(lo) => lo,
            Bound::Excluded(lo) => lo + 1,
            Bound::Unbounded => 0,
        };

        let end = match end {
            Bound::Included(hi) => hi + 1,
            Bound::Excluded(hi) => hi,
            Bound::Unbounded => length,
        };

        // Bounds check the values, preventing addition overflow and OOB spans.
        if start > u32::MAX as usize
            || end > u32::MAX as usize
            || (u32::MAX - start as u32) < span.lo().to_u32()
            || (u32::MAX - end as u32) < span.lo().to_u32()
            || start >= end
            || end > length
        {
            return None;
        }

        let new_lo = span.lo() + BytePos::from_usize(start);
        let new_hi = span.lo() + BytePos::from_usize(end);
        Some(span.with_lo(new_lo).with_hi(new_hi))
    }
}
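`subspan` first converts the two `Bound`s into a half-open byte range and only then bounds-checks it. The same conversion on plain offsets, as a standalone sketch (the `resolve` helper is hypothetical and not the compiler API; it mirrors the inverted-range and out-of-range checks above):

use std::ops::Bound;

// Convert (start, end) bounds into a half-open range [lo, hi), rejecting
// inverted or out-of-range results, as `subspan` does before shifting the span.
fn resolve(start: Bound<usize>, end: Bound<usize>, len: usize) -> Option<(usize, usize)> {
    let lo = match start {
        Bound::Included(lo) => lo,
        Bound::Excluded(lo) => lo.checked_add(1)?,
        Bound::Unbounded => 0,
    };
    let hi = match end {
        Bound::Included(hi) => hi.checked_add(1)?,
        Bound::Excluded(hi) => hi,
        Bound::Unbounded => len,
    };
    if lo >= hi || hi > len { None } else { Some((lo, hi)) }
}

fn main() {
    assert_eq!(resolve(Bound::Included(1), Bound::Excluded(3), 5), Some((1, 3)));
    assert_eq!(resolve(Bound::Unbounded, Bound::Unbounded, 5), Some((0, 5)));
    assert_eq!(resolve(Bound::Included(3), Bound::Included(1), 5), None); // inverted
    assert_eq!(resolve(Bound::Included(0), Bound::Excluded(9), 5), None); // out of range
}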
impl server::SourceFile for Rustc<'_> {
    fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
        Lrc::ptr_eq(file1, file2)
    }
    fn path(&mut self, file: &Self::SourceFile) -> String {
        match file.name {
            FileName::Real(ref name) => name
                .local_path()
                .to_str()
                .expect("non-UTF8 file path in `proc_macro::SourceFile::path`")
                .to_string(),
            _ => file.name.to_string(),
        }
    }
    fn is_real(&mut self, file: &Self::SourceFile) -> bool {
        file.is_real_file()
    }
}

impl server::MultiSpan for Rustc<'_> {
    fn new(&mut self) -> Self::MultiSpan {
        vec![]
    }
    fn push(&mut self, spans: &mut Self::MultiSpan, span: Self::Span) {
        spans.push(span)
    }
}

impl server::Diagnostic for Rustc<'_> {
    fn new(&mut self, level: Level, msg: &str, spans: Self::MultiSpan) -> Self::Diagnostic {
        let mut diag = Diagnostic::new(level.to_internal(), msg);
        diag.set_span(MultiSpan::from_spans(spans));
        diag
    }
    fn sub(
        &mut self,
        diag: &mut Self::Diagnostic,
        level: Level,
        msg: &str,
        spans: Self::MultiSpan,
    ) {
        diag.sub(level.to_internal(), msg, MultiSpan::from_spans(spans), None);
    }
    fn emit(&mut self, diag: Self::Diagnostic) {
        self.sess.span_diagnostic.emit_diagnostic(&diag);
    }
}

impl server::Span for Rustc<'_> {
    fn debug(&mut self, span: Self::Span) -> String {
        if self.span_debug {
            format!("{:?}", span)
        } else {
            format!("{:?} bytes({}..{})", span.ctxt(), span.lo().0, span.hi().0)
        }
    }
    fn def_site(&mut self) -> Self::Span {
        self.def_site
    }
    fn call_site(&mut self) -> Self::Span {
        self.call_site
    }
    fn mixed_site(&mut self) -> Self::Span {
        self.mixed_site
    }
    fn source_file(&mut self, span: Self::Span) -> Self::SourceFile {
        self.sess.source_map().lookup_char_pos(span.lo()).file
    }
    fn parent(&mut self, span: Self::Span) -> Option<Self::Span> {
        span.parent()
    }
    fn source(&mut self, span: Self::Span) -> Self::Span {
        span.source_callsite()
    }
    fn start(&mut self, span: Self::Span) -> LineColumn {
        let loc = self.sess.source_map().lookup_char_pos(span.lo());
        LineColumn { line: loc.line, column: loc.col.to_usize() }
    }
    fn end(&mut self, span: Self::Span) -> LineColumn {
        let loc = self.sess.source_map().lookup_char_pos(span.hi());
        LineColumn { line: loc.line, column: loc.col.to_usize() }
    }
    fn join(&mut self, first: Self::Span, second: Self::Span) -> Option<Self::Span> {
        let self_loc = self.sess.source_map().lookup_char_pos(first.lo());
        let other_loc = self.sess.source_map().lookup_char_pos(second.lo());

        // Spans from different files cannot be joined.
        if self_loc.file.name != other_loc.file.name {
            return None;
        }

        Some(first.to(second))
    }
    fn resolved_at(&mut self, span: Self::Span, at: Self::Span) -> Self::Span {
        span.with_ctxt(at.ctxt())
    }
    fn source_text(&mut self, span: Self::Span) -> Option<String> {
        self.sess.source_map().span_to_snippet(span).ok()
    }
}
1012
compiler/rustc_expand/src/tests.rs
Normal file
File diff suppressed because it is too large
Load diff
109
compiler/rustc_expand/src/tokenstream/tests.rs
Normal file
@@ -0,0 +1,109 @@
use crate::tests::string_to_stream;

use rustc_ast::token;
use rustc_ast::tokenstream::{TokenStream, TokenStreamBuilder, TokenTree};
use rustc_span::with_default_session_globals;
use rustc_span::{BytePos, Span, Symbol};
use smallvec::smallvec;

fn string_to_ts(string: &str) -> TokenStream {
    string_to_stream(string.to_owned())
}

fn sp(a: u32, b: u32) -> Span {
    Span::with_root_ctxt(BytePos(a), BytePos(b))
}

#[test]
fn test_concat() {
    with_default_session_globals(|| {
        let test_res = string_to_ts("foo::bar::baz");
        let test_fst = string_to_ts("foo::bar");
        let test_snd = string_to_ts("::baz");
        let eq_res = TokenStream::from_streams(smallvec![test_fst, test_snd]);
        assert_eq!(test_res.trees().count(), 5);
        assert_eq!(eq_res.trees().count(), 5);
        assert_eq!(test_res.eq_unspanned(&eq_res), true);
    })
}

#[test]
fn test_to_from_bijection() {
    with_default_session_globals(|| {
        let test_start = string_to_ts("foo::bar(baz)");
        let test_end = test_start.trees().collect();
        assert_eq!(test_start, test_end)
    })
}

#[test]
fn test_eq_0() {
    with_default_session_globals(|| {
        let test_res = string_to_ts("foo");
        let test_eqs = string_to_ts("foo");
        assert_eq!(test_res, test_eqs)
    })
}

#[test]
fn test_eq_1() {
    with_default_session_globals(|| {
        let test_res = string_to_ts("::bar::baz");
        let test_eqs = string_to_ts("::bar::baz");
        assert_eq!(test_res, test_eqs)
    })
}

#[test]
fn test_eq_3() {
    with_default_session_globals(|| {
        let test_res = string_to_ts("");
        let test_eqs = string_to_ts("");
        assert_eq!(test_res, test_eqs)
    })
}

#[test]
fn test_diseq_0() {
    with_default_session_globals(|| {
        let test_res = string_to_ts("::bar::baz");
        let test_eqs = string_to_ts("bar::baz");
        assert_eq!(test_res == test_eqs, false)
    })
}

#[test]
fn test_diseq_1() {
    with_default_session_globals(|| {
        let test_res = string_to_ts("(bar,baz)");
        let test_eqs = string_to_ts("bar,baz");
        assert_eq!(test_res == test_eqs, false)
    })
}

#[test]
fn test_is_empty() {
    with_default_session_globals(|| {
        let test0: TokenStream = Vec::<TokenTree>::new().into_iter().collect();
        let test1: TokenStream =
            TokenTree::token(token::Ident(Symbol::intern("a"), false), sp(0, 1)).into();
        let test2 = string_to_ts("foo(bar::baz)");

        assert_eq!(test0.is_empty(), true);
        assert_eq!(test1.is_empty(), false);
        assert_eq!(test2.is_empty(), false);
    })
}

#[test]
fn test_dotdotdot() {
    with_default_session_globals(|| {
        let mut builder = TokenStreamBuilder::new();
        builder.push(TokenTree::token(token::Dot, sp(0, 1)).joint());
        builder.push(TokenTree::token(token::Dot, sp(1, 2)).joint());
        builder.push(TokenTree::token(token::Dot, sp(2, 3)));
        let stream = builder.build();
        assert!(stream.eq_unspanned(&string_to_ts("...")));
        assert_eq!(stream.trees().count(), 1);
    })
}
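`test_dotdotdot` exercises token gluing: pushing `.` tokens marked `joint()` makes the builder merge adjacent tokens, so three dots become the single `...` tree the assertion counts. A toy model of that merge rule on plain strings (illustrative only; `Tok` and `glue` are hypothetical, and the real builder only glues specific token kinds, not arbitrary text):

#[derive(Debug, PartialEq)]
struct Tok {
    text: String,
    joint: bool, // "joint" means: may be merged with the following token
}

fn glue(toks: Vec<Tok>) -> Vec<Tok> {
    let mut out: Vec<Tok> = Vec::new();
    for t in toks {
        // Merge into the previous token while it is marked joint.
        let merge = matches!(out.last(), Some(prev) if prev.joint);
        if merge {
            let prev = out.last_mut().unwrap();
            prev.text.push_str(&t.text);
            prev.joint = t.joint;
        } else {
            out.push(t);
        }
    }
    out
}

fn main() {
    let dots = vec![
        Tok { text: ".".into(), joint: true },
        Tok { text: ".".into(), joint: true },
        Tok { text: ".".into(), joint: false },
    ];
    let glued = glue(dots);
    assert_eq!(glued.len(), 1);
    assert_eq!(glued[0].text, "...");
}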