Auto merge of #28642 - petrochenkov:name3, r=nrc

This PR removes the remaining stray `Ident`s outside of libsyntax and performs general cleanup.
In particular, interfaces of `Name` and `Ident` are tidied up, `Name`s and `Ident`s being small `Copy` aggregates are always passed to functions by value, and `Ident`s are never used as keys in maps, because `Ident` comparisons are tricky.

Although this PR closes https://github.com/rust-lang/rust/issues/6993, there's still work related to it:
- `Name` can be made `NonZero` to compress numerous `Option<Name>`s and `Option<Ident>`s but it requires const unsafe functions.
- Implementation of `PartialEq` on `Ident` should be eliminated and replaced with explicit hygienic, non-hygienic or member-wise comparisons.
- Finally, large parts of AST can potentially be converted to `Name`s in the same way as HIR to clearly separate identifiers used in hygienic and non-hygienic contexts.

r? @nrc
This commit is contained in:
bors 2015-09-26 14:48:56 +00:00
commit 2e88c36ebc
70 changed files with 337 additions and 401 deletions

View file

@ -35,7 +35,7 @@ use syntax::parse::lexer::TokenAndSpan;
fn parse_token_list(file: &str) -> HashMap<String, token::Token> { fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
fn id() -> token::Token { fn id() -> token::Token {
token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, token::Plain) token::Ident(ast::Ident::with_empty_ctxt(Name(0))), token::Plain)
} }
let mut res = HashMap::new(); let mut res = HashMap::new();
@ -75,7 +75,7 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
"RPAREN" => token::CloseDelim(token::Paren), "RPAREN" => token::CloseDelim(token::Paren),
"SLASH" => token::BinOp(token::Slash), "SLASH" => token::BinOp(token::Slash),
"COMMA" => token::Comma, "COMMA" => token::Comma,
"LIFETIME" => token::Lifetime(ast::Ident { name: Name(0), ctxt: 0 }), "LIFETIME" => token::Lifetime(ast::Ident::with_empty_ctxt(Name(0))),
"CARET" => token::BinOp(token::Caret), "CARET" => token::BinOp(token::Caret),
"TILDE" => token::Tilde, "TILDE" => token::Tilde,
"IDENT" => id(), "IDENT" => id(),
@ -208,9 +208,9 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
token::Literal(token::ByteStr(..), n) => token::Literal(token::ByteStr(nm), n), token::Literal(token::ByteStr(..), n) => token::Literal(token::ByteStr(nm), n),
token::Literal(token::ByteStrRaw(..), n) => token::Literal(token::ByteStrRaw(fix(content), token::Literal(token::ByteStrRaw(..), n) => token::Literal(token::ByteStrRaw(fix(content),
count(content)), n), count(content)), n),
token::Ident(..) => token::Ident(ast::Ident { name: nm, ctxt: 0 }, token::Ident(..) => token::Ident(ast::Ident::with_empty_ctxt(nm)),
token::ModName), token::ModName),
token::Lifetime(..) => token::Lifetime(ast::Ident { name: nm, ctxt: 0 }), token::Lifetime(..) => token::Lifetime(ast::Ident::with_empty_ctxt(nm)),
ref t => t.clone() ref t => t.clone()
}; };

View file

@ -482,7 +482,7 @@ impl<'a> CrateReader<'a> {
let span = mk_sp(lo, p.last_span.hi); let span = mk_sp(lo, p.last_span.hi);
p.abort_if_errors(); p.abort_if_errors();
macros.push(ast::MacroDef { macros.push(ast::MacroDef {
ident: name.ident(), ident: ast::Ident::with_empty_ctxt(name),
attrs: attrs, attrs: attrs,
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
span: span, span: span,

View file

@ -520,9 +520,9 @@ fn encode_info_for_mod(ecx: &EncodeContext,
}); });
if let hir::ItemImpl(..) = item.node { if let hir::ItemImpl(..) = item.node {
let (ident, did) = (item.name, item.id); let (name, did) = (item.name, item.id);
debug!("(encoding info for module) ... encoding impl {} ({}/{})", debug!("(encoding info for module) ... encoding impl {} ({}/{})",
ident, name,
did, ecx.tcx.map.node_to_string(did)); did, ecx.tcx.map.node_to_string(did));
rbml_w.wr_tagged_u64(tag_mod_impl, def_to_u64(DefId::local(did))); rbml_w.wr_tagged_u64(tag_mod_impl, def_to_u64(DefId::local(did)));

View file

@ -184,8 +184,8 @@ impl<'a,'tcx> TyDecoder<'a,'tcx> {
} }
'[' => { '[' => {
let def = self.parse_def(RegionParameter); let def = self.parse_def(RegionParameter);
let ident = token::str_to_ident(&self.parse_str(']')); let name = token::intern(&self.parse_str(']'));
ty::BrNamed(def, ident.name) ty::BrNamed(def, name)
} }
'f' => { 'f' => {
let id = self.parse_u32(); let id = self.parse_u32();
@ -219,12 +219,12 @@ impl<'a,'tcx> TyDecoder<'a,'tcx> {
assert_eq!(self.next(), '|'); assert_eq!(self.next(), '|');
let index = self.parse_u32(); let index = self.parse_u32();
assert_eq!(self.next(), '|'); assert_eq!(self.next(), '|');
let nm = token::str_to_ident(&self.parse_str(']')); let name = token::intern(&self.parse_str(']'));
ty::ReEarlyBound(ty::EarlyBoundRegion { ty::ReEarlyBound(ty::EarlyBoundRegion {
param_id: node_id, param_id: node_id,
space: space, space: space,
index: index, index: index,
name: nm.name name: name
}) })
} }
'f' => { 'f' => {
@ -598,7 +598,7 @@ impl<'a,'tcx> TyDecoder<'a,'tcx> {
ty::ProjectionPredicate { ty::ProjectionPredicate {
projection_ty: ty::ProjectionTy { projection_ty: ty::ProjectionTy {
trait_ref: self.parse_trait_ref(), trait_ref: self.parse_trait_ref(),
item_name: token::str_to_ident(&self.parse_str('|')).name, item_name: token::intern(&self.parse_str('|')),
}, },
ty: self.parse_ty(), ty: self.parse_ty(),
} }

View file

@ -284,7 +284,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
} }
hir::ExprBreak(label) => { hir::ExprBreak(label) => {
let loop_scope = self.find_scope(expr, label.map(|l| l.node)); let loop_scope = self.find_scope(expr, label.map(|l| l.node.name));
let b = self.add_ast_node(expr.id, &[pred]); let b = self.add_ast_node(expr.id, &[pred]);
self.add_exiting_edge(expr, b, self.add_exiting_edge(expr, b,
loop_scope, loop_scope.break_index); loop_scope, loop_scope.break_index);
@ -292,7 +292,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
} }
hir::ExprAgain(label) => { hir::ExprAgain(label) => {
let loop_scope = self.find_scope(expr, label.map(|l| l.node)); let loop_scope = self.find_scope(expr, label.map(|l| l.node.name));
let a = self.add_ast_node(expr.id, &[pred]); let a = self.add_ast_node(expr.id, &[pred]);
self.add_exiting_edge(expr, a, self.add_exiting_edge(expr, a,
loop_scope, loop_scope.continue_index); loop_scope, loop_scope.continue_index);
@ -585,7 +585,7 @@ impl<'a, 'tcx> CFGBuilder<'a, 'tcx> {
fn find_scope(&self, fn find_scope(&self,
expr: &hir::Expr, expr: &hir::Expr,
label: Option<ast::Ident>) -> LoopScope { label: Option<ast::Name>) -> LoopScope {
if label.is_none() { if label.is_none() {
return *self.loop_scopes.last().unwrap(); return *self.loop_scopes.last().unwrap();
} }

View file

@ -114,7 +114,7 @@ impl<'a, 'tcx, O:DataFlowOperator> pprust::PpAnn for DataFlowContext<'a, 'tcx, O
ps: &mut pprust::State, ps: &mut pprust::State,
node: pprust::AnnNode) -> io::Result<()> { node: pprust::AnnNode) -> io::Result<()> {
let id = match node { let id = match node {
pprust::NodeIdent(_) | pprust::NodeName(_) => 0, pprust::NodeName(_) => 0,
pprust::NodeExpr(expr) => expr.id, pprust::NodeExpr(expr) => expr.id,
pprust::NodeBlock(blk) => blk.id, pprust::NodeBlock(blk) => blk.id,
pprust::NodeItem(_) | pprust::NodeSubItem(_) => 0, pprust::NodeItem(_) | pprust::NodeSubItem(_) => 0,

View file

@ -85,7 +85,7 @@ fn entry_point_type(item: &Item, depth: usize) -> EntryPointType {
EntryPointType::Start EntryPointType::Start
} else if attr::contains_name(&item.attrs, "main") { } else if attr::contains_name(&item.attrs, "main") {
EntryPointType::MainAttr EntryPointType::MainAttr
} else if item.name == "main" { } else if item.name.as_str() == "main" {
if depth == 1 { if depth == 1 {
// This is a top-level function so can be 'main' // This is a top-level function so can be 'main'
EntryPointType::MainNamed EntryPointType::MainNamed

View file

@ -1015,12 +1015,12 @@ impl<'a, 'tcx> ErrorReporting<'tcx> for InferCtxt<'a, 'tcx> {
}, },
None => None None => None
}; };
let (fn_decl, generics, unsafety, constness, ident, expl_self, span) let (fn_decl, generics, unsafety, constness, name, expl_self, span)
= node_inner.expect("expect item fn"); = node_inner.expect("expect item fn");
let rebuilder = Rebuilder::new(self.tcx, fn_decl, expl_self, let rebuilder = Rebuilder::new(self.tcx, fn_decl, expl_self,
generics, same_regions, &life_giver); generics, same_regions, &life_giver);
let (fn_decl, expl_self, generics) = rebuilder.rebuild(); let (fn_decl, expl_self, generics) = rebuilder.rebuild();
self.give_expl_lifetime_param(&fn_decl, unsafety, constness, ident, self.give_expl_lifetime_param(&fn_decl, unsafety, constness, name,
expl_self.as_ref(), &generics, span); expl_self.as_ref(), &generics, span);
} }
} }
@ -1127,7 +1127,7 @@ impl<'a, 'tcx> Rebuilder<'a, 'tcx> {
names.push(lt_name); names.push(lt_name);
} }
names.sort(); names.sort();
let name = token::str_to_ident(&names[0]).name; let name = token::intern(&names[0]);
return (name_to_dummy_lifetime(name), Kept); return (name_to_dummy_lifetime(name), Kept);
} }
return (self.life_giver.give_lifetime(), Fresh); return (self.life_giver.give_lifetime(), Fresh);
@ -1938,8 +1938,7 @@ impl LifeGiver {
let mut s = String::from("'"); let mut s = String::from("'");
s.push_str(&num_to_string(self.counter.get())); s.push_str(&num_to_string(self.counter.get()));
if !self.taken.contains(&s) { if !self.taken.contains(&s) {
lifetime = name_to_dummy_lifetime( lifetime = name_to_dummy_lifetime(token::intern(&s[..]));
token::str_to_ident(&s[..]).name);
self.generated.borrow_mut().push(lifetime); self.generated.borrow_mut().push(lifetime);
break; break;
} }

View file

@ -55,7 +55,7 @@ impl<'a, 'tcx> IntrinsicCheckingVisitor<'a, 'tcx> {
ty::TyBareFn(_, ref bfty) => bfty.abi == RustIntrinsic, ty::TyBareFn(_, ref bfty) => bfty.abi == RustIntrinsic,
_ => return false _ => return false
}; };
intrinsic && self.tcx.item_name(def_id) == "transmute" intrinsic && self.tcx.item_name(def_id).as_str() == "transmute"
} }
fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>, id: ast::NodeId) { fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>, id: ast::NodeId) {

View file

@ -383,7 +383,7 @@ fn visit_fn(ir: &mut IrMaps,
&*arg.pat, &*arg.pat,
|_bm, arg_id, _x, path1| { |_bm, arg_id, _x, path1| {
debug!("adding argument {}", arg_id); debug!("adding argument {}", arg_id);
let name = path1.node.name; let name = path1.node;
fn_maps.add_variable(Arg(arg_id, name)); fn_maps.add_variable(Arg(arg_id, name));
}) })
}; };
@ -416,7 +416,7 @@ fn visit_fn(ir: &mut IrMaps,
fn visit_local(ir: &mut IrMaps, local: &hir::Local) { fn visit_local(ir: &mut IrMaps, local: &hir::Local) {
pat_util::pat_bindings(&ir.tcx.def_map, &*local.pat, |_, p_id, sp, path1| { pat_util::pat_bindings(&ir.tcx.def_map, &*local.pat, |_, p_id, sp, path1| {
debug!("adding local variable {}", p_id); debug!("adding local variable {}", p_id);
let name = path1.node.name; let name = path1.node;
ir.add_live_node_for_node(p_id, VarDefNode(sp)); ir.add_live_node_for_node(p_id, VarDefNode(sp));
ir.add_variable(Local(LocalInfo { ir.add_variable(Local(LocalInfo {
id: p_id, id: p_id,
@ -431,7 +431,7 @@ fn visit_arm(ir: &mut IrMaps, arm: &hir::Arm) {
pat_util::pat_bindings(&ir.tcx.def_map, &**pat, |bm, p_id, sp, path1| { pat_util::pat_bindings(&ir.tcx.def_map, &**pat, |bm, p_id, sp, path1| {
debug!("adding local variable {} from match with bm {:?}", debug!("adding local variable {} from match with bm {:?}",
p_id, bm); p_id, bm);
let name = path1.node.name; let name = path1.node;
ir.add_live_node_for_node(p_id, VarDefNode(sp)); ir.add_live_node_for_node(p_id, VarDefNode(sp));
ir.add_variable(Local(LocalInfo { ir.add_variable(Local(LocalInfo {
id: p_id, id: p_id,
@ -688,7 +688,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
} }
fn find_loop_scope(&self, fn find_loop_scope(&self,
opt_label: Option<ast::Ident>, opt_label: Option<ast::Name>,
id: NodeId, id: NodeId,
sp: Span) sp: Span)
-> NodeId { -> NodeId {
@ -1049,7 +1049,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
hir::ExprBreak(opt_label) => { hir::ExprBreak(opt_label) => {
// Find which label this break jumps to // Find which label this break jumps to
let sc = self.find_loop_scope(opt_label.map(|l| l.node), expr.id, expr.span); let sc = self.find_loop_scope(opt_label.map(|l| l.node.name), expr.id, expr.span);
// Now that we know the label we're going to, // Now that we know the label we're going to,
// look it up in the break loop nodes table // look it up in the break loop nodes table
@ -1063,7 +1063,7 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
hir::ExprAgain(opt_label) => { hir::ExprAgain(opt_label) => {
// Find which label this expr continues to // Find which label this expr continues to
let sc = self.find_loop_scope(opt_label.map(|l| l.node), expr.id, expr.span); let sc = self.find_loop_scope(opt_label.map(|l| l.node.name), expr.id, expr.span);
// Now that we know the label we're going to, // Now that we know the label we're going to,
// look it up in the continue loop nodes table // look it up in the continue loop nodes table
@ -1553,8 +1553,8 @@ impl<'a, 'tcx> Liveness<'a, 'tcx> {
|_bm, p_id, sp, path1| { |_bm, p_id, sp, path1| {
let var = self.variable(p_id, sp); let var = self.variable(p_id, sp);
// Ignore unused self. // Ignore unused self.
let ident = path1.node; let name = path1.node;
if ident.name != special_idents::self_.name { if name != special_idents::self_.name {
self.warn_about_unused(sp, p_id, entry_ln, var); self.warn_about_unused(sp, p_id, entry_ln, var);
} }
}) })

View file

@ -16,9 +16,9 @@ use util::nodemap::FnvHashMap;
use syntax::ast; use syntax::ast;
use rustc_front::hir; use rustc_front::hir;
use rustc_front::util::walk_pat; use rustc_front::util::walk_pat;
use syntax::codemap::{Span, Spanned, DUMMY_SP}; use syntax::codemap::{respan, Span, Spanned, DUMMY_SP};
pub type PatIdMap = FnvHashMap<ast::Ident, ast::NodeId>; pub type PatIdMap = FnvHashMap<ast::Name, ast::NodeId>;
// This is used because same-named variables in alternative patterns need to // This is used because same-named variables in alternative patterns need to
// use the NodeId of their namesake in the first pattern. // use the NodeId of their namesake in the first pattern.
@ -109,12 +109,26 @@ pub fn pat_is_binding_or_wild(dm: &DefMap, pat: &hir::Pat) -> bool {
/// Call `it` on every "binding" in a pattern, e.g., on `a` in /// Call `it` on every "binding" in a pattern, e.g., on `a` in
/// `match foo() { Some(a) => (), None => () }` /// `match foo() { Some(a) => (), None => () }`
pub fn pat_bindings<I>(dm: &DefMap, pat: &hir::Pat, mut it: I) where pub fn pat_bindings<I>(dm: &DefMap, pat: &hir::Pat, mut it: I) where
I: FnMut(hir::BindingMode, ast::NodeId, Span, &Spanned<ast::Name>),
{
walk_pat(pat, |p| {
match p.node {
hir::PatIdent(binding_mode, ref pth, _) if pat_is_binding(dm, p) => {
it(binding_mode, p.id, p.span, &respan(pth.span, pth.node.name));
}
_ => {}
}
true
});
}
pub fn pat_bindings_hygienic<I>(dm: &DefMap, pat: &hir::Pat, mut it: I) where
I: FnMut(hir::BindingMode, ast::NodeId, Span, &Spanned<ast::Ident>), I: FnMut(hir::BindingMode, ast::NodeId, Span, &Spanned<ast::Ident>),
{ {
walk_pat(pat, |p| { walk_pat(pat, |p| {
match p.node { match p.node {
hir::PatIdent(binding_mode, ref pth, _) if pat_is_binding(dm, p) => { hir::PatIdent(binding_mode, ref pth, _) if pat_is_binding(dm, p) => {
it(binding_mode, p.id, p.span, pth); it(binding_mode, p.id, p.span, &respan(pth.span, pth.node));
} }
_ => {} _ => {}
} }
@ -182,10 +196,10 @@ pub fn pat_contains_bindings_or_wild(dm: &DefMap, pat: &hir::Pat) -> bool {
contains_bindings contains_bindings
} }
pub fn simple_identifier<'a>(pat: &'a hir::Pat) -> Option<&'a ast::Ident> { pub fn simple_name<'a>(pat: &'a hir::Pat) -> Option<ast::Name> {
match pat.node { match pat.node {
hir::PatIdent(hir::BindByValue(_), ref path1, None) => { hir::PatIdent(hir::BindByValue(_), ref path1, None) => {
Some(&path1.node) Some(path1.node.name)
} }
_ => { _ => {
None None
@ -197,7 +211,7 @@ pub fn def_to_path(tcx: &ty::ctxt, id: DefId) -> hir::Path {
tcx.with_path(id, |path| hir::Path { tcx.with_path(id, |path| hir::Path {
global: false, global: false,
segments: path.last().map(|elem| hir::PathSegment { segments: path.last().map(|elem| hir::PathSegment {
identifier: ast::Ident::new(elem.name()), identifier: ast::Ident::with_empty_ctxt(elem.name()),
parameters: hir::PathParameters::none(), parameters: hir::PathParameters::none(),
}).into_iter().collect(), }).into_iter().collect(),
span: DUMMY_SP, span: DUMMY_SP,

View file

@ -73,7 +73,7 @@ struct LifetimeContext<'a> {
trait_ref_hack: bool, trait_ref_hack: bool,
// List of labels in the function/method currently under analysis. // List of labels in the function/method currently under analysis.
labels_in_fn: Vec<(ast::Ident, Span)>, labels_in_fn: Vec<(ast::Name, Span)>,
} }
enum ScopeChain<'a> { enum ScopeChain<'a> {
@ -381,7 +381,7 @@ fn extract_labels<'v, 'a>(ctxt: &mut LifetimeContext<'a>, b: &'v hir::Block) {
struct GatherLabels<'a> { struct GatherLabels<'a> {
sess: &'a Session, sess: &'a Session,
scope: Scope<'a>, scope: Scope<'a>,
labels_in_fn: &'a mut Vec<(ast::Ident, Span)>, labels_in_fn: &'a mut Vec<(ast::Name, Span)>,
} }
let mut gather = GatherLabels { let mut gather = GatherLabels {
@ -403,9 +403,9 @@ fn extract_labels<'v, 'a>(ctxt: &mut LifetimeContext<'a>, b: &'v hir::Block) {
if let Some(label) = expression_label(ex) { if let Some(label) = expression_label(ex) {
for &(prior, prior_span) in &self.labels_in_fn[..] { for &(prior, prior_span) in &self.labels_in_fn[..] {
// FIXME (#24278): non-hygienic comparison // FIXME (#24278): non-hygienic comparison
if label.name == prior.name { if label == prior {
signal_shadowing_problem(self.sess, signal_shadowing_problem(self.sess,
label.name, label,
original_label(prior_span), original_label(prior_span),
shadower_label(ex.span)); shadower_label(ex.span));
} }
@ -426,17 +426,17 @@ fn extract_labels<'v, 'a>(ctxt: &mut LifetimeContext<'a>, b: &'v hir::Block) {
} }
} }
fn expression_label(ex: &hir::Expr) -> Option<ast::Ident> { fn expression_label(ex: &hir::Expr) -> Option<ast::Name> {
match ex.node { match ex.node {
hir::ExprWhile(_, _, Some(label)) | hir::ExprWhile(_, _, Some(label)) |
hir::ExprLoop(_, Some(label)) => Some(label), hir::ExprLoop(_, Some(label)) => Some(label.name),
_ => None, _ => None,
} }
} }
fn check_if_label_shadows_lifetime<'a>(sess: &'a Session, fn check_if_label_shadows_lifetime<'a>(sess: &'a Session,
mut scope: Scope<'a>, mut scope: Scope<'a>,
label: ast::Ident, label: ast::Name,
label_span: Span) { label_span: Span) {
loop { loop {
match *scope { match *scope {
@ -447,10 +447,10 @@ fn extract_labels<'v, 'a>(ctxt: &mut LifetimeContext<'a>, b: &'v hir::Block) {
LateScope(lifetimes, s) => { LateScope(lifetimes, s) => {
for lifetime_def in lifetimes { for lifetime_def in lifetimes {
// FIXME (#24278): non-hygienic comparison // FIXME (#24278): non-hygienic comparison
if label.name == lifetime_def.lifetime.name { if label == lifetime_def.lifetime.name {
signal_shadowing_problem( signal_shadowing_problem(
sess, sess,
label.name, label,
original_lifetime(&lifetime_def.lifetime), original_lifetime(&lifetime_def.lifetime),
shadower_label(label_span)); shadower_label(label_span));
return; return;
@ -703,7 +703,7 @@ impl<'a> LifetimeContext<'a> {
{ {
for &(label, label_span) in &self.labels_in_fn { for &(label, label_span) in &self.labels_in_fn {
// FIXME (#24278): non-hygienic comparison // FIXME (#24278): non-hygienic comparison
if lifetime.name == label.name { if lifetime.name == label {
signal_shadowing_problem(self.sess, signal_shadowing_problem(self.sess,
lifetime.name, lifetime.name,
original_label(label_span), original_label(label_span),

View file

@ -336,7 +336,7 @@ impl<'a, 'v, 'tcx> Visitor<'v> for Checker<'a, 'tcx> {
// When compiling with --test we don't enforce stability on the // When compiling with --test we don't enforce stability on the
// compiler-generated test module, demarcated with `DUMMY_SP` plus the // compiler-generated test module, demarcated with `DUMMY_SP` plus the
// name `__test` // name `__test`
if item.span == DUMMY_SP && item.name == "__test" { return } if item.span == DUMMY_SP && item.name.as_str() == "__test" { return }
check_item(self.tcx, item, true, check_item(self.tcx, item, true,
&mut |id, sp, stab| self.check(id, sp, stab)); &mut |id, sp, stab| self.check(id, sp, stab));

View file

@ -99,7 +99,7 @@ pub fn gather_move_from_pat<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
let pat_span_path_opt = match move_pat.node { let pat_span_path_opt = match move_pat.node {
hir::PatIdent(_, ref path1, _) => { hir::PatIdent(_, ref path1, _) => {
Some(MoveSpanAndPath{span: move_pat.span, Some(MoveSpanAndPath{span: move_pat.span,
ident: path1.node}) name: path1.node.name})
}, },
_ => None, _ => None,
}; };

View file

@ -15,7 +15,6 @@ use rustc::middle::ty;
use std::cell::RefCell; use std::cell::RefCell;
use syntax::ast; use syntax::ast;
use syntax::codemap; use syntax::codemap;
use rustc_front::print::pprust;
use rustc_front::hir; use rustc_front::hir;
pub struct MoveErrorCollector<'tcx> { pub struct MoveErrorCollector<'tcx> {
@ -57,7 +56,7 @@ impl<'tcx> MoveError<'tcx> {
#[derive(Clone)] #[derive(Clone)]
pub struct MoveSpanAndPath { pub struct MoveSpanAndPath {
pub span: codemap::Span, pub span: codemap::Span,
pub ident: ast::Ident pub name: ast::Name,
} }
pub struct GroupedMoveErrors<'tcx> { pub struct GroupedMoveErrors<'tcx> {
@ -73,7 +72,7 @@ fn report_move_errors<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
let mut is_first_note = true; let mut is_first_note = true;
for move_to in &error.move_to_places { for move_to in &error.move_to_places {
note_move_destination(bccx, move_to.span, note_move_destination(bccx, move_to.span,
&move_to.ident, is_first_note); move_to.name, is_first_note);
is_first_note = false; is_first_note = false;
} }
} }
@ -157,9 +156,8 @@ fn report_cannot_move_out_of<'a, 'tcx>(bccx: &BorrowckCtxt<'a, 'tcx>,
fn note_move_destination(bccx: &BorrowckCtxt, fn note_move_destination(bccx: &BorrowckCtxt,
move_to_span: codemap::Span, move_to_span: codemap::Span,
pat_ident: &ast::Ident, pat_name: ast::Name,
is_first_note: bool) { is_first_note: bool) {
let pat_name = pprust::ident_to_string(pat_ident);
if is_first_note { if is_first_note {
bccx.span_note( bccx.span_note(
move_to_span, move_to_span,

View file

@ -330,8 +330,7 @@ impl<'ast> pprust_hir::PpAnn for IdentifiedAnnotation<'ast> {
s: &mut pprust_hir::State, s: &mut pprust_hir::State,
node: pprust_hir::AnnNode) -> io::Result<()> { node: pprust_hir::AnnNode) -> io::Result<()> {
match node { match node {
pprust_hir::NodeIdent(_) | pprust_hir::NodeName(_) => Ok(()), pprust_hir::NodeName(_) => Ok(()),
pprust_hir::NodeItem(item) => { pprust_hir::NodeItem(item) => {
try!(pp::space(&mut s.s)); try!(pp::space(&mut s.s));
s.synth_comment(item.id.to_string()) s.synth_comment(item.id.to_string())
@ -381,7 +380,7 @@ impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> {
try!(pp::space(&mut s.s)); try!(pp::space(&mut s.s));
// FIXME #16420: this doesn't display the connections // FIXME #16420: this doesn't display the connections
// between syntax contexts // between syntax contexts
s.synth_comment(format!("{}#{}", nm, ctxt)) s.synth_comment(format!("{}#{}", nm, ctxt.0))
} }
pprust::NodeName(&ast::Name(nm)) => { pprust::NodeName(&ast::Name(nm)) => {
try!(pp::space(&mut s.s)); try!(pp::space(&mut s.s));

View file

@ -296,8 +296,8 @@ pub fn noop_fold_meta_items<T: Folder>(meta_items: Vec<P<MetaItem>>, fld: &mut T
pub fn noop_fold_view_path<T: Folder>(view_path: P<ViewPath>, fld: &mut T) -> P<ViewPath> { pub fn noop_fold_view_path<T: Folder>(view_path: P<ViewPath>, fld: &mut T) -> P<ViewPath> {
view_path.map(|Spanned {node, span}| Spanned { view_path.map(|Spanned {node, span}| Spanned {
node: match node { node: match node {
ViewPathSimple(ident, path) => { ViewPathSimple(name, path) => {
ViewPathSimple(ident, fld.fold_path(path)) ViewPathSimple(name, fld.fold_path(path))
} }
ViewPathGlob(path) => { ViewPathGlob(path) => {
ViewPathGlob(fld.fold_path(path)) ViewPathGlob(fld.fold_path(path))
@ -520,11 +520,11 @@ pub fn noop_fold_explicit_self_underscore<T: Folder>(es: ExplicitSelf_, fld: &mu
-> ExplicitSelf_ { -> ExplicitSelf_ {
match es { match es {
SelfStatic | SelfValue(_) => es, SelfStatic | SelfValue(_) => es,
SelfRegion(lifetime, m, ident) => { SelfRegion(lifetime, m, name) => {
SelfRegion(fld.fold_opt_lifetime(lifetime), m, ident) SelfRegion(fld.fold_opt_lifetime(lifetime), m, name)
} }
SelfExplicit(typ, ident) => { SelfExplicit(typ, name) => {
SelfExplicit(fld.fold_ty(typ), ident) SelfExplicit(fld.fold_ty(typ), name)
} }
} }
} }
@ -1111,10 +1111,10 @@ pub fn noop_fold_expr<T: Folder>(Expr {id, node, span}: Expr, folder: &mut T) ->
respan(folder.new_span(name.span), respan(folder.new_span(name.span),
folder.fold_name(name.node))) folder.fold_name(name.node)))
} }
ExprTupField(el, ident) => { ExprTupField(el, index) => {
ExprTupField(folder.fold_expr(el), ExprTupField(folder.fold_expr(el),
respan(folder.new_span(ident.span), respan(folder.new_span(index.span),
folder.fold_usize(ident.node))) folder.fold_usize(index.node)))
} }
ExprIndex(el, er) => { ExprIndex(el, er) => {
ExprIndex(folder.fold_expr(el), folder.fold_expr(er)) ExprIndex(folder.fold_expr(el), folder.fold_expr(er))

View file

@ -629,7 +629,6 @@ pub enum Expr_ {
/// ///
/// `if expr { block } else { expr }` /// `if expr { block } else { expr }`
ExprIf(P<Expr>, P<Block>, Option<P<Expr>>), ExprIf(P<Expr>, P<Block>, Option<P<Expr>>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A while loop, with an optional label /// A while loop, with an optional label
/// ///
/// `'label: while expr { block }` /// `'label: while expr { block }`
@ -637,7 +636,6 @@ pub enum Expr_ {
/// Conditionless loop (can be exited with break, continue, or return) /// Conditionless loop (can be exited with break, continue, or return)
/// ///
/// `'label: loop { block }` /// `'label: loop { block }`
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
ExprLoop(P<Block>, Option<Ident>), ExprLoop(P<Block>, Option<Ident>),
/// A `match` block, with a source that indicates whether or not it is /// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind. /// the result of a desugaring, and if so, which kind.

View file

@ -30,7 +30,6 @@ use hir::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier};
use std::io::{self, Write, Read}; use std::io::{self, Write, Read};
pub enum AnnNode<'a> { pub enum AnnNode<'a> {
NodeIdent(&'a ast::Ident),
NodeName(&'a ast::Name), NodeName(&'a ast::Name),
NodeBlock(&'a hir::Block), NodeBlock(&'a hir::Block),
NodeItem(&'a hir::Item), NodeItem(&'a hir::Item),
@ -264,8 +263,8 @@ pub fn path_to_string(p: &hir::Path) -> String {
to_string(|s| s.print_path(p, false, 0)) to_string(|s| s.print_path(p, false, 0))
} }
pub fn ident_to_string(id: &ast::Ident) -> String { pub fn name_to_string(name: ast::Name) -> String {
to_string(|s| s.print_ident(*id)) to_string(|s| s.print_name(name))
} }
pub fn fun_to_string(decl: &hir::FnDecl, pub fn fun_to_string(decl: &hir::FnDecl,
@ -1346,7 +1345,7 @@ impl<'a> State<'a> {
} }
hir::ExprWhile(ref test, ref blk, opt_ident) => { hir::ExprWhile(ref test, ref blk, opt_ident) => {
if let Some(ident) = opt_ident { if let Some(ident) = opt_ident {
try!(self.print_ident(ident)); try!(self.print_name(ident.name));
try!(self.word_space(":")); try!(self.word_space(":"));
} }
try!(self.head("while")); try!(self.head("while"));
@ -1356,7 +1355,7 @@ impl<'a> State<'a> {
} }
hir::ExprLoop(ref blk, opt_ident) => { hir::ExprLoop(ref blk, opt_ident) => {
if let Some(ident) = opt_ident { if let Some(ident) = opt_ident {
try!(self.print_ident(ident)); try!(self.print_name(ident.name));
try!(self.word_space(":")); try!(self.word_space(":"));
} }
try!(self.head("loop")); try!(self.head("loop"));
@ -1461,7 +1460,7 @@ impl<'a> State<'a> {
try!(word(&mut self.s, "break")); try!(word(&mut self.s, "break"));
try!(space(&mut self.s)); try!(space(&mut self.s));
if let Some(ident) = opt_ident { if let Some(ident) = opt_ident {
try!(self.print_ident(ident.node)); try!(self.print_name(ident.node.name));
try!(space(&mut self.s)); try!(space(&mut self.s));
} }
} }
@ -1469,7 +1468,7 @@ impl<'a> State<'a> {
try!(word(&mut self.s, "continue")); try!(word(&mut self.s, "continue"));
try!(space(&mut self.s)); try!(space(&mut self.s));
if let Some(ident) = opt_ident { if let Some(ident) = opt_ident {
try!(self.print_ident(ident.node)); try!(self.print_name(ident.node.name));
try!(space(&mut self.s)) try!(space(&mut self.s))
} }
} }
@ -1582,11 +1581,6 @@ impl<'a> State<'a> {
} }
} }
pub fn print_ident(&mut self, ident: ast::Ident) -> io::Result<()> {
try!(word(&mut self.s, &ident.name.as_str()));
self.ann.post(self, NodeIdent(&ident))
}
pub fn print_usize(&mut self, i: usize) -> io::Result<()> { pub fn print_usize(&mut self, i: usize) -> io::Result<()> {
word(&mut self.s, &i.to_string()) word(&mut self.s, &i.to_string())
} }
@ -1620,7 +1614,7 @@ impl<'a> State<'a> {
try!(word(&mut self.s, "::")) try!(word(&mut self.s, "::"))
} }
try!(self.print_ident(segment.identifier)); try!(self.print_name(segment.identifier.name));
try!(self.print_path_parameters(&segment.parameters, colons_before_params)); try!(self.print_path_parameters(&segment.parameters, colons_before_params));
} }
@ -1645,7 +1639,7 @@ impl<'a> State<'a> {
try!(word(&mut self.s, ">")); try!(word(&mut self.s, ">"));
try!(word(&mut self.s, "::")); try!(word(&mut self.s, "::"));
let item_segment = path.segments.last().unwrap(); let item_segment = path.segments.last().unwrap();
try!(self.print_ident(item_segment.identifier)); try!(self.print_name(item_segment.identifier.name));
self.print_path_parameters(&item_segment.parameters, colons_before_params) self.print_path_parameters(&item_segment.parameters, colons_before_params)
} }
@ -1741,7 +1735,7 @@ impl<'a> State<'a> {
try!(self.word_nbsp("mut")); try!(self.word_nbsp("mut"));
} }
} }
try!(self.print_ident(path1.node)); try!(self.print_name(path1.node.name));
match *sub { match *sub {
Some(ref p) => { Some(ref p) => {
try!(word(&mut self.s, "@")); try!(word(&mut self.s, "@"));
@ -2168,7 +2162,6 @@ impl<'a> State<'a> {
hir::ViewPathSimple(name, ref path) => { hir::ViewPathSimple(name, ref path) => {
try!(self.print_path(path, false, 0)); try!(self.print_path(path, false, 0));
// FIXME(#6993) can't compare identifiers directly here
if path.segments.last().unwrap().identifier.name != name { if path.segments.last().unwrap().identifier.name != name {
try!(space(&mut self.s)); try!(space(&mut self.s));
try!(self.word_space("as")); try!(self.word_space("as"));
@ -2183,14 +2176,14 @@ impl<'a> State<'a> {
word(&mut self.s, "::*") word(&mut self.s, "::*")
} }
hir::ViewPathList(ref path, ref idents) => { hir::ViewPathList(ref path, ref segments) => {
if path.segments.is_empty() { if path.segments.is_empty() {
try!(word(&mut self.s, "{")); try!(word(&mut self.s, "{"));
} else { } else {
try!(self.print_path(path, false, 0)); try!(self.print_path(path, false, 0));
try!(word(&mut self.s, "::{")); try!(word(&mut self.s, "::{"));
} }
try!(self.commasep(Inconsistent, &idents[..], |s, w| { try!(self.commasep(Inconsistent, &segments[..], |s, w| {
match w.node { match w.node {
hir::PathListIdent { name, .. } => { hir::PathListIdent { name, .. } => {
s.print_name(name) s.print_name(name)
@ -2268,7 +2261,7 @@ impl<'a> State<'a> {
abi: abi::Abi, abi: abi::Abi,
unsafety: hir::Unsafety, unsafety: hir::Unsafety,
decl: &hir::FnDecl, decl: &hir::FnDecl,
name: Option<ast::Ident>, name: Option<ast::Name>,
generics: &hir::Generics, generics: &hir::Generics,
opt_explicit_self: Option<&hir::ExplicitSelf_>) opt_explicit_self: Option<&hir::ExplicitSelf_>)
-> io::Result<()> { -> io::Result<()> {
@ -2289,7 +2282,7 @@ impl<'a> State<'a> {
unsafety, unsafety,
hir::Constness::NotConst, hir::Constness::NotConst,
abi, abi,
name.map(|x| x.name), name,
&generics, &generics,
opt_explicit_self, opt_explicit_self,
hir::Inherited)); hir::Inherited));

View file

@ -354,13 +354,13 @@ pub fn empty_generics() -> Generics {
// convert a span and an identifier to the corresponding // convert a span and an identifier to the corresponding
// 1-segment path // 1-segment path
pub fn ident_to_path(s: Span, identifier: Ident) -> Path { pub fn ident_to_path(s: Span, ident: Ident) -> Path {
hir::Path { hir::Path {
span: s, span: s,
global: false, global: false,
segments: vec!( segments: vec!(
hir::PathSegment { hir::PathSegment {
identifier: identifier, identifier: ident,
parameters: hir::AngleBracketedParameters(hir::AngleBracketedParameterData { parameters: hir::AngleBracketedParameters(hir::AngleBracketedParameterData {
lifetimes: Vec::new(), lifetimes: Vec::new(),
types: OwnedSlice::empty(), types: OwnedSlice::empty(),

View file

@ -761,9 +761,6 @@ impl LintPass for UnconditionalRecursion {
impl LateLintPass for UnconditionalRecursion { impl LateLintPass for UnconditionalRecursion {
fn check_fn(&mut self, cx: &LateContext, fn_kind: FnKind, _: &hir::FnDecl, fn check_fn(&mut self, cx: &LateContext, fn_kind: FnKind, _: &hir::FnDecl,
blk: &hir::Block, sp: Span, id: ast::NodeId) { blk: &hir::Block, sp: Span, id: ast::NodeId) {
type F = for<'tcx> fn(&ty::ctxt<'tcx>,
ast::NodeId, ast::NodeId, ast::Ident, ast::NodeId) -> bool;
let method = match fn_kind { let method = match fn_kind {
FnKind::ItemFn(..) => None, FnKind::ItemFn(..) => None,
FnKind::Method(..) => { FnKind::Method(..) => {

View file

@ -47,10 +47,10 @@ impl UnusedMut {
let mut mutables = FnvHashMap(); let mut mutables = FnvHashMap();
for p in pats { for p in pats {
pat_util::pat_bindings(&cx.tcx.def_map, p, |mode, id, _, path1| { pat_util::pat_bindings(&cx.tcx.def_map, p, |mode, id, _, path1| {
let ident = path1.node; let name = path1.node;
if let hir::BindByValue(hir::MutMutable) = mode { if let hir::BindByValue(hir::MutMutable) = mode {
if !ident.name.as_str().starts_with("_") { if !name.as_str().starts_with("_") {
match mutables.entry(ident.name.usize()) { match mutables.entry(name.0 as usize) {
Vacant(entry) => { entry.insert(vec![id]); }, Vacant(entry) => { entry.insert(vec![id]); },
Occupied(mut entry) => { entry.get_mut().push(id); }, Occupied(mut entry) => { entry.get_mut().push(id); },
} }

View file

@ -199,7 +199,7 @@ struct Candidate<H:Hair> {
struct Binding<H:Hair> { struct Binding<H:Hair> {
span: H::Span, span: H::Span,
source: Lvalue<H>, source: Lvalue<H>,
name: H::Ident, name: H::Name,
var_id: H::VarId, var_id: H::VarId,
var_ty: H::Ty, var_ty: H::Ty,
mutability: Mutability, mutability: Mutability,
@ -376,7 +376,7 @@ impl<H:Hair> Builder<H> {
fn declare_binding(&mut self, fn declare_binding(&mut self,
var_extent: H::CodeExtent, var_extent: H::CodeExtent,
mutability: Mutability, mutability: Mutability,
name: H::Ident, name: H::Name,
var_id: H::VarId, var_id: H::VarId,
var_ty: H::Ty, var_ty: H::Ty,
span: H::Span) span: H::Span)

View file

@ -29,7 +29,6 @@ pub trait Hair: Sized+Debug+Clone+Eq+Hash { // (*)
type DefId: Copy+Debug+Eq+Hash; // e.g., DefId type DefId: Copy+Debug+Eq+Hash; // e.g., DefId
type AdtDef: Copy+Debug+Eq+Hash; // e.g., AdtDef<'tcx> type AdtDef: Copy+Debug+Eq+Hash; // e.g., AdtDef<'tcx>
type Name: Copy+Debug+Eq+Hash; // e.g., ast::Name type Name: Copy+Debug+Eq+Hash; // e.g., ast::Name
type Ident: Copy+Debug+Eq+Hash; // e.g., ast::Ident
type InternedString: Clone+Debug+Eq+Hash; // e.g., InternedString type InternedString: Clone+Debug+Eq+Hash; // e.g., InternedString
type Bytes: Clone+Debug+Eq+Hash; // e.g., Rc<Vec<u8>> type Bytes: Clone+Debug+Eq+Hash; // e.g., Rc<Vec<u8>>
type Span: Copy+Debug+Eq; // e.g., syntax::codemap::Span type Span: Copy+Debug+Eq; // e.g., syntax::codemap::Span
@ -248,7 +247,7 @@ pub enum PatternKind<H:Hair> {
// x, ref x, x @ P, etc // x, ref x, x @ P, etc
Binding { mutability: Mutability, Binding { mutability: Mutability,
name: H::Ident, name: H::Name,
mode: BindingMode<H>, mode: BindingMode<H>,
var: H::VarId, var: H::VarId,
ty: H::Ty, ty: H::Ty,

View file

@ -113,7 +113,7 @@ pub enum BorrowKind {
// decl, a let, etc. // decl, a let, etc.
pub struct VarDecl<H:Hair> { pub struct VarDecl<H:Hair> {
pub mutability: Mutability, pub mutability: Mutability,
pub name: H::Ident, pub name: H::Name,
pub ty: H::Ty, pub ty: H::Ty,
} }

View file

@ -286,9 +286,9 @@ impl<'a,'tcx:'a> Mirror<Cx<'a,'tcx>> for &'tcx hir::Expr {
hir::ExprField(ref source, name) => hir::ExprField(ref source, name) =>
ExprKind::Field { lhs: source.to_ref(), ExprKind::Field { lhs: source.to_ref(),
name: Field::Named(name.node) }, name: Field::Named(name.node) },
hir::ExprTupField(ref source, ident) => hir::ExprTupField(ref source, index) =>
ExprKind::Field { lhs: source.to_ref(), ExprKind::Field { lhs: source.to_ref(),
name: Field::Indexed(ident.node) }, name: Field::Indexed(index.node) },
hir::ExprCast(ref source, _) => hir::ExprCast(ref source, _) =>
ExprKind::Cast { source: source.to_ref() }, ExprKind::Cast { source: source.to_ref() },
hir::ExprBox(ref value) => hir::ExprBox(ref value) =>

View file

@ -47,7 +47,6 @@ impl<'a,'tcx:'a> Hair for Cx<'a, 'tcx> {
type DefId = DefId; type DefId = DefId;
type AdtDef = ty::AdtDef<'tcx>; type AdtDef = ty::AdtDef<'tcx>;
type Name = ast::Name; type Name = ast::Name;
type Ident = ast::Ident;
type InternedString = InternedString; type InternedString = InternedString;
type Bytes = Rc<Vec<u8>>; type Bytes = Rc<Vec<u8>>;
type Span = Span; type Span = Span;

View file

@ -39,12 +39,12 @@ use tcx::to_ref::ToRef;
#[derive(Clone, Debug)] #[derive(Clone, Debug)]
pub struct PatNode<'tcx> { pub struct PatNode<'tcx> {
pat: &'tcx hir::Pat, pat: &'tcx hir::Pat,
binding_map: Option<Rc<FnvHashMap<ast::Ident, ast::NodeId>>> binding_map: Option<Rc<FnvHashMap<ast::Name, ast::NodeId>>>
} }
impl<'tcx> PatNode<'tcx> { impl<'tcx> PatNode<'tcx> {
pub fn new(pat: &'tcx hir::Pat, pub fn new(pat: &'tcx hir::Pat,
binding_map: Option<Rc<FnvHashMap<ast::Ident, ast::NodeId>>>) binding_map: Option<Rc<FnvHashMap<ast::Name, ast::NodeId>>>)
-> PatNode<'tcx> { -> PatNode<'tcx> {
PatNode { PatNode {
pat: pat, pat: pat,
@ -220,7 +220,7 @@ impl<'a,'tcx:'a> Mirror<Cx<'a,'tcx>> for PatNode<'tcx> {
{ {
let id = match self.binding_map { let id = match self.binding_map {
None => self.pat.id, None => self.pat.id,
Some(ref map) => map[&ident.node], Some(ref map) => map[&ident.node.name],
}; };
let var_ty = cx.tcx.node_id_to_type(self.pat.id); let var_ty = cx.tcx.node_id_to_type(self.pat.id);
let region = match var_ty.sty { let region = match var_ty.sty {
@ -240,7 +240,7 @@ impl<'a,'tcx:'a> Mirror<Cx<'a,'tcx>> for PatNode<'tcx> {
PatternKind::Binding { PatternKind::Binding {
mutability: mutability, mutability: mutability,
mode: mode, mode: mode,
name: ident.node, name: ident.node.name,
var: id, var: id,
ty: var_ty, ty: var_ty,
subpattern: self.opt_pat_ref(sub), subpattern: self.opt_pat_ref(sub),

View file

@ -392,7 +392,6 @@ enum PrivacyResult {
enum FieldName { enum FieldName {
UnnamedField(usize), // index UnnamedField(usize), // index
// (Name, not Ident, because struct fields are not macro-hygienic)
NamedField(ast::Name), NamedField(ast::Name),
} }

View file

@ -281,14 +281,14 @@ impl<'a, 'b:'a, 'tcx:'b> GraphBuilder<'a, 'b, 'tcx> {
ViewPathSimple(_, ref full_path) => { ViewPathSimple(_, ref full_path) => {
full_path.segments full_path.segments
.split_last().unwrap().1 .split_last().unwrap().1
.iter().map(|ident| ident.identifier.name) .iter().map(|seg| seg.identifier.name)
.collect() .collect()
} }
ViewPathGlob(ref module_ident_path) | ViewPathGlob(ref module_ident_path) |
ViewPathList(ref module_ident_path, _) => { ViewPathList(ref module_ident_path, _) => {
module_ident_path.segments module_ident_path.segments
.iter().map(|ident| ident.identifier.name).collect() .iter().map(|seg| seg.identifier.name).collect()
} }
}; };

View file

@ -57,7 +57,7 @@ use rustc::metadata::csearch;
use rustc::metadata::decoder::{DefLike, DlDef, DlField, DlImpl}; use rustc::metadata::decoder::{DefLike, DlDef, DlField, DlImpl};
use rustc::middle::def::*; use rustc::middle::def::*;
use rustc::middle::def_id::DefId; use rustc::middle::def_id::DefId;
use rustc::middle::pat_util::pat_bindings; use rustc::middle::pat_util::pat_bindings_hygienic;
use rustc::middle::privacy::*; use rustc::middle::privacy::*;
use rustc::middle::subst::{ParamSpace, FnSpace, TypeSpace}; use rustc::middle::subst::{ParamSpace, FnSpace, TypeSpace};
use rustc::middle::ty::{Freevar, FreevarMap, TraitMap, GlobMap}; use rustc::middle::ty::{Freevar, FreevarMap, TraitMap, GlobMap};
@ -2559,7 +2559,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
// user and one 'x' came from the macro. // user and one 'x' came from the macro.
fn binding_mode_map(&mut self, pat: &Pat) -> BindingMap { fn binding_mode_map(&mut self, pat: &Pat) -> BindingMap {
let mut result = HashMap::new(); let mut result = HashMap::new();
pat_bindings(&self.def_map, pat, |binding_mode, _id, sp, path1| { pat_bindings_hygienic(&self.def_map, pat, |binding_mode, _id, sp, path1| {
let name = mtwt::resolve(path1.node); let name = mtwt::resolve(path1.node);
result.insert(name, BindingInfo { result.insert(name, BindingInfo {
span: sp, span: sp,
@ -3710,7 +3710,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
false // Stop advancing false // Stop advancing
}); });
if method_scope && special_names::self_ == path_name { if method_scope && special_names::self_.as_str() == &path_name[..] {
resolve_error( resolve_error(
self, self,
expr.span, expr.span,

View file

@ -537,7 +537,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
fn get_binding(this: &mut Resolver, fn get_binding(this: &mut Resolver,
import_resolution: &ImportResolution, import_resolution: &ImportResolution,
namespace: Namespace, namespace: Namespace,
source: &Name) source: Name)
-> NamespaceResult { -> NamespaceResult {
// Import resolutions must be declared with "pub" // Import resolutions must be declared with "pub"
@ -560,7 +560,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
let id = import_resolution.id(namespace); let id = import_resolution.id(namespace);
// track used imports and extern crates as well // track used imports and extern crates as well
this.used_imports.insert((id, namespace)); this.used_imports.insert((id, namespace));
this.record_import_use(id, *source); this.record_import_use(id, source);
match target_module.def_id.get() { match target_module.def_id.get() {
Some(DefId{krate: kid, ..}) => { Some(DefId{krate: kid, ..}) => {
this.used_crates.insert(kid); this.used_crates.insert(kid);
@ -578,14 +578,14 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
value_result = get_binding(self.resolver, value_result = get_binding(self.resolver,
import_resolution, import_resolution,
ValueNS, ValueNS,
&source); source);
value_used_reexport = import_resolution.is_public; value_used_reexport = import_resolution.is_public;
} }
if type_result.is_unknown() { if type_result.is_unknown() {
type_result = get_binding(self.resolver, type_result = get_binding(self.resolver,
import_resolution, import_resolution,
TypeNS, TypeNS,
&source); source);
type_used_reexport = import_resolution.is_public; type_used_reexport = import_resolution.is_public;
} }
@ -793,10 +793,10 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
))); )));
} }
for (ident, target_import_resolution) in import_resolutions.iter() { for (name, target_import_resolution) in import_resolutions.iter() {
debug!("(resolving glob import) writing module resolution \ debug!("(resolving glob import) writing module resolution \
{} into `{}`", {} into `{}`",
*ident, *name,
module_to_string(module_)); module_to_string(module_));
if !target_import_resolution.is_public { if !target_import_resolution.is_public {
@ -806,7 +806,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
// Here we merge two import resolutions. // Here we merge two import resolutions.
let mut import_resolutions = module_.import_resolutions.borrow_mut(); let mut import_resolutions = module_.import_resolutions.borrow_mut();
match import_resolutions.get_mut(ident) { match import_resolutions.get_mut(name) {
Some(dest_import_resolution) => { Some(dest_import_resolution) => {
// Merge the two import resolutions at a finer-grained // Merge the two import resolutions at a finer-grained
// level. // level.
@ -818,7 +818,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
Some(ref value_target) => { Some(ref value_target) => {
self.check_for_conflicting_import(&dest_import_resolution, self.check_for_conflicting_import(&dest_import_resolution,
import_directive.span, import_directive.span,
*ident, *name,
ValueNS); ValueNS);
dest_import_resolution.value_target = Some(value_target.clone()); dest_import_resolution.value_target = Some(value_target.clone());
} }
@ -830,7 +830,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
Some(ref type_target) => { Some(ref type_target) => {
self.check_for_conflicting_import(&dest_import_resolution, self.check_for_conflicting_import(&dest_import_resolution,
import_directive.span, import_directive.span,
*ident, *name,
TypeNS); TypeNS);
dest_import_resolution.type_target = Some(type_target.clone()); dest_import_resolution.type_target = Some(type_target.clone());
} }
@ -848,7 +848,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
new_import_resolution.type_target = new_import_resolution.type_target =
target_import_resolution.type_target.clone(); target_import_resolution.type_target.clone();
import_resolutions.insert(*ident, new_import_resolution); import_resolutions.insert(*name, new_import_resolution);
} }
// Add all children from the containing module. // Add all children from the containing module.

View file

@ -444,7 +444,7 @@ impl <'l, 'tcx> DumpCsvVisitor<'l, 'tcx> {
fn process_const(&mut self, fn process_const(&mut self,
id: ast::NodeId, id: ast::NodeId,
ident: &ast::Ident, name: ast::Name,
span: Span, span: Span,
typ: &ast::Ty, typ: &ast::Ty,
expr: &ast::Expr) { expr: &ast::Expr) {
@ -456,7 +456,7 @@ impl <'l, 'tcx> DumpCsvVisitor<'l, 'tcx> {
self.fmt.static_str(span, self.fmt.static_str(span,
sub_span, sub_span,
id, id,
&ident.name.as_str(), &name.as_str(),
&qualname, &qualname,
&self.span.snippet(expr.span), &self.span.snippet(expr.span),
&ty_to_string(&*typ), &ty_to_string(&*typ),
@ -988,7 +988,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DumpCsvVisitor<'l, 'tcx> {
fn visit_trait_item(&mut self, trait_item: &ast::TraitItem) { fn visit_trait_item(&mut self, trait_item: &ast::TraitItem) {
match trait_item.node { match trait_item.node {
ast::ConstTraitItem(ref ty, Some(ref expr)) => { ast::ConstTraitItem(ref ty, Some(ref expr)) => {
self.process_const(trait_item.id, &trait_item.ident, self.process_const(trait_item.id, trait_item.ident.name,
trait_item.span, &*ty, &*expr); trait_item.span, &*ty, &*expr);
} }
ast::MethodTraitItem(ref sig, ref body) => { ast::MethodTraitItem(ref sig, ref body) => {
@ -1006,7 +1006,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DumpCsvVisitor<'l, 'tcx> {
fn visit_impl_item(&mut self, impl_item: &ast::ImplItem) { fn visit_impl_item(&mut self, impl_item: &ast::ImplItem) {
match impl_item.node { match impl_item.node {
ast::ConstImplItem(ref ty, ref expr) => { ast::ConstImplItem(ref ty, ref expr) => {
self.process_const(impl_item.id, &impl_item.ident, self.process_const(impl_item.id, impl_item.ident.name,
impl_item.span, &ty, &expr); impl_item.span, &ty, &expr);
} }
ast::MethodImplItem(ref sig, ref body) => { ast::MethodImplItem(ref sig, ref body) => {

View file

@ -375,7 +375,7 @@ pub struct BindingInfo<'tcx> {
pub ty: Ty<'tcx>, pub ty: Ty<'tcx>,
} }
type BindingsMap<'tcx> = FnvHashMap<ast::Ident, BindingInfo<'tcx>>; type BindingsMap<'tcx> = FnvHashMap<ast::Name, BindingInfo<'tcx>>;
struct ArmData<'p, 'blk, 'tcx: 'blk> { struct ArmData<'p, 'blk, 'tcx: 'blk> {
bodycx: Block<'blk, 'tcx>, bodycx: Block<'blk, 'tcx>,
@ -390,7 +390,7 @@ struct ArmData<'p, 'blk, 'tcx: 'blk> {
struct Match<'a, 'p: 'a, 'blk: 'a, 'tcx: 'blk> { struct Match<'a, 'p: 'a, 'blk: 'a, 'tcx: 'blk> {
pats: Vec<&'p hir::Pat>, pats: Vec<&'p hir::Pat>,
data: &'a ArmData<'p, 'blk, 'tcx>, data: &'a ArmData<'p, 'blk, 'tcx>,
bound_ptrs: Vec<(ast::Ident, ValueRef)>, bound_ptrs: Vec<(ast::Name, ValueRef)>,
// Thread along renamings done by the check_match::StaticInliner, so we can // Thread along renamings done by the check_match::StaticInliner, so we can
// map back to original NodeIds // map back to original NodeIds
pat_renaming_map: Option<&'a FnvHashMap<(NodeId, Span), NodeId>> pat_renaming_map: Option<&'a FnvHashMap<(NodeId, Span), NodeId>>
@ -464,7 +464,7 @@ fn expand_nested_bindings<'a, 'p, 'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
loop { loop {
pat = match pat.node { pat = match pat.node {
hir::PatIdent(_, ref path, Some(ref inner)) => { hir::PatIdent(_, ref path, Some(ref inner)) => {
bound_ptrs.push((path.node, val.val)); bound_ptrs.push((path.node.name, val.val));
&**inner &**inner
}, },
_ => break _ => break
@ -505,7 +505,7 @@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
match this.node { match this.node {
hir::PatIdent(_, ref path, None) => { hir::PatIdent(_, ref path, None) => {
if pat_is_binding(dm, &*this) { if pat_is_binding(dm, &*this) {
bound_ptrs.push((path.node, val.val)); bound_ptrs.push((path.node.name, val.val));
} }
} }
hir::PatVec(ref before, Some(ref slice), ref after) => { hir::PatVec(ref before, Some(ref slice), ref after) => {
@ -513,7 +513,7 @@ fn enter_match<'a, 'b, 'p, 'blk, 'tcx, F>(bcx: Block<'blk, 'tcx>,
let subslice_val = bind_subslice_pat( let subslice_val = bind_subslice_pat(
bcx, this.id, val, bcx, this.id, val,
before.len(), after.len()); before.len(), after.len());
bound_ptrs.push((path.node, subslice_val)); bound_ptrs.push((path.node.name, subslice_val));
} }
} }
_ => {} _ => {}
@ -943,7 +943,7 @@ fn insert_lllocals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
bindings_map: &BindingsMap<'tcx>, bindings_map: &BindingsMap<'tcx>,
cs: Option<cleanup::ScopeId>) cs: Option<cleanup::ScopeId>)
-> Block<'blk, 'tcx> { -> Block<'blk, 'tcx> {
for (&ident, &binding_info) in bindings_map { for (&name, &binding_info) in bindings_map {
let (llval, aliases_other_state) = match binding_info.trmode { let (llval, aliases_other_state) = match binding_info.trmode {
// By value mut binding for a copy type: load from the ptr // By value mut binding for a copy type: load from the ptr
// into the matched value and copy to our alloca // into the matched value and copy to our alloca
@ -1021,7 +1021,7 @@ fn insert_lllocals<'blk, 'tcx>(mut bcx: Block<'blk, 'tcx>,
debug!("binding {} to {}", binding_info.id, bcx.val_to_string(llval)); debug!("binding {} to {}", binding_info.id, bcx.val_to_string(llval));
bcx.fcx.lllocals.borrow_mut().insert(binding_info.id, datum); bcx.fcx.lllocals.borrow_mut().insert(binding_info.id, datum);
debuginfo::create_match_binding_metadata(bcx, ident.name, binding_info); debuginfo::create_match_binding_metadata(bcx, name, binding_info);
} }
bcx bcx
} }
@ -1510,8 +1510,7 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &hir::Pat,
let reassigned = is_discr_reassigned(bcx, discr, body); let reassigned = is_discr_reassigned(bcx, discr, body);
let mut bindings_map = FnvHashMap(); let mut bindings_map = FnvHashMap();
pat_bindings(&tcx.def_map, &*pat, |bm, p_id, span, path1| { pat_bindings(&tcx.def_map, &*pat, |bm, p_id, span, path1| {
let ident = path1.node; let name = path1.node;
let name = ident.name;
let variable_ty = node_id_type(bcx, p_id); let variable_ty = node_id_type(bcx, p_id);
let llvariable_ty = type_of::type_of(ccx, variable_ty); let llvariable_ty = type_of::type_of(ccx, variable_ty);
let tcx = bcx.tcx(); let tcx = bcx.tcx();
@ -1543,7 +1542,7 @@ fn create_bindings_map<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pat: &hir::Pat,
trmode = TrByRef; trmode = TrByRef;
} }
}; };
bindings_map.insert(ident, BindingInfo { bindings_map.insert(name, BindingInfo {
llmatch: llmatch, llmatch: llmatch,
trmode: trmode, trmode: trmode,
id: p_id, id: p_id,
@ -1656,7 +1655,7 @@ pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
pat_bindings(&tcx.def_map, pat, |_, p_id, _, path1| { pat_bindings(&tcx.def_map, pat, |_, p_id, _, path1| {
let scope = cleanup::var_scope(tcx, p_id); let scope = cleanup::var_scope(tcx, p_id);
bcx = mk_binding_alloca( bcx = mk_binding_alloca(
bcx, p_id, path1.node.name, scope, (), bcx, p_id, path1.node, scope, (),
"_match::store_local::create_dummy_locals", "_match::store_local::create_dummy_locals",
|(), bcx, Datum { val: llval, ty, kind }| { |(), bcx, Datum { val: llval, ty, kind }| {
// Dummy-locals start out uninitialized, so set their // Dummy-locals start out uninitialized, so set their
@ -1693,11 +1692,11 @@ pub fn store_local<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// //
// In such cases, the more general path is unsafe, because // In such cases, the more general path is unsafe, because
// it assumes it is matching against a valid value. // it assumes it is matching against a valid value.
match simple_identifier(&*pat) { match simple_name(pat) {
Some(ident) => { Some(name) => {
let var_scope = cleanup::var_scope(tcx, local.id); let var_scope = cleanup::var_scope(tcx, local.id);
return mk_binding_alloca( return mk_binding_alloca(
bcx, pat.id, ident.name, var_scope, (), bcx, pat.id, name, var_scope, (),
"_match::store_local", "_match::store_local",
|(), bcx, Datum { val: v, .. }| expr::trans_into(bcx, &**init_expr, |(), bcx, Datum { val: v, .. }| expr::trans_into(bcx, &**init_expr,
expr::SaveIn(v))); expr::SaveIn(v)));

View file

@ -40,7 +40,7 @@ use middle::cfg;
use middle::def_id::{DefId, LOCAL_CRATE}; use middle::def_id::{DefId, LOCAL_CRATE};
use middle::lang_items::{LangItem, ExchangeMallocFnLangItem, StartFnLangItem}; use middle::lang_items::{LangItem, ExchangeMallocFnLangItem, StartFnLangItem};
use middle::weak_lang_items; use middle::weak_lang_items;
use middle::pat_util::simple_identifier; use middle::pat_util::simple_name;
use middle::subst::Substs; use middle::subst::Substs;
use middle::ty::{self, Ty, HasTypeFlags}; use middle::ty::{self, Ty, HasTypeFlags};
use rustc::front::map as hir_map; use rustc::front::map as hir_map;
@ -1447,10 +1447,10 @@ pub fn create_datums_for_fn_args<'a, 'tcx>(mut bcx: Block<'a, 'tcx>,
}; };
let pat = &*args[i].pat; let pat = &*args[i].pat;
bcx = if let Some(ident) = simple_identifier(&*pat) { bcx = if let Some(name) = simple_name(pat) {
// Generate nicer LLVM for the common case of fn a pattern // Generate nicer LLVM for the common case of fn a pattern
// like `x: T` // like `x: T`
set_value_name(arg_datum.val, &bcx.name(ident.name)); set_value_name(arg_datum.val, &bcx.name(name));
bcx.fcx.lllocals.borrow_mut().insert(pat.id, arg_datum); bcx.fcx.lllocals.borrow_mut().insert(pat.id, arg_datum);
bcx bcx
} else { } else {

View file

@ -168,7 +168,7 @@ pub fn return_type_is_void(ccx: &CrateContext, ty: Ty) -> bool {
/// Generates a unique symbol based off the name given. This is used to create /// Generates a unique symbol based off the name given. This is used to create
/// unique symbols for things like closures. /// unique symbols for things like closures.
pub fn gensym_name(name: &str) -> PathElem { pub fn gensym_name(name: &str) -> PathElem {
let num = token::gensym(name).usize(); let num = token::gensym(name).0;
// use one colon which will get translated to a period by the mangler, and // use one colon which will get translated to a period by the mangler, and
// we're guaranteed that `num` is globally unique for this crate. // we're guaranteed that `num` is globally unique for this crate.
PathName(token::gensym(&format!("{}:{}", name, num))) PathName(token::gensym(&format!("{}:{}", name, num)))
@ -829,7 +829,7 @@ pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> Va
!null_terminated as Bool); !null_terminated as Bool);
let gsym = token::gensym("str"); let gsym = token::gensym("str");
let sym = format!("str{}", gsym.usize()); let sym = format!("str{}", gsym.0);
let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{ let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{
cx.sess().bug(&format!("symbol `{}` is already defined", sym)); cx.sess().bug(&format!("symbol `{}` is already defined", sym));
}); });

View file

@ -116,7 +116,7 @@ fn addr_of_mut(ccx: &CrateContext,
// FIXME: this totally needs a better name generation scheme, perhaps a simple global // FIXME: this totally needs a better name generation scheme, perhaps a simple global
// counter? Also most other uses of gensym in trans. // counter? Also most other uses of gensym in trans.
let gsym = token::gensym("_"); let gsym = token::gensym("_");
let name = format!("{}{}", kind, gsym.usize()); let name = format!("{}{}", kind, gsym.0);
let gv = declare::define_global(ccx, &name[..], val_ty(cv)).unwrap_or_else(||{ let gv = declare::define_global(ccx, &name[..], val_ty(cv)).unwrap_or_else(||{
ccx.sess().bug(&format!("symbol `{}` is already defined", name)); ccx.sess().bug(&format!("symbol `{}` is already defined", name));
}); });

View file

@ -305,7 +305,7 @@ pub fn trans_loop<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &hir::Expr, expr: &hir::Expr,
opt_label: Option<ast::Ident>, opt_label: Option<ast::Name>,
exit: usize) exit: usize)
-> Block<'blk, 'tcx> { -> Block<'blk, 'tcx> {
let _icx = push_ctxt("trans_break_cont"); let _icx = push_ctxt("trans_break_cont");
@ -338,14 +338,14 @@ pub fn trans_break_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
pub fn trans_break<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pub fn trans_break<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &hir::Expr, expr: &hir::Expr,
label_opt: Option<ast::Ident>) label_opt: Option<ast::Name>)
-> Block<'blk, 'tcx> { -> Block<'blk, 'tcx> {
return trans_break_cont(bcx, expr, label_opt, cleanup::EXIT_BREAK); return trans_break_cont(bcx, expr, label_opt, cleanup::EXIT_BREAK);
} }
pub fn trans_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>, pub fn trans_cont<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
expr: &hir::Expr, expr: &hir::Expr,
label_opt: Option<ast::Ident>) label_opt: Option<ast::Name>)
-> Block<'blk, 'tcx> { -> Block<'blk, 'tcx> {
return trans_break_cont(bcx, expr, label_opt, cleanup::EXIT_LOOP); return trans_break_cont(bcx, expr, label_opt, cleanup::EXIT_LOOP);
} }

View file

@ -49,7 +49,7 @@ pub fn create_scope_map(cx: &CrateContext,
for arg in args { for arg in args {
pat_util::pat_bindings(def_map, &*arg.pat, |_, node_id, _, path1| { pat_util::pat_bindings(def_map, &*arg.pat, |_, node_id, _, path1| {
scope_stack.push(ScopeStackEntry { scope_metadata: fn_metadata, scope_stack.push(ScopeStackEntry { scope_metadata: fn_metadata,
name: Some(path1.node.name) }); name: Some(path1.node) });
scope_map.insert(node_id, fn_metadata); scope_map.insert(node_id, fn_metadata);
}) })
} }

View file

@ -1925,7 +1925,7 @@ pub fn create_local_var_metadata(bcx: Block, local: &hir::Local) {
let def_map = &cx.tcx().def_map; let def_map = &cx.tcx().def_map;
let locals = bcx.fcx.lllocals.borrow(); let locals = bcx.fcx.lllocals.borrow();
pat_util::pat_bindings(def_map, &*local.pat, |_, node_id, span, var_ident| { pat_util::pat_bindings(def_map, &*local.pat, |_, node_id, span, var_name| {
let datum = match locals.get(&node_id) { let datum = match locals.get(&node_id) {
Some(datum) => datum, Some(datum) => datum,
None => { None => {
@ -1943,7 +1943,7 @@ pub fn create_local_var_metadata(bcx: Block, local: &hir::Local) {
let scope_metadata = scope_metadata(bcx.fcx, node_id, span); let scope_metadata = scope_metadata(bcx.fcx, node_id, span);
declare_local(bcx, declare_local(bcx,
var_ident.node.name, var_name.node,
datum.ty, datum.ty,
scope_metadata, scope_metadata,
VariableAccess::DirectVariable { alloca: datum.val }, VariableAccess::DirectVariable { alloca: datum.val },
@ -2105,7 +2105,7 @@ pub fn create_argument_metadata(bcx: Block, arg: &hir::Arg) {
.fn_metadata; .fn_metadata;
let locals = bcx.fcx.lllocals.borrow(); let locals = bcx.fcx.lllocals.borrow();
pat_util::pat_bindings(def_map, &*arg.pat, |_, node_id, span, var_ident| { pat_util::pat_bindings(def_map, &*arg.pat, |_, node_id, span, var_name| {
let datum = match locals.get(&node_id) { let datum = match locals.get(&node_id) {
Some(v) => v, Some(v) => v,
None => { None => {
@ -2132,7 +2132,7 @@ pub fn create_argument_metadata(bcx: Block, arg: &hir::Arg) {
}; };
declare_local(bcx, declare_local(bcx,
var_ident.node.name, var_name.node,
datum.ty, datum.ty,
scope_metadata, scope_metadata,
VariableAccess::DirectVariable { alloca: datum.val }, VariableAccess::DirectVariable { alloca: datum.val },

View file

@ -963,10 +963,10 @@ fn trans_rvalue_stmt_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
match expr.node { match expr.node {
hir::ExprBreak(label_opt) => { hir::ExprBreak(label_opt) => {
controlflow::trans_break(bcx, expr, label_opt.map(|l| l.node)) controlflow::trans_break(bcx, expr, label_opt.map(|l| l.node.name))
} }
hir::ExprAgain(label_opt) => { hir::ExprAgain(label_opt) => {
controlflow::trans_cont(bcx, expr, label_opt.map(|l| l.node)) controlflow::trans_cont(bcx, expr, label_opt.map(|l| l.node.name))
} }
hir::ExprRet(ref ex) => { hir::ExprRet(ref ex) => {
// Check to see if the return expression itself is reachable. // Check to see if the return expression itself is reachable.
@ -1114,7 +1114,7 @@ fn trans_rvalue_dps_unadjusted<'blk, 'tcx>(bcx: Block<'blk, 'tcx>,
// trans. Shudder. // trans. Shudder.
fn make_field(field_name: &str, expr: P<hir::Expr>) -> hir::Field { fn make_field(field_name: &str, expr: P<hir::Expr>) -> hir::Field {
hir::Field { hir::Field {
name: codemap::dummy_spanned(token::str_to_ident(field_name).name), name: codemap::dummy_spanned(token::intern(field_name)),
expr: expr, expr: expr,
span: codemap::DUMMY_SP, span: codemap::DUMMY_SP,
} }

View file

@ -179,7 +179,7 @@ pub fn check_pat<'a, 'tcx>(pcx: &pat_ctxt<'a, 'tcx>,
// if there are multiple arms, make sure they all agree on // if there are multiple arms, make sure they all agree on
// what the type of the binding `x` ought to be // what the type of the binding `x` ought to be
let canon_id = *pcx.map.get(&path.node).unwrap(); let canon_id = *pcx.map.get(&path.node.name).unwrap();
if canon_id != pat.id { if canon_id != pat.id {
let ct = fcx.local_ty(pat.span, canon_id); let ct = fcx.local_ty(pat.span, canon_id);
demand::eqtype(fcx, pat.span, ct, typ); demand::eqtype(fcx, pat.span, ct, typ);

View file

@ -682,7 +682,7 @@ pub fn check_struct(ccx: &CrateCtxt, id: ast::NodeId, span: Span) {
} }
pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) { pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
debug!("check_item_type(it.id={}, it.ident={})", debug!("check_item_type(it.id={}, it.name={})",
it.id, it.id,
ccx.tcx.item_path_str(DefId::local(it.id))); ccx.tcx.item_path_str(DefId::local(it.id)));
let _indenter = indenter(); let _indenter = indenter();
@ -750,7 +750,7 @@ pub fn check_item_type<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
} }
pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) { pub fn check_item_body<'a,'tcx>(ccx: &CrateCtxt<'a,'tcx>, it: &'tcx hir::Item) {
debug!("check_item_body(it.id={}, it.ident={})", debug!("check_item_body(it.id={}, it.name={})",
it.id, it.id,
ccx.tcx.item_path_str(DefId::local(it.id))); ccx.tcx.item_path_str(DefId::local(it.id)));
let _indenter = indenter(); let _indenter = indenter();
@ -838,7 +838,7 @@ fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
Position::ArgumentNamed(s) if s == "Self" => (), Position::ArgumentNamed(s) if s == "Self" => (),
// So is `{A}` if A is a type parameter // So is `{A}` if A is a type parameter
Position::ArgumentNamed(s) => match types.iter().find(|t| { Position::ArgumentNamed(s) => match types.iter().find(|t| {
t.name == s t.name.as_str() == s
}) { }) {
Some(_) => (), Some(_) => (),
None => { None => {

View file

@ -55,7 +55,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
/// the types first. /// the types first.
fn check_item_well_formed(&mut self, item: &hir::Item) { fn check_item_well_formed(&mut self, item: &hir::Item) {
let ccx = self.ccx; let ccx = self.ccx;
debug!("check_item_well_formed(it.id={}, it.ident={})", debug!("check_item_well_formed(it.id={}, it.name={})",
item.id, item.id,
ccx.tcx.item_path_str(DefId::local(item.id))); ccx.tcx.item_path_str(DefId::local(item.id)));

View file

@ -61,7 +61,7 @@ impl<'ccx, 'tcx> CheckTypeWellFormedVisitor<'ccx, 'tcx> {
/// the types first. /// the types first.
fn check_item_well_formed(&mut self, item: &hir::Item) { fn check_item_well_formed(&mut self, item: &hir::Item) {
let ccx = self.ccx; let ccx = self.ccx;
debug!("check_item_well_formed(it.id={}, it.ident={})", debug!("check_item_well_formed(it.id={}, it.name={})",
item.id, item.id,
ccx.tcx.item_path_str(DefId::local(item.id))); ccx.tcx.item_path_str(DefId::local(item.id)));

View file

@ -699,12 +699,12 @@ fn convert_methods<'a,'tcx,'i,I>(ccx: &CrateCtxt<'a, 'tcx>,
rcvr_ty_generics, rcvr_ty_generics,
rcvr_ty_predicates); rcvr_ty_predicates);
for (sig, id, ident, vis, _span) in methods { for (sig, id, name, vis, _span) in methods {
convert_method(ccx, convert_method(ccx,
container, container,
sig, sig,
id, id,
ident, name,
vis, vis,
untransformed_rcvr_ty, untransformed_rcvr_ty,
rcvr_ty_generics, rcvr_ty_generics,

View file

@ -1583,7 +1583,7 @@ impl Clean<Type> for hir::Ty {
let mut trait_path = p.clone(); let mut trait_path = p.clone();
trait_path.segments.pop(); trait_path.segments.pop();
Type::QPath { Type::QPath {
name: p.segments.last().unwrap().identifier.clean(cx), name: p.segments.last().unwrap().identifier.name.clean(cx),
self_type: box qself.ty.clean(cx), self_type: box qself.ty.clean(cx),
trait_: box resolve_type(cx, trait_path.clean(cx), self.id) trait_: box resolve_type(cx, trait_path.clean(cx), self.id)
} }
@ -2044,7 +2044,7 @@ pub struct PathSegment {
impl Clean<PathSegment> for hir::PathSegment { impl Clean<PathSegment> for hir::PathSegment {
fn clean(&self, cx: &DocContext) -> PathSegment { fn clean(&self, cx: &DocContext) -> PathSegment {
PathSegment { PathSegment {
name: self.identifier.clean(cx), name: self.identifier.name.clean(cx),
params: self.parameters.clean(cx) params: self.parameters.clean(cx)
} }
} }
@ -2064,12 +2064,6 @@ fn path_to_string(p: &hir::Path) -> String {
s s
} }
impl Clean<String> for ast::Ident {
fn clean(&self, _: &DocContext) -> String {
self.to_string()
}
}
impl Clean<String> for ast::Name { impl Clean<String> for ast::Name {
fn clean(&self, _: &DocContext) -> String { fn clean(&self, _: &DocContext) -> String {
self.to_string() self.to_string()

View file

@ -67,40 +67,38 @@ use std::fmt;
use std::rc::Rc; use std::rc::Rc;
use serialize::{Encodable, Decodable, Encoder, Decoder}; use serialize::{Encodable, Decodable, Encoder, Decoder};
// FIXME #6993: in librustc, uses of "ident" should be replaced /// A name is a part of an identifier, representing a string or gensym. It's
// by just "Name". /// the result of interning.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Name(pub u32);
/// A SyntaxContext represents a chain of macro-expandings
/// and renamings. Each macro expansion corresponds to
/// a fresh u32. This u32 is a reference to a table stored
// in thread-local storage.
// The special value EMPTY_CTXT is used to indicate an empty
// syntax context.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct SyntaxContext(pub u32);
/// An identifier contains a Name (index into the interner /// An identifier contains a Name (index into the interner
/// table) and a SyntaxContext to track renaming and /// table) and a SyntaxContext to track renaming and
/// macro expansion per Flatt et al., "Macros /// macro expansion per Flatt et al., "Macros That Work Together"
/// That Work Together" #[derive(Clone, Copy, Eq, Hash)]
#[derive(Clone, Copy, Hash, PartialOrd, Eq, Ord)]
pub struct Ident { pub struct Ident {
pub name: Name, pub name: Name,
pub ctxt: SyntaxContext pub ctxt: SyntaxContext
} }
impl Ident { impl Name {
/// Construct an identifier with the given name and an empty context: pub fn as_str(self) -> token::InternedString {
pub fn new(name: Name) -> Ident { Ident {name: name, ctxt: EMPTY_CTXT}} token::InternedString::new_from_name(self)
}
impl fmt::Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}#{}", self.name, self.ctxt)
}
}
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.name, f)
} }
} }
impl fmt::Debug for Name { impl fmt::Debug for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Name(nm) = *self; write!(f, "{}({})", self, self.0)
write!(f, "{}({})", self, nm)
} }
} }
@ -110,6 +108,29 @@ impl fmt::Display for Name {
} }
} }
impl Encodable for Name {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.as_str())
}
}
impl Decodable for Name {
fn decode<D: Decoder>(d: &mut D) -> Result<Name, D::Error> {
Ok(token::intern(&try!(d.read_str())[..]))
}
}
pub const EMPTY_CTXT : SyntaxContext = SyntaxContext(0);
impl Ident {
pub fn new(name: Name, ctxt: SyntaxContext) -> Ident {
Ident {name: name, ctxt: ctxt}
}
pub fn with_empty_ctxt(name: Name) -> Ident {
Ident {name: name, ctxt: EMPTY_CTXT}
}
}
impl PartialEq for Ident { impl PartialEq for Ident {
fn eq(&self, other: &Ident) -> bool { fn eq(&self, other: &Ident) -> bool {
if self.ctxt == other.ctxt { if self.ctxt == other.ctxt {
@ -119,74 +140,27 @@ impl PartialEq for Ident {
// idents that have different contexts. You can't fix this without // idents that have different contexts. You can't fix this without
// knowing whether the comparison should be hygienic or non-hygienic. // knowing whether the comparison should be hygienic or non-hygienic.
// if it should be non-hygienic (most things are), just compare the // if it should be non-hygienic (most things are), just compare the
// 'name' fields of the idents. Or, even better, replace the idents // 'name' fields of the idents.
// with Name's.
// //
// On the other hand, if the comparison does need to be hygienic, // On the other hand, if the comparison does need to be hygienic,
// one example and its non-hygienic counterpart would be: // one example and its non-hygienic counterpart would be:
// syntax::parse::token::Token::mtwt_eq // syntax::parse::token::Token::mtwt_eq
// syntax::ext::tt::macro_parser::token_name_eq // syntax::ext::tt::macro_parser::token_name_eq
panic!("not allowed to compare these idents: {:?}, {:?}. \ panic!("idents with different contexts are compared with operator `==`: \
Probably related to issue \\#6993", self, other); {:?}, {:?}.", self, other);
} }
} }
} }
/// A SyntaxContext represents a chain of macro-expandings impl fmt::Debug for Ident {
/// and renamings. Each macro expansion corresponds to fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
/// a fresh u32 write!(f, "{}#{}", self.name, self.ctxt.0)
// I'm representing this syntax context as an index into
// a table, in order to work around a compiler bug
// that's causing unreleased memory to cause core dumps
// and also perhaps to save some work in destructor checks.
// the special uint '0' will be used to indicate an empty
// syntax context.
// this uint is a reference to a table stored in thread-local
// storage.
pub type SyntaxContext = u32;
pub const EMPTY_CTXT : SyntaxContext = 0;
pub const ILLEGAL_CTXT : SyntaxContext = 1;
/// A name is a part of an identifier, representing a string or gensym. It's
/// the result of interning.
#[derive(Eq, Ord, PartialEq, PartialOrd, Hash, Clone, Copy)]
pub struct Name(pub u32);
impl<T: AsRef<str>> PartialEq<T> for Name {
fn eq(&self, other: &T) -> bool {
self.as_str() == other.as_ref()
} }
} }
impl Name { impl fmt::Display for Ident {
pub fn as_str(&self) -> token::InternedString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
token::InternedString::new_from_name(*self) fmt::Display::fmt(&self.name, f)
}
pub fn usize(&self) -> usize {
let Name(nm) = *self;
nm as usize
}
pub fn ident(&self) -> Ident {
Ident { name: *self, ctxt: 0 }
}
}
/// A mark represents a unique id associated with a macro expansion
pub type Mrk = u32;
impl Encodable for Name {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.as_str())
}
}
impl Decodable for Name {
fn decode<D: Decoder>(d: &mut D) -> Result<Name, D::Error> {
Ok(token::intern(&try!(d.read_str())[..]))
} }
} }
@ -202,8 +176,8 @@ impl Decodable for Ident {
} }
} }
/// Function name (not all functions have names) /// A mark represents a unique id associated with a macro expansion
pub type FnIdent = Option<Ident>; pub type Mrk = u32;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime { pub struct Lifetime {
@ -841,19 +815,16 @@ pub enum Expr_ {
/// ///
/// This is desugared to a `match` expression. /// This is desugared to a `match` expression.
ExprIfLet(P<Pat>, P<Expr>, P<Block>, Option<P<Expr>>), ExprIfLet(P<Pat>, P<Expr>, P<Block>, Option<P<Expr>>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A while loop, with an optional label /// A while loop, with an optional label
/// ///
/// `'label: while expr { block }` /// `'label: while expr { block }`
ExprWhile(P<Expr>, P<Block>, Option<Ident>), ExprWhile(P<Expr>, P<Block>, Option<Ident>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A while-let loop, with an optional label /// A while-let loop, with an optional label
/// ///
/// `'label: while let pat = expr { block }` /// `'label: while let pat = expr { block }`
/// ///
/// This is desugared to a combination of `loop` and `match` expressions. /// This is desugared to a combination of `loop` and `match` expressions.
ExprWhileLet(P<Pat>, P<Expr>, P<Block>, Option<Ident>), ExprWhileLet(P<Pat>, P<Expr>, P<Block>, Option<Ident>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A for loop, with an optional label /// A for loop, with an optional label
/// ///
/// `'label: for pat in expr { block }` /// `'label: for pat in expr { block }`
@ -863,7 +834,6 @@ pub enum Expr_ {
/// Conditionless loop (can be exited with break, continue, or return) /// Conditionless loop (can be exited with break, continue, or return)
/// ///
/// `'label: loop { block }` /// `'label: loop { block }`
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
ExprLoop(P<Block>, Option<Ident>), ExprLoop(P<Block>, Option<Ident>),
/// A `match` block, with a source that indicates whether or not it is /// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind. /// the result of a desugaring, and if so, which kind.
@ -1223,13 +1193,6 @@ pub struct MutTy {
pub mutbl: Mutability, pub mutbl: Mutability,
} }
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TypeField {
pub ident: Ident,
pub mt: MutTy,
pub span: Span,
}
/// Represents a method's signature in a trait declaration, /// Represents a method's signature in a trait declaration,
/// or in an implementation. /// or in an implementation.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]

View file

@ -576,21 +576,21 @@ mod tests {
use ast::*; use ast::*;
use super::*; use super::*;
fn ident_to_segment(id : &Ident) -> PathSegment { fn ident_to_segment(id: Ident) -> PathSegment {
PathSegment {identifier: id.clone(), PathSegment {identifier: id,
parameters: PathParameters::none()} parameters: PathParameters::none()}
} }
#[test] fn idents_name_eq_test() { #[test] fn idents_name_eq_test() {
assert!(segments_name_eq( assert!(segments_name_eq(
&[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] &[Ident::new(Name(3),SyntaxContext(4)), Ident::new(Name(78),SyntaxContext(82))]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>(), .iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>(),
&[Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}] &[Ident::new(Name(3),SyntaxContext(104)), Ident::new(Name(78),SyntaxContext(182))]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>())); .iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>()));
assert!(!segments_name_eq( assert!(!segments_name_eq(
&[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] &[Ident::new(Name(3),SyntaxContext(4)), Ident::new(Name(78),SyntaxContext(82))]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>(), .iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>(),
&[Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}] &[Ident::new(Name(3),SyntaxContext(104)), Ident::new(Name(77),SyntaxContext(182))]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>())); .iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>()));
} }
} }

View file

@ -1083,7 +1083,6 @@ pub struct MalformedCodemapPositions {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use std::rc::Rc;
#[test] #[test]
fn t1 () { fn t1 () {

View file

@ -842,7 +842,7 @@ pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::{EmitterWriter, Level}; use super::{EmitterWriter, Level};
use codemap::{mk_sp, CodeMap, BytePos}; use codemap::{mk_sp, CodeMap};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::io::{self, Write}; use std::io::{self, Write};
use std::str::from_utf8; use std::str::from_utf8;

View file

@ -138,7 +138,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
)); ));
} }
}); });
let sym = Ident::new(token::gensym(&format!( let sym = Ident::with_empty_ctxt(token::gensym(&format!(
"__register_diagnostic_{}", code "__register_diagnostic_{}", code
))); )));
MacEager::items(SmallVector::many(vec![ MacEager::items(SmallVector::many(vec![

View file

@ -28,7 +28,7 @@ pub fn entry_point_type(item: &Item, depth: usize) -> EntryPointType {
EntryPointType::Start EntryPointType::Start
} else if attr::contains_name(&item.attrs, "main") { } else if attr::contains_name(&item.attrs, "main") {
EntryPointType::MainAttr EntryPointType::MainAttr
} else if item.ident.name == "main" { } else if item.ident.name.as_str() == "main" {
if depth == 1 { if depth == 1 {
// This is a top-level function so can be 'main' // This is a top-level function so can be 'main'
EntryPointType::MainNamed EntryPointType::MainNamed

View file

@ -646,7 +646,7 @@ impl<'a> ExtCtxt<'a> {
loop { loop {
if self.codemap().with_expn_info(expn_id, |info| { if self.codemap().with_expn_info(expn_id, |info| {
info.map_or(None, |i| { info.map_or(None, |i| {
if i.callee.name() == "include" { if i.callee.name().as_str() == "include" {
// Stop going up the backtrace once include! is encountered // Stop going up the backtrace once include! is encountered
return None; return None;
} }
@ -899,9 +899,9 @@ impl SyntaxEnv {
unreachable!() unreachable!()
} }
pub fn find(&self, k: &Name) -> Option<Rc<SyntaxExtension>> { pub fn find(&self, k: Name) -> Option<Rc<SyntaxExtension>> {
for frame in self.chain.iter().rev() { for frame in self.chain.iter().rev() {
match frame.map.get(k) { match frame.map.get(&k) {
Some(v) => return Some(v.clone()), Some(v) => return Some(v.clone()),
None => {} None => {}
} }

View file

@ -73,7 +73,6 @@ pub trait AstBuilder {
fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ; fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ; fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField;
fn typaram(&self, fn typaram(&self,
span: Span, span: Span,
@ -443,14 +442,6 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
Vec::new())) Vec::new()))
} }
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField {
ast::TypeField {
ident: name,
mt: ast::MutTy { ty: ty, mutbl: ast::MutImmutable },
span: span,
}
}
fn ty_infer(&self, span: Span) -> P<ast::Ty> { fn ty_infer(&self, span: Span) -> P<ast::Ty> {
self.ty(span, ast::TyInfer) self.ty(span, ast::TyInfer)
} }

View file

@ -524,7 +524,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac,
return None; return None;
} }
let extname = pth.segments[0].identifier.name; let extname = pth.segments[0].identifier.name;
match fld.cx.syntax_env.find(&extname) { match fld.cx.syntax_env.find(extname) {
None => { None => {
fld.cx.span_err( fld.cx.span_err(
pth.span, pth.span,
@ -593,7 +593,7 @@ fn expand_loop_block(loop_block: P<Block>,
fld: &mut MacroExpander) -> (P<Block>, Option<Ident>) { fld: &mut MacroExpander) -> (P<Block>, Option<Ident>) {
match opt_ident { match opt_ident {
Some(label) => { Some(label) => {
let new_label = fresh_name(&label); let new_label = fresh_name(label);
let rename = (label, new_label); let rename = (label, new_label);
// The rename *must not* be added to the pending list of current // The rename *must not* be added to the pending list of current
@ -689,7 +689,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
let fm = fresh_mark(); let fm = fresh_mark();
let items = { let items = {
let expanded = match fld.cx.syntax_env.find(&extname) { let expanded = match fld.cx.syntax_env.find(extname) {
None => { None => {
fld.cx.span_err(path_span, fld.cx.span_err(path_span,
&format!("macro undefined: '{}!'", &format!("macro undefined: '{}!'",
@ -892,7 +892,7 @@ fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroE
// generate fresh names, push them to a new pending list // generate fresh names, push them to a new pending list
let idents = pattern_bindings(&*expanded_pat); let idents = pattern_bindings(&*expanded_pat);
let mut new_pending_renames = let mut new_pending_renames =
idents.iter().map(|ident| (*ident, fresh_name(ident))).collect(); idents.iter().map(|ident| (*ident, fresh_name(*ident))).collect();
// rewrite the pattern using the new names (the old // rewrite the pattern using the new names (the old
// ones have already been applied): // ones have already been applied):
let rewritten_pat = { let rewritten_pat = {
@ -951,7 +951,7 @@ fn expand_arm(arm: ast::Arm, fld: &mut MacroExpander) -> ast::Arm {
// all of the pats must have the same set of bindings, so use the // all of the pats must have the same set of bindings, so use the
// first one to extract them and generate new names: // first one to extract them and generate new names:
let idents = pattern_bindings(&*expanded_pats[0]); let idents = pattern_bindings(&*expanded_pats[0]);
let new_renames = idents.into_iter().map(|id| (id, fresh_name(&id))).collect(); let new_renames = idents.into_iter().map(|id| (id, fresh_name(id))).collect();
// apply the renaming, but only to the PatIdents: // apply the renaming, but only to the PatIdents:
let mut rename_pats_fld = PatIdentRenamer{renames:&new_renames}; let mut rename_pats_fld = PatIdentRenamer{renames:&new_renames};
let rewritten_pats = expanded_pats.move_map(|pat| rename_pats_fld.fold_pat(pat)); let rewritten_pats = expanded_pats.move_map(|pat| rename_pats_fld.fold_pat(pat));
@ -1061,7 +1061,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
return DummyResult::raw_pat(span); return DummyResult::raw_pat(span);
} }
let extname = pth.segments[0].identifier.name; let extname = pth.segments[0].identifier.name;
let marked_after = match fld.cx.syntax_env.find(&extname) { let marked_after = match fld.cx.syntax_env.find(extname) {
None => { None => {
fld.cx.span_err(pth.span, fld.cx.span_err(pth.span,
&format!("macro undefined: '{}!'", &format!("macro undefined: '{}!'",
@ -1134,10 +1134,7 @@ pub struct IdentRenamer<'a> {
impl<'a> Folder for IdentRenamer<'a> { impl<'a> Folder for IdentRenamer<'a> {
fn fold_ident(&mut self, id: Ident) -> Ident { fn fold_ident(&mut self, id: Ident) -> Ident {
Ident { Ident::new(id.name, mtwt::apply_renames(self.renames, id.ctxt))
name: id.name,
ctxt: mtwt::apply_renames(self.renames, id.ctxt),
}
} }
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
fold::noop_fold_mac(mac, self) fold::noop_fold_mac(mac, self)
@ -1161,8 +1158,8 @@ impl<'a> Folder for PatIdentRenamer<'a> {
pat.map(|ast::Pat {id, node, span}| match node { pat.map(|ast::Pat {id, node, span}| match node {
ast::PatIdent(binding_mode, Spanned{span: sp, node: ident}, sub) => { ast::PatIdent(binding_mode, Spanned{span: sp, node: ident}, sub) => {
let new_ident = Ident{name: ident.name, let new_ident = Ident::new(ident.name,
ctxt: mtwt::apply_renames(self.renames, ident.ctxt)}; mtwt::apply_renames(self.renames, ident.ctxt));
let new_node = let new_node =
ast::PatIdent(binding_mode, ast::PatIdent(binding_mode,
Spanned{span: self.new_span(sp), node: new_ident}, Spanned{span: self.new_span(sp), node: new_ident},
@ -1254,7 +1251,7 @@ macro_rules! partition {
fld: &MacroExpander) fld: &MacroExpander)
-> (Vec<ast::Attribute>, Vec<ast::Attribute>) { -> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
attrs.iter().cloned().partition(|attr| { attrs.iter().cloned().partition(|attr| {
match fld.cx.syntax_env.find(&intern(&attr.name())) { match fld.cx.syntax_env.find(intern(&attr.name())) {
Some(rc) => match *rc { Some(rc) => match *rc {
$variant(..) => true, $variant(..) => true,
_ => false _ => false
@ -1276,7 +1273,7 @@ fn expand_decorators(a: Annotatable,
{ {
for attr in a.attrs() { for attr in a.attrs() {
let mname = intern(&attr.name()); let mname = intern(&attr.name());
match fld.cx.syntax_env.find(&mname) { match fld.cx.syntax_env.find(mname) {
Some(rc) => match *rc { Some(rc) => match *rc {
MultiDecorator(ref dec) => { MultiDecorator(ref dec) => {
attr::mark_used(&attr); attr::mark_used(&attr);
@ -1327,7 +1324,7 @@ fn expand_item_multi_modifier(mut it: Annotatable,
for attr in &modifiers { for attr in &modifiers {
let mname = intern(&attr.name()); let mname = intern(&attr.name());
match fld.cx.syntax_env.find(&mname) { match fld.cx.syntax_env.find(mname) {
Some(rc) => match *rc { Some(rc) => match *rc {
MultiModifier(ref mac) => { MultiModifier(ref mac) => {
attr::mark_used(attr); attr::mark_used(attr);
@ -1407,7 +1404,7 @@ fn expand_and_rename_fn_decl_and_block(fn_decl: P<ast::FnDecl>, block: P<ast::Bl
let expanded_decl = fld.fold_fn_decl(fn_decl); let expanded_decl = fld.fold_fn_decl(fn_decl);
let idents = fn_decl_arg_bindings(&*expanded_decl); let idents = fn_decl_arg_bindings(&*expanded_decl);
let renames = let renames =
idents.iter().map(|id : &ast::Ident| (*id,fresh_name(id))).collect(); idents.iter().map(|id| (*id,fresh_name(*id))).collect();
// first, a renamer for the PatIdents, for the fn_decl: // first, a renamer for the PatIdents, for the fn_decl:
let mut rename_pat_fld = PatIdentRenamer{renames: &renames}; let mut rename_pat_fld = PatIdentRenamer{renames: &renames};
let rewritten_fn_decl = rename_pat_fld.fold_fn_decl(expanded_decl); let rewritten_fn_decl = rename_pat_fld.fold_fn_decl(expanded_decl);
@ -1628,10 +1625,7 @@ struct Marker { mark: Mrk }
impl Folder for Marker { impl Folder for Marker {
fn fold_ident(&mut self, id: Ident) -> Ident { fn fold_ident(&mut self, id: Ident) -> Ident {
ast::Ident { ast::Ident::new(id.name, mtwt::apply_mark(self.mark, id.ctxt))
name: id.name,
ctxt: mtwt::apply_mark(self.mark, id.ctxt)
}
} }
fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac { fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac {
Spanned { Spanned {
@ -2104,7 +2098,7 @@ foo_module!();
// find the xx binding // find the xx binding
let bindings = crate_bindings(&cr); let bindings = crate_bindings(&cr);
let cxbinds: Vec<&ast::Ident> = let cxbinds: Vec<&ast::Ident> =
bindings.iter().filter(|b| b.name == "xx").collect(); bindings.iter().filter(|b| b.name.as_str() == "xx").collect();
let cxbinds: &[&ast::Ident] = &cxbinds[..]; let cxbinds: &[&ast::Ident] = &cxbinds[..];
let cxbind = match (cxbinds.len(), cxbinds.get(0)) { let cxbind = match (cxbinds.len(), cxbinds.get(0)) {
(1, Some(b)) => *b, (1, Some(b)) => *b,
@ -2116,7 +2110,7 @@ foo_module!();
// the xx binding should bind all of the xx varrefs: // the xx binding should bind all of the xx varrefs:
for (idx,v) in varrefs.iter().filter(|p| { for (idx,v) in varrefs.iter().filter(|p| {
p.segments.len() == 1 p.segments.len() == 1
&& p.segments[0].identifier.name == "xx" && p.segments[0].identifier.name.as_str() == "xx"
}).enumerate() { }).enumerate() {
if mtwt::resolve(v.segments[0].identifier) != resolved_binding { if mtwt::resolve(v.segments[0].identifier) != resolved_binding {
println!("uh oh, xx binding didn't match xx varref:"); println!("uh oh, xx binding didn't match xx varref:");

View file

@ -35,7 +35,7 @@ use std::collections::HashMap;
pub struct SCTable { pub struct SCTable {
table: RefCell<Vec<SyntaxContext_>>, table: RefCell<Vec<SyntaxContext_>>,
mark_memo: RefCell<HashMap<(SyntaxContext,Mrk),SyntaxContext>>, mark_memo: RefCell<HashMap<(SyntaxContext,Mrk),SyntaxContext>>,
rename_memo: RefCell<HashMap<(SyntaxContext,Ident,Name),SyntaxContext>>, rename_memo: RefCell<HashMap<(SyntaxContext,Name,SyntaxContext,Name),SyntaxContext>>,
} }
#[derive(PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy, Clone)] #[derive(PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy, Clone)]
@ -66,8 +66,9 @@ pub fn apply_mark(m: Mrk, ctxt: SyntaxContext) -> SyntaxContext {
/// Extend a syntax context with a given mark and sctable (explicit memoization) /// Extend a syntax context with a given mark and sctable (explicit memoization)
fn apply_mark_internal(m: Mrk, ctxt: SyntaxContext, table: &SCTable) -> SyntaxContext { fn apply_mark_internal(m: Mrk, ctxt: SyntaxContext, table: &SCTable) -> SyntaxContext {
let key = (ctxt, m); let key = (ctxt, m);
* table.mark_memo.borrow_mut().entry(key) *table.mark_memo.borrow_mut().entry(key).or_insert_with(|| {
.or_insert_with(|| idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt))) SyntaxContext(idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt)))
})
} }
/// Extend a syntax context with a given rename /// Extend a syntax context with a given rename
@ -81,10 +82,11 @@ fn apply_rename_internal(id: Ident,
to: Name, to: Name,
ctxt: SyntaxContext, ctxt: SyntaxContext,
table: &SCTable) -> SyntaxContext { table: &SCTable) -> SyntaxContext {
let key = (ctxt, id, to); let key = (ctxt, id.name, id.ctxt, to);
* table.rename_memo.borrow_mut().entry(key) *table.rename_memo.borrow_mut().entry(key).or_insert_with(|| {
.or_insert_with(|| idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt))) SyntaxContext(idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt)))
})
} }
/// Apply a list of renamings to a context /// Apply a list of renamings to a context
@ -185,20 +187,20 @@ fn resolve_internal(id: Ident,
} }
let resolved = { let resolved = {
let result = (*table.table.borrow())[id.ctxt as usize]; let result = (*table.table.borrow())[id.ctxt.0 as usize];
match result { match result {
EmptyCtxt => id.name, EmptyCtxt => id.name,
// ignore marks here: // ignore marks here:
Mark(_,subctxt) => Mark(_,subctxt) =>
resolve_internal(Ident{name:id.name, ctxt: subctxt}, resolve_internal(Ident::new(id.name, subctxt),
table, resolve_table), table, resolve_table),
// do the rename if necessary: // do the rename if necessary:
Rename(Ident{name, ctxt}, toname, subctxt) => { Rename(Ident{name, ctxt}, toname, subctxt) => {
let resolvedfrom = let resolvedfrom =
resolve_internal(Ident{name:name, ctxt:ctxt}, resolve_internal(Ident::new(name, ctxt),
table, resolve_table); table, resolve_table);
let resolvedthis = let resolvedthis =
resolve_internal(Ident{name:id.name, ctxt:subctxt}, resolve_internal(Ident::new(id.name, subctxt),
table, resolve_table); table, resolve_table);
if (resolvedthis == resolvedfrom) if (resolvedthis == resolvedfrom)
&& (marksof_internal(ctxt, resolvedthis, table) && (marksof_internal(ctxt, resolvedthis, table)
@ -229,7 +231,7 @@ fn marksof_internal(ctxt: SyntaxContext,
let mut result = Vec::new(); let mut result = Vec::new();
let mut loopvar = ctxt; let mut loopvar = ctxt;
loop { loop {
let table_entry = (*table.table.borrow())[loopvar as usize]; let table_entry = (*table.table.borrow())[loopvar.0 as usize];
match table_entry { match table_entry {
EmptyCtxt => { EmptyCtxt => {
return result; return result;
@ -256,7 +258,7 @@ fn marksof_internal(ctxt: SyntaxContext,
/// FAILS when outside is not a mark. /// FAILS when outside is not a mark.
pub fn outer_mark(ctxt: SyntaxContext) -> Mrk { pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
with_sctable(|sctable| { with_sctable(|sctable| {
match (*sctable.table.borrow())[ctxt as usize] { match (*sctable.table.borrow())[ctxt.0 as usize] {
Mark(mrk, _) => mrk, Mark(mrk, _) => mrk,
_ => panic!("can't retrieve outer mark when outside is not a mark") _ => panic!("can't retrieve outer mark when outside is not a mark")
} }
@ -302,7 +304,7 @@ mod tests {
} }
fn id(n: u32, s: SyntaxContext) -> Ident { fn id(n: u32, s: SyntaxContext) -> Ident {
Ident {name: Name(n), ctxt: s} Ident::new(Name(n), s)
} }
// because of the SCTable, I now need a tidy way of // because of the SCTable, I now need a tidy way of
@ -328,7 +330,7 @@ mod tests {
let mut result = Vec::new(); let mut result = Vec::new();
loop { loop {
let table = table.table.borrow(); let table = table.table.borrow();
match (*table)[sc as usize] { match (*table)[sc.0 as usize] {
EmptyCtxt => {return result;}, EmptyCtxt => {return result;},
Mark(mrk,tail) => { Mark(mrk,tail) => {
result.push(M(mrk)); result.push(M(mrk));
@ -349,15 +351,15 @@ mod tests {
fn test_unfold_refold(){ fn test_unfold_refold(){
let mut t = new_sctable_internal(); let mut t = new_sctable_internal();
let test_sc = vec!(M(3),R(id(101,0),Name(14)),M(9)); let test_sc = vec!(M(3),R(id(101,EMPTY_CTXT),Name(14)),M(9));
assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4); assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),SyntaxContext(4));
{ {
let table = t.table.borrow(); let table = t.table.borrow();
assert!((*table)[2] == Mark(9,0)); assert!((*table)[2] == Mark(9,EMPTY_CTXT));
assert!((*table)[3] == Rename(id(101,0),Name(14),2)); assert!((*table)[3] == Rename(id(101,EMPTY_CTXT),Name(14),SyntaxContext(2)));
assert!((*table)[4] == Mark(3,3)); assert!((*table)[4] == Mark(3,SyntaxContext(3)));
} }
assert_eq!(refold_test_sc(4,&t),test_sc); assert_eq!(refold_test_sc(SyntaxContext(4),&t),test_sc);
} }
// extend a syntax context with a sequence of marks given // extend a syntax context with a sequence of marks given
@ -371,11 +373,11 @@ mod tests {
#[test] fn unfold_marks_test() { #[test] fn unfold_marks_test() {
let mut t = new_sctable_internal(); let mut t = new_sctable_internal();
assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3); assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),SyntaxContext(3));
{ {
let table = t.table.borrow(); let table = t.table.borrow();
assert!((*table)[2] == Mark(7,0)); assert!((*table)[2] == Mark(7,EMPTY_CTXT));
assert!((*table)[3] == Mark(3,2)); assert!((*table)[3] == Mark(3,SyntaxContext(2)));
} }
} }
@ -396,7 +398,7 @@ mod tests {
assert_eq! (marksof_internal (ans, stopname,&t), [16]);} assert_eq! (marksof_internal (ans, stopname,&t), [16]);}
// rename where stop doesn't match: // rename where stop doesn't match:
{ let chain = vec!(M(9), { let chain = vec!(M(9),
R(id(name1.usize() as u32, R(id(name1.0,
apply_mark_internal (4, EMPTY_CTXT,&mut t)), apply_mark_internal (4, EMPTY_CTXT,&mut t)),
Name(100101102)), Name(100101102)),
M(14)); M(14));
@ -405,7 +407,7 @@ mod tests {
// rename where stop does match // rename where stop does match
{ let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t); { let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t);
let chain = vec!(M(9), let chain = vec!(M(9),
R(id(name1.usize() as u32, name1sc), R(id(name1.0, name1sc),
stopname), stopname),
M(14)); M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t); let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
@ -474,10 +476,10 @@ mod tests {
#[test] #[test]
fn hashing_tests () { fn hashing_tests () {
let mut t = new_sctable_internal(); let mut t = new_sctable_internal();
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),2); assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),SyntaxContext(2));
assert_eq!(apply_mark_internal(13,EMPTY_CTXT,&mut t),3); assert_eq!(apply_mark_internal(13,EMPTY_CTXT,&mut t),SyntaxContext(3));
// using the same one again should result in the same index: // using the same one again should result in the same index:
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),2); assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),SyntaxContext(2));
// I'm assuming that the rename table will behave the same.... // I'm assuming that the rename table will behave the same....
} }
@ -496,10 +498,10 @@ mod tests {
#[test] #[test]
fn new_resolves_test() { fn new_resolves_test() {
let renames = vec!((Ident{name:Name(23),ctxt:EMPTY_CTXT},Name(24)), let renames = vec!((Ident::with_empty_ctxt(Name(23)),Name(24)),
(Ident{name:Name(29),ctxt:EMPTY_CTXT},Name(29))); (Ident::with_empty_ctxt(Name(29)),Name(29)));
let new_ctxt1 = apply_renames(&renames,EMPTY_CTXT); let new_ctxt1 = apply_renames(&renames,EMPTY_CTXT);
assert_eq!(resolve(Ident{name:Name(23),ctxt:new_ctxt1}),Name(24)); assert_eq!(resolve(Ident::new(Name(23),new_ctxt1)),Name(24));
assert_eq!(resolve(Ident{name:Name(29),ctxt:new_ctxt1}),Name(29)); assert_eq!(resolve(Ident::new(Name(29),new_ctxt1)),Name(29));
} }
} }

View file

@ -464,7 +464,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
($name: expr, $suffix: expr, $($args: expr),*) => {{ ($name: expr, $suffix: expr, $($args: expr),*) => {{
let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]); let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]);
let suffix = match $suffix { let suffix = match $suffix {
Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::new(name))), Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::with_empty_ctxt(name))),
None => cx.expr_none(sp) None => cx.expr_none(sp)
}; };
cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix]) cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix])
@ -489,31 +489,32 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
} }
token::Literal(token::Byte(i), suf) => { token::Literal(token::Byte(i), suf) => {
let e_byte = mk_name(cx, sp, i.ident()); let e_byte = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
return mk_lit!("Byte", suf, e_byte); return mk_lit!("Byte", suf, e_byte);
} }
token::Literal(token::Char(i), suf) => { token::Literal(token::Char(i), suf) => {
let e_char = mk_name(cx, sp, i.ident()); let e_char = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
return mk_lit!("Char", suf, e_char); return mk_lit!("Char", suf, e_char);
} }
token::Literal(token::Integer(i), suf) => { token::Literal(token::Integer(i), suf) => {
let e_int = mk_name(cx, sp, i.ident()); let e_int = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
return mk_lit!("Integer", suf, e_int); return mk_lit!("Integer", suf, e_int);
} }
token::Literal(token::Float(fident), suf) => { token::Literal(token::Float(fident), suf) => {
let e_fident = mk_name(cx, sp, fident.ident()); let e_fident = mk_name(cx, sp, ast::Ident::with_empty_ctxt(fident));
return mk_lit!("Float", suf, e_fident); return mk_lit!("Float", suf, e_fident);
} }
token::Literal(token::Str_(ident), suf) => { token::Literal(token::Str_(ident), suf) => {
return mk_lit!("Str_", suf, mk_name(cx, sp, ident.ident())) return mk_lit!("Str_", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))
} }
token::Literal(token::StrRaw(ident, n), suf) => { token::Literal(token::StrRaw(ident, n), suf) => {
return mk_lit!("StrRaw", suf, mk_name(cx, sp, ident.ident()), cx.expr_usize(sp, n)) return mk_lit!("StrRaw", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)),
cx.expr_usize(sp, n))
} }
token::Ident(ident, style) => { token::Ident(ident, style) => {
@ -535,7 +536,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
token::DocComment(ident) => { token::DocComment(ident) => {
return cx.expr_call(sp, return cx.expr_call(sp,
mk_token_path(cx, sp, "DocComment"), mk_token_path(cx, sp, "DocComment"),
vec!(mk_name(cx, sp, ident.ident()))); vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))));
} }
token::MatchNt(name, kind, namep, kindp) => { token::MatchNt(name, kind, namep, kindp) => {

View file

@ -79,7 +79,7 @@ pub use self::ParseResult::*;
use self::TokenTreeOrTokenTreeVec::*; use self::TokenTreeOrTokenTreeVec::*;
use ast; use ast;
use ast::{TokenTree, Ident}; use ast::{TokenTree, Name};
use ast::{TtDelimited, TtSequence, TtToken}; use ast::{TtDelimited, TtSequence, TtToken};
use codemap::{BytePos, mk_sp, Span}; use codemap::{BytePos, mk_sp, Span};
use codemap; use codemap;
@ -202,9 +202,9 @@ pub enum NamedMatch {
} }
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-> HashMap<Ident, Rc<NamedMatch>> { -> HashMap<Name, Rc<NamedMatch>> {
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>], fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) { ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize) {
match m { match m {
&TtSequence(_, ref seq) => { &TtSequence(_, ref seq) => {
for next_m in &seq.tts { for next_m in &seq.tts {
@ -217,7 +217,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
} }
} }
&TtToken(sp, MatchNt(bind_name, _, _, _)) => { &TtToken(sp, MatchNt(bind_name, _, _, _)) => {
match ret_val.entry(bind_name) { match ret_val.entry(bind_name.name) {
Vacant(spot) => { Vacant(spot) => {
spot.insert(res[*idx].clone()); spot.insert(res[*idx].clone());
*idx += 1; *idx += 1;
@ -246,7 +246,7 @@ pub enum ParseResult<T> {
Error(codemap::Span, String) Error(codemap::Span, String)
} }
pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>; pub type NamedParseResult = ParseResult<HashMap<Name, Rc<NamedMatch>>>;
pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>; pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
/// Perform a token equality check, ignoring syntax context (that is, an /// Perform a token equality check, ignoring syntax context (that is, an

View file

@ -282,7 +282,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
}; };
// Extract the arguments: // Extract the arguments:
let lhses = match **argument_map.get(&lhs_nm).unwrap() { let lhses = match **argument_map.get(&lhs_nm.name).unwrap() {
MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(), MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
_ => cx.span_bug(def.span, "wrong-structured lhs") _ => cx.span_bug(def.span, "wrong-structured lhs")
}; };
@ -291,7 +291,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
check_lhs_nt_follows(cx, &**lhs, def.span); check_lhs_nt_follows(cx, &**lhs, def.span);
} }
let rhses = match **argument_map.get(&rhs_nm).unwrap() { let rhses = match **argument_map.get(&rhs_nm.name).unwrap() {
MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(), MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
_ => cx.span_bug(def.span, "wrong-structured rhs") _ => cx.span_bug(def.span, "wrong-structured rhs")
}; };
@ -510,14 +510,14 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
"pat" => { "pat" => {
match *tok { match *tok {
FatArrow | Comma | Eq => Ok(true), FatArrow | Comma | Eq => Ok(true),
Ident(i, _) if i.name == "if" || i.name == "in" => Ok(true), Ident(i, _) if i.name.as_str() == "if" || i.name.as_str() == "in" => Ok(true),
_ => Ok(false) _ => Ok(false)
} }
}, },
"path" | "ty" => { "path" | "ty" => {
match *tok { match *tok {
Comma | FatArrow | Colon | Eq | Gt | Semi => Ok(true), Comma | FatArrow | Colon | Eq | Gt | Semi => Ok(true),
Ident(i, _) if i.name == "as" => Ok(true), Ident(i, _) if i.name.as_str() == "as" => Ok(true),
_ => Ok(false) _ => Ok(false)
} }
}, },

View file

@ -10,7 +10,7 @@
use self::LockstepIterSize::*; use self::LockstepIterSize::*;
use ast; use ast;
use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident}; use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident, Name};
use codemap::{Span, DUMMY_SP}; use codemap::{Span, DUMMY_SP};
use diagnostic::SpanHandler; use diagnostic::SpanHandler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
@ -38,7 +38,7 @@ pub struct TtReader<'a> {
/// the unzipped tree: /// the unzipped tree:
stack: Vec<TtFrame>, stack: Vec<TtFrame>,
/* for MBE-style macro transcription */ /* for MBE-style macro transcription */
interpolations: HashMap<Ident, Rc<NamedMatch>>, interpolations: HashMap<Name, Rc<NamedMatch>>,
imported_from: Option<Ident>, imported_from: Option<Ident>,
// Some => return imported_from as the next token // Some => return imported_from as the next token
@ -56,7 +56,7 @@ pub struct TtReader<'a> {
/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be None. /// (and should) be None.
pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>, interp: Option<HashMap<Name, Rc<NamedMatch>>>,
imported_from: Option<Ident>, imported_from: Option<Ident>,
src: Vec<ast::TokenTree>) src: Vec<ast::TokenTree>)
-> TtReader<'a> { -> TtReader<'a> {
@ -70,7 +70,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be None. /// (and should) be None.
pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler, pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>, interp: Option<HashMap<Name, Rc<NamedMatch>>>,
imported_from: Option<Ident>, imported_from: Option<Ident>,
src: Vec<ast::TokenTree>, src: Vec<ast::TokenTree>,
desugar_doc_comments: bool) desugar_doc_comments: bool)
@ -117,7 +117,7 @@ fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<Name
} }
fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> { fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
let matched_opt = r.interpolations.get(&name).cloned(); let matched_opt = r.interpolations.get(&name.name).cloned();
matched_opt.map(|s| lookup_cur_matched_by_matched(r, s)) matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
} }

View file

@ -35,7 +35,7 @@ use codemap::{CodeMap, Span};
use diagnostic::SpanHandler; use diagnostic::SpanHandler;
use visit; use visit;
use visit::{FnKind, Visitor}; use visit::{FnKind, Visitor};
use parse::token::{self, InternedString}; use parse::token::InternedString;
use std::ascii::AsciiExt; use std::ascii::AsciiExt;
use std::cmp; use std::cmp;
@ -673,7 +673,7 @@ struct MacroVisitor<'a> {
impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> { impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> {
fn visit_mac(&mut self, mac: &ast::Mac) { fn visit_mac(&mut self, mac: &ast::Mac) {
let path = &mac.node.path; let path = &mac.node.path;
let id = path.segments.last().unwrap().identifier; let name = path.segments.last().unwrap().identifier.name.as_str();
// Issue 22234: If you add a new case here, make sure to also // Issue 22234: If you add a new case here, make sure to also
// add code to catch the macro during or after expansion. // add code to catch the macro during or after expansion.
@ -683,19 +683,19 @@ impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> {
// catch uses of these macros within conditionally-compiled // catch uses of these macros within conditionally-compiled
// code, e.g. `#[cfg]`-guarded functions. // code, e.g. `#[cfg]`-guarded functions.
if id == token::str_to_ident("asm") { if name == "asm" {
self.context.gate_feature("asm", path.span, EXPLAIN_ASM); self.context.gate_feature("asm", path.span, EXPLAIN_ASM);
} }
else if id == token::str_to_ident("log_syntax") { else if name == "log_syntax" {
self.context.gate_feature("log_syntax", path.span, EXPLAIN_LOG_SYNTAX); self.context.gate_feature("log_syntax", path.span, EXPLAIN_LOG_SYNTAX);
} }
else if id == token::str_to_ident("trace_macros") { else if name == "trace_macros" {
self.context.gate_feature("trace_macros", path.span, EXPLAIN_TRACE_MACROS); self.context.gate_feature("trace_macros", path.span, EXPLAIN_TRACE_MACROS);
} }
else if id == token::str_to_ident("concat_idents") { else if name == "concat_idents" {
self.context.gate_feature("concat_idents", path.span, EXPLAIN_CONCAT_IDENTS); self.context.gate_feature("concat_idents", path.span, EXPLAIN_CONCAT_IDENTS);
} }
} }

View file

@ -40,7 +40,7 @@ impl<'a> ParserAttr for Parser<'a> {
token::DocComment(s) => { token::DocComment(s) => {
let attr = ::attr::mk_sugared_doc_attr( let attr = ::attr::mk_sugared_doc_attr(
attr::mk_attr_id(), attr::mk_attr_id(),
self.id_to_interned_str(s.ident()), self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)),
self.span.lo, self.span.lo,
self.span.hi self.span.hi
); );
@ -137,9 +137,8 @@ impl<'a> ParserAttr for Parser<'a> {
token::DocComment(s) => { token::DocComment(s) => {
// we need to get the position of this token before we bump. // we need to get the position of this token before we bump.
let Span { lo, hi, .. } = self.span; let Span { lo, hi, .. } = self.span;
let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), let str = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
self.id_to_interned_str(s.ident()), let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), str, lo, hi);
lo, hi);
if attr.node.style == ast::AttrInner { if attr.node.style == ast::AttrInner {
attrs.push(attr); attrs.push(attr);
panictry!(self.bump()); panictry!(self.bump());

View file

@ -744,8 +744,8 @@ mod tests {
Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))), Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))),
Some(&ast::TtDelimited(_, ref macro_delimed)), Some(&ast::TtDelimited(_, ref macro_delimed)),
) )
if name_macro_rules.name == "macro_rules" if name_macro_rules.name.as_str() == "macro_rules"
&& name_zip.name == "zip" => { && name_zip.name.as_str() == "zip" => {
let tts = &macro_delimed.tts[..]; let tts = &macro_delimed.tts[..];
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
( (
@ -763,7 +763,7 @@ mod tests {
Some(&ast::TtToken(_, token::Ident(ident, token::Plain))), Some(&ast::TtToken(_, token::Ident(ident, token::Plain))),
) )
if first_delimed.delim == token::Paren if first_delimed.delim == token::Paren
&& ident.name == "a" => {}, && ident.name.as_str() == "a" => {},
_ => panic!("value 3: {:?}", **first_delimed), _ => panic!("value 3: {:?}", **first_delimed),
} }
let tts = &second_delimed.tts[..]; let tts = &second_delimed.tts[..];
@ -774,7 +774,7 @@ mod tests {
Some(&ast::TtToken(_, token::Ident(ident, token::Plain))), Some(&ast::TtToken(_, token::Ident(ident, token::Plain))),
) )
if second_delimed.delim == token::Paren if second_delimed.delim == token::Paren
&& ident.name == "a" => {}, && ident.name.as_str() == "a" => {},
_ => panic!("value 4: {:?}", **second_delimed), _ => panic!("value 4: {:?}", **second_delimed),
} }
}, },

View file

@ -4658,7 +4658,7 @@ impl<'a> Parser<'a> {
(fields, None) (fields, None)
// Tuple-style struct definition with optional where-clause. // Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(token::Paren) { } else if self.token == token::OpenDelim(token::Paren) {
let fields = try!(self.parse_tuple_struct_body(&class_name, &mut generics)); let fields = try!(self.parse_tuple_struct_body(class_name, &mut generics));
(fields, Some(ast::DUMMY_NODE_ID)) (fields, Some(ast::DUMMY_NODE_ID))
} else { } else {
let token_str = self.this_token_to_string(); let token_str = self.this_token_to_string();
@ -4693,7 +4693,7 @@ impl<'a> Parser<'a> {
} }
pub fn parse_tuple_struct_body(&mut self, pub fn parse_tuple_struct_body(&mut self,
class_name: &ast::Ident, class_name: ast::Ident,
generics: &mut ast::Generics) generics: &mut ast::Generics)
-> PResult<Vec<StructField>> { -> PResult<Vec<StructField>> {
// This is the case where we find `struct Foo<T>(T) where T: Copy;` // This is the case where we find `struct Foo<T>(T) where T: Copy;`
@ -5723,10 +5723,10 @@ impl<'a> Parser<'a> {
Option<ast::Name>)>> { Option<ast::Name>)>> {
let ret = match self.token { let ret = match self.token {
token::Literal(token::Str_(s), suf) => { token::Literal(token::Str_(s), suf) => {
(self.id_to_interned_str(s.ident()), ast::CookedStr, suf) (self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)), ast::CookedStr, suf)
} }
token::Literal(token::StrRaw(s, n), suf) => { token::Literal(token::StrRaw(s, n), suf) => {
(self.id_to_interned_str(s.ident()), ast::RawStr(n), suf) (self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)), ast::RawStr(n), suf)
} }
_ => return Ok(None) _ => return Ok(None)
}; };

View file

@ -453,7 +453,7 @@ macro_rules! declare_special_idents_and_keywords {(
#[allow(non_upper_case_globals)] #[allow(non_upper_case_globals)]
pub const $si_static: ast::Ident = ast::Ident { pub const $si_static: ast::Ident = ast::Ident {
name: ast::Name($si_name), name: ast::Name($si_name),
ctxt: 0, ctxt: ast::EMPTY_CTXT,
}; };
)* )*
} }
@ -462,7 +462,7 @@ macro_rules! declare_special_idents_and_keywords {(
use ast; use ast;
$( $(
#[allow(non_upper_case_globals)] #[allow(non_upper_case_globals)]
pub const $si_static: ast::Name = ast::Name($si_name); pub const $si_static: ast::Name = ast::Name($si_name);
)* )*
} }
@ -729,19 +729,19 @@ pub fn gensym(s: &str) -> ast::Name {
/// Maps a string to an identifier with an empty syntax context. /// Maps a string to an identifier with an empty syntax context.
#[inline] #[inline]
pub fn str_to_ident(s: &str) -> ast::Ident { pub fn str_to_ident(s: &str) -> ast::Ident {
ast::Ident::new(intern(s)) ast::Ident::with_empty_ctxt(intern(s))
} }
/// Maps a string to a gensym'ed identifier. /// Maps a string to a gensym'ed identifier.
#[inline] #[inline]
pub fn gensym_ident(s: &str) -> ast::Ident { pub fn gensym_ident(s: &str) -> ast::Ident {
ast::Ident::new(gensym(s)) ast::Ident::with_empty_ctxt(gensym(s))
} }
// create a fresh name that maps to the same string as the old one. // create a fresh name that maps to the same string as the old one.
// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src))); // note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src)));
// that is, that the new name and the old one are connected to ptr_eq strings. // that is, that the new name and the old one are connected to ptr_eq strings.
pub fn fresh_name(src: &ast::Ident) -> ast::Name { pub fn fresh_name(src: ast::Ident) -> ast::Name {
let interner = get_ident_interner(); let interner = get_ident_interner();
interner.gensym_copy(src.name) interner.gensym_copy(src.name)
// following: debug version. Could work in final except that it's incompatible with // following: debug version. Could work in final except that it's incompatible with
@ -753,7 +753,7 @@ pub fn fresh_name(src: &ast::Ident) -> ast::Name {
// create a fresh mark. // create a fresh mark.
pub fn fresh_mark() -> ast::Mrk { pub fn fresh_mark() -> ast::Mrk {
gensym("mark").usize() as u32 gensym("mark").0
} }
#[cfg(test)] #[cfg(test)]
@ -763,7 +763,7 @@ mod tests {
use ext::mtwt; use ext::mtwt;
fn mark_ident(id : ast::Ident, m : ast::Mrk) -> ast::Ident { fn mark_ident(id : ast::Ident, m : ast::Mrk) -> ast::Ident {
ast::Ident { name: id.name, ctxt:mtwt::apply_mark(m, id.ctxt) } ast::Ident::new(id.name, mtwt::apply_mark(m, id.ctxt))
} }
#[test] fn mtwt_token_eq_test() { #[test] fn mtwt_token_eq_test() {

View file

@ -297,7 +297,7 @@ pub fn token_to_string(tok: &Token) -> String {
token::NtBlock(ref e) => block_to_string(&**e), token::NtBlock(ref e) => block_to_string(&**e),
token::NtStmt(ref e) => stmt_to_string(&**e), token::NtStmt(ref e) => stmt_to_string(&**e),
token::NtPat(ref e) => pat_to_string(&**e), token::NtPat(ref e) => pat_to_string(&**e),
token::NtIdent(ref e, _) => ident_to_string(&**e), token::NtIdent(ref e, _) => ident_to_string(**e),
token::NtTT(ref e) => tt_to_string(&**e), token::NtTT(ref e) => tt_to_string(&**e),
token::NtArm(ref e) => arm_to_string(&*e), token::NtArm(ref e) => arm_to_string(&*e),
token::NtImplItem(ref e) => impl_item_to_string(&**e), token::NtImplItem(ref e) => impl_item_to_string(&**e),
@ -376,8 +376,8 @@ pub fn path_to_string(p: &ast::Path) -> String {
to_string(|s| s.print_path(p, false, 0)) to_string(|s| s.print_path(p, false, 0))
} }
pub fn ident_to_string(id: &ast::Ident) -> String { pub fn ident_to_string(id: ast::Ident) -> String {
to_string(|s| s.print_ident(*id)) to_string(|s| s.print_ident(id))
} }
pub fn fun_to_string(decl: &ast::FnDecl, pub fn fun_to_string(decl: &ast::FnDecl,
@ -2857,7 +2857,6 @@ impl<'a> State<'a> {
ast::ViewPathSimple(ident, ref path) => { ast::ViewPathSimple(ident, ref path) => {
try!(self.print_path(path, false, 0)); try!(self.print_path(path, false, 0));
// FIXME(#6993) can't compare identifiers directly here
if path.segments.last().unwrap().identifier.name != if path.segments.last().unwrap().identifier.name !=
ident.name { ident.name {
try!(space(&mut self.s)); try!(space(&mut self.s));

View file

@ -69,7 +69,7 @@ impl<T: Eq + Hash + Clone + 'static> Interner<T> {
pub fn get(&self, idx: Name) -> T { pub fn get(&self, idx: Name) -> T {
let vect = self.vect.borrow(); let vect = self.vect.borrow();
(*vect)[idx.usize()].clone() (*vect)[idx.0 as usize].clone()
} }
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
@ -196,13 +196,13 @@ impl StrInterner {
let new_idx = Name(self.len() as u32); let new_idx = Name(self.len() as u32);
// leave out of map to avoid colliding // leave out of map to avoid colliding
let mut vect = self.vect.borrow_mut(); let mut vect = self.vect.borrow_mut();
let existing = (*vect)[idx.usize()].clone(); let existing = (*vect)[idx.0 as usize].clone();
vect.push(existing); vect.push(existing);
new_idx new_idx
} }
pub fn get(&self, idx: Name) -> RcStr { pub fn get(&self, idx: Name) -> RcStr {
(*self.vect.borrow())[idx.usize()].clone() (*self.vect.borrow())[idx.0 as usize].clone()
} }
pub fn len(&self) -> usize { pub fn len(&self) -> usize {

View file

@ -35,7 +35,7 @@ impl LintPass for Pass {
impl EarlyLintPass for Pass { impl EarlyLintPass for Pass {
fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) { fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
if it.ident.name == "lintme" { if it.ident.name.as_str() == "lintme" {
cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'"); cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
} }
} }

View file

@ -33,7 +33,7 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
let mac_expr = match TokenTree::parse(cx, &mbe_matcher[..], args) { let mac_expr = match TokenTree::parse(cx, &mbe_matcher[..], args) {
Success(map) => { Success(map) => {
match (&*map[&str_to_ident("matched")], &*map[&str_to_ident("pat")]) { match (&*map[&str_to_ident("matched").name], &*map[&str_to_ident("pat").name]) {
(&MatchedNonterminal(NtExpr(ref matched_expr)), (&MatchedNonterminal(NtExpr(ref matched_expr)),
&MatchedSeq(ref pats, seq_sp)) => { &MatchedSeq(ref pats, seq_sp)) => {
let pats: Vec<P<Pat>> = pats.iter().map(|pat_nt| let pats: Vec<P<Pat>> = pats.iter().map(|pat_nt|