1
Fork 0

Cleanup interfaces of Name, SyntaxContext and Ident

Make sure Name, SyntaxContext and Ident are passed by value
Make sure Idents don't serve as keys (or parts of keys) in maps, since Ident comparison is not well defined
This commit is contained in:
Vadim Petrochenkov 2015-09-24 23:05:02 +03:00
parent 40ce80484c
commit f284cbc7af
42 changed files with 197 additions and 256 deletions

View file

@ -35,7 +35,7 @@ use syntax::parse::lexer::TokenAndSpan;
fn parse_token_list(file: &str) -> HashMap<String, token::Token> { fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
fn id() -> token::Token { fn id() -> token::Token {
token::Ident(ast::Ident { name: Name(0), ctxt: 0, }, token::Plain) token::Ident(ast::Ident::with_empty_ctxt(Name(0))), token::Plain)
} }
let mut res = HashMap::new(); let mut res = HashMap::new();
@ -75,7 +75,7 @@ fn parse_token_list(file: &str) -> HashMap<String, token::Token> {
"RPAREN" => token::CloseDelim(token::Paren), "RPAREN" => token::CloseDelim(token::Paren),
"SLASH" => token::BinOp(token::Slash), "SLASH" => token::BinOp(token::Slash),
"COMMA" => token::Comma, "COMMA" => token::Comma,
"LIFETIME" => token::Lifetime(ast::Ident { name: Name(0), ctxt: 0 }), "LIFETIME" => token::Lifetime(ast::Ident::with_empty_ctxt(Name(0))),
"CARET" => token::BinOp(token::Caret), "CARET" => token::BinOp(token::Caret),
"TILDE" => token::Tilde, "TILDE" => token::Tilde,
"IDENT" => id(), "IDENT" => id(),
@ -208,9 +208,9 @@ fn parse_antlr_token(s: &str, tokens: &HashMap<String, token::Token>, surrogate_
token::Literal(token::ByteStr(..), n) => token::Literal(token::ByteStr(nm), n), token::Literal(token::ByteStr(..), n) => token::Literal(token::ByteStr(nm), n),
token::Literal(token::ByteStrRaw(..), n) => token::Literal(token::ByteStrRaw(fix(content), token::Literal(token::ByteStrRaw(..), n) => token::Literal(token::ByteStrRaw(fix(content),
count(content)), n), count(content)), n),
token::Ident(..) => token::Ident(ast::Ident { name: nm, ctxt: 0 }, token::Ident(..) => token::Ident(ast::Ident::with_empty_ctxt(nm)),
token::ModName), token::ModName),
token::Lifetime(..) => token::Lifetime(ast::Ident { name: nm, ctxt: 0 }), token::Lifetime(..) => token::Lifetime(ast::Ident::with_empty_ctxt(nm)),
ref t => t.clone() ref t => t.clone()
}; };

View file

@ -482,7 +482,7 @@ impl<'a> CrateReader<'a> {
let span = mk_sp(lo, p.last_span.hi); let span = mk_sp(lo, p.last_span.hi);
p.abort_if_errors(); p.abort_if_errors();
macros.push(ast::MacroDef { macros.push(ast::MacroDef {
ident: name.ident(), ident: ast::Ident::with_empty_ctxt(name),
attrs: attrs, attrs: attrs,
id: ast::DUMMY_NODE_ID, id: ast::DUMMY_NODE_ID,
span: span, span: span,

View file

@ -85,7 +85,7 @@ fn entry_point_type(item: &Item, depth: usize) -> EntryPointType {
EntryPointType::Start EntryPointType::Start
} else if attr::contains_name(&item.attrs, "main") { } else if attr::contains_name(&item.attrs, "main") {
EntryPointType::MainAttr EntryPointType::MainAttr
} else if item.name == "main" { } else if item.name.as_str() == "main" {
if depth == 1 { if depth == 1 {
// This is a top-level function so can be 'main' // This is a top-level function so can be 'main'
EntryPointType::MainNamed EntryPointType::MainNamed

View file

@ -55,7 +55,7 @@ impl<'a, 'tcx> IntrinsicCheckingVisitor<'a, 'tcx> {
ty::TyBareFn(_, ref bfty) => bfty.abi == RustIntrinsic, ty::TyBareFn(_, ref bfty) => bfty.abi == RustIntrinsic,
_ => return false _ => return false
}; };
intrinsic && self.tcx.item_name(def_id) == "transmute" intrinsic && self.tcx.item_name(def_id).as_str() == "transmute"
} }
fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>, id: ast::NodeId) { fn check_transmute(&self, span: Span, from: Ty<'tcx>, to: Ty<'tcx>, id: ast::NodeId) {

View file

@ -211,7 +211,7 @@ pub fn def_to_path(tcx: &ty::ctxt, id: DefId) -> hir::Path {
tcx.with_path(id, |path| hir::Path { tcx.with_path(id, |path| hir::Path {
global: false, global: false,
segments: path.last().map(|elem| hir::PathSegment { segments: path.last().map(|elem| hir::PathSegment {
identifier: ast::Ident::new(elem.name()), identifier: ast::Ident::with_empty_ctxt(elem.name()),
parameters: hir::PathParameters::none(), parameters: hir::PathParameters::none(),
}).into_iter().collect(), }).into_iter().collect(),
span: DUMMY_SP, span: DUMMY_SP,

View file

@ -336,7 +336,7 @@ impl<'a, 'v, 'tcx> Visitor<'v> for Checker<'a, 'tcx> {
// When compiling with --test we don't enforce stability on the // When compiling with --test we don't enforce stability on the
// compiler-generated test module, demarcated with `DUMMY_SP` plus the // compiler-generated test module, demarcated with `DUMMY_SP` plus the
// name `__test` // name `__test`
if item.span == DUMMY_SP && item.name == "__test" { return } if item.span == DUMMY_SP && item.name.as_str() == "__test" { return }
check_item(self.tcx, item, true, check_item(self.tcx, item, true,
&mut |id, sp, stab| self.check(id, sp, stab)); &mut |id, sp, stab| self.check(id, sp, stab));

View file

@ -15,7 +15,6 @@ use rustc::middle::ty;
use std::cell::RefCell; use std::cell::RefCell;
use syntax::ast; use syntax::ast;
use syntax::codemap; use syntax::codemap;
use rustc_front::print::pprust;
use rustc_front::hir; use rustc_front::hir;
pub struct MoveErrorCollector<'tcx> { pub struct MoveErrorCollector<'tcx> {
@ -159,7 +158,6 @@ fn note_move_destination(bccx: &BorrowckCtxt,
move_to_span: codemap::Span, move_to_span: codemap::Span,
pat_name: ast::Name, pat_name: ast::Name,
is_first_note: bool) { is_first_note: bool) {
let pat_name = pprust::name_to_string(pat_name);
if is_first_note { if is_first_note {
bccx.span_note( bccx.span_note(
move_to_span, move_to_span,

View file

@ -380,7 +380,7 @@ impl<'ast> pprust::PpAnn for HygieneAnnotation<'ast> {
try!(pp::space(&mut s.s)); try!(pp::space(&mut s.s));
// FIXME #16420: this doesn't display the connections // FIXME #16420: this doesn't display the connections
// between syntax contexts // between syntax contexts
s.synth_comment(format!("{}#{}", nm, ctxt)) s.synth_comment(format!("{}#{}", nm, ctxt.0))
} }
pprust::NodeName(&ast::Name(nm)) => { pprust::NodeName(&ast::Name(nm)) => {
try!(pp::space(&mut s.s)); try!(pp::space(&mut s.s));

View file

@ -631,7 +631,6 @@ pub enum Expr_ {
/// ///
/// `if expr { block } else { expr }` /// `if expr { block } else { expr }`
ExprIf(P<Expr>, P<Block>, Option<P<Expr>>), ExprIf(P<Expr>, P<Block>, Option<P<Expr>>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A while loop, with an optional label /// A while loop, with an optional label
/// ///
/// `'label: while expr { block }` /// `'label: while expr { block }`
@ -639,7 +638,6 @@ pub enum Expr_ {
/// Conditionless loop (can be exited with break, continue, or return) /// Conditionless loop (can be exited with break, continue, or return)
/// ///
/// `'label: loop { block }` /// `'label: loop { block }`
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
ExprLoop(P<Block>, Option<Ident>), ExprLoop(P<Block>, Option<Ident>),
/// A `match` block, with a source that indicates whether or not it is /// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind. /// the result of a desugaring, and if so, which kind.

View file

@ -2171,7 +2171,6 @@ impl<'a> State<'a> {
hir::ViewPathSimple(name, ref path) => { hir::ViewPathSimple(name, ref path) => {
try!(self.print_path(path, false, 0)); try!(self.print_path(path, false, 0));
// FIXME(#6993) can't compare identifiers directly here
if path.segments.last().unwrap().identifier.name != name { if path.segments.last().unwrap().identifier.name != name {
try!(space(&mut self.s)); try!(space(&mut self.s));
try!(self.word_space("as")); try!(self.word_space("as"));

View file

@ -50,7 +50,7 @@ impl UnusedMut {
let name = path1.node; let name = path1.node;
if let hir::BindByValue(hir::MutMutable) = mode { if let hir::BindByValue(hir::MutMutable) = mode {
if !name.as_str().starts_with("_") { if !name.as_str().starts_with("_") {
match mutables.entry(name.usize()) { match mutables.entry(name.0 as usize) {
Vacant(entry) => { entry.insert(vec![id]); }, Vacant(entry) => { entry.insert(vec![id]); },
Occupied(mut entry) => { entry.get_mut().push(id); }, Occupied(mut entry) => { entry.get_mut().push(id); },
} }

View file

@ -3710,7 +3710,7 @@ impl<'a, 'tcx> Resolver<'a, 'tcx> {
false // Stop advancing false // Stop advancing
}); });
if method_scope && special_names::self_ == path_name { if method_scope && special_names::self_.as_str() == &path_name[..] {
resolve_error( resolve_error(
self, self,
expr.span, expr.span,

View file

@ -537,7 +537,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
fn get_binding(this: &mut Resolver, fn get_binding(this: &mut Resolver,
import_resolution: &ImportResolution, import_resolution: &ImportResolution,
namespace: Namespace, namespace: Namespace,
source: &Name) source: Name)
-> NamespaceResult { -> NamespaceResult {
// Import resolutions must be declared with "pub" // Import resolutions must be declared with "pub"
@ -560,7 +560,7 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
let id = import_resolution.id(namespace); let id = import_resolution.id(namespace);
// track used imports and extern crates as well // track used imports and extern crates as well
this.used_imports.insert((id, namespace)); this.used_imports.insert((id, namespace));
this.record_import_use(id, *source); this.record_import_use(id, source);
match target_module.def_id.get() { match target_module.def_id.get() {
Some(DefId{krate: kid, ..}) => { Some(DefId{krate: kid, ..}) => {
this.used_crates.insert(kid); this.used_crates.insert(kid);
@ -578,14 +578,14 @@ impl<'a, 'b:'a, 'tcx:'b> ImportResolver<'a, 'b, 'tcx> {
value_result = get_binding(self.resolver, value_result = get_binding(self.resolver,
import_resolution, import_resolution,
ValueNS, ValueNS,
&source); source);
value_used_reexport = import_resolution.is_public; value_used_reexport = import_resolution.is_public;
} }
if type_result.is_unknown() { if type_result.is_unknown() {
type_result = get_binding(self.resolver, type_result = get_binding(self.resolver,
import_resolution, import_resolution,
TypeNS, TypeNS,
&source); source);
type_used_reexport = import_resolution.is_public; type_used_reexport = import_resolution.is_public;
} }

View file

@ -444,7 +444,7 @@ impl <'l, 'tcx> DumpCsvVisitor<'l, 'tcx> {
fn process_const(&mut self, fn process_const(&mut self,
id: ast::NodeId, id: ast::NodeId,
name: &ast::Name, name: ast::Name,
span: Span, span: Span,
typ: &ast::Ty, typ: &ast::Ty,
expr: &ast::Expr) { expr: &ast::Expr) {
@ -988,7 +988,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DumpCsvVisitor<'l, 'tcx> {
fn visit_trait_item(&mut self, trait_item: &ast::TraitItem) { fn visit_trait_item(&mut self, trait_item: &ast::TraitItem) {
match trait_item.node { match trait_item.node {
ast::ConstTraitItem(ref ty, Some(ref expr)) => { ast::ConstTraitItem(ref ty, Some(ref expr)) => {
self.process_const(trait_item.id, &trait_item.ident.name, self.process_const(trait_item.id, trait_item.ident.name,
trait_item.span, &*ty, &*expr); trait_item.span, &*ty, &*expr);
} }
ast::MethodTraitItem(ref sig, ref body) => { ast::MethodTraitItem(ref sig, ref body) => {
@ -1006,7 +1006,7 @@ impl<'l, 'tcx, 'v> Visitor<'v> for DumpCsvVisitor<'l, 'tcx> {
fn visit_impl_item(&mut self, impl_item: &ast::ImplItem) { fn visit_impl_item(&mut self, impl_item: &ast::ImplItem) {
match impl_item.node { match impl_item.node {
ast::ConstImplItem(ref ty, ref expr) => { ast::ConstImplItem(ref ty, ref expr) => {
self.process_const(impl_item.id, &impl_item.ident.name, self.process_const(impl_item.id, impl_item.ident.name,
impl_item.span, &ty, &expr); impl_item.span, &ty, &expr);
} }
ast::MethodImplItem(ref sig, ref body) => { ast::MethodImplItem(ref sig, ref body) => {

View file

@ -168,7 +168,7 @@ pub fn return_type_is_void(ccx: &CrateContext, ty: Ty) -> bool {
/// Generates a unique symbol based off the name given. This is used to create /// Generates a unique symbol based off the name given. This is used to create
/// unique symbols for things like closures. /// unique symbols for things like closures.
pub fn gensym_name(name: &str) -> PathElem { pub fn gensym_name(name: &str) -> PathElem {
let num = token::gensym(name).usize(); let num = token::gensym(name).0;
// use one colon which will get translated to a period by the mangler, and // use one colon which will get translated to a period by the mangler, and
// we're guaranteed that `num` is globally unique for this crate. // we're guaranteed that `num` is globally unique for this crate.
PathName(token::gensym(&format!("{}:{}", name, num))) PathName(token::gensym(&format!("{}:{}", name, num)))
@ -829,7 +829,7 @@ pub fn C_cstr(cx: &CrateContext, s: InternedString, null_terminated: bool) -> Va
!null_terminated as Bool); !null_terminated as Bool);
let gsym = token::gensym("str"); let gsym = token::gensym("str");
let sym = format!("str{}", gsym.usize()); let sym = format!("str{}", gsym.0);
let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{ let g = declare::define_global(cx, &sym[..], val_ty(sc)).unwrap_or_else(||{
cx.sess().bug(&format!("symbol `{}` is already defined", sym)); cx.sess().bug(&format!("symbol `{}` is already defined", sym));
}); });

View file

@ -116,7 +116,7 @@ fn addr_of_mut(ccx: &CrateContext,
// FIXME: this totally needs a better name generation scheme, perhaps a simple global // FIXME: this totally needs a better name generation scheme, perhaps a simple global
// counter? Also most other uses of gensym in trans. // counter? Also most other uses of gensym in trans.
let gsym = token::gensym("_"); let gsym = token::gensym("_");
let name = format!("{}{}", kind, gsym.usize()); let name = format!("{}{}", kind, gsym.0);
let gv = declare::define_global(ccx, &name[..], val_ty(cv)).unwrap_or_else(||{ let gv = declare::define_global(ccx, &name[..], val_ty(cv)).unwrap_or_else(||{
ccx.sess().bug(&format!("symbol `{}` is already defined", name)); ccx.sess().bug(&format!("symbol `{}` is already defined", name));
}); });

View file

@ -838,7 +838,7 @@ fn check_trait_on_unimplemented<'a, 'tcx>(ccx: &CrateCtxt<'a, 'tcx>,
Position::ArgumentNamed(s) if s == "Self" => (), Position::ArgumentNamed(s) if s == "Self" => (),
// So is `{A}` if A is a type parameter // So is `{A}` if A is a type parameter
Position::ArgumentNamed(s) => match types.iter().find(|t| { Position::ArgumentNamed(s) => match types.iter().find(|t| {
t.name == s t.name.as_str() == s
}) { }) {
Some(_) => (), Some(_) => (),
None => { None => {

View file

@ -67,40 +67,38 @@ use std::fmt;
use std::rc::Rc; use std::rc::Rc;
use serialize::{Encodable, Decodable, Encoder, Decoder}; use serialize::{Encodable, Decodable, Encoder, Decoder};
// FIXME #6993: in librustc, uses of "ident" should be replaced /// A name is a part of an identifier, representing a string or gensym. It's
// by just "Name". /// the result of interning.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Name(pub u32);
/// A SyntaxContext represents a chain of macro-expandings
/// and renamings. Each macro expansion corresponds to
/// a fresh u32. This u32 is a reference to a table stored
// in thread-local storage.
// The special value EMPTY_CTXT is used to indicate an empty
// syntax context.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct SyntaxContext(pub u32);
/// An identifier contains a Name (index into the interner /// An identifier contains a Name (index into the interner
/// table) and a SyntaxContext to track renaming and /// table) and a SyntaxContext to track renaming and
/// macro expansion per Flatt et al., "Macros /// macro expansion per Flatt et al., "Macros That Work Together"
/// That Work Together" #[derive(Clone, Copy, Eq, Hash)]
#[derive(Clone, Copy, Hash, PartialOrd, Eq, Ord)]
pub struct Ident { pub struct Ident {
pub name: Name, pub name: Name,
pub ctxt: SyntaxContext pub ctxt: SyntaxContext
} }
impl Ident { impl Name {
/// Construct an identifier with the given name and an empty context: pub fn as_str(self) -> token::InternedString {
pub fn new(name: Name) -> Ident { Ident {name: name, ctxt: EMPTY_CTXT}} token::InternedString::new_from_name(self)
}
impl fmt::Debug for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{}#{}", self.name, self.ctxt)
}
}
impl fmt::Display for Ident {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&self.name, f)
} }
} }
impl fmt::Debug for Name { impl fmt::Debug for Name {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
let Name(nm) = *self; write!(f, "{}({})", self, self.0)
write!(f, "{}({})", self, nm)
} }
} }
@ -110,6 +108,29 @@ impl fmt::Display for Name {
} }
} }
impl Encodable for Name {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.as_str())
}
}
impl Decodable for Name {
fn decode<D: Decoder>(d: &mut D) -> Result<Name, D::Error> {
Ok(token::intern(&try!(d.read_str())[..]))
}
}
pub const EMPTY_CTXT : SyntaxContext = SyntaxContext(0);
impl Ident {
pub fn new(name: Name, ctxt: SyntaxContext) -> Ident {
Ident {name: name, ctxt: ctxt}
}
pub fn with_empty_ctxt(name: Name) -> Ident {
Ident {name: name, ctxt: EMPTY_CTXT}
}
}
impl PartialEq for Ident { impl PartialEq for Ident {
fn eq(&self, other: &Ident) -> bool { fn eq(&self, other: &Ident) -> bool {
if self.ctxt == other.ctxt { if self.ctxt == other.ctxt {
@ -119,74 +140,27 @@ impl PartialEq for Ident {
// idents that have different contexts. You can't fix this without // idents that have different contexts. You can't fix this without
// knowing whether the comparison should be hygienic or non-hygienic. // knowing whether the comparison should be hygienic or non-hygienic.
// if it should be non-hygienic (most things are), just compare the // if it should be non-hygienic (most things are), just compare the
// 'name' fields of the idents. Or, even better, replace the idents // 'name' fields of the idents.
// with Name's.
// //
// On the other hand, if the comparison does need to be hygienic, // On the other hand, if the comparison does need to be hygienic,
// one example and its non-hygienic counterpart would be: // one example and its non-hygienic counterpart would be:
// syntax::parse::token::Token::mtwt_eq // syntax::parse::token::Token::mtwt_eq
// syntax::ext::tt::macro_parser::token_name_eq // syntax::ext::tt::macro_parser::token_name_eq
panic!("not allowed to compare these idents: {:?}, {:?}. \ panic!("idents with different contexts are compared with operator `==`: \
Probably related to issue \\#6993", self, other); {:?}, {:?}.", self, other);
} }
} }
} }
/// A SyntaxContext represents a chain of macro-expandings impl fmt::Debug for Ident {
/// and renamings. Each macro expansion corresponds to fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
/// a fresh u32 write!(f, "{}#{}", self.name, self.ctxt.0)
// I'm representing this syntax context as an index into
// a table, in order to work around a compiler bug
// that's causing unreleased memory to cause core dumps
// and also perhaps to save some work in destructor checks.
// the special uint '0' will be used to indicate an empty
// syntax context.
// this uint is a reference to a table stored in thread-local
// storage.
pub type SyntaxContext = u32;
pub const EMPTY_CTXT : SyntaxContext = 0;
pub const ILLEGAL_CTXT : SyntaxContext = 1;
/// A name is a part of an identifier, representing a string or gensym. It's
/// the result of interning.
#[derive(Eq, Ord, PartialEq, PartialOrd, Hash, Clone, Copy)]
pub struct Name(pub u32);
impl<T: AsRef<str>> PartialEq<T> for Name {
fn eq(&self, other: &T) -> bool {
self.as_str() == other.as_ref()
} }
} }
impl Name { impl fmt::Display for Ident {
pub fn as_str(&self) -> token::InternedString { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
token::InternedString::new_from_name(*self) fmt::Display::fmt(&self.name, f)
}
pub fn usize(&self) -> usize {
let Name(nm) = *self;
nm as usize
}
pub fn ident(&self) -> Ident {
Ident { name: *self, ctxt: 0 }
}
}
/// A mark represents a unique id associated with a macro expansion
pub type Mrk = u32;
impl Encodable for Name {
fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
s.emit_str(&self.as_str())
}
}
impl Decodable for Name {
fn decode<D: Decoder>(d: &mut D) -> Result<Name, D::Error> {
Ok(token::intern(&try!(d.read_str())[..]))
} }
} }
@ -202,8 +176,8 @@ impl Decodable for Ident {
} }
} }
/// Function name (not all functions have names) /// A mark represents a unique id associated with a macro expansion
pub type FnIdent = Option<Ident>; pub type Mrk = u32;
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)] #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Lifetime { pub struct Lifetime {
@ -841,19 +815,16 @@ pub enum Expr_ {
/// ///
/// This is desugared to a `match` expression. /// This is desugared to a `match` expression.
ExprIfLet(P<Pat>, P<Expr>, P<Block>, Option<P<Expr>>), ExprIfLet(P<Pat>, P<Expr>, P<Block>, Option<P<Expr>>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A while loop, with an optional label /// A while loop, with an optional label
/// ///
/// `'label: while expr { block }` /// `'label: while expr { block }`
ExprWhile(P<Expr>, P<Block>, Option<Ident>), ExprWhile(P<Expr>, P<Block>, Option<Ident>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A while-let loop, with an optional label /// A while-let loop, with an optional label
/// ///
/// `'label: while let pat = expr { block }` /// `'label: while let pat = expr { block }`
/// ///
/// This is desugared to a combination of `loop` and `match` expressions. /// This is desugared to a combination of `loop` and `match` expressions.
ExprWhileLet(P<Pat>, P<Expr>, P<Block>, Option<Ident>), ExprWhileLet(P<Pat>, P<Expr>, P<Block>, Option<Ident>),
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
/// A for loop, with an optional label /// A for loop, with an optional label
/// ///
/// `'label: for pat in expr { block }` /// `'label: for pat in expr { block }`
@ -863,7 +834,6 @@ pub enum Expr_ {
/// Conditionless loop (can be exited with break, continue, or return) /// Conditionless loop (can be exited with break, continue, or return)
/// ///
/// `'label: loop { block }` /// `'label: loop { block }`
// FIXME #6993: change to Option<Name> ... or not, if these are hygienic.
ExprLoop(P<Block>, Option<Ident>), ExprLoop(P<Block>, Option<Ident>),
/// A `match` block, with a source that indicates whether or not it is /// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind. /// the result of a desugaring, and if so, which kind.
@ -1223,13 +1193,6 @@ pub struct MutTy {
pub mutbl: Mutability, pub mutbl: Mutability,
} }
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TypeField {
pub ident: Ident,
pub mt: MutTy,
pub span: Span,
}
/// Represents a method's signature in a trait declaration, /// Represents a method's signature in a trait declaration,
/// or in an implementation. /// or in an implementation.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)] #[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]

View file

@ -577,21 +577,21 @@ mod tests {
use ast::*; use ast::*;
use super::*; use super::*;
fn ident_to_segment(id : &Ident) -> PathSegment { fn ident_to_segment(id: Ident) -> PathSegment {
PathSegment {identifier: id.clone(), PathSegment {identifier: id,
parameters: PathParameters::none()} parameters: PathParameters::none()}
} }
#[test] fn idents_name_eq_test() { #[test] fn idents_name_eq_test() {
assert!(segments_name_eq( assert!(segments_name_eq(
&[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] &[Ident::new(Name(3),SyntaxContext(4)), Ident::new(Name(78),SyntaxContext(82))]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>(), .iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>(),
&[Ident{name:Name(3),ctxt:104}, Ident{name:Name(78),ctxt:182}] &[Ident::new(Name(3),SyntaxContext(104)), Ident::new(Name(78),SyntaxContext(182))]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>())); .iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>()));
assert!(!segments_name_eq( assert!(!segments_name_eq(
&[Ident{name:Name(3),ctxt:4}, Ident{name:Name(78),ctxt:82}] &[Ident::new(Name(3),SyntaxContext(4)), Ident::new(Name(78),SyntaxContext(82))]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>(), .iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>(),
&[Ident{name:Name(3),ctxt:104}, Ident{name:Name(77),ctxt:182}] &[Ident::new(Name(3),SyntaxContext(104)), Ident::new(Name(77),SyntaxContext(182))]
.iter().map(ident_to_segment).collect::<Vec<PathSegment>>())); .iter().cloned().map(ident_to_segment).collect::<Vec<PathSegment>>()));
} }
} }

View file

@ -1083,7 +1083,6 @@ pub struct MalformedCodemapPositions {
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
use std::rc::Rc;
#[test] #[test]
fn t1 () { fn t1 () {

View file

@ -842,7 +842,7 @@ pub fn expect<T, M>(diag: &SpanHandler, opt: Option<T>, msg: M) -> T where
#[cfg(test)] #[cfg(test)]
mod test { mod test {
use super::{EmitterWriter, Level}; use super::{EmitterWriter, Level};
use codemap::{mk_sp, CodeMap, BytePos}; use codemap::{mk_sp, CodeMap};
use std::sync::{Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::io::{self, Write}; use std::io::{self, Write};
use std::str::from_utf8; use std::str::from_utf8;

View file

@ -138,7 +138,7 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
)); ));
} }
}); });
let sym = Ident::new(token::gensym(&format!( let sym = Ident::with_empty_ctxt(token::gensym(&format!(
"__register_diagnostic_{}", code "__register_diagnostic_{}", code
))); )));
MacEager::items(SmallVector::many(vec![ MacEager::items(SmallVector::many(vec![

View file

@ -28,7 +28,7 @@ pub fn entry_point_type(item: &Item, depth: usize) -> EntryPointType {
EntryPointType::Start EntryPointType::Start
} else if attr::contains_name(&item.attrs, "main") { } else if attr::contains_name(&item.attrs, "main") {
EntryPointType::MainAttr EntryPointType::MainAttr
} else if item.ident.name == "main" { } else if item.ident.name.as_str() == "main" {
if depth == 1 { if depth == 1 {
// This is a top-level function so can be 'main' // This is a top-level function so can be 'main'
EntryPointType::MainNamed EntryPointType::MainNamed

View file

@ -646,7 +646,7 @@ impl<'a> ExtCtxt<'a> {
loop { loop {
if self.codemap().with_expn_info(expn_id, |info| { if self.codemap().with_expn_info(expn_id, |info| {
info.map_or(None, |i| { info.map_or(None, |i| {
if i.callee.name() == "include" { if i.callee.name().as_str() == "include" {
// Stop going up the backtrace once include! is encountered // Stop going up the backtrace once include! is encountered
return None; return None;
} }
@ -899,9 +899,9 @@ impl SyntaxEnv {
unreachable!() unreachable!()
} }
pub fn find(&self, k: &Name) -> Option<Rc<SyntaxExtension>> { pub fn find(&self, k: Name) -> Option<Rc<SyntaxExtension>> {
for frame in self.chain.iter().rev() { for frame in self.chain.iter().rev() {
match frame.map.get(k) { match frame.map.get(&k) {
Some(v) => return Some(v.clone()), Some(v) => return Some(v.clone()),
None => {} None => {}
} }

View file

@ -73,7 +73,6 @@ pub trait AstBuilder {
fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ; fn ty_vars(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ; fn ty_vars_global(&self, ty_params: &OwnedSlice<ast::TyParam>) -> Vec<P<ast::Ty>> ;
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField;
fn typaram(&self, fn typaram(&self,
span: Span, span: Span,
@ -443,14 +442,6 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
Vec::new())) Vec::new()))
} }
fn ty_field_imm(&self, span: Span, name: Ident, ty: P<ast::Ty>) -> ast::TypeField {
ast::TypeField {
ident: name,
mt: ast::MutTy { ty: ty, mutbl: ast::MutImmutable },
span: span,
}
}
fn ty_infer(&self, span: Span) -> P<ast::Ty> { fn ty_infer(&self, span: Span) -> P<ast::Ty> {
self.ty(span, ast::TyInfer) self.ty(span, ast::TyInfer)
} }

View file

@ -524,7 +524,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac,
return None; return None;
} }
let extname = pth.segments[0].identifier.name; let extname = pth.segments[0].identifier.name;
match fld.cx.syntax_env.find(&extname) { match fld.cx.syntax_env.find(extname) {
None => { None => {
fld.cx.span_err( fld.cx.span_err(
pth.span, pth.span,
@ -593,7 +593,7 @@ fn expand_loop_block(loop_block: P<Block>,
fld: &mut MacroExpander) -> (P<Block>, Option<Ident>) { fld: &mut MacroExpander) -> (P<Block>, Option<Ident>) {
match opt_ident { match opt_ident {
Some(label) => { Some(label) => {
let new_label = fresh_name(&label); let new_label = fresh_name(label);
let rename = (label, new_label); let rename = (label, new_label);
// The rename *must not* be added to the pending list of current // The rename *must not* be added to the pending list of current
@ -689,7 +689,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
let fm = fresh_mark(); let fm = fresh_mark();
let items = { let items = {
let expanded = match fld.cx.syntax_env.find(&extname) { let expanded = match fld.cx.syntax_env.find(extname) {
None => { None => {
fld.cx.span_err(path_span, fld.cx.span_err(path_span,
&format!("macro undefined: '{}!'", &format!("macro undefined: '{}!'",
@ -892,7 +892,7 @@ fn expand_non_macro_stmt(Spanned {node, span: stmt_span}: Stmt, fld: &mut MacroE
// generate fresh names, push them to a new pending list // generate fresh names, push them to a new pending list
let idents = pattern_bindings(&*expanded_pat); let idents = pattern_bindings(&*expanded_pat);
let mut new_pending_renames = let mut new_pending_renames =
idents.iter().map(|ident| (*ident, fresh_name(ident))).collect(); idents.iter().map(|ident| (*ident, fresh_name(*ident))).collect();
// rewrite the pattern using the new names (the old // rewrite the pattern using the new names (the old
// ones have already been applied): // ones have already been applied):
let rewritten_pat = { let rewritten_pat = {
@ -951,7 +951,7 @@ fn expand_arm(arm: ast::Arm, fld: &mut MacroExpander) -> ast::Arm {
// all of the pats must have the same set of bindings, so use the // all of the pats must have the same set of bindings, so use the
// first one to extract them and generate new names: // first one to extract them and generate new names:
let idents = pattern_bindings(&*expanded_pats[0]); let idents = pattern_bindings(&*expanded_pats[0]);
let new_renames = idents.into_iter().map(|id| (id, fresh_name(&id))).collect(); let new_renames = idents.into_iter().map(|id| (id, fresh_name(id))).collect();
// apply the renaming, but only to the PatIdents: // apply the renaming, but only to the PatIdents:
let mut rename_pats_fld = PatIdentRenamer{renames:&new_renames}; let mut rename_pats_fld = PatIdentRenamer{renames:&new_renames};
let rewritten_pats = expanded_pats.move_map(|pat| rename_pats_fld.fold_pat(pat)); let rewritten_pats = expanded_pats.move_map(|pat| rename_pats_fld.fold_pat(pat));
@ -1061,7 +1061,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
return DummyResult::raw_pat(span); return DummyResult::raw_pat(span);
} }
let extname = pth.segments[0].identifier.name; let extname = pth.segments[0].identifier.name;
let marked_after = match fld.cx.syntax_env.find(&extname) { let marked_after = match fld.cx.syntax_env.find(extname) {
None => { None => {
fld.cx.span_err(pth.span, fld.cx.span_err(pth.span,
&format!("macro undefined: '{}!'", &format!("macro undefined: '{}!'",
@ -1134,10 +1134,7 @@ pub struct IdentRenamer<'a> {
impl<'a> Folder for IdentRenamer<'a> { impl<'a> Folder for IdentRenamer<'a> {
fn fold_ident(&mut self, id: Ident) -> Ident { fn fold_ident(&mut self, id: Ident) -> Ident {
Ident { Ident::new(id.name, mtwt::apply_renames(self.renames, id.ctxt))
name: id.name,
ctxt: mtwt::apply_renames(self.renames, id.ctxt),
}
} }
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
fold::noop_fold_mac(mac, self) fold::noop_fold_mac(mac, self)
@ -1161,8 +1158,8 @@ impl<'a> Folder for PatIdentRenamer<'a> {
pat.map(|ast::Pat {id, node, span}| match node { pat.map(|ast::Pat {id, node, span}| match node {
ast::PatIdent(binding_mode, Spanned{span: sp, node: ident}, sub) => { ast::PatIdent(binding_mode, Spanned{span: sp, node: ident}, sub) => {
let new_ident = Ident{name: ident.name, let new_ident = Ident::new(ident.name,
ctxt: mtwt::apply_renames(self.renames, ident.ctxt)}; mtwt::apply_renames(self.renames, ident.ctxt));
let new_node = let new_node =
ast::PatIdent(binding_mode, ast::PatIdent(binding_mode,
Spanned{span: self.new_span(sp), node: new_ident}, Spanned{span: self.new_span(sp), node: new_ident},
@ -1254,7 +1251,7 @@ macro_rules! partition {
fld: &MacroExpander) fld: &MacroExpander)
-> (Vec<ast::Attribute>, Vec<ast::Attribute>) { -> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
attrs.iter().cloned().partition(|attr| { attrs.iter().cloned().partition(|attr| {
match fld.cx.syntax_env.find(&intern(&attr.name())) { match fld.cx.syntax_env.find(intern(&attr.name())) {
Some(rc) => match *rc { Some(rc) => match *rc {
$variant(..) => true, $variant(..) => true,
_ => false _ => false
@ -1276,7 +1273,7 @@ fn expand_decorators(a: Annotatable,
{ {
for attr in a.attrs() { for attr in a.attrs() {
let mname = intern(&attr.name()); let mname = intern(&attr.name());
match fld.cx.syntax_env.find(&mname) { match fld.cx.syntax_env.find(mname) {
Some(rc) => match *rc { Some(rc) => match *rc {
MultiDecorator(ref dec) => { MultiDecorator(ref dec) => {
attr::mark_used(&attr); attr::mark_used(&attr);
@ -1327,7 +1324,7 @@ fn expand_item_multi_modifier(mut it: Annotatable,
for attr in &modifiers { for attr in &modifiers {
let mname = intern(&attr.name()); let mname = intern(&attr.name());
match fld.cx.syntax_env.find(&mname) { match fld.cx.syntax_env.find(mname) {
Some(rc) => match *rc { Some(rc) => match *rc {
MultiModifier(ref mac) => { MultiModifier(ref mac) => {
attr::mark_used(attr); attr::mark_used(attr);
@ -1407,7 +1404,7 @@ fn expand_and_rename_fn_decl_and_block(fn_decl: P<ast::FnDecl>, block: P<ast::Bl
let expanded_decl = fld.fold_fn_decl(fn_decl); let expanded_decl = fld.fold_fn_decl(fn_decl);
let idents = fn_decl_arg_bindings(&*expanded_decl); let idents = fn_decl_arg_bindings(&*expanded_decl);
let renames = let renames =
idents.iter().map(|id : &ast::Ident| (*id,fresh_name(id))).collect(); idents.iter().map(|id| (*id,fresh_name(*id))).collect();
// first, a renamer for the PatIdents, for the fn_decl: // first, a renamer for the PatIdents, for the fn_decl:
let mut rename_pat_fld = PatIdentRenamer{renames: &renames}; let mut rename_pat_fld = PatIdentRenamer{renames: &renames};
let rewritten_fn_decl = rename_pat_fld.fold_fn_decl(expanded_decl); let rewritten_fn_decl = rename_pat_fld.fold_fn_decl(expanded_decl);
@ -1628,10 +1625,7 @@ struct Marker { mark: Mrk }
impl Folder for Marker { impl Folder for Marker {
fn fold_ident(&mut self, id: Ident) -> Ident { fn fold_ident(&mut self, id: Ident) -> Ident {
ast::Ident { ast::Ident::new(id.name, mtwt::apply_mark(self.mark, id.ctxt))
name: id.name,
ctxt: mtwt::apply_mark(self.mark, id.ctxt)
}
} }
fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac { fn fold_mac(&mut self, Spanned {node, span}: ast::Mac) -> ast::Mac {
Spanned { Spanned {
@ -2104,7 +2098,7 @@ foo_module!();
// find the xx binding // find the xx binding
let bindings = crate_bindings(&cr); let bindings = crate_bindings(&cr);
let cxbinds: Vec<&ast::Ident> = let cxbinds: Vec<&ast::Ident> =
bindings.iter().filter(|b| b.name == "xx").collect(); bindings.iter().filter(|b| b.name.as_str() == "xx").collect();
let cxbinds: &[&ast::Ident] = &cxbinds[..]; let cxbinds: &[&ast::Ident] = &cxbinds[..];
let cxbind = match (cxbinds.len(), cxbinds.get(0)) { let cxbind = match (cxbinds.len(), cxbinds.get(0)) {
(1, Some(b)) => *b, (1, Some(b)) => *b,
@ -2116,7 +2110,7 @@ foo_module!();
// the xx binding should bind all of the xx varrefs: // the xx binding should bind all of the xx varrefs:
for (idx,v) in varrefs.iter().filter(|p| { for (idx,v) in varrefs.iter().filter(|p| {
p.segments.len() == 1 p.segments.len() == 1
&& p.segments[0].identifier.name == "xx" && p.segments[0].identifier.name.as_str() == "xx"
}).enumerate() { }).enumerate() {
if mtwt::resolve(v.segments[0].identifier) != resolved_binding { if mtwt::resolve(v.segments[0].identifier) != resolved_binding {
println!("uh oh, xx binding didn't match xx varref:"); println!("uh oh, xx binding didn't match xx varref:");

View file

@ -35,7 +35,7 @@ use std::collections::HashMap;
pub struct SCTable { pub struct SCTable {
table: RefCell<Vec<SyntaxContext_>>, table: RefCell<Vec<SyntaxContext_>>,
mark_memo: RefCell<HashMap<(SyntaxContext,Mrk),SyntaxContext>>, mark_memo: RefCell<HashMap<(SyntaxContext,Mrk),SyntaxContext>>,
rename_memo: RefCell<HashMap<(SyntaxContext,Ident,Name),SyntaxContext>>, rename_memo: RefCell<HashMap<(SyntaxContext,Name,SyntaxContext,Name),SyntaxContext>>,
} }
#[derive(PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy, Clone)] #[derive(PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy, Clone)]
@ -66,8 +66,9 @@ pub fn apply_mark(m: Mrk, ctxt: SyntaxContext) -> SyntaxContext {
/// Extend a syntax context with a given mark and sctable (explicit memoization) /// Extend a syntax context with a given mark and sctable (explicit memoization)
fn apply_mark_internal(m: Mrk, ctxt: SyntaxContext, table: &SCTable) -> SyntaxContext { fn apply_mark_internal(m: Mrk, ctxt: SyntaxContext, table: &SCTable) -> SyntaxContext {
let key = (ctxt, m); let key = (ctxt, m);
* table.mark_memo.borrow_mut().entry(key) *table.mark_memo.borrow_mut().entry(key).or_insert_with(|| {
.or_insert_with(|| idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt))) SyntaxContext(idx_push(&mut *table.table.borrow_mut(), Mark(m, ctxt)))
})
} }
/// Extend a syntax context with a given rename /// Extend a syntax context with a given rename
@ -81,10 +82,11 @@ fn apply_rename_internal(id: Ident,
to: Name, to: Name,
ctxt: SyntaxContext, ctxt: SyntaxContext,
table: &SCTable) -> SyntaxContext { table: &SCTable) -> SyntaxContext {
let key = (ctxt, id, to); let key = (ctxt, id.name, id.ctxt, to);
* table.rename_memo.borrow_mut().entry(key) *table.rename_memo.borrow_mut().entry(key).or_insert_with(|| {
.or_insert_with(|| idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt))) SyntaxContext(idx_push(&mut *table.table.borrow_mut(), Rename(id, to, ctxt)))
})
} }
/// Apply a list of renamings to a context /// Apply a list of renamings to a context
@ -185,20 +187,20 @@ fn resolve_internal(id: Ident,
} }
let resolved = { let resolved = {
let result = (*table.table.borrow())[id.ctxt as usize]; let result = (*table.table.borrow())[id.ctxt.0 as usize];
match result { match result {
EmptyCtxt => id.name, EmptyCtxt => id.name,
// ignore marks here: // ignore marks here:
Mark(_,subctxt) => Mark(_,subctxt) =>
resolve_internal(Ident{name:id.name, ctxt: subctxt}, resolve_internal(Ident::new(id.name, subctxt),
table, resolve_table), table, resolve_table),
// do the rename if necessary: // do the rename if necessary:
Rename(Ident{name, ctxt}, toname, subctxt) => { Rename(Ident{name, ctxt}, toname, subctxt) => {
let resolvedfrom = let resolvedfrom =
resolve_internal(Ident{name:name, ctxt:ctxt}, resolve_internal(Ident::new(name, ctxt),
table, resolve_table); table, resolve_table);
let resolvedthis = let resolvedthis =
resolve_internal(Ident{name:id.name, ctxt:subctxt}, resolve_internal(Ident::new(id.name, subctxt),
table, resolve_table); table, resolve_table);
if (resolvedthis == resolvedfrom) if (resolvedthis == resolvedfrom)
&& (marksof_internal(ctxt, resolvedthis, table) && (marksof_internal(ctxt, resolvedthis, table)
@ -229,7 +231,7 @@ fn marksof_internal(ctxt: SyntaxContext,
let mut result = Vec::new(); let mut result = Vec::new();
let mut loopvar = ctxt; let mut loopvar = ctxt;
loop { loop {
let table_entry = (*table.table.borrow())[loopvar as usize]; let table_entry = (*table.table.borrow())[loopvar.0 as usize];
match table_entry { match table_entry {
EmptyCtxt => { EmptyCtxt => {
return result; return result;
@ -256,7 +258,7 @@ fn marksof_internal(ctxt: SyntaxContext,
/// FAILS when outside is not a mark. /// FAILS when outside is not a mark.
pub fn outer_mark(ctxt: SyntaxContext) -> Mrk { pub fn outer_mark(ctxt: SyntaxContext) -> Mrk {
with_sctable(|sctable| { with_sctable(|sctable| {
match (*sctable.table.borrow())[ctxt as usize] { match (*sctable.table.borrow())[ctxt.0 as usize] {
Mark(mrk, _) => mrk, Mark(mrk, _) => mrk,
_ => panic!("can't retrieve outer mark when outside is not a mark") _ => panic!("can't retrieve outer mark when outside is not a mark")
} }
@ -302,7 +304,7 @@ mod tests {
} }
fn id(n: u32, s: SyntaxContext) -> Ident { fn id(n: u32, s: SyntaxContext) -> Ident {
Ident {name: Name(n), ctxt: s} Ident::new(Name(n), s)
} }
// because of the SCTable, I now need a tidy way of // because of the SCTable, I now need a tidy way of
@ -328,7 +330,7 @@ mod tests {
let mut result = Vec::new(); let mut result = Vec::new();
loop { loop {
let table = table.table.borrow(); let table = table.table.borrow();
match (*table)[sc as usize] { match (*table)[sc.0 as usize] {
EmptyCtxt => {return result;}, EmptyCtxt => {return result;},
Mark(mrk,tail) => { Mark(mrk,tail) => {
result.push(M(mrk)); result.push(M(mrk));
@ -349,15 +351,15 @@ mod tests {
fn test_unfold_refold(){ fn test_unfold_refold(){
let mut t = new_sctable_internal(); let mut t = new_sctable_internal();
let test_sc = vec!(M(3),R(id(101,0),Name(14)),M(9)); let test_sc = vec!(M(3),R(id(101,EMPTY_CTXT),Name(14)),M(9));
assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),4); assert_eq!(unfold_test_sc(test_sc.clone(),EMPTY_CTXT,&mut t),SyntaxContext(4));
{ {
let table = t.table.borrow(); let table = t.table.borrow();
assert!((*table)[2] == Mark(9,0)); assert!((*table)[2] == Mark(9,EMPTY_CTXT));
assert!((*table)[3] == Rename(id(101,0),Name(14),2)); assert!((*table)[3] == Rename(id(101,EMPTY_CTXT),Name(14),SyntaxContext(2)));
assert!((*table)[4] == Mark(3,3)); assert!((*table)[4] == Mark(3,SyntaxContext(3)));
} }
assert_eq!(refold_test_sc(4,&t),test_sc); assert_eq!(refold_test_sc(SyntaxContext(4),&t),test_sc);
} }
// extend a syntax context with a sequence of marks given // extend a syntax context with a sequence of marks given
@ -371,11 +373,11 @@ mod tests {
#[test] fn unfold_marks_test() { #[test] fn unfold_marks_test() {
let mut t = new_sctable_internal(); let mut t = new_sctable_internal();
assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),3); assert_eq!(unfold_marks(vec!(3,7),EMPTY_CTXT,&mut t),SyntaxContext(3));
{ {
let table = t.table.borrow(); let table = t.table.borrow();
assert!((*table)[2] == Mark(7,0)); assert!((*table)[2] == Mark(7,EMPTY_CTXT));
assert!((*table)[3] == Mark(3,2)); assert!((*table)[3] == Mark(3,SyntaxContext(2)));
} }
} }
@ -396,7 +398,7 @@ mod tests {
assert_eq! (marksof_internal (ans, stopname,&t), [16]);} assert_eq! (marksof_internal (ans, stopname,&t), [16]);}
// rename where stop doesn't match: // rename where stop doesn't match:
{ let chain = vec!(M(9), { let chain = vec!(M(9),
R(id(name1.usize() as u32, R(id(name1.0,
apply_mark_internal (4, EMPTY_CTXT,&mut t)), apply_mark_internal (4, EMPTY_CTXT,&mut t)),
Name(100101102)), Name(100101102)),
M(14)); M(14));
@ -405,7 +407,7 @@ mod tests {
// rename where stop does match // rename where stop does match
{ let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t); { let name1sc = apply_mark_internal(4, EMPTY_CTXT, &mut t);
let chain = vec!(M(9), let chain = vec!(M(9),
R(id(name1.usize() as u32, name1sc), R(id(name1.0, name1sc),
stopname), stopname),
M(14)); M(14));
let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t); let ans = unfold_test_sc(chain,EMPTY_CTXT,&mut t);
@ -474,10 +476,10 @@ mod tests {
#[test] #[test]
fn hashing_tests () { fn hashing_tests () {
let mut t = new_sctable_internal(); let mut t = new_sctable_internal();
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),2); assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),SyntaxContext(2));
assert_eq!(apply_mark_internal(13,EMPTY_CTXT,&mut t),3); assert_eq!(apply_mark_internal(13,EMPTY_CTXT,&mut t),SyntaxContext(3));
// using the same one again should result in the same index: // using the same one again should result in the same index:
assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),2); assert_eq!(apply_mark_internal(12,EMPTY_CTXT,&mut t),SyntaxContext(2));
// I'm assuming that the rename table will behave the same.... // I'm assuming that the rename table will behave the same....
} }
@ -496,10 +498,10 @@ mod tests {
#[test] #[test]
fn new_resolves_test() { fn new_resolves_test() {
let renames = vec!((Ident{name:Name(23),ctxt:EMPTY_CTXT},Name(24)), let renames = vec!((Ident::with_empty_ctxt(Name(23)),Name(24)),
(Ident{name:Name(29),ctxt:EMPTY_CTXT},Name(29))); (Ident::with_empty_ctxt(Name(29)),Name(29)));
let new_ctxt1 = apply_renames(&renames,EMPTY_CTXT); let new_ctxt1 = apply_renames(&renames,EMPTY_CTXT);
assert_eq!(resolve(Ident{name:Name(23),ctxt:new_ctxt1}),Name(24)); assert_eq!(resolve(Ident::new(Name(23),new_ctxt1)),Name(24));
assert_eq!(resolve(Ident{name:Name(29),ctxt:new_ctxt1}),Name(29)); assert_eq!(resolve(Ident::new(Name(29),new_ctxt1)),Name(29));
} }
} }

View file

@ -464,7 +464,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
($name: expr, $suffix: expr, $($args: expr),*) => {{ ($name: expr, $suffix: expr, $($args: expr),*) => {{
let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]); let inner = cx.expr_call(sp, mk_token_path(cx, sp, $name), vec![$($args),*]);
let suffix = match $suffix { let suffix = match $suffix {
Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::new(name))), Some(name) => cx.expr_some(sp, mk_name(cx, sp, ast::Ident::with_empty_ctxt(name))),
None => cx.expr_none(sp) None => cx.expr_none(sp)
}; };
cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix]) cx.expr_call(sp, mk_token_path(cx, sp, "Literal"), vec![inner, suffix])
@ -489,31 +489,32 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
} }
token::Literal(token::Byte(i), suf) => { token::Literal(token::Byte(i), suf) => {
let e_byte = mk_name(cx, sp, i.ident()); let e_byte = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
return mk_lit!("Byte", suf, e_byte); return mk_lit!("Byte", suf, e_byte);
} }
token::Literal(token::Char(i), suf) => { token::Literal(token::Char(i), suf) => {
let e_char = mk_name(cx, sp, i.ident()); let e_char = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
return mk_lit!("Char", suf, e_char); return mk_lit!("Char", suf, e_char);
} }
token::Literal(token::Integer(i), suf) => { token::Literal(token::Integer(i), suf) => {
let e_int = mk_name(cx, sp, i.ident()); let e_int = mk_name(cx, sp, ast::Ident::with_empty_ctxt(i));
return mk_lit!("Integer", suf, e_int); return mk_lit!("Integer", suf, e_int);
} }
token::Literal(token::Float(fident), suf) => { token::Literal(token::Float(fident), suf) => {
let e_fident = mk_name(cx, sp, fident.ident()); let e_fident = mk_name(cx, sp, ast::Ident::with_empty_ctxt(fident));
return mk_lit!("Float", suf, e_fident); return mk_lit!("Float", suf, e_fident);
} }
token::Literal(token::Str_(ident), suf) => { token::Literal(token::Str_(ident), suf) => {
return mk_lit!("Str_", suf, mk_name(cx, sp, ident.ident())) return mk_lit!("Str_", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)))
} }
token::Literal(token::StrRaw(ident, n), suf) => { token::Literal(token::StrRaw(ident, n), suf) => {
return mk_lit!("StrRaw", suf, mk_name(cx, sp, ident.ident()), cx.expr_usize(sp, n)) return mk_lit!("StrRaw", suf, mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident)),
cx.expr_usize(sp, n))
} }
token::Ident(ident, style) => { token::Ident(ident, style) => {
@ -535,7 +536,7 @@ fn expr_mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
token::DocComment(ident) => { token::DocComment(ident) => {
return cx.expr_call(sp, return cx.expr_call(sp,
mk_token_path(cx, sp, "DocComment"), mk_token_path(cx, sp, "DocComment"),
vec!(mk_name(cx, sp, ident.ident()))); vec!(mk_name(cx, sp, ast::Ident::with_empty_ctxt(ident))));
} }
token::MatchNt(name, kind, namep, kindp) => { token::MatchNt(name, kind, namep, kindp) => {

View file

@ -79,7 +79,7 @@ pub use self::ParseResult::*;
use self::TokenTreeOrTokenTreeVec::*; use self::TokenTreeOrTokenTreeVec::*;
use ast; use ast;
use ast::{TokenTree, Ident}; use ast::{TokenTree, Name};
use ast::{TtDelimited, TtSequence, TtToken}; use ast::{TtDelimited, TtSequence, TtToken};
use codemap::{BytePos, mk_sp, Span}; use codemap::{BytePos, mk_sp, Span};
use codemap; use codemap;
@ -202,9 +202,9 @@ pub enum NamedMatch {
} }
pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>]) pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-> HashMap<Ident, Rc<NamedMatch>> { -> HashMap<Name, Rc<NamedMatch>> {
fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>], fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize) { ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize) {
match m { match m {
&TtSequence(_, ref seq) => { &TtSequence(_, ref seq) => {
for next_m in &seq.tts { for next_m in &seq.tts {
@ -217,7 +217,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
} }
} }
&TtToken(sp, MatchNt(bind_name, _, _, _)) => { &TtToken(sp, MatchNt(bind_name, _, _, _)) => {
match ret_val.entry(bind_name) { match ret_val.entry(bind_name.name) {
Vacant(spot) => { Vacant(spot) => {
spot.insert(res[*idx].clone()); spot.insert(res[*idx].clone());
*idx += 1; *idx += 1;
@ -246,7 +246,7 @@ pub enum ParseResult<T> {
Error(codemap::Span, String) Error(codemap::Span, String)
} }
pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>; pub type NamedParseResult = ParseResult<HashMap<Name, Rc<NamedMatch>>>;
pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>; pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
/// Perform a token equality check, ignoring syntax context (that is, an /// Perform a token equality check, ignoring syntax context (that is, an

View file

@ -282,7 +282,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
}; };
// Extract the arguments: // Extract the arguments:
let lhses = match **argument_map.get(&lhs_nm).unwrap() { let lhses = match **argument_map.get(&lhs_nm.name).unwrap() {
MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(), MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
_ => cx.span_bug(def.span, "wrong-structured lhs") _ => cx.span_bug(def.span, "wrong-structured lhs")
}; };
@ -291,7 +291,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
check_lhs_nt_follows(cx, &**lhs, def.span); check_lhs_nt_follows(cx, &**lhs, def.span);
} }
let rhses = match **argument_map.get(&rhs_nm).unwrap() { let rhses = match **argument_map.get(&rhs_nm.name).unwrap() {
MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(), MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
_ => cx.span_bug(def.span, "wrong-structured rhs") _ => cx.span_bug(def.span, "wrong-structured rhs")
}; };
@ -510,14 +510,14 @@ fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
"pat" => { "pat" => {
match *tok { match *tok {
FatArrow | Comma | Eq => Ok(true), FatArrow | Comma | Eq => Ok(true),
Ident(i, _) if i.name == "if" || i.name == "in" => Ok(true), Ident(i, _) if i.name.as_str() == "if" || i.name.as_str() == "in" => Ok(true),
_ => Ok(false) _ => Ok(false)
} }
}, },
"path" | "ty" => { "path" | "ty" => {
match *tok { match *tok {
Comma | FatArrow | Colon | Eq | Gt | Semi => Ok(true), Comma | FatArrow | Colon | Eq | Gt | Semi => Ok(true),
Ident(i, _) if i.name == "as" => Ok(true), Ident(i, _) if i.name.as_str() == "as" => Ok(true),
_ => Ok(false) _ => Ok(false)
} }
}, },

View file

@ -10,7 +10,7 @@
use self::LockstepIterSize::*; use self::LockstepIterSize::*;
use ast; use ast;
use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident}; use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident, Name};
use codemap::{Span, DUMMY_SP}; use codemap::{Span, DUMMY_SP};
use diagnostic::SpanHandler; use diagnostic::SpanHandler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal}; use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
@ -38,7 +38,7 @@ pub struct TtReader<'a> {
/// the unzipped tree: /// the unzipped tree:
stack: Vec<TtFrame>, stack: Vec<TtFrame>,
/* for MBE-style macro transcription */ /* for MBE-style macro transcription */
interpolations: HashMap<Ident, Rc<NamedMatch>>, interpolations: HashMap<Name, Rc<NamedMatch>>,
imported_from: Option<Ident>, imported_from: Option<Ident>,
// Some => return imported_from as the next token // Some => return imported_from as the next token
@ -56,7 +56,7 @@ pub struct TtReader<'a> {
/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be None. /// (and should) be None.
pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler, pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>, interp: Option<HashMap<Name, Rc<NamedMatch>>>,
imported_from: Option<Ident>, imported_from: Option<Ident>,
src: Vec<ast::TokenTree>) src: Vec<ast::TokenTree>)
-> TtReader<'a> { -> TtReader<'a> {
@ -70,7 +70,7 @@ pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
/// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can /// `src` contains no `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can
/// (and should) be None. /// (and should) be None.
pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler, pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
interp: Option<HashMap<Ident, Rc<NamedMatch>>>, interp: Option<HashMap<Name, Rc<NamedMatch>>>,
imported_from: Option<Ident>, imported_from: Option<Ident>,
src: Vec<ast::TokenTree>, src: Vec<ast::TokenTree>,
desugar_doc_comments: bool) desugar_doc_comments: bool)
@ -117,7 +117,7 @@ fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<Name
} }
fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> { fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
let matched_opt = r.interpolations.get(&name).cloned(); let matched_opt = r.interpolations.get(&name.name).cloned();
matched_opt.map(|s| lookup_cur_matched_by_matched(r, s)) matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
} }

View file

@ -35,7 +35,7 @@ use codemap::{CodeMap, Span};
use diagnostic::SpanHandler; use diagnostic::SpanHandler;
use visit; use visit;
use visit::{FnKind, Visitor}; use visit::{FnKind, Visitor};
use parse::token::{self, InternedString}; use parse::token::InternedString;
use std::ascii::AsciiExt; use std::ascii::AsciiExt;
use std::cmp; use std::cmp;
@ -667,7 +667,7 @@ struct MacroVisitor<'a> {
impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> { impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> {
fn visit_mac(&mut self, mac: &ast::Mac) { fn visit_mac(&mut self, mac: &ast::Mac) {
let path = &mac.node.path; let path = &mac.node.path;
let id = path.segments.last().unwrap().identifier; let name = path.segments.last().unwrap().identifier.name.as_str();
// Issue 22234: If you add a new case here, make sure to also // Issue 22234: If you add a new case here, make sure to also
// add code to catch the macro during or after expansion. // add code to catch the macro during or after expansion.
@ -677,19 +677,19 @@ impl<'a, 'v> Visitor<'v> for MacroVisitor<'a> {
// catch uses of these macros within conditionally-compiled // catch uses of these macros within conditionally-compiled
// code, e.g. `#[cfg]`-guarded functions. // code, e.g. `#[cfg]`-guarded functions.
if id == token::str_to_ident("asm") { if name == "asm" {
self.context.gate_feature("asm", path.span, EXPLAIN_ASM); self.context.gate_feature("asm", path.span, EXPLAIN_ASM);
} }
else if id == token::str_to_ident("log_syntax") { else if name == "log_syntax" {
self.context.gate_feature("log_syntax", path.span, EXPLAIN_LOG_SYNTAX); self.context.gate_feature("log_syntax", path.span, EXPLAIN_LOG_SYNTAX);
} }
else if id == token::str_to_ident("trace_macros") { else if name == "trace_macros" {
self.context.gate_feature("trace_macros", path.span, EXPLAIN_TRACE_MACROS); self.context.gate_feature("trace_macros", path.span, EXPLAIN_TRACE_MACROS);
} }
else if id == token::str_to_ident("concat_idents") { else if name == "concat_idents" {
self.context.gate_feature("concat_idents", path.span, EXPLAIN_CONCAT_IDENTS); self.context.gate_feature("concat_idents", path.span, EXPLAIN_CONCAT_IDENTS);
} }
} }

View file

@ -40,7 +40,7 @@ impl<'a> ParserAttr for Parser<'a> {
token::DocComment(s) => { token::DocComment(s) => {
let attr = ::attr::mk_sugared_doc_attr( let attr = ::attr::mk_sugared_doc_attr(
attr::mk_attr_id(), attr::mk_attr_id(),
self.id_to_interned_str(s.ident()), self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)),
self.span.lo, self.span.lo,
self.span.hi self.span.hi
); );
@ -137,9 +137,8 @@ impl<'a> ParserAttr for Parser<'a> {
token::DocComment(s) => { token::DocComment(s) => {
// we need to get the position of this token before we bump. // we need to get the position of this token before we bump.
let Span { lo, hi, .. } = self.span; let Span { lo, hi, .. } = self.span;
let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), let str = self.id_to_interned_str(ast::Ident::with_empty_ctxt(s));
self.id_to_interned_str(s.ident()), let attr = attr::mk_sugared_doc_attr(attr::mk_attr_id(), str, lo, hi);
lo, hi);
if attr.node.style == ast::AttrInner { if attr.node.style == ast::AttrInner {
attrs.push(attr); attrs.push(attr);
panictry!(self.bump()); panictry!(self.bump());

View file

@ -744,8 +744,8 @@ mod tests {
Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))), Some(&ast::TtToken(_, token::Ident(name_zip, token::Plain))),
Some(&ast::TtDelimited(_, ref macro_delimed)), Some(&ast::TtDelimited(_, ref macro_delimed)),
) )
if name_macro_rules.name == "macro_rules" if name_macro_rules.name.as_str() == "macro_rules"
&& name_zip.name == "zip" => { && name_zip.name.as_str() == "zip" => {
let tts = &macro_delimed.tts[..]; let tts = &macro_delimed.tts[..];
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) { match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
( (
@ -763,7 +763,7 @@ mod tests {
Some(&ast::TtToken(_, token::Ident(ident, token::Plain))), Some(&ast::TtToken(_, token::Ident(ident, token::Plain))),
) )
if first_delimed.delim == token::Paren if first_delimed.delim == token::Paren
&& ident.name == "a" => {}, && ident.name.as_str() == "a" => {},
_ => panic!("value 3: {:?}", **first_delimed), _ => panic!("value 3: {:?}", **first_delimed),
} }
let tts = &second_delimed.tts[..]; let tts = &second_delimed.tts[..];
@ -774,7 +774,7 @@ mod tests {
Some(&ast::TtToken(_, token::Ident(ident, token::Plain))), Some(&ast::TtToken(_, token::Ident(ident, token::Plain))),
) )
if second_delimed.delim == token::Paren if second_delimed.delim == token::Paren
&& ident.name == "a" => {}, && ident.name.as_str() == "a" => {},
_ => panic!("value 4: {:?}", **second_delimed), _ => panic!("value 4: {:?}", **second_delimed),
} }
}, },

View file

@ -4715,7 +4715,7 @@ impl<'a> Parser<'a> {
(fields, None) (fields, None)
// Tuple-style struct definition with optional where-clause. // Tuple-style struct definition with optional where-clause.
} else if self.token == token::OpenDelim(token::Paren) { } else if self.token == token::OpenDelim(token::Paren) {
let fields = try!(self.parse_tuple_struct_body(&class_name, &mut generics)); let fields = try!(self.parse_tuple_struct_body(class_name, &mut generics));
(fields, Some(ast::DUMMY_NODE_ID)) (fields, Some(ast::DUMMY_NODE_ID))
} else { } else {
let token_str = self.this_token_to_string(); let token_str = self.this_token_to_string();
@ -4750,7 +4750,7 @@ impl<'a> Parser<'a> {
} }
pub fn parse_tuple_struct_body(&mut self, pub fn parse_tuple_struct_body(&mut self,
class_name: &ast::Ident, class_name: ast::Ident,
generics: &mut ast::Generics) generics: &mut ast::Generics)
-> PResult<Vec<StructField>> { -> PResult<Vec<StructField>> {
// This is the case where we find `struct Foo<T>(T) where T: Copy;` // This is the case where we find `struct Foo<T>(T) where T: Copy;`
@ -5780,10 +5780,10 @@ impl<'a> Parser<'a> {
Option<ast::Name>)>> { Option<ast::Name>)>> {
let ret = match self.token { let ret = match self.token {
token::Literal(token::Str_(s), suf) => { token::Literal(token::Str_(s), suf) => {
(self.id_to_interned_str(s.ident()), ast::CookedStr, suf) (self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)), ast::CookedStr, suf)
} }
token::Literal(token::StrRaw(s, n), suf) => { token::Literal(token::StrRaw(s, n), suf) => {
(self.id_to_interned_str(s.ident()), ast::RawStr(n), suf) (self.id_to_interned_str(ast::Ident::with_empty_ctxt(s)), ast::RawStr(n), suf)
} }
_ => return Ok(None) _ => return Ok(None)
}; };

View file

@ -453,7 +453,7 @@ macro_rules! declare_special_idents_and_keywords {(
#[allow(non_upper_case_globals)] #[allow(non_upper_case_globals)]
pub const $si_static: ast::Ident = ast::Ident { pub const $si_static: ast::Ident = ast::Ident {
name: ast::Name($si_name), name: ast::Name($si_name),
ctxt: 0, ctxt: ast::EMPTY_CTXT,
}; };
)* )*
} }
@ -462,7 +462,7 @@ macro_rules! declare_special_idents_and_keywords {(
use ast; use ast;
$( $(
#[allow(non_upper_case_globals)] #[allow(non_upper_case_globals)]
pub const $si_static: ast::Name = ast::Name($si_name); pub const $si_static: ast::Name = ast::Name($si_name);
)* )*
} }
@ -729,19 +729,19 @@ pub fn gensym(s: &str) -> ast::Name {
/// Maps a string to an identifier with an empty syntax context. /// Maps a string to an identifier with an empty syntax context.
#[inline] #[inline]
pub fn str_to_ident(s: &str) -> ast::Ident { pub fn str_to_ident(s: &str) -> ast::Ident {
ast::Ident::new(intern(s)) ast::Ident::with_empty_ctxt(intern(s))
} }
/// Maps a string to a gensym'ed identifier. /// Maps a string to a gensym'ed identifier.
#[inline] #[inline]
pub fn gensym_ident(s: &str) -> ast::Ident { pub fn gensym_ident(s: &str) -> ast::Ident {
ast::Ident::new(gensym(s)) ast::Ident::with_empty_ctxt(gensym(s))
} }
// create a fresh name that maps to the same string as the old one. // create a fresh name that maps to the same string as the old one.
// note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src))); // note that this guarantees that str_ptr_eq(ident_to_string(src),interner_get(fresh_name(src)));
// that is, that the new name and the old one are connected to ptr_eq strings. // that is, that the new name and the old one are connected to ptr_eq strings.
pub fn fresh_name(src: &ast::Ident) -> ast::Name { pub fn fresh_name(src: ast::Ident) -> ast::Name {
let interner = get_ident_interner(); let interner = get_ident_interner();
interner.gensym_copy(src.name) interner.gensym_copy(src.name)
// following: debug version. Could work in final except that it's incompatible with // following: debug version. Could work in final except that it's incompatible with
@ -753,7 +753,7 @@ pub fn fresh_name(src: &ast::Ident) -> ast::Name {
// create a fresh mark. // create a fresh mark.
pub fn fresh_mark() -> ast::Mrk { pub fn fresh_mark() -> ast::Mrk {
gensym("mark").usize() as u32 gensym("mark").0
} }
#[cfg(test)] #[cfg(test)]
@ -763,7 +763,7 @@ mod tests {
use ext::mtwt; use ext::mtwt;
fn mark_ident(id : ast::Ident, m : ast::Mrk) -> ast::Ident { fn mark_ident(id : ast::Ident, m : ast::Mrk) -> ast::Ident {
ast::Ident { name: id.name, ctxt:mtwt::apply_mark(m, id.ctxt) } ast::Ident::new(id.name, mtwt::apply_mark(m, id.ctxt))
} }
#[test] fn mtwt_token_eq_test() { #[test] fn mtwt_token_eq_test() {

View file

@ -297,7 +297,7 @@ pub fn token_to_string(tok: &Token) -> String {
token::NtBlock(ref e) => block_to_string(&**e), token::NtBlock(ref e) => block_to_string(&**e),
token::NtStmt(ref e) => stmt_to_string(&**e), token::NtStmt(ref e) => stmt_to_string(&**e),
token::NtPat(ref e) => pat_to_string(&**e), token::NtPat(ref e) => pat_to_string(&**e),
token::NtIdent(ref e, _) => ident_to_string(&**e), token::NtIdent(ref e, _) => ident_to_string(**e),
token::NtTT(ref e) => tt_to_string(&**e), token::NtTT(ref e) => tt_to_string(&**e),
token::NtArm(ref e) => arm_to_string(&*e), token::NtArm(ref e) => arm_to_string(&*e),
token::NtImplItem(ref e) => impl_item_to_string(&**e), token::NtImplItem(ref e) => impl_item_to_string(&**e),
@ -376,8 +376,8 @@ pub fn path_to_string(p: &ast::Path) -> String {
to_string(|s| s.print_path(p, false, 0)) to_string(|s| s.print_path(p, false, 0))
} }
pub fn ident_to_string(id: &ast::Ident) -> String { pub fn ident_to_string(id: ast::Ident) -> String {
to_string(|s| s.print_ident(*id)) to_string(|s| s.print_ident(id))
} }
pub fn fun_to_string(decl: &ast::FnDecl, pub fn fun_to_string(decl: &ast::FnDecl,
@ -2854,7 +2854,6 @@ impl<'a> State<'a> {
ast::ViewPathSimple(ident, ref path) => { ast::ViewPathSimple(ident, ref path) => {
try!(self.print_path(path, false, 0)); try!(self.print_path(path, false, 0));
// FIXME(#6993) can't compare identifiers directly here
if path.segments.last().unwrap().identifier.name != if path.segments.last().unwrap().identifier.name !=
ident.name { ident.name {
try!(space(&mut self.s)); try!(space(&mut self.s));

View file

@ -69,7 +69,7 @@ impl<T: Eq + Hash + Clone + 'static> Interner<T> {
pub fn get(&self, idx: Name) -> T { pub fn get(&self, idx: Name) -> T {
let vect = self.vect.borrow(); let vect = self.vect.borrow();
(*vect)[idx.usize()].clone() (*vect)[idx.0 as usize].clone()
} }
pub fn len(&self) -> usize { pub fn len(&self) -> usize {
@ -196,13 +196,13 @@ impl StrInterner {
let new_idx = Name(self.len() as u32); let new_idx = Name(self.len() as u32);
// leave out of map to avoid colliding // leave out of map to avoid colliding
let mut vect = self.vect.borrow_mut(); let mut vect = self.vect.borrow_mut();
let existing = (*vect)[idx.usize()].clone(); let existing = (*vect)[idx.0 as usize].clone();
vect.push(existing); vect.push(existing);
new_idx new_idx
} }
pub fn get(&self, idx: Name) -> RcStr { pub fn get(&self, idx: Name) -> RcStr {
(*self.vect.borrow())[idx.usize()].clone() (*self.vect.borrow())[idx.0 as usize].clone()
} }
pub fn len(&self) -> usize { pub fn len(&self) -> usize {

View file

@ -35,7 +35,7 @@ impl LintPass for Pass {
impl EarlyLintPass for Pass { impl EarlyLintPass for Pass {
fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) { fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
if it.ident.name == "lintme" { if it.ident.name.as_str() == "lintme" {
cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'"); cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
} }
} }

View file

@ -33,7 +33,7 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
let mac_expr = match TokenTree::parse(cx, &mbe_matcher[..], args) { let mac_expr = match TokenTree::parse(cx, &mbe_matcher[..], args) {
Success(map) => { Success(map) => {
match (&*map[&str_to_ident("matched")], &*map[&str_to_ident("pat")]) { match (&*map[&str_to_ident("matched").name], &*map[&str_to_ident("pat").name]) {
(&MatchedNonterminal(NtExpr(ref matched_expr)), (&MatchedNonterminal(NtExpr(ref matched_expr)),
&MatchedSeq(ref pats, seq_sp)) => { &MatchedSeq(ref pats, seq_sp)) => {
let pats: Vec<P<Pat>> = pats.iter().map(|pat_nt| let pats: Vec<P<Pat>> = pats.iter().map(|pat_nt|

View file

@ -16,7 +16,6 @@
// "enable" to 0 instead. // "enable" to 0 instead.
// compile-flags:-g -Cllvm-args=-enable-tail-merge=0 // compile-flags:-g -Cllvm-args=-enable-tail-merge=0
// ignore-pretty as this critically relies on line numbers // ignore-pretty as this critically relies on line numbers
// ignore-windows
use std::io; use std::io;
use std::io::prelude::*; use std::io::prelude::*;

View file

@ -11,7 +11,6 @@
// no-pretty-expanded FIXME #15189 // no-pretty-expanded FIXME #15189
// ignore-android FIXME #17520 // ignore-android FIXME #17520
// ignore-msvc FIXME #28133 // ignore-msvc FIXME #28133
// ignore-windows
use std::env; use std::env;
use std::process::{Command, Stdio}; use std::process::{Command, Stdio};