Replace the get method with the deref one on InternedString

GuillaumeGomez 2015-02-03 00:23:08 +01:00
parent 966e6c0c37
commit d58c0a7597
24 changed files with 123 additions and 104 deletions
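
A note on the pattern: every hunk below makes the same substitution. InternedString used to expose an explicit get() accessor returning &str; this commit removes it (see the hunk near the end of the diff that deletes the method) and call sites instead go through the type's Deref impl, writing x.deref() where they previously wrote x.get(). As a rough illustration only, here is a minimal sketch built around a hypothetical Interned type rather than the real libsyntax definitions:

    use std::ops::Deref;
    use std::rc::Rc;

    // Hypothetical stand-in for InternedString: a cheaply clonable handle
    // that hands out &str views of the interned text.
    #[derive(Clone)]
    struct Interned {
        string: Rc<String>,
    }

    impl Deref for Interned {
        type Target = str;
        // With this impl, `s.deref()` and auto-deref in method calls reach
        // the underlying str, which is what replaces `s.get()`.
        fn deref(&self) -> &str {
            &self.string
        }
    }

    fn main() {
        let s = Interned { string: Rc::new("doc".to_string()) };
        assert_eq!(s.deref(), "doc"); // the spelling this commit uses
        assert!(s.starts_with("d"));  // auto-deref to str's methods
    }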

@@ -68,6 +68,7 @@ use std::fmt::Show;
 use std::num::Int;
 use std::rc::Rc;
 use serialize::{Encodable, Decodable, Encoder, Decoder};
+use std::ops::Deref;
 
 // FIXME #6993: in librustc, uses of "ident" should be replaced
 // by just "Name".
@@ -112,13 +113,13 @@ impl fmt::Display for Ident {
 impl fmt::Debug for Name {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         let Name(nm) = *self;
-        write!(f, "{:?}({})", token::get_name(*self).get(), nm)
+        write!(f, "{:?}({})", token::get_name(*self).deref(), nm)
     }
 }
 
 impl fmt::Display for Name {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        fmt::Display::fmt(token::get_name(*self).get(), f)
+        fmt::Display::fmt(token::get_name(*self).deref(), f)
     }
 }
@@ -174,7 +175,7 @@ impl Name {
     pub fn as_str<'a>(&'a self) -> &'a str {
         unsafe {
             // FIXME #12938: can't use copy_lifetime since &str isn't a &T
-            ::std::mem::transmute::<&str,&str>(token::get_name(*self).get())
+            ::std::mem::transmute::<&str,&str>(token::get_name(*self).deref())
         }
     }
@@ -193,7 +194,7 @@ pub type Mrk = u32;
 impl Encodable for Ident {
     fn encode<S: Encoder>(&self, s: &mut S) -> Result<(), S::Error> {
-        s.emit_str(token::get_ident(*self).get())
+        s.emit_str(token::get_ident(*self).deref())
     }
 }

@@ -23,11 +23,12 @@ use visit;
 use std::cmp;
 use std::u32;
+use std::ops::Deref;
 
 pub fn path_name_i(idents: &[Ident]) -> String {
     // FIXME: Bad copies (#2543 -- same for everything else that says "bad")
     idents.iter().map(|i| {
-        token::get_ident(*i).get().to_string()
+        token::get_ident(*i).deref().to_string()
     }).collect::<Vec<String>>().connect("::")
 }

@@ -29,6 +29,7 @@ use std::cell::{RefCell, Cell};
 use std::collections::BitvSet;
 use std::collections::HashSet;
 use std::fmt;
+use std::ops::Deref;
 
 thread_local! { static USED_ATTRS: RefCell<BitvSet> = RefCell::new(BitvSet::new()) }
@@ -44,7 +45,7 @@ pub fn is_used(attr: &Attribute) -> bool {
 pub trait AttrMetaMethods {
     fn check_name(&self, name: &str) -> bool {
-        name == self.name().get()
+        name == self.name().deref()
     }
 
     /// Retrieve the name of the meta item, e.g. `foo` in `#[foo]`,
@@ -62,7 +63,7 @@ pub trait AttrMetaMethods {
 impl AttrMetaMethods for Attribute {
     fn check_name(&self, name: &str) -> bool {
-        let matches = name == self.name().get();
+        let matches = name == self.name().deref();
         if matches {
             mark_used(self);
         }
@@ -142,7 +143,7 @@ impl AttributeMethods for Attribute {
             let meta = mk_name_value_item_str(
                 InternedString::new("doc"),
                 token::intern_and_get_ident(&strip_doc_comment_decoration(
-                        comment.get())[]));
+                        comment.deref())[]));
             if self.node.style == ast::AttrOuter {
                 f(&mk_attr_outer(self.node.id, meta))
             } else {
@@ -209,7 +210,7 @@ pub fn mk_attr_outer(id: AttrId, item: P<MetaItem>) -> Attribute {
 pub fn mk_sugared_doc_attr(id: AttrId, text: InternedString, lo: BytePos,
                            hi: BytePos)
                            -> Attribute {
-    let style = doc_comment_style(text.get());
+    let style = doc_comment_style(text.deref());
     let lit = spanned(lo, hi, ast::LitStr(text, ast::CookedStr));
     let attr = Attribute_ {
         id: id,
@@ -326,11 +327,11 @@ pub fn requests_inline(attrs: &[Attribute]) -> bool {
 /// Tests if a cfg-pattern matches the cfg set
 pub fn cfg_matches(diagnostic: &SpanHandler, cfgs: &[P<MetaItem>], cfg: &ast::MetaItem) -> bool {
     match cfg.node {
-        ast::MetaList(ref pred, ref mis) if pred.get() == "any" =>
+        ast::MetaList(ref pred, ref mis) if pred.deref() == "any" =>
             mis.iter().any(|mi| cfg_matches(diagnostic, cfgs, &**mi)),
-        ast::MetaList(ref pred, ref mis) if pred.get() == "all" =>
+        ast::MetaList(ref pred, ref mis) if pred.deref() == "all" =>
             mis.iter().all(|mi| cfg_matches(diagnostic, cfgs, &**mi)),
-        ast::MetaList(ref pred, ref mis) if pred.get() == "not" => {
+        ast::MetaList(ref pred, ref mis) if pred.deref() == "not" => {
             if mis.len() != 1 {
                 diagnostic.span_err(cfg.span, "expected 1 cfg-pattern");
                 return false;
@@ -382,7 +383,7 @@ fn find_stability_generic<'a,
     'outer: for attr in attrs {
         let tag = attr.name();
-        let tag = tag.get();
+        let tag = tag.deref();
         if tag != "deprecated" && tag != "unstable" && tag != "stable" {
             continue // not a stability level
         }
@@ -394,8 +395,8 @@ fn find_stability_generic<'a,
         let mut feature = None;
         let mut since = None;
         let mut reason = None;
-        for meta in metas {
-            if meta.name().get() == "feature" {
+        for meta in metas.iter() {
+            if meta.name() == "feature" {
                 match meta.value_str() {
                     Some(v) => feature = Some(v),
                     None => {
@@ -404,7 +405,7 @@ fn find_stability_generic<'a,
                     }
                 }
             }
-            if meta.name().get() == "since" {
+            if meta.name().deref() == "since" {
                 match meta.value_str() {
                     Some(v) => since = Some(v),
                     None => {
@@ -413,7 +414,7 @@ fn find_stability_generic<'a,
                     }
                 }
             }
-            if meta.name().get() == "reason" {
+            if meta.name().deref() == "reason" {
                 match meta.value_str() {
                     Some(v) => reason = Some(v),
                     None => {
@@ -521,11 +522,11 @@ pub fn find_repr_attrs(diagnostic: &SpanHandler, attr: &Attribute) -> Vec<ReprAt
             for item in items {
                 match item.node {
                     ast::MetaWord(ref word) => {
-                        let hint = match word.get() {
+                        let hint = match word.deref() {
                             // Can't use "extern" because it's not a lexical identifier.
                             "C" => Some(ReprExtern),
                             "packed" => Some(ReprPacked),
-                            _ => match int_type_of_word(word.get()) {
+                            _ => match int_type_of_word(word.deref()) {
                                 Some(ity) => Some(ReprInt(item.span, ity)),
                                 None => {
                                     // Not a word we recognize
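
One hunk above goes a step further than swapping get() for deref(): the "feature" check becomes meta.name() == "feature", with no accessor at all. That spelling only compiles if the interned string type also offers a suitable PartialEq impl against string slices; Deref alone is not enough. A hedged sketch of the kind of impl that would make such a comparison work, again on a hypothetical Interned type rather than the real one:

    use std::ops::Deref;

    // Hypothetical interned-string handle, as in the sketch near the top.
    struct Interned {
        string: String,
    }

    impl Deref for Interned {
        type Target = str;
        fn deref(&self) -> &str {
            &self.string
        }
    }

    // Comparing an Interned directly with a &str literal needs an explicit
    // PartialEq impl; without one, call sites keep spelling out .deref().
    impl<'a> PartialEq<&'a str> for Interned {
        fn eq(&self, other: &&'a str) -> bool {
            self.string.as_str() == *other
        }
    }

    fn main() {
        let name = Interned { string: "feature".to_string() };
        assert!(name == "feature");         // reads like the hunk above
        assert!(name.deref() == "feature"); // the fallback spelling
    }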

@@ -10,6 +10,8 @@
 use std::cell::RefCell;
 use std::collections::BTreeMap;
+use std::ops::Deref;
+
 use ast;
 use ast::{Ident, Name, TokenTree};
 use codemap::Span;
@@ -57,7 +59,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
         match diagnostics.insert(code.name, span) {
             Some(previous_span) => {
                 ecx.span_warn(span, &format!(
-                    "diagnostic code {} already used", token::get_ident(code).get()
+                    "diagnostic code {} already used", token::get_ident(code).deref()
                 )[]);
                 ecx.span_note(previous_span, "previous invocation");
             },
@@ -68,7 +70,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
     with_registered_diagnostics(|diagnostics| {
         if !diagnostics.contains_key(&code.name) {
             ecx.span_err(span, &format!(
-                "used diagnostic code {} not registered", token::get_ident(code).get()
+                "used diagnostic code {} not registered", token::get_ident(code).deref()
             )[]);
         }
     });
@@ -93,12 +95,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
     with_registered_diagnostics(|diagnostics| {
         if diagnostics.insert(code.name, description).is_some() {
             ecx.span_err(span, &format!(
-                "diagnostic code {} already registered", token::get_ident(*code).get()
+                "diagnostic code {} already registered", token::get_ident(*code).deref()
             )[]);
         }
     });
     let sym = Ident::new(token::gensym(&(
-        "__register_diagnostic_".to_string() + token::get_ident(*code).get()
+        "__register_diagnostic_".to_string() + token::get_ident(*code).deref()
     )[]));
     MacItems::new(vec![quote_item!(ecx, mod $sym {}).unwrap()].into_iter())
 }

@@ -22,6 +22,8 @@ use parse::token::InternedString;
 use parse::token;
 use ptr::P;
+use std::ops::Deref;
+
 enum State {
     Asm,
     Outputs,
@@ -102,7 +104,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                 // It's the opposite of '=&' which means that the memory
                 // cannot be shared with any other operand (usually when
                 // a register is clobbered early.)
-                let output = match constraint.get().slice_shift_char() {
+                let output = match constraint.deref().slice_shift_char() {
                     Some(('=', _)) => None,
                     Some(('+', operand)) => {
                         Some(token::intern_and_get_ident(&format!(
@@ -129,9 +131,9 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                 let (constraint, _str_style) = p.parse_str();
 
-                if constraint.get().starts_with("=") {
+                if constraint.deref().starts_with("=") {
                     cx.span_err(p.last_span, "input operand constraint contains '='");
-                } else if constraint.get().starts_with("+") {
+                } else if constraint.deref().starts_with("+") {
                     cx.span_err(p.last_span, "input operand constraint contains '+'");
                 }
@@ -213,7 +215,7 @@ pub fn expand_asm<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     MacExpr::new(P(ast::Expr {
         id: ast::DUMMY_NODE_ID,
         node: ast::ExprInlineAsm(ast::InlineAsm {
-            asm: token::intern_and_get_ident(asm.get()),
+            asm: token::intern_and_get_ident(asm.deref()),
             asm_str_style: asm_str_style.unwrap(),
             outputs: outputs,
             inputs: inputs,

@@ -28,6 +28,7 @@ use fold::Folder;
 use std::collections::HashMap;
 use std::rc::Rc;
+use std::ops::Deref;
 
 pub trait ItemDecorator {
     fn expand(&self,
@@ -790,7 +791,7 @@ pub fn get_single_str_from_tts(cx: &mut ExtCtxt,
         cx.span_err(sp, &format!("{} takes 1 argument", name)[]);
     }
     expr_to_string(cx, ret, "argument must be a string literal").map(|(s, _)| {
-        s.get().to_string()
+        s.deref().to_string()
     })
 }

@@ -21,6 +21,7 @@ use parse::token::InternedString;
 use parse::token;
 use ptr::P;
+use std::ops::Deref;
 
 // Transitional reexports so qquote can find the paths it is looking for
 mod syntax {
@@ -576,7 +577,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
     fn expr_field_access(&self, sp: Span, expr: P<ast::Expr>, ident: ast::Ident) -> P<ast::Expr> {
         let field_name = token::get_ident(ident);
         let field_span = Span {
-            lo: sp.lo - Pos::from_usize(field_name.get().len()),
+            lo: sp.lo - Pos::from_usize(field_name.deref().len()),
             hi: sp.hi,
             expn_id: sp.expn_id,
         };

@@ -15,6 +15,7 @@ use ext::build::AstBuilder;
 use parse::token;
 use std::string::String;
+use std::ops::Deref;
 
 pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
                          sp: codemap::Span,
@@ -32,7 +33,7 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
                 ast::LitStr(ref s, _) |
                 ast::LitFloat(ref s, _) |
                 ast::LitFloatUnsuffixed(ref s) => {
-                    accumulator.push_str(s.get());
+                    accumulator.push_str(s.deref());
                 }
                 ast::LitChar(c) => {
                     accumulator.push(c);

@@ -16,6 +16,8 @@ use parse::token;
 use parse::token::{str_to_ident};
 use ptr::P;
+use std::ops::Deref;
+
 pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                               -> Box<base::MacResult+'cx> {
     let mut res_str = String::new();
@@ -31,7 +33,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
         } else {
             match *e {
                 ast::TtToken(_, token::Ident(ident, _)) => {
-                    res_str.push_str(token::get_ident(ident).get())
+                    res_str.push_str(token::get_ident(ident).deref())
                 },
                 _ => {
                     cx.span_err(sp, "concat_idents! requires ident args.");

@@ -15,6 +15,8 @@ use ext::deriving::generic::*;
 use ext::deriving::generic::ty::*;
 use ptr::P;
+use std::ops::Deref;
+
 pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
                                 span: Span,
                                 mitem: &MetaItem,
@@ -24,7 +26,7 @@ pub fn expand_deriving_bound<F>(cx: &mut ExtCtxt,
 {
     let name = match mitem.node {
         MetaWord(ref tname) => {
-            match tname.get() {
+            match tname.deref() {
                 "Copy" => "Copy",
                 "Send" | "Sync" => {
                     return cx.span_err(span,

@@ -189,6 +189,7 @@ use self::StructType::*;
 use std::cell::RefCell;
 use std::vec;
+use std::ops::Deref;
 
 use abi::Abi;
 use abi;
@@ -363,7 +364,7 @@ impl<'a> TraitDef<'a> {
         // generated implementations are linted
         let mut attrs = newitem.attrs.clone();
         attrs.extend(item.attrs.iter().filter(|a| {
-            match a.name().get() {
+            match a.name().deref() {
                 "allow" | "warn" | "deny" | "forbid" => true,
                 _ => false,
             }

@@ -18,6 +18,8 @@ use ext::base::ExtCtxt;
 use codemap::Span;
 use ptr::P;
+use std::ops::Deref;
+
 pub mod bounds;
 pub mod clone;
 pub mod encodable;
@@ -74,7 +76,7 @@ pub fn expand_meta_derive(cx: &mut ExtCtxt,
                         |i| push(i)))
             }
 
-            match tname.get() {
+            match tname.deref() {
                 "Clone" => expand!(clone::expand_deriving_clone),
 
                 "Hash" => expand!(hash::expand_deriving_hash),

@@ -20,6 +20,7 @@ use parse::token;
 use ptr::P;
 use std::collections::HashMap;
+use std::ops::Deref;
 
 pub fn expand_deriving_show<F>(cx: &mut ExtCtxt,
                                span: Span,
@@ -72,7 +73,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
         }
     };
 
-    let mut format_string = String::from_str(token::get_ident(name).get());
+    let mut format_string = String::from_str(token::get_ident(name).deref());
 
     // the internal fields we're actually formatting
     let mut exprs = Vec::new();
@@ -107,7 +108,7 @@ fn show_substructure(cx: &mut ExtCtxt, span: Span,
                 let name = token::get_ident(field.name.unwrap());
                 format_string.push_str(" ");
-                format_string.push_str(name.get());
+                format_string.push_str(name.deref());
                 format_string.push_str(": {:?}");
 
                 exprs.push(field.self_.clone());

@@ -22,6 +22,8 @@ use ext::build::AstBuilder;
 use parse::token;
 use std::env;
+use std::os;
+use std::ops::Deref;
 
 pub fn expand_option_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                               -> Box<base::MacResult+'cx> {
@@ -101,9 +103,9 @@ pub fn expand_env<'cx>(cx: &'cx mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
         }
     }
 
-    let e = match env::var_string(var.get()) {
-        Err(..) => {
-            cx.span_err(sp, msg.get());
+    let e = match os::getenv(var.deref()) {
+        None => {
+            cx.span_err(sp, msg.deref());
             cx.expr_usize(sp, 0)
         }
         Ok(s) => cx.expr_str(sp, token::intern_and_get_ident(&s[]))

@@ -32,6 +32,8 @@ use util::small_vector::SmallVector;
 use visit;
 use visit::Visitor;
+use std::ops::Deref;
+
 pub fn expand_type(t: P<ast::Ty>,
                    fld: &mut MacroExpander,
                    impl_ty: Option<P<ast::Ty>>)
@@ -375,7 +377,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                 fld.cx.span_err(
                     pth.span,
                     &format!("macro undefined: '{}!'",
-                             extnamestr.get())[]);
+                             extnamestr.deref())[]);
 
                 // let compilation continue
                 None
@@ -385,7 +387,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                 fld.cx.bt_push(ExpnInfo {
                     call_site: span,
                     callee: NameAndSpan {
-                        name: extnamestr.get().to_string(),
+                        name: extnamestr.deref().to_string(),
                         format: MacroBang,
                         span: exp_span,
                     },
@@ -411,7 +413,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                         fld.cx.span_err(
                             pth.span,
                             &format!("non-expression macro in expression position: {}",
-                                     &extnamestr.get()[]
+                                     &extnamestr.deref()[]
                                      )[]);
                         return None;
                     }
@@ -422,7 +424,7 @@ fn expand_mac_invoc<T, F, G>(mac: ast::Mac, span: codemap::Span,
                 fld.cx.span_err(
                     pth.span,
                     &format!("'{}' is not a tt-style macro",
-                             extnamestr.get())[]);
+                             extnamestr.deref())[]);
                 None
             }
         }
@@ -506,14 +508,14 @@ fn expand_item_modifiers(mut it: P<ast::Item>, fld: &mut MacroExpander)
     for attr in &modifiers {
         let mname = attr.name();
 
-        match fld.cx.syntax_env.find(&intern(mname.get())) {
+        match fld.cx.syntax_env.find(&intern(mname.deref())) {
            Some(rc) => match *rc {
                Modifier(ref mac) => {
                    attr::mark_used(attr);
                    fld.cx.bt_push(ExpnInfo {
                        call_site: attr.span,
                        callee: NameAndSpan {
-                           name: mname.get().to_string(),
+                           name: mname.deref().to_string(),
                            format: MacroAttribute,
                            span: None,
                        }
@@ -613,7 +615,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
             fld.cx.bt_push(ExpnInfo {
                 call_site: it.span,
                 callee: NameAndSpan {
-                    name: extnamestr.get().to_string(),
+                    name: extnamestr.deref().to_string(),
                     format: MacroBang,
                     span: span
                 }
@@ -626,13 +628,13 @@ pub fn expand_item_mac(it: P<ast::Item>,
             if it.ident.name == parse::token::special_idents::invalid.name {
                 fld.cx.span_err(path_span,
                                 &format!("macro {}! expects an ident argument",
-                                         extnamestr.get())[]);
+                                         extnamestr.deref())[]);
                 return SmallVector::zero();
             }
             fld.cx.bt_push(ExpnInfo {
                 call_site: it.span,
                 callee: NameAndSpan {
-                    name: extnamestr.get().to_string(),
+                    name: extnamestr.deref().to_string(),
                     format: MacroBang,
                     span: span
                 }
@@ -651,7 +653,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
             fld.cx.bt_push(ExpnInfo {
                 call_site: it.span,
                 callee: NameAndSpan {
-                    name: extnamestr.get().to_string(),
+                    name: extnamestr.deref().to_string(),
                    format: MacroBang,
                    span: None,
                }
@@ -677,7 +679,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
         _ => {
             fld.cx.span_err(it.span,
                             &format!("{}! is not legal in item position",
-                                     extnamestr.get())[]);
+                                     extnamestr.deref())[]);
             return SmallVector::zero();
         }
     }
@@ -696,7 +698,7 @@ pub fn expand_item_mac(it: P<ast::Item>,
         None => {
             fld.cx.span_err(path_span,
                             &format!("non-item macro in item position: {}",
-                                     extnamestr.get())[]);
+                                     extnamestr.deref())[]);
             return SmallVector::zero();
         }
     };
@@ -950,7 +952,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
             fld.cx.bt_push(ExpnInfo {
                 call_site: span,
                 callee: NameAndSpan {
-                    name: extnamestr.get().to_string(),
+                    name: extnamestr.deref().to_string(),
                     format: MacroBang,
                     span: tt_span
                 }
@@ -968,7 +970,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
                         pth.span,
                         &format!(
                             "non-pattern macro in pattern position: {}",
-                            extnamestr.get()
+                            extnamestr.deref()
                         )[]
                     );
                     return DummyResult::raw_pat(span);
@@ -981,7 +983,7 @@ fn expand_pat(p: P<ast::Pat>, fld: &mut MacroExpander) -> P<ast::Pat> {
         _ => {
             fld.cx.span_err(span,
                             &format!("{}! is not legal in pattern position",
-                                     extnamestr.get())[]);
+                                     extnamestr.deref())[]);
             return DummyResult::raw_pat(span);
         }
     }
@@ -1065,7 +1067,7 @@ fn expand_annotatable(a: Annotatable,
     for attr in a.attrs() {
         let mname = attr.name();
 
-        match fld.cx.syntax_env.find(&intern(mname.get())) {
+        match fld.cx.syntax_env.find(&intern(mname.deref())) {
            Some(rc) => match *rc {
                Decorator(ref dec) => {
                    let it = match a {
@@ -1079,7 +1081,7 @@ fn expand_annotatable(a: Annotatable,
                    fld.cx.bt_push(ExpnInfo {
                        call_site: attr.span,
                        callee: NameAndSpan {
-                           name: mname.get().to_string(),
+                           name: mname.deref().to_string(),
                            format: MacroAttribute,
                            span: None
                        }
@@ -1180,7 +1182,7 @@ fn modifiers(attrs: &Vec<ast::Attribute>,
              fld: &MacroExpander)
             -> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
     attrs.iter().cloned().partition(|attr| {
-        match fld.cx.syntax_env.find(&intern(attr.name().get())) {
+        match fld.cx.syntax_env.find(&intern(attr.name().deref())) {
            Some(rc) => match *rc {
                Modifier(_) => true,
                _ => false
@@ -1195,7 +1197,7 @@ fn multi_modifiers(attrs: &[ast::Attribute],
                    fld: &MacroExpander)
                   -> (Vec<ast::Attribute>, Vec<ast::Attribute>) {
     attrs.iter().cloned().partition(|attr| {
-        match fld.cx.syntax_env.find(&intern(attr.name().get())) {
+        match fld.cx.syntax_env.find(&intern(attr.name().deref())) {
            Some(rc) => match *rc {
                MultiModifier(_) => true,
                _ => false
@@ -1220,14 +1222,14 @@ fn expand_item_multi_modifier(mut it: Annotatable,
     for attr in &modifiers {
         let mname = attr.name();
 
-        match fld.cx.syntax_env.find(&intern(mname.get())) {
+        match fld.cx.syntax_env.find(&intern(mname.deref())) {
            Some(rc) => match *rc {
                MultiModifier(ref mac) => {
                    attr::mark_used(attr);
                    fld.cx.bt_push(ExpnInfo {
                        call_site: attr.span,
                        callee: NameAndSpan {
-                           name: mname.get().to_string(),
+                           name: mname.deref().to_string(),
                            format: MacroAttribute,
                            span: None,
                        }

@@ -23,6 +23,7 @@ use ptr::P;
 use std::collections::HashMap;
 use std::iter::repeat;
+use std::ops::Deref;
 
 #[derive(PartialEq)]
 enum ArgumentType {
@@ -118,7 +119,7 @@ fn parse_args(ecx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                 }
             };
             let interned_name = token::get_ident(ident);
-            let name = interned_name.get();
+            let name = interned_name.deref();
             p.expect(&token::Eq);
             let e = p.parse_expr();
             match names.get(name) {
@@ -672,7 +673,7 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt, sp: Span,
         None => return DummyResult::raw_expr(sp)
     };
 
-    let mut parser = parse::Parser::new(fmt.get());
+    let mut parser = parse::Parser::new(fmt.deref());
     loop {
         match parser.next() {
             Some(piece) => {

@@ -25,6 +25,8 @@ use ptr::P;
 /// as antiquotes (splices).
 pub mod rt {
+    use std::ops::Deref;
+
     use ast;
     use codemap::Spanned;
     use ext::base::ExtCtxt;
@@ -161,7 +163,7 @@ pub mod rt {
     impl ToSource for ast::Ident {
         fn to_source(&self) -> String {
-            token::get_ident(*self).get().to_string()
+            token::get_ident(*self).deref().to_string()
         }
     }

@@ -22,6 +22,7 @@ use util::small_vector::SmallVector;
 use std::old_io::File;
 use std::rc::Rc;
+use std::ops::Deref;
 
 // These macros all relate to the file system; they either return
 // the column/row/filename of the expression, or they include
@@ -73,7 +74,7 @@ pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     base::check_zero_tts(cx, sp, tts, "module_path!");
     let string = cx.mod_path()
                    .iter()
-                   .map(|x| token::get_ident(*x).get().to_string())
+                   .map(|x| token::get_ident(*x).deref().to_string())
                    .collect::<Vec<String>>()
                    .connect("::");
     base::MacExpr::new(cx.expr_str(

@@ -99,6 +99,7 @@ use std::mem;
 use std::rc::Rc;
 use std::collections::HashMap;
 use std::collections::hash_map::Entry::{Vacant, Occupied};
+use std::ops::Deref;
 
 // To avoid costly uniqueness checks, we require that `MatchSeq` always has
 // a nonempty body.
@@ -229,7 +230,7 @@ pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
                 p_s.span_diagnostic
                    .span_fatal(sp,
                                &format!("duplicated bind name: {}",
-                                        string.get())[])
+                                        string.deref())[])
             }
         }
     }
@@ -487,8 +488,8 @@ pub fn parse(sess: &ParseSess,
                 let name_string = token::get_ident(name);
                 let match_cur = ei.match_cur;
                 (&mut ei.matches[match_cur]).push(Rc::new(MatchedNonterminal(
-                    parse_nt(&mut rust_parser, span, name_string.get()))));
-                ei.idx += 1;
+                    parse_nt(&mut rust_parser, span, name_string.deref()))));
+                ei.idx += 1us;
                 ei.match_cur += 1;
             }
             _ => panic!()
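
Besides the deref() change, the last hunk above also rewrites ei.idx += 1 as ei.idx += 1us. The us suffix was the early-2015 spelling of a usize literal (it was later replaced by the spelled-out usize suffix); it simply pins the literal's type instead of leaving it to inference. A tiny sketch in current syntax, assuming nothing beyond the standard suffix rules:

    fn main() {
        let mut idx: usize = 0;
        // `1us` in the diff and `1usize` today both force the literal to be
        // a usize rather than relying on type inference.
        idx += 1usize;
        assert_eq!(idx, 1);
    }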

@@ -36,6 +36,7 @@ use parse::token::{self, InternedString};
 use std::slice;
 use std::ascii::AsciiExt;
+use std::ops::Deref;
 
 // If you change this list without updating src/doc/reference.md, @cmr will be sad
 // Don't ever remove anything from this list; set them to 'Removed'.
@@ -251,7 +252,7 @@ impl<'a> PostExpansionVisitor<'a> {
 impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> {
     fn visit_name(&mut self, sp: Span, name: ast::Name) {
-        if !token::get_name(name).get().is_ascii() {
+        if !token::get_name(name).deref().is_ascii() {
             self.gate_feature("non_ascii_idents", sp,
                               "non-ascii idents are not fully supported.");
         }
@@ -378,7 +379,7 @@ impl<'a, 'v> Visitor<'v> for PostExpansionVisitor<'a> {
                 let links_to_llvm = match attr::first_attr_value_str_by_name(&i.attrs,
                                                                              "link_name") {
-                    Some(val) => val.get().starts_with("llvm."),
+                    Some(val) => val.deref().starts_with("llvm."),
                     _ => false
                 };
                 if links_to_llvm {

@@ -84,6 +84,7 @@ use std::mem;
 use std::num::Float;
 use std::rc::Rc;
 use std::slice;
+use std::ops::Deref;
 
 bitflags! {
     flags Restrictions: u8 {
@@ -5133,7 +5134,7 @@ impl<'a> Parser<'a> {
                                                   outer_attrs, "path") {
             Some(d) => (dir_path.join(d), true),
             None => {
-                let mod_name = mod_string.get().to_string();
+                let mod_name = mod_string.deref().to_string();
                 let default_path_str = format!("{}.rs", mod_name);
                 let secondary_path_str = format!("{}/mod.rs", mod_name);
                 let default_path = dir_path.join(&default_path_str[]);
@@ -5145,7 +5146,7 @@ impl<'a> Parser<'a> {
                     self.span_err(id_sp,
                                   "cannot declare a new module at this location");
                     let this_module = match self.mod_path_stack.last() {
-                        Some(name) => name.get().to_string(),
+                        Some(name) => name.deref().to_string(),
                         None => self.root_module_name.as_ref().unwrap().clone(),
                     };
                     self.span_note(id_sp,
@@ -5191,7 +5192,7 @@ impl<'a> Parser<'a> {
         };
 
         self.eval_src_mod_from_path(file_path, owns_directory,
-                                    mod_string.get().to_string(), id_sp)
+                                    mod_string.deref().to_string(), id_sp)
     }
 
     fn eval_src_mod_from_path(&mut self,

@@ -625,19 +625,6 @@ impl InternedString {
             string: string,
         }
     }
-
-    #[inline]
-    #[deprecated = "use as_slice() instead"]
-    pub fn get<'a>(&'a self) -> &'a str {
-        &self.string[]
-    }
-}
-
-impl Str for InternedString {
-    #[inline]
-    fn as_slice<'a>(&'a self) -> &'a str {
-        &self.string[]
-    }
 }
 
 impl Deref for InternedString {
@@ -652,7 +639,7 @@ impl BytesContainer for InternedString {
         // of `BytesContainer`, which is itself a workaround for the lack of
         // DST.
         unsafe {
-            let this = self.get();
+            let this = self.deref();
            mem::transmute::<&[u8],&[u8]>(this.container_as_bytes())
        }
    }
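
This is the file the commit title refers to: the deprecated get() accessor and the Str impl are deleted, leaving the Deref impl visible in the surrounding context as the way to borrow the underlying str. It also explains the use std::ops::Deref; added near the top of almost every other file, since calling .deref() explicitly requires the trait to be in scope, while a plain reborrow or deref coercion does not. A hedged sketch of the equivalent spellings at a call boundary, once more with a stand-in type instead of the real InternedString:

    use std::ops::Deref;

    // Stand-in for an interned string handle with Deref<Target = str>.
    struct Interned(String);

    impl Deref for Interned {
        type Target = str;
        fn deref(&self) -> &str {
            &self.0
        }
    }

    // Like most of the helpers touched in this diff, this just wants a &str.
    fn width(s: &str) -> usize {
        s.len()
    }

    fn main() {
        let name = Interned("InternedString".to_string());
        assert_eq!(width(name.deref()), 14); // explicit, needs the Deref import
        assert_eq!(width(&*name), 14);       // reborrow through the Deref impl
        assert_eq!(width(&name), 14);        // deref coercion at the call site
    }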

@@ -30,6 +30,7 @@ use ptr::P;
 use std::{ascii, mem};
 use std::old_io::{self, IoResult};
 use std::iter;
+use std::ops::Deref;
 
 pub enum AnnNode<'a> {
     NodeIdent(&'a ast::Ident),
@@ -258,7 +259,7 @@ pub fn token_to_string(tok: &Token) -> String {
         }
 
         /* Name components */
-        token::Ident(s, _) => token::get_ident(s).get().to_string(),
+        token::Ident(s, _) => token::get_ident(s).deref().to_string(),
         token::Lifetime(s) => format!("{}", token::get_ident(s)),
         token::Underscore => "_".to_string(),
@@ -798,7 +799,7 @@ impl<'a> State<'a> {
                 try!(self.head(&visibility_qualified(item.vis,
                                                      "extern crate")[]));
                 if let Some((ref p, style)) = *optional_path {
-                    try!(self.print_string(p.get(), style));
+                    try!(self.print_string(p.deref(), style));
                     try!(space(&mut self.s));
                     try!(word(&mut self.s, "as"));
                     try!(space(&mut self.s));
@@ -1313,7 +1314,7 @@ impl<'a> State<'a> {
         try!(self.hardbreak_if_not_bol());
         try!(self.maybe_print_comment(attr.span.lo));
         if attr.node.is_sugared_doc {
-            word(&mut self.s, attr.value_str().unwrap().get())
+            word(&mut self.s, attr.value_str().unwrap().deref())
         } else {
             match attr.node.style {
                 ast::AttrInner => try!(word(&mut self.s, "#![")),
@@ -1847,17 +1848,17 @@ impl<'a> State<'a> {
             ast::ExprInlineAsm(ref a) => {
                 try!(word(&mut self.s, "asm!"));
                 try!(self.popen());
-                try!(self.print_string(a.asm.get(), a.asm_str_style));
+                try!(self.print_string(a.asm.deref(), a.asm_str_style));
                 try!(self.word_space(":"));
 
                 try!(self.commasep(Inconsistent, &a.outputs[],
                                    |s, &(ref co, ref o, is_rw)| {
-                    match co.get().slice_shift_char() {
+                    match co.deref().slice_shift_char() {
                         Some(('=', operand)) if is_rw => {
                             try!(s.print_string(&format!("+{}", operand)[],
                                                 ast::CookedStr))
                         }
-                        _ => try!(s.print_string(co.get(), ast::CookedStr))
+                        _ => try!(s.print_string(co.deref(), ast::CookedStr))
                     }
                     try!(s.popen());
                     try!(s.print_expr(&**o));
@@ -1869,7 +1870,7 @@ impl<'a> State<'a> {
                 try!(self.commasep(Inconsistent, &a.inputs[],
                                    |s, &(ref co, ref o)| {
-                    try!(s.print_string(co.get(), ast::CookedStr));
+                    try!(s.print_string(co.deref(), ast::CookedStr));
                     try!(s.popen());
                     try!(s.print_expr(&**o));
                     try!(s.pclose());
@@ -1880,7 +1881,7 @@ impl<'a> State<'a> {
                 try!(self.commasep(Inconsistent, &a.clobbers[],
                                    |s, co| {
-                    try!(s.print_string(co.get(), ast::CookedStr));
+                    try!(s.print_string(co.deref(), ast::CookedStr));
                     Ok(())
                 }));
@@ -1954,7 +1955,7 @@ impl<'a> State<'a> {
             let encoded = ident.encode_with_hygiene();
             try!(word(&mut self.s, &encoded[]))
         } else {
-            try!(word(&mut self.s, token::get_ident(ident).get()))
+            try!(word(&mut self.s, token::get_ident(ident).deref()))
         }
         self.ann.post(self, NodeIdent(&ident))
     }
@@ -1964,7 +1965,7 @@ impl<'a> State<'a> {
     }
 
     pub fn print_name(&mut self, name: ast::Name) -> IoResult<()> {
-        try!(word(&mut self.s, token::get_name(name).get()));
+        try!(word(&mut self.s, token::get_name(name).deref()));
         self.ann.post(self, NodeName(&name))
     }
@@ -2532,15 +2533,15 @@ impl<'a> State<'a> {
         try!(self.ibox(indent_unit));
         match item.node {
             ast::MetaWord(ref name) => {
-                try!(word(&mut self.s, name.get()));
+                try!(word(&mut self.s, name.deref()));
             }
             ast::MetaNameValue(ref name, ref value) => {
-                try!(self.word_space(name.get()));
+                try!(self.word_space(name.deref()));
                 try!(self.word_space("="));
                 try!(self.print_literal(value));
             }
             ast::MetaList(ref name, ref items) => {
-                try!(word(&mut self.s, name.get()));
+                try!(word(&mut self.s, name.deref()));
                 try!(self.popen());
                 try!(self.commasep(Consistent,
                                    &items[],
@@ -2731,7 +2732,7 @@ impl<'a> State<'a> {
             _ => ()
         }
         match lit.node {
-            ast::LitStr(ref st, style) => self.print_string(st.get(), style),
+            ast::LitStr(ref st, style) => self.print_string(st.deref(), style),
             ast::LitByte(byte) => {
                 let mut res = String::from_str("b'");
                 ascii::escape_default(byte, |c| res.push(c as char));
@@ -2772,10 +2773,10 @@ impl<'a> State<'a> {
                 word(&mut self.s,
                      &format!(
                          "{}{}",
-                         f.get(),
+                         f.deref(),
                          &ast_util::float_ty_to_string(t)[])[])
             }
-            ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.get()),
+            ast::LitFloatUnsuffixed(ref f) => word(&mut self.s, f.deref()),
             ast::LitBool(val) => {
                 if val { word(&mut self.s, "true") } else { word(&mut self.s, "false") }
             }

@@ -37,6 +37,8 @@ use {ast, ast_util};
 use ptr::P;
 use util::small_vector::SmallVector;
+use std::ops::Deref;
+
 enum ShouldFail {
     No,
     Yes(Option<InternedString>),
@@ -512,7 +514,7 @@ fn mk_test_module(cx: &mut TestCtxt) -> (P<ast::Item>, Option<P<ast::Item>>) {
         });
 
         let reexport = cx.reexport_test_harness_main.as_ref().map(|s| {
            // building `use <ident> = __test::main`
-           let reexport_ident = token::str_to_ident(s.get());
+           let reexport_ident = token::str_to_ident(s.deref());
 
            let use_path =
                nospan(ast::ViewPathSimple(reexport_ident,
@@ -575,7 +577,7 @@ fn mk_tests(cx: &TestCtxt) -> P<ast::Item> {
 fn is_test_crate(krate: &ast::Crate) -> bool {
     match attr::find_crate_name(&krate.attrs[]) {
-        Some(ref s) if "test" == &s.get()[] => true,
+        Some(ref s) if "test" == &s.deref()[] => true,
         _ => false
     }
 }