
libsyntax: De-@str literal strings in the AST

Patrick Walton 2014-01-10 14:02:36 -08:00 committed by Huon Wilson
parent 70c5a0fbf7
commit 8e52b85d5a
45 changed files with 432 additions and 275 deletions
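The change is mechanical but wide-ranging: string literals and attribute values that used to be garbage-collected @str values now travel through the AST as InternedString handles (ast::LitStr(InternedString, StrStyle)), built with InternedString::new(...) for static strings or token::intern_and_get_ident(...) for computed ones, and read back with .get(). As a rough illustration of why interning is attractive here, below is a minimal sketch of a string interner in modern Rust; the Interner and Name types and their method names are hypothetical stand-ins for illustration only, not the libsyntax API of this commit.

use std::collections::HashMap;

// Hypothetical handle type: a cheap, copyable index into the interner's table,
// playing the role that InternedString plays in the real patch.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug)]
struct Name(u32);

#[derive(Default)]
struct Interner {
    names: HashMap<String, Name>,
    strings: Vec<String>,
}

impl Interner {
    // Roughly the role of token::intern_and_get_ident: store the string once
    // and hand back a handle that can be cloned and compared cheaply.
    fn intern(&mut self, s: &str) -> Name {
        if let Some(&name) = self.names.get(s) {
            return name;
        }
        let name = Name(self.strings.len() as u32);
        self.strings.push(s.to_owned());
        self.names.insert(s.to_owned(), name);
        name
    }

    // Roughly the role of InternedString::get: borrow the underlying &str.
    fn get(&self, name: Name) -> &str {
        &self.strings[name.0 as usize]
    }
}

fn main() {
    let mut interner = Interner::default();
    let a = interner.intern("target_os");
    let b = interner.intern("target_os");
    assert_eq!(a, b);                         // the same string interns to the same handle
    assert_eq!(interner.get(a), "target_os");
}

Because the AST now carries handles rather than raw managed strings, call sites that used to compare @str values directly switch to handle-aware comparisons, which is why so many match arms in the diff change from forms like "lib" == s to s.equiv(&("lib")).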

View file

@@ -74,20 +74,20 @@ pub fn source_name(input: &Input) -> @str {
 pub fn default_configuration(sess: Session) ->
                              ast::CrateConfig {
     let tos = match sess.targ_cfg.os {
-        abi::OsWin32 => @"win32",
-        abi::OsMacos => @"macos",
-        abi::OsLinux => @"linux",
-        abi::OsAndroid => @"android",
-        abi::OsFreebsd => @"freebsd"
+        abi::OsWin32 => InternedString::new("win32"),
+        abi::OsMacos => InternedString::new("macos"),
+        abi::OsLinux => InternedString::new("linux"),
+        abi::OsAndroid => InternedString::new("android"),
+        abi::OsFreebsd => InternedString::new("freebsd"),
     };
     // ARM is bi-endian, however using NDK seems to default
     // to little-endian unless a flag is provided.
     let (end,arch,wordsz) = match sess.targ_cfg.arch {
-        abi::X86 => (@"little", @"x86", @"32"),
-        abi::X86_64 => (@"little", @"x86_64", @"64"),
-        abi::Arm => (@"little", @"arm", @"32"),
-        abi::Mips => (@"big", @"mips", @"32")
+        abi::X86 => ("little", "x86", "32"),
+        abi::X86_64 => ("little", "x86_64", "64"),
+        abi::Arm => ("little", "arm", "32"),
+        abi::Mips => ("big", "mips", "32")
     };
     let fam = match sess.targ_cfg.os {
@@ -99,10 +99,11 @@ pub fn default_configuration(sess: Session) ->
     return ~[ // Target bindings.
         attr::mk_word_item(fam.clone()),
         mk(InternedString::new("target_os"), tos),
-        mk(InternedString::new("target_family"), fam.get().to_managed()),
-        mk(InternedString::new("target_arch"), arch),
-        mk(InternedString::new("target_endian"), end),
-        mk(InternedString::new("target_word_size"), wordsz),
+        mk(InternedString::new("target_family"), fam),
+        mk(InternedString::new("target_arch"), InternedString::new(arch)),
+        mk(InternedString::new("target_endian"), InternedString::new(end)),
+        mk(InternedString::new("target_word_size"),
+           InternedString::new(wordsz)),
     ];
 }

View file

@@ -417,7 +417,12 @@ pub fn building_library(options: &Options, crate: &ast::Crate) -> bool {
         }
     }
     match syntax::attr::first_attr_value_str_by_name(crate.attrs, "crate_type") {
-        Some(s) => "lib" == s || "rlib" == s || "dylib" == s || "staticlib" == s,
+        Some(s) => {
+            s.equiv(&("lib")) ||
+            s.equiv(&("rlib")) ||
+            s.equiv(&("dylib")) ||
+            s.equiv(&("staticlib"))
+        }
        _ => false
    }
 }
@@ -437,14 +442,20 @@ pub fn collect_outputs(session: &Session,
     let mut iter = attrs.iter().filter_map(|a| {
         if a.name().equiv(&("crate_type")) {
             match a.value_str() {
-                Some(n) if "rlib" == n => Some(OutputRlib),
-                Some(n) if "dylib" == n => Some(OutputDylib),
-                Some(n) if "lib" == n => Some(default_lib_output()),
-                Some(n) if "staticlib" == n => Some(OutputStaticlib),
-                Some(n) if "bin" == n => Some(OutputExecutable),
+                Some(ref n) if n.equiv(&("rlib")) => Some(OutputRlib),
+                Some(ref n) if n.equiv(&("dylib")) => Some(OutputDylib),
+                Some(ref n) if n.equiv(&("lib")) => {
+                    Some(default_lib_output())
+                }
+                Some(ref n) if n.equiv(&("staticlib")) => {
+                    Some(OutputStaticlib)
+                }
+                Some(ref n) if n.equiv(&("bin")) => Some(OutputExecutable),
                 Some(_) => {
-                    session.add_lint(lint::UnknownCrateType, ast::CRATE_NODE_ID,
-                                     a.span, ~"invalid `crate_type` value");
+                    session.add_lint(lint::UnknownCrateType,
+                                     ast::CRATE_NODE_ID,
+                                     a.span,
+                                     ~"invalid `crate_type` value");
                     None
                 }
                 _ => {

View file

@@ -28,6 +28,7 @@ use syntax::fold::Folder;
 use syntax::fold;
 use syntax::opt_vec;
 use syntax::parse::token::InternedString;
+use syntax::parse::token;
 use syntax::print::pprust;
 use syntax::{ast, ast_util};
 use syntax::util::small_vector::SmallVector;
@@ -426,7 +427,8 @@ fn mk_test_desc_and_fn_rec(cx: &TestCtxt, test: &Test) -> @ast::Expr {
     debug!("encoding {}", ast_util::path_name_i(path));

     let name_lit: ast::Lit =
-        nospan(ast::LitStr(ast_util::path_name_i(path).to_managed(), ast::CookedStr));
+        nospan(ast::LitStr(token::intern_and_get_ident(
+            ast_util::path_name_i(path)), ast::CookedStr));

     let name_expr = @ast::Expr {
         id: ast::DUMMY_NODE_ID,

View file

@@ -27,8 +27,8 @@ use syntax::attr::AttrMetaMethods;
 use syntax::codemap::{Span, DUMMY_SP};
 use syntax::diagnostic::SpanHandler;
 use syntax::ext::base::{CrateLoader, MacroCrate};
+use syntax::parse::token::{IdentInterner, InternedString};
 use syntax::parse::token;
-use syntax::parse::token::IdentInterner;
 use syntax::crateid::CrateId;
 use syntax::visit;
@@ -126,10 +126,8 @@ fn visit_crate(e: &Env, c: &ast::Crate) {
     for a in c.attrs.iter().filter(|m| m.name().equiv(&("link_args"))) {
         match a.value_str() {
-            Some(ref linkarg) => {
-                cstore.add_used_link_args(*linkarg);
-            }
-            None => {/* fallthrough */ }
+            Some(ref linkarg) => cstore.add_used_link_args(linkarg.get()),
+            None => { /* fallthrough */ }
         }
     }
 }
@@ -214,9 +212,7 @@ fn visit_item(e: &Env, i: &ast::Item) {
                        .to_owned_vec();
         for m in link_args.iter() {
             match m.value_str() {
-                Some(linkarg) => {
-                    cstore.add_used_link_args(linkarg);
-                }
+                Some(linkarg) => cstore.add_used_link_args(linkarg.get()),
                 None => { /* fallthrough */ }
             }
         }
@@ -238,12 +234,12 @@ fn visit_item(e: &Env, i: &ast::Item) {
             }).and_then(|a| a.value_str());
             let kind = match kind {
                 Some(k) => {
-                    if "static" == k {
+                    if k.equiv(&("static")) {
                         cstore::NativeStatic
                     } else if e.sess.targ_cfg.os == abi::OsMacos &&
-                              "framework" == k {
+                              k.equiv(&("framework")) {
                         cstore::NativeFramework
-                    } else if "framework" == k {
+                    } else if k.equiv(&("framework")) {
                         e.sess.span_err(m.span,
                                         "native frameworks are only available \
                                          on OSX targets");
@@ -265,13 +261,13 @@ fn visit_item(e: &Env, i: &ast::Item) {
                         e.sess.span_err(m.span,
                                         "#[link(...)] specified without \
                                          `name = \"foo\"`");
-                        @"foo"
+                        InternedString::new("foo")
                     }
                 };
-                if n.is_empty() {
+                if n.get().is_empty() {
                     e.sess.span_err(m.span, "#[link(name = \"\")] given with empty name");
                 } else {
-                    cstore.add_used_library(n.to_owned(), kind);
+                    cstore.add_used_library(n.get().to_owned(), kind);
                 }
             }
             None => {}

View file

@@ -1050,7 +1050,7 @@ fn get_meta_items(md: ebml::Doc) -> ~[@ast::MetaItem] {
         let nd = reader::get_doc(meta_item_doc, tag_meta_item_name);
         let vd = reader::get_doc(meta_item_doc, tag_meta_item_value);
         let n = token::intern_and_get_ident(nd.as_str_slice());
-        let v = vd.as_str_slice().to_managed();
+        let v = token::intern_and_get_ident(vd.as_str_slice());
         // FIXME (#623): Should be able to decode MetaNameValue variants,
         // but currently the encoder just drops them
         items.push(attr::mk_name_value_item_str(n, v));

View file

@@ -1513,15 +1513,15 @@ fn encode_meta_item(ebml_w: &mut writer::Encoder, mi: @MetaItem) {
         ebml_w.end_tag();
         ebml_w.end_tag();
       }
-      MetaNameValue(ref name, value) => {
+      MetaNameValue(ref name, ref value) => {
         match value.node {
-          LitStr(value, _) => {
+          LitStr(ref value, _) => {
             ebml_w.start_tag(tag_meta_item_name_value);
             ebml_w.start_tag(tag_meta_item_name);
             ebml_w.writer.write(name.get().as_bytes());
             ebml_w.end_tag();
             ebml_w.start_tag(tag_meta_item_value);
-            ebml_w.writer.write(value.as_bytes());
+            ebml_w.writer.write(value.get().as_bytes());
             ebml_w.end_tag();
             ebml_w.end_tag();
           }
@@ -1563,7 +1563,7 @@ fn synthesize_crate_attrs(ecx: &EncodeContext,
         attr::mk_attr(
             attr::mk_name_value_item_str(
                 InternedString::new("crate_id"),
-                ecx.link_meta.crateid.to_str().to_managed()))
+                token::intern_and_get_ident(ecx.link_meta.crateid.to_str())))
     }

     let mut attrs = ~[];

View file

@@ -214,9 +214,14 @@ fn check_exhaustive(cx: &MatchCheckCtxt, sp: Span, pats: ~[@Pat]) {
 type matrix = ~[~[@Pat]];

-enum useful { useful(ty::t, ctor), useful_, not_useful }
+#[deriving(Clone)]
+enum useful {
+    useful(ty::t, ctor),
+    useful_,
+    not_useful,
+}

-#[deriving(Eq)]
+#[deriving(Clone, Eq)]
 enum ctor {
     single,
     variant(DefId),
@@ -261,7 +266,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
                                       val(const_bool(false)),
                                       0u, left_ty)
                 }
-                ref u => *u,
+                ref u => (*u).clone(),
             }
         }
         ty::ty_enum(eid, _) => {
@@ -269,7 +274,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
                 match is_useful_specialized(cx, m, v, variant(va.id),
                                             va.args.len(), left_ty) {
                     not_useful => (),
-                    ref u => return *u,
+                    ref u => return (*u).clone(),
                 }
             }
             not_useful
@@ -289,7 +294,7 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
             for n in iter::range(0u, max_len + 1) {
                 match is_useful_specialized(cx, m, v, vec(n), n, left_ty) {
                     not_useful => (),
-                    ref u => return *u,
+                    ref u => return (*u).clone(),
                 }
             }
             not_useful
@@ -304,15 +309,15 @@ fn is_useful(cx: &MatchCheckCtxt, m: &matrix, v: &[@Pat]) -> useful {
             match is_useful(cx,
                             &m.iter().filter_map(|r| default(cx, *r)).collect::<matrix>(),
                             v.tail()) {
-                useful_ => useful(left_ty, *ctor),
-                ref u => *u,
+                useful_ => useful(left_ty, (*ctor).clone()),
+                ref u => (*u).clone(),
             }
           }
         }
       }
       Some(ref v0_ctor) => {
         let arity = ctor_arity(cx, v0_ctor, left_ty);
-        is_useful_specialized(cx, m, v, *v0_ctor, arity, left_ty)
+        is_useful_specialized(cx, m, v, (*v0_ctor).clone(), arity, left_ty)
       }
     }
 }
@@ -329,7 +334,7 @@ fn is_useful_specialized(cx: &MatchCheckCtxt,
         cx, &ms, specialize(cx, v, &ctor, arity, lty).unwrap());
     match could_be_useful {
         useful_ => useful(lty, ctor),
-        ref u => *u,
+        ref u => (*u).clone(),
     }
 }
@@ -407,7 +412,7 @@ fn missing_ctor(cx: &MatchCheckCtxt,
         let r = pat_ctor_id(cx, r[0]);
         for id in r.iter() {
             if !found.contains(id) {
-                found.push(*id);
+                found.push((*id).clone());
             }
         }
     }
@@ -770,8 +775,8 @@ fn specialize(cx: &MatchCheckCtxt,
         }
         PatRange(lo, hi) => {
             let (c_lo, c_hi) = match *ctor_id {
-                val(ref v) => (*v, *v),
-                range(ref lo, ref hi) => (*lo, *hi),
+                val(ref v) => ((*v).clone(), (*v).clone()),
+                range(ref lo, ref hi) => ((*lo).clone(), (*hi).clone()),
                 single => return Some(r.tail().to_owned()),
                 _ => fail!("type error")
             };

View file

@@ -16,10 +16,11 @@ use middle::ty;
 use middle::typeck::astconv;
 use middle;
-use syntax::{ast, ast_map, ast_util};
-use syntax::visit;
-use syntax::visit::Visitor;
 use syntax::ast::*;
+use syntax::parse::token::InternedString;
+use syntax::visit::Visitor;
+use syntax::visit;
+use syntax::{ast, ast_map, ast_util};

 use std::cell::RefCell;
 use std::hashmap::HashMap;
@@ -319,7 +320,7 @@ pub enum const_val {
     const_float(f64),
     const_int(i64),
     const_uint(u64),
-    const_str(@str),
+    const_str(InternedString),
     const_binary(@[u8]),
     const_bool(bool)
 }
@@ -508,7 +509,7 @@ pub fn eval_const_expr_partial<T: ty::ExprTyProvider>(tcx: &T, e: &Expr)
 pub fn lit_to_const(lit: &Lit) -> const_val {
     match lit.node {
-        LitStr(s, _) => const_str(s),
+        LitStr(ref s, _) => const_str((*s).clone()),
         LitBinary(data) => const_binary(data),
         LitChar(n) => const_uint(n as u64),
         LitInt(n, _) => const_int(n),
@@ -530,7 +531,7 @@ pub fn compare_const_vals(a: &const_val, b: &const_val) -> Option<int> {
         (&const_int(a), &const_int(b)) => compare_vals(a, b),
         (&const_uint(a), &const_uint(b)) => compare_vals(a, b),
         (&const_float(a), &const_float(b)) => compare_vals(a, b),
-        (&const_str(a), &const_str(b)) => compare_vals(a, b),
+        (&const_str(ref a), &const_str(ref b)) => compare_vals(a, b),
         (&const_bool(a), &const_bool(b)) => compare_vals(a, b),
         _ => None
     }

View file

@@ -26,8 +26,9 @@ use middle::ty::{BuiltinBound, BoundFreeze, BoundPod, BoundSend, BoundSized};
 use syntax::ast;
 use syntax::ast_util::local_def;
 use syntax::attr::AttrMetaMethods;
-use syntax::visit;
+use syntax::parse::token::InternedString;
 use syntax::visit::Visitor;
+use syntax::visit;

 use std::hashmap::HashMap;
 use std::iter::Enumerate;
@@ -182,11 +183,11 @@ impl LanguageItemCollector {
     }
 }

-pub fn extract(attrs: &[ast::Attribute]) -> Option<@str> {
+pub fn extract(attrs: &[ast::Attribute]) -> Option<InternedString> {
     for attribute in attrs.iter() {
         match attribute.name_str_pair() {
-            Some((ref key, value)) if key.equiv(&("lang")) => {
-                return Some(value);
+            Some((ref key, ref value)) if key.equiv(&("lang")) => {
+                return Some((*value).clone());
             }
             Some(..) | None => {}
         }

View file

@@ -229,6 +229,7 @@ use syntax::ast::Ident;
 use syntax::ast_util::path_to_ident;
 use syntax::ast_util;
 use syntax::codemap::{Span, DUMMY_SP};
+use syntax::parse::token::InternedString;

 // An option identifying a literal: either a unit-like struct or an
 // expression.
@@ -1174,7 +1175,7 @@ fn any_tuple_struct_pat(bcx: &Block, m: &[Match], col: uint) -> bool {
 struct DynamicFailureHandler<'a> {
     bcx: &'a Block<'a>,
     sp: Span,
-    msg: @str,
+    msg: InternedString,
     finished: @Cell<Option<BasicBlockRef>>,
 }
@@ -1187,7 +1188,7 @@ impl<'a> DynamicFailureHandler<'a> {
         let fcx = self.bcx.fcx;
         let fail_cx = fcx.new_block(false, "case_fallthrough", None);
-        controlflow::trans_fail(fail_cx, Some(self.sp), self.msg);
+        controlflow::trans_fail(fail_cx, Some(self.sp), self.msg.clone());
         self.finished.set(Some(fail_cx.llbb));
         fail_cx.llbb
     }
@@ -1891,7 +1892,8 @@ fn trans_match_inner<'a>(scope_cx: &'a Block<'a>,
             let fail_handler = ~DynamicFailureHandler {
                 bcx: scope_cx,
                 sp: discr_expr.span,
-                msg: @"scrutinizing value that can't exist",
+                msg: InternedString::new("scrutinizing value that can't \
+                                          exist"),
                 finished: fail_cx,
             };
             DynamicFailureHandlerClass(fail_handler)

View file

@@ -79,13 +79,18 @@ use std::local_data;
 use syntax::ast_map::{PathName, PathPrettyName, path_elem_to_str};
 use syntax::ast_util::{local_def, is_local};
 use syntax::attr;
+use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32};
+use syntax::attr::AttrMetaMethods;
 use syntax::codemap::Span;
+use syntax::parse::token::{InternedString, special_idents};
 use syntax::parse::token;
-use syntax::{ast, ast_util, ast_map};
-use syntax::attr::AttrMetaMethods;
-use syntax::abi::{X86, X86_64, Arm, Mips, Rust, RustIntrinsic, OsWin32};
-use syntax::visit;
 use syntax::visit::Visitor;
+use syntax::visit;
+use syntax::{ast, ast_util, codemap, ast_map};

 pub use middle::trans::context::task_llcx;
@@ -604,7 +609,8 @@ pub fn compare_scalar_types<'a>(
             rslt(
                 controlflow::trans_fail(
                     cx, None,
-                    @"attempt to compare values of type type"),
+                    InternedString::new("attempt to compare values of type \
+                                         type")),
                 C_nil())
         }
         _ => {
@@ -856,9 +862,9 @@ pub fn fail_if_zero<'a>(
                         rhs_t: ty::t)
                         -> &'a Block<'a> {
     let text = if divrem == ast::BiDiv {
-        @"attempted to divide by zero"
+        "attempted to divide by zero"
     } else {
-        @"attempted remainder with a divisor of zero"
+        "attempted remainder with a divisor of zero"
     };
     let is_zero = match ty::get(rhs_t).sty {
         ty::ty_int(t) => {
@@ -875,7 +881,7 @@ pub fn fail_if_zero<'a>(
         }
     };
     with_cond(cx, is_zero, |bcx| {
-        controlflow::trans_fail(bcx, Some(span), text)
+        controlflow::trans_fail(bcx, Some(span), InternedString::new(text))
     })
 }
@@ -1951,7 +1957,7 @@ fn exported_name(ccx: &CrateContext, path: ast_map::Path,
                  ty: ty::t, attrs: &[ast::Attribute]) -> ~str {
     match attr::first_attr_value_str_by_name(attrs, "export_name") {
         // Use provided name
-        Some(name) => name.to_owned(),
+        Some(name) => name.get().to_owned(),

        // Don't mangle
        _ if attr::contains_name(attrs, "no_mangle")
@@ -2099,7 +2105,7 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
             match attr::first_attr_value_str_by_name(i.attrs, "link_section") {
                 Some(sect) => unsafe {
-                    sect.with_c_str(|buf| {
+                    sect.get().with_c_str(|buf| {
                         llvm::LLVMSetSection(v, buf);
                     })
                 },
@@ -2161,9 +2167,9 @@ pub fn get_item_val(ccx: @CrateContext, id: ast::NodeId) -> ValueRef {
                     ccx.crate_map
                 }
             } else {
-                let ident = foreign::link_name(ccx, ni);
+                let ident = foreign::link_name(ni);
                 unsafe {
-                    ident.with_c_str(|buf| {
+                    ident.get().with_c_str(|buf| {
                         let ty = type_of(ccx, ty);
                         llvm::LLVMAddGlobal(ccx.llmod,
                                             ty.to_ref(), buf)
@@ -2476,21 +2482,21 @@ pub fn create_module_map(ccx: &CrateContext) -> (ValueRef, uint) {
         let mut keys = ~[];
         let module_data = ccx.module_data.borrow();
         for (k, _) in module_data.get().iter() {
-            keys.push(k.to_managed());
+            keys.push(k.clone());
         }
         keys
     };
     for key in keys.iter() {
-        let llstrval = C_str_slice(ccx, *key);
+        let llstrval = C_str_slice(ccx, token::intern_and_get_ident(*key));
         let module_data = ccx.module_data.borrow();
         let val = *module_data.get().find_equiv(key).unwrap();
         let v_ptr = p2i(ccx, val);
         let elt = C_struct([
             llstrval,
             v_ptr
         ], false);
         elts.push(elt);
     }
     unsafe {
         llvm::LLVMSetInitializer(map, C_array(elttype, elts));

View file

@@ -30,7 +30,6 @@ use middle::ty;
 use middle::typeck;
 use util::ppaux::Repr;

 use arena::TypedArena;
 use std::c_str::ToCStr;
 use std::cast::transmute;
@@ -41,6 +40,7 @@ use std::libc::{c_uint, c_longlong, c_ulonglong, c_char};
 use syntax::ast::{Ident};
 use syntax::ast_map::{Path, PathElem, PathPrettyName};
 use syntax::codemap::Span;
+use syntax::parse::token::InternedString;
 use syntax::parse::token;
 use syntax::{ast, ast_map};
@@ -597,18 +597,19 @@ pub fn C_u8(i: uint) -> ValueRef {

 // This is a 'c-like' raw string, which differs from
 // our boxed-and-length-annotated strings.
-pub fn C_cstr(cx: &CrateContext, s: @str) -> ValueRef {
+pub fn C_cstr(cx: &CrateContext, s: InternedString) -> ValueRef {
     unsafe {
         {
             let const_cstr_cache = cx.const_cstr_cache.borrow();
-            match const_cstr_cache.get().find_equiv(&s) {
+            match const_cstr_cache.get().find(&s) {
                 Some(&llval) => return llval,
                 None => ()
             }
         }

         let sc = llvm::LLVMConstStringInContext(cx.llcx,
-                                                s.as_ptr() as *c_char, s.len() as c_uint,
+                                                s.get().as_ptr() as *c_char,
+                                                s.get().len() as c_uint,
                                                 False);

         let gsym = token::gensym("str");
@@ -627,9 +628,9 @@ pub fn C_cstr(cx: &CrateContext, s: @str) -> ValueRef {

 // NB: Do not use `do_spill_noroot` to make this into a constant string, or
 // you will be kicked off fast isel. See issue #4352 for an example of this.
-pub fn C_str_slice(cx: &CrateContext, s: @str) -> ValueRef {
+pub fn C_str_slice(cx: &CrateContext, s: InternedString) -> ValueRef {
     unsafe {
-        let len = s.len();
+        let len = s.get().len();
         let cs = llvm::LLVMConstPointerCast(C_cstr(cx, s), Type::i8p().to_ref());
         C_struct([cs, C_uint(cx, len)], false)
     }
@@ -970,7 +971,8 @@ pub fn dummy_substs(tps: ~[ty::t]) -> ty::substs {
 pub fn filename_and_line_num_from_span(bcx: &Block, span: Span)
                                        -> (ValueRef, ValueRef) {
     let loc = bcx.sess().parse_sess.cm.lookup_char_pos(span.lo);
-    let filename_cstr = C_cstr(bcx.ccx(), loc.file.name);
+    let filename_cstr = C_cstr(bcx.ccx(),
+                               token::intern_and_get_ident(loc.file.name));
     let filename = build::PointerCast(bcx, filename_cstr, Type::i8p());
     let line = C_int(bcx.ccx(), loc.line as int);
     (filename, line)

View file

@@ -72,7 +72,7 @@ pub fn const_lit(cx: &CrateContext, e: &ast::Expr, lit: ast::Lit)
         }
         ast::LitBool(b) => C_bool(b),
         ast::LitNil => C_nil(),
-        ast::LitStr(s, _) => C_str_slice(cx, s),
+        ast::LitStr(ref s, _) => C_str_slice(cx, (*s).clone()),
         ast::LitBinary(data) => C_binary_slice(cx, data),
     }
 }
@@ -312,7 +312,9 @@ fn const_expr_unadjusted(cx: @CrateContext, e: &ast::Expr,
     unsafe {
         let _icx = push_ctxt("const_expr");
         return match e.node {
-            ast::ExprLit(lit) => (consts::const_lit(cx, e, *lit), true),
+            ast::ExprLit(lit) => {
+                (consts::const_lit(cx, e, (*lit).clone()), true)
+            }
             ast::ExprBinary(_, b, e1, e2) => {
                 let (te1, _) = const_expr(cx, e1, is_local);
                 let (te2, _) = const_expr(cx, e2, is_local);

View file

@@ -19,12 +19,12 @@ use middle::resolve;
 use middle::trans::adt;
 use middle::trans::base;
 use middle::trans::builder::Builder;
-use middle::trans::debuginfo;
 use middle::trans::common::{C_i32, C_null};
-use middle::ty;
+use middle::trans::common::{mono_id,ExternMap,tydesc_info,BuilderRef_res,Stats};
+use middle::trans::base::{decl_crate_map};
+use middle::trans::debuginfo;
 use middle::trans::type_::Type;
+use middle::ty;
 use util::sha2::Sha256;

 use std::cell::{Cell, RefCell};
@@ -33,10 +33,7 @@ use std::hashmap::{HashMap, HashSet};
 use std::local_data;
 use std::libc::c_uint;
 use syntax::ast;
+use syntax::parse::token::InternedString;
-use middle::trans::common::{mono_id,ExternMap,tydesc_info,BuilderRef_res,Stats};
-use middle::trans::base::{decl_crate_map};

 pub struct CrateContext {
     sess: session::Session,
@@ -71,7 +68,7 @@ pub struct CrateContext {
     // Cache generated vtables
     vtables: RefCell<HashMap<(ty::t, mono_id), ValueRef>>,
     // Cache of constant strings,
-    const_cstr_cache: RefCell<HashMap<@str, ValueRef>>,
+    const_cstr_cache: RefCell<HashMap<InternedString, ValueRef>>,

     // Reverse-direction for const ptrs cast from globals.
     // Key is an int, cast from a ValueRef holding a *T,

View file

@@ -28,6 +28,8 @@ use syntax::ast;
 use syntax::ast::Name;
 use syntax::ast_util;
 use syntax::codemap::Span;
+use syntax::parse::token::InternedString;
+use syntax::parse::token;
 use syntax::visit::Visitor;

 pub fn trans_stmt<'a>(cx: &'a Block<'a>,
@@ -342,14 +344,14 @@ pub fn trans_fail_expr<'a>(
                         ppaux::ty_to_str(tcx, arg_datum.ty));
             }
         }
-        _ => trans_fail(bcx, sp_opt, @"explicit failure")
+        _ => trans_fail(bcx, sp_opt, InternedString::new("explicit failure"))
     }
 }

 pub fn trans_fail<'a>(
                   bcx: &'a Block<'a>,
                   sp_opt: Option<Span>,
-                  fail_str: @str)
+                  fail_str: InternedString)
                   -> &'a Block<'a> {
     let _icx = push_ctxt("trans_fail");
     let V_fail_str = C_cstr(bcx.ccx(), fail_str);
@@ -367,11 +369,11 @@ fn trans_fail_value<'a>(
         Some(sp) => {
             let sess = bcx.sess();
             let loc = sess.parse_sess.cm.lookup_char_pos(sp.lo);
-            (C_cstr(bcx.ccx(), loc.file.name),
+            (C_cstr(bcx.ccx(), token::intern_and_get_ident(loc.file.name)),
              loc.line as int)
         }
         None => {
-            (C_cstr(bcx.ccx(), @"<runtime>"), 0)
+            (C_cstr(bcx.ccx(), InternedString::new("<runtime>")), 0)
         }
     };
     let V_str = PointerCast(bcx, V_fail_str, Type::i8p());

View file

@@ -543,9 +543,7 @@ fn trans_datum_unadjusted<'a>(bcx: &'a Block<'a>,
             let heap = heap_exchange;
             return trans_boxed_expr(bcx, box_ty, contents, contents_ty, heap)
         }
-        ast::ExprLit(lit) => {
-            trans_immediate_lit(bcx, expr, *lit)
-        }
+        ast::ExprLit(lit) => trans_immediate_lit(bcx, expr, (*lit).clone()),
         ast::ExprBinary(_, op, lhs, rhs) => {
             // if overloaded, would be RvalueDpsExpr
             {
@@ -836,8 +834,8 @@ fn trans_rvalue_dps_unadjusted<'a>(bcx: &'a Block<'a>,
         }
         ast::ExprLit(lit) => {
             match lit.node {
-                ast::LitStr(s, _) => {
-                    tvec::trans_lit_str(bcx, expr, s, dest)
+                ast::LitStr(ref s, _) => {
+                    tvec::trans_lit_str(bcx, expr, (*s).clone(), dest)
                 }
                 _ => {
                     bcx.tcx()

View file

@@ -31,7 +31,8 @@ use std::vec;
 use syntax::abi::{Cdecl, Aapcs, C, AbiSet, Win64};
 use syntax::abi::{RustIntrinsic, Rust, Stdcall, Fastcall, System};
 use syntax::codemap::Span;
-use syntax::parse::token::special_idents;
+use syntax::parse::token::{InternedString, special_idents};
+use syntax::parse::token;
 use syntax::{ast};
 use syntax::{attr, ast_map};
 use util::ppaux::{Repr, UserString};
@@ -135,7 +136,7 @@ pub fn register_foreign_item_fn(ccx: @CrateContext,
     };

     // Register the function as a C extern fn
-    let lname = link_name(ccx, foreign_item);
+    let lname = link_name(foreign_item);
     let tys = foreign_types_for_id(ccx, foreign_item.id);

     // Make sure the calling convention is right for variadic functions
@@ -150,8 +151,12 @@
     let llfn;
     {
         let mut externs = ccx.externs.borrow_mut();
-        llfn = base::get_extern_fn(externs.get(), ccx.llmod, lname,
-                                   cc, llfn_ty, tys.fn_sig.output);
+        llfn = base::get_extern_fn(externs.get(),
+                                   ccx.llmod,
+                                   lname.get(),
+                                   cc,
+                                   llfn_ty,
+                                   tys.fn_sig.output);
     };
     add_argument_attributes(&tys, llfn);
@@ -372,9 +377,9 @@ pub fn trans_foreign_mod(ccx: @CrateContext,
            _ => ()
        }

-        let lname = link_name(ccx, foreign_item);
+        let lname = link_name(foreign_item);
         let mut item_symbols = ccx.item_symbols.borrow_mut();
-        item_symbols.get().insert(foreign_item.id, lname.to_owned());
+        item_symbols.get().insert(foreign_item.id, lname.get().to_owned());
     }
 }
@@ -726,10 +731,10 @@ pub fn trans_rust_fn_with_foreign_abi(ccx: @CrateContext,

 // This code is kind of a confused mess and needs to be reworked given
 // the massive simplifications that have occurred.

-pub fn link_name(ccx: &CrateContext, i: @ast::ForeignItem) -> @str {
+pub fn link_name(i: @ast::ForeignItem) -> InternedString {
     match attr::first_attr_value_str_by_name(i.attrs, "link_name") {
-        None => ccx.sess.str_of(i.ident),
-        Some(ln) => ln,
+        None => token::get_ident(i.ident.name),
+        Some(ln) => ln.clone(),
     }
 }

View file

@@ -15,32 +15,33 @@

 use back::abi;
 use back::link::*;
-use lib;
 use lib::llvm::{llvm, ValueRef, True};
+use lib;
 use middle::lang_items::{FreeFnLangItem, ExchangeFreeFnLangItem};
 use middle::trans::adt;
 use middle::trans::base::*;
+use middle::trans::build::*;
 use middle::trans::callee;
 use middle::trans::cleanup;
 use middle::trans::cleanup::CleanupMethods;
 use middle::trans::common::*;
-use middle::trans::build::*;
+use middle::trans::datum::immediate_rvalue;
 use middle::trans::expr;
 use middle::trans::machine::*;
 use middle::trans::reflect;
 use middle::trans::tvec;
+use middle::trans::type_::Type;
 use middle::trans::type_of::type_of;
 use middle::ty;
-use util::ppaux;
 use util::ppaux::ty_to_short_str;
+use util::ppaux;
-use middle::trans::type_::Type;

 use arena::TypedArena;
 use std::c_str::ToCStr;
 use std::cell::Cell;
 use std::libc::c_uint;
 use syntax::ast;
+use syntax::parse::token;

 pub fn trans_free<'a>(cx: &'a Block<'a>, v: ValueRef) -> &'a Block<'a> {
     let _icx = push_ctxt("trans_free");
@@ -480,7 +481,8 @@ pub fn declare_tydesc(ccx: &CrateContext, t: ty::t) -> @tydesc_info {
         }
     });

-    let ty_name = C_str_slice(ccx, ppaux::ty_to_str(ccx.tcx, t).to_managed());
+    let ty_name = token::intern_and_get_ident(ppaux::ty_to_str(ccx.tcx, t));
+    let ty_name = C_str_slice(ccx, ty_name);

     let inf = @tydesc_info {
         ty: t,

View file

@@ -20,6 +20,7 @@ use middle::trans::datum::*;
 use middle::trans::glue;
 use middle::trans::machine;
 use middle::trans::meth;
+use middle::trans::type_::Type;
 use middle::trans::type_of::*;
 use middle::ty;
 use util::ppaux::ty_to_str;
@@ -31,9 +32,8 @@ use std::vec;
 use syntax::ast::DefId;
 use syntax::ast;
 use syntax::ast_map::PathName;
-use syntax::parse::token::special_idents;
+use syntax::parse::token::{InternedString, special_idents};
+use syntax::parse::token;
-use middle::trans::type_::Type;

 pub struct Reflector<'a> {
     visitor_val: ValueRef,
@@ -56,14 +56,14 @@ impl<'a> Reflector<'a> {
         C_bool(b)
     }

-    pub fn c_slice(&mut self, s: @str) -> ValueRef {
+    pub fn c_slice(&mut self, s: InternedString) -> ValueRef {
         // We're careful to not use first class aggregates here because that
         // will kick us off fast isel. (Issue #4352.)
         let bcx = self.bcx;
         let str_vstore = ty::vstore_slice(ty::ReStatic);
         let str_ty = ty::mk_str(bcx.tcx(), str_vstore);
         let scratch = rvalue_scratch_datum(bcx, str_ty, "");
-        let len = C_uint(bcx.ccx(), s.len());
+        let len = C_uint(bcx.ccx(), s.get().len());
         let c_str = PointerCast(bcx, C_cstr(bcx.ccx(), s), Type::i8p());
         Store(bcx, c_str, GEPi(bcx, scratch.val, [ 0, 0 ]));
         Store(bcx, len, GEPi(bcx, scratch.val, [ 0, 1 ]));
@@ -260,15 +260,19 @@ impl<'a> Reflector<'a> {
                 fields[0].ident.name != special_idents::unnamed_field.name;
         }

-        let extra = ~[self.c_slice(ty_to_str(tcx, t).to_managed()),
-                      self.c_bool(named_fields),
-                      self.c_uint(fields.len())] + self.c_size_and_align(t);
+        let extra = ~[
+            self.c_slice(token::intern_and_get_ident(ty_to_str(tcx,
+                                                               t))),
+            self.c_bool(named_fields),
+            self.c_uint(fields.len())
+        ] + self.c_size_and_align(t);
         self.bracketed("class", extra, |this| {
             for (i, field) in fields.iter().enumerate() {
-                let extra = ~[this.c_uint(i),
-                              this.c_slice(bcx.ccx().sess.str_of(field.ident)),
-                              this.c_bool(named_fields)]
-                    + this.c_mt(&field.mt);
+                let extra = ~[
+                    this.c_uint(i),
+                    this.c_slice(token::get_ident(field.ident.name)),
+                    this.c_bool(named_fields)
+                ] + this.c_mt(&field.mt);
                 this.visit("class_field", extra);
             }
         })
@@ -330,7 +334,7 @@ impl<'a> Reflector<'a> {
             + self.c_size_and_align(t);
         self.bracketed("enum", enum_args, |this| {
             for (i, v) in variants.iter().enumerate() {
-                let name = ccx.sess.str_of(v.name);
+                let name = token::get_ident(v.name.name);
                 let variant_args = ~[this.c_uint(i),
                                      C_u64(v.disr_val),
                                      this.c_uint(v.args.len()),
@@ -352,7 +356,9 @@ impl<'a> Reflector<'a> {
         }

         ty::ty_trait(_, _, _, _, _) => {
-            let extra = [self.c_slice(ty_to_str(tcx, t).to_managed())];
+            let extra = [
+                self.c_slice(token::intern_and_get_ident(ty_to_str(tcx, t)))
+            ];
             self.visit("trait", extra);
         }

View file

@@ -31,6 +31,7 @@ use middle::ty;
 use util::ppaux::ty_to_str;

 use syntax::ast;
+use syntax::parse::token::InternedString;

 // Boxed vector types are in some sense currently a "shorthand" for a box
 // containing an unboxed vector. This expands a boxed vector type into such an
@@ -232,7 +233,10 @@ pub fn trans_slice_vstore<'a>(
         ast::ExprLit(lit) => {
             match lit.node {
                 ast::LitStr(s, _) => {
-                    return trans_lit_str(bcx, content_expr, s, dest);
+                    return trans_lit_str(bcx,
+                                         content_expr,
+                                         (*s).clone(),
+                                         dest)
                 }
                 _ => {}
             }
@@ -284,7 +288,7 @@ pub fn trans_slice_vstore<'a>(
 pub fn trans_lit_str<'a>(
                      bcx: &'a Block<'a>,
                      lit_expr: &ast::Expr,
-                     str_lit: @str,
+                     str_lit: InternedString,
                      dest: Dest)
                      -> &'a Block<'a> {
     /*!
@@ -301,7 +305,7 @@ pub fn trans_lit_str<'a>(
         Ignore => bcx,
         SaveIn(lldest) => {
             unsafe {
-                let bytes = str_lit.len();
+                let bytes = str_lit.get().len();
                 let llbytes = C_uint(bcx.ccx(), bytes);
                 let llcstr = C_cstr(bcx.ccx(), str_lit);
                 let llcstr = llvm::LLVMConstPointerCast(llcstr, Type::i8p().to_ref());
@@ -337,11 +341,15 @@ pub fn trans_uniq_or_managed_vstore<'a>(bcx: &'a Block<'a>,
         ast::ExprLit(lit) => {
             match lit.node {
                 ast::LitStr(s, _) => {
-                    let llptrval = C_cstr(bcx.ccx(), s);
-                    let llptrval = PointerCast(bcx, llptrval, Type::i8p());
+                    let llptrval = C_cstr(bcx.ccx(), (*s).clone());
+                    let llptrval = PointerCast(bcx,
+                                               llptrval,
+                                               Type::i8p());
                     let llsizeval = C_uint(bcx.ccx(), s.len());
                     let typ = ty::mk_str(bcx.tcx(), ty::vstore_uniq);
-                    let lldestval = rvalue_scratch_datum(bcx, typ, "");
+                    let lldestval = rvalue_scratch_datum(bcx,
+                                                         typ,
+                                                         "");
                     let alloc_fn = langcall(bcx,
                                             Some(lit.span),
                                             "",
@@ -405,15 +413,13 @@ pub fn write_content<'a>(
     match content_expr.node {
         ast::ExprLit(lit) => {
             match lit.node {
-                ast::LitStr(s, _) => {
+                ast::LitStr(ref s, _) => {
                     match dest {
-                        Ignore => {
-                            return bcx;
-                        }
+                        Ignore => return bcx,
                         SaveIn(lldest) => {
-                            let bytes = s.len();
+                            let bytes = s.get().len();
                             let llbytes = C_uint(bcx.ccx(), bytes);
-                            let llcstr = C_cstr(bcx.ccx(), s);
+                            let llcstr = C_cstr(bcx.ccx(), (*s).clone());
                             base::call_memcpy(bcx,
                                               lldest,
                                               llcstr,
@@ -516,7 +522,7 @@ pub fn elements_required(bcx: &Block, content_expr: &ast::Expr) -> uint {
     match content_expr.node {
         ast::ExprLit(lit) => {
             match lit.node {
-                ast::LitStr(s, _) => s.len(),
+                ast::LitStr(ref s, _) => s.get().len(),
                 _ => {
                     bcx.tcx().sess.span_bug(content_expr.span,
                                             "Unexpected evec content")

View file

@@ -252,14 +252,16 @@ impl<'a> attr::AttrMetaMethods for &'a Attribute {
         }
     }

-    fn value_str(&self) -> Option<@str> {
+    fn value_str(&self) -> Option<InternedString> {
         match **self {
-            NameValue(_, ref v) => Some(v.to_managed()),
+            NameValue(_, ref v) => Some(token::intern_and_get_ident(*v)),
             _ => None,
         }
     }
     fn meta_item_list<'a>(&'a self) -> Option<&'a [@ast::MetaItem]> { None }
-    fn name_str_pair(&self) -> Option<(InternedString, @str)> { None }
+    fn name_str_pair(&self) -> Option<(InternedString, InternedString)> {
+        None
+    }
 }

 #[deriving(Clone, Encodable, Decodable)]
@@ -1144,7 +1146,7 @@ impl ToSource for syntax::codemap::Span {
 fn lit_to_str(lit: &ast::Lit) -> ~str {
     match lit.node {
-        ast::LitStr(st, _) => st.to_owned(),
+        ast::LitStr(ref st, _) => st.get().to_owned(),
         ast::LitBinary(data) => format!("{:?}", data.as_slice()),
         ast::LitChar(c) => ~"'" + std::char::from_u32(c).unwrap().to_str() + "'",
         ast::LitInt(i, _t) => i.to_str(),

View file

@@ -45,6 +45,7 @@ use extra::arc::Arc;
 use extra::json::ToJson;
 use syntax::ast;
 use syntax::attr;
+use syntax::parse::token::InternedString;

 use clean;
 use doctree;
@@ -803,12 +804,13 @@ impl<'a> Item<'a> {
 impl<'a> fmt::Default for Item<'a> {
     fn fmt(it: &Item<'a>, fmt: &mut fmt::Formatter) {
         match attr::find_stability(it.item.attrs.iter()) {
-            Some(stability) => {
+            Some(ref stability) => {
                 write!(fmt.buf,
                        "<a class='stability {lvl}' title='{reason}'>{lvl}</a>",
                        lvl = stability.level.to_str(),
                        reason = match stability.text {
-                           Some(s) => s, None => @"",
+                           Some(ref s) => (*s).clone(),
+                           None => InternedString::new(""),
                        });
             }
             None => {}

View file

@@ -31,6 +31,7 @@ use syntax::{ast, attr, codemap, diagnostic, fold, visit};
 use syntax::attr::AttrMetaMethods;
 use syntax::fold::Folder;
 use syntax::parse::token::InternedString;
+use syntax::parse::token;
 use syntax::visit::Visitor;
 use syntax::util::small_vector::SmallVector;
 use syntax::crateid::CrateId;
@@ -317,8 +318,9 @@ pub fn compile_input(context: &BuildContext,
     if !attr::contains_name(crate.attrs, "crate_id") {
         // FIXME (#9639): This needs to handle non-utf8 paths
         let crateid_attr =
-            attr::mk_name_value_item_str(InternedString::new("crate_id"),
-                                         crate_id.to_str().to_managed());
+            attr::mk_name_value_item_str(
+                InternedString::new("crate_id"),
+                token::intern_and_get_ident(crate_id.to_str()));

         debug!("crateid attr: {:?}", crateid_attr);
         crate.attrs.push(attr::mk_attr(crateid_attr));
@@ -646,7 +648,7 @@ pub fn find_and_install_dependencies(installer: &mut CrateInstaller,
     visit::walk_crate(installer, c, ())
 }

-pub fn mk_string_lit(s: @str) -> ast::Lit {
+pub fn mk_string_lit(s: InternedString) -> ast::Lit {
     Spanned {
         node: ast::LitStr(s, ast::CookedStr),
         span: DUMMY_SP

View file

@@ -722,7 +722,7 @@ pub type Lit = Spanned<Lit_>;
 #[deriving(Clone, Eq, Encodable, Decodable, IterBytes)]
 pub enum Lit_ {
-    LitStr(@str, StrStyle),
+    LitStr(InternedString, StrStyle),
     LitBinary(@[u8]),
     LitChar(u32),
     LitInt(i64, IntTy),

View file

@@ -17,6 +17,7 @@ use codemap::BytePos;
 use diagnostic::SpanHandler;
 use parse::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::token::InternedString;
+use parse::token;
 use crateid::CrateId;

 use std::hashmap::HashSet;
@@ -34,7 +35,7 @@ pub trait AttrMetaMethods {
      * Gets the string value if self is a MetaNameValue variant
      * containing a string, otherwise None.
      */
-    fn value_str(&self) -> Option<@str>;
+    fn value_str(&self) -> Option<InternedString>;
     /// Gets a list of inner meta items from a list MetaItem type.
     fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]>;
@@ -42,16 +43,18 @@ pub trait AttrMetaMethods {
      * If the meta item is a name-value type with a string value then returns
      * a tuple containing the name and string value, otherwise `None`
      */
-    fn name_str_pair(&self) -> Option<(InternedString, @str)>;
+    fn name_str_pair(&self) -> Option<(InternedString,InternedString)>;
 }

 impl AttrMetaMethods for Attribute {
     fn name(&self) -> InternedString { self.meta().name() }
-    fn value_str(&self) -> Option<@str> { self.meta().value_str() }
+    fn value_str(&self) -> Option<InternedString> {
+        self.meta().value_str()
+    }
     fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]> {
         self.node.value.meta_item_list()
     }
-    fn name_str_pair(&self) -> Option<(InternedString, @str)> {
+    fn name_str_pair(&self) -> Option<(InternedString,InternedString)> {
         self.meta().name_str_pair()
     }
 }
@@ -65,11 +68,11 @@ impl AttrMetaMethods for MetaItem {
         }
     }

-    fn value_str(&self) -> Option<@str> {
+    fn value_str(&self) -> Option<InternedString> {
         match self.node {
             MetaNameValue(_, ref v) => {
                 match v.node {
-                    ast::LitStr(s, _) => Some(s),
+                    ast::LitStr(ref s, _) => Some((*s).clone()),
                     _ => None,
                 }
             },
@@ -84,7 +87,7 @@ impl AttrMetaMethods for MetaItem {
         }
     }

-    fn name_str_pair(&self) -> Option<(InternedString, @str)> {
+    fn name_str_pair(&self) -> Option<(InternedString,InternedString)> {
         self.value_str().map(|s| (self.name(), s))
     }
 }
@@ -92,11 +95,11 @@ impl AttrMetaMethods for MetaItem {
 // Annoying, but required to get test_cfg to work
 impl AttrMetaMethods for @MetaItem {
     fn name(&self) -> InternedString { (**self).name() }
-    fn value_str(&self) -> Option<@str> { (**self).value_str() }
+    fn value_str(&self) -> Option<InternedString> { (**self).value_str() }
     fn meta_item_list<'a>(&'a self) -> Option<&'a [@MetaItem]> {
         (**self).meta_item_list()
     }
-    fn name_str_pair(&self) -> Option<(InternedString, @str)> {
+    fn name_str_pair(&self) -> Option<(InternedString,InternedString)> {
         (**self).name_str_pair()
     }
 }
@@ -119,8 +122,10 @@ impl AttributeMethods for Attribute {
     fn desugar_doc(&self) -> Attribute {
         if self.node.is_sugared_doc {
             let comment = self.value_str().unwrap();
-            let meta = mk_name_value_item_str(InternedString::new("doc"),
-                                              strip_doc_comment_decoration(comment).to_managed());
+            let meta = mk_name_value_item_str(
+                InternedString::new("doc"),
+                token::intern_and_get_ident(strip_doc_comment_decoration(
+                        comment.get())));
             mk_attr(meta)
         } else {
             *self
@@ -130,7 +135,7 @@ impl AttributeMethods for Attribute {

 /* Constructors */

-pub fn mk_name_value_item_str(name: InternedString, value: @str)
+pub fn mk_name_value_item_str(name: InternedString, value: InternedString)
                               -> @MetaItem {
     let value_lit = dummy_spanned(ast::LitStr(value, ast::CookedStr));
     mk_name_value_item(name, value_lit)
@@ -157,8 +162,9 @@ pub fn mk_attr(item: @MetaItem) -> Attribute {
     })
 }

-pub fn mk_sugared_doc_attr(text: @str, lo: BytePos, hi: BytePos) -> Attribute {
-    let style = doc_comment_style(text);
+pub fn mk_sugared_doc_attr(text: InternedString, lo: BytePos, hi: BytePos)
+                           -> Attribute {
+    let style = doc_comment_style(text.get());
     let lit = spanned(lo, hi, ast::LitStr(text, ast::CookedStr));
     let attr = Attribute_ {
         style: style,
@@ -191,14 +197,14 @@ pub fn contains_name<AM: AttrMetaMethods>(metas: &[AM], name: &str) -> bool {
 }

 pub fn first_attr_value_str_by_name(attrs: &[Attribute], name: &str)
-                                    -> Option<@str> {
+                                    -> Option<InternedString> {
     attrs.iter()
          .find(|at| at.name().equiv(&name))
         .and_then(|at| at.value_str())
 }

 pub fn last_meta_item_value_str_by_name(items: &[@MetaItem], name: &str)
-                                        -> Option<@str> {
+                                        -> Option<InternedString> {
     items.rev_iter()
          .find(|mi| mi.name().equiv(&name))
          .and_then(|i| i.value_str())
@@ -247,7 +253,7 @@ pub fn find_linkage_metas(attrs: &[Attribute]) -> ~[@MetaItem] {
 pub fn find_crateid(attrs: &[Attribute]) -> Option<CrateId> {
     match first_attr_value_str_by_name(attrs, "crate_id") {
         None => None,
-        Some(id) => from_str::<CrateId>(id),
+        Some(id) => from_str::<CrateId>(id.get()),
     }
 }
@@ -331,7 +337,7 @@ pub fn test_cfg<AM: AttrMetaMethods, It: Iterator<AM>>
 /// Represents the #[deprecated="foo"] (etc) attributes.
 pub struct Stability {
     level: StabilityLevel,
-    text: Option<@str>
+    text: Option<InternedString>
 }

 /// The available stability levels.
@@ -346,7 +352,8 @@ pub enum StabilityLevel {
 }

 /// Find the first stability attribute. `None` if none exists.
-pub fn find_stability<AM: AttrMetaMethods, It: Iterator<AM>>(mut metas: It) -> Option<Stability> {
+pub fn find_stability<AM: AttrMetaMethods, It: Iterator<AM>>(mut metas: It)
+                      -> Option<Stability> {
     for m in metas {
         let level = match m.name().get() {
             "deprecated" => Deprecated,

View file

@@ -17,6 +17,7 @@ use codemap::Span;
 use ext::base;
 use ext::base::*;
 use parse;
+use parse::token::InternedString;
 use parse::token;

 enum State {
@@ -43,7 +44,7 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
                                            cx.cfg(),
                                            tts.to_owned());

-    let mut asm = @"";
+    let mut asm = InternedString::new("");
     let mut asm_str_style = None;
     let mut outputs = ~[];
     let mut inputs = ~[];
@@ -191,7 +192,7 @@ pub fn expand_asm(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
     MRExpr(@ast::Expr {
         id: ast::DUMMY_NODE_ID,
         node: ast::ExprInlineAsm(ast::InlineAsm {
-            asm: asm,
+            asm: asm.get().to_managed(),
             asm_str_style: asm_str_style.unwrap(),
             clobbers: cons.to_managed(),
             inputs: inputs,

View file

@ -16,7 +16,7 @@ use ext;
use ext::expand; use ext::expand;
use parse; use parse;
use parse::token; use parse::token;
use parse::token::{ident_to_str, intern, str_to_ident}; use parse::token::{InternedString, ident_to_str, intern, str_to_ident};
use util::small_vector::SmallVector; use util::small_vector::SmallVector;
use std::hashmap::HashMap; use std::hashmap::HashMap;
@ -407,11 +407,11 @@ impl<'a> ExtCtxt<'a> {
/// Extract a string literal from `expr`, emitting `err_msg` if `expr` /// Extract a string literal from `expr`, emitting `err_msg` if `expr`
/// is not a string literal. This does not stop compilation on error, /// is not a string literal. This does not stop compilation on error,
/// merely emits a non-fatal error and returns None. /// merely emits a non-fatal error and returns None.
pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr, pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr, err_msg: &str)
err_msg: &str) -> Option<(@str, ast::StrStyle)> { -> Option<(InternedString, ast::StrStyle)> {
match expr.node { match expr.node {
ast::ExprLit(l) => match l.node { ast::ExprLit(l) => match l.node {
ast::LitStr(s, style) => return Some((s, style)), ast::LitStr(s, style) => return Some(((*s).clone(), style)),
_ => cx.span_err(l.span, err_msg) _ => cx.span_err(l.span, err_msg)
}, },
_ => cx.span_err(expr.span, err_msg) _ => cx.span_err(expr.span, err_msg)
@ -424,7 +424,9 @@ pub fn expr_to_str(cx: &ExtCtxt, expr: @ast::Expr,
/// compilation should call /// compilation should call
/// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be /// `cx.parse_sess.span_diagnostic.abort_if_errors()` (this should be
/// done as rarely as possible). /// done as rarely as possible).
pub fn check_zero_tts(cx: &ExtCtxt, sp: Span, tts: &[ast::TokenTree], pub fn check_zero_tts(cx: &ExtCtxt,
sp: Span,
tts: &[ast::TokenTree],
name: &str) { name: &str) {
if tts.len() != 0 { if tts.len() != 0 {
cx.span_err(sp, format!("{} takes no arguments", name)); cx.span_err(sp, format!("{} takes no arguments", name));
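expr_to_str now yields an (InternedString, StrStyle) pair by cloning the handle stored in the string literal. A minimal sketch of that shape, assuming stand-in Expr and StrStyle types rather than the real AST (the real function also reports errors through the ExtCtxt instead of returning silently):

    use std::rc::Rc;

    #[derive(Clone)]
    struct Interned(Rc<str>);

    #[derive(Clone, Copy)]
    enum StrStyle { Cooked, Raw(u16) }

    enum Expr {
        LitStr(Interned, StrStyle),
        Other,
    }

    // Cloning the interned handle copies a pointer, not the string data,
    // which is what the `(*s).clone()` above amounts to.
    fn expr_to_str(expr: &Expr) -> Option<(Interned, StrStyle)> {
        match expr {
            Expr::LitStr(s, style) => Some((s.clone(), *style)),
            Expr::Other => None,
        }
    }

    fn main() {
        let e = Expr::LitStr(Interned(Rc::from("hello")), StrStyle::Cooked);
        assert!(expr_to_str(&e).is_some());
    }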

View file

@ -19,6 +19,7 @@ use fold::Folder;
use opt_vec; use opt_vec;
use opt_vec::OptVec; use opt_vec::OptVec;
use parse::token::special_idents; use parse::token::special_idents;
use parse::token;
pub struct Field { pub struct Field {
ident: ast::Ident, ident: ast::Ident,
@ -134,13 +135,13 @@ pub trait AstBuilder {
fn expr_vec(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; fn expr_vec(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr;
fn expr_vec_uniq(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; fn expr_vec_uniq(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr;
fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr; fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr;
fn expr_str(&self, sp: Span, s: @str) -> @ast::Expr; fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr;
fn expr_str_uniq(&self, sp: Span, s: @str) -> @ast::Expr; fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr;
fn expr_some(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr; fn expr_some(&self, sp: Span, expr: @ast::Expr) -> @ast::Expr;
fn expr_none(&self, sp: Span) -> @ast::Expr; fn expr_none(&self, sp: Span) -> @ast::Expr;
fn expr_fail(&self, span: Span, msg: @str) -> @ast::Expr; fn expr_fail(&self, span: Span, msg: InternedString) -> @ast::Expr;
fn expr_unreachable(&self, span: Span) -> @ast::Expr; fn expr_unreachable(&self, span: Span) -> @ast::Expr;
fn pat(&self, span: Span, pat: ast::Pat_) -> @ast::Pat; fn pat(&self, span: Span, pat: ast::Pat_) -> @ast::Pat;
@ -589,10 +590,10 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr { fn expr_vec_slice(&self, sp: Span, exprs: ~[@ast::Expr]) -> @ast::Expr {
self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice) self.expr_vstore(sp, self.expr_vec(sp, exprs), ast::ExprVstoreSlice)
} }
fn expr_str(&self, sp: Span, s: @str) -> @ast::Expr { fn expr_str(&self, sp: Span, s: InternedString) -> @ast::Expr {
self.expr_lit(sp, ast::LitStr(s, ast::CookedStr)) self.expr_lit(sp, ast::LitStr(s, ast::CookedStr))
} }
fn expr_str_uniq(&self, sp: Span, s: @str) -> @ast::Expr { fn expr_str_uniq(&self, sp: Span, s: InternedString) -> @ast::Expr {
self.expr_vstore(sp, self.expr_str(sp, s), ast::ExprVstoreUniq) self.expr_vstore(sp, self.expr_str(sp, s), ast::ExprVstoreUniq)
} }
@ -620,7 +621,7 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
self.expr_path(none) self.expr_path(none)
} }
fn expr_fail(&self, span: Span, msg: @str) -> @ast::Expr { fn expr_fail(&self, span: Span, msg: InternedString) -> @ast::Expr {
let loc = self.codemap().lookup_char_pos(span.lo); let loc = self.codemap().lookup_char_pos(span.lo);
self.expr_call_global( self.expr_call_global(
span, span,
@ -631,13 +632,16 @@ impl<'a> AstBuilder for ExtCtxt<'a> {
], ],
~[ ~[
self.expr_str(span, msg), self.expr_str(span, msg),
self.expr_str(span, loc.file.name), self.expr_str(span,
token::intern_and_get_ident(loc.file.name)),
self.expr_uint(span, loc.line), self.expr_uint(span, loc.line),
]) ])
} }
fn expr_unreachable(&self, span: Span) -> @ast::Expr { fn expr_unreachable(&self, span: Span) -> @ast::Expr {
self.expr_fail(span, @"internal error: entered unreachable code") self.expr_fail(span,
InternedString::new(
"internal error: entered unreachable code"))
} }

View file

@ -31,8 +31,8 @@ pub fn expand_syntax_ext(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) ->
// expression is a literal // expression is a literal
ast::ExprLit(lit) => match lit.node { ast::ExprLit(lit) => match lit.node {
// string literal, push each byte to vector expression // string literal, push each byte to vector expression
ast::LitStr(s, _) => { ast::LitStr(ref s, _) => {
for byte in s.bytes() { for byte in s.get().bytes() {
bytes.push(cx.expr_u8(expr.span, byte)); bytes.push(cx.expr_u8(expr.span, byte));
} }
} }

View file

@ -14,6 +14,7 @@ use ast;
use codemap; use codemap;
use ext::base; use ext::base;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use parse::token;
pub fn expand_syntax_ext(cx: &mut base::ExtCtxt, pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
sp: codemap::Span, sp: codemap::Span,
@ -28,8 +29,10 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
match e.node { match e.node {
ast::ExprLit(lit) => { ast::ExprLit(lit) => {
match lit.node { match lit.node {
ast::LitStr(s, _) | ast::LitFloat(s, _) ast::LitStr(ref s, _) => {
| ast::LitFloatUnsuffixed(s) => { accumulator.push_str(s.get());
}
ast::LitFloat(s, _) | ast::LitFloatUnsuffixed(s) => {
accumulator.push_str(s); accumulator.push_str(s);
} }
ast::LitChar(c) => { ast::LitChar(c) => {
@ -55,5 +58,5 @@ pub fn expand_syntax_ext(cx: &mut base::ExtCtxt,
} }
} }
} }
return base::MRExpr(cx.expr_str(sp, accumulator.to_managed())); base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(accumulator)))
} }
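The concat! change illustrates another recurring pattern: accumulate the result in an owned buffer, then intern the finished text once, rather than producing a managed string with to_managed(). A sketch under that assumption, with a stub intern function in place of token::intern_and_get_ident (the stub only shares the allocation; the real function deduplicates through the interner):

    use std::rc::Rc;

    // Stub for token::intern_and_get_ident.
    fn intern(s: &str) -> Rc<str> {
        Rc::from(s)
    }

    fn concat_pieces(pieces: &[&str]) -> Rc<str> {
        let mut accumulator = String::new();
        for piece in pieces {
            accumulator.push_str(piece); // accumulate in an owned buffer
        }
        intern(&accumulator)             // intern the finished text once
    }

    fn main() {
        assert_eq!(&*concat_pieces(&["foo", "1", "bar"]), "foo1bar");
    }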

View file

@ -18,6 +18,8 @@ use codemap::Span;
use ext::base::ExtCtxt; use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use parse::token::InternedString;
use parse::token;
pub fn expand_deriving_decodable(cx: &ExtCtxt, pub fn expand_deriving_decodable(cx: &ExtCtxt,
span: Span, span: Span,
@ -82,10 +84,15 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span,
cx.expr_uint(span, field), cx.expr_uint(span, field),
lambdadecode]) lambdadecode])
}); });
cx.expr_method_call(trait_span, decoder, cx.ident_of("read_struct"), cx.expr_method_call(trait_span,
~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)), decoder,
cx.expr_uint(trait_span, nfields), cx.ident_of("read_struct"),
cx.lambda_expr_1(trait_span, result, blkarg)]) ~[
cx.expr_str(trait_span,
token::get_ident(substr.type_ident.name)),
cx.expr_uint(trait_span, nfields),
cx.lambda_expr_1(trait_span, result, blkarg)
])
} }
StaticEnum(_, ref fields) => { StaticEnum(_, ref fields) => {
let variant = cx.ident_of("i"); let variant = cx.ident_of("i");
@ -95,7 +102,8 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span,
let rvariant_arg = cx.ident_of("read_enum_variant_arg"); let rvariant_arg = cx.ident_of("read_enum_variant_arg");
for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() { for (i, &(name, v_span, ref parts)) in fields.iter().enumerate() {
variants.push(cx.expr_str(v_span, cx.str_of(name))); variants.push(cx.expr_str(v_span,
token::get_ident(name.name)));
let decoded = decode_static_fields(cx, let decoded = decode_static_fields(cx,
v_span, v_span,
@ -120,9 +128,14 @@ fn decodable_substructure(cx: &ExtCtxt, trait_span: Span,
let result = cx.expr_method_call(trait_span, blkdecoder, let result = cx.expr_method_call(trait_span, blkdecoder,
cx.ident_of("read_enum_variant"), cx.ident_of("read_enum_variant"),
~[variant_vec, lambda]); ~[variant_vec, lambda]);
cx.expr_method_call(trait_span, decoder, cx.ident_of("read_enum"), cx.expr_method_call(trait_span,
~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)), decoder,
cx.lambda_expr_1(trait_span, result, blkarg)]) cx.ident_of("read_enum"),
~[
cx.expr_str(trait_span,
token::get_ident(substr.type_ident.name)),
cx.lambda_expr_1(trait_span, result, blkarg)
])
} }
_ => cx.bug("expected StaticEnum or StaticStruct in deriving(Decodable)") _ => cx.bug("expected StaticEnum or StaticStruct in deriving(Decodable)")
}; };
@ -135,7 +148,7 @@ fn decode_static_fields(cx: &ExtCtxt,
trait_span: Span, trait_span: Span,
outer_pat_ident: Ident, outer_pat_ident: Ident,
fields: &StaticFields, fields: &StaticFields,
getarg: |Span, @str, uint| -> @Expr) getarg: |Span, InternedString, uint| -> @Expr)
-> @Expr { -> @Expr {
match *fields { match *fields {
Unnamed(ref fields) => { Unnamed(ref fields) => {
@ -143,7 +156,10 @@ fn decode_static_fields(cx: &ExtCtxt,
cx.expr_ident(trait_span, outer_pat_ident) cx.expr_ident(trait_span, outer_pat_ident)
} else { } else {
let fields = fields.iter().enumerate().map(|(i, &span)| { let fields = fields.iter().enumerate().map(|(i, &span)| {
getarg(span, format!("_field{}", i).to_managed(), i) getarg(span,
token::intern_and_get_ident(format!("_field{}",
i)),
i)
}).collect(); }).collect();
cx.expr_call_ident(trait_span, outer_pat_ident, fields) cx.expr_call_ident(trait_span, outer_pat_ident, fields)
@ -152,7 +168,9 @@ fn decode_static_fields(cx: &ExtCtxt,
Named(ref fields) => { Named(ref fields) => {
// use the field's span to get nicer error messages. // use the field's span to get nicer error messages.
let fields = fields.iter().enumerate().map(|(i, &(name, span))| { let fields = fields.iter().enumerate().map(|(i, &(name, span))| {
cx.field_imm(span, name, getarg(span, cx.str_of(name), i)) cx.field_imm(span,
name,
getarg(span, token::get_ident(name.name), i))
}).collect(); }).collect();
cx.expr_struct_ident(trait_span, outer_pat_ident, fields) cx.expr_struct_ident(trait_span, outer_pat_ident, fields)
} }

View file

@ -80,6 +80,7 @@ use codemap::Span;
use ext::base::ExtCtxt; use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use parse::token;
pub fn expand_deriving_encodable(cx: &ExtCtxt, pub fn expand_deriving_encodable(cx: &ExtCtxt,
span: Span, span: Span,
@ -125,10 +126,17 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span,
Struct(ref fields) => { Struct(ref fields) => {
let emit_struct_field = cx.ident_of("emit_struct_field"); let emit_struct_field = cx.ident_of("emit_struct_field");
let mut stmts = ~[]; let mut stmts = ~[];
for (i, &FieldInfo { name, self_, span, .. }) in fields.iter().enumerate() { for (i, &FieldInfo {
name,
self_,
span,
..
}) in fields.iter().enumerate() {
let name = match name { let name = match name {
Some(id) => cx.str_of(id), Some(id) => token::get_ident(id),
None => format!("_field{}", i).to_managed() None => {
token::intern_and_get_ident(format!("_field{}", i))
}
}; };
let enc = cx.expr_method_call(span, self_, encode, ~[blkencoder]); let enc = cx.expr_method_call(span, self_, encode, ~[blkencoder]);
let lambda = cx.lambda_expr_1(span, enc, blkarg); let lambda = cx.lambda_expr_1(span, enc, blkarg);
@ -141,10 +149,15 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span,
} }
let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg);
cx.expr_method_call(trait_span, encoder, cx.ident_of("emit_struct"), cx.expr_method_call(trait_span,
~[cx.expr_str(trait_span, cx.str_of(substr.type_ident)), encoder,
cx.expr_uint(trait_span, fields.len()), cx.ident_of("emit_struct"),
blk]) ~[
cx.expr_str(trait_span,
token::get_ident(substr.type_ident.name)),
cx.expr_uint(trait_span, fields.len()),
blk
])
} }
EnumMatching(idx, variant, ref fields) => { EnumMatching(idx, variant, ref fields) => {
@ -167,7 +180,8 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span,
} }
let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg); let blk = cx.lambda_stmts_1(trait_span, stmts, blkarg);
let name = cx.expr_str(trait_span, cx.str_of(variant.node.name)); let name = cx.expr_str(trait_span,
token::get_ident(variant.node.name));
let call = cx.expr_method_call(trait_span, blkencoder, let call = cx.expr_method_call(trait_span, blkencoder,
cx.ident_of("emit_enum_variant"), cx.ident_of("emit_enum_variant"),
~[name, ~[name,
@ -175,11 +189,14 @@ fn encodable_substructure(cx: &ExtCtxt, trait_span: Span,
cx.expr_uint(trait_span, fields.len()), cx.expr_uint(trait_span, fields.len()),
blk]); blk]);
let blk = cx.lambda_expr_1(trait_span, call, blkarg); let blk = cx.lambda_expr_1(trait_span, call, blkarg);
let ret = cx.expr_method_call(trait_span, encoder, let ret = cx.expr_method_call(trait_span,
encoder,
cx.ident_of("emit_enum"), cx.ident_of("emit_enum"),
~[cx.expr_str(trait_span, ~[
cx.str_of(substr.type_ident)), cx.expr_str(trait_span,
blk]); token::get_ident(substr.type_ident.name)),
blk
]);
cx.expr_block(cx.block(trait_span, ~[me], Some(ret))) cx.expr_block(cx.block(trait_span, ~[me], Some(ret)))
} }
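Both deriving(Encodable) and deriving(Decodable) now pick field names the same way: keep the interned ident when the field is named, otherwise intern a synthesized _fieldN. A small illustration of that choice, again with a hypothetical intern helper rather than the real token::get_ident / token::intern_and_get_ident:

    use std::rc::Rc;

    // Stub interner; it only shares the allocation and does not consult
    // a real interning table.
    fn intern(s: &str) -> Rc<str> {
        Rc::from(s)
    }

    // Named struct fields keep their ident; tuple-struct fields get a
    // synthesized, interned "_fieldN" name, as in the hunks above.
    fn field_name(explicit: Option<&str>, index: usize) -> Rc<str> {
        match explicit {
            Some(name) => intern(name),
            None => intern(&format!("_field{}", index)),
        }
    }

    fn main() {
        assert_eq!(&*field_name(Some("level"), 0), "level");
        assert_eq!(&*field_name(None, 1), "_field1");
    }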

View file

@ -185,6 +185,7 @@ use codemap;
use codemap::Span; use codemap::Span;
use opt_vec; use opt_vec;
use parse::token::InternedString; use parse::token::InternedString;
use parse::token;
use std::vec; use std::vec;
@ -398,7 +399,9 @@ impl<'a> TraitDef<'a> {
self.span, self.span,
cx.meta_name_value(self.span, cx.meta_name_value(self.span,
InternedString::new("doc"), InternedString::new("doc"),
ast::LitStr(@"Automatically derived.", ast::CookedStr))); ast::LitStr(token::intern_and_get_ident(
"Automatically derived."),
ast::CookedStr)));
cx.item( cx.item(
self.span, self.span,
::parse::token::special_idents::clownshoes_extensions, ::parse::token::special_idents::clownshoes_extensions,

View file

@ -14,6 +14,7 @@ use codemap::Span;
use ext::base::ExtCtxt; use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use parse::token::InternedString;
pub fn expand_deriving_from_primitive(cx: &ExtCtxt, pub fn expand_deriving_from_primitive(cx: &ExtCtxt,
span: Span, span: Span,
@ -73,13 +74,13 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) ->
match *substr.fields { match *substr.fields {
StaticStruct(..) => { StaticStruct(..) => {
cx.span_err(trait_span, "`FromPrimitive` cannot be derived for structs"); cx.span_err(trait_span, "`FromPrimitive` cannot be derived for structs");
return cx.expr_fail(trait_span, @""); return cx.expr_fail(trait_span, InternedString::new(""));
} }
StaticEnum(enum_def, _) => { StaticEnum(enum_def, _) => {
if enum_def.variants.is_empty() { if enum_def.variants.is_empty() {
cx.span_err(trait_span, cx.span_err(trait_span,
"`FromPrimitive` cannot be derived for enums with no variants"); "`FromPrimitive` cannot be derived for enums with no variants");
return cx.expr_fail(trait_span, @""); return cx.expr_fail(trait_span, InternedString::new(""));
} }
let mut arms = ~[]; let mut arms = ~[];
@ -91,7 +92,8 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) ->
cx.span_err(trait_span, cx.span_err(trait_span,
"`FromPrimitive` cannot be derived for \ "`FromPrimitive` cannot be derived for \
enum variants with arguments"); enum variants with arguments");
return cx.expr_fail(trait_span, @""); return cx.expr_fail(trait_span,
InternedString::new(""));
} }
let span = variant.span; let span = variant.span;
@ -117,7 +119,8 @@ fn cs_from(name: &str, cx: &ExtCtxt, trait_span: Span, substr: &Substructure) ->
cx.span_err(trait_span, cx.span_err(trait_span,
"`FromPrimitive` cannot be derived for enums \ "`FromPrimitive` cannot be derived for enums \
with struct variants"); with struct variants");
return cx.expr_fail(trait_span, @""); return cx.expr_fail(trait_span,
InternedString::new(""));
} }
} }
} }

View file

@ -14,6 +14,8 @@ use codemap::Span;
use ext::base::ExtCtxt; use ext::base::ExtCtxt;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use ext::deriving::generic::*; use ext::deriving::generic::*;
use parse::token::InternedString;
use parse::token;
pub fn expand_deriving_to_str(cx: &ExtCtxt, pub fn expand_deriving_to_str(cx: &ExtCtxt,
span: Span, span: Span,
@ -47,18 +49,20 @@ pub fn expand_deriving_to_str(cx: &ExtCtxt,
// doesn't invoke the to_str() method on each field. Hence we mirror // doesn't invoke the to_str() method on each field. Hence we mirror
// the logic of the repr_to_str() method, but with tweaks to call to_str() // the logic of the repr_to_str() method, but with tweaks to call to_str()
// on sub-fields. // on sub-fields.
fn to_str_substructure(cx: &ExtCtxt, span: Span, fn to_str_substructure(cx: &ExtCtxt, span: Span, substr: &Substructure)
substr: &Substructure) -> @Expr { -> @Expr {
let to_str = cx.ident_of("to_str"); let to_str = cx.ident_of("to_str");
let doit = |start: &str, end: @str, name: ast::Ident, let doit = |start: &str,
end: InternedString,
name: ast::Ident,
fields: &[FieldInfo]| { fields: &[FieldInfo]| {
if fields.len() == 0 { if fields.len() == 0 {
cx.expr_str_uniq(span, cx.str_of(name)) cx.expr_str_uniq(span, token::get_ident(name.name))
} else { } else {
let buf = cx.ident_of("buf"); let buf = cx.ident_of("buf");
let start = cx.str_of(name) + start; let start = token::intern_and_get_ident(cx.str_of(name) + start);
let init = cx.expr_str_uniq(span, start.to_managed()); let init = cx.expr_str_uniq(span, start);
let mut stmts = ~[cx.stmt_let(span, true, buf, init)]; let mut stmts = ~[cx.stmt_let(span, true, buf, init)];
let push_str = cx.ident_of("push_str"); let push_str = cx.ident_of("push_str");
@ -70,38 +74,52 @@ fn to_str_substructure(cx: &ExtCtxt, span: Span,
for (i, &FieldInfo {name, span, self_, .. }) in fields.iter().enumerate() { for (i, &FieldInfo {name, span, self_, .. }) in fields.iter().enumerate() {
if i > 0 { if i > 0 {
push(cx.expr_str(span, @", ")); push(cx.expr_str(span, InternedString::new(", ")));
} }
match name { match name {
None => {} None => {}
Some(id) => { Some(id) => {
let name = cx.str_of(id) + ": "; let name = cx.str_of(id) + ": ";
push(cx.expr_str(span, name.to_managed())); push(cx.expr_str(span,
token::intern_and_get_ident(name)));
} }
} }
push(cx.expr_method_call(span, self_, to_str, ~[])); push(cx.expr_method_call(span, self_, to_str, ~[]));
} }
push(cx.expr_str(span, end)); push(cx.expr_str(span, end));
cx.expr_block(cx.block(span, stmts, Some(cx.expr_ident(span, buf)))) cx.expr_block(cx.block(span, stmts, Some(cx.expr_ident(span,
buf))))
} }
}; };
return match *substr.fields { return match *substr.fields {
Struct(ref fields) => { Struct(ref fields) => {
if fields.len() == 0 || fields[0].name.is_none() { if fields.len() == 0 || fields[0].name.is_none() {
doit("(", @")", substr.type_ident, *fields) doit("(",
InternedString::new(")"),
substr.type_ident,
*fields)
} else { } else {
doit("{", @"}", substr.type_ident, *fields) doit("{",
InternedString::new("}"),
substr.type_ident,
*fields)
} }
} }
EnumMatching(_, variant, ref fields) => { EnumMatching(_, variant, ref fields) => {
match variant.node.kind { match variant.node.kind {
ast::TupleVariantKind(..) => ast::TupleVariantKind(..) =>
doit("(", @")", variant.node.name, *fields), doit("(",
InternedString::new(")"),
variant.node.name,
*fields),
ast::StructVariantKind(..) => ast::StructVariantKind(..) =>
doit("{", @"}", variant.node.name, *fields), doit("{",
InternedString::new("}"),
variant.node.name,
*fields),
} }
} }

View file

@ -19,6 +19,7 @@ use codemap::Span;
use ext::base::*; use ext::base::*;
use ext::base; use ext::base;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use parse::token;
use std::os; use std::os;
@ -52,7 +53,11 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
Some((v, _style)) => v Some((v, _style)) => v
}; };
let msg = match exprs.len() { let msg = match exprs.len() {
1 => format!("environment variable `{}` not defined", var).to_managed(), 1 => {
token::intern_and_get_ident(format!("environment variable `{}` \
not defined",
var))
}
2 => { 2 => {
match expr_to_str(cx, exprs[1], "expected string literal") { match expr_to_str(cx, exprs[1], "expected string literal") {
None => return MacResult::dummy_expr(), None => return MacResult::dummy_expr(),
@ -65,12 +70,12 @@ pub fn expand_env(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
} }
}; };
let e = match os::getenv(var) { let e = match os::getenv(var.get()) {
None => { None => {
cx.span_err(sp, msg); cx.span_err(sp, msg.get());
cx.expr_uint(sp, 0) cx.expr_uint(sp, 0)
} }
Some(s) => cx.expr_str(sp, s.to_managed()) Some(s) => cx.expr_str(sp, token::intern_and_get_ident(s))
}; };
MRExpr(e) MRExpr(e)
} }
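expand_env reads the variable through the handle's &str view and interns whatever value comes back. Sketched below with std::env::var (the modern counterpart of the os::getenv call above) and the same illustrative Interned stand-in:

    use std::env;
    use std::rc::Rc;

    #[derive(Clone)]
    struct Interned(Rc<str>);

    impl Interned {
        fn new(s: &str) -> Interned { Interned(Rc::from(s)) }
        fn get(&self) -> &str { &self.0 }
    }

    // Mirrors expand_env: look the variable up by the handle's &str view,
    // intern the value if present, otherwise produce the error message.
    fn lookup(var: &Interned) -> Result<Interned, String> {
        match env::var(var.get()) {
            Ok(value) => Ok(Interned::new(&value)),
            Err(_) => Err(format!("environment variable `{}` not defined",
                                  var.get())),
        }
    }

    fn main() {
        match lookup(&Interned::new("PATH")) {
            Ok(v) => println!("PATH = {}", v.get()),
            Err(msg) => println!("{}", msg),
        }
    }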

View file

@ -416,7 +416,7 @@ impl<'a> Context<'a> {
let result = arm.result.iter().map(|p| { let result = arm.result.iter().map(|p| {
self.trans_piece(p) self.trans_piece(p)
}).collect(); }).collect();
let s = arm.selector.to_managed(); let s = token::intern_and_get_ident(arm.selector);
let selector = self.ecx.expr_str(sp, s); let selector = self.ecx.expr_str(sp, s);
self.ecx.expr_struct(sp, p, ~[ self.ecx.expr_struct(sp, p, ~[
self.ecx.field_imm(sp, self.ecx.field_imm(sp,
@ -492,8 +492,12 @@ impl<'a> Context<'a> {
match *piece { match *piece {
parse::String(s) => { parse::String(s) => {
self.ecx.expr_call_global(sp, rtpath("String"), let s = token::intern_and_get_ident(s);
~[self.ecx.expr_str(sp, s.to_managed())]) self.ecx.expr_call_global(sp,
rtpath("String"),
~[
self.ecx.expr_str(sp, s)
])
} }
parse::CurrentArgument => { parse::CurrentArgument => {
let nil = self.ecx.expr_lit(sp, ast::LitNil); let nil = self.ecx.expr_lit(sp, ast::LitNil);
@ -763,8 +767,9 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span,
// Be sure to recursively expand macros just in case the format string uses // Be sure to recursively expand macros just in case the format string uses
// a macro to build the format expression. // a macro to build the format expression.
let expr = cx.ecx.expand_expr(efmt); let expr = cx.ecx.expand_expr(efmt);
let fmt = match expr_to_str(cx.ecx, expr, let fmt = match expr_to_str(cx.ecx,
"format argument must be a string literal.") { expr,
"format argument must be a string literal.") {
Some((fmt, _)) => fmt, Some((fmt, _)) => fmt,
None => return MacResult::dummy_expr() None => return MacResult::dummy_expr()
}; };
@ -776,7 +781,7 @@ pub fn expand_args(ecx: &mut ExtCtxt, sp: Span,
cx.ecx.span_err(efmt.span, m); cx.ecx.span_err(efmt.span, m);
} }
}).inside(|| { }).inside(|| {
for piece in parse::Parser::new(fmt) { for piece in parse::Parser::new(fmt.get()) {
if !err { if !err {
cx.verify_piece(&piece); cx.verify_piece(&piece);
let piece = cx.trans_piece(&piece); let piece = cx.trans_piece(&piece);

View file

@ -31,6 +31,7 @@ use parse;
pub mod rt { pub mod rt {
use ast; use ast;
use ext::base::ExtCtxt; use ext::base::ExtCtxt;
use parse::token;
use parse; use parse;
use print::pprust; use print::pprust;
@ -118,7 +119,8 @@ pub mod rt {
impl<'a> ToSource for &'a str { impl<'a> ToSource for &'a str {
fn to_source(&self) -> @str { fn to_source(&self) -> @str {
let lit = dummy_spanned(ast::LitStr(self.to_managed(), ast::CookedStr)); let lit = dummy_spanned(ast::LitStr(
token::intern_and_get_ident(*self), ast::CookedStr));
pprust::lit_to_str(&lit).to_managed() pprust::lit_to_str(&lit).to_managed()
} }
} }
@ -349,7 +351,7 @@ fn id_ext(str: &str) -> ast::Ident {
// Lift an ident to the expr that evaluates to that ident. // Lift an ident to the expr that evaluates to that ident.
fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr { fn mk_ident(cx: &ExtCtxt, sp: Span, ident: ast::Ident) -> @ast::Expr {
let e_str = cx.expr_str(sp, cx.str_of(ident)); let e_str = cx.expr_str(sp, token::get_ident(ident.name));
cx.expr_method_call(sp, cx.expr_method_call(sp,
cx.expr_ident(sp, id_ext("ext_cx")), cx.expr_ident(sp, id_ext("ext_cx")),
id_ext("ident_of"), id_ext("ident_of"),

View file

@ -16,7 +16,8 @@ use ext::base::*;
use ext::base; use ext::base;
use ext::build::AstBuilder; use ext::build::AstBuilder;
use parse; use parse;
use parse::token::{get_ident_interner}; use parse::token::get_ident_interner;
use parse::token;
use print::pprust; use print::pprust;
use std::io; use std::io;
@ -57,21 +58,21 @@ pub fn expand_file(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let topmost = topmost_expn_info(cx.backtrace().unwrap()); let topmost = topmost_expn_info(cx.backtrace().unwrap());
let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo); let loc = cx.codemap().lookup_char_pos(topmost.call_site.lo);
let filename = loc.file.name; let filename = token::intern_and_get_ident(loc.file.name);
base::MRExpr(cx.expr_str(topmost.call_site, filename)) base::MRExpr(cx.expr_str(topmost.call_site, filename))
} }
pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) pub fn expand_stringify(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> base::MacResult { -> base::MacResult {
let s = pprust::tts_to_str(tts, get_ident_interner()); let s = pprust::tts_to_str(tts, get_ident_interner());
base::MRExpr(cx.expr_str(sp, s.to_managed())) base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(s)))
} }
pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]) pub fn expand_mod(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
-> base::MacResult { -> base::MacResult {
base::check_zero_tts(cx, sp, tts, "module_path!"); base::check_zero_tts(cx, sp, tts, "module_path!");
base::MRExpr(cx.expr_str(sp, let string = cx.mod_path().map(|x| cx.str_of(*x)).connect("::");
cx.mod_path().map(|x| cx.str_of(*x)).connect("::").to_managed())) base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(string)))
} }
// include! : parse the given file as an expr // include! : parse the given file as an expr
@ -117,7 +118,7 @@ pub fn expand_include_str(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree])
let filename = file.display().to_str().to_managed(); let filename = file.display().to_str().to_managed();
cx.parse_sess.cm.new_filemap(filename, src); cx.parse_sess.cm.new_filemap(filename, src);
base::MRExpr(cx.expr_str(sp, src)) base::MRExpr(cx.expr_str(sp, token::intern_and_get_ident(src)))
} }
None => { None => {
cx.span_err(sp, format!("{} wasn't a utf-8 file", file.display())); cx.span_err(sp, format!("{} wasn't a utf-8 file", file.display()));

View file

@ -326,7 +326,9 @@ fn fold_meta_item_<T: Folder>(mi: @MetaItem, fld: &mut T) -> @MetaItem {
let fold_meta_item = |x| fold_meta_item_(x, fld); let fold_meta_item = |x| fold_meta_item_(x, fld);
MetaList((*id).clone(), mis.map(|e| fold_meta_item(*e))) MetaList((*id).clone(), mis.map(|e| fold_meta_item(*e)))
} }
MetaNameValue(ref id, s) => MetaNameValue((*id).clone(), s) MetaNameValue(ref id, ref s) => {
MetaNameValue((*id).clone(), (*s).clone())
}
}, },
span: fld.new_span(mi.span) } span: fld.new_span(mi.span) }
} }
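fold_meta_item_ clones the interned value where it used to copy an @str. With a reference-counted handle such a clone is a pointer copy plus a counter bump, not a copy of the text. A tiny demonstration of that property, with Rc<str> standing in for the interned string (the 2014 type actually wraps a task-local @str, so this is only an analogy):

    use std::rc::Rc;

    fn main() {
        let original: Rc<str> = Rc::from("freebsd");
        let folded = original.clone(); // what (*s).clone() amounts to

        // Both handles point at the same allocation; no text was copied.
        assert!(Rc::ptr_eq(&original, &folded));
        assert_eq!(Rc::strong_count(&original), 2);
    }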

View file

@ -45,7 +45,7 @@ impl ParserAttr for Parser {
} }
token::DOC_COMMENT(s) => { token::DOC_COMMENT(s) => {
let attr = ::attr::mk_sugared_doc_attr( let attr = ::attr::mk_sugared_doc_attr(
self.id_to_str(s), self.id_to_interned_str(s),
self.span.lo, self.span.lo,
self.span.hi self.span.hi
); );
@ -133,7 +133,7 @@ impl ParserAttr for Parser {
} }
token::DOC_COMMENT(s) => { token::DOC_COMMENT(s) => {
self.bump(); self.bump();
::attr::mk_sugared_doc_attr(self.id_to_str(s), ::attr::mk_sugared_doc_attr(self.id_to_interned_str(s),
self.span.lo, self.span.lo,
self.span.hi) self.span.hi)
} }

View file

@ -54,7 +54,6 @@ pub fn doc_comment_style(comment: &str) -> ast::AttrStyle {
} }
pub fn strip_doc_comment_decoration(comment: &str) -> ~str { pub fn strip_doc_comment_decoration(comment: &str) -> ~str {
/// remove whitespace-only lines from the start/end of lines /// remove whitespace-only lines from the start/end of lines
fn vertical_trim(lines: ~[~str]) -> ~[~str] { fn vertical_trim(lines: ~[~str]) -> ~[~str] {
let mut i = 0u; let mut i = 0u;

View file

@ -345,7 +345,7 @@ pub struct Parser {
/// extra detail when the same error is seen twice /// extra detail when the same error is seen twice
obsolete_set: HashSet<ObsoleteSyntax>, obsolete_set: HashSet<ObsoleteSyntax>,
/// Used to determine the path to externally loaded source files /// Used to determine the path to externally loaded source files
mod_path_stack: ~[@str], mod_path_stack: ~[InternedString],
/// Stack of spans of open delimiters. Used for error message. /// Stack of spans of open delimiters. Used for error message.
open_braces: ~[Span], open_braces: ~[Span],
/* do not copy the parser; its state is tied to outside state */ /* do not copy the parser; its state is tied to outside state */
@ -1408,8 +1408,12 @@ impl Parser {
token::LIT_FLOAT(s, ft) => LitFloat(self.id_to_str(s), ft), token::LIT_FLOAT(s, ft) => LitFloat(self.id_to_str(s), ft),
token::LIT_FLOAT_UNSUFFIXED(s) => token::LIT_FLOAT_UNSUFFIXED(s) =>
LitFloatUnsuffixed(self.id_to_str(s)), LitFloatUnsuffixed(self.id_to_str(s)),
token::LIT_STR(s) => LitStr(self.id_to_str(s), ast::CookedStr), token::LIT_STR(s) => {
token::LIT_STR_RAW(s, n) => LitStr(self.id_to_str(s), ast::RawStr(n)), LitStr(self.id_to_interned_str(s), ast::CookedStr)
}
token::LIT_STR_RAW(s, n) => {
LitStr(self.id_to_interned_str(s), ast::RawStr(n))
}
token::LPAREN => { self.expect(&token::RPAREN); LitNil }, token::LPAREN => { self.expect(&token::RPAREN); LitNil },
_ => { self.unexpected_last(tok); } _ => { self.unexpected_last(tok); }
} }
@ -4146,11 +4150,11 @@ impl Parser {
} }
fn push_mod_path(&mut self, id: Ident, attrs: &[Attribute]) { fn push_mod_path(&mut self, id: Ident, attrs: &[Attribute]) {
let default_path = token::interner_get(id.name); let default_path = self.id_to_interned_str(id);
let file_path = match ::attr::first_attr_value_str_by_name(attrs, let file_path = match ::attr::first_attr_value_str_by_name(attrs,
"path") { "path") {
Some(d) => d, Some(d) => d,
None => default_path None => default_path,
}; };
self.mod_path_stack.push(file_path) self.mod_path_stack.push(file_path)
} }

View file

@ -20,6 +20,7 @@ use std::cast;
use std::char; use std::char;
use std::fmt; use std::fmt;
use std::local_data; use std::local_data;
use std::path::BytesContainer;
#[allow(non_camel_case_types)] #[allow(non_camel_case_types)]
#[deriving(Clone, Encodable, Decodable, Eq, IterBytes)] #[deriving(Clone, Encodable, Decodable, Eq, IterBytes)]
@ -537,7 +538,7 @@ pub fn get_ident_interner() -> @IdentInterner {
/// be fixed in the future by just leaking all strings until task death /// be fixed in the future by just leaking all strings until task death
/// somehow. /// somehow.
#[no_send] #[no_send]
#[deriving(Clone, Eq, IterBytes, TotalEq, TotalOrd)] #[deriving(Clone, Eq, IterBytes, Ord, TotalEq, TotalOrd)]
pub struct InternedString { pub struct InternedString {
priv string: @str, priv string: @str,
} }
@ -571,6 +572,17 @@ impl InternedString {
} }
} }
impl BytesContainer for InternedString {
fn container_as_bytes<'a>(&'a self) -> &'a [u8] {
// XXX(pcwalton): This is a workaround for the incorrect signature of
// `BytesContainer`, which is itself a workaround for the lack of DST.
unsafe {
let this = self.get();
cast::transmute(this.container_as_bytes())
}
}
}
impl fmt::Default for InternedString { impl fmt::Default for InternedString {
fn fmt(obj: &InternedString, f: &mut fmt::Formatter) { fn fmt(obj: &InternedString, f: &mut fmt::Formatter) {
write!(f.buf, "{}", obj.string); write!(f.buf, "{}", obj.string);
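This is the type the rest of the commit codes against: InternedString still wraps an @str internally, but callers only see new, get, cloning, and equality-style comparisons. As a mental model, a deduplicating interner plus handle might look like the present-day sketch below; it is HashSet-based, all names are hypothetical, and it does not reproduce the task-local table behind get_ident_interner:

    use std::collections::HashSet;
    use std::rc::Rc;

    #[derive(Clone, PartialEq, Eq, Hash)]
    struct Interned(Rc<str>);

    impl Interned {
        fn get(&self) -> &str { &self.0 }
        // Plays the role of Equiv: compare against a plain &str.
        fn equiv(&self, other: &str) -> bool { self.get() == other }
    }

    struct Interner {
        table: HashSet<Rc<str>>,
    }

    impl Interner {
        fn new() -> Interner { Interner { table: HashSet::new() } }

        // Roughly the service intern_and_get_ident provides: equal strings
        // come back as handles sharing one allocation.
        fn intern(&mut self, s: &str) -> Interned {
            if let Some(existing) = self.table.get(s) {
                return Interned(existing.clone());
            }
            let handle: Rc<str> = Rc::from(s);
            self.table.insert(handle.clone());
            Interned(handle)
        }
    }

    fn main() {
        let mut interner = Interner::new();
        let a = interner.intern("doc");
        let b = interner.intern("doc");
        assert!(Rc::ptr_eq(&a.0, &b.0)); // deduplicated
        assert!(a.equiv("doc"));
    }

Interning once and handing out shared handles is what makes the cheap clones and the .get() comparisons used throughout this commit reasonable from a performance standpoint.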

View file

@ -897,7 +897,7 @@ pub fn print_attribute(s: &mut State, attr: &ast::Attribute) {
maybe_print_comment(s, attr.span.lo); maybe_print_comment(s, attr.span.lo);
if attr.node.is_sugared_doc { if attr.node.is_sugared_doc {
let comment = attr.value_str().unwrap(); let comment = attr.value_str().unwrap();
word(&mut s.s, comment); word(&mut s.s, comment.get());
} else { } else {
word(&mut s.s, "#["); word(&mut s.s, "#[");
print_meta_item(s, attr.meta()); print_meta_item(s, attr.meta());
@ -1931,10 +1931,10 @@ pub fn print_meta_item(s: &mut State, item: &ast::MetaItem) {
ibox(s, indent_unit); ibox(s, indent_unit);
match item.node { match item.node {
ast::MetaWord(ref name) => word(&mut s.s, name.get()), ast::MetaWord(ref name) => word(&mut s.s, name.get()),
ast::MetaNameValue(ref name, value) => { ast::MetaNameValue(ref name, ref value) => {
word_space(s, name.get()); word_space(s, name.get());
word_space(s, "="); word_space(s, "=");
print_literal(s, &value); print_literal(s, value);
} }
ast::MetaList(ref name, ref items) => { ast::MetaList(ref name, ref items) => {
word(&mut s.s, name.get()); word(&mut s.s, name.get());
@ -2172,7 +2172,7 @@ pub fn print_literal(s: &mut State, lit: &ast::Lit) {
_ => () _ => ()
} }
match lit.node { match lit.node {
ast::LitStr(st, style) => print_string(s, st, style), ast::LitStr(ref st, style) => print_string(s, st.get(), style),
ast::LitChar(ch) => { ast::LitChar(ch) => {
let mut res = ~"'"; let mut res = ~"'";
char::from_u32(ch).unwrap().escape_default(|c| res.push_char(c)); char::from_u32(ch).unwrap().escape_default(|c| res.push_char(c));